text
stringlengths 7
1.01M
|
|---|
package com.example.greatbook.local.activity;
import android.support.v7.app.AppCompatActivity;
/**
 * Created by MDove on 17/9/15.
 */
// Placeholder screen for the "cooperate topic" feature; no layout, lifecycle
// overrides, or behavior have been added yet.
public class CooperateTopicActivity extends AppCompatActivity{
}
|
/**
* GetLastOrdersForMerchants.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.7.9 Built on : Nov 16, 2018 (12:06:07 GMT)
*/
package merchant;
/**
* GetLastOrdersForMerchants bean class
*/
@SuppressWarnings({"unchecked",
    "unused"
})
// Axis2 ADB (Axis Data Binding) bean auto-generated from the merchant WSDL.
// Wraps the single <request> child element of the getLastOrdersForMerchants
// operation and knows how to serialize itself to / parse itself from StAX.
public class GetLastOrdersForMerchants implements org.apache.axis2.databinding.ADBBean {
    // Qualified name of the wrapper element in the payment-gate merchant namespace.
    public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName("http://engine.paymentgate.ru/webservices/merchant",
            "getLastOrdersForMerchants", "ns1");

    /**
     * field for Request
     */
    protected GetLastOrdersForMerchantsRequest localRequest;

    /**
     * Auto generated getter method.
     * @return the wrapped request payload; may be null if never set
     */
    public GetLastOrdersForMerchantsRequest getRequest() {
        return localRequest;
    }

    /**
     * Auto generated setter method.
     * @param param Request payload to wrap; null is serialized as xsi:nil
     */
    public void setRequest(
        GetLastOrdersForMerchantsRequest param) {
        this.localRequest = param;
    }

    /**
     * Wraps this bean in an OMElement backed by a pull data source; the XML
     * is produced lazily through {@link #serialize}.
     * @param parentQName not used for the element name (MY_QNAME is used)
     * @param factory OM factory used to create the element
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory)
        throws org.apache.axis2.databinding.ADBException {
        return factory.createOMElement(new org.apache.axis2.databinding.ADBDataSource(
                this, MY_QNAME));
    }

    // Convenience overload: serialize without an explicit xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException,
            org.apache.axis2.databinding.ADBException {
        serialize(parentQName, xmlWriter, false);
    }

    // Writes this bean as XML. When serializeType is true, an xsi:type
    // attribute naming the schema type is emitted on the start element.
    public void serialize(final javax.xml.namespace.QName parentQName,
        javax.xml.stream.XMLStreamWriter xmlWriter, boolean serializeType)
        throws javax.xml.stream.XMLStreamException,
            org.apache.axis2.databinding.ADBException {
        String prefix = null;
        String namespace = null;
        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();
        writeStartElement(prefix, namespace, parentQName.getLocalPart(),
            xmlWriter);
        if (serializeType) {
            String namespacePrefix = registerPrefix(xmlWriter,
                    "http://engine.paymentgate.ru/webservices/merchant");
            if ((namespacePrefix != null) &&
                    (namespacePrefix.trim().length() > 0)) {
                writeAttribute("xsi",
                    "http://www.w3.org/2001/XMLSchema-instance", "type",
                    namespacePrefix + ":getLastOrdersForMerchants", xmlWriter);
            } else {
                writeAttribute("xsi",
                    "http://www.w3.org/2001/XMLSchema-instance", "type",
                    "getLastOrdersForMerchants", xmlWriter);
            }
        }
        if (localRequest == null) {
            // A null request is rendered as an empty <request/> element
            // carrying xsi:nil="1".
            writeStartElement(null, "", "request", xmlWriter);
            // write the nil attribute
            writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance",
                "nil", "1", xmlWriter);
            xmlWriter.writeEndElement();
        } else {
            localRequest.serialize(new javax.xml.namespace.QName("", "request"),
                xmlWriter);
        }
        xmlWriter.writeEndElement();
    }

    // Returns the canonical "ns1" prefix for the service namespace, or a
    // freshly generated unique prefix for any other namespace.
    private static String generatePrefix(String namespace) {
        if (namespace.equals(
                    "http://engine.paymentgate.ru/webservices/merchant")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * Utility method to write an element start tag, binding the namespace
     * prefix on first use.
     */
    private void writeStartElement(String prefix,
        String namespace, String localPart,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        String writerPrefix = xmlWriter.getPrefix(namespace);
        if (writerPrefix != null) {
            // Namespace already bound by the writer; reuse its prefix.
            xmlWriter.writeStartElement(writerPrefix, localPart, namespace);
        } else {
            if (namespace.length() == 0) {
                prefix = "";
            } else if (prefix == null) {
                prefix = generatePrefix(namespace);
            }
            xmlWriter.writeStartElement(prefix, localPart, namespace);
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
    }

    /**
     * Util method to write an attribute with the ns prefix.
     */
    private void writeAttribute(String prefix,
        String namespace, String attName,
        String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        String writerPrefix = xmlWriter.getPrefix(namespace);
        if (writerPrefix != null) {
            xmlWriter.writeAttribute(writerPrefix, namespace, attName, attValue);
        } else {
            // Bind the supplied prefix before writing the attribute.
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
            xmlWriter.writeAttribute(prefix, namespace, attName, attValue);
        }
    }

    /**
     * Util method to write an attribute without the ns prefix.
     */
    private void writeAttribute(String namespace,
        String attName, String attValue,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            xmlWriter.writeAttribute(registerPrefix(xmlWriter, namespace),
                namespace, attName, attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute; the QName's own
     * namespace is registered so the value can use a prefix.
     */
    private void writeQNameAttribute(String namespace,
        String attName, javax.xml.namespace.QName qname,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        String attributeNamespace = qname.getNamespaceURI();
        String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(attributePrefix, namespace, attName,
                attributeValue);
        }
    }

    /**
     * method to handle Qnames: writes the QName as prefixed character data.
     */
    private void writeQName(javax.xml.namespace.QName qname,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }
            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" +
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                        qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                        qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                    qname));
        }
    }

    // Writes an array of QNames as a single space-separated character run.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the character data
            StringBuffer stringToWrite = new StringBuffer();
            String namespaceURI = null;
            String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }
                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":")
                                     .append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                            qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix, generating a unique one if the preferred
     * prefix is already taken in the current namespace context.
     */
    private String registerPrefix(
        javax.xml.stream.XMLStreamWriter xmlWriter, String namespace)
        throws javax.xml.stream.XMLStreamException {
        String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext();
            // Keep generating until we find a prefix not already bound.
            while (true) {
                String uri = nsContext.getNamespaceURI(prefix);
                if ((uri == null) || (uri.length() == 0)) {
                    break;
                }
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * Factory class that keeps the parse method.
     */
    public static class Factory {
        private static org.apache.commons.logging.Log log = org.apache.commons.logging.LogFactory.getLog(Factory.class);

        /**
         * static method to create the object.
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static GetLastOrdersForMerchants parse(
            javax.xml.stream.XMLStreamReader reader) throws Exception {
            GetLastOrdersForMerchants object = new GetLastOrdersForMerchants();
            int event;
            javax.xml.namespace.QName currentQName = null;
            String nillableValue = null;
            String prefix = "";
            String namespaceuri = "";
            try {
                // Advance past ignorable events to the start element.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                currentQName = reader.getName();
                if (reader.getAttributeValue(
                            "http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                    String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName != null) {
                        String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1) {
                            nsPrefix = fullTypeName.substring(0,
                                    fullTypeName.indexOf(":"));
                        }
                        nsPrefix = (nsPrefix == null) ? "" : nsPrefix;
                        String type = fullTypeName.substring(fullTypeName.indexOf(
                                    ":") + 1);
                        if (!"getLastOrdersForMerchants".equals(type)) {
                            //find namespace for the prefix
                            String nsUri = reader.getNamespaceContext()
                                                 .getNamespaceURI(nsPrefix);
                            // xsi:type names a different schema type; delegate
                            // to the mapper to build the matching subtype bean.
                            return (GetLastOrdersForMerchants) ExtensionMapper.getTypeObject(nsUri,
                                type, reader);
                        }
                    }
                }
                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();
                reader.next();
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement() &&
                        new javax.xml.namespace.QName("", "request").equals(
                            reader.getName())) {
                    nillableValue = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "nil");
                    if ("true".equals(nillableValue) ||
                            "1".equals(nillableValue)) {
                        // Explicit xsi:nil: the request element is present but empty.
                        object.setRequest(null);
                        reader.next();
                        reader.next();
                    } else {
                        object.setRequest(GetLastOrdersForMerchantsRequest.Factory.parse(
                                reader));
                        reader.next();
                    }
                } // End of if for expected property start element
                else {
                    // 1 - A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException(
                        "Unexpected subelement " + reader.getName());
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement()) {
                    // 2 - A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException(
                        "Unexpected subelement " + reader.getName());
                }
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new Exception(e);
            }
            return object;
        }
    } //end of factory class
}
|
package com.sendtomoon.eroica2.allergo.impl;
import com.sendtomoon.eroica2.allergo.AllergoManager;
import com.sendtomoon.eroica2.allergo.AllergoManagerFactory;
import com.sendtomoon.eroica.common.utils.URLUtils;
public class ClassPathAllergoManagerFactory implements AllergoManagerFactory {
    /**
     * Builds a classpath-backed {@link AllergoManager}. Local backup is
     * always disabled, and the root path defaults to "/" when the caller
     * did not configure one.
     */
    @Override
    public AllergoManager create(URLUtils configURL) {
        ClassPathAllergoManager manager = new ClassPathAllergoManager();
        // Normalize the configuration URL before handing it to the manager.
        URLUtils effectiveUrl = configURL.addParameter("localBackup", false);
        if (effectiveUrl.getParameter("rootPath") == null) {
            effectiveUrl = effectiveUrl.addParameter("rootPath", "/");
        }
        manager.init(effectiveUrl);
        return manager;
    }
}
|
package com.sohu.tv.mq.cloud.dao;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import com.sohu.tv.mq.cloud.Application;
import com.sohu.tv.mq.cloud.bo.AuditConsumer;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = Application.class)
public class AuditConsumerDaoTest {
    @Autowired
    private AuditConsumerDao auditConsumerDao;

    /**
     * Persists a minimal audit-consumer record to exercise the insert mapping.
     */
    @Test
    public void insertAuditConsumer() {
        // Fixture: same field values as before, assembled into one record.
        AuditConsumer record = new AuditConsumer();
        record.setAid(1);
        record.setTid(1);
        record.setConsumer("c1");
        record.setConsumeWay(1);
        auditConsumerDao.insert(record);
    }
}
|
package external_module_0952.a;
import javax.annotation.processing.*;
import javax.lang.model.*;
import javax.management.*;
/**
* Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut
* labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum.
* Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.
*
* @see java.io.File
* @see java.rmi.Remote
* @see java.nio.file.FileStore
*/
@SuppressWarnings("all")
// Auto-generated filler class; fields and methods are placeholders with no
// real domain meaning.
public abstract class Foo0<K> implements external_module_0952.a.IFoo0<K> {
    // Placeholder fields; never read or written by this class.
    java.sql.Array f0 = null;
    java.util.logging.Filter f1 = null;
    java.util.zip.Deflater f2 = null;
    // The value held by this container; exposed via get()/set().
    public K element;
    // Shared mutable instance; must be assigned externally before
    // getInstance()/call() are usable.
    public static Foo0 instance;
    public static Foo0 getInstance() {
        return instance;
    }
    // Factory stub: always returns null regardless of input.
    public static <T> T create(java.util.List<T> input) {
        return null;
    }
    // NOTE(review): throws NullPointerException when element is null —
    // confirm callers always set element first.
    public String getName() {
        return element.toString();
    }
    // Intentionally a no-op; the name cannot be changed through this class.
    public void setName(String string) {
        return;
    }
    public K get() {
        return element;
    }
    // Unchecked cast: the caller is responsible for passing a K-compatible object.
    public void set(Object element) {
        this.element = (K)element;
    }
    // NOTE(review): delegates to getInstance().call(); if the shared instance
    // is this object (or any Foo0 that does not override call), this recurses
    // without a base case. NPEs if instance was never assigned.
    public K call() throws Exception {
        return (K)getInstance().call();
    }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.netbeans.modules.bamboo.client.rest.call;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Level;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.netbeans.modules.bamboo.client.glue.HttpUtility;
import org.netbeans.modules.bamboo.model.rcp.DefaultInstanceValues;
import org.netbeans.modules.bamboo.model.rcp.ResultExpandParameter;
import org.netbeans.modules.bamboo.model.rest.Change;
import org.netbeans.modules.bamboo.model.rest.Files;
import org.netbeans.modules.bamboo.model.rest.Issue;
import org.netbeans.modules.bamboo.model.rest.Result;
import org.netbeans.modules.bamboo.model.rest.ResultsResponse;
import static java.util.Collections.singletonMap;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assumptions.assumeFalse;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
import static org.netbeans.modules.bamboo.client.glue.ExpandParameter.EXPAND;
import static org.netbeans.modules.bamboo.client.glue.ExpandParameter.RESULT_COMMENTS;
import static org.netbeans.modules.bamboo.client.glue.RestResources.RESULT;
import static org.netbeans.modules.bamboo.client.glue.RestResources.RESULTS;
/**
*
* @author Mario Schroeder
*/
// Integration tests for the Bamboo REST XML endpoints. Every test is skipped
// (via assumptions) when the configured server is unreachable.
class BambooCallerXmlIT {
    private static final String FOO = "foo";
    private static final String URL = "url";

    private WebTargetFactory factory;
    private static Properties props;
    private final HttpUtility httpUtility;

    BambooCallerXmlIT() {
        this.httpUtility = new HttpUtility();
    }

    /**
     * Loads the connection properties once for the whole test class.
     * Uses try-with-resources so the classpath stream is closed even when
     * {@code load} fails (the original version leaked the InputStream).
     */
    @BeforeAll
    static void prepare() throws IOException {
        props = new Properties();
        try (InputStream input = BambooCallerXmlIT.class.getResourceAsStream("bamboo.properties")) {
            props.load(input);
        }
    }

    @BeforeEach
    void setUp() {
        DefaultInstanceValues values = new DefaultInstanceValues();
        values.setName(FOO);
        values.setUrl(props.getProperty(URL));
        values.setUsername(props.getProperty("user"));
        values.setPassword(props.getProperty("password").toCharArray());
        factory = new WebTargetFactory(values, Level.FINE);
    }

    /** @return true when the configured Bamboo server answers HTTP requests. */
    private boolean existsUrl() {
        return httpUtility.exists(props.getProperty(URL));
    }

    /** @return the REST path for the single result named by {@code result.key}. */
    private String newResultPath() {
        String key = props.getProperty("result.key");
        return String.format(RESULT, key);
    }

    /** Fetches the results collection expanded with comments. */
    private ResultsResponse requestResults() {
        Map<String, String> params = singletonMap(EXPAND, RESULT_COMMENTS);
        WebTarget webTarget = factory.create(RESULTS, params);
        return webTarget.request().accept(MediaType.APPLICATION_XML).get(ResultsResponse.class);
    }

    /** Fetches a single result expanded with the given expand parameter. */
    private Result requestResult(String expand) {
        Map<String, String> params = singletonMap(EXPAND, expand);
        WebTarget webTarget = factory.create(newResultPath(), params);
        return webTarget.request().accept(MediaType.APPLICATION_XML).get(Result.class);
    }

    // Package-private like the sibling tests: JUnit 5 does not require public.
    @Test
    void testGetResults_SizeGtZero() {
        assumeTrue(existsUrl());
        final int size = requestResults().getResults().getSize();
        assertTrue(size > 0);
    }

    @Test
    void testGetResults_ResultsNotEmpty() {
        assumeTrue(existsUrl());
        Collection<Result> results = requestResults().asCollection();
        assertFalse(results.isEmpty());
    }

    @Test
    void testGetChanges_FilesNotEmpty() {
        assumeTrue(existsUrl());
        Result response = requestResult(ResultExpandParameter.Changes.toString());
        Collection<Change> changes = response.getChanges().asCollection();
        // Skip (don't fail) when the result has no changes at all.
        assumeFalse(changes.isEmpty());
        Files files = changes.iterator().next().getFiles();
        assertFalse(files.asCollection().isEmpty());
    }

    @Test
    void testGetChanges_ChangeSetIdNotEmpty() {
        assumeTrue(existsUrl());
        Result response = requestResult(ResultExpandParameter.Changes.toString());
        Collection<Change> changes = response.getChanges().asCollection();
        assumeFalse(changes.isEmpty());
        Change first = changes.iterator().next();
        assertFalse(first.getChangesetId().isEmpty());
    }

    @Test
    void testGetJiraIssues_ResultNotEmpty() {
        assumeTrue(existsUrl());
        Result response = requestResult(ResultExpandParameter.Jira.toString());
        Collection<Issue> issues = response.getJiraIssues().asCollection();
        assertFalse(issues.isEmpty());
    }
}
|
/**
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.dekorate.utils;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* @author <a href="claprun@redhat.com">Christophe Laprun</a>
*/
public class SanitizeRemoteUrlTest {
    /**
     * Each CSV row is "input, expected": scp-like (git@host:org/repo.git) and
     * git+ssh remotes must be normalized to plain https URLs ending in .git,
     * while https inputs pass through unchanged.
     */
    @ParameterizedTest(name = "{0} should be sanitized to {1}")
    @CsvSource({
        "git@github.com:myorg/myproject.git, https://github.com/myorg/myproject.git",
        "https://github.com/myorg/myproject.git, https://github.com/myorg/myproject.git",
        "git+ssh://git@github.com/halkyonio/operator, https://github.com/halkyonio/operator.git",
        "https://gitlab.com/foo/bar.git, https://gitlab.com/foo/bar.git",
        "git@gitlab.com:foo/bar.git, https://gitlab.com/foo/bar.git",
        "git+ssh://git@gitlab.com/foo/bar.git, https://gitlab.com/foo/bar.git",
    })
    void sanitizeRemoteUrlShouldWork(String original, String expected) {
        assertEquals(expected, Git.sanitizeRemoteUrl(original));
    }
}
|
package ru.intertrust.cm.core.dao.impl.sqlparser;
import java.util.HashMap;
import net.sf.jsqlparser.statement.select.Select;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.test.util.ReflectionTestUtils;
import ru.intertrust.cm.core.business.api.dto.Id;
import ru.intertrust.cm.core.business.api.dto.impl.RdbmsId;
import ru.intertrust.cm.core.dao.access.UserGroupGlobalCache;
import ru.intertrust.cm.core.dao.api.CurrentUserAccessor;
import ru.intertrust.cm.core.dao.api.RequestInfo;
import ru.intertrust.cm.core.dao.api.SecurityStamp;
import ru.intertrust.cm.core.dao.impl.DomainObjectQueryHelper;
import ru.intertrust.cm.core.dao.impl.PGSqlDomainObjectQueryHelperOptimized;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Strictly speaking, from a unit-testing point of view this is not a proper test class,
 * because the methods inside all depend on one another. It is kept anyway because this
 * kind of test surfaces quite a lot of real problems, which is why it was written.
 *
 * Caution: do not introduce different type hierarchies for the same tables.
 */
public class AddingAclOptimizedVisitorWithCachesTest {
// Expected WITH-clause prefix that the visitor adds when security stamps are
// enabled for a type.
private static final String STAMP_SUBQUERY = "WITH person_stamp_values AS (SELECT stamp FROM person_stamp WHERE person = :user_id), ";

// Immutable test fixture describing a user together with its privilege flags.
private static class User {
    private final Id id;
    private final String login;
    private final boolean isAdmin;
    private final boolean isSuperUser;
    public User(Id id, String login, boolean isAdmin, boolean isSuperUser) {
        this.id = id;
        this.login = login;
        this.isAdmin = isAdmin;
        this.isSuperUser = isSuperUser;
    }
    public Id getId() {
        return id;
    }
    public String getLogin() {
        return login;
    }
    public boolean isAdmin() {
        return isAdmin;
    }
    public boolean isSuperUser() {
        return isSuperUser;
    }
}
// Template of the groups CTE the visitor is expected to emit; both %s slots
// receive the root table name.
private static final String GROUPS_SUBQUERY = "%s_read_tmp AS (SELECT DISTINCT gg.\"parent_group_id\" FROM \"group_member\" gm INNER JOIN \"group_group\" gg ON gg.\"child_group_id\" = gm.\"usergroup\" INNER JOIN \"%s_read\" r ON r.\"group_id\" = gg.\"parent_group_id\" WHERE gm.\"person_id\" = :user_id) ";

// In-memory user fixtures, addressable by id and by login.
private final HashMap<Id, User> users = new HashMap<Id, User>();
private final HashMap<String, User> usersByLogin = new HashMap<String, User>();
private final FakeConfigurationExplorer configurationExplorer = new FakeConfigurationExplorer();

// Id returned by the accessor stub below; tests switch users by reassigning it.
private Id currentUser;

// Stub accessor backed by the fixtures above; ticket/request methods are no-ops.
private final CurrentUserAccessor accessor = new CurrentUserAccessor() {
    @Override
    public Id getCurrentUserId() {
        return currentUser;
    }
    @Override
    public String getCurrentUser() {
        return users.get(currentUser).getLogin();
    }
    @Override
    public void setTicket(String ticket) {
    }
    @Override
    public void cleanTicket() {
    }
    @Override
    public RequestInfo getRequestInfo() {
        return null;
    }
    @Override
    public void setRequestInfo(RequestInfo requestInfo) {
    }
};

// Stub cache that answers privilege queries from the same fixtures.
private final UserGroupGlobalCache userCache = new UserGroupGlobalCache() {
    @Override
    public boolean isPersonSuperUser(Id personId) {
        return users.get(personId).isSuperUser();
    }
    @Override
    public boolean isAdministrator(Id personId) {
        return users.get(personId).isAdmin();
    }
    @Override
    public Id getUserIdByLogin(String login) {
        return usersByLogin.get(login).getId();
    }
    @Override
    public void cleanCache() {
    }
    @Override
    public boolean isInfoSecAuditor(Id personId) {
        return false;
    }
};

// Helper under test support: PostgreSQL-optimized query helper plus a mocked
// security stamp (configured per test).
private final DomainObjectQueryHelper queryHelper = new PGSqlDomainObjectQueryHelperOptimized();
private SecurityStamp securityStamp;
@Before
public void setUp() {
    // Wire the query helper to the fake configuration and the in-memory
    // user/group fixtures defined above.
    queryHelper.setConfigurationExplorer(configurationExplorer);
    queryHelper.setCurrentUserAccessor(accessor);
    queryHelper.setUserGroupCache(userCache);
    securityStamp = mock(SecurityStamp.class);
    // Inject the mock into the helper's private field.
    ReflectionTestUtils.setField(queryHelper, "securityStamp", securityStamp);
    // Default non-privileged user used by most tests.
    addUser(new User(new RdbmsId(1, 1), "user", false, false));
}
/** A table that is not a configured domain-object type must be left untouched. */
@Test
public void testNoAclForNonTypeTable() {
    AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    SqlQueryParser parser = new SqlQueryParser("select id from documents2");
    Select select = parser.getSelectStatement();
    String expected = select.toString();
    select.accept(visitor);
    assertEquals(expected, select.toString());
}

/** A configured standalone type gets the groups CTE plus an ACL subquery. */
@Test
public void testSingleType1() {
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("documents1")));
    AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    SqlQueryParser parser = new SqlQueryParser("select id from documents1");
    Select select = parser.getSelectStatement();
    String expected = groupsSubquery("documents1") + "SELECT id FROM " + aclSubquery("documents1", "documents1", "documents1", null);
    select.accept(visitor);
    assertEquals(expected, select.toString());
}

// TODO! This case demonstrates a problem when the security stamp is added. The test itself
// will not fail, but because it writes values into the cache, the test above will fail.
// Without the cache (see the child implementation) the tests pass correctly. That only costs
// a small loss in query execution speed; an issue will be filed for it.
@Ignore
@Test
public void testSingleType1_with_stamp() {
    when(securityStamp.isSupportSecurityStamp("documents3")).thenReturn(true);
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("documents3")));
    AddAclVisitor visitor = new AddAclVisitor(configurationExplorer, userCache, accessor, queryHelper);
    SqlQueryParser parser = new SqlQueryParser("select id from documents3");
    Select select = parser.getSelectStatement();
    String expected = STAMP_SUBQUERY + groupsSubqueryWithoutWith("documents3") + "SELECT id FROM " + aclSubquery("documents3", "documents3", "documents3", null, true);
    select.accept(visitor);
    assertEquals(expected, select.toString());
}

/** A child type must take its groups CTE and ACL from the root of its hierarchy. */
@Test
public void testUsageOfParentType() {
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("base_documents")));
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("documents").parent("base_documents")));
    AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    SqlQueryParser parser = new SqlQueryParser("select id from documents");
    Select select = parser.getSelectStatement();
    String expected = groupsSubquery("base_documents") + "SELECT id FROM " + aclSubquery("documents", "base_documents", "base_documents", null);
    select.accept(visitor);
    assertEquals(expected, select.toString());
}
/** Full groups CTE (including the WITH keyword) expected at the start of a rewritten query. */
private String groupsSubquery(String table) {
    return "WITH " + groupsSubqueryWithoutWith(table);
}

/** Groups CTE body without the WITH keyword, for appending to an existing WITH list. */
private String groupsSubqueryWithoutWith(String table) {
    return String.format(GROUPS_SUBQUERY, table, table);
}
/** A type linked to another must use the link target's root for the groups CTE. */
@Test
public void testUsageOfLinkedType() {
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("base_documents")));
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("linked_attribute").linkedTo("base_documents", "base_document_id")));
    AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    SqlQueryParser parser = new SqlQueryParser("select string_value from linked_attribute");
    Select select = parser.getSelectStatement();
    String expected = groupsSubquery("base_documents") + "SELECT string_value FROM " + aclSubquery("linked_attribute", "base_documents", "linked_attribute", null);
    select.accept(visitor);
    assertEquals(expected, select.toString());
}

/** Table lookup must ignore the case used when the types were configured. */
@Test
public void testCaseInsensitiveness() {
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("Base_Documents")));
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("Documents", false).parent("Base_Documents")));
    AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    SqlQueryParser parser = new SqlQueryParser("select id from documents d join base_documents bd on bd.id = d.id");
    Select select = parser.getSelectStatement();
    String expected = groupsSubquery("base_documents")
            + "SELECT id FROM " + aclSubquery("documents", "base_documents", "base_documents", "d")
            + " JOIN base_documents bd ON bd.id = d.id";
    select.accept(visitor);
    assertEquals(expected, select.toString());
}

/** A scalar subselect with no FROM clause must not break the rewrite. */
@Test
public void testSubslectWithoutFrom() {
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("Base_Documents")));
    AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    SqlQueryParser parser = new SqlQueryParser("select id, (select coaleasce(bd.a, bd.b)) ab from base_documents bd");
    Select select = parser.getSelectStatement();
    String expected = groupsSubquery("base_documents")
            + select.toString().replaceFirst("base_documents bd", aclSubquery("base_documents", "base_documents", "base_documents", "bd"));
    select.accept(visitor);
    assertEquals(expected, select.toString());
}

/** An existing WITH clause must be merged with the generated groups CTE. */
@Test
public void testSelectWithWith() {
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("Base_Documents")));
    AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    SqlQueryParser parser = new SqlQueryParser("with t as(select x from y where z = 0) select id from base_documents bd where id = t.x");
    Select select = parser.getSelectStatement();
    String expected = groupsSubquery("base_documents").trim() + ", " + select.toString().replace("WITH ", "").trim()
            .replaceFirst("base_documents bd", aclSubquery("base_documents", "base_documents", "base_documents", "bd"));
    select.accept(visitor);
    assertEquals(expected, select.toString());
}
@Test
public void testUsageOfLinkedTypeWithParent() {
configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("base_documents")));
configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("documents", false).parent("base_documents")));
configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("linked_attribute").linkedTo("documents", "document_id")));
configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("linked_attribute_child", false).parent("linked_attribute")));
AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
SqlQueryParser parser = new SqlQueryParser("select string_value from linked_attribute_child");
Select select = parser.getSelectStatement();
String expected = groupsSubquery("base_documents") + "SELECT string_value FROM " + aclSubquery("linked_attribute_child", "base_documents", "linked_attribute", null);
select.accept(visitor);
assertEquals(expected, select.toString());
}
@Test
public void testJoinOfIndependentTables() {
    // Two unrelated top-level types: each side of the join must get its own groups CTE and ACL wrapper.
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("a"));
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("b"));
    AddingAclOptimizedVisitor aclVisitor =
            new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    Select statement = new SqlQueryParser("select id from a join b on a.x = b.x").getSelectStatement();
    String expectedSql = groupsSubquery("b").trim() + ", " + groupsSubqueryWithoutWith("a")
            + "SELECT id FROM " + aclSubquery("a", "a", "a", null)
            + " JOIN " + aclSubquery("b", "b", "b", null) + " ON a.x = b.x";
    statement.accept(aclVisitor);
    assertEquals(expectedSql, statement.toString());
}
@Test
public void testSubquery() {
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("a"));
    AddingAclOptimizedVisitor aclVisitor =
            new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    Select statement = new SqlQueryParser(
            "select id, created_date, abc from (select id, created_date, s || b as abc from a) t")
            .getSelectStatement();
    // Only the inner table reference gets ACL-wrapped; the outer derived-table query stays untouched.
    String expectedSql = groupsSubquery("a")
            + "SELECT id, created_date, abc FROM (SELECT id, created_date, s || b AS abc FROM "
            + aclSubquery("a", "a", "a", null)
            + ") t";
    statement.accept(aclVisitor);
    assertEquals(expectedSql, statement.toString());
}
@Test
public void testExists() {
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("a"));
    AddingAclOptimizedVisitor aclVisitor =
            new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    Select statement = new SqlQueryParser(
            "select id, created_date, abc from x where exists (select id from a where x.n = a.n)")
            .getSelectStatement();
    // "x" is not a configured type, so only the table inside EXISTS gets the ACL wrapper.
    String expectedSql = groupsSubquery("a")
            + "SELECT id, created_date, abc FROM x WHERE EXISTS (SELECT id FROM "
            + aclSubquery("a", "a", "a", null)
            + " WHERE x.n = a.n)";
    statement.accept(aclVisitor);
    assertEquals(expectedSql, statement.toString());
}
@Test
public void testEliminateExcessiveAcl() {
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("base_documents"));
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("documents", false).parent("base_documents"));
    AddingAclOptimizedVisitor aclVisitor =
            new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    Select statement = new SqlQueryParser("select id from documents d join base_documents bd on bd.id = d.id")
            .getSelectStatement();
    // The joined parent table must NOT be wrapped a second time — only the child reference is.
    String expectedSql = groupsSubquery("base_documents")
            + "SELECT id FROM "
            + aclSubquery("documents", "base_documents", "base_documents", "d")
            + " JOIN base_documents bd ON bd.id = d.id";
    statement.accept(aclVisitor);
    assertEquals(expectedSql, statement.toString());
}
@Test
public void testEliminateExcessiveAclMoreComplex() {
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("base_documents"));
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("documents", false).parent("base_documents"));
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("documents_m").linkedTo("documents", "owner"));
    AddingAclOptimizedVisitor aclVisitor =
            new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    Select statement = new SqlQueryParser(
            "select id from documents d join base_documents bd on bd.id = d.id left join documents_m dm on dm.owner = d.id")
            .getSelectStatement();
    // Only the leading "documents" reference is ACL-wrapped; both joined tables stay bare.
    String expectedSql = groupsSubquery("base_documents")
            + "SELECT id FROM "
            + aclSubquery("documents", "base_documents", "base_documents", "d")
            + " JOIN base_documents bd ON bd.id = d.id"
            + " LEFT JOIN documents_m dm ON dm.owner = d.id";
    statement.accept(aclVisitor);
    assertEquals(expectedSql, statement.toString());
}
@Test
public void testEliminateExcessiveAclEvenMoreComplex() {
    // Hierarchy: attribute extends base_attribute and links to base_documents via "root";
    // attribute_attribute links to base_attribute via "root".
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("base_documents")));
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("documents", false).parent("base_documents")));
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("base_attribute", false)));
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("attribute").parent("base_attribute").linkedTo("base_documents", "root")));
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("attribute_attribute").linkedTo("base_attribute", "root")));
    AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    // NOTE(review): the joins reference "document"/"base_document" (singular), which are not
    // configured types — presumably intentional so that only "attribute a" gets the ACL wrapper; confirm.
    SqlQueryParser parser = new SqlQueryParser(
            "select count(*) from "
            + "(select b.id, b.created_date, b.x from attribute a"
            + " left join document d on d.id = a.root"
            + " left join base_document bd on bd.id = d.id"
            + " left join base_attribute ba on ba.id = a.id"
            + " left join attribute_attribute aa on aa.root = a.id"
            + " where a.t is not null and ba.x <> '1') t"
    );
    Select select = parser.getSelectStatement();
    // Expected result: the pre-visit SQL with only "FROM attribute a" replaced by its ACL subquery,
    // prefixed by the groups CTE. Must be computed BEFORE accept() mutates the statement.
    String expected = groupsSubquery("base_documents")
            + select.toString().replaceAll("FROM attribute a", "FROM " + aclSubquery("attribute", "base_documents", "base_attribute", "a"));
    select.accept(visitor);
    assertEquals(expected, select.toString());
}
@Test
public void testBasicQueryStartsWithWithRecursive() {
    configurationExplorer.createTypeConfig((new FakeConfigurationExplorer.TypeConfigBuilder("base_documents")));
    AddingAclOptimizedVisitor visitor = new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    SqlQueryParser parser = new SqlQueryParser("with recursive t as (select id from base_documents bd) select id from t");
    Select select = parser.getSelectStatement();
    // The expected string is assembled from the pre-visit SQL:
    //  - the groups CTE is promoted to "WITH RECURSIVE" (the trailing-space replaceAll adjusts
    //    spacing after ":user_id)"; the parentheses are escaped because replaceAll takes a regex);
    //  - the query's own "WITH RECURSIVE" keyword turns into "," so its CTE is appended to ours;
    //  - the table reference is swapped for the ACL subquery.
    String expected = groupsSubquery("base_documents").replaceAll("WITH", "WITH RECURSIVE").replaceAll(":user_id\\) ", ":user_id\\)")
            + select.toString().replaceAll("WITH RECURSIVE", ",")
            .replaceAll("FROM base_documents bd", "FROM " + aclSubquery("base_documents", "base_documents", "base_documents", "bd"));
    select.accept(visitor);
    assertEquals(expected, select.toString());
}
@Test
public void testEliminateExcessiveAclInSubquery() {
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("documents1"));
    configurationExplorer.createTypeConfig(new FakeConfigurationExplorer.TypeConfigBuilder("attributes").linkedTo("documents1", "Owner"));
    AddingAclOptimizedVisitor aclVisitor =
            new AddingAclOptimizedVisitor(configurationExplorer, userCache, accessor, queryHelper);
    Select statement = new SqlQueryParser(
            "select id, (select string_agg(v, ', ') from attributes where owner = d.id) from documents1 d")
            .getSelectStatement();
    // Expected SQL is derived from the statement before the visitor rewrites it: only the
    // outer table reference gets ACL-wrapped, the correlated scalar subquery stays bare.
    String expectedSql = groupsSubquery("documents1")
            + statement.toString().replaceAll("FROM documents1 d", "FROM " + aclSubquery("documents1", "documents1", "documents1", "d"));
    statement.accept(aclVisitor);
    assertEquals(expectedSql, statement.toString());
}
/**
 * Convenience overload of {@link #aclSubquery(String, String, String, String, boolean)}
 * that builds the expected ACL subquery without the security-stamp checks.
 *
 * @param type               domain-object type whose table reference is wrapped
 * @param aclType            type whose "_read_tmp" ACL table is consulted
 * @param accessObjectIdType type that carries the access_object_id resolution
 * @param alias              SQL alias of the wrapped table, or {@code null} to reuse {@code type}
 */
private String aclSubquery(String type, String aclType, String accessObjectIdType, String alias) {
    return aclSubquery(type, aclType, accessObjectIdType, alias, false);
}
/**
 * Builds the expected ACL-filtering subquery that the visitor is supposed to
 * substitute for a plain table reference.
 *
 * <p>The subquery selects all columns of {@code type} and filters rows by an
 * EXISTS check against the {@code aclType + "_read_tmp"} permissions table.
 * When {@code type} differs from {@code accessObjectIdType}, an extra join
 * resolves the row's access_object_id through the latter type's table.
 *
 * @param type               table being wrapped
 * @param aclType            type owning the "_read_tmp" ACL table
 * @param accessObjectIdType type used to resolve access_object_id
 * @param alias              alias for the resulting subquery ({@code type} itself when null)
 * @param useStamps          whether to include the security-stamp EXISTS check
 */
private String aclSubquery(String type, String aclType, String accessObjectIdType, String alias, boolean useStamps) {
    return "(SELECT "
            + type
            + ".* FROM "
            + quote(type)
            + " "
            + type
            + " WHERE 1 = 1 " +
            // security-stamp checks
            (useStamps ?
                    "AND EXISTS (" +
                    "SELECT 1 FROM " + type + " ptf WHERE ptf.id = " + type + ".access_object_id AND (ptf.security_stamp IS NULL OR " +
                    "ptf.security_stamp IN (SELECT stamp FROM person_stamp_values))) " : "") +
            // read-permission checks:
            "AND EXISTS ("
            + "SELECT 1 FROM \""
            + aclType
            + "_read_tmp\" r "
            // extra join only needed when the wrapped type does not itself carry access_object_id
            + (type.equals(accessObjectIdType) ? "" : "INNER JOIN \""
            + accessObjectIdType
            + "\" rt ON r.\"object_id\" = rt.\"access_object_id\" ")
            + (type.equals(accessObjectIdType) ? "WHERE r.\"object_id\" = " : "WHERE rt.\"id\" = ")
            + type
            + (type.equals(accessObjectIdType) ? ".\"access_object_id\" LIMIT 1)) " : ".\"id\" LIMIT 1)) ")
            + (alias == null ? type : alias);
}
/** Wraps the given type name in double quotes, forming a quoted SQL identifier. */
private String quote(String typeName) {
    return '"' + typeName + '"';
}
/**
 * Registers a test user in both lookup maps, keyed by id and by login.
 */
private void addUser(User user) {
    users.put(user.getId(), user);
    usersByLogin.put(user.getLogin(), user);
}
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
import java.math.BigDecimal;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
/**
 * A WritableConstantByteObjectInspector is a WritableByteObjectInspector
 * that implements ConstantObjectInspector: it always reports the same
 * fixed {@link ByteWritable} value regardless of the inspected object.
 */
public class WritableConstantByteObjectInspector extends
    WritableByteObjectInspector implements
    ConstantObjectInspector {
  // The constant value this inspector reports.
  private ByteWritable value;

  // No-arg constructor kept for frameworks/subclasses; leaves value unset.
  protected WritableConstantByteObjectInspector() {
    super();
  }

  WritableConstantByteObjectInspector(ByteWritable value) {
    super();
    this.value = value;
  }

  @Override
  public ByteWritable getWritableConstantValue() {
    return value;
  }

  @Override
  public int precision() {
    // Number of significant decimal digits of the constant byte value.
    // NOTE(review): throws NPE if value was never set via the package-private
    // constructor — presumably callers always supply a value; confirm.
    return BigDecimal.valueOf(value.get()).precision();
  }
}
|
package lsieun.bytecode.classfile;
import lsieun.utils.radix.ByteUtils;
/**
 * Class-file node holding the fields_count item — the number of entries in the
 * fields table (presumably the u2 fields_count of the JVM class-file format;
 * confirm against ByteUtils usage).
 */
public final class FieldsCount extends Node {
    // Numeric value parsed from the raw bytes at offset 0.
    private final int value;

    public FieldsCount(byte[] bytes) {
        super.setBytes(bytes);
        this.value = ByteUtils.bytesToInt(bytes, 0);
    }

    /** Returns the parsed fields count. */
    public int getValue() {
        return value;
    }

    /** Visitor-pattern dispatch over class-file nodes. */
    @Override
    public void accept(Visitor obj) {
        obj.visitFieldsCount(this);
    }
}
|
package com.xiaojiezhu.jrc.client.spring;
import com.xiaojiezhu.jrc.client.JrcConfig;
import com.xiaojiezhu.jrc.client.JrcConfigFactory;
import com.xiaojiezhu.jrc.kit.JrcConstant;
import com.xiaojiezhu.jrc.kit.JrcUtil;
import com.xiaojiezhu.jrc.kit.exception.ConfigNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.context.event.ApplicationEnvironmentPreparedEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.stereotype.Component;
import java.util.Map;
/**
 * Injects the remote jrc configuration into the Spring environment as soon as
 * the environment is prepared (i.e. before any beans are created), so that
 * remote properties are visible to the rest of the application context.
 *
 * @author xiaojie.zhu
 */
@Component
public class JrcSpringConfiguration implements ApplicationListener<ApplicationEnvironmentPreparedEvent> {
    public final static Logger LOG = LoggerFactory.getLogger(JrcSpringConfiguration.class);

    /** Strategy that writes a config map into the Spring {@link ConfigurableEnvironment}. */
    private ConfigInject<ConfigurableEnvironment> configInject = new SpringConfigInject();

    /** Source of the remote jrc configuration properties. */
    private PropertiesCreator propertiesCreator = PropertiesCreatorFactory.getPropertiesCreator();

    /**
     * Fetches the remote config map and injects it into the environment.
     *
     * @throws ConfigNotFoundException if the remote configuration is missing or empty
     */
    @Override
    public void onApplicationEvent(ApplicationEnvironmentPreparedEvent event) {
        // jrc can be switched off globally; in that case leave the environment untouched.
        boolean enableJrcConfig = JrcUtil.isEnableJrcConfig();
        if (!enableJrcConfig) {
            LOG.warn("the jrc config is disabled");
            return;
        }
        Map<String, ?> configMap = propertiesCreator.getConfigMap();
        if (configMap == null || configMap.isEmpty()) {
            // Fail fast: an absent/empty remote config almost certainly means misconfiguration.
            throw new ConfigNotFoundException("can not find the remote config from jrc, please check your config");
        }
        configInject.inject(event.getEnvironment(), configMap);
    }
}
|
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.ElapsedTime;
@Autonomous(name="red full auto", group="Knightbot")
//@Disabled
public class RedUp extends LinearOpMode {
    HardwareKnightbot robot = new HardwareKnightbot(); // Use a Knightbot's hardware
    private ElapsedTime runtime = new ElapsedTime();

    /**
     * Time-based autonomous routine for the red alliance: grabs/holds with the
     * arm, drives forward to score, backs off, strafes left to park.
     * All moves are open-loop (fixed power for a fixed number of milliseconds).
     */
    @Override
    public void runOpMode() {
        // Map hardware and signal readiness to the driver station.
        robot.init(hardwareMap);
        telemetry.addData("Status", "Ready to run");
        telemetry.update();
        waitForStart();
        // Release the claw, then drive the arm at -0.8 briefly and hold at -0.2
        // (negative power presumably raises/holds the arm — TODO confirm direction).
        openClaw();
        robot.arm.setPower(-.8);
        sleep(500);
        robot.arm.setPower(-.2);
        stopMotors();
        sleep(1000);
        // Drive toward the target for 1.2 s.
        forward(1200);
        stopMotors();
        sleep(1000);
        // Nudge the arm the opposite way for 0.3 s.
        robot.arm.setPower(.1);
        sleep(300);
        stopMotors();
        sleep(1000);
        // Back away for 1.3 s.
        backward(1300);
        stopMotors();
        sleep(1000);
        // Re-hold the arm as before.
        robot.arm.setPower(-.8);
        sleep(500);
        robot.arm.setPower(-.2);
        stopMotors();
        sleep(1000);
        // Strafe left, push forward, let the arm settle, then strafe left again to park.
        parallelLeft(1000);
        sleep(1000);
        forward(700);
        robot.arm.setPower(.05);
        sleep(10);
        robot.arm.setPower(0);
        sleep(1000);
        parallelLeft(1300);
        stopMotors();
    }

    /** Drives straight forward for {@code time} ms, then stops (all powers negated). */
    private void forward(int time) {
        robot.frontLeft.setPower(-robot.FL_POWER);
        robot.frontRight.setPower(-robot.FR_POWER);
        robot.backLeft.setPower(-robot.BL_POWER);
        robot.backRight.setPower(-robot.BR_POWER);
        sleep(time);
        stopMotors();
    }

    /** Drives straight backward for {@code time} ms, then stops. */
    private void backward(int time) {
        robot.frontLeft.setPower(robot.FL_POWER);
        robot.frontRight.setPower(robot.FR_POWER);
        robot.backLeft.setPower(robot.BL_POWER);
        robot.backRight.setPower(robot.BR_POWER);
        sleep(time);
        stopMotors();
    }

    /** Spins in place (left side forward, right side reversed) for {@code time} ms. */
    private void turnLeft(int time) {
        robot.frontLeft.setPower(robot.FL_POWER);
        robot.frontRight.setPower(-robot.FR_POWER);
        robot.backLeft.setPower(robot.BL_POWER);
        robot.backRight.setPower(-robot.BR_POWER);
        sleep(time);
        stopMotors();
    }

    /** Spins in place the opposite way for {@code time} ms. */
    private void turnRight(int time) {
        robot.frontLeft.setPower(-robot.FL_POWER);
        robot.frontRight.setPower(robot.FR_POWER);
        robot.backLeft.setPower(-robot.BL_POWER);
        robot.backRight.setPower(robot.BR_POWER);
        sleep(time);
        stopMotors();
    }

    /** Strafes (mecanum sideways motion) to the left for {@code time} ms. */
    private void parallelLeft(int time) {
        robot.frontLeft.setPower(robot.FL_POWER);
        robot.frontRight.setPower(-robot.FR_POWER);
        robot.backLeft.setPower(-robot.BL_POWER);
        robot.backRight.setPower(robot.BR_POWER);
        sleep(time);
        stopMotors();
    }

    /** Strafes to the right for {@code time} ms. Currently unused by runOpMode. */
    private void parallelRight(int time) {
        robot.frontLeft.setPower(-robot.FL_POWER);
        robot.frontRight.setPower(robot.FR_POWER);
        robot.backLeft.setPower(robot.BL_POWER);
        robot.backRight.setPower(-robot.BR_POWER);
        sleep(time);
        stopMotors();
    }

    /** Cuts power to all four drive motors. */
    private void stopMotors() {
        robot.frontLeft.setPower(0);
        robot.frontRight.setPower(0);
        robot.backLeft.setPower(0);
        robot.backRight.setPower(0);
    }

    /** Moves both claw servos to their open positions. */
    private void openClaw() {
        robot.leftClaw.setPosition(1);
        robot.rightClaw.setPosition(0);
    }

    /** Moves both claw servos to their closed positions. Currently unused by runOpMode. */
    private void closeClaw() {
        robot.leftClaw.setPosition(.4);
        robot.rightClaw.setPosition(1);
    }
}
|
package org.ink.web.http;
import io.netty.channel.Channel;
import org.ink.security.user.User;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * Lightweight HTTP session: identifies a client, optionally carries the
 * authenticated {@link User}, arbitrary attributes, and the Netty
 * {@link Channel} it is bound to. Expiry is time-based.
 *
 * @author zhuyichen 2017-8-15
 */
public class HttpSession {
    // session id
    private String sessionId;
    // authenticated user, kept for security checks
    private User user;
    // personal attributes; lazily initialized on first write
    private Map<String, Object> attributes;
    // the network channel this session is bound to
    private Channel channel;
    // creation timestamp, milliseconds since epoch
    private long createTime = System.currentTimeMillis();
    // maximum session age in SECONDS (default 2 hours)
    private long maxAge = 2*60*60;

    public HttpSession() {
    }

    /** Sets the maximum session age, in seconds. */
    public void setMaxAge(long maxAge) {
        this.maxAge = maxAge;
    }

    /** Returns the maximum session age, in seconds. */
    public long maxAge() {
        return maxAge;
    }

    /**
     * Returns whether the current session has expired, i.e. whether
     * creation time plus maxAge (converted to millis) is in the past.
     */
    public boolean hasExpires() {
        return createTime + TimeUnit.SECONDS.toMillis(maxAge) <= System.currentTimeMillis();
    }

    public Channel channel() {
        return channel;
    }

    public void setChannel(Channel channel) {
        this.channel = channel;
    }

    public HttpSession(String sessionId) {
        this.sessionId = sessionId;
    }

    public String sessionId() {
        return sessionId;
    }

    public void setSessionId(String sessionId) {
        this.sessionId = sessionId;
    }

    public User user() {
        return user;
    }

    public void setUser(User user) {
        this.user = user;
    }

    /**
     * Stores an attribute, lazily creating the backing map on first use.
     * NOTE(review): not thread-safe — confirm sessions are confined to one channel/thread.
     */
    public void addAttribute(String key, Object value) {
        if (attributes == null) {
            attributes = new HashMap<>();
        }
        attributes.put(key, value);
    }
}
|
package io.github.xeonpowder.fabric.rpg.resource.resources;
import io.github.xeonpowder.fabric.rpg.resource.FabricRPGResource;
/**
 * Energy resource definition.
 *
 * <p>Registers itself with the name "Energy" and the fixed base values
 * (100, 0, 0, .5f, 1) under {@code ResourceType.ENERGY}. The meaning of each
 * numeric argument is defined by {@code FabricRPGResource} — presumably
 * max/min/start values and regeneration parameters; confirm against the
 * superclass constructor.
 */
public class Energy extends FabricRPGResource {
    public Energy() {
        super("Energy", 100, 0, 0, .5f, 1, ResourceType.ENERGY);
    }
}
|
package com.interest.service;
import com.interest.model.entity.UserQQEntity;
/**
 * Service operations for QQ-bound user accounts.
 */
public interface UserQQService {
    /**
     * Persists the given QQ user entity.
     *
     * @param userQQEntity the entity to insert
     */
    void insertEntity(UserQQEntity userQQEntity);
}
|
package io.jans.agama.timer;
/**
 * Stateless marker type (no fields or behavior) — given the package
 * {@code io.jans.agama.timer}, presumably used to identify/qualify the
 * transpilation timer event; confirm against its CDI/event usage.
 */
public class TranspilationEvent { }
|
/* Copyright 2002-2022 CS GROUP
* Licensed to CS GROUP (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.time;
import org.hipparchus.CalculusFieldElement;
import org.hipparchus.util.FastMath;
import org.orekit.utils.Constants;
/** Barycentric Dynamic Time.
* <p>Time used to take account of time dilation when calculating orbits of planets,
* asteroids, comets and interplanetary spacecraft in the Solar system. It was based
* on a Dynamical time scale but was not well defined and not rigorously correct as
* a relativistic time scale. It was subsequently deprecated in favour of
* Barycentric Coordinate Time (TCB), but at the 2006 General Assembly of the
* International Astronomical Union TDB was rehabilitated by making it a specific
* fixed linear transformation of TCB.</p>
* <p>By convention, TDB = TT + 0.001658 sin(g) + 0.000014 sin(2g)seconds
* where g = 357.53 + 0.9856003 (JD - 2451545) degrees.</p>
* @author Aude Privat
*/
public class TDBScale implements TimeScale {

    /** Serializable UID. */
    private static final long serialVersionUID = 20131209L;

    /** Constant term for g angle (degrees). */
    private static final double G0 = 357.53;

    /** Slope term for g angle (degrees per day). */
    private static final double G1 = 0.9856003;

    /** Factor for sin(g), in seconds. */
    private static final double SIN_G_FACTOR = 0.001658;

    /** Factor for sin(2g), in seconds. */
    private static final double SIN_2G_FACTOR = 0.000014;

    /** TT time scale. */
    private final TimeScale tt;

    /** Reference Epoch. */
    private final AbsoluteDate j2000Epoch;

    /**
     * Package private constructor for the factory.
     *
     * @param tt TT time scale.
     * @param j2000Epoch reference date for this time scale.
     */
    TDBScale(final TimeScale tt, final AbsoluteDate j2000Epoch) {
        this.tt = tt;
        this.j2000Epoch = j2000Epoch;
    }

    /** {@inheritDoc} */
    @Override
    public double offsetFromTAI(final AbsoluteDate date) {
        // TDB = TT + 0.001658 sin(g) + 0.000014 sin(2g), with g evaluated at the
        // elapsed time since J2000 expressed in days (see class javadoc).
        final double dtDays = date.durationFrom(j2000Epoch) / Constants.JULIAN_DAY;
        final double g = FastMath.toRadians(G0 + G1 * dtDays);
        return tt.offsetFromTAI(date) + (SIN_G_FACTOR * FastMath.sin(g) + SIN_2G_FACTOR * FastMath.sin(2 * g));
    }

    /** {@inheritDoc} */
    @Override
    public <T extends CalculusFieldElement<T>> T offsetFromTAI(final FieldAbsoluteDate<T> date) {
        final T dtDays = date.durationFrom(j2000Epoch).divide(Constants.JULIAN_DAY);
        // Degrees-to-radians conversion done field-wise: g[deg] * pi / 180.
        final T g = dtDays.multiply(G1).add(G0).multiply(dtDays.getPi().divide(180));
        return tt.offsetFromTAI(date).
               add(g.sin().multiply(SIN_G_FACTOR).add(g.multiply(2).sin().multiply(SIN_2G_FACTOR)));
    }

    /** {@inheritDoc} */
    @Override
    public String getName() {
        return "TDB";
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return getName();
    }
}
|
package com.wemote.scorpio.modules.utils;
import com.wemote.scorpio.modules.security.utils.Cryptos;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class CryptosTest {

    /** HMAC-SHA1: generated key round-trips through mac computation and validation. */
    @Test
    public void mac() {
        String input = "foo message";
        // the key may be any string
        // byte[] key = "a foo key".getBytes();
        byte[] key = Cryptos.generateHmacSha1Key();
        assertThat(key).hasSize(20);
        byte[] macResult = Cryptos.hmacSha1(input.getBytes(), key);
        System.out.println("hmac-sha1 key in hex :" + Encodes.encodeHex(key));
        System.out.println("hmac-sha1 in hex result :" + Encodes.encodeHex(macResult));
        assertThat(Cryptos.isMacValid(macResult, input.getBytes(), key)).isTrue();
    }

    /** AES (default mode): encrypt then decrypt restores the plaintext. */
    @Test
    public void aes() {
        byte[] key = Cryptos.generateAesKey();
        assertThat(key).hasSize(16);
        String input = "foo message";
        byte[] encryptResult = Cryptos.aesEncrypt(input.getBytes(), key);
        String descryptResult = Cryptos.aesDecrypt(encryptResult, key);
        System.out.println("aes key in hex :" + Encodes.encodeHex(key));
        System.out.println("aes encrypt in hex result :" + Encodes.encodeHex(encryptResult));
        assertThat(descryptResult).isEqualTo(input);
    }

    /** AES with an explicit IV: encrypt then decrypt restores the plaintext. */
    @Test
    public void aesWithIV() {
        byte[] key = Cryptos.generateAesKey();
        byte[] iv = Cryptos.generateIV();
        assertThat(key).hasSize(16);
        assertThat(iv).hasSize(16);
        String input = "foo message";
        byte[] encryptResult = Cryptos.aesEncrypt(input.getBytes(), key, iv);
        String descryptResult = Cryptos.aesDecrypt(encryptResult, key, iv);
        System.out.println("aes key in hex :" + Encodes.encodeHex(key));
        System.out.println("iv in hex :" + Encodes.encodeHex(iv));
        System.out.println("aes encrypt in hex result :" + Encodes.encodeHex(encryptResult));
        assertThat(descryptResult).isEqualTo(input);
    }
}
|
// //////////////////////////////////////////////////////////////////////////////
//
// RMG - Reaction Mechanism Generator
//
// Copyright (c) 2002-2011 Prof. William H. Green (whgreen@mit.edu) and the
// RMG Team (rmg_dev@mit.edu)
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
// //////////////////////////////////////////////////////////////////////////////
package jing.rxn;
import jing.chem.*;
import jing.chemUtil.Graph;
import jing.chemUtil.Node;
import java.util.*;
import jing.param.*;
import jing.rxnSys.Logger;
import jing.rxnSys.SystemSnapshot;
// ## package jing::rxn
// ----------------------------------------------------------------------------
// jing\rxn\TemplateReaction.java
// ----------------------------------------------------------------------------
/**
* Reaction generated from templates. Immutable object.
*/
// ## class TemplateReaction
public class TemplateReaction extends Reaction {
protected PDepNetwork pDepNetwork;
protected ReactionTemplate reactionTemplate;
// Constructors
// ## operation TemplateReaction(Structure,RateConstant,ReactionTemplate)
/**
 * Builds a template reaction from its structure, kinetics and generating
 * template. Private — instances are created via the static factory
 * {@code makeTemplateReaction} or internally when generating reverse reactions.
 */
private TemplateReaction(Structure p_structure, Kinetics[] p_kinetics,
        ReactionTemplate p_template) {
    // #[ operation TemplateReaction(Structure,RateConstant,ReactionTemplate)
    structure = p_structure;
    kinetics = p_kinetics;
    reactionTemplate = p_template;
    // The primary-kinetic-library flag is derived from the first kinetics entry
    // and is only meaningful when kinetics are present.
    if (kinetics != null)
        kineticsFromPrimaryKineticLibrary = p_kinetics[0]
                .isFromPrimaryKineticLibrary();
}
/** No-argument constructor — presumably required by serialization/instantiation code; confirm usage. */
public TemplateReaction() {
}
// ## operation calculatePDepRate(Temperature)
/*
* public double calculatePDepRate(Temperature p_temperature) { //#[ operation calculatePDepRate(Temperature)
* PDepNetwork pdn = getPDepNetwork(); if (pdn != null) { Iterator iter = pdn.getPDepNetReactionList(); while
* (iter.hasNext()) { PDepNetReaction pdnr = (PDepNetReaction)iter.next(); if
* (pdnr.getStructure().equals(getStructure())) { double temp = pdnr.getTemperature(); if (temp !=
* p_temperature.getK()) { System.out.println("Different temperature used!"); System.exit(0); }
* //System.out.println("for reaction " + toString() + "\t using p dep rate:" + String.valueOf(pdnr.getRate()));
* return pdnr.getRate(); } } iter = pdn.getPDepNonincludedReactionList(); while (iter.hasNext()) { PDepNetReaction
* pdnr = (PDepNetReaction)iter.next(); if (pdnr.getStructure().equals(getStructure())) { double temp =
* pdnr.getTemperature(); if (temp != p_temperature.getK()) { System.out.println("Different temperature used!");
* System.exit(0); } //System.out.println("for reaction " + toString() + "\t using p dep rate:" +
* String.valueOf(pdnr.getRate())); return pdnr.getRate(); } } } return calculateRate(p_temperature); //#] }
*/
// ## operation calculatePDepRate(Temperature)
/**
 * Returns the total rate of this reaction at the given temperature and pressure.
 * If the reaction belongs to a pressure-dependent network and a network reaction
 * with the same structure exists (included or non-included), that reaction's
 * pressure-dependent rate is used; otherwise the plain total rate is computed.
 */
public double calculateTotalPDepRate(Temperature p_temperature,
        Pressure p_pressure) {
    // #[ operation calculatePDepRate(Temperature)
    PDepNetwork pdn = getPDepNetwork();
    if (pdn != null) {
        // First look among the reactions already included in the network.
        ListIterator iter = pdn.getNetReactions().listIterator();
        while (iter.hasNext()) {
            PDepReaction pdnr = (PDepReaction) iter.next();
            if (pdnr.getStructure().equals(getStructure()))
                return pdnr.calculateRate(p_temperature, p_pressure);
        }
        // Fall back to reactions known to the network but not yet included.
        iter = pdn.getNonincludedReactions().listIterator();
        while (iter.hasNext()) {
            PDepReaction pdnr = (PDepReaction) iter.next();
            if (pdnr.getStructure().equals(getStructure()))
                return pdnr.calculateRate(p_temperature, p_pressure);
        }
    }
    // Not pressure-dependent (or no matching network reaction): use the standard rate.
    return calculateTotalRate(p_temperature);
    // #]
}
// ## operation generateReverseForBackwardReaction()
/**
 * Generates (and caches on the reverse template) the forward counterpart of this
 * backward reaction. Returns null when this reaction is not backward, when its
 * template is forward-only, or when no reverse rate constant can be found.
 *
 * <p>Several reaction families need their products' central-node labels remapped
 * before a rate constant lookup can succeed; those remappings are hard-coded below.
 *
 * @param fs   structure of this (backward) reaction
 * @param fsSp species-level structure of this reaction (used for dictionary lookups)
 */
private TemplateReaction generateReverseForBackwardReaction(Structure fs,
        Structure fsSp) {
    // #[ operation generateReverseForBackwardReaction()
    // we need to only generate reverse reaction for backward reaction, so that we wont be stuck into a self loop.
    if (!this.isBackward()) {
        return null;
    }
    ReactionTemplate fRT = getReactionTemplate();
    ReactionTemplate rRT = null;
    if (fRT.isForward()) {
        return null;
    } else if (fRT.isNeutral()) {
        rRT = fRT;
    } else if (fRT.isBackward()) {
        rRT = fRT.getReverseReactionTemplate();
    } else {
        throw new InvalidReactionTemplateDirectionException(); // Structure fs = getStructure();
    }
    // Reverse structure: swap reactants and products, flip direction.
    LinkedList freactant = fs.getReactantList();
    LinkedList fproduct = fs.getProductList();
    Structure rs = new Structure(fproduct, freactant, -1
            * this.getDirection());
    Structure rsSp = new Structure(fsSp.products, fsSp.reactants, -1
            * this.getDirection());
    // If it's in the reverse ReactionTemplate.reactionDictionaryByStructure then just return that one.
    TemplateReaction rr = rRT.getReactionFromStructure(rsSp);
    if (rr != null) {
        rr.setReverseReaction(this);
        return rr;
    }
    // NOTE(review): rNum appears unused in the remainder of this method — confirm before removing.
    int rNum = fproduct.size();
    Kinetics[] k = rRT.findReverseRateConstant(rs);
    // Family-specific central-node relabeling so the generic-graph match can succeed.
    if (k == null && rRT.name.equals("R_Recombination")) {
        // Move the *2 label (whichever product carries it) to position *1.
        ChemGraph cg = ((ChemGraph) fproduct.get(0));
        Graph g = cg.getGraph();
        Node n = (Node) g.getCentralNodeAt(2);
        if (n == null) {
            cg = ((ChemGraph) fproduct.get(1));
            g = cg.getGraph();
            n = (Node) g.getCentralNodeAt(2);
        }
        g.clearCentralNode();
        g.setCentralNode(1, n);
        k = rRT.findRateConstant(rs);
    } else if (k == null && rRT.name.equals("H_Abstraction")) {
        // Relabel *3->*1 (keeping *2) on the product carrying *3, and *1->*3 on the other product.
        ChemGraph cg1 = ((ChemGraph) fproduct.get(0));
        Graph g1 = cg1.getGraph();
        Node n3 = (Node) g1.getCentralNodeAt(3);
        if (n3 == null) {
            cg1 = ((ChemGraph) fproduct.get(1));
            g1 = cg1.getGraph();
            n3 = (Node) g1.getCentralNodeAt(3);
            Node n2 = (Node) g1.getCentralNodeAt(2);
            g1.clearCentralNode();
            g1.setCentralNode(1, n3);
            g1.setCentralNode(2, n2);
            ChemGraph cg2 = ((ChemGraph) fproduct.get(0));
            Graph g2 = cg2.getGraph();
            Node n1 = (Node) g2.getCentralNodeAt(1);
            g2.clearCentralNode();
            g2.setCentralNode(3, n1);
        } else {
            Node n2 = (Node) g1.getCentralNodeAt(2);
            g1.clearCentralNode();
            g1.setCentralNode(1, n3);
            g1.setCentralNode(2, n2);
            ChemGraph cg2 = ((ChemGraph) fproduct.get(1));
            Graph g2 = cg2.getGraph();
            Node n1 = (Node) g2.getCentralNodeAt(1);
            g2.clearCentralNode();
            g2.setCentralNode(3, n1);
        }
        k = rRT.findRateConstant(rs);
    }
    /*
     * Added by MRH on 27-Aug-2009 This hard-coding is necessary for rxn family templates that are labeled
     * "thermo_consistence". After the chemgraphs are mutated, the central nodes for the products are not correct
     * (see example below). These hard-coded portions are necessary for RMG to find Kinetics for the structure.
     * Example: CH4 + H CH4 1 *1 C 0 {2,S} {3,S} {4,S} {5,S} 2 *2 H 0 {1,S} 3 H 0 {1,S} 4 H 0 {1,S} 5 H 0 {1,S} H 1
     * *3 H 1 After RMG has "reactChemGraph" and "mutate" the chemgraphs of the reactants, the products would look
     * as such: prod1 1 *1 C 1 {2,S} {3,S} {4,S} 2 H 0 {1,S} 3 H 0 {1,S} 4 H 0 {1,S} prod2 1 *3 H 0 {2,S} 2 *2 H 0
     * {1,S} Assuming the reaction as written (CH4+H=CH3+H2) is endothermic at 298K, RMG will label this structure
     * as direction=-1 (backward). When attempting to find Kinetics for the backward reaction, RMG will try to match
     * the prod1 graph against the generic graphs X_H and Y_rad_birad. It cannot match Y_rad_birad (because there is
     * no *3 node) and it cannot match X_H (because there is no *2 node). Thus, a "null" Kinetics will be returned
     * from the findReverseRateConstant call. We then relabel the central nodes on prod1 and prod2 and attempt to
     * get Kinetics for this structure. I am adding the following bit of code to work with the new reaction family
     * Aaron Vandeputte is adding to RMG: "".
     */
    else if (k == null
            && rRT.name.equals("intra_substitutionS_isomerization")) {
        // Swap *2/*3 and rotate the higher-numbered labels (*4..*7) to their mirrored positions.
        ChemGraph cg1 = ((ChemGraph) fproduct.get(0));
        Graph g1 = cg1.getGraph();
        Node n1 = (Node) g1.getCentralNodeAt(1);
        Node n2 = (Node) g1.getCentralNodeAt(2);
        Node n3 = (Node) g1.getCentralNodeAt(3);
        Node n4 = (Node) g1.getCentralNodeAt(4);
        Node n5 = (Node) g1.getCentralNodeAt(5);
        Node n6 = (Node) g1.getCentralNodeAt(6);
        Node n7 = (Node) g1.getCentralNodeAt(7);
        g1.clearCentralNode();
        g1.setCentralNode(1, n1);
        g1.setCentralNode(2, n3);
        g1.setCentralNode(3, n2);
        if (n7 != null) {
            g1.setCentralNode(7, n4);
            g1.setCentralNode(6, n5);
            g1.setCentralNode(5, n6);
            g1.setCentralNode(4, n7);
        } else if (n6 != null) {
            g1.setCentralNode(6, n4);
            g1.setCentralNode(5, n5);
            g1.setCentralNode(4, n6);
        } else if (n5 != null) {
            g1.setCentralNode(5, n4);
            g1.setCentralNode(4, n5);
        } else if (n4 != null)
            g1.setCentralNode(4, n4);
        k = rRT.findRateConstant(rs);
    }
    // Adding another elseif statement for Aaron Vandeputte rxn family
    // RMG expects to find *1 and *2 in the same ChemGraph (for this rxn family)
    // but will instead find *1 and *3 in the same ChemGraph (if we've reached this far)
    // Need to switch *2 and *3
    else if (k == null
            && (rRT.name.equals("substitutionS") || rRT.name
                    .equals("Substitution_O"))) {
        ChemGraph cg1 = ((ChemGraph) fproduct.get(0));
        ChemGraph cg2 = ((ChemGraph) fproduct.get(1));
        Graph g1 = cg1.getGraph();
        Graph g2 = cg2.getGraph();
        Node n3 = (Node) g1.getCentralNodeAt(3);
        if (n3 == null) {
            // Switch the identities of cg1/g1 and cg2/g2
            cg1 = ((ChemGraph) fproduct.get(1));
            g1 = cg1.getGraph();
            cg2 = ((ChemGraph) fproduct.get(0));
            g2 = cg2.getGraph();
            n3 = (Node) g1.getCentralNodeAt(3);
        }
        Node n1 = (Node) g1.getCentralNodeAt(1);
        g1.clearCentralNode();
        g1.setCentralNode(2, n3);
        g1.setCentralNode(1, n1);
        Node n2 = (Node) g2.getCentralNodeAt(2);
        g2.clearCentralNode();
        g2.setCentralNode(3, n2);
        k = rRT.findRateConstant(rs);
    } else if (k == null && rRT.name.equals("intra_H_migration")) {
        // This family computes the reverse reaction directly instead of looking up kinetics.
        ChemGraph cg = ((ChemGraph) fproduct.get(0));
        rr = rRT.calculateForwardRateConstant(cg, rs);
        if (!rr.isForward()) {
            String err = "Backward:"
                    + structure.toString()
                    + String.valueOf(structure
                            .calculateKeq(new Temperature(298, "K")))
                    + '\n';
            err = err
                    + "Forward:"
                    + rr.structure.toString()
                    + String.valueOf(rr.structure
                            .calculateKeq(new Temperature(298, "K")));
            throw new InvalidReactionDirectionException(err);
        }
        rr.setReverseReaction(this);
        rRT.addReaction(rr);
        return rr;
    }
    if (k == null) {
        Logger.error("Couldn't find the rate constant for reaction: "
                + rs.toChemkinString(true) + " with " + rRT.name);
        // System.exit(0);
        return null;
    }
    // Build the reverse reaction, sanity-check its direction, cache it and link the pair.
    rr = new TemplateReaction(rsSp, k, rRT);
    if (!rr.isForward()) {
        String err = "Backward:"
                + structure.toString()
                + String.valueOf(structure.calculateKeq(new Temperature(
                        298, "K"))) + '\n';
        err = err
                + "Forward:"
                + rr.structure.toString()
                + String.valueOf(rr.structure.calculateKeq(new Temperature(
                        298, "K")));
        throw new InvalidReactionDirectionException(err);
    }
    rr.setReverseReaction(this);
    rRT.addReaction(rr);
    return rr;
    // #]
}
/**
 * Returns the type of this reaction, defined as the name of the
 * reaction template that generated it.
 *
 * @return the generating template's name
 */
public String getType() {
    // The reaction "type" is simply the owning template's name.
    return reactionTemplate.getName();
}
/**
 * Static factory for TemplateReaction: returns the reaction for the given
 * species-level structure, reusing a cached instance from the template's
 * reaction dictionary when present, otherwise creating, wiring up the
 * reverse reaction, and registering a new one.
 *
 * @param p_structureSp species-level structure used as the dictionary key
 * @param p_kinetics    kinetics assigned to a newly created reaction
 * @param p_template    reaction template that owns the reaction
 * @param p_structure   full structure, used when generating the reverse of a
 *                      backward reaction
 * @return the cached or newly created reaction, or null if a backward
 *         reaction's reverse could not be generated
 */
public static TemplateReaction makeTemplateReaction(
        Structure p_structureSp, Kinetics[] p_kinetics,
        ReactionTemplate p_template, Structure p_structure) {
    double PT = System.currentTimeMillis();
    // Look for pre-existing reaction in Template's reactionDictionaryByStructure.
    TemplateReaction reaction = p_template
            .getReactionFromStructure(p_structureSp);
    // Accumulate dictionary-lookup wall time (converted ms -> minutes) into
    // the global profiling counter.
    Global.getReacFromStruc = Global.getReacFromStruc
            + (System.currentTimeMillis() - PT) / 1000 / 60;
    if (reaction == null) {
        // Create a new reaction.
        reaction = new TemplateReaction(p_structureSp, p_kinetics,
                p_template);
        if (reaction.isBackward()) {
            // Backward reaction: derive its forward counterpart; a null
            // result aborts creation entirely (nothing is registered).
            Logger.info("Created new reverse " + p_template.getName()
                    + " reaction: " + reaction.toString());
            TemplateReaction reverse = reaction
                    .generateReverseForBackwardReaction(p_structure,
                            p_structureSp);
            if (reverse == null)
                return null;
            reaction.setReverseReaction(reverse);
        } else {
            Logger.info("Created new forwards " + p_template.getName()
                    + " reaction: " + reaction.toString());
            ReactionTemplate fRT = reaction.getReactionTemplate();
            ReactionTemplate rRT = null;
            // A "neutral" template is its own reverse; otherwise look up the
            // paired reverse template (may be null).
            if (fRT.isNeutral())
                rRT = fRT;
            else
                rRT = fRT.getReverseReactionTemplate();
            if (rRT != null) {
                // Build the reverse reaction, cross-link the pair, and
                // register the reverse with its own template.
                TemplateReaction reverse = new TemplateReaction(
                        p_structureSp.generateReverseStructure(),
                        p_kinetics, rRT);
                reaction.setReverseReaction(reverse);
                reverse.setReverseReaction(reaction);
                rRT.addReaction(reverse);
            }
        }
        p_template.addReaction(reaction);
        // Sanity-check the representation invariant before handing it out.
        if (!reaction.repOk()) {
            throw new InvalidTemplateReactionException();
        }
    }
    // Total factory wall time (ms -> minutes) for profiling.
    Global.makeTR += (System.currentTimeMillis() - PT) / 1000 / 60;
    return reaction;
}
/**
 * Checks this reaction's representation invariant.
 *
 * @return true only if both the superclass state and the owning
 *         reaction template pass their own repOk checks
 */
public boolean repOk() {
    return super.repOk() && reactionTemplate.repOk();
}
/**
 * Renders this reaction as one line per kinetics entry: the structure
 * string, a tab, then the Chemkin form of the kinetics evaluated at the
 * given temperature.
 *
 * Note: when there is more than one kinetics entry, a newline follows
 * every entry including the last — preserved deliberately, since callers
 * may rely on the exact output format.
 *
 * @param p_temperature temperature used to evaluate the heat of reaction
 * @return the formatted, possibly multi-line, string
 */
public String toString(Temperature p_temperature) {
    // Use StringBuilder instead of repeated String concatenation in the loop.
    StringBuilder result = new StringBuilder();
    String prefix = getStructure().toString() + '\t';
    Kinetics[] kinetics = getKinetics();
    for (int i = 0; i < kinetics.length; i++) {
        result.append(prefix).append(
                kinetics[i].toChemkinString(calculateHrxn(p_temperature),
                        p_temperature, false));
        if (kinetics.length > 1) {
            result.append('\n');
        }
    }
    return result.toString();
}
/*
* MRH 24MAR2010: Commented out toStringWithReverseReaction method as it is never called
*/
// ## operation toStringWithReveseReaction()
// public String toStringWithReveseReaction(Temperature p_temperature) {
// //#[ operation toStringWithReveseReaction()
// TemplateReaction rr = (TemplateReaction)getReverseReaction();
// if (rr == null) return getStructure().toChemkinString(false).toString() + '\t' + getReactionTemplate().getName() +
// '\t' + getKinetics().toChemkinString(calculateHrxn(p_temperature), p_temperature, true);
// else {
// TemplateReaction temp = null;
// if (isForward()) temp = this;
// else if (isBackward()) temp = rr;
// else throw new InvalidReactionDirectionException();
//
// return temp.getStructure().toChemkinString(false).toString() + '\t' + temp.getReactionTemplate().getName() + '\t' +
// temp.getKinetics().toChemkinString(calculateHrxn(p_temperature), p_temperature, true);
// }
//
// //#]
// }
/** Returns the {@code pDepNetwork} associated with this reaction (may be null). */
public PDepNetwork getPDepNetwork() {
    return this.pDepNetwork;
}
/** Returns the reaction template that generated this reaction. */
public ReactionTemplate getReactionTemplate() {
    return this.reactionTemplate;
}
}
/*********************************************************************
* File Path : RMG\RMG\jing\rxn\TemplateReaction.java
*********************************************************************/
|
package com.LimePencil.Q1000;
import java.util.Scanner;
/**
 * Reads two integers from standard input and prints their sum.
 */
public class Main {
    public static void main(String[] args) {
        // try-with-resources closes the scanner (and System.in) on exit,
        // matching the original's explicit in.close().
        try (Scanner scanner = new Scanner(System.in)) {
            int first = scanner.nextInt();
            int second = scanner.nextInt();
            System.out.println(first + second);
        }
    }
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.nitobi.jsf.component.combo;
import javax.faces.component.UIComponentBase;
/**
*
* @author eric
*/
/**
 * JSF component describing a single column definition of a combo control.
 */
public class UIComboColumnDefinition extends UIComponentBase {

    /** Default renderer type for this component. */
    public static final String DEFAULT_RENDERER_TYPE = "ComboColumnDefinitionRenderer";

    /** Component family reported by {@link #getFamily()}. */
    public static final String COMPONENT_FAMILY = "ComboColumnDefinitionFamily";

    /** JSF component type identifier. */
    public static final String COMPONENT_TYPE = "UIComboColumnDefinition";

    @Override
    public String getFamily() {
        return UIComboColumnDefinition.COMPONENT_FAMILY;
    }
}
|
package online.himakeit.lightmusic.bean;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
/**
* @author:LiXueLong
* @date:2018/2/7
* @mail1:skylarklxlong@outlook.com
* @mail2:li_xuelong@126.com
* @des:
*/
/**
 * Parcelable value object describing one music track.
 *
 * Serialization is done through a Bundle keyed by the KEY_* constants;
 * {@link #writeToParcel(Parcel, int)} and the {@link #CREATOR} must stay
 * symmetric: every key written must be read back.
 */
public class MusicInfo implements Parcelable {
    public static final String KEY_SONG_ID = "songid";
    public static final String KEY_ALBUM_ID = "albumid";
    public static final String KEY_ALBUM_NAME = "albumname";
    public static final String KEY_ALBUM_DATA = "albumdata";
    public static final String KEY_DURATION = "duration";
    public static final String KEY_MUSIC_NAME = "musicname";
    public static final String KEY_ARTIST = "artist";
    public static final String KEY_ARTIST_ID = "artist_id";
    public static final String KEY_DATA = "data";
    public static final String KEY_FOLDER = "folder";
    public static final String KEY_SIZE = "size";
    public static final String KEY_FAVORITE = "favorite";
    public static final String KEY_LRC = "lrc";
    public static final String KEY_ISLOCAL = "islocal";
    public static final String KEY_SORT = "sort";
    // _id of the row in the media database.
    public long songId = -1;
    public int albumId = -1;
    public String albumName;
    public String albumData;
    public int duration;
    public String musicName;
    public String artist;
    public long artistId;
    public String data;
    public String folder;
    public String lrc;
    public boolean islocal;
    public String sort;
    public int size;
    // 0 = not favorited, 1 = favorited.
    public int favorite = 0;
    public static final Creator<MusicInfo> CREATOR = new Creator<MusicInfo>() {
        @Override
        public MusicInfo createFromParcel(Parcel source) {
            MusicInfo music = new MusicInfo();
            // Read the bundle directly; the original allocated a Bundle that
            // was immediately overwritten.
            Bundle bundle = source.readBundle();
            music.songId = bundle.getLong(KEY_SONG_ID);
            music.albumId = bundle.getInt(KEY_ALBUM_ID);
            music.albumName = bundle.getString(KEY_ALBUM_NAME);
            music.duration = bundle.getInt(KEY_DURATION);
            music.musicName = bundle.getString(KEY_MUSIC_NAME);
            music.artist = bundle.getString(KEY_ARTIST);
            music.artistId = bundle.getLong(KEY_ARTIST_ID);
            music.data = bundle.getString(KEY_DATA);
            music.folder = bundle.getString(KEY_FOLDER);
            music.albumData = bundle.getString(KEY_ALBUM_DATA);
            music.size = bundle.getInt(KEY_SIZE);
            music.lrc = bundle.getString(KEY_LRC);
            music.islocal = bundle.getBoolean(KEY_ISLOCAL);
            music.sort = bundle.getString(KEY_SORT);
            // Fix: favorite was never restored, so the flag was silently lost
            // across parceling. getInt defaults to 0 for old parcels that
            // lack the key, matching the field's default.
            music.favorite = bundle.getInt(KEY_FAVORITE);
            return music;
        }
        @Override
        public MusicInfo[] newArray(int size) {
            return new MusicInfo[size];
        }
    };
    @Override
    public int describeContents() {
        return 0;
    }
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        Bundle bundle = new Bundle();
        bundle.putLong(KEY_SONG_ID, songId);
        bundle.putInt(KEY_ALBUM_ID, albumId);
        bundle.putString(KEY_ALBUM_NAME, albumName);
        bundle.putString(KEY_ALBUM_DATA, albumData);
        bundle.putInt(KEY_DURATION, duration);
        bundle.putString(KEY_MUSIC_NAME, musicName);
        bundle.putString(KEY_ARTIST, artist);
        bundle.putLong(KEY_ARTIST_ID, artistId);
        bundle.putString(KEY_DATA, data);
        bundle.putString(KEY_FOLDER, folder);
        bundle.putInt(KEY_SIZE, size);
        bundle.putString(KEY_LRC, lrc);
        bundle.putBoolean(KEY_ISLOCAL, islocal);
        bundle.putString(KEY_SORT, sort);
        // Fix: persist the favorite flag (was declared but never written).
        bundle.putInt(KEY_FAVORITE, favorite);
        dest.writeBundle(bundle);
    }
    public int getFavorite() {
        return favorite;
    }
    public void setFavorite(int favorite) {
        this.favorite = favorite;
    }
}
|
/*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.util.prefs;
import java.util.*;
import java.io.*;
import java.security.AccessController;
import java.security.PrivilegedAction;
// These imports needed only as a workaround for a JavaDoc bug
import java.lang.Integer;
import java.lang.Long;
import java.lang.Float;
import java.lang.Double;
/**
* This class provides a skeletal implementation of the {@link Preferences}
* class, greatly easing the task of implementing it.
*
* <p><strong>This class is for <tt>Preferences</tt> implementers only.
* Normal users of the <tt>Preferences</tt> facility should have no need to
* consult this documentation. The {@link Preferences} documentation
* should suffice.</strong>
*
* <p>Implementors must override the nine abstract service-provider interface
* (SPI) methods: {@link #getSpi(String)}, {@link #putSpi(String, String)},
* {@link #removeSpi(String)}, {@link #childSpi(String)}, {@link
* #removeNodeSpi()}, {@link #keysSpi()}, {@link #childrenNamesSpi()}, {@link
* #syncSpi()} and {@link #flushSpi()}. All of the concrete methods specify
* precisely how they are implemented atop these SPI methods. The implementor
* may, at his discretion, override one or more of the concrete methods if the
* default implementation is unsatisfactory for any reason, such as
* performance.
*
* <p>The SPI methods fall into three groups concerning exception
* behavior. The <tt>getSpi</tt> method should never throw exceptions, but it
* doesn't really matter, as any exception thrown by this method will be
* intercepted by {@link #get(String, String)}, which will return the specified
* default value to the caller. The <tt>removeNodeSpi, keysSpi,
* childrenNamesSpi, syncSpi</tt> and <tt>flushSpi</tt> methods are specified
* to throw {@link BackingStoreException}, and the implementation is required
* to throw this checked exception if it is unable to perform the operation.
* The exception propagates outward, causing the corresponding API method
* to fail.
*
* <p>The remaining SPI methods {@link #putSpi(String, String)}, {@link
* #removeSpi(String)} and {@link #childSpi(String)} have more complicated
* exception behavior. They are not specified to throw
* <tt>BackingStoreException</tt>, as they can generally obey their contracts
* even if the backing store is unavailable. This is true because they return
* no information and their effects are not required to become permanent until
* a subsequent call to {@link Preferences#flush()} or
* {@link Preferences#sync()}. Generally speaking, these SPI methods should not
* throw exceptions. In some implementations, there may be circumstances
* under which these calls cannot even enqueue the requested operation for
* later processing. Even under these circumstances it is generally better to
* simply ignore the invocation and return, rather than throwing an
* exception. Under these circumstances, however, all subsequent invocations
* of <tt>flush()</tt> and <tt>sync</tt> should return <tt>false</tt>, as
* returning <tt>true</tt> would imply that all previous operations had
* successfully been made permanent.
*
* <p>There is one circumstance under which <tt>putSpi, removeSpi and
* childSpi</tt> <i>should</i> throw an exception: if the caller lacks
* sufficient privileges on the underlying operating system to perform the
* requested operation. This will, for instance, occur on most systems
* if a non-privileged user attempts to modify system preferences.
* (The required privileges will vary from implementation to
* implementation. On some implementations, they are the right to modify the
* contents of some directory in the file system; on others they are the right
* to modify contents of some key in a registry.) Under any of these
* circumstances, it would generally be undesirable to let the program
* continue executing as if these operations would become permanent at a later
* time. While implementations are not required to throw an exception under
* these circumstances, they are encouraged to do so. A {@link
* SecurityException} would be appropriate.
*
* <p>Most of the SPI methods require the implementation to read or write
* information at a preferences node. The implementor should beware of the
* fact that another VM may have concurrently deleted this node from the
* backing store. It is the implementation's responsibility to recreate the
* node if it has been deleted.
*
* <p>Implementation note: In Sun's default <tt>Preferences</tt>
* implementations, the user's identity is inherited from the underlying
* operating system and does not change for the lifetime of the virtual
* machine. It is recognized that server-side <tt>Preferences</tt>
* implementations may have the user identity change from request to request,
* implicitly passed to <tt>Preferences</tt> methods via the use of a
* static {@link ThreadLocal} instance. Authors of such implementations are
* <i>strongly</i> encouraged to determine the user at the time preferences
* are accessed (for example by the {@link #get(String, String)} or {@link
* #put(String, String)} method) rather than permanently associating a user
* with each <tt>Preferences</tt> instance. The latter behavior conflicts
* with normal <tt>Preferences</tt> usage and would lead to great confusion.
*
* @author Josh Bloch
* @see Preferences
* @since 1.4
*/
public abstract class AbstractPreferences extends Preferences {
/**
* Our name relative to parent.
*/
private final String name;
/**
* Our absolute path name.
*/
private final String absolutePath;
/**
* Our parent node.
*/
final AbstractPreferences parent;
/**
* Our root node.
*/
private final AbstractPreferences root; // Relative to this node
/**
* This field should be <tt>true</tt> if this node did not exist in the
* backing store prior to the creation of this object. The field
* is initialized to false, but may be set to true by a subclass
* constructor (and should not be modified thereafter). This field
* indicates whether a node change event should be fired when
* creation is complete.
*/
protected boolean newNode = false;
/**
* All known unremoved children of this node. (This "cache" is consulted
* prior to calling childSpi() or getChild().
*/
private Map<String, AbstractPreferences> kidCache = new HashMap<>();
/**
* This field is used to keep track of whether or not this node has
* been removed. Once it's set to true, it will never be reset to false.
*/
private boolean removed = false;
/**
* Registered preference change listeners.
*/
private PreferenceChangeListener[] prefListeners = new PreferenceChangeListener[0];
/**
* Registered node change listeners.
*/
private NodeChangeListener[] nodeListeners = new NodeChangeListener[0];
/**
* An object whose monitor is used to lock this node. This object
* is used in preference to the node itself to reduce the likelihood of
* intentional or unintentional denial of service due to a locked node.
* To avoid deadlock, a node is <i>never</i> locked by a thread that
* holds a lock on a descendant of that node.
*/
protected final Object lock = new Object();
/**
* Creates a preference node with the specified parent and the specified
* name relative to its parent.
*
* @param parent the parent of this preference node, or null if this
* is the root.
* @param name the name of this preference node, relative to its parent,
* or <tt>""</tt> if this is the root.
* @throws IllegalArgumentException if <tt>name</tt> contains a slash
* (<tt>'/'</tt>), or <tt>parent</tt> is <tt>null</tt> and
* name isn't <tt>""</tt>.
*/
protected AbstractPreferences(AbstractPreferences parent, String name) {
    if (parent != null) {
        // Non-root node: name must be a non-empty, slash-free path segment.
        if (name.indexOf('/') != -1) {
            throw new IllegalArgumentException("Name '" + name + "' contains '/'");
        }
        if (name.equals("")) {
            throw new IllegalArgumentException("Illegal name: empty string");
        }
        root = parent.root;
        // Children of the root avoid a doubled leading slash.
        absolutePath = (parent == root ? "/" + name : parent.absolutePath() + "/" + name);
    } else {
        // Root node: only the empty name is legal and its path is "/".
        if (!name.equals("")) {
            throw new IllegalArgumentException("Root name '" + name + "' must be \"\"");
        }
        this.absolutePath = "/";
        root = this;
    }
    this.name = name;
    this.parent = parent;
}
/**
* Implements the <tt>put</tt> method as per the specification in
* {@link Preferences#put(String, String)}.
*
* <p>This implementation checks that the key and value are legal,
* obtains this preference node's lock, checks that the node
* has not been removed, invokes {@link #putSpi(String, String)}, and if
* there are any preference change listeners, enqueues a notification
* event for processing by the event dispatch thread.
*
* @param key key with which the specified value is to be associated.
* @param value value to be associated with the specified key.
* @throws NullPointerException if key or value is <tt>null</tt>.
* @throws IllegalArgumentException if <tt>key.length()</tt> exceeds
* <tt>MAX_KEY_LENGTH</tt> or if <tt>value.length</tt> exceeds
* <tt>MAX_VALUE_LENGTH</tt>.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public void put(String key, String value) {
    // Validate arguments before taking the lock.
    if (key == null) {
        throw new NullPointerException();
    }
    if (value == null) {
        throw new NullPointerException();
    }
    if (key.length() > MAX_KEY_LENGTH) {
        throw new IllegalArgumentException("Key too long: " + key);
    }
    if (value.length() > MAX_VALUE_LENGTH) {
        throw new IllegalArgumentException("Value too long: " + value);
    }
    synchronized (lock) {
        if (removed) {
            throw new IllegalStateException("Node has been removed.");
        }
        // Delegate storage to the SPI, then queue a change notification.
        putSpi(key, value);
        enqueuePreferenceChangeEvent(key, value);
    }
}
/**
* Implements the <tt>get</tt> method as per the specification in
* {@link Preferences#get(String, String)}.
*
* <p>This implementation first checks to see if <tt>key</tt> is
* <tt>null</tt> throwing a <tt>NullPointerException</tt> if this is
* the case. Then it obtains this preference node's lock,
* checks that the node has not been removed, invokes {@link
* #getSpi(String)}, and returns the result, unless the <tt>getSpi</tt>
* invocation returns <tt>null</tt> or throws an exception, in which case
* this invocation returns <tt>def</tt>.
*
* @param key key whose associated value is to be returned.
* @param def the value to be returned in the event that this
* preference node has no value associated with <tt>key</tt>.
* @return the value associated with <tt>key</tt>, or <tt>def</tt>
* if no value is associated with <tt>key</tt>.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @throws NullPointerException if key is <tt>null</tt>. (A
* <tt>null</tt> default <i>is</i> permitted.)
*/
public String get(String key, String def) {
    if (key == null) {
        throw new NullPointerException("Null key");
    }
    synchronized (lock) {
        if (removed) {
            throw new IllegalStateException("Node has been removed.");
        }
        String stored;
        try {
            stored = getSpi(key);
        } catch (Exception ignored) {
            // Any SPI failure is treated as "value absent": fall back to def.
            stored = null;
        }
        return stored != null ? stored : def;
    }
}
/**
* Implements the <tt>remove(String)</tt> method as per the specification
* in {@link Preferences#remove(String)}.
*
* <p>This implementation obtains this preference node's lock,
* checks that the node has not been removed, invokes
* {@link #removeSpi(String)} and if there are any preference
* change listeners, enqueues a notification event for processing by the
* event dispatch thread.
*
* @param key key whose mapping is to be removed from the preference node.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @throws NullPointerException {@inheritDoc}.
*/
public void remove(String key) {
    Objects.requireNonNull(key, "Specified key cannot be null");
    synchronized (lock) {
        if (removed) {
            throw new IllegalStateException("Node has been removed.");
        }
        // Remove from the backing store, then queue a "removed" (null) event.
        removeSpi(key);
        enqueuePreferenceChangeEvent(key, null);
    }
}
/**
* Implements the <tt>clear</tt> method as per the specification in
* {@link Preferences#clear()}.
*
* <p>This implementation obtains this preference node's lock,
* invokes {@link #keys()} to obtain an array of keys, and
* iterates over the array invoking {@link #remove(String)} on each key.
*
* @throws BackingStoreException if this operation cannot be completed
* due to a failure in the backing store, or inability to
* communicate with it.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public void clear() throws BackingStoreException {
    synchronized (lock) {
        // Delete every key currently present; each remove() fires its own event.
        for (String key : keys()) {
            remove(key);
        }
    }
}
/**
* Implements the <tt>putInt</tt> method as per the specification in
* {@link Preferences#putInt(String, int)}.
*
* <p>This implementation translates <tt>value</tt> to a string with
* {@link Integer#toString(int)} and invokes {@link #put(String, String)}
* on the result.
*
* @param key key with which the string form of value is to be associated.
* @param value value whose string form is to be associated with key.
* @throws NullPointerException if key is <tt>null</tt>.
* @throws IllegalArgumentException if <tt>key.length()</tt> exceeds
* <tt>MAX_KEY_LENGTH</tt>.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public void putInt(String key, int value) {
    // Store the decimal string form; getInt() parses it back.
    put(key, String.valueOf(value));
}
/**
* Implements the <tt>getInt</tt> method as per the specification in
* {@link Preferences#getInt(String, int)}.
*
* <p>This implementation invokes {@link #get(String, String) <tt>get(key,
* null)</tt>}. If the return value is non-null, the implementation
* attempts to translate it to an <tt>int</tt> with
* {@link Integer#parseInt(String)}. If the attempt succeeds, the return
* value is returned by this method. Otherwise, <tt>def</tt> is returned.
*
* @param key key whose associated value is to be returned as an int.
* @param def the value to be returned in the event that this
* preference node has no value associated with <tt>key</tt>
* or the associated value cannot be interpreted as an int.
* @return the int value represented by the string associated with
* <tt>key</tt> in this preference node, or <tt>def</tt> if the
* associated value does not exist or cannot be interpreted as
* an int.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @throws NullPointerException if <tt>key</tt> is <tt>null</tt>.
*/
public int getInt(String key, int def) {
    String stored = get(key, null);
    if (stored == null) {
        return def;
    }
    try {
        return Integer.parseInt(stored);
    } catch (NumberFormatException badNumber) {
        // Unparseable stored value: fall back to the caller's default.
        return def;
    }
}
/**
* Implements the <tt>putLong</tt> method as per the specification in
* {@link Preferences#putLong(String, long)}.
*
* <p>This implementation translates <tt>value</tt> to a string with
* {@link Long#toString(long)} and invokes {@link #put(String, String)}
* on the result.
*
* @param key key with which the string form of value is to be associated.
* @param value value whose string form is to be associated with key.
* @throws NullPointerException if key is <tt>null</tt>.
* @throws IllegalArgumentException if <tt>key.length()</tt> exceeds
* <tt>MAX_KEY_LENGTH</tt>.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public void putLong(String key, long value) {
    // Store the decimal string form; getLong() parses it back.
    put(key, String.valueOf(value));
}
/**
* Implements the <tt>getLong</tt> method as per the specification in
* {@link Preferences#getLong(String, long)}.
*
* <p>This implementation invokes {@link #get(String, String) <tt>get(key,
* null)</tt>}. If the return value is non-null, the implementation
* attempts to translate it to a <tt>long</tt> with
* {@link Long#parseLong(String)}. If the attempt succeeds, the return
* value is returned by this method. Otherwise, <tt>def</tt> is returned.
*
* @param key key whose associated value is to be returned as a long.
* @param def the value to be returned in the event that this
* preference node has no value associated with <tt>key</tt>
* or the associated value cannot be interpreted as a long.
* @return the long value represented by the string associated with
* <tt>key</tt> in this preference node, or <tt>def</tt> if the
* associated value does not exist or cannot be interpreted as
* a long.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @throws NullPointerException if <tt>key</tt> is <tt>null</tt>.
*/
public long getLong(String key, long def) {
    String stored = get(key, null);
    if (stored == null) {
        return def;
    }
    try {
        return Long.parseLong(stored);
    } catch (NumberFormatException badNumber) {
        // Unparseable stored value: fall back to the caller's default.
        return def;
    }
}
/**
* Implements the <tt>putBoolean</tt> method as per the specification in
* {@link Preferences#putBoolean(String, boolean)}.
*
* <p>This implementation translates <tt>value</tt> to a string with
* {@link String#valueOf(boolean)} and invokes {@link #put(String, String)}
* on the result.
*
* @param key key with which the string form of value is to be associated.
* @param value value whose string form is to be associated with key.
* @throws NullPointerException if key is <tt>null</tt>.
* @throws IllegalArgumentException if <tt>key.length()</tt> exceeds
* <tt>MAX_KEY_LENGTH</tt>.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public void putBoolean(String key, boolean value) {
    // Store "true"/"false"; getBoolean() compares case-insensitively.
    put(key, Boolean.toString(value));
}
/**
* Implements the <tt>getBoolean</tt> method as per the specification in
* {@link Preferences#getBoolean(String, boolean)}.
*
* <p>This implementation invokes {@link #get(String, String) <tt>get(key,
* null)</tt>}. If the return value is non-null, it is compared with
* <tt>"true"</tt> using {@link String#equalsIgnoreCase(String)}. If the
* comparison returns <tt>true</tt>, this invocation returns
* <tt>true</tt>. Otherwise, the original return value is compared with
* <tt>"false"</tt>, again using {@link String#equalsIgnoreCase(String)}.
* If the comparison returns <tt>true</tt>, this invocation returns
* <tt>false</tt>. Otherwise, this invocation returns <tt>def</tt>.
*
* @param key key whose associated value is to be returned as a boolean.
* @param def the value to be returned in the event that this
* preference node has no value associated with <tt>key</tt>
* or the associated value cannot be interpreted as a boolean.
* @return the boolean value represented by the string associated with
* <tt>key</tt> in this preference node, or <tt>def</tt> if the
* associated value does not exist or cannot be interpreted as
* a boolean.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @throws NullPointerException if <tt>key</tt> is <tt>null</tt>.
*/
public boolean getBoolean(String key, boolean def) {
    String stored = get(key, null);
    if (stored == null) {
        return def;
    }
    // Only case-insensitive "true"/"false" are recognized; anything else
    // yields the caller's default (Boolean.parseBoolean would lose def).
    if (stored.equalsIgnoreCase("true")) {
        return true;
    }
    if (stored.equalsIgnoreCase("false")) {
        return false;
    }
    return def;
}
/**
* Implements the <tt>putFloat</tt> method as per the specification in
* {@link Preferences#putFloat(String, float)}.
*
* <p>This implementation translates <tt>value</tt> to a string with
* {@link Float#toString(float)} and invokes {@link #put(String, String)}
* on the result.
*
* @param key key with which the string form of value is to be associated.
* @param value value whose string form is to be associated with key.
* @throws NullPointerException if key is <tt>null</tt>.
* @throws IllegalArgumentException if <tt>key.length()</tt> exceeds
* <tt>MAX_KEY_LENGTH</tt>.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public void putFloat(String key, float value) {
    // Store the string form; getFloat() parses it back.
    put(key, String.valueOf(value));
}
/**
* Implements the <tt>getFloat</tt> method as per the specification in
* {@link Preferences#getFloat(String, float)}.
*
* <p>This implementation invokes {@link #get(String, String) <tt>get(key,
* null)</tt>}. If the return value is non-null, the implementation
* attempts to translate it to an <tt>float</tt> with
* {@link Float#parseFloat(String)}. If the attempt succeeds, the return
* value is returned by this method. Otherwise, <tt>def</tt> is returned.
*
* @param key key whose associated value is to be returned as a float.
* @param def the value to be returned in the event that this
* preference node has no value associated with <tt>key</tt>
* or the associated value cannot be interpreted as a float.
* @return the float value represented by the string associated with
* <tt>key</tt> in this preference node, or <tt>def</tt> if the
* associated value does not exist or cannot be interpreted as
* a float.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @throws NullPointerException if <tt>key</tt> is <tt>null</tt>.
*/
public float getFloat(String key, float def) {
    String stored = get(key, null);
    if (stored == null) {
        return def;
    }
    try {
        return Float.parseFloat(stored);
    } catch (NumberFormatException badNumber) {
        // Unparseable stored value: fall back to the caller's default.
        return def;
    }
}
/**
* Implements the <tt>putDouble</tt> method as per the specification in
* {@link Preferences#putDouble(String, double)}.
*
* <p>This implementation translates <tt>value</tt> to a string with
* {@link Double#toString(double)} and invokes {@link #put(String, String)}
* on the result.
*
* @param key key with which the string form of value is to be associated.
* @param value value whose string form is to be associated with key.
* @throws NullPointerException if key is <tt>null</tt>.
* @throws IllegalArgumentException if <tt>key.length()</tt> exceeds
* <tt>MAX_KEY_LENGTH</tt>.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public void putDouble(String key, double value) {
put(key, Double.toString(value));
}
/**
* Implements the <tt>getDouble</tt> method as per the specification in
* {@link Preferences#getDouble(String, double)}.
*
* <p>This implementation invokes {@link #get(String, String) <tt>get(key,
* null)</tt>}. If the return value is non-null, the implementation
* attempts to translate it to an <tt>double</tt> with
* {@link Double#parseDouble(String)}. If the attempt succeeds, the return
* value is returned by this method. Otherwise, <tt>def</tt> is returned.
*
* @param key key whose associated value is to be returned as a double.
* @param def the value to be returned in the event that this
* preference node has no value associated with <tt>key</tt>
* or the associated value cannot be interpreted as a double.
* @return the double value represented by the string associated with
* <tt>key</tt> in this preference node, or <tt>def</tt> if the
* associated value does not exist or cannot be interpreted as
* a double.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @throws NullPointerException if <tt>key</tt> is <tt>null</tt>.
*/
public double getDouble(String key, double def) {
double result = def;
try {
String value = get(key, null);
if (value != null) { result = Double.parseDouble(value); }
} catch (NumberFormatException e) {
// Ignoring exception causes specified default to be returned
}
return result;
}
/**
* Implements the <tt>putByteArray</tt> method as per the specification in
* {@link Preferences#putByteArray(String, byte[])}.
*
* @param key key with which the string form of value is to be associated.
* @param value value whose string form is to be associated with key.
* @throws NullPointerException if key or value is <tt>null</tt>.
* @throws IllegalArgumentException if key.length() exceeds MAX_KEY_LENGTH
* or if value.length exceeds MAX_VALUE_LENGTH*3/4.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public void putByteArray(String key, byte[] value) {
put(key, Base64.byteArrayToBase64(value));
}
/**
* Implements the <tt>getByteArray</tt> method as per the specification in
* {@link Preferences#getByteArray(String, byte[])}.
*
* @param key key whose associated value is to be returned as a byte array.
* @param def the value to be returned in the event that this
* preference node has no value associated with <tt>key</tt>
* or the associated value cannot be interpreted as a byte array.
* @return the byte array value represented by the string associated with
* <tt>key</tt> in this preference node, or <tt>def</tt> if the
* associated value does not exist or cannot be interpreted as
* a byte array.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @throws NullPointerException if <tt>key</tt> is <tt>null</tt>. (A
* <tt>null</tt> value for <tt>def</tt> <i>is</i> permitted.)
*/
public byte[] getByteArray(String key, byte[] def) {
byte[] result = def;
String value = get(key, null);
try {
if (value != null) { result = Base64.base64ToByteArray(value); }
} catch (RuntimeException e) {
// Ignoring exception causes specified default to be returned
}
return result;
}
/**
* Implements the <tt>keys</tt> method as per the specification in
* {@link Preferences#keys()}.
*
* <p>This implementation obtains this preference node's lock, checks that
* the node has not been removed and invokes {@link #keysSpi()}.
*
* @return an array of the keys that have an associated value in this
* preference node.
* @throws BackingStoreException if this operation cannot be completed
* due to a failure in the backing store, or inability to
* communicate with it.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public String[] keys() throws BackingStoreException {
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node has been removed."); }
return keysSpi();
}
}
/**
* Implements the <tt>children</tt> method as per the specification in
* {@link Preferences#childrenNames()}.
*
* <p>This implementation obtains this preference node's lock, checks that
* the node has not been removed, constructs a <tt>TreeSet</tt> initialized
* to the names of children already cached (the children in this node's
* "child-cache"), invokes {@link #childrenNamesSpi()}, and adds all of the
* returned child-names into the set. The elements of the tree set are
* dumped into a <tt>String</tt> array using the <tt>toArray</tt> method,
* and this array is returned.
*
* @return the names of the children of this preference node.
* @throws BackingStoreException if this operation cannot be completed
* due to a failure in the backing store, or inability to
* communicate with it.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @see #cachedChildren()
*/
public String[] childrenNames() throws BackingStoreException {
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node has been removed."); }
Set<String> s = new TreeSet<>(kidCache.keySet());
for (String kid : childrenNamesSpi()) { s.add(kid); }
return s.toArray(EMPTY_STRING_ARRAY);
}
}
private static final String[] EMPTY_STRING_ARRAY = new String[0];
/**
* Returns all known unremoved children of this node.
*
* @return all known unremoved children of this node.
*/
protected final AbstractPreferences[] cachedChildren() {
return kidCache.values().toArray(EMPTY_ABSTRACT_PREFS_ARRAY);
}
private static final AbstractPreferences[] EMPTY_ABSTRACT_PREFS_ARRAY = new AbstractPreferences[0];
/**
* Implements the <tt>parent</tt> method as per the specification in
* {@link Preferences#parent()}.
*
* <p>This implementation obtains this preference node's lock, checks that
* the node has not been removed and returns the parent value that was
* passed to this node's constructor.
*
* @return the parent of this preference node.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public Preferences parent() {
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node has been removed."); }
return parent;
}
}
/**
* Implements the <tt>node</tt> method as per the specification in
* {@link Preferences#node(String)}.
*
* <p>This implementation obtains this preference node's lock and checks
* that the node has not been removed. If <tt>path</tt> is <tt>""</tt>,
* this node is returned; if <tt>path</tt> is <tt>"/"</tt>, this node's
* root is returned. If the first character in <tt>path</tt> is
* not <tt>'/'</tt>, the implementation breaks <tt>path</tt> into
* tokens and recursively traverses the path from this node to the
* named node, "consuming" a name and a slash from <tt>path</tt> at
* each step of the traversal. At each step, the current node is locked
* and the node's child-cache is checked for the named node. If it is
* not found, the name is checked to make sure its length does not
* exceed <tt>MAX_NAME_LENGTH</tt>. Then the {@link #childSpi(String)}
* method is invoked, and the result stored in this node's child-cache.
* If the newly created <tt>Preferences</tt> object's {@link #newNode}
* field is <tt>true</tt> and there are any node change listeners,
* a notification event is enqueued for processing by the event dispatch
* thread.
*
* <p>When there are no more tokens, the last value found in the
* child-cache or returned by <tt>childSpi</tt> is returned by this
* method. If during the traversal, two <tt>"/"</tt> tokens occur
* consecutively, or the final token is <tt>"/"</tt> (rather than a name),
* an appropriate <tt>IllegalArgumentException</tt> is thrown.
*
* <p> If the first character of <tt>path</tt> is <tt>'/'</tt>
* (indicating an absolute path name) this preference node's
* lock is dropped prior to breaking <tt>path</tt> into tokens, and
* this method recursively traverses the path starting from the root
* (rather than starting from this node). The traversal is otherwise
* identical to the one described for relative path names. Dropping
* the lock on this node prior to commencing the traversal at the root
* node is essential to avoid the possibility of deadlock, as per the
* {@link #lock locking invariant}.
*
* @param path the path name of the preference node to return.
* @return the specified preference node.
* @throws IllegalArgumentException if the path name is invalid (i.e.,
* it contains multiple consecutive slash characters, or ends
* with a slash character and is more than one character long).
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
public Preferences node(String path) {
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node has been removed."); }
if (path.equals("")) { return this; }
if (path.equals("/")) { return root; }
if (path.charAt(0) != '/') { return node(new StringTokenizer(path, "/", true)); }
}
// Absolute path. Note that we've dropped our lock to avoid deadlock
return root.node(new StringTokenizer(path.substring(1), "/", true));
}
/**
* tokenizer contains <name> {'/' <name>}*
*/
private Preferences node(StringTokenizer path) {
String token = path.nextToken();
if (token.equals("/")) // Check for consecutive slashes
{ throw new IllegalArgumentException("Consecutive slashes in path"); }
synchronized (lock) {
AbstractPreferences child = kidCache.get(token);
if (child == null) {
if (token.length() > MAX_NAME_LENGTH) {
throw new IllegalArgumentException("Node name " + token + " too long");
}
child = childSpi(token);
if (child.newNode) { enqueueNodeAddedEvent(child); }
kidCache.put(token, child);
}
if (!path.hasMoreTokens()) { return child; }
path.nextToken(); // Consume slash
if (!path.hasMoreTokens()) { throw new IllegalArgumentException("Path ends with slash"); }
return child.node(path);
}
}
/**
* Implements the <tt>nodeExists</tt> method as per the specification in
* {@link Preferences#nodeExists(String)}.
*
* <p>This implementation is very similar to {@link #node(String)},
* except that {@link #getChild(String)} is used instead of {@link
* #childSpi(String)}.
*
* @param path the path name of the node whose existence is to be checked.
* @return true if the specified node exists.
* @throws BackingStoreException if this operation cannot be completed
* due to a failure in the backing store, or inability to
* communicate with it.
* @throws IllegalArgumentException if the path name is invalid (i.e.,
* it contains multiple consecutive slash characters, or ends
* with a slash character and is more than one character long).
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method and
* <tt>pathname</tt> is not the empty string (<tt>""</tt>).
*/
public boolean nodeExists(String path) throws BackingStoreException {
synchronized (lock) {
if (path.equals("")) { return !removed; }
if (removed) { throw new IllegalStateException("Node has been removed."); }
if (path.equals("/")) { return true; }
if (path.charAt(0) != '/') { return nodeExists(new StringTokenizer(path, "/", true)); }
}
// Absolute path. Note that we've dropped our lock to avoid deadlock
return root.nodeExists(new StringTokenizer(path.substring(1), "/", true));
}
/**
* tokenizer contains <name> {'/' <name>}*
*/
private boolean nodeExists(StringTokenizer path) throws BackingStoreException {
String token = path.nextToken();
if (token.equals("/")) // Check for consecutive slashes
{ throw new IllegalArgumentException("Consecutive slashes in path"); }
synchronized (lock) {
AbstractPreferences child = kidCache.get(token);
if (child == null) { child = getChild(token); }
if (child == null) { return false; }
if (!path.hasMoreTokens()) { return true; }
path.nextToken(); // Consume slash
if (!path.hasMoreTokens()) { throw new IllegalArgumentException("Path ends with slash"); }
return child.nodeExists(path);
}
}
/**
* Implements the <tt>removeNode()</tt> method as per the specification in
* {@link Preferences#removeNode()}.
*
* <p>This implementation checks to see that this node is the root; if so,
* it throws an appropriate exception. Then, it locks this node's parent,
* and calls a recursive helper method that traverses the subtree rooted at
* this node. The recursive method locks the node on which it was called,
* checks that it has not already been removed, and then ensures that all
* of its children are cached: The {@link #childrenNamesSpi()} method is
* invoked and each returned child name is checked for containment in the
* child-cache. If a child is not already cached, the {@link
* #childSpi(String)} method is invoked to create a <tt>Preferences</tt>
* instance for it, and this instance is put into the child-cache. Then
* the helper method calls itself recursively on each node contained in its
* child-cache. Next, it invokes {@link #removeNodeSpi()}, marks itself
* as removed, and removes itself from its parent's child-cache. Finally,
* if there are any node change listeners, it enqueues a notification
* event for processing by the event dispatch thread.
*
* <p>Note that the helper method is always invoked with all ancestors up
* to the "closest non-removed ancestor" locked.
*
* @throws IllegalStateException if this node (or an ancestor) has already
* been removed with the {@link #removeNode()} method.
* @throws UnsupportedOperationException if this method is invoked on
* the root node.
* @throws BackingStoreException if this operation cannot be completed
* due to a failure in the backing store, or inability to
* communicate with it.
*/
public void removeNode() throws BackingStoreException {
if (this == root) { throw new UnsupportedOperationException("Can't remove the root!"); }
synchronized (parent.lock) {
removeNode2();
parent.kidCache.remove(name);
}
}
/*
* Called with locks on all nodes on path from parent of "removal root"
* to this (including the former but excluding the latter).
*/
private void removeNode2() throws BackingStoreException {
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node already removed."); }
// Ensure that all children are cached
String[] kidNames = childrenNamesSpi();
for (int i = 0; i < kidNames.length; i++) {
if (!kidCache.containsKey(kidNames[i])) { kidCache.put(kidNames[i], childSpi(kidNames[i])); }
}
// Recursively remove all cached children
for (Iterator<AbstractPreferences> i = kidCache.values().iterator(); i.hasNext(); ) {
try {
i.next().removeNode2();
i.remove();
} catch (BackingStoreException x) { }
}
// Now we have no descendants - it's time to die!
removeNodeSpi();
removed = true;
parent.enqueueNodeRemovedEvent(this);
}
}
/**
* Implements the <tt>name</tt> method as per the specification in
* {@link Preferences#name()}.
*
* <p>This implementation merely returns the name that was
* passed to this node's constructor.
*
* @return this preference node's name, relative to its parent.
*/
public String name() {
return name;
}
/**
* Implements the <tt>absolutePath</tt> method as per the specification in
* {@link Preferences#absolutePath()}.
*
* <p>This implementation merely returns the absolute path name that
* was computed at the time that this node was constructed (based on
* the name that was passed to this node's constructor, and the names
* that were passed to this node's ancestors' constructors).
*
* @return this preference node's absolute path name.
*/
public String absolutePath() {
return absolutePath;
}
/**
* Implements the <tt>isUserNode</tt> method as per the specification in
* {@link Preferences#isUserNode()}.
*
* <p>This implementation compares this node's root node (which is stored
* in a private field) with the value returned by
* {@link Preferences#userRoot()}. If the two object references are
* identical, this method returns true.
*
* @return <tt>true</tt> if this preference node is in the user
* preference tree, <tt>false</tt> if it's in the system
* preference tree.
*/
public boolean isUserNode() {
return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
public Boolean run() {
return root == Preferences.userRoot();
}
}).booleanValue();
}
public void addPreferenceChangeListener(PreferenceChangeListener pcl) {
if (pcl == null) { throw new NullPointerException("Change listener is null."); }
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node has been removed."); }
// Copy-on-write
PreferenceChangeListener[] old = prefListeners;
prefListeners = new PreferenceChangeListener[old.length + 1];
System.arraycopy(old, 0, prefListeners, 0, old.length);
prefListeners[old.length] = pcl;
}
startEventDispatchThreadIfNecessary();
}
public void removePreferenceChangeListener(PreferenceChangeListener pcl) {
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node has been removed."); }
if ((prefListeners == null) || (prefListeners.length == 0)) {
throw new IllegalArgumentException("Listener not registered.");
}
// Copy-on-write
PreferenceChangeListener[] newPl = new PreferenceChangeListener[prefListeners.length - 1];
int i = 0;
while (i < newPl.length && prefListeners[i] != pcl) { newPl[i] = prefListeners[i++]; }
if (i == newPl.length && prefListeners[i] != pcl) {
throw new IllegalArgumentException("Listener not registered.");
}
while (i < newPl.length) { newPl[i] = prefListeners[++i]; }
prefListeners = newPl;
}
}
public void addNodeChangeListener(NodeChangeListener ncl) {
if (ncl == null) { throw new NullPointerException("Change listener is null."); }
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node has been removed."); }
// Copy-on-write
if (nodeListeners == null) {
nodeListeners = new NodeChangeListener[1];
nodeListeners[0] = ncl;
} else {
NodeChangeListener[] old = nodeListeners;
nodeListeners = new NodeChangeListener[old.length + 1];
System.arraycopy(old, 0, nodeListeners, 0, old.length);
nodeListeners[old.length] = ncl;
}
}
startEventDispatchThreadIfNecessary();
}
public void removeNodeChangeListener(NodeChangeListener ncl) {
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node has been removed."); }
if ((nodeListeners == null) || (nodeListeners.length == 0)) {
throw new IllegalArgumentException("Listener not registered.");
}
// Copy-on-write
int i = 0;
while (i < nodeListeners.length && nodeListeners[i] != ncl) { i++; }
if (i == nodeListeners.length) { throw new IllegalArgumentException("Listener not registered."); }
NodeChangeListener[] newNl = new NodeChangeListener[nodeListeners.length - 1];
if (i != 0) { System.arraycopy(nodeListeners, 0, newNl, 0, i); }
if (i != newNl.length) { System.arraycopy(nodeListeners, i + 1, newNl, i, newNl.length - i); }
nodeListeners = newNl;
}
}
// "SPI" METHODS
    /**
     * Put the given key-value association into this preference node. It is
     * guaranteed that <tt>key</tt> and <tt>value</tt> are non-null and of
     * legal length. Also, it is guaranteed that this node has not been
     * removed. (The implementor needn't check for any of these things.)
     *
     * <p>This method is invoked with the lock on this node held.
     *
     * @param key the key
     * @param value the value
     * @see #put(String, String)
     */
    protected abstract void putSpi(String key, String value);
    /**
     * Return the value associated with the specified key at this preference
     * node, or <tt>null</tt> if there is no association for this key, or the
     * association cannot be determined at this time. It is guaranteed that
     * <tt>key</tt> is non-null. Also, it is guaranteed that this node has
     * not been removed. (The implementor needn't check for either of these
     * things.)
     *
     * <p>Generally speaking, this method should not throw an exception
     * under any circumstances. If, however, it does throw an exception,
     * the exception will be intercepted and treated as a <tt>null</tt>
     * return value.
     *
     * <p>This method is invoked with the lock on this node held.
     *
     * @param key the key
     * @return the value associated with the specified key at this preference
     *          node, or <tt>null</tt> if there is no association for this
     *          key, or the association cannot be determined at this time.
     * @see #get(String, String)
     */
    protected abstract String getSpi(String key);
    /**
     * Remove the association (if any) for the specified key at this
     * preference node. It is guaranteed that <tt>key</tt> is non-null.
     * Also, it is guaranteed that this node has not been removed.
     * (The implementor needn't check for either of these things.)
     *
     * <p>This method is invoked with the lock on this node held.
     *
     * @param key the key whose association, if any, is to be removed
     * @see #putSpi(String, String)
     */
    protected abstract void removeSpi(String key);
    /**
     * Removes this preference node, invalidating it and any preferences that
     * it contains. The named child will have no descendants at the time this
     * invocation is made (i.e., the {@link Preferences#removeNode()} method
     * invokes this method repeatedly in a bottom-up fashion, removing each of
     * a node's descendants before removing the node itself).
     *
     * <p>This method is invoked with the lock held on this node and its
     * parent (and all ancestors that are being removed as a
     * result of a single invocation to {@link Preferences#removeNode()}).
     *
     * <p>The removal of a node needn't become persistent until the
     * <tt>flush</tt> method is invoked on this node (or an ancestor).
     *
     * <p>If this node throws a <tt>BackingStoreException</tt>, the exception
     * will propagate out beyond the enclosing {@link #removeNode()}
     * invocation.
     *
     * @throws BackingStoreException if this operation cannot be completed
     *         due to a failure in the backing store, or inability to
     *         communicate with it.
     * @see #removeNode()
     */
    protected abstract void removeNodeSpi() throws BackingStoreException;
    /**
     * Returns all of the keys that have an associated value in this
     * preference node. (The returned array will be of size zero if
     * this node has no preferences.) It is guaranteed that this node has not
     * been removed.
     *
     * <p>This method is invoked with the lock on this node held.
     *
     * <p>If this node throws a <tt>BackingStoreException</tt>, the exception
     * will propagate out beyond the enclosing {@link #keys()} invocation.
     *
     * @return an array of the keys that have an associated value in this
     *         preference node.
     * @throws BackingStoreException if this operation cannot be completed
     *         due to a failure in the backing store, or inability to
     *         communicate with it.
     * @see #keys()
     */
    protected abstract String[] keysSpi() throws BackingStoreException;
    /**
     * Returns the names of the children of this preference node. (The
     * returned array will be of size zero if this node has no children.)
     * This method need not return the names of any nodes already cached,
     * but may do so without harm.
     *
     * <p>This method is invoked with the lock on this node held.
     *
     * <p>If this node throws a <tt>BackingStoreException</tt>, the exception
     * will propagate out beyond the enclosing {@link #childrenNames()}
     * invocation.
     *
     * @return an array containing the names of the children of this
     *         preference node.
     * @throws BackingStoreException if this operation cannot be completed
     *         due to a failure in the backing store, or inability to
     *         communicate with it.
     * @see #childrenNames()
     */
    protected abstract String[] childrenNamesSpi() throws BackingStoreException;
/**
* Returns the named child if it exists, or <tt>null</tt> if it does not.
* It is guaranteed that <tt>nodeName</tt> is non-null, non-empty,
* does not contain the slash character ('/'), and is no longer than
* {@link #MAX_NAME_LENGTH} characters. Also, it is guaranteed
* that this node has not been removed. (The implementor needn't check
* for any of these things if he chooses to override this method.)
*
* <p>Finally, it is guaranteed that the named node has not been returned
* by a previous invocation of this method or {@link #childSpi} after the
* last time that it was removed. In other words, a cached value will
* always be used in preference to invoking this method. (The implementor
* needn't maintain his own cache of previously returned children if he
* chooses to override this method.)
*
* <p>This implementation obtains this preference node's lock, invokes
* {@link #childrenNames()} to get an array of the names of this node's
* children, and iterates over the array comparing the name of each child
* with the specified node name. If a child node has the correct name,
* the {@link #childSpi(String)} method is invoked and the resulting
* node is returned. If the iteration completes without finding the
* specified name, <tt>null</tt> is returned.
*
* @param nodeName name of the child to be searched for.
* @return the named child if it exists, or null if it does not.
* @throws BackingStoreException if this operation cannot be completed
* due to a failure in the backing store, or inability to
* communicate with it.
*/
protected AbstractPreferences getChild(String nodeName) throws BackingStoreException {
synchronized (lock) {
// assert kidCache.get(nodeName)==null;
String[] kidNames = childrenNames();
for (int i = 0; i < kidNames.length; i++) {
if (kidNames[i].equals(nodeName)) { return childSpi(kidNames[i]); }
}
}
return null;
}
    /**
     * Returns the named child of this preference node, creating it if it does
     * not already exist. It is guaranteed that <tt>name</tt> is non-null,
     * non-empty, does not contain the slash character ('/'), and is no longer
     * than {@link #MAX_NAME_LENGTH} characters. Also, it is guaranteed that
     * this node has not been removed. (The implementor needn't check for any
     * of these things.)
     *
     * <p>Finally, it is guaranteed that the named node has not been returned
     * by a previous invocation of this method or {@link #getChild(String)}
     * after the last time that it was removed. In other words, a cached
     * value will always be used in preference to invoking this method.
     * Subclasses need not maintain their own cache of previously returned
     * children.
     *
     * <p>The implementer must ensure that the returned node has not been
     * removed. If a like-named child of this node was previously removed, the
     * implementer must return a newly constructed <tt>AbstractPreferences</tt>
     * node; once removed, an <tt>AbstractPreferences</tt> node
     * cannot be "resuscitated."
     *
     * <p>If this method causes a node to be created, this node is not
     * guaranteed to be persistent until the <tt>flush</tt> method is
     * invoked on this node or one of its ancestors (or descendants).
     *
     * <p>This method is invoked with the lock on this node held.
     *
     * @param name The name of the child node to return, relative to
     *        this preference node.
     * @return The named child node.
     * @see #getChild(String)
     */
    protected abstract AbstractPreferences childSpi(String name);
/**
* Returns the absolute path name of this preferences node.
*/
public String toString() {
return (this.isUserNode() ? "User" : "System") + " Preference Node: " + this.absolutePath();
}
/**
* Implements the <tt>sync</tt> method as per the specification in
* {@link Preferences#sync()}.
*
* <p>This implementation calls a recursive helper method that locks this
* node, invokes syncSpi() on it, unlocks this node, and recursively
* invokes this method on each "cached child." A cached child is a child
* of this node that has been created in this VM and not subsequently
* removed. In effect, this method does a depth first traversal of the
* "cached subtree" rooted at this node, calling syncSpi() on each node in
* the subTree while only that node is locked. Note that syncSpi() is
* invoked top-down.
*
* @throws BackingStoreException if this operation cannot be completed
* due to a failure in the backing store, or inability to
* communicate with it.
* @throws IllegalStateException if this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
* @see #flush()
*/
public void sync() throws BackingStoreException {
sync2();
}
private void sync2() throws BackingStoreException {
AbstractPreferences[] cachedKids;
synchronized (lock) {
if (removed) { throw new IllegalStateException("Node has been removed"); }
syncSpi();
cachedKids = cachedChildren();
}
for (int i = 0; i < cachedKids.length; i++) { cachedKids[i].sync2(); }
}
    /**
     * This method is invoked with this node locked. The contract of this
     * method is to synchronize any cached preferences stored at this node
     * with any stored in the backing store. (It is perfectly possible that
     * this node does not exist on the backing store, either because it has
     * been deleted by another VM, or because it has not yet been created.)
     * Note that this method should <i>not</i> synchronize the preferences in
     * any subnodes of this node. If the backing store naturally syncs an
     * entire subtree at once, the implementer is encouraged to override
     * sync(), rather than merely overriding this method.
     *
     * <p>If this node throws a <tt>BackingStoreException</tt>, the exception
     * will propagate out beyond the enclosing {@link #sync()} invocation.
     *
     * @throws BackingStoreException if this operation cannot be completed
     *         due to a failure in the backing store, or inability to
     *         communicate with it.
     * @see #sync()
     */
    protected abstract void syncSpi() throws BackingStoreException;
/**
* Implements the <tt>flush</tt> method as per the specification in
* {@link Preferences#flush()}.
*
* <p>This implementation calls a recursive helper method that locks this
* node, invokes flushSpi() on it, unlocks this node, and recursively
* invokes this method on each "cached child." A cached child is a child
* of this node that has been created in this VM and not subsequently
* removed. In effect, this method does a depth first traversal of the
* "cached subtree" rooted at this node, calling flushSpi() on each node in
* the subTree while only that node is locked. Note that flushSpi() is
* invoked top-down.
*
* <p> If this method is invoked on a node that has been removed with
* the {@link #removeNode()} method, flushSpi() is invoked on this node,
* but not on others.
*
* @throws BackingStoreException if this operation cannot be completed
* due to a failure in the backing store, or inability to
* communicate with it.
* @see #flush()
*/
public void flush() throws BackingStoreException {
flush2();
}
private void flush2() throws BackingStoreException {
AbstractPreferences[] cachedKids;
synchronized (lock) {
flushSpi();
if (removed) { return; }
cachedKids = cachedChildren();
}
for (int i = 0; i < cachedKids.length; i++) { cachedKids[i].flush2(); }
}
    /**
     * This method is invoked with this node locked. The contract of this
     * method is to force any cached changes in the contents of this
     * preference node to the backing store, guaranteeing their persistence.
     * (It is perfectly possible that this node does not exist on the backing
     * store, either because it has been deleted by another VM, or because it
     * has not yet been created.) Note that this method should <i>not</i>
     * flush the preferences in any subnodes of this node. If the backing
     * store naturally flushes an entire subtree at once, the implementer is
     * encouraged to override flush(), rather than merely overriding this
     * method.
     *
     * <p>If this node throws a <tt>BackingStoreException</tt>, the exception
     * will propagate out beyond the enclosing {@link #flush()} invocation.
     *
     * @throws BackingStoreException if this operation cannot be completed
     *         due to a failure in the backing store, or inability to
     *         communicate with it.
     * @see #flush()
     */
    protected abstract void flushSpi() throws BackingStoreException;
/**
* Returns <tt>true</tt> iff this node (or an ancestor) has been
* removed with the {@link #removeNode()} method. This method
* locks this node prior to returning the contents of the private
* field used to track this state.
*
* @return <tt>true</tt> iff this node (or an ancestor) has been
* removed with the {@link #removeNode()} method.
*/
protected boolean isRemoved() {
    // Read the removal flag under the node lock so that the most recent
    // write by another thread is guaranteed to be visible here.
    final boolean nodeRemoved;
    synchronized (lock) {
        nodeRemoved = removed;
    }
    return nodeRemoved;
}
/**
 * Queue of pending notification events. When a preference or node
 * change event for which there are one or more listeners occurs,
 * it is placed on this queue and the queue is notified. A background
 * thread waits on this queue and delivers the events. This decouples
 * event delivery from preference activity, greatly simplifying
 * locking and reducing opportunity for deadlock.
 *
 * <p>All access is guarded by synchronizing on the queue object itself
 * (see the enqueue methods and EventDispatchThread).
 */
private static final List<EventObject> eventQueue = new LinkedList<>();
/**
* These two classes are used to distinguish NodeChangeEvents on
* eventQueue so the event dispatch thread knows whether to call
* childAdded or childRemoved.
*/
private class NodeAddedEvent extends NodeChangeEvent {
private static final long serialVersionUID = -6743557530157328528L;
NodeAddedEvent(Preferences parent, Preferences child) {
super(parent, child);
}
}
private class NodeRemovedEvent extends NodeChangeEvent {
private static final long serialVersionUID = 8735497392918824837L;
NodeRemovedEvent(Preferences parent, Preferences child) {
super(parent, child);
}
}
/**
 * A single background thread ("the event notification thread") monitors
 * the event queue and delivers events that are placed on the queue.
 */
private static class EventDispatchThread extends Thread {
    @Override
    public void run() {
        while (true) {
            // Wait on eventQueue till an event is present.
            EventObject event;
            synchronized (eventQueue) {
                try {
                    while (eventQueue.isEmpty()) {
                        eventQueue.wait();
                    }
                    event = eventQueue.remove(0);
                } catch (InterruptedException e) {
                    // Restore the interrupt status before exiting so any
                    // outer code inspecting the thread sees it; then quit.
                    // XXX Log "Event dispatch thread interrupted. Exiting"
                    Thread.currentThread().interrupt();
                    return;
                }
            }
            // Now we have the event and hold no locks; deliver it to the
            // listeners registered on the source node.
            AbstractPreferences src = (AbstractPreferences) event.getSource();
            if (event instanceof PreferenceChangeEvent) {
                PreferenceChangeEvent pce = (PreferenceChangeEvent) event;
                for (PreferenceChangeListener listener : src.prefListeners()) {
                    listener.preferenceChange(pce);
                }
            } else {
                NodeChangeEvent nce = (NodeChangeEvent) event;
                NodeChangeListener[] listeners = src.nodeListeners();
                if (nce instanceof NodeAddedEvent) {
                    for (NodeChangeListener listener : listeners) {
                        listener.childAdded(nce);
                    }
                } else {
                    // assert nce instanceof NodeRemovedEvent;
                    for (NodeChangeListener listener : listeners) {
                        listener.childRemoved(nce);
                    }
                }
            }
        }
    }
}
// Lazily created by startEventDispatchThreadIfNecessary(), which is
// static synchronized, so at most one dispatch thread is ever started.
private static Thread eventDispatchThread = null;
/**
 * Lazily starts the event dispatch thread the first time it is called;
 * listener registration is the only trigger. Synchronized on the class,
 * so at most one dispatch thread is ever created.
 */
private static synchronized void startEventDispatchThreadIfNecessary() {
    if (eventDispatchThread != null) {
        return; // already running
    }
    // XXX Log "Starting event dispatch thread"
    eventDispatchThread = new EventDispatchThread();
    eventDispatchThread.setDaemon(true); // must not keep the VM alive
    eventDispatchThread.start();
}
/**
 * Return this node's preference/node change listeners. Even though the
 * listener arrays are copy-on-write, synchronized accessors are used to
 * guarantee that a write by one thread is visible to a reading thread.
 */
PreferenceChangeListener[] prefListeners() {
    final PreferenceChangeListener[] snapshot;
    synchronized (lock) {
        snapshot = prefListeners;
    }
    return snapshot;
}
NodeChangeListener[] nodeListeners() {
    // Synchronized for the same visibility reason as prefListeners().
    final NodeChangeListener[] snapshot;
    synchronized (lock) {
        snapshot = nodeListeners;
    }
    return snapshot;
}
/**
 * Enqueue a preference change event for delivery to registered
 * preference change listeners, unless there are none registered.
 * Invoked with this.lock held.
 */
private void enqueuePreferenceChangeEvent(String key, String newValue) {
    if (prefListeners.length == 0) {
        return; // nobody listening: skip the allocation and the wakeup
    }
    synchronized (eventQueue) {
        eventQueue.add(new PreferenceChangeEvent(this, key, newValue));
        eventQueue.notify();
    }
}
/**
 * Enqueue a "node added" event for delivery to registered node change
 * listeners, unless there are none registered. Invoked with this.lock
 * held.
 */
private void enqueueNodeAddedEvent(Preferences child) {
    if (nodeListeners.length == 0) {
        return; // nobody listening: skip the allocation and the wakeup
    }
    synchronized (eventQueue) {
        eventQueue.add(new NodeAddedEvent(this, child));
        eventQueue.notify();
    }
}
/**
 * Enqueue a "node removed" event for delivery to registered node change
 * listeners, unless there are none registered. Invoked with this.lock
 * held.
 */
private void enqueueNodeRemovedEvent(Preferences child) {
    if (nodeListeners.length == 0) {
        return; // nobody listening: skip the allocation and the wakeup
    }
    synchronized (eventQueue) {
        eventQueue.add(new NodeRemovedEvent(this, child));
        eventQueue.notify();
    }
}
/**
 * Implements the <tt>exportNode</tt> method as per the specification in
 * {@link Preferences#exportNode(OutputStream)}.
 *
 * @param os the output stream on which to emit the XML document.
 * @throws IOException if writing to the specified output stream
 *         results in an <tt>IOException</tt>.
 * @throws BackingStoreException if preference data cannot be read from
 *         backing store.
 */
public void exportNode(OutputStream os) throws IOException, BackingStoreException {
    // false => export this node only; exportSubtree passes true to
    // include descendants as well.
    XmlSupport.export(os, this, false);
}
/**
 * Implements the <tt>exportSubtree</tt> method as per the specification in
 * {@link Preferences#exportSubtree(OutputStream)}.
 *
 * @param os the output stream on which to emit the XML document.
 * @throws IOException if writing to the specified output stream
 *         results in an <tt>IOException</tt>.
 * @throws BackingStoreException if preference data cannot be read from
 *         backing store.
 */
public void exportSubtree(OutputStream os) throws IOException, BackingStoreException {
    // true => include this node's entire subtree, not just this node.
    XmlSupport.export(os, this, true);
}
}
|
/**
* Copyright 2021 UCloud Technology Co., Ltd.
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.ucloud.vpc.models;
import cn.ucloud.common.annotation.NotEmpty;
import cn.ucloud.common.annotation.UCloudParam;
import cn.ucloud.common.request.Request;
import java.util.List;
/** Request parameters for creating a network ACL entry (rule). */
public class CreateNetworkAclEntryRequest extends Request {

    /** Region. See [Region and Availability Zone List](../summary/regionlist.html). */
    @NotEmpty
    @UCloudParam("Region")
    private String region;

    /**
     * Project ID. Defaults to the default project if omitted; sub-accounts must specify it.
     * See the [GetProjectList API](../summary/get_project_list.html).
     */
    @UCloudParam("ProjectId")
    private String projectId;

    /** ID of the ACL. */
    @NotEmpty
    @UCloudParam("AclId")
    private String aclId;

    /** Priority of the entry; must be unique among entries with the same Direction. */
    @NotEmpty
    @UCloudParam("Priority")
    private Integer priority;

    /** Traffic direction: "Ingress" or "Egress". */
    @NotEmpty
    @UCloudParam("Direction")
    private String direction;

    /** Protocol rule description. */
    @NotEmpty
    @UCloudParam("IpProtocol")
    private String ipProtocol;

    /** CIDR notation of the IPv4 range the rule applies to. */
    @NotEmpty
    @UCloudParam("CidrBlock")
    private String cidrBlock;

    /** Port range the rule applies to. */
    @NotEmpty
    @UCloudParam("PortRange")
    private String portRange;

    /** Action of the rule: "Accept" or "Reject". */
    @NotEmpty
    @UCloudParam("EntryAction")
    private String entryAction;

    /** Description; limited to 32 bytes. */
    @UCloudParam("Description")
    private String description;

    /**
     * Target type: 0 = all resources in the subnet, 1 = specified resources in the subnet.
     * Defaults to 0.
     */
    @UCloudParam("TargetType")
    private Integer targetType;

    /**
     * List of target resource IDs. Defaults to all resources.
     * Not required when TargetType is 0.
     */
    @UCloudParam("TargetResourceIds")
    private List<String> targetResourceIds;

    public String getRegion() {
        return region;
    }

    public void setRegion(String region) {
        this.region = region;
    }

    public String getProjectId() {
        return projectId;
    }

    public void setProjectId(String projectId) {
        this.projectId = projectId;
    }

    public String getAclId() {
        return aclId;
    }

    public void setAclId(String aclId) {
        this.aclId = aclId;
    }

    public Integer getPriority() {
        return priority;
    }

    public void setPriority(Integer priority) {
        this.priority = priority;
    }

    public String getDirection() {
        return direction;
    }

    public void setDirection(String direction) {
        this.direction = direction;
    }

    public String getIpProtocol() {
        return ipProtocol;
    }

    public void setIpProtocol(String ipProtocol) {
        this.ipProtocol = ipProtocol;
    }

    public String getCidrBlock() {
        return cidrBlock;
    }

    public void setCidrBlock(String cidrBlock) {
        this.cidrBlock = cidrBlock;
    }

    public String getPortRange() {
        return portRange;
    }

    public void setPortRange(String portRange) {
        this.portRange = portRange;
    }

    public String getEntryAction() {
        return entryAction;
    }

    public void setEntryAction(String entryAction) {
        this.entryAction = entryAction;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Integer getTargetType() {
        return targetType;
    }

    public void setTargetType(Integer targetType) {
        this.targetType = targetType;
    }

    public List<String> getTargetResourceIds() {
        return targetResourceIds;
    }

    public void setTargetResourceIds(List<String> targetResourceIds) {
        this.targetResourceIds = targetResourceIds;
    }
}
|
/*
* Copyright 2014 Akamai Technologies.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.akamai.open.moonlighting.persistence.service.impl.dao.queries;
import com.akamai.open.moonlighting.persistence.service.impl.dao.EntityQuery;
import com.akamai.open.moonlighting.persistence.service.impl.dao.Query;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
 * Query that selects all entities of a given {@code type} within a
 * {@code workspace}. The SQL text comes from
 * {@link Query#SELECT_ENTITIES_BY_TYPE}; this class only binds the two
 * positional parameters (1: workspace, 2: type).
 *
 * @author Shreyas Dube <sdube@akamai.com>
 */
public class SelectEntitiesByType extends EntityQuery {

    // Bound to parameter 1 of the prepared statement.
    private final String workspace;
    // Bound to parameter 2 of the prepared statement.
    private final String type;

    public SelectEntitiesByType(String workspace, String type) {
        this.workspace = workspace;
        this.type = type;
    }

    /** Returns the SQL of {@link Query#SELECT_ENTITIES_BY_TYPE}. */
    @Override
    public String getQuery() {
        return Query.SELECT_ENTITIES_BY_TYPE.getSql();
    }

    /** Returns an initializer that binds workspace and type to the statement's placeholders. */
    @Override
    public PreparedStatementInitializer getPreparedStatementInitializer() {
        return new PreparedStatementInitializer() {
            @Override
            public void initialize(Connection c, PreparedStatement s) throws SQLException {
                s.setString(1, workspace);
                s.setString(2, type);
            }
        };
    }
}
|
/*
* Copyright 2009 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.iq80.leveldb.util;
import com.google.common.base.Preconditions;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
import java.nio.charset.Charset;
import java.util.Arrays;
import static java.nio.ByteOrder.LITTLE_ENDIAN;
import static org.iq80.leveldb.util.SizeOf.*;
/**
 * Little Endian slice of a byte array.
 */
public final class Slice implements Comparable<Slice>
{
    private final byte[] data;
    private final int offset;
    private final int length;

    // Lazily computed hash code; 0 means "not yet computed". The benign
    // race on this cache is safe: the computation is deterministic and
    // never stores 0 (see hashCode()).
    private int hash;

    /** Creates a slice backed by a fresh zero-filled array of the given length. */
    public Slice(int length)
    {
        data = new byte[length];
        this.offset = 0;
        this.length = length;
    }

    /** Creates a slice spanning the whole of {@code data}; no copy is made. */
    public Slice(byte[] data)
    {
        Preconditions.checkNotNull(data, "array is null");
        this.data = data;
        this.offset = 0;
        this.length = data.length;
    }

    /** Creates a slice over {@code length} bytes of {@code data} starting at {@code offset}; no copy is made. */
    public Slice(byte[] data, int offset, int length)
    {
        Preconditions.checkNotNull(data, "array is null");
        this.data = data;
        this.offset = offset;
        this.length = length;
    }

    /**
     * Length of this slice.
     */
    public int length()
    {
        return length;
    }

    /**
     * Gets the array underlying this slice.
     */
    public byte[] getRawArray()
    {
        return data;
    }

    /**
     * Gets the offset of this slice in the underlying array.
     */
    public int getRawOffset()
    {
        return offset;
    }

    /**
     * Gets a byte at the specified absolute {@code index} in this buffer.
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * {@code index + 1} is greater than {@code this.capacity}
     */
    public byte getByte(int index)
    {
        Preconditions.checkPositionIndexes(index, index + SIZE_OF_BYTE, this.length);
        index += offset;
        return data[index];
    }

    /**
     * Gets an unsigned byte at the specified absolute {@code index} in this
     * buffer.
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * {@code index + 1} is greater than {@code this.capacity}
     */
    public short getUnsignedByte(int index)
    {
        return (short) (getByte(index) & 0xFF);
    }

    /**
     * Gets a 16-bit short integer at the specified absolute {@code index} in
     * this slice (little endian).
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * {@code index + 2} is greater than {@code this.capacity}
     */
    public short getShort(int index)
    {
        Preconditions.checkPositionIndexes(index, index + SIZE_OF_SHORT, this.length);
        index += offset;
        return (short) (data[index] & 0xFF | data[index + 1] << 8);
    }

    /**
     * Gets a 32-bit integer at the specified absolute {@code index} in
     * this buffer (little endian).
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * {@code index + 4} is greater than {@code this.capacity}
     */
    public int getInt(int index)
    {
        Preconditions.checkPositionIndexes(index, index + SIZE_OF_INT, this.length);
        index += offset;
        return (data[index] & 0xff) |
                (data[index + 1] & 0xff) << 8 |
                (data[index + 2] & 0xff) << 16 |
                (data[index + 3] & 0xff) << 24;
    }

    /**
     * Gets a 64-bit long integer at the specified absolute {@code index} in
     * this buffer (little endian).
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * {@code index + 8} is greater than {@code this.capacity}
     */
    public long getLong(int index)
    {
        Preconditions.checkPositionIndexes(index, index + SIZE_OF_LONG, this.length);
        index += offset;
        return ((long) data[index] & 0xff) |
                ((long) data[index + 1] & 0xff) << 8 |
                ((long) data[index + 2] & 0xff) << 16 |
                ((long) data[index + 3] & 0xff) << 24 |
                ((long) data[index + 4] & 0xff) << 32 |
                ((long) data[index + 5] & 0xff) << 40 |
                ((long) data[index + 6] & 0xff) << 48 |
                ((long) data[index + 7] & 0xff) << 56;
    }

    /**
     * Transfers this buffer's data to the specified destination starting at
     * the specified absolute {@code index}.
     *
     * @param dstIndex the first index of the destination
     * @param length the number of bytes to transfer
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0},
     * if the specified {@code dstIndex} is less than {@code 0},
     * if {@code index + length} is greater than
     * {@code this.capacity}, or
     * if {@code dstIndex + length} is greater than
     * {@code dst.capacity}
     */
    public void getBytes(int index, Slice dst, int dstIndex, int length)
    {
        getBytes(index, dst.data, dstIndex, length);
    }

    /**
     * Transfers this buffer's data to the specified destination starting at
     * the specified absolute {@code index}.
     *
     * @param destinationIndex the first index of the destination
     * @param length the number of bytes to transfer
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0},
     * if the specified {@code dstIndex} is less than {@code 0},
     * if {@code index + length} is greater than
     * {@code this.capacity}, or
     * if {@code dstIndex + length} is greater than
     * {@code dst.length}
     */
    public void getBytes(int index, byte[] destination, int destinationIndex, int length)
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        Preconditions.checkPositionIndexes(destinationIndex, destinationIndex + length, destination.length);
        index += offset;
        System.arraycopy(data, index, destination, destinationIndex, length);
    }

    /** Returns a copy of this entire slice as a new byte array. */
    public byte[] getBytes()
    {
        return getBytes(0, length);
    }

    /** Returns a copy of {@code length} bytes of this slice starting at {@code index}. */
    public byte[] getBytes(int index, int length)
    {
        // Bounds check added for consistency with copyBytes(int, int);
        // previously an out-of-range request could read past the end of
        // this slice into adjacent bytes of the shared backing array.
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        index += offset;
        if (index == 0) {
            return Arrays.copyOf(data, length);
        }
        byte[] value = new byte[length];
        System.arraycopy(data, index, value, 0, length);
        return value;
    }

    /**
     * Transfers this buffer's data to the specified destination starting at
     * the specified absolute {@code index} until the destination's position
     * reaches its limit.
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * if {@code index + dst.remaining()} is greater than
     * {@code this.capacity}
     */
    public void getBytes(int index, ByteBuffer destination)
    {
        Preconditions.checkPositionIndex(index, this.length);
        // Transfer at most the bytes remaining in this slice from index to
        // the end. The previous code used the full slice length here, which
        // read past the end of the slice when index > 0.
        int count = Math.min(this.length - index, destination.remaining());
        destination.put(data, index + offset, count);
    }

    /**
     * Transfers this buffer's data to the specified stream starting at the
     * specified absolute {@code index}.
     *
     * @param length the number of bytes to transfer
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * if {@code index + length} is greater than
     * {@code this.capacity}
     * @throws java.io.IOException if the specified stream threw an exception during I/O
     */
    public void getBytes(int index, OutputStream out, int length)
            throws IOException
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        index += offset;
        out.write(data, index, length);
    }

    /**
     * Transfers this buffer's data to the specified channel starting at the
     * specified absolute {@code index}.
     *
     * @param length the maximum number of bytes to transfer
     * @return the actual number of bytes written out to the specified channel
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * if {@code index + length} is greater than
     * {@code this.capacity}
     * @throws java.io.IOException if the specified channel threw an exception during I/O
     */
    public int getBytes(int index, GatheringByteChannel out, int length)
            throws IOException
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        index += offset;
        return out.write(ByteBuffer.wrap(data, index, length));
    }

    /**
     * Sets the specified 16-bit short integer at the specified absolute
     * {@code index} in this buffer (little endian). The 16 high-order bits
     * of the specified value are ignored.
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * {@code index + 2} is greater than {@code this.capacity}
     */
    public void setShort(int index, int value)
    {
        Preconditions.checkPositionIndexes(index, index + SIZE_OF_SHORT, this.length);
        index += offset;
        data[index] = (byte) (value);
        data[index + 1] = (byte) (value >>> 8);
    }

    /**
     * Sets the specified 32-bit integer at the specified absolute
     * {@code index} in this buffer (little endian).
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * {@code index + 4} is greater than {@code this.capacity}
     */
    public void setInt(int index, int value)
    {
        Preconditions.checkPositionIndexes(index, index + SIZE_OF_INT, this.length);
        index += offset;
        data[index] = (byte) (value);
        data[index + 1] = (byte) (value >>> 8);
        data[index + 2] = (byte) (value >>> 16);
        data[index + 3] = (byte) (value >>> 24);
    }

    /**
     * Sets the specified 64-bit long integer at the specified absolute
     * {@code index} in this buffer (little endian).
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * {@code index + 8} is greater than {@code this.capacity}
     */
    public void setLong(int index, long value)
    {
        Preconditions.checkPositionIndexes(index, index + SIZE_OF_LONG, this.length);
        index += offset;
        data[index] = (byte) (value);
        data[index + 1] = (byte) (value >>> 8);
        data[index + 2] = (byte) (value >>> 16);
        data[index + 3] = (byte) (value >>> 24);
        data[index + 4] = (byte) (value >>> 32);
        data[index + 5] = (byte) (value >>> 40);
        data[index + 6] = (byte) (value >>> 48);
        data[index + 7] = (byte) (value >>> 56);
    }

    /**
     * Sets the specified byte at the specified absolute {@code index} in this
     * buffer. The 24 high-order bits of the specified value are ignored.
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * {@code index + 1} is greater than {@code this.capacity}
     */
    public void setByte(int index, int value)
    {
        Preconditions.checkPositionIndexes(index, index + SIZE_OF_BYTE, this.length);
        index += offset;
        data[index] = (byte) value;
    }

    /**
     * Transfers the specified source buffer's data to this buffer starting at
     * the specified absolute {@code index}.
     *
     * @param srcIndex the first index of the source
     * @param length the number of bytes to transfer
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0},
     * if the specified {@code srcIndex} is less than {@code 0},
     * if {@code index + length} is greater than
     * {@code this.capacity}, or
     * if {@code srcIndex + length} is greater than
     * {@code src.capacity}
     */
    public void setBytes(int index, Slice src, int srcIndex, int length)
    {
        setBytes(index, src.data, src.offset + srcIndex, length);
    }

    /**
     * Transfers the specified source array's data to this buffer starting at
     * the specified absolute {@code index}.
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0},
     * if the specified {@code srcIndex} is less than {@code 0},
     * if {@code index + length} is greater than
     * {@code this.capacity}, or
     * if {@code srcIndex + length} is greater than {@code src.length}
     */
    public void setBytes(int index, byte[] source, int sourceIndex, int length)
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        Preconditions.checkPositionIndexes(sourceIndex, sourceIndex + length, source.length);
        index += offset;
        System.arraycopy(source, sourceIndex, data, index, length);
    }

    /**
     * Transfers the specified source buffer's data to this buffer starting at
     * the specified absolute {@code index} until the source buffer's position
     * reaches its limit.
     *
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * if {@code index + src.remaining()} is greater than
     * {@code this.capacity}
     */
    public void setBytes(int index, ByteBuffer source)
    {
        Preconditions.checkPositionIndexes(index, index + source.remaining(), this.length);
        index += offset;
        source.get(data, index, source.remaining());
    }

    /**
     * Transfers the content of the specified source stream to this buffer
     * starting at the specified absolute {@code index}.
     *
     * @param length the number of bytes to transfer
     * @return the actual number of bytes read in from the specified channel.
     * {@code -1} if the specified channel is closed.
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * if {@code index + length} is greater than {@code this.capacity}
     * @throws java.io.IOException if the specified stream threw an exception during I/O
     */
    public int setBytes(int index, InputStream in, int length)
            throws IOException
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        index += offset;
        int readBytes = 0;
        do {
            int localReadBytes = in.read(data, index, length);
            if (localReadBytes < 0) {
                if (readBytes == 0) {
                    // stream was already exhausted
                    return -1;
                }
                else {
                    break;
                }
            }
            readBytes += localReadBytes;
            index += localReadBytes;
            length -= localReadBytes;
        } while (length > 0);
        return readBytes;
    }

    /**
     * Transfers the content of the specified source channel to this buffer
     * starting at the specified absolute {@code index}.
     *
     * @param length the maximum number of bytes to transfer
     * @return the actual number of bytes read in from the specified channel.
     * {@code -1} if the specified channel is closed.
     * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or
     * if {@code index + length} is greater than {@code this.capacity}
     * @throws java.io.IOException if the specified channel threw an exception during I/O
     */
    public int setBytes(int index, ScatteringByteChannel in, int length)
            throws IOException
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        index += offset;
        ByteBuffer buf = ByteBuffer.wrap(data, index, length);
        int readBytes = 0;
        do {
            int localReadBytes;
            try {
                localReadBytes = in.read(buf);
            }
            catch (ClosedChannelException e) {
                // treat a closed channel like end-of-stream
                localReadBytes = -1;
            }
            if (localReadBytes < 0) {
                if (readBytes == 0) {
                    return -1;
                }
                else {
                    break;
                }
            }
            else if (localReadBytes == 0) {
                break;
            }
            readBytes += localReadBytes;
        } while (readBytes < length);
        return readBytes;
    }

    /**
     * Transfers up to {@code length} bytes from the given file channel,
     * reading at {@code position}, into this buffer starting at the
     * specified absolute {@code index}.
     *
     * @return the actual number of bytes read; {@code -1} if the channel is closed
     * @throws java.io.IOException if the specified channel threw an exception during I/O
     */
    public int setBytes(int index, FileChannel in, int position, int length)
            throws IOException
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        index += offset;
        ByteBuffer buf = ByteBuffer.wrap(data, index, length);
        int readBytes = 0;
        do {
            int localReadBytes;
            try {
                localReadBytes = in.read(buf, position + readBytes);
            }
            catch (ClosedChannelException e) {
                // treat a closed channel like end-of-stream
                localReadBytes = -1;
            }
            if (localReadBytes < 0) {
                if (readBytes == 0) {
                    return -1;
                }
                else {
                    break;
                }
            }
            else if (localReadBytes == 0) {
                break;
            }
            readBytes += localReadBytes;
        } while (readBytes < length);
        return readBytes;
    }

    /** Returns an independent copy of this entire slice. */
    public Slice copySlice()
    {
        return copySlice(0, length);
    }

    /**
     * Returns a copy of this buffer's sub-region. Modifying the content of
     * the returned buffer or this buffer does not affect each other at all.
     */
    public Slice copySlice(int index, int length)
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        index += offset;
        byte[] copiedArray = new byte[length];
        System.arraycopy(data, index, copiedArray, 0, length);
        return new Slice(copiedArray);
    }

    /** Returns an independent copy of this entire slice as a byte array. */
    public byte[] copyBytes()
    {
        return copyBytes(0, length);
    }

    /** Returns an independent copy of {@code length} bytes starting at {@code index}. */
    public byte[] copyBytes(int index, int length)
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        index += offset;
        if (index == 0) {
            return Arrays.copyOf(data, length);
        } else {
            byte[] value = new byte[length];
            System.arraycopy(data, index, value, 0, length);
            return value;
        }
    }

    /**
     * Returns a slice of this buffer's readable bytes. Modifying the content
     * of the returned buffer or this buffer affects each other's content
     * while they maintain separate indexes and marks.
     */
    public Slice slice()
    {
        return slice(0, length);
    }

    /**
     * Returns a slice of this buffer's sub-region. Modifying the content of
     * the returned buffer or this buffer affects each other's content while
     * they maintain separate indexes and marks.
     */
    public Slice slice(int index, int length)
    {
        if (index == 0 && length == this.length) {
            return this;
        }
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        if (index >= 0 && length == 0) {
            return Slices.EMPTY_SLICE;
        }
        return new Slice(data, offset + index, length);
    }

    /**
     * Creates an input stream over this slice.
     */
    public SliceInput input()
    {
        return new SliceInput(this);
    }

    /**
     * Creates an output stream over this slice.
     */
    public SliceOutput output()
    {
        return new BasicSliceOutput(this);
    }

    /**
     * Converts this buffer's readable bytes into a NIO buffer. The returned
     * buffer shares the content with this buffer.
     */
    public ByteBuffer toByteBuffer()
    {
        return toByteBuffer(0, length);
    }

    /**
     * Converts this buffer's sub-region into a NIO buffer. The returned
     * buffer shares the content with this buffer.
     */
    public ByteBuffer toByteBuffer(int index, int length)
    {
        Preconditions.checkPositionIndexes(index, index + length, this.length);
        index += offset;
        return ByteBuffer.wrap(data, index, length).order(LITTLE_ENDIAN);
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Slice slice = (Slice) o;
        // do lengths match
        if (length != slice.length) {
            return false;
        }
        // if arrays have same base offset, some optimizations can be taken...
        if (offset == slice.offset && data == slice.data) {
            return true;
        }
        for (int i = 0; i < length; i++) {
            if (data[offset + i] != slice.data[slice.offset + i]) {
                return false;
            }
        }
        return true;
    }

    @Override
    public int hashCode()
    {
        if (hash != 0) {
            return hash;
        }
        int result = length;
        for (int i = offset; i < offset + length; i++) {
            result = 31 * result + data[i];
        }
        // never cache 0: it is the "not yet computed" sentinel
        if (result == 0) {
            result = 1;
        }
        hash = result;
        return hash;
    }

    /**
     * Compares the content of the specified buffer to the content of this
     * buffer. This comparison is performed byte by byte using an unsigned
     * comparison.
     */
    @Override
    public int compareTo(Slice that)
    {
        if (this == that) {
            return 0;
        }
        if (this.data == that.data && length == that.length && offset == that.offset) {
            return 0;
        }
        int minLength = Math.min(this.length, that.length);
        for (int i = 0; i < minLength; i++) {
            int thisByte = 0xFF & this.data[this.offset + i];
            int thatByte = 0xFF & that.data[that.offset + i];
            if (thisByte != thatByte) {
                return (thisByte) - (thatByte);
            }
        }
        // a prefix sorts before any longer sequence it prefixes
        return this.length - that.length;
    }

    /**
     * Decodes this buffer's readable bytes into a string with the specified
     * character set name.
     */
    public String toString(Charset charset)
    {
        return toString(0, length, charset);
    }

    /**
     * Decodes this buffer's sub-region into a string with the specified
     * character set.
     */
    public String toString(int index, int length, Charset charset)
    {
        if (length == 0) {
            return "";
        }
        return Slices.decodeString(toByteBuffer(index, length), charset);
    }

    @Override
    public String toString()
    {
        return getClass().getSimpleName() + '(' +
                "length=" + length() +
                ')';
    }
}
|
package com.github.tsavo.apiomatic.xml;
import java.util.Map;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
 * Thin convenience wrapper around a DOM {@link Element}. The nested
 * {@link XMLNodeFactory} exposes short {@code l(...)} overloads so nested
 * document construction reads like a literal tree.
 */
public class XMLNode {

    public static class XMLNodeFactory {
        Document document;

        public XMLNodeFactory(Document aDocument) {
            document = aDocument;
        }

        /** Element with the given name and child nodes. */
        public XMLNode l(String aName, XMLNode... children) {
            return new XMLNode(document, aName, children);
        }

        /** Element with one attribute plus child nodes. */
        public XMLNode l(String aName, String attribute, String value, XMLNode... children) {
            XMLNode wrapped = new XMLNode(document, aName, children);
            wrapped.node.setAttribute(attribute, value);
            return wrapped;
        }

        /** Element with one attribute and a text body. */
        public XMLNode l(String aName, String attribute, String value, String aText) {
            XMLNode wrapped = new XMLNode(document, aName, new XMLNode[0]);
            wrapped.node.setAttribute(attribute, value);
            wrapped.node.appendChild(document.createTextNode(aText));
            return wrapped;
        }

        /** Element with a map of attributes plus child nodes. */
        public XMLNode l(String aName, Map<String, String> attributes, XMLNode... children) {
            XMLNode wrapped = new XMLNode(document, aName, children);
            attributes.forEach(wrapped.node::setAttribute);
            return wrapped;
        }

        /** Element containing only the given text. */
        public XMLNode l(String aName, String aText) {
            XMLNode wrapped = new XMLNode(document, aName, new XMLNode[0]);
            wrapped.node.appendChild(document.createTextNode(aText));
            return wrapped;
        }
    }

    /** The wrapped DOM element; may be null when the no-arg constructor was used. */
    public Element node;

    /** Creates a new element named {@code aName} and appends the given children to it. */
    public XMLNode(Document aDocument, String aName, XMLNode... nodes) {
        node = aDocument.createElement(aName);
        for (XMLNode child : nodes) {
            node.appendChild(child.node);
        }
    }

    /**
     * Wraps the document's existing root element, applying the given
     * attributes and appending the given children to it.
     */
    public XMLNode(Document aDocument, Map<String, String> attributes, XMLNode... nodes) {
        Element root = aDocument.getDocumentElement();
        attributes.forEach(root::setAttribute);
        for (XMLNode child : nodes) {
            root.appendChild(child.node);
        }
        node = root;
    }

    /** Creates an empty wrapper with no element attached. */
    public XMLNode() {
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.spark.util;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.Serializable;
import org.apache.beam.sdk.coders.Coder;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.util.SizeEstimator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Broadcast helper for side inputs. Holds the coder-serialized bytes of a side input, broadcasts
 * them via Spark, and lazily decodes them back into a value on first access.
 */
@SuppressWarnings("nullness") // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
public class SideInputBroadcast<T> implements Serializable {
  private static final Logger LOG = LoggerFactory.getLogger(SideInputBroadcast.class);

  private Broadcast<byte[]> bcast;
  private final Coder<T> coder;
  private transient T value;
  private transient byte[] bytes = null;

  private SideInputBroadcast(byte[] bytes, Coder<T> coder) {
    this.bytes = bytes;
    this.coder = coder;
  }

  /** Creates a helper around already coder-serialized side-input bytes. */
  public static <T> SideInputBroadcast<T> create(byte[] bytes, Coder<T> coder) {
    return new SideInputBroadcast<>(bytes, coder);
  }

  /** Returns the decoded value, deserializing from the broadcast bytes on first call. */
  public synchronized T getValue() {
    if (value == null) {
      value = deserialize();
    }
    return value;
  }

  /** Broadcasts the serialized bytes; must be called before {@link #getValue()} is used. */
  public void broadcast(JavaSparkContext jsc) {
    this.bcast = jsc.broadcast(bytes);
  }

  /** Releases the broadcast variable's resources on the executors. */
  public void unpersist() {
    this.bcast.unpersist();
  }

  private T deserialize() {
    T val;
    try {
      val = coder.decode(new ByteArrayInputStream(bcast.value()));
    } catch (IOException ioe) {
      // This should never happen. Log the full exception (not just getMessage(),
      // which may be null and loses the stack trace) so a failure is diagnosable.
      LOG.warn("Failed to deserialize broadcast side input", ioe);
      val = null;
    }
    return val;
  }

  /** Estimates the in-memory size of the serialized side input (for diagnostics). */
  public long getBroadcastSizeEstimate() {
    return SizeEstimator.estimate(bytes);
  }
}
|
// Copyright (c) 2021 Terminus, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cloud.erda.analyzer.alert.functions;
import cloud.erda.analyzer.alert.models.NotifyEvent;
import cloud.erda.analyzer.alert.models.UniversalTemplate;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;
import java.util.HashMap;
import java.util.Map;
@Slf4j
public class NotifyTemplateProcessFunction extends BroadcastProcessFunction<NotifyEvent, UniversalTemplate, NotifyEvent> {

    /** Broadcast-state descriptor: notifyId -> (target channel -> template). */
    private final MapStateDescriptor<String, Map<String, UniversalTemplate>> notifyTemplateMapStateDescriptor;
    /** Time-to-live for broadcast templates, in milliseconds. */
    private final long stateTtl;
    /** Timestamp of the last expired-state sweep; throttles cleanup to once per TTL window. */
    private long lastCleanTime;

    public NotifyTemplateProcessFunction(long stateTtl, MapStateDescriptor<String, Map<String, UniversalTemplate>> notifyTemplateMapStateDescriptor) {
        this.stateTtl = stateTtl;
        this.notifyTemplateMapStateDescriptor = notifyTemplateMapStateDescriptor;
    }

    @Override
    public void processElement(NotifyEvent notifyEvent, ReadOnlyContext readOnlyContext, Collector<NotifyEvent> collector) throws Exception {
        if (notifyEvent == null) {
            return;
        }
        ReadOnlyBroadcastState<String, Map<String, UniversalTemplate>> templateState = readOnlyContext.getBroadcastState(notifyTemplateMapStateDescriptor);
        // Direct lookup by template id; no need to iterate the whole broadcast state.
        Map<String, UniversalTemplate> template = templateState.get(notifyEvent.getTemplateId());
        if (template == null || template.isEmpty()) {
            return;
        }
        // Fan out one event copy per notification channel that has a matching template.
        for (String chan : notifyEvent.getNotify().getTarget().getChannels()) {
            UniversalTemplate t = template.get(chan);
            if (t != null) {
                NotifyEvent event = notifyEvent.copy();
                event.setNotifyTemplate(t);
                collector.collect(event);
            }
        }
    }

    @Override
    public void processBroadcastElement(UniversalTemplate notifyTemplate, Context context, Collector<NotifyEvent> collector) throws Exception {
        if (notifyTemplate == null) {
            return;
        }
        cleanExpireState(context);
        BroadcastState<String, Map<String, UniversalTemplate>> notifyTemplateState = context.getBroadcastState(notifyTemplateMapStateDescriptor);
        Map<String, UniversalTemplate> items = notifyTemplateState.get(notifyTemplate.getNotifyId());
        if (items == null) {
            items = new HashMap<>();
        }
        items.put(notifyTemplate.getTemplate().getTarget(), notifyTemplate);
        // Put AFTER mutating the map so the update is persisted even when the state
        // backend stores serialized copies rather than live object references.
        notifyTemplateState.put(notifyTemplate.getNotifyId(), items);
    }

    /** Removes templates older than the TTL; runs at most once per TTL window. */
    private void cleanExpireState(Context ctx) throws Exception {
        long now = System.currentTimeMillis();
        if (now - lastCleanTime < stateTtl) {
            return;
        }
        lastCleanTime = now;
        BroadcastState<String, Map<String, UniversalTemplate>> templates = ctx.getBroadcastState(notifyTemplateMapStateDescriptor);
        if (templates == null) {
            return;
        }
        for (Map.Entry<String, Map<String, UniversalTemplate>> item : templates.entries()) {
            // Iterate over a snapshot so we can safely remove from the underlying map.
            for (Map.Entry<String, UniversalTemplate> val : new HashMap<>(item.getValue()).entrySet()) {
                if (now - val.getValue().getProcessingTime() > stateTtl) {
                    item.getValue().remove(val.getKey());
                }
            }
        }
        log.info("clean up expired notify template");
    }
}
|
package de.mserve.europass.service;
import java.io.File;
import java.nio.file.Paths;
import org.apache.commons.io.FilenameUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import de.mserve.europass.model.CertificateEntry;
import eu.europa.esig.dss.enumerations.DigestAlgorithm;
import eu.europa.esig.dss.enumerations.SignatureLevel;
import eu.europa.esig.dss.enumerations.SignaturePackaging;
import eu.europa.esig.dss.model.DSSDocument;
import eu.europa.esig.dss.model.FileDocument;
import eu.europa.esig.dss.model.SignatureValue;
import eu.europa.esig.dss.model.ToBeSigned;
import eu.europa.esig.dss.validation.CommonCertificateVerifier;
import eu.europa.esig.dss.xades.XAdESSignatureParameters;
import eu.europa.esig.dss.xades.signature.XAdESService;
/** Signs XML files with XAdES signatures using a configured certificate entry. */
public class XmlSigner {

    // Fixed copy-paste bug: the logger previously referenced CertificateLoader.class.
    private static final Logger LOG = LoggerFactory.getLogger(XmlSigner.class);

    /** Certificate (key entry + signature token) used to produce the signature. */
    private CertificateEntry cert;
    /** XAdES signature parameters, initialized from the certificate and defaults. */
    private XAdESSignatureParameters parameters;

    private XmlSigner() {
    }

    /**
     * Builds a signer for the given certificate entry with default parameters
     * (XAdES-BASELINE-B, enveloped packaging, SHA-256 digest).
     */
    public static XmlSigner build(CertificateEntry cert) {
        XmlSigner x = new XmlSigner();
        x.setCertificateEntry(cert);
        return x;
    }

    private void setCertificateEntry(CertificateEntry cert) {
        // Set token
        this.cert = cert;
        // Preparing parameters for the XAdES signature
        this.parameters = new XAdESSignatureParameters();
        // Default settings
        this.parameters.setSignatureLevel(SignatureLevel.XAdES_BASELINE_B);
        this.parameters.setSignaturePackaging(SignaturePackaging.ENVELOPED);
        this.parameters.setDigestAlgorithm(DigestAlgorithm.SHA256);
        // Add key
        this.parameters.setSigningCertificate(this.cert.getEntry().getCertificate());
        this.parameters.setCertificateChain(this.cert.getEntry().getCertificateChain());
    }

    /** Overrides the default signature level, packaging, and digest algorithm. */
    public void setParameters(SignatureLevel level, SignaturePackaging packaging, DigestAlgorithm da) {
        this.parameters.setSignatureLevel(level);
        this.parameters.setSignaturePackaging(packaging);
        this.parameters.setDigestAlgorithm(da);
    }

    /** Signs the given XML file, writing the output next to the input file. */
    public boolean sign(File xml) {
        return sign(xml, FilenameUtils.getFullPath(xml.getAbsolutePath()));
    }

    /**
     * Signs the given XML file and writes {@code <basename>-signed.xml} into {@code outDir}.
     *
     * @param xml the XML file to sign
     * @param outDir directory where the signed file is written
     * @return true on success; false if the certificate is missing/invalid or writing fails
     */
    public boolean sign(File xml, String outDir) {
        // Validate the certificate material up front, before any signing work is done
        // (previously these checks ran after getDataToSign had already used the cert).
        if (this.cert == null) {
            LOG.error("No valid certificate set");
            return false;
        }
        if (this.cert.getSignatureToken() == null) {
            LOG.error("Certificate '{}' has no valid token", this.cert.getLabel());
            return false;
        }
        // Prepare the service
        XAdESService service = new XAdESService(new CommonCertificateVerifier());
        // Prepare the document
        LOG.info("Loading XML file '{}'", xml.getAbsolutePath());
        DSSDocument document = new FileDocument(xml);
        LOG.info("Using signature level {}, packaging {}, digest {}", this.parameters.getSignatureLevel().name(),
                this.parameters.getSignaturePackaging().name(), this.parameters.getDigestAlgorithm().name());
        ToBeSigned dataToSign = service.getDataToSign(document, parameters);
        DigestAlgorithm digestAlgorithm = parameters.getDigestAlgorithm();
        SignatureValue signatureValue = this.cert.getSignatureToken().sign(dataToSign, digestAlgorithm, this.cert.getEntry());
        LOG.info("Signature value: {} bytes", signatureValue.getValue().length);
        DSSDocument signedDocument = service.signDocument(document, parameters, signatureValue);
        LOG.info("XML file successfully signed with key '{}'", this.cert.getLabel());
        String outFilePath = Paths.get(outDir, FilenameUtils.getBaseName(xml.getAbsolutePath()) + "-signed.xml").toAbsolutePath().toString();
        try {
            signedDocument.save(outFilePath);
        } catch (Exception e) {
            // Include the exception so the failure cause is not silently dropped.
            LOG.error("error writing to path '{}'", outFilePath, e);
            return false;
        }
        LOG.info("XML file successfully written to path '{}'", outFilePath);
        return true;
    }
}
|
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2014 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.pscanrulesAlpha;
import java.util.Base64;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.htmlparser.jericho.Source;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.core.scanner.Alert;
import org.parosproxy.paros.core.scanner.Plugin.AlertThreshold;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.addon.commonlib.CommonAlertTag;
import org.zaproxy.zap.extension.pscan.PassiveScanThread;
import org.zaproxy.zap.extension.pscan.PluginPassiveScanner;
/**
* A class to passively scan responses for Base64 encoded data, including ASP ViewState data, which
* is Base64 encoded.
*
* @author 70pointer@gmail.com
*/
public class Base64Disclosure extends PluginPassiveScanner {

    /**
     * a set of patterns used to identify Base64 encoded data. Set a minimum length to reduce false
     * positives. Note that because we only look for patterns ending in at least one "=", we will
     * have false negatives (ie, we will not detect ALL Base64 references). If we do not include
     * this condition, however, we will have a very large number of false positives. TODO: find a
     * different way to reduce false positives without causing false negatives.
     */
    static final Set<Pattern> base64Patterns =
            new LinkedHashSet<>(); // the order of patterns is important. most specific first

    static {
        // Single pattern covering both the standard ("+/") and the URL/file-safe
        // ("-_", used by JWTs) Base64 alphabets.
        base64Patterns.add(Pattern.compile("[a-zA-Z0-9\\+\\\\/\\-_]{30,}={0,2}"));
    }

    /**
     * patterns used to identify strings without each of the given character sets, which are used to
     * calculate the probability of this occurring, and eliminate potential Base64 strings which are
     * extremely improbable
     */
    static final Pattern digitPattern = Pattern.compile("[0-9]");

    static final Pattern alphaPattern = Pattern.compile("[a-zA-Z]");
    static final Pattern otherPattern = Pattern.compile("[\\+\\\\/\\-_]");
    static final Pattern lowercasePattern = Pattern.compile("[a-z]");
    static final Pattern uppercasePattern = Pattern.compile("[A-Z]");

    /** The logger. */
    private static final Logger log = LogManager.getLogger(Base64Disclosure.class);

    /** Prefix for internationalized messages used by this rule */
    private static final String MESSAGE_PREFIX = "pscanalpha.base64disclosure.";

    private static final Map<String, String> ALERT_TAGS =
            CommonAlertTag.toMap(
                    CommonAlertTag.OWASP_2021_A04_INSECURE_DESIGN,
                    CommonAlertTag.OWASP_2017_A03_DATA_EXPOSED);

    @Override
    public String getName() {
        return Constant.messages.getString(MESSAGE_PREFIX + "name");
    }

    /**
     * scans the HTTP response (headers and body) for base64 signatures
     *
     * @param msg the message whose response is scanned
     * @param id the message id
     * @param source unused
     */
    @Override
    public void scanHttpResponseReceive(HttpMessage msg, int id, Source source) {
        log.debug("Checking message {} for Base64 encoded data", msg);
        // get the header and body contents as Strings, so we can match against them
        String responseheader = msg.getResponseHeader().getHeadersAsString();
        String responsebody = msg.getResponseBody().toString();
        String[] responseparts = {responseheader, responsebody};
        // for each pattern..
        for (Pattern pattern : base64Patterns) {
            log.debug("Trying Base64 Pattern: {}", pattern);
            for (String haystack : responseparts) {
                Matcher matcher = pattern.matcher(haystack);
                while (matcher.find()) {
                    String base64evidence = matcher.group();
                    String tempbase64evidence = base64evidence;
                    byte[] decodeddata = null;
                    try {
                        // if the string had the "-_" alphabet, replace the - and _ with + and /
                        // respectively
                        tempbase64evidence = tempbase64evidence.replace('-', '+');
                        tempbase64evidence = tempbase64evidence.replace('_', '/');
                        // decode the data
                        decodeddata = Base64.getDecoder().decode(tempbase64evidence);
                    } catch (IllegalArgumentException e) {
                        // it's not actually Base64. so skip it.
                        log.debug(
                                "[{}] (modified from [{}]) could not be decoded as Base64 data",
                                tempbase64evidence,
                                base64evidence);
                        continue;
                    }
                    // does the base 64 encoded string actually contain the various characters that
                    // we might expect?
                    // (note: we may not care, depending on the threshold set by the user)
                    // tempbase64evidence is already a String; no defensive copy is needed.
                    String base64evidenceString = tempbase64evidence;
                    boolean noDigitInString = !digitPattern.matcher(base64evidenceString).find();
                    boolean noAlphaInString = !alphaPattern.matcher(base64evidenceString).find();
                    boolean noLowerInString =
                            !lowercasePattern.matcher(base64evidenceString).find();
                    boolean noUpperInString =
                            !uppercasePattern.matcher(base64evidenceString).find();
                    // calculate the actual probability of a Base64 string of this length *not*
                    // containing a given character class (digit/alphabetic/other Base64 character)
                    float probabilityOfNoDigitInString =
                            (float) Math.pow(((float) 64 - 10) / 64, base64evidence.length());
                    float probabilityOfNoAlphaInString =
                            (float) Math.pow(((float) 64 - 52) / 64, base64evidence.length());
                    float probabilityOfNoLowerInString =
                            (float) Math.pow(((float) 64 - 26) / 64, base64evidence.length());
                    float probabilityOfNoUpperInString = probabilityOfNoLowerInString;
                    // set the threshold percentage based on what threshold was set by the user
                    float probabilityThreshold = 0.0F; // 0% probability threshold
                    switch (this.getAlertThreshold()) {
                            // 50% probability threshold (ie, "on balance of probability")
                        case HIGH:
                            probabilityThreshold = 0.50F;
                            break;
                            // 25% probability threshold
                        case MEDIUM:
                            probabilityThreshold = 0.25F;
                            break;
                            // 10% probability threshold
                        case LOW:
                            probabilityThreshold = 0.10F;
                            break;
                            // 0% probability threshold (all structurally valid Base64 data is
                            // considered, regardless of how improbable it is given character
                            // frequencies, etc.)
                        default:
                    }
                    // if the String is unlikely to be Base64, given the distribution of the
                    // characters, ie, less probable than the threshold probability controlled
                    // by the user, then do not process it.
                    if ((noDigitInString && probabilityOfNoDigitInString < probabilityThreshold)
                            || (noAlphaInString
                                    && probabilityOfNoAlphaInString < probabilityThreshold)
                            || (noLowerInString
                                    && probabilityOfNoLowerInString < probabilityThreshold)
                            || (noUpperInString
                                    && probabilityOfNoUpperInString < probabilityThreshold)) {
                        log.trace(
                                "The following candidate Base64 has been excluded on probabilistic grounds: [{}] ",
                                base64evidence);
                        if (noDigitInString) {
                            log.trace(
                                    "The candidate Base64 has no digit characters, and the probability of this occurring for a string of this length is {}%. The threshold is {}%",
                                    probabilityOfNoDigitInString * 100, probabilityThreshold * 100);
                        }
                        if (noAlphaInString) {
                            log.trace(
                                    "The candidate Base64 has no alphabetic characters, and the probability of this occurring for a string of this length is {}%. The threshold is {}%",
                                    probabilityOfNoAlphaInString * 100, probabilityThreshold * 100);
                        }
                        if (noLowerInString) {
                            log.trace(
                                    "The candidate Base64 has no lowercase characters, and the probability of this occurring for a string of this length is {}%. The threshold is {}%",
                                    probabilityOfNoLowerInString * 100, probabilityThreshold * 100);
                        }
                        if (noUpperInString) {
                            log.trace(
                                    "The candidate Base64 has no uppercase characters, and the probability of this occurring for a string of this length is {}%. The threshold is {}%",
                                    probabilityOfNoUpperInString * 100, probabilityThreshold * 100);
                        }
                        continue;
                    }
                    log.debug(
                            "Found a match for Base64, of length {}:{}",
                            base64evidence.length(),
                            base64evidence);
                    // so it's valid Base64. Is it valid .NET ViewState data?
                    // This will be true for both __VIEWSTATE and __EVENTVALIDATION data, although
                    // currently, we can only interpret/decode __VIEWSTATE.
                    boolean validviewstate = false;
                    boolean macless = false;
                    String viewstatexml = null;
                    if (decodeddata[0] == -1 || decodeddata[1] == 0x01) {
                        // TODO: decode __EVENTVALIDATION data
                        ViewStateDecoder viewstatedecoded = new ViewStateDecoder();
                        try {
                            log.debug(
                                    "The following Base64 string has a ViewState preamble: [{}]",
                                    base64evidence);
                            viewstatexml = viewstatedecoded.decodeAsXML(base64evidence.getBytes());
                            log.debug(
                                    "The data was successfully decoded as ViewState data of length {}: {}",
                                    viewstatexml.length(),
                                    viewstatexml);
                            validviewstate = true;
                            // is the ViewState protected by a MAC?
                            Matcher hmaclessmatcher =
                                    ViewStateDecoder.PATTERN_NO_HMAC.matcher(viewstatexml);
                            macless = hmaclessmatcher.find();
                            log.debug("MAC-less??? {}", macless);
                        } catch (Exception e) {
                            // no need to do anything here.. just don't set "validviewstate" to
                            // true :)
                            log.debug(
                                    "The Base64 value [{}] has a valid ViewState pre-amble, but is not a valid viewstate. It may be an EVENTVALIDATION value, is not yet decodable.",
                                    base64evidence);
                        }
                    }
                    if (validviewstate) {
                        log.debug("Raising a ViewState informational alert");
                        // raise an (informational) Alert with the human readable ViewState data
                        newAlert()
                                .setName(
                                        Constant.messages.getString(
                                                MESSAGE_PREFIX + "viewstate.name"))
                                .setRisk(Alert.RISK_INFO)
                                .setConfidence(Alert.CONFIDENCE_MEDIUM)
                                .setDescription(
                                        Constant.messages.getString(
                                                MESSAGE_PREFIX + "viewstate.desc"))
                                .setOtherInfo(
                                        Constant.messages.getString(
                                                MESSAGE_PREFIX + "viewstate.extrainfo",
                                                viewstatexml))
                                .setSolution(
                                        Constant.messages.getString(
                                                MESSAGE_PREFIX + "viewstate.soln"))
                                .setReference(
                                        Constant.messages.getString(
                                                MESSAGE_PREFIX + "viewstate.refs"))
                                .setEvidence(viewstatexml)
                                .setCweId(200) // Information Exposure,
                                .setWascId(13) // Information Leakage
                                .raise();
                        if (!macless && !AlertThreshold.LOW.equals(getAlertThreshold())) {
                            return;
                        }
                        // if the ViewState is not protected by a MAC, alert it as a High, cos we
                        // can mess with the parameters for sure..
                        if (macless) {
                            newAlert()
                                    .setName(
                                            Constant.messages.getString(
                                                    MESSAGE_PREFIX + "viewstatewithoutmac.name"))
                                    .setRisk(Alert.RISK_HIGH)
                                    .setConfidence(Alert.CONFIDENCE_MEDIUM)
                                    .setDescription(
                                            Constant.messages.getString(
                                                    MESSAGE_PREFIX + "viewstatewithoutmac.desc"))
                                    .setOtherInfo(
                                            Constant.messages.getString(
                                                    MESSAGE_PREFIX
                                                            + "viewstatewithoutmac.extrainfo",
                                                    viewstatexml))
                                    .setSolution(
                                            Constant.messages.getString(
                                                    MESSAGE_PREFIX + "viewstatewithoutmac.soln"))
                                    .setReference(
                                            Constant.messages.getString(
                                                    MESSAGE_PREFIX + "viewstatewithoutmac.refs"))
                                    .setEvidence(viewstatexml)
                                    .setCweId(642) // CWE-642 = External Control of Critical State
                                    // Data
                                    .setWascId(13) // Information Leakage
                                    .raise();
                            if (!AlertThreshold.LOW.equals(getAlertThreshold())) {
                                return;
                            }
                        }
                        // TODO: if the ViewState contains sensitive data, alert it (particularly
                        // if running over HTTP)
                    } else {
                        log.debug("Raising a Base64 informational alert");
                        // the Base64 decoded data is not a valid ViewState (even though it may
                        // have a valid ViewState pre-amble)
                        // so treat it as normal Base64 data, and raise an informational alert.
                        if (base64evidence.length() > 0) {
                            newAlert()
                                    .setRisk(Alert.RISK_INFO)
                                    .setConfidence(Alert.CONFIDENCE_MEDIUM)
                                    .setDescription(getDescription())
                                    .setOtherInfo(getExtraInfo(msg, base64evidence, decodeddata))
                                    .setSolution(getSolution())
                                    .setReference(getReference())
                                    .setEvidence(base64evidence)
                                    .setCweId(200) // CWE-200 = Information Exposure
                                    .setWascId(13) // Information Leakage
                                    .raise();
                            if (!AlertThreshold.LOW.equals(getAlertThreshold())) {
                                return;
                            }
                        }
                    }
                }
            }
        }
    }

    @Override
    public void setParent(PassiveScanThread parent) {
        // Nothing to do.
    }

    @Override
    public int getPluginId() {
        return 10094;
    }

    public Map<String, String> getAlertTags() {
        return ALERT_TAGS;
    }

    private String getDescription() {
        return Constant.messages.getString(MESSAGE_PREFIX + "desc");
    }

    private String getSolution() {
        return Constant.messages.getString(MESSAGE_PREFIX + "soln");
    }

    private String getReference() {
        return Constant.messages.getString(MESSAGE_PREFIX + "refs");
    }

    private String getExtraInfo(HttpMessage msg, String evidence, byte[] decodeddata) {
        return Constant.messages.getString(
                MESSAGE_PREFIX + "extrainfo", evidence, new String(decodeddata));
    }
}
|
package test.net.jawr.web.resource.bundle.generator.css;
import static net.jawr.web.resource.bundle.generator.css.sass.vaadin.SassVaadinGenerator.SASS_GENERATOR_DEFAULT_URL_MODE;
import static org.mockito.Matchers.anyString;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Properties;
import javax.servlet.ServletContext;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
import net.jawr.web.JawrConstant;
import net.jawr.web.config.JawrConfig;
import net.jawr.web.resource.BinaryResourcesHandler;
import net.jawr.web.resource.bundle.IOUtils;
import net.jawr.web.resource.bundle.JoinableResourceBundle;
import net.jawr.web.resource.bundle.generator.GeneratorContext;
import net.jawr.web.resource.bundle.generator.GeneratorRegistry;
import net.jawr.web.resource.bundle.generator.css.sass.vaadin.SassVaadinGenerator;
import net.jawr.web.resource.bundle.handler.ResourceBundlesHandler;
import net.jawr.web.resource.bundle.mappings.FilePathMapping;
import net.jawr.web.resource.handler.reader.ResourceReaderHandler;
import test.net.jawr.web.FileUtils;
import test.net.jawr.web.servlet.mock.MockServletContext;
@RunWith(MockitoJUnitRunner.class)
public class SassVaadinCssGeneratorTestCase {
private static String WORK_DIR = "workDirSass";
private SassVaadinGenerator generator;
@Mock
private JawrConfig config;
@Mock
private GeneratorContext ctx;
@Mock
private ResourceReaderHandler rsReaderHandler;
@Mock
private ResourceReaderHandler binaryRsReaderHandler;
@Mock
private ResourceBundlesHandler cssBundleHandler;
@Mock
private GeneratorRegistry generatorRegistry;
@Mock
private JoinableResourceBundle bundle;
private List<FilePathMapping> linkedResourcePathMappings;
	@SuppressWarnings("unchecked")
	@Before
	public void setUp() throws Exception {
		// Wire a fake servlet context exposing the CSS bundle handler to the generator.
		ServletContext servletContext = new MockServletContext();
		servletContext.setAttribute(JawrConstant.CSS_CONTEXT_ATTRIBUTE, cssBundleHandler);
		// Stub the Jawr config the generator will query.
		when(config.getContext()).thenReturn(servletContext);
		when(config.getResourceCharset()).thenReturn(Charset.forName("UTF-8"));
		when(config.getServletMapping()).thenReturn("/css");
		when(config.getProperty(JawrConstant.SASS_GENERATOR_URL_MODE, SASS_GENERATOR_DEFAULT_URL_MODE))
				.thenReturn(SASS_GENERATOR_DEFAULT_URL_MODE);
		when(config.getBinaryHashAlgorithm()).thenReturn("MD5");
		when(generatorRegistry.isGeneratedBinaryResource(Matchers.startsWith("jar:"))).thenReturn(true);
		when(generatorRegistry.isHandlingCssImage(Matchers.startsWith("jar:"))).thenReturn(true);
		when(config.getGeneratorRegistry()).thenReturn(generatorRegistry);
		// Fresh mapping list per test; tests assert on its contents after compilation.
		linkedResourcePathMappings = new ArrayList<>();
		when(bundle.getLinkedFilePathMappings()).thenReturn(linkedResourcePathMappings);
		// Create the generator under test with a clean working directory.
		generator = new SassVaadinGenerator();
		FileUtils.clearDirectory(FileUtils.getClasspathRootDir() + File.separator + WORK_DIR);
		FileUtils.createDir(WORK_DIR);
		generator.setWorkingDirectory(FileUtils.getClasspathRootDir() + "/" + WORK_DIR);
		when(ctx.getResourceReaderHandler()).thenReturn(rsReaderHandler);
		when(ctx.getConfig()).thenReturn(config);
		when(ctx.getCharset()).thenReturn(Charset.forName("UTF-8"));
		when(ctx.getBundle()).thenReturn(bundle);
		// Set up the Image servlet Jawr config
		JawrConfig binaryServletJawrConfig = new JawrConfig(JawrConstant.BINARY_TYPE, new Properties());
		binaryServletJawrConfig.setGeneratorRegistry(generatorRegistry);
		when(binaryRsReaderHandler.getResourceAsStream(anyString()))
				.thenReturn(new ByteArrayInputStream("fakeData".getBytes()));
		BinaryResourcesHandler binaryRsHandler = new BinaryResourcesHandler(binaryServletJawrConfig,
				binaryRsReaderHandler, null);
		servletContext.setAttribute(JawrConstant.BINARY_CONTEXT_ATTRIBUTE, binaryRsHandler);
		generator.setResourceReaderHandler(rsReaderHandler);
		generator.setConfig(config);
		generator.afterPropertiesSet();
		// getResource(...) serves scss content from the test classpath under generator/css/sass.
		Mockito.doAnswer(new Answer<Reader>() {
			@Override
			public Reader answer(InvocationOnMock invocation) throws Throwable {
				Object[] args = invocation.getArguments();
				Reader rd = null;
				try {
					final String sassContent = FileUtils.readClassPathFile("generator/css/sass" + args[1]);
					rd = new StringReader(sassContent);
				} catch (IOException ex) {
					// Do nothing: a missing resource is reported as a null reader.
				}
				return rd;
			}
		}).when(rsReaderHandler).getResource(Matchers.any(JoinableResourceBundle.class), Matchers.anyString(),
				Matchers.anyBoolean(), (List<Class<?>>) Matchers.any());
		// getFilePath(...) resolves a logical path to its absolute classpath location.
		Mockito.doAnswer(new Answer<String>() {
			@Override
			public String answer(InvocationOnMock invocation) throws Throwable {
				Object[] args = invocation.getArguments();
				String result = null;
				try {
					result = FileUtils.getClassPathFileAbsolutePath("generator/css/sass" + (String) args[0]);
				} catch (IOException e) {
					System.out.println(e.getMessage());
				}
				return result;
			}
		}).when(rsReaderHandler).getFilePath(Matchers.anyString());
		// getLastModified(...) delegates to the real file's timestamp.
		Mockito.doAnswer(new Answer<Long>() {
			@Override
			public Long answer(InvocationOnMock invocation) throws Throwable {
				Object[] args = invocation.getArguments();
				File f = new File((String) args[0]);
				Long result = f.lastModified();
				return result;
			}
		}).when(rsReaderHandler).getLastModified(Matchers.anyString());
		// Make sure that _partial-import.scss is restored
		// (testSmartBundling overwrites it; restore from the .backup copy).
		Reader rd = new FileReader(FileUtils.getClassPathFile("generator/css/sass/_partial-for-import.scss.backup"));
		Writer wr = new FileWriter(FileUtils.getClassPathFile("generator/css/sass/_partial-for-import.scss"));
		IOUtils.copy(rd, wr, true);
	}
	/** Restores the scss partial that {@link #testSmartBundling()} overwrites. */
	@After
	public void tearDown() throws Exception {
		// Make sure that _partial-import.scss is restored
		Reader rd = new FileReader(FileUtils.getClassPathFile("generator/css/sass/_partial-for-import.scss.backup"));
		Writer wr = new FileWriter(FileUtils.getClassPathFile("generator/css/sass/_partial-for-import.scss"));
		IOUtils.copy(rd, wr, true);
	}
	/** Compiles a scss file exercising sass functions and compares with the expected CSS. */
	@Test
	public void testSassFunctions() throws Exception {
		when(ctx.getPath()).thenReturn("/functions.scss");
		when(rsReaderHandler.getResourceAsStream(anyString()))
				.thenReturn(new ByteArrayInputStream("fakeData".getBytes()));
		// NOTE(review): ctx is a mock, so this call only records a stub invocation — confirm intent.
		ctx.setProcessingBundle(true);
		Reader rd = generator.createResource(ctx);
		StringWriter writer = new StringWriter();
		IOUtils.copy(rd, writer);
		assertEquals(FileUtils.readClassPathFile("generator/css/sass/expected/vaadin/functions_expected.css"),
				writer.getBuffer().toString());
	}
	/**
	 * Compiles a scss file with {@code @import}s, checks the generated CSS, the linked
	 * file-path mappings recorded for smart bundling, and that a second compilation
	 * (served from cache) produces the same output.
	 */
	@Test
	public void testSassCssBundleWithImports() throws Exception {
		when(ctx.getPath()).thenReturn("/imports.scss");
		when(rsReaderHandler.getResourceAsStream(anyString()))
				.thenReturn(new ByteArrayInputStream("fakeData".getBytes()));
		// NOTE(review): ctx is a mock, so this call only records a stub invocation — confirm intent.
		ctx.setProcessingBundle(true);
		Reader rd = generator.createResource(ctx);
		StringWriter writer = new StringWriter();
		IOUtils.copy(rd, writer);
		Assert.assertEquals(FileUtils.readClassPathFile("generator/css/sass/expected/vaadin/imports_expected.css"),
				writer.getBuffer().toString());
		// The compiled bundle should be linked to the imported partial and the entry file.
		assertEquals(2, linkedResourcePathMappings.size());
		assertEquals(FileUtils.getClassPathFileAbsolutePath("generator/css/sass/_partial-for-import.scss"),
				linkedResourcePathMappings.get(0).getPath());
		assertEquals(FileUtils.getClassPathFileAbsolutePath("generator/css/sass/imports.scss"),
				linkedResourcePathMappings.get(1).getPath());
		// Checks retrieve from cache
		rd = generator.createResource(ctx);
		writer = new StringWriter();
		IOUtils.copy(rd, writer);
		Assert.assertEquals(FileUtils.readClassPathFile("generator/css/sass/expected/vaadin/imports_expected.css"),
				writer.getBuffer().toString());
	}
	/**
	 * Verifies smart bundling: after a linked resource changes on disk, re-creating the
	 * resource must recompile (not serve from cache) and pick up the new import graph.
	 */
	@Test
	public void testSmartBundling() throws Exception {
		// Populate the cache and the linked mappings first.
		testSassCssBundleWithImports();
		// Simulate change on a linked resource
		File f = FileUtils.getClassPathFile("generator/css/sass/_partial-for-import.scss");
		FileWriter fWriter = null;
		try {
			fWriter = new FileWriter(f);
			System.out.println("Sass Smartbundling - file last modified before change : " + f.lastModified());
			// New content adds an extra @import, so a third mapping is expected below.
			fWriter.append("@import \"./folder-test2/variables.scss\"; \n" + "$foo : red; \n" + "@mixin caption {\n"
					+ ".caption { \n" + "$side: right;\n" + "border: 1px solid red;\n" + "background: #ff0000;\n"
					+ "padding: 5px;\n" + "margin: 5px;" + "}}\n" + "@include caption;\n");
		} finally {
			IOUtils.close(fWriter);
		}
		// Force a newer timestamp than the cached one so the cache is invalidated.
		when(rsReaderHandler.getLastModified(f.getAbsolutePath()))
				.thenReturn(Calendar.getInstance().getTimeInMillis() + 3);
		linkedResourcePathMappings.clear();
		Reader rd = generator.createResource(ctx);
		StringWriter writer = new StringWriter();
		IOUtils.copy(rd, writer);
		Assert.assertEquals(
				FileUtils.readClassPathFile("generator/css/sass/expected/vaadin/imports_updated_expected.css"),
				writer.getBuffer().toString());
		// The new import graph has three files: partial, its new import, and the entry file.
		assertEquals(3, linkedResourcePathMappings.size());
		assertEquals(FileUtils.getClassPathFileAbsolutePath("generator/css/sass/_partial-for-import.scss"),
				linkedResourcePathMappings.get(0).getPath());
		assertEquals(FileUtils.getClassPathFileAbsolutePath("generator/css/sass/folder-test2/variables.scss"),
				linkedResourcePathMappings.get(1).getPath());
		assertEquals(FileUtils.getClassPathFileAbsolutePath("generator/css/sass/imports.scss"),
				linkedResourcePathMappings.get(2).getPath());
	}
/**
 * Compiles {@code mixins.scss} through the generator and compares the output
 * with the expected CSS. (Stale commented-out initRsReaderHandler calls removed.)
 */
@Test
public void testSassCssBundleWithMixins() throws Exception {
when(ctx.getPath()).thenReturn("/mixins.scss");
when(rsReaderHandler.getResourceAsStream(anyString()))
.thenReturn(new ByteArrayInputStream("fakeData".getBytes()));
ctx.setProcessingBundle(true);
Reader rd = generator.createResource(ctx);
StringWriter writer = new StringWriter();
IOUtils.copy(rd, writer);
Assert.assertEquals(FileUtils.readClassPathFile("generator/css/sass/expected/vaadin/mixins_expected.css"),
writer.getBuffer().toString());
}
/**
 * Compiles a SCSS file that imports resources from a parent/sibling folder
 * and compares the output with the expected CSS.
 * (Stale commented-out initRsReaderHandler calls removed.)
 */
@Test
public void testSassCssBundleWithParentImport() throws Exception {
when(ctx.getPath()).thenReturn("/folder-test/parent-import.scss");
when(rsReaderHandler.getResourceAsStream(anyString()))
.thenReturn(new ByteArrayInputStream("fakeData".getBytes()));
ctx.setProcessingBundle(true);
Reader rd = generator.createResource(ctx);
StringWriter writer = new StringWriter();
IOUtils.copy(rd, writer);
Assert.assertEquals(
FileUtils.readClassPathFile("generator/css/sass/expected/vaadin/parent-import-expected.css"),
writer.getBuffer().toString());
}
/**
 * Compiles a SCSS file that imports Compass and compares the output with the
 * expected CSS (default URL mode).
 */
@Test
public void testSassCompass() throws Exception {
when(ctx.getPath()).thenReturn("/compass-test/compass-import.scss");
when(rsReaderHandler.getResourceAsStream(anyString()))
.thenReturn(new ByteArrayInputStream("fakeData".getBytes()));
ctx.setProcessingBundle(true);
Reader reader = generator.createResource(ctx);
StringWriter out = new StringWriter();
IOUtils.copy(reader, out);
String expected = FileUtils
.readClassPathFile("generator/css/sass/expected/vaadin/compass-import-expected.css");
Assert.assertEquals(expected, out.getBuffer().toString());
}
/**
 * Same Compass compilation with the URL mode property stubbed to RELATIVE.
 * The expected file is identical to {@link #testSassCompass()}, so RELATIVE
 * appears to match the default behavior.
 */
@Test
public void testSassCompassWithUrlRelativeMode() throws Exception {
// NOTE(review): unlike testSassCompassWithUrlAbsoluteMode, this test never
// calls generator.setConfig(config) after stubbing the property — confirm
// the stub is actually picked up by the generator, or whether this test
// only passes because RELATIVE is the default mode.
when(config.getProperty(JawrConstant.SASS_GENERATOR_URL_MODE, SASS_GENERATOR_DEFAULT_URL_MODE))
.thenReturn("RELATIVE");
when(ctx.getPath()).thenReturn("/compass-test/compass-import.scss");
when(rsReaderHandler.getResourceAsStream(anyString()))
.thenReturn(new ByteArrayInputStream("fakeData".getBytes()));
ctx.setProcessingBundle(true);
Reader rd = generator.createResource(ctx);
StringWriter writer = new StringWriter();
IOUtils.copy(rd, writer);
Assert.assertEquals(
FileUtils.readClassPathFile("generator/css/sass/expected/vaadin/compass-import-expected.css"),
writer.getBuffer().toString());
}
/**
 * Same Compass compilation with the URL mode forced to ABSOLUTE; the config
 * is pushed into the generator so the stubbed property takes effect, and the
 * result is compared against the absolute-URL expected file.
 */
@Test
public void testSassCompassWithUrlAbsoluteMode() throws Exception {
when(config.getProperty(JawrConstant.SASS_GENERATOR_URL_MODE, SASS_GENERATOR_DEFAULT_URL_MODE))
.thenReturn("ABSOLUTE");
generator.setConfig(config);
when(ctx.getPath()).thenReturn("/compass-test/compass-import.scss");
when(rsReaderHandler.getResourceAsStream(anyString()))
.thenReturn(new ByteArrayInputStream("fakeData".getBytes()));
ctx.setProcessingBundle(true);
Reader reader = generator.createResource(ctx);
StringWriter out = new StringWriter();
IOUtils.copy(reader, out);
String expected = FileUtils
.readClassPathFile("generator/css/sass/expected/vaadin/compass-import-absolute-url-expected.css");
Assert.assertEquals(expected, out.getBuffer().toString());
}
}
|
package uk.gov.hmcts.reform.bulkscan.orchestrator.client.caseupdate;
import com.google.common.collect.ImmutableMap;
import org.assertj.core.groups.Tuple;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.web.client.RestTemplate;
import uk.gov.hmcts.reform.bulkscan.orchestrator.client.caseupdate.model.request.CaseUpdateRequest;
import uk.gov.hmcts.reform.bulkscan.orchestrator.client.caseupdate.model.request.ExceptionRecord;
import uk.gov.hmcts.reform.bulkscan.orchestrator.client.caseupdate.model.request.ExistingCaseDetails;
import uk.gov.hmcts.reform.bulkscan.orchestrator.client.caseupdate.model.response.CaseUpdateDetails;
import uk.gov.hmcts.reform.bulkscan.orchestrator.client.caseupdate.model.response.SuccessfulUpdateResponse;
import javax.validation.ConstraintViolationException;
import javax.validation.Validation;
import javax.validation.Validator;
import static java.util.Collections.emptyList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatCode;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.tuple;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
@ExtendWith(MockitoExtension.class)
class CaseUpdateDataClientResponseValidationTest {
// The HTTP layer is mocked so each test can script the "server" response.
@Mock RestTemplate restTemplate;
// Real Bean Validation validator: the subject under test is the validation
// applied to the deserialized response, not the HTTP call itself.
Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
CaseUpdateDataClient client;
@BeforeEach
void setUp() {
this.client = new CaseUpdateDataClient(validator, restTemplate);
}
// A response with a null caseDetails must be rejected.
@Test
void should_require_not_null_caseDetails() {
// given
serverRespondsWith(
new SuccessfulUpdateResponse(
null,
emptyList()
)
);
// then
expectViolations(
tuple("caseDetails", "must not be null")
);
}
// caseDetails itself must be valid: null caseData is a violation.
@Test
void should_require_valid_caseDetails() {
// given
serverRespondsWith(
new SuccessfulUpdateResponse(
new CaseUpdateDetails(
"",
null
),
emptyList()
)
);
// then
expectViolations(
tuple("caseDetails.caseData", "must not be null")
);
}
// A response carrying non-null caseData passes validation.
@Test
void should_not_throw_exception_for_valid_response() {
// given
serverRespondsWith(
new SuccessfulUpdateResponse(
new CaseUpdateDetails(
null,
ImmutableMap.of("key", "value")
),
emptyList()
)
);
// then
assertThatCode(() -> callUpdateCase())
.doesNotThrowAnyException();
}
// Asserts that calling the client throws ConstraintViolationException carrying
// exactly the given (propertyPath, message) pairs, in any order.
void expectViolations(Tuple... violations) {
assertThatExceptionOfType(ConstraintViolationException.class)
.isThrownBy(() -> callUpdateCase())
.satisfies(exc ->
assertThat(exc.getConstraintViolations())
.extracting(violation -> tuple(violation.getPropertyPath().toString(), violation.getMessage()))
.containsExactlyInAnyOrder(violations)
);
}
// Scripts the mocked RestTemplate to return the given response for any POST.
void serverRespondsWith(SuccessfulUpdateResponse response) {
given(restTemplate.postForObject(anyString(), any(), any()))
.willReturn(response);
}
// Exercises the client with a minimal, fully-mocked request payload.
void callUpdateCase() {
client.getCaseUpdateData(
"http://some-url.com/update",
"s2s-token",
new CaseUpdateRequest(
mock(ExceptionRecord.class),
false,
mock(uk.gov.hmcts.reform.bulkscan.orchestrator.client.caseupdate.model.request.CaseUpdateDetails.class),
mock(ExistingCaseDetails.class)
)
);
}
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zto.zms.collector.kafka;
import com.google.common.collect.Maps;
import com.zto.zms.metadata.ClusterMetadata;
import com.zto.zms.collector.model.ConsumerGroupInfo;
import com.zto.zms.service.kafka.KafkaAdminManage;
import org.apache.kafka.clients.admin.*;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.Node;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Service
public class KafkaMetaManagerImpl implements KafkaMetaManager {

    /** Loggers are conventionally static final. */
    private static final Logger logger = LoggerFactory.getLogger(KafkaMetaManagerImpl.class);

    // Per-cluster caches, keyed by cluster name. Refreshed by the put* methods;
    // getters return whatever was cached last (null if never refreshed).
    private final Map<String, Collection<Node>> clusterNodes = Maps.newConcurrentMap();
    private final Map<String, Node> clusterControllers = Maps.newConcurrentMap();
    private final Map<String, List<String>> topics = Maps.newConcurrentMap();
    private final Map<String, List<ConsumerGroupInfo>> consumers = Maps.newConcurrentMap();

    @Autowired
    KafkaAdminManage kafkaAdminManage;

    @Override
    public Collection<Node> getClusterNodes(ClusterMetadata cluster) {
        return clusterNodes.get(cluster.getClusterName());
    }

    /**
     * Refreshes the cached broker node list for the given cluster.
     * Failures are logged and swallowed so one unreachable cluster does not
     * break collection for the others.
     */
    @Override
    public void putClusterNodes(ClusterMetadata cluster) {
        try {
            DescribeClusterResult clusterResult =
                    kafkaAdminManage.getKafkaAdmin(cluster.getClusterName()).describeCluster();
            Collection<Node> nodes = clusterResult.nodes().get();
            clusterNodes.put(cluster.getClusterName(), nodes);
            // Parameterized SLF4J logging instead of string concatenation.
            logger.info("{} New clusterNodes created.", cluster.getClusterName());
        } catch (Exception e) {
            logger.error("{} New clusterNodes create failed.", cluster.getClusterName(), e);
        }
    }

    @Override
    public Node getClusterController(ClusterMetadata cluster) {
        return clusterControllers.get(cluster.getClusterName());
    }

    /** Refreshes the cached controller node for the given cluster; errors are logged and swallowed. */
    @Override
    public void putClusterController(ClusterMetadata cluster) {
        try {
            DescribeClusterResult describeClusterResult =
                    kafkaAdminManage.getKafkaAdmin(cluster.getClusterName()).describeCluster();
            Node controller = describeClusterResult.controller().get();
            clusterControllers.put(cluster.getClusterName(), controller);
            logger.info("{} New cluster Controller created.", cluster.getClusterName());
        } catch (Exception e) {
            logger.error("{} New cluster Controller create failed.", cluster.getClusterName(), e);
        }
    }

    @Override
    public List<String> getClusterTopics(ClusterMetadata cluster) {
        return topics.get(cluster.getClusterName());
    }

    /** Refreshes the cached topic-name list for the given cluster; errors are logged and swallowed. */
    @Override
    public void putClusterTopics(ClusterMetadata cluster) {
        try {
            List<String> topicLst = new ArrayList<>();
            ListTopicsResult listTopicsResult = kafkaAdminManage.getKafkaAdmin(cluster.getClusterName()).listTopics();
            for (TopicListing topicListing : listTopicsResult.listings().get()) {
                topicLst.add(topicListing.name());
            }
            topics.put(cluster.getClusterName(), topicLst);
        } catch (Exception e) {
            logger.error("{} list topics failed.", cluster.getClusterName(), e);
        }
    }

    @Override
    public List<ConsumerGroupInfo> getKafkaConsumers(ClusterMetadata cluster) {
        return consumers.get(cluster.getClusterName());
    }

    /** Refreshes the cached consumer-group list for the given cluster; errors are logged and swallowed. */
    @Override
    public void putKafkaConsumers(ClusterMetadata cluster) {
        try {
            AdminClient adminClient = kafkaAdminManage.getKafkaAdmin(cluster.getClusterName());
            KafkaFuture<Collection<ConsumerGroupListing>> groupsFuture = adminClient.listConsumerGroups().all();
            Collection<ConsumerGroupListing> groupGroups = groupsFuture.get();
            List<ConsumerGroupInfo> consumerGroups = groupGroups.stream().map(item -> {
                ConsumerGroupInfo consumerGroupInfo = new ConsumerGroupInfo();
                consumerGroupInfo.setGroup(item.groupId());
                return consumerGroupInfo;
            }).collect(Collectors.toList());
            consumers.put(cluster.getClusterName(), consumerGroups);
        } catch (Exception e) {
            logger.error("{} list consumers failed.", cluster.getClusterName(), e);
        }
    }
}
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.10.27 at 06:20:33 PM CET
//
package urn.oasis.names.tc.saml.assertion;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for StatementAbstractType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="StatementAbstractType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "StatementAbstractType")
@XmlSeeAlso({
SubjectStatementAbstractType.class
})
// JAXB-generated abstract marker base for SAML statement types; intentionally
// empty (the schema type restricts anyType with no content). Concrete subtypes
// are referenced via @XmlSeeAlso. Generated code — do not edit by hand.
public abstract class StatementAbstractType {
}
|
package com.stoups.controller.video;
import com.stoups.models.Comment;
import com.stoups.request.AnalyticsRequest;
import com.stoups.services.video.service.YoutubeService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* Created by astouparenko on 5/29/2017.
*/
@RestController
@RequestMapping(value = {"/core/analytics/youtube"})
public class YoutubeController {

    /** Delegate that computes the top comments for an analytics request. */
    private final YoutubeService youtubeService;

    // Constructor injection instead of field injection: the dependency is
    // final, mandatory, and the controller is testable without Spring.
    @Autowired
    public YoutubeController(YoutubeService youtubeService) {
        this.youtubeService = youtubeService;
    }

    /**
     * Returns the top comments for the video(s) described by the request body.
     */
    @RequestMapping(value = "/topComments")
    public List<Comment> analyzeYoutube(@RequestBody AnalyticsRequest request) {
        return youtubeService.getTopComments(request);
    }
}
|
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexing.common.task;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import io.druid.data.input.impl.CSVParseSpec;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.ParseSpec;
import io.druid.data.input.impl.SpatialDimensionSchema;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.indexing.common.TaskLock;
import io.druid.indexing.common.TaskLockType;
import io.druid.indexing.common.TaskToolbox;
import io.druid.indexing.common.TestUtils;
import io.druid.indexing.common.actions.LockAcquireAction;
import io.druid.indexing.common.actions.LockListAction;
import io.druid.indexing.common.actions.LockTryAcquireAction;
import io.druid.indexing.common.actions.SegmentAllocateAction;
import io.druid.indexing.common.actions.SegmentTransactionalInsertAction;
import io.druid.indexing.common.actions.TaskAction;
import io.druid.indexing.common.actions.TaskActionClient;
import io.druid.indexing.common.task.IndexTask.IndexIngestionSpec;
import io.druid.indexing.common.task.IndexTask.IndexTuningConfig;
import io.druid.indexing.overlord.SegmentPublishResult;
import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.Intervals;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.parsers.ParseException;
import io.druid.math.expr.ExprMacroTable;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.filter.SelectorDimFilter;
import io.druid.segment.IndexIO;
import io.druid.segment.IndexMergerV9;
import io.druid.segment.IndexSpec;
import io.druid.segment.indexing.DataSchema;
import io.druid.segment.transform.ExpressionTransform;
import io.druid.segment.transform.TransformSpec;
import io.druid.segment.indexing.granularity.ArbitraryGranularitySpec;
import io.druid.segment.indexing.granularity.GranularitySpec;
import io.druid.segment.indexing.granularity.UniformGranularitySpec;
import io.druid.segment.loading.DataSegmentPusher;
import io.druid.segment.realtime.appenderator.SegmentIdentifier;
import io.druid.segment.realtime.firehose.LocalFirehoseFactory;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.HashBasedNumberedShardSpec;
import io.druid.timeline.partition.NoneShardSpec;
import io.druid.timeline.partition.NumberedShardSpec;
import io.druid.timeline.partition.ShardSpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class IndexTaskTest
{
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Rule
public ExpectedException expectedException = ExpectedException.none();
// Default CSV parse spec shared by most tests: "ts" timestamp column (auto
// format), dimensions ts/dim, columns ts,dim,val, no header row, skip 0 lines.
private static final ParseSpec DEFAULT_PARSE_SPEC = new CSVParseSpec(
new TimestampSpec(
"ts",
"auto",
null
),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(Arrays.asList("ts", "dim")),
Lists.newArrayList(),
Lists.newArrayList()
),
null,
Arrays.asList("ts", "dim", "val"),
false,
0
);
private static final IndexSpec indexSpec = new IndexSpec();
private final ObjectMapper jsonMapper;
private IndexMergerV9 indexMergerV9;
private IndexIO indexIO;
// Counts segment allocations observed during a task run (reset/checked by
// testAppendToExisting); volatile — presumably incremented from the task's
// thread via the test's action client. TODO confirm where it is incremented.
private volatile int segmentAllocatePartitionCounter;
public IndexTaskTest()
{
TestUtils testUtils = new TestUtils();
jsonMapper = testUtils.getTestObjectMapper();
indexMergerV9 = testUtils.getTestIndexMergerV9();
indexIO = testUtils.getTestIndexIO();
}
/**
 * Three rows in one day with targetPartitionSize 2 should yield two
 * hash-partitioned segments (partitions 0 and 1) over the same day interval.
 */
@Test
public void testDeterminePartitions() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("2014-01-01T00:00:10Z,a,1\n");
writer.write("2014-01-01T01:00:20Z,b,1\n");
writer.write("2014-01-01T02:00:30Z,c,1\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
null,
null,
createTuningConfig(2, null, false, true),
false
),
null
);
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(2, segments.size());
Assert.assertEquals("test", segments.get(0).getDataSource());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval());
Assert.assertEquals(HashBasedNumberedShardSpec.class, segments.get(0).getShardSpec().getClass());
Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum());
Assert.assertEquals(2, ((NumberedShardSpec) segments.get(0).getShardSpec()).getPartitions());
Assert.assertEquals("test", segments.get(1).getDataSource());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(1).getInterval());
Assert.assertEquals(HashBasedNumberedShardSpec.class, segments.get(1).getShardSpec().getClass());
Assert.assertEquals(1, segments.get(1).getShardSpec().getPartitionNum());
Assert.assertEquals(2, ((NumberedShardSpec) segments.get(1).getShardSpec()).getPartitions());
}
/**
 * Same input as {@link #testDeterminePartitions()} but with different tuning
 * flags — per the test name, forcing extendable shard specs — so segments get
 * plain {@code NumberedShardSpec} instead of hash-based specs.
 */
@Test
public void testForceExtendableShardSpecs() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("2014-01-01T00:00:10Z,a,1\n");
writer.write("2014-01-01T01:00:20Z,b,1\n");
writer.write("2014-01-01T02:00:30Z,c,1\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
null,
null,
// NOTE(review): flags (true, false) presumably are forceExtendableShardSpecs
// and forceGuaranteedRollup — confirm against createTuningConfig.
createTuningConfig(2, null, true, false),
false
),
null
);
// Without an explicit group id, a non-append task's group id is its own id.
Assert.assertEquals(indexTask.getId(), indexTask.getGroupId());
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(2, segments.size());
Assert.assertEquals("test", segments.get(0).getDataSource());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval());
Assert.assertEquals(NumberedShardSpec.class, segments.get(0).getShardSpec().getClass());
Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum());
Assert.assertEquals("test", segments.get(1).getDataSource());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(1).getInterval());
Assert.assertEquals(NumberedShardSpec.class, segments.get(1).getShardSpec().getClass());
Assert.assertEquals(1, segments.get(1).getShardSpec().getPartitionNum());
}
/**
 * A TransformSpec with a {@code dim = "b"} selector filter keeps only the
 * middle row, so a single segment results; the transform also derives a
 * "dimt" column as concat(dim, dim).
 */
@Test
public void testTransformSpec() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("2014-01-01T00:00:10Z,a,1\n");
writer.write("2014-01-01T01:00:20Z,b,1\n");
writer.write("2014-01-01T02:00:30Z,c,1\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
null,
new TransformSpec(
new SelectorDimFilter("dim", "b", null),
ImmutableList.of(
new ExpressionTransform("dimt", "concat(dim,dim)", ExprMacroTable.nil())
)
),
null,
createTuningConfig(2, null, true, false),
false
),
null
);
Assert.assertEquals(indexTask.getId(), indexTask.getGroupId());
final List<DataSegment> segments = runTask(indexTask);
// Only row "b" survives the filter, hence exactly one segment.
Assert.assertEquals(1, segments.size());
Assert.assertEquals("test", segments.get(0).getDataSource());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval());
Assert.assertEquals(NumberedShardSpec.class, segments.get(0).getShardSpec().getClass());
Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum());
}
/**
 * With an ArbitraryGranularitySpec covering the whole day, all three rows
 * land in one interval and produce a single segment.
 */
@Test
public void testWithArbitraryGranularity() throws Exception
{
final File tmpDir = temporaryFolder.newFolder();
final File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("2014-01-01T00:00:10Z,a,1\n"
+ "2014-01-01T01:00:20Z,b,1\n"
+ "2014-01-01T02:00:30Z,c,1\n");
}
final IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
null,
new ArbitraryGranularitySpec(
Granularities.MINUTE,
Collections.singletonList(Intervals.of("2014-01-01/2014-01-02"))
),
createTuningConfig(10, null, false, true),
false
),
null
);
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(1, segments.size());
}
/**
 * The granularity spec only covers 08:00–09:00; the 07:59:59.977Z row falls
 * outside it, so only the 08:00:00.000Z row is ingested — one segment.
 */
@Test
public void testIntervalBucketing() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("2014-01-01T07:59:59.977Z,a,1\n");
writer.write("2014-01-01T08:00:00.000Z,b,1\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
null,
new UniformGranularitySpec(
Granularities.HOUR,
Granularities.HOUR,
Collections.singletonList(Intervals.of("2014-01-01T08:00:00Z/2014-01-01T09:00:00Z"))
),
createTuningConfig(50, null, false, true),
false
),
null
);
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(1, segments.size());
}
/**
 * With an explicit shard count of 1 in the tuning config (and no target
 * partition size), the task produces a single segment with a NoneShardSpec.
 */
@Test
public void testNumShardsProvided() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("2014-01-01T00:00:10Z,a,1\n");
writer.write("2014-01-01T01:00:20Z,b,1\n");
writer.write("2014-01-01T02:00:30Z,c,1\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
null,
null,
createTuningConfig(null, 1, false, true),
false
),
null
);
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(1, segments.size());
Assert.assertEquals("test", segments.get(0).getDataSource());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval());
// assertEquals on the Class gives a useful failure message, unlike
// assertTrue(x.getClass().equals(y)); also consistent with the other tests.
Assert.assertEquals(NoneShardSpec.class, segments.get(0).getShardSpec().getClass());
Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum());
}
/**
 * An append task (last createIngestionSpec arg true) uses the fixed
 * "index_append_test" group id, allocates segments via the overlord
 * (segmentAllocatePartitionCounter) and produces NumberedShardSpec segments.
 */
@Test
public void testAppendToExisting() throws Exception
{
segmentAllocatePartitionCounter = 0;
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("2014-01-01T00:00:10Z,a,1\n");
writer.write("2014-01-01T01:00:20Z,b,1\n");
writer.write("2014-01-01T02:00:30Z,c,1\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
null,
null,
createTuningConfig(2, null, false, false),
true
),
null
);
Assert.assertEquals("index_append_test", indexTask.getGroupId());
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(2, segmentAllocatePartitionCounter);
Assert.assertEquals(2, segments.size());
Assert.assertEquals("test", segments.get(0).getDataSource());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval());
// assertEquals on the Class for clearer failures than assertTrue(..equals(..)).
Assert.assertEquals(NumberedShardSpec.class, segments.get(0).getShardSpec().getClass());
Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum());
Assert.assertEquals("test", segments.get(1).getDataSource());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(1).getInterval());
Assert.assertEquals(NumberedShardSpec.class, segments.get(1).getShardSpec().getClass());
Assert.assertEquals(1, segments.get(1).getShardSpec().getPartitionNum());
}
/**
 * With no intervals in the granularity spec, the task derives them from the
 * data: three rows in three different hours yield three hourly segments
 * (2014-01-01T00, T01, T02), each with a NoneShardSpec and partition 0.
 */
@Test
public void testIntervalNotSpecified() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("2014-01-01T00:00:10Z,a,1\n");
writer.write("2014-01-01T01:00:20Z,b,1\n");
writer.write("2014-01-01T02:00:30Z,c,1\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
null,
new UniformGranularitySpec(
Granularities.HOUR,
Granularities.MINUTE,
null
),
createTuningConfig(2, null, false, true),
false
),
null
);
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(3, segments.size());
// One segment per hour; identical expectations folded into a loop.
for (int i = 0; i < 3; i++) {
final DataSegment segment = segments.get(i);
Assert.assertEquals("test", segment.getDataSource());
Assert.assertEquals(Intervals.of(StringUtils.format("2014-01-01T0%d/PT1H", i)), segment.getInterval());
Assert.assertEquals(NoneShardSpec.class, segment.getShardSpec().getClass());
Assert.assertEquals(0, segment.getShardSpec().getPartitionNum());
}
}
/**
 * With {@code hasHeaderRow = true} and no explicit column list, column names
 * come from the file's header line ("time,d,val"): dimension "d" and metric
 * "val" are discovered automatically.
 */
@Test
public void testCSVFileWithHeader() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("time,d,val\n");
writer.write("2014-01-01T00:00:10Z,a,1\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
new CSVParseSpec(
new TimestampSpec(
"time",
"auto",
null
),
new DimensionsSpec(
null,
// Explicit type witnesses dropped; inference handles these,
// matching the style of DEFAULT_PARSE_SPEC above.
Lists.newArrayList(),
Lists.newArrayList()
),
null,
null,
true,
0
),
null,
createTuningConfig(2, null, false, true),
false
),
null
);
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(1, segments.size());
Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions());
Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval());
}
/**
 * Header row present AND an explicit column list supplied.
 */
@Test
public void testCSVFileWithHeaderColumnOverride() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("time,d,val\n");
writer.write("2014-01-01T00:00:10Z,a,1\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
new CSVParseSpec(
new TimestampSpec(
"time",
"auto",
null
),
new DimensionsSpec(
null,
Lists.<String>newArrayList(),
Lists.<SpatialDimensionSchema>newArrayList()
),
null,
Arrays.asList("time", "dim", "val"),
true,
0
),
null,
createTuningConfig(2, null, false, true),
false
),
null
);
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(1, segments.size());
// NOTE(review): the expected dimension is "d" (from the file header), not
// "dim" (from the explicit column list) — presumably the header wins when
// hasHeaderRow is true. Confirm against CSVParseSpec semantics.
Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions());
Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval());
}
/**
 * Nine rows spread over three hours with small row limits in the tuning
 * config (2, 2, 2, ...) force segment hand-off mid-hour: two segments per
 * hour, six in total, partition numbers alternating 0/1 within each hour.
 */
@Test
public void testWithSmallMaxTotalRows() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("2014-01-01T00:00:10Z,a,1\n");
writer.write("2014-01-01T00:00:10Z,b,2\n");
writer.write("2014-01-01T00:00:10Z,c,3\n");
writer.write("2014-01-01T01:00:20Z,a,1\n");
writer.write("2014-01-01T01:00:20Z,b,2\n");
writer.write("2014-01-01T01:00:20Z,c,3\n");
writer.write("2014-01-01T02:00:30Z,a,1\n");
writer.write("2014-01-01T02:00:30Z,b,2\n");
writer.write("2014-01-01T02:00:30Z,c,3\n");
}
IndexTask indexTask = new IndexTask(
null,
null,
createIngestionSpec(
tmpDir,
null,
new UniformGranularitySpec(
Granularities.HOUR,
Granularities.MINUTE,
null
),
createTuningConfig(2, 2, 2, null, false, false, true),
false
),
null
);
final List<DataSegment> segments = runTask(indexTask);
Assert.assertEquals(6, segments.size());
for (int i = 0; i < 6; i++) {
final DataSegment segment = segments.get(i);
// Segments i=0,1 belong to hour 0; i=2,3 to hour 1; i=4,5 to hour 2.
final Interval expectedInterval = Intervals.of(StringUtils.format("2014-01-01T0%d/PT1H", (i / 2)));
final int expectedPartitionNum = i % 2;
Assert.assertEquals("test", segment.getDataSource());
Assert.assertEquals(expectedInterval, segment.getInterval());
Assert.assertEquals(NumberedShardSpec.class, segment.getShardSpec().getClass());
Assert.assertEquals(expectedPartitionNum, segment.getShardSpec().getPartitionNum());
}
}
@Test
public void testPerfectRollup() throws Exception
{
  // Ingests the shared rollup fixture with forceGuaranteedRollup=true: the task
  // must use hash-based partitioning so identical rows always land in the same
  // segment, yielding exactly 3 segments.
  File tmpDir = temporaryFolder.newFolder();
  File tmpFile = File.createTempFile("druid", "index", tmpDir);
  populateRollupTestData(tmpFile);
  IndexTask indexTask = new IndexTask(
      null,
      null,
      createIngestionSpec(
          tmpDir,
          null,
          new UniformGranularitySpec(
              Granularities.DAY,
              Granularities.DAY,
              true,
              null
          ),
          // targetPartitionSize=3, maxRowsInMemory=2, maxTotalRows=2,
          // forceGuaranteedRollup=true, reportParseException=true
          createTuningConfig(3, 2, 2, null, false, true, true),
          false
      ),
      null
  );
  final List<DataSegment> segments = runTask(indexTask);
  Assert.assertEquals(3, segments.size());
  for (int i = 0; i < 3; i++) {
    final DataSegment segment = segments.get(i);
    final Interval expectedInterval = Intervals.of("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z");
    Assert.assertEquals("test", segment.getDataSource());
    Assert.assertEquals(expectedInterval, segment.getInterval());
    // assertEquals (instead of assertTrue on class equality) produces a
    // descriptive failure message and matches the sibling tests' style.
    Assert.assertEquals(HashBasedNumberedShardSpec.class, segment.getShardSpec().getClass());
    Assert.assertEquals(i, segment.getShardSpec().getPartitionNum());
  }
}
@Test
public void testBestEffortRollup() throws Exception
{
  // Same fixture as testPerfectRollup, but with forceGuaranteedRollup=false:
  // best-effort rollup may spill identical rows into separate segments, so 5
  // numbered partitions are produced instead of 3 hashed ones.
  File tmpDir = temporaryFolder.newFolder();
  File tmpFile = File.createTempFile("druid", "index", tmpDir);
  populateRollupTestData(tmpFile);
  IndexTask task = new IndexTask(
      null,
      null,
      createIngestionSpec(
          tmpDir,
          null,
          new UniformGranularitySpec(
              Granularities.DAY,
              Granularities.DAY,
              true,
              null
          ),
          createTuningConfig(3, 2, 2, null, false, false, true),
          false
      ),
      null
  );
  final List<DataSegment> publishedSegments = runTask(task);
  Assert.assertEquals(5, publishedSegments.size());
  final Interval day = Intervals.of("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z");
  int partitionNum = 0;
  for (DataSegment segment : publishedSegments) {
    Assert.assertEquals("test", segment.getDataSource());
    Assert.assertEquals(day, segment.getInterval());
    Assert.assertEquals(NumberedShardSpec.class, segment.getShardSpec().getClass());
    Assert.assertEquals(partitionNum++, segment.getShardSpec().getPartitionNum());
  }
}
/**
 * Writes the CSV fixture shared by the rollup tests to {@code tmpFile}.
 * Rows are deliberately out of chronological order, with three rows per
 * timestamp, giving the rollup tests something to merge.
 */
private static void populateRollupTestData(File tmpFile) throws IOException
{
  final String[] rows = {
      "2014-01-01T00:00:10Z,a,1\n",
      "2014-01-01T01:00:20Z,a,1\n",
      "2014-01-01T00:00:10Z,b,2\n",
      "2014-01-01T00:00:10Z,c,3\n",
      "2014-01-01T01:00:20Z,b,2\n",
      "2014-01-01T02:00:30Z,a,1\n",
      "2014-01-01T02:00:30Z,b,2\n",
      "2014-01-01T01:00:20Z,c,3\n",
      "2014-01-01T02:00:30Z,c,3\n"
  };
  try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
    for (String row : rows) {
      writer.write(row);
    }
  }
}
@Test
public void testIgnoreParseException() throws Exception
{
// One row with an unparseable timestamp followed by one valid row.
final File tmpDir = temporaryFolder.newFolder();
final File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("time,d,val\n");
writer.write("unparseable,a,1\n");
writer.write("2014-01-01T00:00:10Z,a,1\n");
}
// GranularitySpec.intervals and numShards must be null to verify reportParseException=false is respected both in
// IndexTask.determineShardSpecs() and IndexTask.generateAndPublishSegments()
final IndexIngestionSpec parseExceptionIgnoreSpec = createIngestionSpec(
tmpDir,
new CSVParseSpec(
new TimestampSpec(
"time",
"auto",
null
),
new DimensionsSpec(
null,
Lists.<String>newArrayList(),
Lists.<SpatialDimensionSchema>newArrayList()
),
null,
Arrays.asList("time", "dim", "val"),
true,
0
),
null,
createTuningConfig(2, null, null, null, false, false, false), // reportParseException=false: unparseable row is silently skipped
false
);
IndexTask indexTask = new IndexTask(
null,
null,
parseExceptionIgnoreSpec,
null
);
final List<DataSegment> segments = runTask(indexTask);
// Only the single valid row survives; it lands in one daily segment.
// NOTE(review): segments.size() is not asserted before get(0) — presumably
// always 1 here; consider adding an explicit size check.
Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions());
Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval());
}
@Test
public void testReportParseException() throws Exception
{
// With reportParseException=true, the first unparseable row must abort the
// whole task with a ParseException.
expectedException.expect(ParseException.class);
expectedException.expectMessage("Unparseable timestamp found!");
final File tmpDir = temporaryFolder.newFolder();
final File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write("time,d,val\n");
writer.write("unparseable,a,1\n");
writer.write("2014-01-01T00:00:10Z,a,1\n");
}
final IndexIngestionSpec parseExceptionIgnoreSpec = createIngestionSpec(
tmpDir,
new CSVParseSpec(
new TimestampSpec(
"time",
"auto",
null
),
new DimensionsSpec(
null,
Lists.<String>newArrayList(),
Lists.<SpatialDimensionSchema>newArrayList()
),
null,
Arrays.asList("time", "dim", "val"),
true,
0
),
null,
createTuningConfig(2, null, null, null, false, false, true), // report parse exception
false
);
IndexTask indexTask = new IndexTask(
null,
null,
parseExceptionIgnoreSpec,
null
);
// Expected to throw; the ExpectedException rule verifies type and message.
runTask(indexTask);
}
@Test
public void testCsvWithHeaderOfEmptyColumns() throws Exception
{
  final File tmpDir = temporaryFolder.newFolder();
  // Three CSV files whose headers have empty column names in different
  // positions; unnamed columns should be auto-assigned names like
  // column_2/column_3 during parsing.
  File tmpFile = File.createTempFile("druid", "index", tmpDir);
  try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
    writer.write("time,,\n");
    writer.write("2014-01-01T00:00:10Z,a,1\n");
  }
  tmpFile = File.createTempFile("druid", "index", tmpDir);
  try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
    writer.write("time,dim,\n");
    writer.write("2014-01-01T00:00:10Z,a,1\n");
  }
  tmpFile = File.createTempFile("druid", "index", tmpDir);
  try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
    writer.write("time,,val\n");
    writer.write("2014-01-01T00:00:10Z,a,1\n");
  }
  // Columns list is null so they are read from each file's header.
  final IndexIngestionSpec ingestionSpec = createIngestionSpec(
      tmpDir,
      new CSVParseSpec(
          new TimestampSpec(
              "time",
              "auto",
              null
          ),
          new DimensionsSpec(
              null,
              null,
              null
          ),
          null,
          null,
          true,
          0
      ),
      null,
      // forceGuaranteedRollup=true with numShards=1 -> hash-based sharding
      createTuningConfig(2, 1, null, null, false, true, true),
      false
  );
  IndexTask indexTask = new IndexTask(
      null,
      null,
      ingestionSpec,
      null
  );
  final List<DataSegment> segments = runTask(indexTask);
  // The order of result segments can change because a hash shardSpec is used,
  // so assert on the set of dimensions rather than a fixed ordering.
  Assert.assertEquals(2, segments.size());
  Assert.assertNotEquals(segments.get(0), segments.get(1));
  for (DataSegment segment : segments) {
    final Set<String> dimensions = new HashSet<>(segment.getDimensions());
    Assert.assertTrue(
        StringUtils.format("Actual dimensions: %s", dimensions),
        dimensions.equals(Sets.newHashSet("dim", "column_3")) ||
        dimensions.equals(Sets.newHashSet("column_2", "column_3"))
    );
    Assert.assertEquals(Arrays.asList("val"), segment.getMetrics());
    Assert.assertEquals(Intervals.of("2014/P1D"), segment.getInterval());
  }
}
@Test
public void testCsvWithHeaderOfEmptyTimestamp() throws Exception
{
// The header row is entirely empty, so the configured "time" column cannot be
// resolved and the data row must fail timestamp parsing.
expectedException.expect(ParseException.class);
expectedException.expectMessage("Unparseable timestamp found!");
final File tmpDir = temporaryFolder.newFolder();
final File tmpFile = File.createTempFile("druid", "index", tmpDir);
try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
writer.write(",,\n");
writer.write("2014-01-01T00:00:10Z,a,1\n");
}
final IndexIngestionSpec parseExceptionIgnoreSpec = createIngestionSpec(
tmpDir,
new CSVParseSpec(
new TimestampSpec(
"time",
"auto",
null
),
new DimensionsSpec(
null,
Lists.<String>newArrayList(),
Lists.<SpatialDimensionSchema>newArrayList()
),
null,
Arrays.asList("time", "", ""),
true,
0
),
null,
createTuningConfig(2, null, null, null, false, false, true), // report parse exception
false
);
IndexTask indexTask = new IndexTask(
null,
null,
parseExceptionIgnoreSpec,
null
);
// Expected to throw; the ExpectedException rule verifies type and message.
runTask(indexTask);
}
/**
 * Runs the given IndexTask against an in-memory harness (stubbed action client,
 * in-memory segment pusher, minimal TaskToolbox) and returns the pushed
 * segments, sorted for deterministic assertions.
 */
private List<DataSegment> runTask(IndexTask indexTask) throws Exception
{
// Collects every segment the task pushes via the stub pusher below.
final List<DataSegment> segments = Lists.newArrayList();
// Stub action client: grants every lock request and fakes segment
// allocation/publication so no real overlord is needed.
final TaskActionClient actionClient = new TaskActionClient()
{
@Override
public <RetType> RetType submit(TaskAction<RetType> taskAction) throws IOException
{
// Report a single pre-existing exclusive lock covering the whole test year.
if (taskAction instanceof LockListAction) {
return (RetType) Collections.singletonList(
new TaskLock(
TaskLockType.EXCLUSIVE,
"",
"",
Intervals.of("2014/P1Y"), DateTimes.nowUtc().toString(),
Tasks.DEFAULT_BATCH_INDEX_TASK_PRIORITY
)
);
}
// Always grant exclusive lock acquisition on the requested interval.
if (taskAction instanceof LockAcquireAction) {
return (RetType) new TaskLock(
TaskLockType.EXCLUSIVE, "groupId",
"test",
((LockAcquireAction) taskAction).getInterval(),
DateTimes.nowUtc().toString(),
Tasks.DEFAULT_BATCH_INDEX_TASK_PRIORITY
);
}
// Same for try-acquire: never contended in tests.
if (taskAction instanceof LockTryAcquireAction) {
return (RetType) new TaskLock(
TaskLockType.EXCLUSIVE,
"groupId",
"test",
((LockTryAcquireAction) taskAction).getInterval(),
DateTimes.nowUtc().toString(),
Tasks.DEFAULT_BATCH_INDEX_TASK_PRIORITY
);
}
// Pretend every transactional segment publish succeeds.
if (taskAction instanceof SegmentTransactionalInsertAction) {
return (RetType) new SegmentPublishResult(
((SegmentTransactionalInsertAction) taskAction).getSegments(),
true
);
}
// Hand out numbered shard specs with a monotonically increasing partition
// counter (segmentAllocatePartitionCounter is a test-class field).
if (taskAction instanceof SegmentAllocateAction) {
SegmentAllocateAction action = (SegmentAllocateAction) taskAction;
Interval interval = action.getPreferredSegmentGranularity().bucket(action.getTimestamp());
ShardSpec shardSpec = new NumberedShardSpec(segmentAllocatePartitionCounter++, 0);
return (RetType) new SegmentIdentifier(action.getDataSource(), interval, "latestVersion", shardSpec);
}
// Unhandled action types are not used by IndexTask in these tests.
return null;
}
};
// Pusher that records pushed segments in memory instead of writing to deep storage.
final DataSegmentPusher pusher = new DataSegmentPusher()
{
@Deprecated
@Override
public String getPathForHadoop(String dataSource)
{
return getPathForHadoop();
}
@Override
public String getPathForHadoop()
{
return null;
}
@Override
public DataSegment push(File file, DataSegment segment) throws IOException
{
segments.add(segment);
return segment;
}
@Override
public Map<String, Object> makeLoadSpec(URI uri)
{
throw new UnsupportedOperationException();
}
};
// Minimal toolbox: only the collaborators IndexTask actually touches are
// non-null (action client, pusher, mapper, temp dir, indexIO, merger).
final TaskToolbox box = new TaskToolbox(
null,
actionClient,
null,
pusher,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
jsonMapper,
temporaryFolder.newFolder(),
indexIO,
null,
null,
indexMergerV9,
null,
null,
null,
null
);
// isReady() performs lock acquisition; run() does the actual indexing.
indexTask.isReady(box.getTaskActionClient());
indexTask.run(box);
// Sort so callers can assert on a deterministic interval/partition order.
Collections.sort(segments);
return segments;
}
/**
 * Convenience overload that builds an ingestion spec without transforms
 * (delegates with {@link TransformSpec#NONE}).
 */
private IndexTask.IndexIngestionSpec createIngestionSpec(
File baseDir,
ParseSpec parseSpec,
GranularitySpec granularitySpec,
IndexTuningConfig tuningConfig,
boolean appendToExisting
)
{
return createIngestionSpec(baseDir, parseSpec, TransformSpec.NONE, granularitySpec, tuningConfig, appendToExisting);
}
/**
 * Builds an IndexIngestionSpec over a local firehose reading "druid*" files
 * from {@code baseDir}.
 *
 * Datasource is always "test" with a single longSum metric on "val".
 * Null {@code parseSpec}/{@code granularitySpec} fall back to
 * DEFAULT_PARSE_SPEC and a DAY/MINUTE spec over 2014/2015 respectively.
 */
private IndexTask.IndexIngestionSpec createIngestionSpec(
File baseDir,
ParseSpec parseSpec,
TransformSpec transformSpec,
GranularitySpec granularitySpec,
IndexTuningConfig tuningConfig,
boolean appendToExisting
)
{
return new IndexTask.IndexIngestionSpec(
new DataSchema(
"test",
// DataSchema wants the parser as a Map, so round-trip through Jackson.
jsonMapper.convertValue(
new StringInputRowParser(
parseSpec != null ? parseSpec : DEFAULT_PARSE_SPEC,
null
),
Map.class
),
new AggregatorFactory[]{
new LongSumAggregatorFactory("val", "val")
},
granularitySpec != null ? granularitySpec : new UniformGranularitySpec(
Granularities.DAY,
Granularities.MINUTE,
Arrays.asList(Intervals.of("2014/2015"))
),
transformSpec,
jsonMapper
),
new IndexTask.IndexIOConfig(
// Read every file matching "druid*" under baseDir.
new LocalFirehoseFactory(
baseDir,
"druid*",
null
),
appendToExisting
),
tuningConfig
);
}
/**
 * Shorthand tuning config: maxRowsInMemory=1, no maxTotalRows limit,
 * reportParseException=true.
 */
private static IndexTuningConfig createTuningConfig(
Integer targetPartitionSize,
Integer numShards,
boolean forceExtendableShardSpecs,
boolean forceGuaranteedRollup
)
{
return createTuningConfig(
targetPartitionSize,
1,
null,
numShards,
forceExtendableShardSpecs,
forceGuaranteedRollup,
true
);
}
/**
 * Builds an IndexTuningConfig with the test-wide indexSpec and buildV9Directly
 * enabled; remaining constructor arguments are left null to use defaults.
 * NOTE(review): positional nulls map to IndexTuningConfig constructor
 * parameters not visible here — confirm positions against the constructor
 * before reordering.
 */
private static IndexTuningConfig createTuningConfig(
Integer targetPartitionSize,
Integer maxRowsInMemory,
Integer maxTotalRows,
Integer numShards,
boolean forceExtendableShardSpecs,
boolean forceGuaranteedRollup,
boolean reportParseException
)
{
return new IndexTask.IndexTuningConfig(
targetPartitionSize,
maxRowsInMemory,
maxTotalRows,
null,
numShards,
indexSpec,
null,
true,
forceExtendableShardSpecs,
forceGuaranteedRollup,
reportParseException,
null,
null
);
}
}
|
/*
* The MIT License
*
* Copyright 2020 CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.kohsuke.github;
import org.junit.Test;
import java.io.IOException;
import java.util.Date;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@SuppressWarnings("deprecation") // preview
public class GHCheckRunBuilderTest extends AbstractGHAppInstallationTest {

    /** Returns a GitHub client authenticated as the test app installation. */
    protected GitHub getInstallationGithub() throws IOException {
        return getAppInstallationWithTokenApp3().getRoot();
    }

    /** Creates a fully-populated completed check run and verifies its fields. */
    @Test
    public void createCheckRun() throws Exception {
        GHCheckRun checkRun = getInstallationGithub().getRepository("hub4j-test-org/test-checks")
                .createCheckRun("foo", "89a9ae301e35e667756034fdc933b1fc94f63fc1")
                .withStatus(GHCheckRun.Status.COMPLETED)
                .withConclusion(GHCheckRun.Conclusion.SUCCESS)
                .withDetailsURL("http://nowhere.net/stuff")
                .withExternalID("whatever")
                .withStartedAt(new Date(999_999_000))
                .withCompletedAt(new Date(999_999_999))
                .add(new GHCheckRunBuilder.Output("Some Title", "what happened…")
                        .add(new GHCheckRunBuilder.Annotation("stuff.txt",
                                1,
                                GHCheckRun.AnnotationLevel.NOTICE,
                                "hello to you too").withTitle("Look here"))
                        .add(new GHCheckRunBuilder.Image("Unikitty",
                                "https://i.pinimg.com/474x/9e/65/c0/9e65c0972294f1e10f648c9780a79fab.jpg")
                                .withCaption("Princess Unikitty")))
                .add(new GHCheckRunBuilder.Action("Help", "what I need help with", "doit"))
                .create();
        assertEquals("completed", checkRun.getStatus());
        assertEquals(1, checkRun.getOutput().getAnnotationsCount());
        assertEquals(1424883286, checkRun.getId());
    }

    /**
     * Verifies that more than 50 annotations (the per-request API limit) are
     * delivered via batched update requests.
     */
    @Test
    public void createCheckRunManyAnnotations() throws Exception {
        GHCheckRunBuilder.Output output = new GHCheckRunBuilder.Output("Big Run", "Lots of stuff here »");
        for (int i = 0; i < 101; i++) {
            output.add(
                    new GHCheckRunBuilder.Annotation("stuff.txt", 1, GHCheckRun.AnnotationLevel.NOTICE, "hello #" + i));
        }
        GHCheckRun checkRun = getInstallationGithub().getRepository("hub4j-test-org/test-checks")
                .createCheckRun("big", "89a9ae301e35e667756034fdc933b1fc94f63fc1")
                .withConclusion(GHCheckRun.Conclusion.SUCCESS)
                .add(output)
                .create();
        assertEquals("completed", checkRun.getStatus());
        assertEquals("Big Run", checkRun.getOutput().getTitle());
        assertEquals("Lots of stuff here »", checkRun.getOutput().getSummary());
        assertEquals(101, checkRun.getOutput().getAnnotationsCount());
        assertEquals(1424883599, checkRun.getId());
    }

    /** A check run with output but no annotations should report a zero count. */
    @Test
    public void createCheckRunNoAnnotations() throws Exception {
        GHCheckRun checkRun = getInstallationGithub().getRepository("hub4j-test-org/test-checks")
                .createCheckRun("quick", "89a9ae301e35e667756034fdc933b1fc94f63fc1")
                .withConclusion(GHCheckRun.Conclusion.NEUTRAL)
                .add(new GHCheckRunBuilder.Output("Quick note", "nothing more to see here"))
                .create();
        assertEquals("completed", checkRun.getStatus());
        assertEquals(0, checkRun.getOutput().getAnnotationsCount());
        assertEquals(1424883957, checkRun.getId());
    }

    /** An in-progress check run has no conclusion yet. */
    @Test
    public void createPendingCheckRun() throws Exception {
        GHCheckRun checkRun = getInstallationGithub().getRepository("hub4j-test-org/test-checks")
                .createCheckRun("outstanding", "89a9ae301e35e667756034fdc933b1fc94f63fc1")
                .withStatus(GHCheckRun.Status.IN_PROGRESS)
                .create();
        assertEquals("in_progress", checkRun.getStatus());
        assertNull(checkRun.getConclusion());
        assertEquals(1424883451, checkRun.getId());
    }

    /** COMPLETED status without a conclusion must be rejected by the API (422). */
    @Test
    public void createCheckRunErrMissingConclusion() throws Exception {
        try {
            getInstallationGithub().getRepository("hub4j-test-org/test-checks")
                    .createCheckRun("outstanding", "89a9ae301e35e667756034fdc933b1fc94f63fc1")
                    .withStatus(GHCheckRun.Status.COMPLETED)
                    .create();
            fail("should have been rejected");
        } catch (HttpException x) {
            assertEquals(422, x.getResponseCode());
            assertThat(x.getMessage(), containsString("\\\"conclusion\\\" wasn't supplied"));
            assertThat(x.getUrl(), containsString("/repos/hub4j-test-org/test-checks/check-runs"));
            assertThat(x.getResponseMessage(), equalTo("422 Unprocessable Entity"));
        }
    }

    /** Creates an in-progress run, then completes it via update(). */
    @Test
    public void updateCheckRun() throws Exception {
        GHCheckRun checkRun = getInstallationGithub().getRepository("hub4j-test-org/test-checks")
                .createCheckRun("foo", "89a9ae301e35e667756034fdc933b1fc94f63fc1")
                .withStatus(GHCheckRun.Status.IN_PROGRESS)
                .withStartedAt(new Date(999_999_000))
                .add(new GHCheckRunBuilder.Output("Some Title", "what happened…")
                        .add(new GHCheckRunBuilder.Annotation("stuff.txt",
                                1,
                                GHCheckRun.AnnotationLevel.NOTICE,
                                "hello to you too").withTitle("Look here")))
                .create();
        GHCheckRun updated = checkRun.update()
                .withStatus(GHCheckRun.Status.COMPLETED)
                .withConclusion(GHCheckRun.Conclusion.SUCCESS)
                .withCompletedAt(new Date(999_999_999))
                .create();
        // assertEquals takes (expected, actual); the original had these swapped,
        // which produces confusing failure messages.
        assertEquals(new Date(999_999_000), updated.getStartedAt());
        assertEquals("foo", updated.getName());
        assertEquals(1, checkRun.getOutput().getAnnotationsCount());
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.ci.web.model;
/**
 * TC Bot version data, serialized to the web UI.
 */
@SuppressWarnings("PublicField") public class Version {
/** Default contact email for the project. */
public static final String DEFAULT_CONTACT = "dev@ignite.apache.org";
/** GitHub mirror reference. */
public static final String GITHUB_REF = "https://github.com/apache/ignite-teamcity-bot";
/** TC Bot version (date-based). */
public static final String VERSION = "20190725";
/** Java version the Web App is running on; filled in the constructor. */
public String javaVer;
/** TC Bot version. */
public String version = VERSION;
/** Ignite version. */
public String ignVer;
/** Full Ignite version string. */
public String ignVerFull;
/** TC Bot GitHub mirror URL. */
public String gitHubMirror = GITHUB_REF;
/** TC Bot canonical source repository URL. */
public String apacheGitUrl = "https://gitbox.apache.org/repos/asf/ignite-teamcity-bot.git";
/** Contact email. */
public String contactEmail = DEFAULT_CONTACT;
/** Captures the running JVM's version at construction time. */
public Version() {
javaVer = System.getProperty("java.version");
}
}
|
/*
* Copyright (C) 2001 by Dave Jarvis
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
* Online at: http://www.gnu.org/copyleft/gpl.html
*/
package com.barrybecker4.ca.dj.jigo.sgf.tokens;
/**
 * SGF "TM" token: the time limit for both players. Time is always given in
 * seconds.
 */
public class TimeLimitToken extends NumberToken implements InfoToken
{
public TimeLimitToken() { }
/**
 * Presume no time limit by default (0 seconds).
 */
protected float getDefault() { return 0; }
/**
 * The time limit for the game, in seconds, as parsed by NumberToken.
 */
public float getTimeLimit() { return getNumber(); }
}
|
/*
* Copyright (C) 2012-2015 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.collect.Lists;
import com.datastax.driver.core.exceptions.InvalidTypeException;
/**
* A prepared statement with values bound to the bind variables.
* <p>
* Once values has been provided for the variables of the {@link PreparedStatement}
* it has been created from, such BoundStatement can be executed (through
* {@link Session#execute(Statement)}).
* <p>
* The values of a BoundStatement can be set by either index or name. When
* setting them by name, names follow the case insensitivity rules explained in
* {@link ColumnDefinitions} but with the difference that if multiple bind
* variables have the same name, setting that name will set <b>all</b> the
* variables for that name.
* <p>
* Any variable that hasn't been specifically set will be considered {@code null}.
* <p>
* Bound values may also be retrieved using {@code get*()} methods. Note that this
* may have a non-negligible impact on performance: internally, values are stored
* in serialized form, so they need to be deserialized again. These methods are
* provided for debugging purposes.
*/
public class BoundStatement extends Statement implements GettableData {
final PreparedStatement statement;
final ByteBuffer[] values;
private ByteBuffer routingKey;
private DataWrapper wrapper;
/**
 * Creates a new {@code BoundStatement} from the provided prepared
 * statement.
 *
 * All bind variables start out unset ({@code null} buffers).
 *
 * @param statement the prepared statement from which to create a {@code BoundStatement}.
 */
public BoundStatement(PreparedStatement statement) {
this.statement = statement;
// One slot per bind variable; a null slot means "not set yet".
this.values = new ByteBuffer[statement.getVariables().size()];
// We want to reuse code from AbstractGettableData, but this class already extends Statement,
// so we emulate a mixin with a delegate.
this.wrapper = new DataWrapper(this);
// Inherit per-statement options (consistency, tracing, retry policy) from the
// prepared statement so callers don't have to re-apply them for every bind.
if (statement.getConsistencyLevel() != null)
this.setConsistencyLevel(statement.getConsistencyLevel());
if (statement.getSerialConsistencyLevel() != null)
this.setSerialConsistencyLevel(statement.getSerialConsistencyLevel());
if (statement.isTracing())
this.enableTracing();
if (statement.getRetryPolicy() != null)
this.setRetryPolicy(statement.getRetryPolicy());
}
/**
 * Returns the prepared statement on which this BoundStatement is based.
 *
 * @return the prepared statement on which this BoundStatement is based.
 */
public PreparedStatement preparedStatement() {
// Simple accessor; the statement reference is fixed at construction.
return statement;
}
/**
 * Tells whether the {@code i}th variable has been bound to a non-null value.
 *
 * @param i the index of the variable to check.
 * @return whether the {@code i}th variable has been bound to a non null value.
 *
 * @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
 */
public boolean isSet(int i) {
metadata().checkBounds(i);
// A null slot means the variable was never bound (or bound to null).
ByteBuffer bound = values[i];
return bound != null;
}
/**
 * Tells whether the first occurrence of variable {@code name} has been
 * bound to a non-null value.
 *
 * @param name the name of the variable to check.
 * @return whether the first occurrence of variable {@code name} has been
 * bound to a non-null value.
 *
 * @throws IllegalArgumentException if {@code name} is not a prepared
 * variable, that is if {@code !this.preparedStatement().variables().names().contains(name)}.
 */
public boolean isSet(String name) {
// Only the first occurrence is consulted; getFirstIdx throws if unknown.
int firstIndex = metadata().getFirstIdx(name);
return isSet(firstIndex);
}
/**
 * Binds values to the variables of this statement.
 *
 * This is a convenience method to bind all the variables of the
 * {@code BoundStatement} in one call.
 *
 * @param values the values to bind to the variables of the newly created
 * BoundStatement. The first element of {@code values} will be bound to the
 * first bind variable, etc. It is legal to provide fewer values than the
 * statement has bound variables. In that case, the remaining variable need
 * to be bound before execution. If more values than variables are provided
 * however, an IllegalArgumentException will be raised.
 * @return this bound statement.
 *
 * @throws IllegalArgumentException if more {@code values} are provided
 * than there is of bound variables in this statement.
 * @throws InvalidTypeException if any of the provided value is not of
 * correct type to be bound to the corresponding bind variable.
 * @throws NullPointerException if one of {@code values} is a collection
 * (List, Set or Map) containing a null value. Nulls are not supported in
 * collections by CQL.
 */
public BoundStatement bind(Object... values) {
// Extra values cannot be silently dropped; fewer values are allowed.
if (values.length > statement.getVariables().size())
throw new IllegalArgumentException(String.format("Prepared statement has only %d variables, %d values provided", statement.getVariables().size(), values.length));
for (int i = 0; i < values.length; i++)
{
Object toSet = values[i];
// null is always accepted, regardless of the column type.
if (toSet == null) {
setValue(i, null);
continue;
}
DataType columnType = statement.getVariables().getType(i);
// For collection types, element types are checked against the first
// element only (empty collections always pass).
switch (columnType.getName()) {
case LIST:
if (!(toSet instanceof List))
throw new InvalidTypeException(String.format("Invalid type for value %d, column is a list but %s provided", i, toSet.getClass()));
List<?> l = (List<?>)toSet;
// If the list is empty, it will never fail validation, but otherwise we should check the list given is of the right type
if (!l.isEmpty()) {
// Ugly? Yes
Class<?> providedClass = l.get(0).getClass();
Class<?> expectedClass = columnType.getTypeArguments().get(0).asJavaClass();
if (!expectedClass.isAssignableFrom(providedClass))
throw new InvalidTypeException(String.format("Invalid type for value %d of CQL type %s, expecting list of %s but provided list of %s", i, columnType, expectedClass, providedClass));
}
break;
case SET:
if (!(toSet instanceof Set))
throw new InvalidTypeException(String.format("Invalid type for value %d, column is a set but %s provided", i, toSet.getClass()));
Set<?> s = (Set<?>)toSet;
// If the set is empty, it will never fail validation, but otherwise we should check the set given is of the right type
if (!s.isEmpty()) {
// Ugly? Yes
Class<?> providedClass = s.iterator().next().getClass();
Class<?> expectedClass = columnType.getTypeArguments().get(0).getName().javaType;
if (!expectedClass.isAssignableFrom(providedClass))
throw new InvalidTypeException(String.format("Invalid type for value %d of CQL type %s, expecting set of %s but provided set of %s", i, columnType, expectedClass, providedClass));
}
break;
case MAP:
if (!(toSet instanceof Map))
throw new InvalidTypeException(String.format("Invalid type for value %d, column is a map but %s provided", i, toSet.getClass()));
Map<?, ?> m = (Map<?, ?>)toSet;
// If the map is empty, it will never fail validation, but otherwise we should check the map given is of the right type
if (!m.isEmpty()) {
// Ugly? Yes
Map.Entry<?, ?> entry = m.entrySet().iterator().next();
Class<?> providedKeysClass = entry.getKey().getClass();
Class<?> providedValuesClass = entry.getValue().getClass();
Class<?> expectedKeysClass = columnType.getTypeArguments().get(0).getName().javaType;
Class<?> expectedValuesClass = columnType.getTypeArguments().get(1).getName().javaType;
if (!expectedKeysClass.isAssignableFrom(providedKeysClass) || !expectedValuesClass.isAssignableFrom(providedValuesClass))
throw new InvalidTypeException(String.format("Invalid type for value %d of CQL type %s, expecting map of %s->%s but provided set of %s->%s", i, columnType, expectedKeysClass, expectedValuesClass, providedKeysClass, providedValuesClass));
}
break;
default:
// Token values are unwrapped to their underlying partition-key value.
if (toSet instanceof Token)
toSet = ((Token)toSet).getValue();
Class<?> providedClass = toSet.getClass();
Class<?> expectedClass = columnType.getName().javaType;
if (!expectedClass.isAssignableFrom(providedClass))
throw new InvalidTypeException(String.format("Invalid type for value %d of CQL type %s, expecting %s but %s provided", i, columnType, expectedClass, providedClass));
break;
}
// Values are stored in serialized form.
setValue(i, columnType.codec().serialize(toSet));
}
return this;
}
/**
 * Sets the routing key for this bound statement.
 * <p>
 * This is useful when the routing key can neither be set on the {@code PreparedStatement} this bound statement
 * was built from, nor automatically computed from bound variables. In particular, this is the case if the
 * partition key is composite and only some of its components are bound.
 *
 * @param routingKey the raw (binary) value to use as routing key.
 * @return this {@code BoundStatement} object.
 *
 * @see BoundStatement#getRoutingKey
 */
public BoundStatement setRoutingKey(ByteBuffer routingKey) {
// An explicitly-set key takes precedence over any computed one (see getRoutingKey).
this.routingKey = routingKey;
return this;
}
/**
 * The routing key for this bound query.
 * <p>
 * A non-{@code null} value is returned when any of the following holds, in
 * this order of precedence:
 * <ul>
 * <li>The routing key has been set directly through {@link BoundStatement#setRoutingKey}.</li>
 * <li>The routing key has been set through {@link PreparedStatement#setRoutingKey} for the
 * {@code PreparedStatement} this statement has been built from.</li>
 * <li>All the columns composing the partition key are bound variables of this {@code BoundStatement}. The routing
 * key will then be built using the values provided for these partition key columns.</li>
 * </ul>
 * Otherwise, {@code null} is returned.
 *
 * @return the routing key for this statement or {@code null}.
 */
@Override
public ByteBuffer getRoutingKey() {
// 1) Key explicitly set on this statement wins.
if (routingKey != null)
return routingKey;
// 2) Key set on the prepared statement comes next.
ByteBuffer preparedKey = statement.getRoutingKey();
if (preparedKey != null)
return preparedKey;
// 3) Otherwise, compute from bound partition-key columns, if known.
int[] indexes = statement.getPreparedId().routingKeyIndexes;
if (indexes == null)
return null;
if (indexes.length == 1)
return values[indexes[0]];
ByteBuffer[] components = new ByteBuffer[indexes.length];
for (int c = 0; c < indexes.length; c++) {
ByteBuffer component = values[indexes[c]];
// Every partition-key component must be bound to compose a key.
if (component == null)
return null;
components[c] = component;
}
return SimpleStatement.compose(components);
}
/**
 * Returns the keyspace this query operates on.
 * <p>
 * This method will always return a non-{@code null} value (unless the statement
 * has no variables, but you should avoid prepared statement in the first in that
 * case). The keyspace returned will be the one corresponding to the first
 * variable prepared in this statement (which in almost all case will be <i>the</i>
 * keyspace for the operation, though it's possible in CQL to build a batch
 * statement that acts on multiple keyspace).
 *
 * @return the keyspace for this statement (see above), or {@code null} if the
 * statement has no variables.
 */
@Override
public String getKeyspace() {
// No bind variables -> no metadata to derive the keyspace from.
if (statement.getPreparedId().metadata.size() == 0)
return null;
return statement.getPreparedId().metadata.getKeyspace(0);
}
/**
* Sets the {@code i}th value to the provided boolean.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type BOOLEAN.
*/
public BoundStatement setBool(int i, boolean v) {
metadata().checkType(i, DataType.Name.BOOLEAN);
return setValue(i, TypeCodec.BooleanCodec.instance.serializeNoBoxing(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided boolean.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any one occurrence of) {@code name} is not of type BOOLEAN.
*/
public BoundStatement setBool(String name, boolean v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = TypeCodec.BooleanCodec.instance.serializeNoBoxing(v);
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.BOOLEAN);
setValue(indexes[i], value);
}
return this;
}
/**
* Set the {@code i}th value to the provided integer.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type INT.
*/
public BoundStatement setInt(int i, int v) {
metadata().checkType(i, DataType.Name.INT);
return setValue(i, TypeCodec.IntCodec.instance.serializeNoBoxing(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided integer.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any one occurrence of) {@code name} is not of type INT.
*/
public BoundStatement setInt(String name, int v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = TypeCodec.IntCodec.instance.serializeNoBoxing(v);
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.INT);
setValue(indexes[i], value);
}
return this;
}
/**
* Sets the {@code i}th value to the provided long.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type BIGINT or COUNTER.
*/
public BoundStatement setLong(int i, long v) {
metadata().checkType(i, DataType.Name.BIGINT, DataType.Name.COUNTER);
return setValue(i, TypeCodec.LongCodec.instance.serializeNoBoxing(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided long.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not of type BIGINT or COUNTER.
*/
public BoundStatement setLong(String name, long v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = TypeCodec.LongCodec.instance.serializeNoBoxing(v);
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.BIGINT, DataType.Name.COUNTER);
setValue(indexes[i], value);
}
return this;
}
/**
* Set the {@code i}th value to the provided date.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type TIMESTAMP.
*/
public BoundStatement setDate(int i, Date v) {
metadata().checkType(i, DataType.Name.TIMESTAMP);
return setValue(i, v == null ? null : TypeCodec.DateCodec.instance.serialize(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided date.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not of type TIMESTAMP.
*/
public BoundStatement setDate(String name, Date v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = v == null ? null : TypeCodec.DateCodec.instance.serialize(v);
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.TIMESTAMP);
setValue(indexes[i], value);
}
return this;
}
/**
* Sets the {@code i}th value to the provided float.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type FLOAT.
*/
public BoundStatement setFloat(int i, float v) {
metadata().checkType(i, DataType.Name.FLOAT);
return setValue(i, TypeCodec.FloatCodec.instance.serializeNoBoxing(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided float.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not of type FLOAT.
*/
public BoundStatement setFloat(String name, float v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = TypeCodec.FloatCodec.instance.serializeNoBoxing(v);
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.FLOAT);
setValue(indexes[i], value);
}
return this;
}
/**
* Sets the {@code i}th value to the provided double.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type DOUBLE.
*/
public BoundStatement setDouble(int i, double v) {
metadata().checkType(i, DataType.Name.DOUBLE);
return setValue(i, TypeCodec.DoubleCodec.instance.serializeNoBoxing(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided double.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not of type DOUBLE.
*/
public BoundStatement setDouble(String name, double v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = TypeCodec.DoubleCodec.instance.serializeNoBoxing(v);
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.DOUBLE);
setValue(indexes[i], value);
}
return this;
}
/**
* Sets the {@code i}th value to the provided string.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is of neither of the
* following types: VARCHAR, TEXT or ASCII.
*/
public BoundStatement setString(int i, String v) {
DataType.Name type = metadata().checkType(i, DataType.Name.VARCHAR,
DataType.Name.TEXT,
DataType.Name.ASCII);
switch (type) {
case ASCII:
return setValue(i, v == null ? null : TypeCodec.StringCodec.asciiInstance.serialize(v));
case TEXT:
case VARCHAR:
return setValue(i, v == null ? null : TypeCodec.StringCodec.utf8Instance.serialize(v));
default:
throw new AssertionError();
}
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided string.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* of neither of the following types: VARCHAR, TEXT or ASCII.
*/
public BoundStatement setString(String name, String v) {
int[] indexes = metadata().getAllIdx(name);
for (int i = 0; i < indexes.length; i++)
setString(indexes[i], v);
return this;
}
/**
* Sets the {@code i}th value to the provided byte buffer.
*
* This method validate that the type of the column set is BLOB. If you
* want to insert manually serialized data into columns of another type,
* use {@link #setBytesUnsafe} instead.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type BLOB.
*/
public BoundStatement setBytes(int i, ByteBuffer v) {
metadata().checkType(i, DataType.Name.BLOB);
return setBytesUnsafe(i, v);
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided byte buffer.
*
* This method validate that the type of the column set is BLOB. If you
* want to insert manually serialized data into columns of another type,
* use {@link #setBytesUnsafe} instead.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is not of type BLOB.
*/
public BoundStatement setBytes(String name, ByteBuffer v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = v == null ? null : v.duplicate();
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.BLOB);
setValue(indexes[i], value);
}
return this;
}
/**
* Sets the {@code i}th value to the provided byte buffer.
*
* Contrary to {@link #setBytes}, this method does not check the
* type of the column set. If you insert data that is not compatible with
* the type of the column, you will get an {@code InvalidQueryException} at
* execute time.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
*/
public BoundStatement setBytesUnsafe(int i, ByteBuffer v) {
return setValue(i, v == null ? null : v.duplicate());
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided byte buffer.
*
* Contrary to {@link #setBytes}, this method does not check the
* type of the column set. If you insert data that is not compatible with
* the type of the column, you will get an {@code InvalidQueryException} at
* execute time.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is if {@code !this.preparedStatement().variables().names().contains(name)}.
*/
public BoundStatement setBytesUnsafe(String name, ByteBuffer v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = v == null ? null : v.duplicate();
for (int i = 0; i < indexes.length; i++)
setValue(indexes[i], value);
return this;
}
/**
* Sets the {@code i}th value to the provided big integer.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type VARINT.
*/
public BoundStatement setVarint(int i, BigInteger v) {
metadata().checkType(i, DataType.Name.VARINT);
return setValue(i, v == null ? null : TypeCodec.BigIntegerCodec.instance.serialize(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided big integer.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not of type VARINT.
*/
public BoundStatement setVarint(String name, BigInteger v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = v == null ? null : TypeCodec.BigIntegerCodec.instance.serialize(v);
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.VARINT);
setValue(indexes[i], value);
}
return this;
}
/**
* Sets the {@code i}th value to the provided big decimal.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type DECIMAL.
*/
public BoundStatement setDecimal(int i, BigDecimal v) {
metadata().checkType(i, DataType.Name.DECIMAL);
return setValue(i, v == null ? null : TypeCodec.DecimalCodec.instance.serialize(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided big decimal.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not of type DECIMAL.
*/
public BoundStatement setDecimal(String name, BigDecimal v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = v == null ? null : TypeCodec.DecimalCodec.instance.serialize(v);
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.DECIMAL);
setValue(indexes[i], value);
}
return this;
}
/**
* Sets the {@code i}th value to the provided UUID.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type UUID or
* TIMEUUID, or if column {@code i} is of type TIMEUUID but {@code v} is
* not a type 1 UUID.
*/
public BoundStatement setUUID(int i, UUID v) {
DataType.Name type = metadata().checkType(i, DataType.Name.UUID,
DataType.Name.TIMEUUID);
if (v == null)
return setValue(i, null);
if (type == DataType.Name.TIMEUUID && v.version() != 1)
throw new InvalidTypeException(String.format("%s is not a Type 1 (time-based) UUID", v));
return type == DataType.Name.UUID
? setValue(i, TypeCodec.UUIDCodec.instance.serialize(v))
: setValue(i, TypeCodec.TimeUUIDCodec.instance.serialize(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided UUID.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not of type UUID or TIMEUUID, or if column {@code name} is of type
* TIMEUUID but {@code v} is not a type 1 UUID.
*/
public BoundStatement setUUID(String name, UUID v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = v == null ? null : TypeCodec.UUIDCodec.instance.serialize(v);
for (int i = 0; i < indexes.length; i++) {
DataType.Name type = metadata().checkType(indexes[i], DataType.Name.UUID, DataType.Name.TIMEUUID);
if (v != null && type == DataType.Name.TIMEUUID && v.version() != 1)
throw new InvalidTypeException(String.format("%s is not a Type 1 (time-based) UUID", v));
setValue(indexes[i], value);
}
return this;
}
/**
* Sets the {@code i}th value to the provided inet address.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of type INET.
*/
public BoundStatement setInet(int i, InetAddress v) {
metadata().checkType(i, DataType.Name.INET);
return setValue(i, v == null ? null : TypeCodec.InetCodec.instance.serialize(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided inet address.
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not of type INET.
*/
public BoundStatement setInet(String name, InetAddress v) {
int[] indexes = metadata().getAllIdx(name);
ByteBuffer value = v == null ? null : TypeCodec.InetCodec.instance.serialize(v);
for (int i = 0; i < indexes.length; i++) {
metadata().checkType(indexes[i], DataType.Name.INET);
setValue(indexes[i], value);
}
return this;
}
/**
* Sets the {@code i}th value to the provided {@link Token}.
* <p>
* {@link #setPartitionKeyToken(Token)} should generally be preferred if you
* have a single token variable.
*
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not of the type of the token's value.
*/
public BoundStatement setToken(int i, Token v) {
metadata().checkType(i, v.getType().getName());
return setValue(i, v.getType().serialize(v.getValue()));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided token.
* <p>
* {@link #setPartitionKeyToken(Token)} should generally be preferred if you
* have a single token variable.
* <p>
* If you have multiple token variables, use positional binding ({@link #setToken(int, Token)},
* or named bind markers:
* <pre>
* {@code
* PreparedStatement pst = session.prepare("SELECT * FROM my_table WHERE token(k) > :min AND token(k) <= :max");
* BoundStatement b = pst.bind().setToken("min", minToken).setToken("max", maxToken);
* }
* </pre>
*
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not of the type of the token's value.
*/
public BoundStatement setToken(String name, Token v) {
int[] indexes = metadata().getAllIdx(name);
for (int i = 0; i < indexes.length; i++)
setToken(indexes[i], v);
return this;
}
    /**
     * Sets the value for (all occurrences of) variable "{@code partition key token}"
     * to the provided token (this is the name generated by Cassandra for markers
     * corresponding to a {@code token(...)} call).
     * <p>
     * This method is a shorthand for statements with a single token variable:
     * <pre>
     * {@code
     * Token token = ...
     * PreparedStatement pst = session.prepare("SELECT * FROM my_table WHERE token(k) = ?");
     * BoundStatement b = pst.bind().setPartitionKeyToken(token);
     * }
     * </pre>
     * If you have multiple token variables, use positional binding ({@link #setToken(int, Token)},
     * or named bind markers:
     * <pre>
     * {@code
     * PreparedStatement pst = session.prepare("SELECT * FROM my_table WHERE token(k) > :min AND token(k) <= :max");
     * BoundStatement b = pst.bind().setToken("min", minToken).setToken("max", maxToken);
     * }
     * </pre>
     *
     * @param v the value to set.
     * @return this BoundStatement.
     *
     * @throws IllegalArgumentException if {@code "partition key token"} is not a prepared
     * variable, that is, if the statement has no {@code token(...)} marker.
     * @throws InvalidTypeException if (any occurrence of) the marker is
     * not of the type of the token's value.
     */
    public BoundStatement setPartitionKeyToken(Token v) {
        // "partition key token" is the marker name Cassandra generates for token(...) calls.
        return setToken("partition key token", v);
    }
/**
* Sets the {@code i}th value to the provided list.
* <p>
* Please note that {@code null} values are not supported inside collection by CQL.
*
* @param <T> the type of the elements of the list to set.
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not a list type or
* if the elements of {@code v} are not of the type of the elements of
* column {@code i}.
* @throws NullPointerException if {@code v} contains null values. Nulls are not supported in collections
* by CQL.
*/
public <T> BoundStatement setList(int i, List<T> v) {
DataType type = metadata().getType(i);
if (type.getName() != DataType.Name.LIST)
throw new InvalidTypeException(String.format("Column %s is of type %s, cannot set to a list", metadata().getName(i), type));
if (v == null)
return setValue(i, null);
// If the list is empty, it will never fail validation, but otherwise we should check the list given if of the right type
if (!v.isEmpty()) {
// Ugly? Yes
Class<?> providedClass = v.get(0).getClass();
Class<?> expectedClass = type.getTypeArguments().get(0).asJavaClass();
if (!expectedClass.isAssignableFrom(providedClass))
throw new InvalidTypeException(String.format("Invalid value for column %s of CQL type %s, expecting list of %s but provided list of %s", metadata().getName(i), type, expectedClass, providedClass));
}
return setValue(i, type.codec().serialize(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided list.
* <p>
* Please note that {@code null} values are not supported inside collection by CQL.
*
* @param <T> the type of the elements of the list to set.
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not a list type or if the elements of {@code v} are not of the type of
* the elements of column {@code name}.
* @throws NullPointerException if {@code v} contains null values. Nulls are not supported in collections
* by CQL.
*/
public <T> BoundStatement setList(String name, List<T> v) {
int[] indexes = metadata().getAllIdx(name);
for (int i = 0; i < indexes.length; i++)
setList(indexes[i], v);
return this;
}
/**
* Sets the {@code i}th value to the provided map.
* <p>
* Please note that {@code null} values are not supported inside collection by CQL.
*
* @param <K> the type of the keys for the map to set.
* @param <V> the type of the values for the map to set.
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not a map type or
* if the elements (keys or values) of {@code v} are not of the type of the
* elements of column {@code i}.
* @throws NullPointerException if {@code v} contains null values. Nulls are not supported in collections
* by CQL.
*/
public <K, V> BoundStatement setMap(int i, Map<K, V> v) {
DataType type = metadata().getType(i);
if (type.getName() != DataType.Name.MAP)
throw new InvalidTypeException(String.format("Column %s is of type %s, cannot set to a map", metadata().getName(i), type));
if (v == null)
return setValue(i, null);
if (!v.isEmpty()) {
// Ugly? Yes
Map.Entry<K, V> entry = v.entrySet().iterator().next();
Class<?> providedKeysClass = entry.getKey().getClass();
Class<?> providedValuesClass = entry.getValue().getClass();
Class<?> expectedKeysClass = type.getTypeArguments().get(0).getName().javaType;
Class<?> expectedValuesClass = type.getTypeArguments().get(1).getName().javaType;
if (!expectedKeysClass.isAssignableFrom(providedKeysClass) || !expectedValuesClass.isAssignableFrom(providedValuesClass))
throw new InvalidTypeException(String.format("Invalid value for column %s of CQL type %s, expecting map of %s->%s but provided map of %s->%s", metadata().getName(i), type, expectedKeysClass, expectedValuesClass, providedKeysClass, providedValuesClass));
}
return setValue(i, type.codec().serialize(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided map.
* <p>
* Please note that {@code null} values are not supported inside collection by CQL.
*
* @param <K> the type of the keys for the map to set.
* @param <V> the type of the values for the map to set.
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not a map type or if the elements (keys or values) of {@code v} are not of
* the type of the elements of column {@code name}.
* @throws NullPointerException if {@code v} contains null values. Nulls are not supported in collections
* by CQL.
*/
public <K, V> BoundStatement setMap(String name, Map<K, V> v) {
int[] indexes = metadata().getAllIdx(name);
for (int i = 0; i < indexes.length; i++)
setMap(indexes[i], v);
return this;
}
/**
* Sets the {@code i}th value to the provided set.
* <p>
* Please note that {@code null} values are not supported inside collection by CQL.
*
* @param <T> the type of the elements of the set to set.
* @param i the index of the variable to set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IndexOutOfBoundsException if {@code i < 0 || i >= this.preparedStatement().variables().size()}.
* @throws InvalidTypeException if column {@code i} is not a set type or
* if the elements of {@code v} are not of the type of the elements of
* column {@code i}.
* @throws NullPointerException if {@code v} contains null values. Nulls are not supported in collections
* by CQL.
*/
public <T> BoundStatement setSet(int i, Set<T> v) {
DataType type = metadata().getType(i);
if (type.getName() != DataType.Name.SET)
throw new InvalidTypeException(String.format("Column %s is of type %s, cannot set to a set", metadata().getName(i), type));
if (v == null)
return setValue(i, null);
if (!v.isEmpty()) {
// Ugly? Yes
Class<?> providedClass = v.iterator().next().getClass();
Class<?> expectedClass = type.getTypeArguments().get(0).getName().javaType;
if (!expectedClass.isAssignableFrom(providedClass))
throw new InvalidTypeException(String.format("Invalid value for column %s of CQL type %s, expecting set of %s but provided set of %s", metadata().getName(i), type, expectedClass, providedClass));
}
return setValue(i, type.codec().serialize(v));
}
/**
* Sets the value for (all occurrences of) variable {@code name} to the
* provided set.
* <p>
* Please note that {@code null} values are not supported inside collection by CQL.
*
* @param <T> the type of the elements of the set to set.
* @param name the name of the variable to set; if multiple variables
* {@code name} are prepared, all of them are set.
* @param v the value to set.
* @return this BoundStatement.
*
* @throws IllegalArgumentException if {@code name} is not a prepared
* variable, that is, if {@code !this.preparedStatement().variables().names().contains(name)}.
* @throws InvalidTypeException if (any occurrence of) {@code name} is
* not a map type or if the elements of {@code v} are not of the type of
* the elements of column {@code name}.
* @throws NullPointerException if {@code v} contains null values. Nulls are not supported in collections
* by CQL.
*/
public <T> BoundStatement setSet(String name, Set<T> v) {
int[] indexes = metadata().getAllIdx(name);
for (int i = 0; i < indexes.length; i++)
setSet(indexes[i], v);
return this;
}
    // ------------------------------------------------------------------
    // Getter implementations.
    // Every read below is delegated verbatim to the internal "wrapper"
    // object, which holds the already-bound serialized values; this class
    // adds no logic of its own on the read path.
    // ------------------------------------------------------------------
    @Override
    public boolean isNull(int i) {
        return wrapper.isNull(i);
    }
    @Override
    public boolean isNull(String name) {
        return wrapper.isNull(name);
    }
    @Override
    public boolean getBool(int i) {
        return wrapper.getBool(i);
    }
    @Override
    public boolean getBool(String name) {
        return wrapper.getBool(name);
    }
    @Override
    public int getInt(int i) {
        return wrapper.getInt(i);
    }
    @Override
    public int getInt(String name) {
        return wrapper.getInt(name);
    }
    @Override
    public long getLong(int i) {
        return wrapper.getLong(i);
    }
    @Override
    public long getLong(String name) {
        return wrapper.getLong(name);
    }
    @Override
    public Date getDate(int i) {
        return wrapper.getDate(i);
    }
    @Override
    public Date getDate(String name) {
        return wrapper.getDate(name);
    }
    @Override
    public float getFloat(int i) {
        return wrapper.getFloat(i);
    }
    @Override
    public float getFloat(String name) {
        return wrapper.getFloat(name);
    }
    @Override
    public double getDouble(int i) {
        return wrapper.getDouble(i);
    }
    @Override
    public double getDouble(String name) {
        return wrapper.getDouble(name);
    }
    @Override
    public ByteBuffer getBytesUnsafe(int i) {
        return wrapper.getBytesUnsafe(i);
    }
    @Override
    public ByteBuffer getBytesUnsafe(String name) {
        return wrapper.getBytesUnsafe(name);
    }
    @Override
    public ByteBuffer getBytes(int i) {
        return wrapper.getBytes(i);
    }
    @Override
    public ByteBuffer getBytes(String name) {
        return wrapper.getBytes(name);
    }
    @Override
    public String getString(int i) {
        return wrapper.getString(i);
    }
    @Override
    public String getString(String name) {
        return wrapper.getString(name);
    }
    @Override
    public BigInteger getVarint(int i) {
        return wrapper.getVarint(i);
    }
    @Override
    public BigInteger getVarint(String name) {
        return wrapper.getVarint(name);
    }
    @Override
    public BigDecimal getDecimal(int i) {
        return wrapper.getDecimal(i);
    }
    @Override
    public BigDecimal getDecimal(String name) {
        return wrapper.getDecimal(name);
    }
    @Override
    public UUID getUUID(int i) {
        return wrapper.getUUID(i);
    }
    @Override
    public UUID getUUID(String name) {
        return wrapper.getUUID(name);
    }
    @Override
    public InetAddress getInet(int i) {
        return wrapper.getInet(i);
    }
    @Override
    public InetAddress getInet(String name) {
        return wrapper.getInet(name);
    }
    @Override
    public <T> List<T> getList(int i, Class<T> elementsClass) {
        return wrapper.getList(i, elementsClass);
    }
    @Override
    public <T> List<T> getList(String name, Class<T> elementsClass) {
        return wrapper.getList(name, elementsClass);
    }
    @Override
    public <T> Set<T> getSet(int i, Class<T> elementsClass) {
        return wrapper.getSet(i, elementsClass);
    }
    @Override
    public <T> Set<T> getSet(String name, Class<T> elementsClass) {
        return wrapper.getSet(name, elementsClass);
    }
    @Override
    public <K, V> Map<K, V> getMap(int i, Class<K> keysClass, Class<V> valuesClass) {
        return wrapper.getMap(i, keysClass, valuesClass);
    }
    @Override
    public <K, V> Map<K, V> getMap(String name, Class<K> keysClass, Class<V> valuesClass) {
        return wrapper.getMap(name, keysClass, valuesClass);
    }
    @Override
    public Object getObject(int i) {
        return wrapper.getObject(i);
    }
    @Override
    public Object getObject(String name) {
        return wrapper.getObject(name);
    }
// Shorthand for the variable definitions of the underlying prepared statement.
private ColumnDefinitions metadata() {
return statement.getVariables();
}
// Stores the serialized bytes for variable i and returns this statement so
// setter implementations can chain.
private BoundStatement setValue(int i, ByteBuffer value) {
values[i] = value;
return this;
}
/**
 * Exposes a BoundStatement's serialized values through the
 * AbstractGettableData API so the statement's getters can delegate to it.
 * The wrapper shares (does not copy) the wrapped statement's value array.
 */
static class DataWrapper extends AbstractGettableData {
final ByteBuffer[] values;
DataWrapper(BoundStatement wrapped) {
super(wrapped.metadata());
values = wrapped.values;
}
@Override
protected ByteBuffer getValue(int i) {
return values[i];
}
}
}
|
package com.bitdubai.fermat_wpd_plugin.layer.middleware.wallet_manager.developer.bitdubai.version_1.exceptions;
import com.bitdubai.fermat_api.FermatException;
/**
 * Thrown when the wallet manager middleware fails to persist the wallet
 * language setting.
 *
 * Created by natalia on 04/08/15.
 */
public class CantPersistWalletLanguageException extends FermatException {
/**
 * This is the constructor that every inherited FermatException must implement
 *
 * @param message the short description of the why this exception happened, there is a public static constant called DEFAULT_MESSAGE that can be used here
 * @param cause the exception that triggered the throwing of the current exception, if there are no other exceptions to be declared here, the cause should be null
 * @param context a String that provides the values of the variables that could have affected the exception
 * @param possibleReason an explicative reason of why we believe this exception was most likely thrown
 */
public CantPersistWalletLanguageException(String message, Exception cause, String context, String possibleReason) {
super(message, cause, context, possibleReason);
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. The ASF licenses this file to You
* under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. For additional information regarding
* copyright in this work, please see the NOTICE file in the top level
* directory of this distribution.
*/
package org.apache.roller.weblogger.ui.struts2.editor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.roller.weblogger.WebloggerException;
import org.apache.roller.weblogger.business.FileIOException;
import org.apache.roller.weblogger.business.MediaFileManager;
import org.apache.roller.weblogger.business.WebloggerFactory;
import org.apache.roller.weblogger.pojos.MediaFile;
import org.apache.roller.weblogger.pojos.MediaFileComparator;
import org.apache.roller.weblogger.pojos.MediaFileComparator.MediaFileComparatorType;
import org.apache.roller.weblogger.pojos.MediaFileDirectory;
import org.apache.roller.weblogger.pojos.MediaFileFilter;
import org.apache.roller.weblogger.ui.struts2.pagers.MediaFilePager;
import org.apache.roller.weblogger.ui.struts2.util.KeyValueObject;
import org.apache.roller.weblogger.util.cache.CacheManager;
import org.apache.struts2.convention.annotation.AllowedMethods;
import org.apache.struts2.interceptor.validation.SkipValidation;
/**
 * View media files.
 * <p>
 * Struts action backing the media-file browser: lists the contents of a media
 * file directory, creates and deletes folders, searches media files, and
 * performs bulk operations (delete, move, include-in-gallery) on selections.
 */
@SuppressWarnings("serial")
// TODO: make this work @AllowedMethods({"execute","view","search","delete","deleteSelected","deleteFolder","includeInGallery","moveSelected"})
public class MediaFileView extends MediaFileBase {

    private static Log log = LogFactory.getLog(MediaFileView.class);

    private String directoryId;
    private String directoryName;
    private String sortBy;
    private String newDirectoryName;
    private List<MediaFile> childFiles;
    private MediaFileDirectory currentDirectory;

    // Search criteria - drop-down for file type.
    // NOTE(review): the four drop-down lists below are mutable statics filled
    // lazily in myPrepare() without synchronization. The initialization is
    // idempotent so a race is harmless, but the labels are resolved with the
    // locale of whichever request happens to run first - confirm intended.
    private static List<KeyValueObject> FILE_TYPES = null;

    // Search criteria - drop-down for size filter
    private static List<KeyValueObject> SIZE_FILTER_TYPES = null;

    // Search criteria - drop-down for size unit
    private static List<KeyValueObject> SIZE_UNITS = null;

    // Sort options for search results.
    private static List<KeyValueObject> SORT_OPTIONS = null;

    // Pager for displaying search results.
    private MediaFilePager pager;

    // Path of new directory to be created.
    private String newDirectoryPath;

    // a new directory the user wishes to view
    private String viewDirectoryId = null;

    private MediaFileSearchBean bean = new MediaFileSearchBean();

    public MediaFileView() {
        this.actionName = "mediaFileView";
        this.desiredMenu = "editor";
        this.pageTitle = "mediaFileView.title";
    }

    /**
     * Prepares the view action: populates the static search drop-downs on
     * first use and refreshes the directory list for the action weblog.
     */
    @Override
    public void myPrepare() {
        if (SIZE_FILTER_TYPES == null) {
            SIZE_FILTER_TYPES = Arrays.asList(new KeyValueObject(
                    "mediaFileView.gt", getText("mediaFileView.gt")),
                    new KeyValueObject("mediaFileView.ge",
                            getText("mediaFileView.ge")), new KeyValueObject(
                            "mediaFileView.eq", getText("mediaFileView.eq")),
                    new KeyValueObject("mediaFileView.le",
                            getText("mediaFileView.le")), new KeyValueObject(
                            "mediaFileView.lt", getText("mediaFileView.lt")));
            FILE_TYPES = Arrays.asList(new KeyValueObject("mediaFileView.any",
                    getText("mediaFileView.any")), new KeyValueObject(
                    "mediaFileView.others", getText("mediaFileView.others")),
                    new KeyValueObject("mediaFileView.image",
                            getText("mediaFileView.image")),
                    new KeyValueObject("mediaFileView.video",
                            getText("mediaFileView.video")),
                    new KeyValueObject("mediaFileView.audio",
                            getText("mediaFileView.audio")));
            SIZE_UNITS = Arrays.asList(new KeyValueObject(
                    "mediaFileView.bytes", getText("mediaFileView.bytes")),
                    new KeyValueObject("mediaFileView.kb",
                            getText("mediaFileView.kb")), new KeyValueObject(
                            "mediaFileView.mb", getText("mediaFileView.mb")));
            SORT_OPTIONS = Arrays.asList(new KeyValueObject("name",
                    getText("generic.name")), new KeyValueObject(
                    "date_uploaded", getText("mediaFileView.date")),
                    new KeyValueObject("type", getText("mediaFileView.type")));
        }
        refreshAllDirectories();
    }

    /**
     * Create a new directory by name. All folders placed at the root.
     *
     * @return String the result of re-running {@link #execute()}.
     */
    public String createNewDirectory() {
        boolean dirCreated = false;
        if (StringUtils.isEmpty(this.newDirectoryName)) {
            addError("mediaFile.error.view.dirNameEmpty");
        } else if (this.newDirectoryName.contains("/")) {
            // slashes are reserved as path separators, so reject them in names
            addError("mediaFile.error.view.dirNameInvalid");
        } else {
            try {
                log.debug("Creating new directory - " + this.newDirectoryName);
                MediaFileManager manager = WebloggerFactory.getWeblogger()
                        .getMediaFileManager();
                if (!getActionWeblog().hasMediaFileDirectory(
                        this.newDirectoryName)) {
                    // Create
                    MediaFileDirectory dir = manager.createMediaFileDirectory(
                            getActionWeblog(), this.newDirectoryName);
                    // flush changes
                    WebloggerFactory.getWeblogger().flush();
                    addMessage("mediaFile.directoryCreate.success",
                            this.newDirectoryName);
                    // Switch to folder
                    setDirectoryId(dir.getId());
                    dirCreated = true;
                } else {
                    // already exists
                    addMessage("mediaFile.directoryCreate.error.exists",
                            this.newDirectoryName);
                }
            } catch (WebloggerException e) {
                log.error("Error creating new directory", e);
                addError("Error creating new directory");
            }
        }
        if (dirCreated) {
            // Refresh list of directories so the newly created directory is
            // included.
            refreshAllDirectories();
        }
        return execute();
    }

    /**
     * Returns directory content in JSON format.
     */
    public String fetchDirectoryContentLight() {
        execute();
        return "success.json";
    }

    /**
     * Fetches and displays list of media file for the given directory. The
     * directory could be chosen by ID or path; when neither is supplied the
     * weblog's default directory is shown.
     *
     * @return String The result of the action (always SUCCESS; errors are
     *         reported via action messages).
     */
    @SkipValidation
    @Override
    public String execute() {
        MediaFileManager manager = WebloggerFactory.getWeblogger().getMediaFileManager();
        try {
            MediaFileDirectory directory;
            if (StringUtils.isNotEmpty(this.directoryId)) {
                directory = manager.getMediaFileDirectory(this.directoryId);
            } else if (StringUtils.isNotEmpty(this.directoryName)) {
                directory = manager.getMediaFileDirectoryByName(
                        getActionWeblog(), this.directoryName);
            } else {
                directory = manager.getDefaultMediaFileDirectory(getActionWeblog());
            }
            // Fail fast on an unknown directory. The original code only
            // null-checked AFTER the first directory.getId() dereference, so a
            // missing directory surfaced as an NPE swallowed by the generic
            // catch; the visible outcome (error message + SUCCESS) is the same.
            if (directory == null) {
                addError("MediaFile.error.view");
                return SUCCESS;
            }
            this.directoryId = directory.getId();
            this.directoryName = directory.getName();
            this.childFiles = new ArrayList<>();
            this.childFiles.addAll(directory.getMediaFiles());
            if ("type".equals(sortBy)) {
                this.childFiles.sort(new MediaFileComparator(MediaFileComparatorType.TYPE));
            } else if ("date_uploaded".equals(sortBy)) {
                this.childFiles.sort(new MediaFileComparator(MediaFileComparatorType.DATE_UPLOADED));
            } else {
                // default to sort by name
                sortBy = "name";
                this.childFiles.sort(new MediaFileComparator(MediaFileComparatorType.NAME));
            }
            this.currentDirectory = directory;
            setViewDirectoryId(directory.getId());
            return SUCCESS;
        } catch (Exception e) {
            // The previous dedicated FileIOException catch block was byte-
            // identical to this one, so the two were merged.
            log.error("Error viewing media file directory ", e);
            addError("MediaFile.error.view");
        }
        return SUCCESS;
    }

    /**
     * View the contents of another Media folder.
     */
    public String view() {
        try {
            MediaFileManager manager = WebloggerFactory.getWeblogger()
                    .getMediaFileManager();
            if (!StringUtils.isEmpty(viewDirectoryId)) {
                setDirectoryId(viewDirectoryId);
                setCurrentDirectory(manager.getMediaFileDirectory(viewDirectoryId));
            }
        } catch (WebloggerException ex) {
            log.error("Error looking up directory", ex);
        }
        return execute();
    }

    /**
     * Search media files matching the criteria captured in the search bean and
     * page the results. (The original javadoc incorrectly read "Save a media
     * file.")
     *
     * @return String The result of the action.
     */
    public String search() {
        boolean valSuccess = myValidate();
        if (valSuccess) {
            MediaFileFilter filter = new MediaFileFilter();
            bean.copyTo(filter);
            MediaFileManager manager = WebloggerFactory.getWeblogger().getMediaFileManager();
            try {
                List<MediaFile> rawResults = manager.searchMediaFiles(getActionWeblog(), filter);
                boolean hasMore = false;
                List<MediaFile> results = new ArrayList<>(rawResults);
                // The manager fetches one row beyond the page size; the extra
                // row only signals that another page exists.
                if (results.size() > MediaFileSearchBean.PAGE_SIZE) {
                    results.remove(results.size() - 1);
                    hasMore = true;
                }
                this.pager = new MediaFilePager(bean.getPageNum(), results,
                        hasMore);
            } catch (Exception e) {
                log.error("Error applying search criteria", e);
                addError("Error applying search criteria - check Roller logs");
            }
        }
        return SUCCESS;
    }

    /**
     * Delete selected media files.
     */
    public String deleteSelected() {
        doDeleteSelected();
        return execute();
    }

    /**
     * Delete selected media file
     */
    public String delete() {
        doDeleteMediaFile();
        return execute();
    }

    /**
     * Delete folder and re-route the view to the default folder.
     */
    public String deleteFolder() {
        try {
            MediaFileManager manager = WebloggerFactory.getWeblogger().getMediaFileManager();
            if (directoryId != null) {
                log.debug("Deleting media file folder - " + directoryId + " ("
                        + directoryName + ")");
                MediaFileDirectory mediaFileDir = manager
                        .getMediaFileDirectory(directoryId);
                manager.removeMediaFileDirectory(mediaFileDir);
                refreshAllDirectories();
                WebloggerFactory.getWeblogger().getWeblogManager()
                        .saveWeblog(this.getActionWeblog());
                // flush changes
                WebloggerFactory.getWeblogger().flush();
                WebloggerFactory.getWeblogger().release();
                addMessage("mediaFile.deleteFolder.success");
                // notify caches
                CacheManager.invalidate(getActionWeblog());
                // re-route to default folder
                mediaFileDir = manager
                        .getDefaultMediaFileDirectory(getActionWeblog());
                setDirectoryId(mediaFileDir.getId());
                setDirectoryName(mediaFileDir.getName());
            } else {
                log.error("(System error) No directory ID provided for media file folder delete.");
            }
        } catch (WebloggerException ex) {
            log.error("Error deleting folder", ex);
        }
        return execute();
    }

    /**
     * Include selected media file in gallery
     */
    public String includeInGallery() {
        doIncludeMediaFileInGallery();
        return execute();
    }

    /**
     * Move selected media files to a different directory
     */
    public String moveSelected() {
        doMoveSelected();
        return execute();
    }

    public String getDirectoryId() {
        return directoryId;
    }

    public void setDirectoryId(String id) {
        this.directoryId = id;
    }

    public List<MediaFile> getChildFiles() {
        return childFiles;
    }

    public void setChildFiles(List<MediaFile> files) {
        this.childFiles = files;
    }

    public String getNewDirectoryName() {
        return newDirectoryName;
    }

    public void setNewDirectoryName(String newDirectoryName) {
        this.newDirectoryName = newDirectoryName;
    }

    public MediaFileDirectory getCurrentDirectory() {
        return currentDirectory;
    }

    public void setCurrentDirectory(MediaFileDirectory currentDirectory) {
        this.currentDirectory = currentDirectory;
    }

    public String getDirectoryName() {
        return directoryName;
    }

    public void setDirectoryName(String path) {
        this.directoryName = path;
    }

    public String getSortBy() {
        return sortBy;
    }

    public void setSortBy(String sortBy) {
        this.sortBy = sortBy;
    }

    /**
     * Validates search input: at least one criterion (name, tags, type, or a
     * non-zero size) must be supplied.
     *
     * @return true when the search bean holds at least one usable criterion.
     */
    public boolean myValidate() {
        if (StringUtils.isEmpty(bean.getName())
                && StringUtils.isEmpty(bean.getTags())
                && StringUtils.isEmpty(bean.getType()) && bean.getSize() == 0) {
            addError("MediaFile.error.search.empty");
            return false;
        }
        return true;
    }

    public MediaFileSearchBean getBean() {
        return bean;
    }

    public void setBean(MediaFileSearchBean b) {
        this.bean = b;
    }

    public List<KeyValueObject> getFileTypes() {
        return FILE_TYPES;
    }

    public List<KeyValueObject> getSizeFilterTypes() {
        return SIZE_FILTER_TYPES;
    }

    public List<KeyValueObject> getSizeUnits() {
        return SIZE_UNITS;
    }

    public List<KeyValueObject> getSortOptions() {
        return SORT_OPTIONS;
    }

    public MediaFilePager getPager() {
        return pager;
    }

    public void setPager(MediaFilePager pager) {
        this.pager = pager;
    }

    public String getNewDirectoryPath() {
        return newDirectoryPath;
    }

    public void setNewDirectoryPath(String newDirectoryPath) {
        this.newDirectoryPath = newDirectoryPath;
    }

    public String getViewDirectoryId() {
        return viewDirectoryId;
    }

    public void setViewDirectoryId(String viewDirectoryId) {
        this.viewDirectoryId = viewDirectoryId;
    }
}
|
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
 * Extended information of an education tuition payment order (auto-generated
 * Alipay SDK model; field comments translated from the original Chinese).
 *
 * @author auto create
 * @since 1.0, 2021-02-24 16:31:16
 */
public class EduTradeExtInfo extends AlipayObject {
private static final long serialVersionUID = 5472194886478392375L;
/**
 * Course description.
 */
@ApiField("course_desc")
private String courseDesc;
/**
 * Course image URL.
 */
@ApiField("course_img_url")
private String courseImgUrl;
/**
 * Course name.
 */
@ApiField("course_name")
private String courseName;
/**
 * Course tag.
 */
@ApiField("course_tag")
private String courseTag;
/**
 * Original course price, in yuan, with two decimal places.
 */
@ApiField("origin_price")
private String originPrice;
/**
 * External (merchant-side) course ID.
 */
@ApiField("out_course_id")
private String outCourseId;
/**
 * External (merchant-side) store ID.
 */
@ApiField("out_shop_id")
private String outShopId;
/**
 * Course unit price, in yuan, with two decimal places.
 */
@ApiField("price")
private String price;
/**
 * Store image URL.
 */
@ApiField("shop_img_url")
private String shopImgUrl;
/**
 * Store alias (display name).
 */
@ApiField("shop_name")
private String shopName;
/**
 * Store page URL the user can be redirected to.
 */
@ApiField("shop_url")
private String shopUrl;
public String getCourseDesc() {
return this.courseDesc;
}
public void setCourseDesc(String courseDesc) {
this.courseDesc = courseDesc;
}
public String getCourseImgUrl() {
return this.courseImgUrl;
}
public void setCourseImgUrl(String courseImgUrl) {
this.courseImgUrl = courseImgUrl;
}
public String getCourseName() {
return this.courseName;
}
public void setCourseName(String courseName) {
this.courseName = courseName;
}
public String getCourseTag() {
return this.courseTag;
}
public void setCourseTag(String courseTag) {
this.courseTag = courseTag;
}
public String getOriginPrice() {
return this.originPrice;
}
public void setOriginPrice(String originPrice) {
this.originPrice = originPrice;
}
public String getOutCourseId() {
return this.outCourseId;
}
public void setOutCourseId(String outCourseId) {
this.outCourseId = outCourseId;
}
public String getOutShopId() {
return this.outShopId;
}
public void setOutShopId(String outShopId) {
this.outShopId = outShopId;
}
public String getPrice() {
return this.price;
}
public void setPrice(String price) {
this.price = price;
}
public String getShopImgUrl() {
return this.shopImgUrl;
}
public void setShopImgUrl(String shopImgUrl) {
this.shopImgUrl = shopImgUrl;
}
public String getShopName() {
return this.shopName;
}
public void setShopName(String shopName) {
this.shopName = shopName;
}
public String getShopUrl() {
return this.shopUrl;
}
public void setShopUrl(String shopUrl) {
this.shopUrl = shopUrl;
}
}
|
package com.rts.services.dao;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.jpa.repository.Query;
import com.rts.services.model.AidCase;
import com.rts.services.model.Comment;
import com.rts.services.model.Group;
/**
 * Spring Data repository for {@link Comment} entities.
 * <p>
 * Fix: {@code JpaSpecificationExecutor} was extended as a raw type; it is now
 * parameterized with {@code Comment} so specification queries are type-safe.
 */
public interface CommentDAO extends JpaRepository<Comment, Integer>, JpaSpecificationExecutor<Comment> {

    /**
     * Finds all comments whose {@code caseid} property equals the given value
     * (derived query); returns an empty list when none match.
     */
    List<Comment> findByCaseid(String caseid);
}
|
package tr.com.infumia.infumialib.platform.paper.smartinventory.icon;
import java.util.ArrayList;
import java.util.Collection;
import java.util.function.Predicate;
import lombok.RequiredArgsConstructor;
import org.bukkit.Material;
import org.bukkit.inventory.ItemStack;
import org.jetbrains.annotations.NotNull;
import tr.com.infumia.infumialib.platform.paper.smartinventory.Handle;
import tr.com.infumia.infumialib.platform.paper.smartinventory.Icon;
import tr.com.infumia.infumialib.platform.paper.smartinventory.InventoryContents;
import tr.com.infumia.infumialib.platform.paper.smartinventory.event.abs.IconEvent;
/**
 * A straightforward {@link Icon} implementation backed by a plain handle list
 * and two visibility/usability predicates.
 */
@RequiredArgsConstructor
public final class BasicIcon implements Icon {

  /**
   * the registered event handles.
   */
  private final Collection<Handle<? extends IconEvent>> handles = new ArrayList<>();

  /**
   * decides whether the icon is visible for the given contents.
   */
  @NotNull
  private Predicate<InventoryContents> canSee = contents -> true;

  /**
   * decides whether the icon reacts to events for the given contents.
   */
  @NotNull
  private Predicate<InventoryContents> canUse = contents -> true;

  /**
   * the item shown when the icon is not visible.
   */
  @NotNull
  private ItemStack fallback = new ItemStack(Material.AIR);

  /**
   * the displayed item.
   */
  @NotNull
  private ItemStack item;

  @Override
  public <T extends IconEvent> void accept(@NotNull final T event) {
    final var contents = event.contents();
    // Events are only dispatched while the icon is both visible and usable.
    if (!this.canSee.test(contents) || !this.canUse.test(contents)) {
      return;
    }
    for (final var registered : this.handles) {
      if (registered.type().isAssignableFrom(event.getClass())) {
        //noinspection unchecked
        ((Handle<T>) registered).accept(event);
      }
    }
  }

  @NotNull
  @Override
  public ItemStack calculateItem(@NotNull final InventoryContents contents) {
    // Invisible icons render their fallback item instead.
    return this.canSee.test(contents) ? this.getItem() : this.fallback;
  }

  @NotNull
  @Override
  public Icon canSee(@NotNull final Predicate<InventoryContents> predicate) {
    this.canSee = predicate;
    return this;
  }

  @NotNull
  @Override
  public Icon canUse(@NotNull final Predicate<InventoryContents> predicate) {
    this.canUse = predicate;
    return this;
  }

  @NotNull
  @Override
  public Icon fallback(@NotNull final ItemStack fallback) {
    this.fallback = fallback;
    return this;
  }

  @NotNull
  @Override
  public ItemStack getItem() {
    return this.item;
  }

  @NotNull
  @Override
  public <T extends IconEvent> Icon handle(@NotNull final Handle<T> handle) {
    this.handles.add(handle);
    return this;
  }

  @NotNull
  @Override
  public Icon handles(@NotNull final Collection<Handle<? extends IconEvent>> handles) {
    this.handles.addAll(handles);
    return this;
  }

  @NotNull
  @Override
  public Icon item(@NotNull final ItemStack item) {
    this.item = item;
    return this;
  }
}
|
/**
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.config;
import com.github.ambry.utils.Utils;
import java.util.List;
/**
 * The configs for the server.
 * <p>
 * Fix: each config key (and each non-trivial default) was previously written
 * twice - once in the {@code @Config}/{@code @Default} annotation and once in
 * the constructor lookup. The strings are now declared once as constants so
 * the two sites cannot drift apart.
 */
public class ServerConfig {

  private static final String REQUEST_HANDLER_NUM_OF_THREADS = "server.request.handler.num.of.threads";
  private static final String SCHEDULER_NUM_OF_THREADS = "server.scheduler.num.of.threads";
  private static final String STATS_PUBLISH_LOCAL_ENABLED = "server.stats.publish.local.enabled";
  private static final String STATS_PUBLISH_HEALTH_REPORT_ENABLED = "server.stats.publish.health.report.enabled";
  private static final String QUOTA_STATS_AGGREGATE_INTERVAL_IN_MINUTES =
      "server.quota.stats.aggregate.interval.in.minutes";
  private static final String ENABLE_STORE_DATA_PREFETCH = "server.enable.store.data.prefetch";
  private static final String STORE_KEY_CONVERTER_FACTORY = "server.store.key.converter.factory";
  private static final String MESSAGE_TRANSFORMER = "server.message.transformer";
  private static final String STATS_REPORTS_TO_PUBLISH = "server.stats.reports.to.publish";
  private static final String VALIDATE_REQUEST_BASED_ON_STORE_STATE = "server.validate.request.based.on.store.state";
  private static final String HANDLE_UNDELETE_REQUEST_ENABLED = "server.handle.undelete.request.enabled";
  private static final String DEFAULT_STORE_KEY_CONVERTER_FACTORY =
      "com.github.ambry.store.StoreKeyConverterFactoryImpl";
  private static final String DEFAULT_MESSAGE_TRANSFORMER = "com.github.ambry.messageformat.ValidatingTransformer";

  /**
   * The number of request handler threads used by the server to process requests
   */
  @Config(REQUEST_HANDLER_NUM_OF_THREADS)
  @Default("7")
  public final int serverRequestHandlerNumOfThreads;

  /**
   * The number of scheduler threads the server will use to perform background tasks (store, replication).
   * NOTE(review): the field name keeps its historical lowercase 't' ("Ofthreads")
   * because renaming a public field would break existing callers.
   */
  @Config(SCHEDULER_NUM_OF_THREADS)
  @Default("10")
  public final int serverSchedulerNumOfthreads;

  /**
   * The option to enable or disable publishing stats locally.
   */
  @Config(STATS_PUBLISH_LOCAL_ENABLED)
  @Default("false")
  public final boolean serverStatsPublishLocalEnabled;

  /**
   * The option to enable or disable publishing stats via Health Reports
   */
  @Config(STATS_PUBLISH_HEALTH_REPORT_ENABLED)
  @Default("false")
  public final boolean serverStatsPublishHealthReportEnabled;

  /**
   * The frequency in mins at which cluster wide quota stats will be aggregated
   */
  @Config(QUOTA_STATS_AGGREGATE_INTERVAL_IN_MINUTES)
  @Default("60")
  public final long serverQuotaStatsAggregateIntervalInMinutes;

  /**
   * The option to enable data prefetch for GET request and don't do zero copy.
   */
  @Config(ENABLE_STORE_DATA_PREFETCH)
  @Default("false")
  public final boolean serverEnableStoreDataPrefetch;

  /**
   * Implementation class for StoreKeyConverterFactory
   */
  @Config(STORE_KEY_CONVERTER_FACTORY)
  @Default(DEFAULT_STORE_KEY_CONVERTER_FACTORY)
  public final String serverStoreKeyConverterFactory;

  /**
   * Implementation for message transformation.
   */
  @Config(MESSAGE_TRANSFORMER)
  @Default(DEFAULT_MESSAGE_TRANSFORMER)
  public final String serverMessageTransformer;

  /**
   * The comma separated list of stats reports to publish in Helix.
   */
  @Config(STATS_REPORTS_TO_PUBLISH)
  @Default("")
  public final List<String> serverStatsReportsToPublish;

  /**
   * The option to enable or disable validating request based on store state.
   */
  @Config(VALIDATE_REQUEST_BASED_ON_STORE_STATE)
  @Default("false")
  public final boolean serverValidateRequestBasedOnStoreState;

  /**
   * True to enable ambry server handling undelete requests.
   */
  @Config(HANDLE_UNDELETE_REQUEST_ENABLED)
  @Default("false")
  public final boolean serverHandleUndeleteRequestEnabled;

  public ServerConfig(VerifiableProperties verifiableProperties) {
    serverRequestHandlerNumOfThreads = verifiableProperties.getInt(REQUEST_HANDLER_NUM_OF_THREADS, 7);
    serverSchedulerNumOfthreads = verifiableProperties.getInt(SCHEDULER_NUM_OF_THREADS, 10);
    serverStatsPublishLocalEnabled = verifiableProperties.getBoolean(STATS_PUBLISH_LOCAL_ENABLED, false);
    serverStatsPublishHealthReportEnabled =
        verifiableProperties.getBoolean(STATS_PUBLISH_HEALTH_REPORT_ENABLED, false);
    serverQuotaStatsAggregateIntervalInMinutes =
        verifiableProperties.getLong(QUOTA_STATS_AGGREGATE_INTERVAL_IN_MINUTES, 60);
    serverEnableStoreDataPrefetch = verifiableProperties.getBoolean(ENABLE_STORE_DATA_PREFETCH, false);
    serverStoreKeyConverterFactory =
        verifiableProperties.getString(STORE_KEY_CONVERTER_FACTORY, DEFAULT_STORE_KEY_CONVERTER_FACTORY);
    serverMessageTransformer = verifiableProperties.getString(MESSAGE_TRANSFORMER, DEFAULT_MESSAGE_TRANSFORMER);
    serverStatsReportsToPublish =
        Utils.splitString(verifiableProperties.getString(STATS_REPORTS_TO_PUBLISH, ""), ",");
    serverValidateRequestBasedOnStoreState =
        verifiableProperties.getBoolean(VALIDATE_REQUEST_BASED_ON_STORE_STATE, false);
    serverHandleUndeleteRequestEnabled =
        verifiableProperties.getBoolean(HANDLE_UNDELETE_REQUEST_ENABLED, false);
  }
}
|
package mybatis.mapper;
import dto.Tutoriales;
import java.util.List;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;
// MyBatis annotation mapper providing CRUD operations for the "tutoriales" table.
public interface TutorialesMapper {
@Select(TUTORIALES_QRY)
public List<Tutoriales> tutorialesQry();
@Select(TUTORIALES_GET)
public Tutoriales tutorialesGet(@Param("idtutorial") Integer idtutorial);
@Insert(TUTORIALES_INS)
public int tutorialesIns(Tutoriales tutoriales);
@Update(TUTORIALES_UPD)
public int tutorialesUpd(Tutoriales tutoriales);
// Deletes every row whose id appears in the comma-separated "ids" string.
// See the SECURITY warning on TUTORIALES_DEL below.
@Delete(TUTORIALES_DEL)
public int tutorialesDel(@Param("ids") String ids);
// SQL statements
String TUTORIALES_QRY
= "SELECT idtutorial, titulo, tipo, precio "
+ "FROM tutoriales ORDER BY titulo";
String TUTORIALES_GET
= "SELECT idtutorial, titulo, tipo, precio FROM tutoriales "
+ "WHERE idtutorial=#{idtutorial}";
String TUTORIALES_INS
= "INSERT INTO tutoriales(titulo, tipo, precio) "
+ "VALUES(#{titulo}, #{tipo}, #{precio})";
String TUTORIALES_UPD
= "UPDATE tutoriales SET "
+ "titulo=#{titulo}, tipo=#{tipo}, precio=#{precio} "
+ "WHERE idtutorial=#{idtutorial}";
// SECURITY WARNING: ${ids} is raw string substitution, not a bound #{} parameter,
// so this statement is vulnerable to SQL injection if "ids" ever carries
// user-controlled data. Prefer a <script>/<foreach>-built IN clause with #{}
// placeholders (requires changing the parameter to a List, so only flagged here).
String TUTORIALES_DEL
= "DELETE FROM tutoriales WHERE idtutorial IN(${ids})";
}
|
package crazypants.enderio.machine;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Registry of {@link IMachineRecipe}s, keyed first by machine name and then by
 * recipe UID.
 * <p>
 * NOTE(review): backed by plain HashMaps with no synchronization; access is
 * assumed to be single-threaded (mod init / server thread) - confirm.
 */
public class MachineRecipeRegistry {

  public static final MachineRecipeRegistry instance = new MachineRecipeRegistry();

  private final Map<String, Map<String, IMachineRecipe>> machineRecipes = new HashMap<String, Map<String, IMachineRecipe>>();

  /**
   * Registers a recipe for the given machine, replacing any previously
   * registered recipe with the same UID.
   */
  public void registerRecipe(String machine, IMachineRecipe recipe) {
    getRecipesForMachine(machine).put(recipe.getUid(), recipe);
  }

  /**
   * Returns the live, mutable UID-to-recipe map for a machine, creating and
   * caching an empty map on first access. Never returns null.
   */
  public Map<String, IMachineRecipe> getRecipesForMachine(String machineName) {
    Map<String, IMachineRecipe> res = machineRecipes.get(machineName);
    if(res == null) {
      res = new HashMap<String, IMachineRecipe>();
      machineRecipes.put(machineName, res);
    }
    return res;
  }

  /**
   * Linearly searches all machines for a recipe with the given UID.
   *
   * @return the matching recipe, or null when uid is null or unknown.
   */
  public IMachineRecipe getRecipeForUid(String uid) {
    if(uid == null) {
      return null;
    }
    for (Map<String, IMachineRecipe> recipes : machineRecipes.values()) {
      for (IMachineRecipe recipe : recipes.values()) {
        if(uid.equals(recipe.getUid())) {
          return recipe;
        }
      }
    }
    return null;
  }

  /**
   * Returns the first recipe of the machine that accepts the given inputs, or
   * null when none matches.
   */
  public IMachineRecipe getRecipeForInputs(String machineName, MachineRecipeInput... inputs) {
    // Fix: getRecipesForMachine() never returns null, so the original
    // "if(recipes == null) return null;" guard was dead code and was removed.
    for (IMachineRecipe recipe : getRecipesForMachine(machineName).values()) {
      if(recipe.isRecipe(inputs)) {
        return recipe;
      }
    }
    return null;
  }

  /**
   * Returns every recipe of the machine that considers the single input valid;
   * an empty list when input is null or nothing matches.
   */
  public List<IMachineRecipe> getRecipesForInput(String machineName, MachineRecipeInput input) {
    if(input == null) {
      return Collections.emptyList();
    }
    List<IMachineRecipe> result = new ArrayList<IMachineRecipe>();
    for (IMachineRecipe recipe : getRecipesForMachine(machineName).values()) {
      if(recipe.isValidInput(input)) {
        result.add(recipe);
      }
    }
    return result;
  }
}
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2018 Playtika
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.playtika.test.couchbase;
import com.playtika.test.couchbase.legacy.LegacyClientConfiguration;
import com.playtika.test.couchbase.springdata.CouchbaseConfiguration;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.test.context.junit4.SpringRunner;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(SpringRunner.class)
@SpringBootTest(
        classes = {CouchbaseConfiguration.class, LegacyClientConfiguration.class}
        , properties = {"spring.profiles.active=enabled"}
)
public abstract class EmbeddedCouchbaseBootstrapConfigurationTest {

    @Autowired
    ConfigurableEnvironment environment;

    /**
     * Verifies that the embedded-couchbase bootstrap exposed every expected
     * property, both through the Spring environment and as JVM system
     * properties.
     */
    @Test
    public void propertiesAreAvailable() {
        final String[] environmentKeys = {
                "embedded.couchbase.bootstrapHttpDirectPort",
                "embedded.couchbase.bootstrapCarrierDirectPort",
                "embedded.couchbase.host",
                "embedded.couchbase.bucket",
                "embedded.couchbase.user",
                "embedded.couchbase.password"
        };
        for (String key : environmentKeys) {
            assertThat(environment.getProperty(key)).isNotEmpty();
        }
        final String[] systemKeys = {
                "com.couchbase.bootstrapHttpDirectPort",
                "com.couchbase.bootstrapCarrierDirectPort"
        };
        for (String key : systemKeys) {
            assertThat(System.getProperty(key)).isNotEmpty();
        }
    }
}
|
package proC.physicsWorld;
import java.io.Serializable;
/****************************************************************************
* Copyright (C) 1999-2001 by the Massachusetts Institute of Technology,
* Cambridge, Massachusetts.
*
* All Rights Reserved
*
* Permission to use, copy, modify, and distribute this software and
* its documentation for any purpose and without fee is hereby
* granted, provided that the above copyright notice appear in all
* copies and that both that copyright notice and this permission
* notice appear in supporting documentation, and that MIT's name not
* be used in advertising or publicity pertaining to distribution of
* the software without specific, written prior permission.
*
* THE MASSACHUSETTS INSTITUTE OF TECHNOLOGY DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE MASSACHUSETTS
* INSTITUTE OF TECHNOLOGY BE LIABLE FOR ANY SPECIAL, INDIRECT OR
* CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
* OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
* NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*
* @author: Matt Frank, MIT Laboratory for Computer Science,
* mfrank@lcs.mit.edu
* 1999-Apr-03
*
* @author: Rob Pinder, Phil Sarin, Lik Mui
* Spring 2000
 * Exception handling and argument type refinement
*
* @author: Jeffrey Sheldon (jeffshel@mit.edu)
* Fall 2000, Spring 2001
* Major rewrites and improvements to iterative solving
*
* @author: Jeremy Nimmer (jwnimmer@alum.mit.edu)
* Fall 2000, Spring 2001
* Editorial role (testing and specification editing)
*
* Version: $Id: Geometry.java,v 1.1 2002/08/21 21:49:40 kirky Exp $
*
***************************************************************************/
/**
* The Geometry library contains procedural abstractions which are useful
* in modeling the physical interactions between objects.
*
* <p>The library is described in terms of these concepts:
* <br><ul>
* <li> object - a ball or a bouncer
* <li> ball - a circle with position and velocity
* <li> bouncer - a line segment or circle with position and angular velocity
* </ul>
*
* <p>
* The intended use of the Geometry library is as follows:
*
* <p><ol><li>
* The client calls the timeUntilCollision() methods to calculate the
* times at which the ball(s) will collide with each of the bouncers
* or with another ball.
* The minimum of all these times (call it "mintime") is the
* time of the next collision.
*
* <li>
* The client updates the position of the ball(s) and the bouncers to
* account for mintime passing. At this point, the ball and the object
* it is about to hit are exactly adjacent to one another.
*
* <li>
* The client calls the appropriate reflect() method to calculate the
* change in the ball's velocity.
*
* <li>The client updates the ball's velocity and repeats back to step 1.
*
* </ol>
*
* <p><a name="constant_velocity"></a>
*
* <p>The timeUntilCollision() methods assume constant ball velocity.
* That is, no force will be acting on the ball, so it will follow a
* straight-line path. Therefore, if external forces (such as gravity
* or friction) need to be accounted for, the client must do so before
 * or after the "time until / update position / reflect" series
 * of steps - never in between those three steps.
*
* <p><a name="endpoint_effects"></a>
*
* <b>Important note</b>:
* The methods which deal with line segment bouncers do NOT deal with
* the end-points of the segment. To ensure realistic behavior, shapes
* should be constructed from a combination of line segments with
* zero-radius circles at the end points.
*
* <p>
* For example: A ball is located at (0,0) and is moving in the
* (1,1) direction towards two line segments; one segments spans the
* points (1,1),(1,2) and the other spans (1,1),(2,1).
* The ball will hit the ends of both line segments at a 45 degree angle and
* something REALLY WEIRD will happen. However, if a circle with zero radius
* is placed at (1,1) then the ball will bounce off the circle in the
* expected manner.
**/
public class Geometry {
// nobody should be constructing a "Geometry"
private Geometry() {
}
private static GeometryInterface geometry = new GeometryImpl();
/**
* @param impl the object to be used as the singleton
*
* @effects changes which implementation of
* <code>GeometryInterface</code> will be used to service the static
* methods of this class. Most users will prefer to use
* <code>setForesight</code> or <code>setTuningParameters</code>
* instead.
*
* @see #setForesight
* @see #setTuningParameters
**/
public static void setGeometry(GeometryInterface impl) {
if (impl == null) {
throw new IllegalArgumentException();
}
geometry = impl;
}
/**
* Modifies the behavior of this class to use the specified
* <code>maximumForesight</code> and <code>numberOfSlices</code>. If
* <code>useDoughnut</code> is true then doughnut optimizations are
* enabled. The values used by default are <+Inf, true, 15>.
* Many uses may prefer to simply use <code>setForesight</code>
* instead.
*
* @param maximumForesight The maximal time in the future that a
* collision will be searched for. Collisions may still be returned
* that happen farther than <code>maximumForesight</code> in the
* future, but no extra effort will be made to find them. If set to
* +Infinity, <code>useDoughnut</code> must also be true.
*
* @param useDoughnut When true, the timeUntilRotating* methods
* perform extra calculations to reduce the time during which
* collisions are searched for. If maximumForesight is small, it is
* sometimes quicker to skip these additional checks. Must be true
* if maximumForesight is +Infinity.
*
* @param numberOfSlices The number of slices that the time being
* searched for a possible collision is divided into. Since some
* methods (noteably timeUntilRotating*) cannot use closed form
* formula, they must search for possible collisions over some time
* frame. Increasing the size of this will decrease the likelihood
* of one of the timeUntilRotating* methods missing a collision, but
* will also cause them to run slower.
*
* @see #setForesight
* @see Double#POSITIVE_INFINITY
**/
public static void setTuningParameters(double maximumForesight,
boolean useDoughnut,
int numberOfSlices) {
if (useDoughnut) {
setGeometry(new GeometryImpl(maximumForesight, numberOfSlices));
} else {
setGeometry(new SimpleGeometry(maximumForesight, numberOfSlices));
}
}
/**
* Modifies the behavior of this class to use the specified
* <code>maximumForesight</code>.
*
* @param maximumForesight The maximal time in the future that a
* collision will be searched for. Collisions may still be returned
* that happen farther than <code>maximumForesight</code> in the
* future, but no extra effort will be made to find them.
*
* @see Double#POSITIVE_INFINITY
**/
public static void setForesight(double maximumForesight) {
if (maximumForesight <= 0.1) {
setGeometry(new SimpleGeometry(maximumForesight, 15));
} else {
setGeometry(new GeometryImpl(maximumForesight, 15));
}
}
/**
* <code>DoublePair</code> is a simple immutable record type representing
* a pair of <code>double</code>s.
**/
public static class DoublePair
implements Serializable
{
public final double d1;
public final double d2;
/**
* Creates a DoublePair with <code>d1</code> and
* <code>d2</code> as given
**/
public DoublePair (double d1, double d2) {
this.d1 = d1;
this.d2 = d2;
}
/**
* Creates a DoublePair with <code>d1</code> and
* <code>d2</code> both set to the given argumen
**/
public DoublePair (double both) {
this(both, both);
}
public boolean areFinite() {
return !Double.isInfinite(d1) && !Double.isInfinite(d2) &&
!Double.isNaN(d1) && !Double.isNaN(d2);
}
public String toString() {
return "[" + d1 + "," + d2 + "]";
}
public boolean equals(Object o) {
return (o instanceof DoublePair) && equals((DoublePair) o);
}
public boolean equals(DoublePair p) {
if (p == null) return false;
return (d1 == p.d1) && (d2 == p.d2);
}
public int hashCode() {
return (new Double(d1)).hashCode() + (new Double(d2)).hashCode();
}
}
/**
* <code>VectPair</code> is a simple immutable record type representing
* a pair of <code>Vect</code>s.
* @see Vect
**/
public static class VectPair
implements Serializable
{
public final Vect v1;
public final Vect v2;
/**
* Creates a VectPair with <code>v1</code> and
* <code>v2</code> as given
**/
public VectPair(Vect v1, Vect v2) {
this.v1 = v1;
this.v2 = v2;
}
public String toString() {
return "[" + v1 + "," + v2 + "]";
}
public boolean equals(Object o) {
return (o instanceof VectPair) && equals((VectPair) o);
}
public boolean equals(VectPair p) {
if (p == null) return false;
return
((v1 == null) ? (p.v1 == null) : v1.equals(p.v1)) &&
((v2 == null) ? (p.v2 == null) : v2.equals(p.v2));
}
public int hashCode() {
return
((v1 == null) ? 0 : (3 * v1.hashCode())) +
((v2 == null) ? 0 : (7 * v2.hashCode()));
}
}
/**
* DoublePair with both <code>d1</code> and <code>d2</code>
* set to <code>Double.NaN</code>
* @see Double#NaN
**/
public static final DoublePair DOUBLE_PAIR_NAN =
new DoublePair(Double.NaN);
/**
* Solves the quadratic equation.
*
* @return a pair containing the roots of the equation
* a*x<sup>2</sup> + b*x + c = 0 with the lesser of the two roots
* in <code>result.d1</code>. If no real roots exist, the
* returned pair will contain <code>NaN</code> for both values.
*
* @see Double#NaN
**/
public static DoublePair quadraticSolution(double a, double b, double c) {
return geometry.quadraticSolution(a, b, c);
}
/**
* Solves the quadratic equation.
*
* @return the lesser of the two roots of the quadratic
* equation specified by a*x<sup>2</sup> + b*x + c = 0, or
* <code>NaN</code> if no real roots exist.
*
* @see Double#NaN
**/
public static double minQuadraticSolution(double a,
double b,
double c) {
return geometry.minQuadraticSolution(a, b, c);
}
/***************************************************************************
*
* METHODS FOR LINE SEGMENTS
*
* Suppose we have a line running through the points <x,y> and <w,z>.
* And we have a point <a,b>. We'd like to find the distance from the
* point to the line. We can calculate this by finding the minimum
* distance between the point and all points on the line. (Write the
* line as a function of s: j[s] = x + (w-x)s, k[s] = y + (z-y)s,
* then write the distance squared as a function of s:
* (a - j[s])^2 + (b - k[s])^2
* Take the derivative with respect to s and set it equal 0. The
* result is that the distance squared between the point and the line
* is:
*
* (b(x-w) - a(y-z) + (w y - x z))^2 / ((x-w)^2 + (y-z)^2)
*
*
* Furthermore, the point on the line that is perpendicular to the
* point is given by:
*
* minS = ((w-x)(a-x) + (z-y)(b-y)) / ((w-x)^2 + (y-z)^2)
* minX = j[minS], minY = k[minS]
*
* Okay, now assume that the point is moving. a[t] = u t + c,
* b[t] = v t + d. We want to find the time, t, at which the distance
* between the point and the line will be exactly "r".
*
* Then the numerator of the previous expression will be a quadratic
* expression of the variable t, with At^2 + Bt + C where
*
* F = (v(x-w) - u(y-z))
* G = (d(x-w) - c(y-z) + (w y - x z))
* H = ((x-w)^2 + (y-z)^2)
*
* A = F^2
* B = 2 F G
* C = G^2
*
* So to find the answer we let:
* Cprime = C - r^2 H
*
* and finally:
*
* t = (-B +/- Sqrt(B^2 - 4 A Cprime)) / (2 A)
*
***************************************************************************/
/**
* Returns the point on <code>line</code> which forms a line with
* <code>point</code> that is perpendicular to <code>line</code>.
*
* @requires <code>line</code> has non-zero length
*
* @return the point on <code>line</code> which forms a line with
* <code>point</code> that is perpendicular to <code>line</code>, or
* <code>null</code> if no such point exists within the given line
* segment.
*
* @see #perpendicularPointWholeLine(LineSegment, Vect)
**/
static public Vect perpendicularPoint(LineSegment line,
Vect point) {
return geometry.perpendicularPoint(line, point);
}
/**
* Returns the point on the infinitly long line represented by
* <code>line</code> which forms a line with <code>point</code> that
* is perpendicular to <code>line</code>.
*
* @requires <code>line</code> has non-zero length
*
* @return the point on the infinitly long line represented by
* <code>line</code> which forms a line with <code>point</code> that
* is perpendicular to <code>line</code>, or <code>null</code> if no
* such point exists within the given line segment.
*
* @see #perpendicularPoint(LineSegment, Vect)
**/
public static Vect perpendicularPointWholeLine(LineSegment line,
Vect point) {
return geometry.perpendicularPointWholeLine(line, point);
}
/**
* Accounts for the effects of inelastic collisions given the intial
* and resulting velocities of the collision assuming elasticity.
*
* @requires <code>rCoeff</code> >= 0
*
* @effects given an initial velocity, <code>incidentVect</code>,
* and the velocity resulting from an elastic collision,
* <code>reflectedVect</code>, and a reflection coefficient,
* <code>rCoeff</code>, returns the resulting velocity of the
* collision had it been inelastic with the given reflection
* coefficient. If the reflection coefficient is 1.0, the resulting
* velocity will be equal to <code>reflectedVect</code>. A
* reflection coefficient of 0 implies that the collision will
* absorb any energy that was reflected in the elastic case.
*
* @param incidentVect the intial velocity of the ball
* @param reflectedVect the resulting velocity after the collision
* assuming elasticity.
* @param rCoeff the reflection coefficent.
*
* @return the resulting velocity after an inelastic collision.
**/
public static Vect applyReflectionCoeff(Vect incidentVect,
Vect reflectedVect,
double rCoeff) {
return geometry.applyReflectionCoeff(incidentVect,
reflectedVect,
rCoeff);
}
/**
* Computes the time until a ball, represented by a circle,
* travelling at a specified velocity collides with a specified line
* segment.
*
* @requires <code>line</code> has non-zero length
*
* @effects computes the time until a circular ball
* travelling at a specified velocity collides with a specified line
* segment. If no collision will occur, <tt>POSITIVE_INFINITY</tt> is
* returned. This method assumes that the ball will travel with
* constant velocity until impact.
*
* @param line the line segment representing a wall or (part of) an
* object that might be collided with
*
* @param ball a circle indicate the size and location of a ball
* which might collide with the given line segment
*
* @param velocity the velocity of the ball before impact
*
* @return the time until collision, or <tt>POSITIVE_INFINITY</tt> if
* the collision will not occur
*
* @see Double#POSITIVE_INFINITY
* @see <a href="#endpoint_effects">endpoint effects</a>
**/
public static double timeUntilWallCollision(LineSegment line,
Circle ball,
Vect velocity) {
return geometry.timeUntilWallCollision(line, ball, velocity);
}
/**
* Computes the new velocity of a ball after bouncing (reflecting)
* off a wall.
*
* @requires <code>line</code> has non-zero length,
* <code>reflectionCoeff</code> >= 0
*
* @effects computes the new velocity of a ball reflecting off of a
* wall. The velocity resulting from this method corresponds to
* collision against a surface with the given reflection
* coefficient. A reflection coefficient of 1 indiciates a
* perfectly elastic collision. This method assumes that the ball
* is at the point of impact.
*
* @param line the line segment representing the wall which is being hit
*
* @param velocity the velocity of the ball before impact
*
* @param reflectionCoeff the reflection coefficent
*
* @return the velocity of the ball after impacting the given wall
**/
public static Vect reflectWall(LineSegment line,
Vect velocity,
double reflectionCoeff) {
return geometry.reflectWall(line, velocity, reflectionCoeff);
}
/**
* Computes the new velocity of a ball after bouncing (reflecting)
* off a wall.
*
* @requires <code>line</code> has non-zero length
*
* @effects computes the new velocity of a ball reflecting off of a
* wall. The velocity resulting from this method corresponds to a
* perfectly elastic collision. This method assumes that the ball
* is at the point of impact.
*
* @param line the line segment representing the wall which is being hit
*
* @param velocity the velocity of the ball before impact
*
* @return the velocity of the ball after impacting the given wall
**/
public static Vect reflectWall(LineSegment line,
Vect velocity) {
return geometry.reflectWall(line, velocity);
}
/****************************************************************************
*
* METHODS FOR CIRCLES
*
***************************************************************************/
/**
* @return the square of the distance between two points
* represented by <code>v1</code> and <code>v2</code>.
**/
static public double distanceSquared(Vect v1, Vect v2) {
return geometry.distanceSquared(v1, v2);
}
/**
* @return the square of the distance between two points
* represented by <code>(x1, y1)</code> and <code>(x2,
* y2)</code>.
**/
static public double distanceSquared(double x1, double y1,
double x2, double y2) {
return geometry.distanceSquared(x1, y1, x2, y2);
}
/**
* Computes the time until a ball represented by a circle,
* travelling at a specified velocity collides with a specified
* circle.
*
* @requires ball.radius > 0
*
* @effects computes the time until a ball represented by a circle,
* travelling at a specified velocity collides with a specified
* circle. If no collision will occur <tt>POSITIVE_INFINITY</tt> is
* returned. This method assumes the ball travels with constant
* velocity until impact.
*
* @param circle a circle representing the circle with which the
* ball may collide
*
* @param ball a circle representing the size and initial location
* of the ball
*
* @param velocity the velocity of the ball before impact
*
* @return the time until collision or <tt>POSITIVE_INFINITY</tt> if
* the collision will not occur
*
* @see Double#POSITIVE_INFINITY
**/
static public double timeUntilCircleCollision(Circle circle,
Circle ball,
Vect velocity) {
return geometry.timeUntilCircleCollision(circle, ball, velocity);
}
/**
* Computes the new velocity of a ball reflecting off of a
* circle.
*
* @requires <code>reflectionCoeff</code> >= 0
*
* @effects computes the new velocity of a ball reflecting off of a
* circle. The velocity resulting from this method corresponds to a
* collision against a surface with the given reflection
* coefficient. A reflection coefficient of 1 indicates a perfectly
* elastic collision. This method assumes that the ball is at the
* point of impact.
*
* @param circle the center point of the circle which is being hit
*
* @param ball the center point of the ball
*
* @param velocity the velocity of the ball before impact
*
* @param reflectionCoeff the reflection coefficient
*
* @return the velocity of the ball after impacting the given circle
**/
public static Vect reflectCircle(Vect circle,
Vect ball,
Vect velocity,
double reflectionCoeff) {
return geometry.reflectCircle(circle, ball, velocity, reflectionCoeff);
}
/**
* Computes the new velocity of a ball reflecting off of a
* circle.
*
* @effects computes the new velocity of a ball reflecting off of a
* circle. The velocity resulting from this method corresponds to a
* perfectly elastic collision. This method assumes that the ball
* is at the point of impact.
*
* @param circle the center point of the circle which is being hit
*
* @param ball the center point of the ball
*
* @param velocity the velocity of the ball before impact
*
* @return the velocity of the ball after impacting the given circle
**/
public static Vect reflectCircle(Vect circle,
Vect ball,
Vect velocity) {
return geometry.reflectCircle(circle, ball, velocity);
}
/****************************************************************************
*
* METHODS FOR ROTATING LINE SEGMENTS AND CIRCLES
*
***************************************************************************/
/**
* Rotates the point represented by <code>p</code> by
* <code>a</code> around the center of rotation, <code>cor</code>,
* and returns the result.
*
* @effects rotates the point represented by <code>p</code> by
* <code>a</code> around the center of rotation, <code>cor</code>,
* and returns the result.
*
* @param point the initial location of the point to be rotated
*
* @param cor the point indicating the center of rotation
*
* @param a the amount by which to rotate <code>point</code>
*
* @return point <code>point</code> rotated around <code>cor</code>
* by <code>a</code>
**/
public static Vect rotateAround(Vect point, Vect cor, Angle a) {
return geometry.rotateAround(point, cor, a);
}
/**
* Rotates the line segment represented by
* <code>line</code> by <code>a</code> around the center of
* rotation, <code>cor</code>, and returns the result.
*
* @effects rotates the line segment represented by
* <code>line</code> by <code>a</code> around the center of
* rotation, <code>cor</code>, and returns the result.
*
* @param line the initial location of the line segment to be rotated
*
* @param cor the point indicating the center of rotation
*
* @param a the amount by which to rotate <code>point</code>
*
* @return line segment <code>line</code> rotated around <code>cor</code>
* by <code>a</code>
**/
public static LineSegment rotateAround(LineSegment line, Vect cor, Angle a) {
return geometry.rotateAround(line, cor, a);
}
/**
* Rotates the circle represented by
* <code>circle</code> by <code>a</code> around the center of
* rotation, <code>cor</code>, and returns the result.
*
* @effects rotates the circle represented by
* <code>circle</code> by <code>a</code> around the center of
* rotation, <code>cor</code>, and returns the result.
*
* @param circle the initial location of the circle to be rotated
*
* @param cor the point indicating the center of rotation
*
* @param a the amount by which to rotate <code>point</code>
*
* @return circle <code>circle</code> rotated around <code>cor</code>
* by <code>a</code>
**/
public static Circle rotateAround(Circle circle, Vect cor, Angle a) {
return geometry.rotateAround(circle, cor, a);
}
/**
* Computes the times when the point moving along the given
* trajectory will intersect the given circle
*
* @param circle circle to find collisions with
*
* @param point initial position of the point
*
* @param velocity linear velocity of the point
*
* @return the times until intersection, with lesser result in d1,
* or <tt>+Inf</tt>s if no collisions will occur
*
* @see Double#POSITIVE_INFINITY
**/
public static DoublePair timeUntilCircleCollision(Circle circle,
Vect point,
Vect velocity)
{
return geometry.timeUntilCircleCollision(circle, point, velocity);
}
/**
* Computes the time until a ball travelling at a specified
* velocity collides with a rotating line segment.
*
* @requires <code>line</code> has non-zero length
*
* @effects computes the time until a circular ball
* travelling at a specified velocity collides with a specified line
* segment which is rotating at a fixed angular velocity about a
* fixed center of rotation.
*
* <p><img src="doc-files/rotate_line.gif">
*
* @param line the initial position of the rotating line segment (wall)
*
* @param center the center of rotation for <code>line</code>
*
* @param angularVelocity the angular velocity of the rotation of
* <code>line</code> in radians per second. A positive angular
* velocity denotes a rotation in the direction from the positive
* x-axis to the positive y-axis.
*
* @param ball the size and initial location of the ball
*
* @param velocity the initial velocity of the ball. The ball is
* assumed to travel at a constant velocity until impact.
*
* @return the time until collision or <tt>POSITIVE_INFINITY</tt> if no
* collision was detected.
*
* @see Double#POSITIVE_INFINITY
* @see <a href="#endpoint_effects">endpoint effects</a>
**/
public static double timeUntilRotatingWallCollision(LineSegment line,
Vect center,
double angularVelocity,
Circle ball,
Vect velocity)
{
return geometry.timeUntilRotatingWallCollision(line,
center,
angularVelocity,
ball,
velocity);
}
/**
* Computes the new velocity of a ball reflecting off of a
* wall which is rotating about a point with constant angular
* velocity.
*
* @requires <code>line</code> has non-zero length
* && the ball is at the point of impact
*
* @effects computes the new velocity of a ball reflecting off of a
* wall which is rotating about a point with constant angular
* velocity. The velocity resulting from this method corresponds to
* a perfectly elastic collision. This method assumes that the ball
* is at the point of impact. If the ball does not hit in between
* the endpoints of <code>line</code>, <code>velocity</code> is
* returned.
*
* @param line the line segment representing the initial position of
* the rotating wall
*
* @param center the point about which <code>line</code> rotates
*
* @param angularVelocity the angular velocity at which
* <code>line</code> rotates, in radians per second. A positive angular
* velocity denotes a rotation in the direction from the positive
* x-axis to the positive y-axis.
*
* @param velocity the velocity of the ball before impact
*
* @return the velocity of the ball after impacting the wall
**/
public static Vect reflectRotatingWall(LineSegment line,
Vect center,
double angularVelocity,
Circle ball,
Vect velocity) {
return geometry.reflectRotatingWall(line,
center,
angularVelocity,
ball,
velocity);
}
/**
* Computes the new velocity of a ball reflecting off of a
* wall which is rotating about a point with constant angular
* velocity.
*
* @requires <code>line</code> has non-zero length
* && the ball is at the point of impact
* && <code>reflectionCoeff</code> >= 0
*
* @effects computes the new velocity of a ball reflecting off of a
* wall which is rotating about a point with constant angular
* velocity. The velocity resulting from this method corresponds to
* a collision against a surface of the given reflection
* coefficient. A reflection coefficient of 1 indicates a perfectly
* elastic collision. This method assumes that the ball is at the
* point of impact. If the ball does not hit in between the
* endpoints of <code>line</code>, <code>velocity</code> is
* returned.
*
* @param line the line segment representing the initial position of
* the rotating wall
*
* @param center the point about which <code>line</code> rotates
*
* @param angularVelocity the angular velocity at which
* <code>line</code> rotates, in radians per second. A positive angular
* velocity denotes a rotation in the direction from the positive
* x-axis to the positive y-axis.
*
* @param velocity the velocity of the ball before impact
*
* @param reflectionCoeff the reflection coefficient
*
* @return the velocity of the ball after impacting the wall
**/
public static Vect reflectRotatingWall(LineSegment line,
Vect center,
double angularVelocity,
Circle ball,
Vect velocity,
double reflectionCoeff)
{
return geometry.reflectRotatingWall(line,
center,
angularVelocity,
ball,
velocity,
reflectionCoeff);
}
/**
* Computes the time until a ball travelling at a specified
* velocity collides with a rotating circle.
*
* @effects computes the time until a circular ball
* travelling at a specified velocity collides with a specified circle
* that is rotating about a given center of rotation at a given
* angular velocity. If no collision will occurr <tt>POSITIVE_INFINITY</tt>
* is returned. This method assumes the
* ball will travel with constant velocity until impact.
*
* <p>
* <img src="doc-files/rotate_circle.gif">
*
* @param circle a circle representing the initial location and size
* of the rotating circle
*
* @param center the point around which the circle is rotating
*
* @param angularVelocity the angular velocity with which
* <code>circle</code> is rotating about <code>center</code>, in
* radians per second. A positive angular velocity denotes a
* rotation in the direction from the positive x-axis to the
* positive y-axis.
*
* @param ball a circle representing the size and initial position
* of the ball
*
* @param velocity the velocity of the ball before impact
*
* @see Double#POSITIVE_INFINITY
**/
public static double timeUntilRotatingCircleCollision(Circle circle,
Vect center,
double angularVelocity,
Circle ball,
Vect velocity)
{
return geometry.timeUntilRotatingCircleCollision(circle,
center,
angularVelocity,
ball,
velocity);
}
/**
* Computes the new velocity of a ball reflected off of a rotating
* circle.
*
* @requires the ball is at the point of impact
*
* @effects computes the new velocity of a ball reflected off of a
* circle which is rotating with constant angular velocity around a
* point. The velocity resulting from this method corresponds to a
* perfectly elastic collision.
*
* @param circle the rotating circle
*
* @param center the point about which <code>circle</code> is
* rotating
*
* @param angularVelocity the angular velocity with which
* <code>circle</code> is rotating about <code>center</code>, in
* radians per second. A positive angular velocity denotes a
* rotation in the direction from the positive x-axis to the
* positive y-axis.
*
* @param ball the size and position of the ball before impact
*
* @param velocity the velocity of the ball before impact
*
* @return the velocity of the ball after impacting the rotating
* circle
**/
public static Vect reflectRotatingCircle(Circle circle,
Vect center,
double angularVelocity,
Circle ball,
Vect velocity) {
return geometry.reflectRotatingCircle(circle,
center,
angularVelocity,
ball,
velocity);
}
/**
* Computes the new velocity of a ball reflected off of a rotating
* circle.
*
* @requires the ball is at the point of impact
*
* @effects computes the new velocity of a ball reflected off of a
* circle which is rotating with constant angular velocity around a
* point. The velocity resulting from this method corresponds to a
* collision against a surface with the given reflection
* coefficient. A reflection coefficient of 1.0 indicates a
* perfectly elastic collision.
*
* @param circle the rotating circle
*
* @param center the point about which <code>circle</code> is
* rotating
*
* @param angularVelocity the angular velocity with which
* <code>circle</code> is rotating about <code>center</code>, in
* radians per second. A positive angular velocity denotes a
* rotation in the direction from the positive x-axis to the
* positive y-axis.
*
* @param ball the size and position of the ball before impact
*
* @param velocity the velocity of the ball before impact
*
* @param reflectionCoeff the reflection coefficient
*
* @return the velocity of the ball after impacting the rotating
* circle
**/
public static Vect reflectRotatingCircle(Circle circle,
Vect center,
double angularVelocity,
Circle ball,
Vect velocity,
double reflectionCoeff)
{
return geometry.reflectRotatingCircle(circle,
center,
angularVelocity,
ball,
velocity,
reflectionCoeff);
}
/****************************************************************************
*
* METHODS FOR MULTI-BALL SIMULATIONS
*
***************************************************************************/
/**
* Computes the time until two balls collide.
*
* @effects computes the time until two balls, represented by two
* circles, travelling at specified constant velocities, collide.
* If no collision will occur <tt>POSITIVE_INFINITY</tt> is returned.
* This method assumes that both balls will travel at constant
* velocity until impact.
*
* @param ball1 a circle representing the size and initial position
* of the first ball.
*
* @param vel1 the velocity of the first ball before impact
*
* @param ball2 a circle representing the size and initial position
* of the second ball.
*
* @param vel2 the velocity of the second ball before impact
*
* @return the time until collision or <tt>POSITIVE_INFINITY</tt> if the
* collision will not occur
*
* @see Double#POSITIVE_INFINITY
**/
public static double timeUntilBallBallCollision(Circle ball1,
Vect vel1,
Circle ball2,
Vect vel2) {
return geometry.timeUntilBallBallCollision(ball1, vel1,
ball2, vel2);
}
/**
* Computes the resulting velocities of two balls which collide.
*
* @requires mass1 > 0 && mass2 > 0 && the distance between the two
* balls is approximately equal to the sum of their radii; that is,
* the balls are positioned at the point of impact.
*
* @effects computes the resulting velocities of two balls which
* collide.
*
* @param center1 the position of the center of the first ball
*
* @param mass1 the mass of the first ball
*
* @param velocity1 the velocity of the first ball before impact
*
* @param center2 the position of the center of the second ball
*
* @param mass2 the mass of the second ball
*
* @param velocity2 the velocity of the second ball before impact
*
* @return a <code>VectPair</code>, where the first <code>Vect</code> is
* the velocity of the first ball after the collision and the second
* <code>Vect</code> is the velocity of the second ball after the collision.
**/
public static VectPair reflectBalls(Vect center1,
double mass1,
Vect velocity1,
Vect center2,
double mass2,
Vect velocity2) {
return geometry.reflectBalls(center1,
mass1,
velocity1,
center2,
mass2,
velocity2);
}
}
|
package com.baeldung.java8.lambda.tips;
import java.util.concurrent.Callable;
import java.util.function.Supplier;
public interface Processor {

    /**
     * Produces a result string using the given {@link Callable} as the input source.
     *
     * @param c source of the value to process
     * @return the processed string
     * @throws Exception if the callable throws; propagated unchanged
     */
    String processWithCallable(Callable<String> c) throws Exception;

    /**
     * Produces a result string using the given {@link Supplier} as the input source.
     * Unlike {@link #processWithCallable(Callable)}, the supplier cannot throw a
     * checked exception, so this method declares none.
     *
     * @param s source of the value to process
     * @return the processed string
     */
    String processWithSupplier(Supplier<String> s);
}
|
package ca.corefacility.bioinformatics.irida.ria.web.ajax;
import java.util.Locale;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.security.oauth2.provider.NoSuchClientException;
import org.springframework.web.bind.annotation.*;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.ajax.AjaxErrorResponse;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.ajax.AjaxResponse;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.ajax.AjaxSuccessResponse;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.clients.ClientTableModel;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.clients.ClientTableRequest;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.clients.CreateUpdateClientDetails;
import ca.corefacility.bioinformatics.irida.ria.web.models.tables.TableResponse;
import ca.corefacility.bioinformatics.irida.ria.web.services.UIClientService;
/**
 * Controller to handle ajax request for IRIDA Clients.
 * All endpoints require the ADMIN role.
 */
@RestController
@RequestMapping("/ajax/clients")
public class ClientsAjaxController {
	private final UIClientService service;
	private final MessageSource messageSource;

	@Autowired
	public ClientsAjaxController(UIClientService service, MessageSource messageSource) {
		this.service = service;
		this.messageSource = messageSource;
	}

	/**
	 * Get a page in the clients listing table based on the table request.
	 *
	 * @param request Information about the current location in the Clients table
	 * @return The current page of the clients table
	 */
	@RequestMapping("/list")
	@PreAuthorize("hasRole('ROLE_ADMIN')")
	public ResponseEntity<TableResponse<ClientTableModel>> getClientsList(@RequestBody ClientTableRequest request) {
		return ResponseEntity.ok(service.getClientList(request));
	}

	/**
	 * Revoke all tokens for a client given its identifier
	 *
	 * @param id Identifier for a specific client
	 */
	@DeleteMapping("/revoke")
	@PreAuthorize("hasRole('ROLE_ADMIN')")
	public void deleteClientTokens(@RequestParam Long id) {
		service.deleteClientTokens(id);
	}

	/**
	 * Check to see if the client identifier that a user wants to use for a new client is not already used
	 *
	 * @param clientId Name to check if exists
	 * @param locale   Users current locale
	 * @return {@code 200 OK} with an empty body when the id is available, or
	 *         {@code 409 CONFLICT} with a localized message when it is already in use.
	 */
	@RequestMapping("/validate")
	@PreAuthorize("hasRole('ROLE_ADMIN')")
	public ResponseEntity<String> validateClientName(@RequestParam String clientId, Locale locale) {
		try {
			// validateClientId throws NoSuchClientException when the id is NOT yet in use,
			// so completing the call without an exception means the id is taken -> conflict.
			service.validateClientId(clientId);
			return ResponseEntity.status(HttpStatus.CONFLICT)
					.body(messageSource.getMessage("server.AddClientForm.error", new Object[] { clientId }, locale));
		} catch (NoSuchClientException e) {
			return ResponseEntity.ok("");
		}
	}

	/**
	 * Create a new client.
	 *
	 * @param request Details about the client to create
	 * @param locale  users current locale
	 * @return Http response containing the result
	 */
	@PostMapping
	@PreAuthorize("hasRole('ROLE_ADMIN')")
	public ResponseEntity<AjaxResponse> createClient(@RequestBody CreateUpdateClientDetails request, Locale locale) {
		return saveClient(request, locale, "server.AddClientForm.error");
	}

	/**
	 * Update the details of a client
	 *
	 * @param request Updated details about a client.
	 * @param locale  Current users locale
	 * @return Client id if success or an error message if there was an error during the update
	 */
	@PutMapping
	@PreAuthorize("hasRole('ROLE_ADMIN')")
	public ResponseEntity<AjaxResponse> updateClient(@RequestBody CreateUpdateClientDetails request, Locale locale) {
		return saveClient(request, locale, "server.UpdateClientForm.error");
	}

	/**
	 * Delete a client
	 *
	 * @param id identifier for a client to delete
	 */
	@DeleteMapping
	@PreAuthorize("hasRole('ROLE_ADMIN')")
	public void deleteClient(@RequestParam Long id) {
		service.deleteClient(id);
	}

	/**
	 * Create a secret for a client
	 *
	 * @param id identifier for the client to update.
	 */
	@PutMapping("/secret")
	@PreAuthorize("hasRole('ROLE_ADMIN')")
	public void regenerateClientSecret(@RequestParam Long id) {
		service.regenerateClientSecret(id);
	}

	/**
	 * Shared implementation for creating and updating a client; the two endpoints
	 * previously duplicated this try/catch differing only in the message key.
	 *
	 * @param request  details of the client to save
	 * @param locale   current users locale
	 * @param errorKey message bundle key used when the save fails
	 * @return 200 with the result on success, 409 with a localized error otherwise
	 */
	private ResponseEntity<AjaxResponse> saveClient(CreateUpdateClientDetails request, Locale locale,
			String errorKey) {
		try {
			return ResponseEntity.ok(new AjaxSuccessResponse(service.createOrUpdateClient(request, locale)));
		} catch (Exception exception) {
			// NOTE(review): broad catch preserved from the original; any failure is
			// reported as a CONFLICT with a localized message — confirm narrower
			// exception types once the service contract is known.
			return ResponseEntity.status(HttpStatus.CONFLICT)
					.body(new AjaxErrorResponse(
							messageSource.getMessage(errorKey, new Object[] { request.getClientId() }, locale)));
		}
	}
}
|
/*
* Copyright 2018 Confluent Inc.
*
* Licensed under the Confluent Community License (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at
*
* http://www.confluent.io/confluent-community-license
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package io.confluent.ksql.analyzer;
import static java.util.Objects.requireNonNull;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import io.confluent.ksql.analyzer.Analysis.AliasedDataSource;
import io.confluent.ksql.analyzer.Analysis.Into;
import io.confluent.ksql.analyzer.Analysis.JoinInfo;
import io.confluent.ksql.execution.ddl.commands.KsqlTopic;
import io.confluent.ksql.execution.expression.tree.ComparisonExpression;
import io.confluent.ksql.execution.expression.tree.Expression;
import io.confluent.ksql.execution.expression.tree.FunctionCall;
import io.confluent.ksql.execution.expression.tree.QualifiedColumnReferenceExp;
import io.confluent.ksql.execution.expression.tree.TraversalExpressionVisitor;
import io.confluent.ksql.execution.expression.tree.UnqualifiedColumnReferenceExp;
import io.confluent.ksql.execution.plan.SelectExpression;
import io.confluent.ksql.execution.windows.KsqlWindowExpression;
import io.confluent.ksql.metastore.MetaStore;
import io.confluent.ksql.metastore.model.DataSource;
import io.confluent.ksql.model.WindowType;
import io.confluent.ksql.name.ColumnName;
import io.confluent.ksql.name.FunctionName;
import io.confluent.ksql.name.SourceName;
import io.confluent.ksql.parser.DefaultTraversalVisitor;
import io.confluent.ksql.parser.NodeLocation;
import io.confluent.ksql.parser.tree.AliasedRelation;
import io.confluent.ksql.parser.tree.AllColumns;
import io.confluent.ksql.parser.tree.AstNode;
import io.confluent.ksql.parser.tree.GroupBy;
import io.confluent.ksql.parser.tree.GroupingElement;
import io.confluent.ksql.parser.tree.Join;
import io.confluent.ksql.parser.tree.JoinOn;
import io.confluent.ksql.parser.tree.Query;
import io.confluent.ksql.parser.tree.Select;
import io.confluent.ksql.parser.tree.SelectItem;
import io.confluent.ksql.parser.tree.SingleColumn;
import io.confluent.ksql.parser.tree.Sink;
import io.confluent.ksql.parser.tree.Table;
import io.confluent.ksql.parser.tree.WindowExpression;
import io.confluent.ksql.planner.plan.JoinNode;
import io.confluent.ksql.schema.ksql.Column;
import io.confluent.ksql.schema.ksql.FormatOptions;
import io.confluent.ksql.schema.ksql.LogicalSchema;
import io.confluent.ksql.serde.Format;
import io.confluent.ksql.serde.FormatFactory;
import io.confluent.ksql.serde.FormatInfo;
import io.confluent.ksql.serde.KeyFormat;
import io.confluent.ksql.serde.SerdeOption;
import io.confluent.ksql.serde.SerdeOptions;
import io.confluent.ksql.serde.ValueFormat;
import io.confluent.ksql.serde.WindowInfo;
import io.confluent.ksql.util.KsqlException;
import io.confluent.ksql.util.SchemaUtil;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
// CHECKSTYLE_RULES.OFF: ClassDataAbstractionCoupling
class Analyzer {
  // CHECKSTYLE_RULES.ON: ClassDataAbstractionCoupling

  private static final String KAFKA_VALUE_FORMAT_LIMITATION_DETAILS = ""
      + "The KAFKA format is primarily intended for use as a key format. "
      + "It can be used as a value format, but can not be used in any operation that "
      + "requires a repartition or changelog topic." + System.lineSeparator()
      + "Removing this limitation requires enhancements to the core of KSQL. "
      + "This will come in a future release. Until then, avoid using the KAFKA format for values."
      + System.lineSeparator() + "If you have an existing topic with "
      + "KAFKA formatted values you can duplicate the data and serialize using Avro or JSON with a "
      + "statement such as: "
      + System.lineSeparator()
      + System.lineSeparator()
      + "'CREATE STREAM <new-stream-name> WITH(VALUE_FORMAT='Avro') AS "
      + "SELECT * FROM <existing-kafka-formated-stream-name>;'"
      + System.lineSeparator()
      + "For more info see https://github.com/confluentinc/ksql/issues/3060";

  private final MetaStore metaStore;
  private final String topicPrefix;
  private final SerdeOptionsSupplier serdeOptionsSupplier;
  private final Set<SerdeOption> defaultSerdeOptions;

  /**
   * @param metaStore the metastore to use.
   * @param topicPrefix the prefix to use for topic names where an explicit name is not specified.
   * @param defaultSerdeOptions the default serde options.
   */
  Analyzer(
      final MetaStore metaStore,
      final String topicPrefix,
      final Set<SerdeOption> defaultSerdeOptions
  ) {
    this(
        metaStore,
        topicPrefix,
        defaultSerdeOptions,
        SerdeOptions::buildForCreateAsStatement);
  }

  @VisibleForTesting
  Analyzer(
      final MetaStore metaStore,
      final String topicPrefix,
      final Set<SerdeOption> defaultSerdeOptions,
      final SerdeOptionsSupplier serdeOptionsSupplier
  ) {
    this.metaStore = requireNonNull(metaStore, "metaStore");
    this.topicPrefix = requireNonNull(topicPrefix, "topicPrefix");
    this.defaultSerdeOptions = ImmutableSet
        .copyOf(requireNonNull(defaultSerdeOptions, "defaultSerdeOptions"));
    this.serdeOptionsSupplier = requireNonNull(serdeOptionsSupplier, "serdeOptionsSupplier");
  }

  /**
   * Analyze the query.
   *
   * @param query the query to analyze.
   * @param sink the sink the query will output to.
   * @return the analysis.
   */
  Analysis analyze(
      final Query query,
      final Optional<Sink> sink
  ) {
    final Visitor visitor = new Visitor(query, sink.isPresent());
    visitor.process(query, null);
    sink.ifPresent(visitor::analyzeNonStdOutSink);
    visitor.validate();
    return visitor.analysis;
  }

  // CHECKSTYLE_RULES.OFF: ClassDataAbstractionCoupling
  private final class Visitor extends DefaultTraversalVisitor<AstNode, Void> {
    // CHECKSTYLE_RULES.ON: ClassDataAbstractionCoupling

    private final Analysis analysis;
    private final boolean persistent;
    private final boolean pullQuery;
    private boolean isJoin = false;
    private boolean isGroupBy = false;

    Visitor(final Query query, final boolean persistent) {
      this.pullQuery = query.isPullQuery();
      this.analysis = new Analysis(query.getResultMaterialization());
      this.persistent = persistent;
    }

    /**
     * Populates {@code analysis} with the sink metadata for a query that writes
     * somewhere other than stdout (i.e. CREATE ... AS / INSERT INTO).
     */
    private void analyzeNonStdOutSink(final Sink sink) {
      analysis.setProperties(sink.getProperties());
      setSerdeOptions(sink);

      if (!sink.shouldCreateSink()) {
        // INSERT INTO an existing source: the sink must already be registered.
        final DataSource existing = metaStore.getSource(sink.getName());
        if (existing == null) {
          throw new KsqlException("Unknown source: "
              + sink.getName().toString(FormatOptions.noEscape()));
        }

        analysis.setInto(Into.of(
            sink.getName(),
            false,
            existing.getKsqlTopic()
        ));
        return;
      }

      // Creating a new sink: derive topic name, key format and value format.
      final String topicName = sink.getProperties().getKafkaTopic()
          .orElseGet(() -> topicPrefix + sink.getName().name());

      final KeyFormat keyFormat = buildKeyFormat();
      final Format format = getValueFormat(sink);

      // Inherit value-format properties from the (first) source only when the
      // sink uses the same format.
      final Map<String, String> sourceProperties = new HashMap<>();
      if (format.name().equals(getSourceInfo().getFormat())) {
        getSourceInfo().getProperties().forEach((k, v) -> {
          if (format.getInheritableProperties().contains(k)) {
            sourceProperties.put(k, v);
          }
        });
      }

      // overwrite any inheritable properties if they were explicitly
      // specified in the statement
      sourceProperties.putAll(sink.getProperties().getFormatProperties());

      final ValueFormat valueFormat = ValueFormat.of(FormatInfo.of(
          format.name(),
          sourceProperties
      ));

      final KsqlTopic intoKsqlTopic = new KsqlTopic(
          topicName,
          keyFormat,
          valueFormat
      );

      analysis.setInto(Into.of(
          sink.getName(),
          true,
          intoKsqlTopic
      ));
    }

    /**
     * Windowed queries always key with the KAFKA format; otherwise the key
     * format of the first source is carried through.
     */
    private KeyFormat buildKeyFormat() {
      final Optional<KsqlWindowExpression> ksqlWindow = analysis.getWindowExpression()
          .map(WindowExpression::getKsqlWindowExpression);

      return ksqlWindow
          .map(w -> KeyFormat.windowed(
              FormatInfo.of(FormatFactory.KAFKA.name()), w.getWindowInfo()))
          .orElseGet(() -> analysis
              .getFromDataSources()
              .get(0)
              .getDataSource()
              .getKsqlTopic()
              .getKeyFormat());
    }

    private void setSerdeOptions(final Sink sink) {
      final List<ColumnName> columnNames = getColumnNames();

      final Format valueFormat = getValueFormat(sink);

      final Set<SerdeOption> serdeOptions = serdeOptionsSupplier.build(
          columnNames,
          valueFormat,
          sink.getProperties().getWrapSingleValues(),
          defaultSerdeOptions
      );

      analysis.setSerdeOptions(serdeOptions);
    }

    private List<ColumnName> getColumnNames() {
      return analysis.getSelectExpressions().stream()
          .map(SelectExpression::getAlias)
          .collect(Collectors.toList());
    }

    /** Explicit VALUE_FORMAT on the sink wins; otherwise inherit from the source. */
    private Format getValueFormat(final Sink sink) {
      return sink.getProperties().getValueFormat()
          .orElseGet(() -> FormatFactory.of(getSourceInfo()));
    }

    /** Value format info of the first (left-most) source in the FROM clause. */
    private FormatInfo getSourceInfo() {
      return analysis
          .getFromDataSources()
          .get(0)
          .getDataSource()
          .getKsqlTopic()
          .getValueFormat()
          .getFormatInfo();
    }

    @Override
    protected AstNode visitQuery(
        final Query node,
        final Void context
    ) {
      // FROM must be processed before SELECT so column references can resolve.
      process(node.getFrom(), context);

      process(node.getSelect(), context);

      node.getWhere().ifPresent(this::analyzeWhere);
      node.getGroupBy().ifPresent(this::analyzeGroupBy);
      node.getPartitionBy().ifPresent(this::analyzePartitionBy);
      node.getWindow().ifPresent(this::analyzeWindowExpression);
      node.getHaving().ifPresent(this::analyzeHaving);
      node.getLimit().ifPresent(analysis::setLimitClause);

      throwOnUnknownColumnReference();

      return null;
    }

    /** Validates every column reference in the analyzed clauses against the sources. */
    private void throwOnUnknownColumnReference() {
      final ColumnReferenceValidator columnValidator =
          new ColumnReferenceValidator(analysis.getFromSourceSchemas(true));

      analysis.getWhereExpression()
          .ifPresent(columnValidator::analyzeExpression);

      analysis.getGroupByExpressions()
          .forEach(columnValidator::analyzeExpression);

      analysis.getHavingExpression()
          .ifPresent(columnValidator::analyzeExpression);

      analysis.getSelectExpressions().stream()
          .map(SelectExpression::getExpression)
          .forEach(columnValidator::analyzeExpression);
    }

    @Override
    protected AstNode visitJoin(final Join node, final Void context) {
      isJoin = true;

      process(node.getLeft(), context);
      process(node.getRight(), context);

      final JoinNode.JoinType joinType = getJoinType(node);

      final AliasedDataSource left = analysis.getFromDataSources().get(0);
      final AliasedDataSource right = analysis.getFromDataSources().get(1);

      final JoinOn joinOn = (JoinOn) node.getCriteria();
      final ComparisonExpression comparisonExpression = (ComparisonExpression) joinOn
          .getExpression();

      if (comparisonExpression.getType() != ComparisonExpression.Type.EQUAL) {
        throw new KsqlException("Only equality join criteria is supported.");
      }

      final ColumnReferenceValidator columnValidator =
          new ColumnReferenceValidator(analysis.getFromSourceSchemas(false));

      final Set<SourceName> srcsUsedInLeft = columnValidator
          .analyzeExpression(comparisonExpression.getLeft());

      final Set<SourceName> srcsUsedInRight = columnValidator
          .analyzeExpression(comparisonExpression.getRight());

      final SourceName leftSourceName = getOnlySourceForJoin(
          comparisonExpression.getLeft(), comparisonExpression, srcsUsedInLeft);
      final SourceName rightSourceName = getOnlySourceForJoin(
          comparisonExpression.getRight(), comparisonExpression, srcsUsedInRight);

      throwOnSelfJoin(left, right);
      throwOnIncompleteJoinCriteria(left, right, leftSourceName, rightSourceName);
      throwOnIncompatibleSourceWindowing(left, right);

      // The ON clause may name the sources in either order; normalise so the
      // stored left expression always refers to the left source.
      final boolean flipped = leftSourceName.equals(right.getAlias());
      analysis.setJoin(new JoinInfo(
          flipped ? comparisonExpression.getRight() : comparisonExpression.getLeft(),
          flipped ? comparisonExpression.getLeft() : comparisonExpression.getRight(),
          joinType,
          node.getWithinExpression()
      ));

      return null;
    }

    private void throwOnSelfJoin(final AliasedDataSource left, final AliasedDataSource right) {
      if (left.getDataSource().getName().equals(right.getDataSource().getName())) {
        throw new KsqlException(
            "Can not join '" + left.getDataSource().getName().toString(FormatOptions.noEscape())
                + "' to '" + right.getDataSource().getName().toString(FormatOptions.noEscape())
                + "': self joins are not yet supported."
        );
      }
    }

    private void throwOnIncompleteJoinCriteria(
        final AliasedDataSource left,
        final AliasedDataSource right,
        final SourceName leftExpressionSource,
        final SourceName rightExpressionSource
    ) {
      final boolean valid = ImmutableSet.of(leftExpressionSource, rightExpressionSource)
          .containsAll(ImmutableList.of(left.getAlias(), right.getAlias()));

      if (!valid) {
        throw new KsqlException(
            "Each side of the join must reference exactly one source and not the same source. "
                + "Left side references " + leftExpressionSource
                + " and right references " + rightExpressionSource
        );
      }
    }

    private void throwOnIncompatibleSourceWindowing(
        final AliasedDataSource left,
        final AliasedDataSource right
    ) {
      final Optional<WindowType> leftWindowType = left.getDataSource()
          .getKsqlTopic()
          .getKeyFormat()
          .getWindowInfo()
          .map(WindowInfo::getType);

      final Optional<WindowType> rightWindowType = right.getDataSource()
          .getKsqlTopic()
          .getKeyFormat()
          .getWindowInfo()
          .map(WindowInfo::getType);

      if (leftWindowType.isPresent() != rightWindowType.isPresent()) {
        throw windowedNonWindowedJoinException(left, right, leftWindowType, rightWindowType);
      }

      if (!leftWindowType.isPresent()) {
        return;
      }

      // SESSION windows only join SESSION; HOPPING/TUMBLING are interchangeable.
      final WindowType leftWt = leftWindowType.get();
      final WindowType rightWt = rightWindowType.get();
      final boolean compatible = leftWt == WindowType.SESSION
          ? rightWt == WindowType.SESSION
          : rightWt == WindowType.HOPPING || rightWt == WindowType.TUMBLING;

      if (!compatible) {
        throw new KsqlException("Incompatible windowed sources."
            + System.lineSeparator()
            + "Left source: " + leftWt
            + System.lineSeparator()
            + "Right source: " + rightWt
            + System.lineSeparator()
            + "Session windowed sources can only be joined to other session windowed sources, "
            + "and may still not result in expected behaviour as session bounds must be an exact "
            + "match for the join to work"
            + System.lineSeparator()
            + "Hopping and tumbling windowed sources can only be joined to other hopping and "
            + "tumbling windowed sources"
        );
      }
    }

    private KsqlException windowedNonWindowedJoinException(
        final AliasedDataSource left,
        final AliasedDataSource right,
        final Optional<WindowType> leftWindowType,
        final Optional<WindowType> rightWindowType
    ) {
      final String leftMsg = leftWindowType.map(Object::toString).orElse("not");
      final String rightMsg = rightWindowType.map(Object::toString).orElse("not");
      return new KsqlException("Can not join windowed source to non-windowed source."
          + System.lineSeparator()
          + left.getAlias() + " is " + leftMsg + " windowed"
          + System.lineSeparator()
          + right.getAlias() + " is " + rightMsg + " windowed"
      );
    }

    private SourceName getOnlySourceForJoin(
        final Expression exp,
        final ComparisonExpression join,
        final Set<SourceName> sources
    ) {
      try {
        return Iterables.getOnlyElement(sources);
      } catch (final Exception e) {
        // getOnlyElement throws for both zero and multiple sources; either way
        // the expression does not reference exactly one source.
        throw new KsqlException("Invalid comparison expression '" + exp + "' in join '" + join
            + "'. Each side of the join comparision must contain references from exactly one "
            + "source.");
      }
    }

    private JoinNode.JoinType getJoinType(final Join node) {
      final JoinNode.JoinType joinType;
      switch (node.getType()) {
        case INNER:
          joinType = JoinNode.JoinType.INNER;
          break;
        case LEFT:
          joinType = JoinNode.JoinType.LEFT;
          break;
        case OUTER:
          joinType = JoinNode.JoinType.OUTER;
          break;
        default:
          throw new KsqlException("Join type is not supported: " + node.getType().name());
      }
      return joinType;
    }

    @Override
    protected AstNode visitAliasedRelation(final AliasedRelation node, final Void context) {
      final SourceName structuredDataSourceName = ((Table) node.getRelation()).getName();

      final DataSource source = metaStore.getSource(structuredDataSourceName);
      if (source == null) {
        throw new KsqlException(structuredDataSourceName + " does not exist.");
      }

      analysis.addDataSource(node.getAlias(), source);
      return node;
    }

    @Override
    protected AstNode visitSelect(final Select node, final Void context) {
      for (final SelectItem selectItem : node.getSelectItems()) {
        if (selectItem instanceof AllColumns) {
          visitSelectStar((AllColumns) selectItem);
        } else if (selectItem instanceof SingleColumn) {
          final SingleColumn column = (SingleColumn) selectItem;
          // NOTE(review): alias is assumed to always be populated by the time the
          // analyzer runs (presumably set during earlier AST rewriting) — confirm.
          addSelectItem(column.getExpression(), column.getAlias().get());
          visitTableFunctions(column.getExpression());
        } else {
          throw new IllegalArgumentException(
              "Unsupported SelectItem type: " + selectItem.getClass().getName());
        }
      }
      return null;
    }

    @Override
    protected AstNode visitGroupBy(final GroupBy node, final Void context) {
      return null;
    }

    private void analyzeWhere(final Expression node) {
      analysis.setWhereExpression(node);
    }

    private void analyzeGroupBy(final GroupBy groupBy) {
      isGroupBy = true;

      for (final GroupingElement groupingElement : groupBy.getGroupingElements()) {
        final Set<Expression> groupingSet = groupingElement.enumerateGroupingSets().get(0);
        analysis.addGroupByExpressions(groupingSet);
      }
    }

    private void analyzePartitionBy(final Expression partitionBy) {
      analysis.setPartitionBy(partitionBy);
    }

    private void analyzeWindowExpression(final WindowExpression windowExpression) {
      analysis.setWindowExpression(windowExpression);
    }

    private void analyzeHaving(final Expression node) {
      analysis.setHavingExpression(node);
    }

    /** Expands a (possibly source-qualified) {@code SELECT *} into individual columns. */
    private void visitSelectStar(final AllColumns allColumns) {

      final Optional<NodeLocation> location = allColumns.getLocation();

      final Optional<SourceName> prefix = allColumns.getSource();

      for (final AliasedDataSource source : analysis.getFromDataSources()) {

        if (prefix.isPresent() && !prefix.get().equals(source.getAlias())) {
          continue;
        }

        final String aliasPrefix = analysis.isJoin()
            ? source.getAlias().name() + "_"
            : "";

        final LogicalSchema schema = source.getDataSource().getSchema();
        final boolean windowed = source.getDataSource().getKsqlTopic().getKeyFormat().isWindowed();

        // Non-join persistent queries only require value columns on SELECT *
        // where as joins and transient queries require all columns in the select:
        // See https://github.com/confluentinc/ksql/issues/3731 for more info
        final List<Column> valueColumns = persistent && !analysis.isJoin()
            ? schema.value()
            : systemColumnsToTheFront(schema.withMetaAndKeyColsInValue(windowed).value());

        for (final Column column : valueColumns) {

          if (pullQuery && schema.isMetaColumn(column.name())) {
            continue;
          }

          final QualifiedColumnReferenceExp selectItem = new QualifiedColumnReferenceExp(
              location,
              source.getAlias(),
              column.name());

          final String alias = aliasPrefix + column.name().name();

          addSelectItem(selectItem, ColumnName.of(alias));
        }
      }
    }

    private List<Column> systemColumnsToTheFront(final List<Column> columns) {
      // When doing a `select *` the system columns should be at the front of the column list
      // but are added at the back during processing for performance reasons.
      // Switch them around here.
      // Fix: use partitioningBy rather than groupingBy — groupingBy omits a key
      // entirely when its partition is empty, which made the unconditional
      // get(true)/get(false) below throw an NPE. partitioningBy guarantees both
      // keys are present (mapped to empty, mutable lists).
      final Map<Boolean, List<Column>> partitioned = columns.stream()
          .collect(Collectors.partitioningBy(c -> SchemaUtil.isSystemColumn(c.name())));

      final List<Column> all = partitioned.get(true);
      all.addAll(partitioned.get(false));
      return all;
    }

    /** Rejects KAFKA-format value sources in operations the format cannot support. */
    public void validate() {
      final String kafkaSources = analysis.getFromDataSources().stream()
          .filter(s -> s.getDataSource().getKsqlTopic().getValueFormat().getFormat()
              == FormatFactory.KAFKA)
          .map(AliasedDataSource::getAlias)
          .map(SourceName::name)
          .collect(Collectors.joining(", "));

      if (kafkaSources.isEmpty()) {
        return;
      }

      if (isJoin) {
        throw new KsqlException("Source(s) " + kafkaSources + " are using the 'KAFKA' value format."
            + " This format does not yet support JOIN."
            + System.lineSeparator() + KAFKA_VALUE_FORMAT_LIMITATION_DETAILS);
      }

      if (isGroupBy) {
        throw new KsqlException("Source(s) " + kafkaSources + " are using the 'KAFKA' value format."
            + " This format does not yet support GROUP BY."
            + System.lineSeparator() + KAFKA_VALUE_FORMAT_LIMITATION_DETAILS);
      }
    }

    /**
     * Records a select expression and every column it references; persistent
     * queries may not select into reserved (system) column names.
     */
    private void addSelectItem(final Expression exp, final ColumnName columnName) {
      if (persistent) {
        if (SchemaUtil.isSystemColumn(columnName)) {
          throw new KsqlException("Reserved column name in select: " + columnName + ". "
              + "Please remove or alias the column.");
        }
      }

      final Set<ColumnName> columnNames = new HashSet<>();

      final TraversalExpressionVisitor<Void> visitor = new TraversalExpressionVisitor<Void>() {
        @Override
        public Void visitColumnReference(
            final UnqualifiedColumnReferenceExp node,
            final Void context
        ) {
          columnNames.add(node.getReference());
          return null;
        }

        @Override
        public Void visitQualifiedColumnReference(
            final QualifiedColumnReferenceExp node,
            final Void context
        ) {
          columnNames.add(node.getReference());
          return null;
        }
      };

      visitor.process(exp, null);

      analysis.addSelectItem(exp, columnName);
      analysis.addSelectColumnRefs(columnNames);
    }

    private void visitTableFunctions(final Expression expression) {
      final TableFunctionVisitor visitor = new TableFunctionVisitor();
      visitor.process(expression, null);
    }

    /** Detects table-function calls in an expression; nesting is not allowed. */
    private final class TableFunctionVisitor extends TraversalExpressionVisitor<Void> {

      private Optional<FunctionName> tableFunctionName = Optional.empty();

      @Override
      public Void visitFunctionCall(final FunctionCall functionCall, final Void context) {
        final FunctionName functionName = functionCall.getName();
        final boolean isTableFunction = metaStore.isTableFunction(functionName);

        if (isTableFunction) {
          if (tableFunctionName.isPresent()) {
            throw new KsqlException("Table functions cannot be nested: "
                + tableFunctionName.get() + "(" + functionName + "())");
          }

          tableFunctionName = Optional.of(functionName);

          analysis.addTableFunction(functionCall);
        }

        super.visitFunctionCall(functionCall, context);

        if (isTableFunction) {
          tableFunctionName = Optional.empty();
        }

        return null;
      }
    }
  }

  @FunctionalInterface
  interface SerdeOptionsSupplier {

    Set<SerdeOption> build(
        List<ColumnName> valueColumnNames,
        Format valueFormat,
        Optional<Boolean> wrapSingleValues,
        Set<SerdeOption> singleFieldDefaults
    );
  }
}
|
package com.esese.entity;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
@Entity
public class User {

    // One enum constant per checkpoint; ordinal n maps to bit n of `data`.
    // Names and declaration order are load-bearing (ordinal() is used for the
    // bitmask), so they must not change.
    public enum dataType{
        paiFang,aoChang,yingDing,laoTu,wanLin,laoZhaiShe
    }

    @Id
    @Column(name="id")
    @GeneratedValue
    private int id;

    @Column(name="username")
    private String username;

    @Column(name="password")
    private String password;

    // Completion bitmask: bit n (tested via data & (0x1 << n)) marks whether the
    // checkpoint with ordinal n is complete. NOTE(review): the original comment
    // listed the sites in reverse enum order — confirm the intended bit layout
    // against stored data.
    @Column(name="data")
    private int data;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public int getData() {
        return data;
    }

    public void setData(int data) {
        this.data = data;
    }

    /**
     * Renders the completion flags as a JSON object string, e.g.
     * {"paiFang":true,"aoChang":false,...}, in enum declaration order.
     */
    public String dataToString(){
        StringBuilder json = new StringBuilder("{");
        dataType[] types = dataType.values();
        for (int i = 0; i < types.length; i++) {
            if (i > 0) {
                json.append(',');
            }
            json.append('"').append(types[i].name()).append('"')
                .append(':').append(isComplete(types[i]));
        }
        return json.append('}').toString();
    }

    /** True when the bit for the given checkpoint is set. */
    public boolean isComplete(dataType type){
        int mask = 0x1 << type.ordinal();
        return (data & mask) != 0;
    }

    /** Sets the bit for the given checkpoint; never clears bits. */
    public void setComplete(dataType type){
        data |= 0x1 << type.ordinal();
    }
}
|
package org.openforis.collect.android;
/**
 * Unchecked wrapper for survey-related failures.
 *
 * @author Daniel Wiell
 */
public class SurveyException extends RuntimeException {
    // Throwable is Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /**
     * @param cause the underlying failure, retrievable via {@link #getCause()}
     */
    public SurveyException(Throwable cause) {
        super(cause);
    }

    /**
     * Backward-compatible addition: allows callers to attach a descriptive
     * message alongside the underlying cause.
     *
     * @param message detail message describing the failure
     * @param cause   the underlying failure
     */
    public SurveyException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
package io.nessus.actions.core.model;
import static io.nessus.actions.core.model.RouteModel.CAMEL_ROUTE_MODEL_RESOURCE_NAME;
import java.io.IOException;
import java.net.URL;
import org.apache.camel.CamelContext;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.model.dataformat.JsonLibrary;
import org.apache.camel.spi.TypeConverterRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.nessus.actions.core.model.MarshalStep.MarshalStepContent;
import io.nessus.actions.core.model.UnmarshalStep.UnmarshalStepContent;
import io.nessus.common.AssertState;
/**
 * Route builder that constructs its Camel route from a declarative
 * {@link RouteModel} loaded off the classpath.
 */
public class ModelRouteBuilder extends RouteBuilder {

    static final Logger LOG = LoggerFactory.getLogger(ModelRouteBuilder.class);

    // Classpath location of the route model; defaults to the standard resource name.
    private String modelResource = "/" + CAMEL_ROUTE_MODEL_RESOURCE_NAME;

    /** Overrides the classpath location of the route model. */
    public ModelRouteBuilder withModelResource(String respath) {
        modelResource = respath;
        return this;
    }

    @Override
    public void configure() throws Exception {
        configure(this);
    }

    /** Configures the given builder from the model loaded off the classpath. */
    public void configure(RouteBuilder routes) throws Exception {
        configureWithModel(routes, loadModel());
    }

    /**
     * Loads the route model from {@code modelResource}.
     *
     * @throws IOException if the resource cannot be read
     */
    public RouteModel loadModel() throws IOException {
        URL resurl = getClass().getResource(modelResource);
        AssertState.notNull(resurl, "Cannot find: " + modelResource);
        LOG.info("Loading model ...");
        return RouteModel.read(resurl);
    }

    /** Translates the model's from-endpoint and steps into a Camel route definition. */
    public void configureWithModel(RouteBuilder routes, RouteModel model) {
        LOG.info("Configure with {}", model);

        String fromUri = model.getFrom().toCamelUri();
        LOG.info("From: {}", fromUri);
        RouteDefinition rdef = routes.fromF(fromUri);

        model.getSteps().forEach(step -> {
            if (step instanceof ToStep) {
                String toUri = ((ToStep) step).toCamelUri();
                LOG.info("To: {}", toUri);
                rdef.to(toUri);
            } else if (step instanceof MarshalStep) {
                MarshalStepContent content = ((MarshalStep) step).getContent();
                String format = content.getFormat();
                LOG.info("Marshal: {}", format);
                // Only JSON marshalling is modelled so far; other formats are ignored.
                if ("json".equals(format)) {
                    rdef.marshal().json(JsonLibrary.Jackson, content.isPretty());
                }
            } else if (step instanceof UnmarshalStep) {
                UnmarshalStepContent content = ((UnmarshalStep) step).getContent();
                String format = content.getFormat();
                LOG.info("Unmarshal: {}", format);
                if ("json".equals(format)) {
                    rdef.unmarshal().json(JsonLibrary.Jackson);
                }
            }
        });
    }

    public void addToCamelContext(CamelContext context) throws Exception {
        // [TODO] Remove when this is part of Camel
        // [CAMEL-15301] Provide various type converters for camel-xchange
        TypeConverterRegistry registry = context.getTypeConverterRegistry();
        registry.addTypeConverters(new TickerTypeConverters());
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.statistics.distribution;
import java.util.Arrays;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
/**
* Test cases for {@link GeometricDistribution}.
* Extends {@link BaseDiscreteDistributionTest}. See javadoc of that class for details.
*/
class GeometricDistributionTest extends BaseDiscreteDistributionTest {
    // Builds the distribution under test from its single parameter: the probability of success p.
    @Override
    DiscreteDistribution makeDistribution(Object... parameters) {
        final double p = (Double) parameters[0];
        return GeometricDistribution.of(p);
    }
    // Parameter sets that must be rejected by GeometricDistribution.of:
    // negative p, zero p, and p above one.
    @Override
    Object[][] makeInvalidParameters() {
        return new Object[][] {
            {-0.1},
            {0.0},
            {1.1},
        };
    }
    @Override
    String[] getParameterNames() {
        return new String[] {"ProbabilityOfSuccess"};
    }
    //-------------------- Additional test cases -------------------------------
    /**
     * Test the PMF is computed using the power function when p is above 0.5.
     * <p>Note: The geometric distribution PMF is defined as:
     * <pre>
     * pmf(x) = (1-p)^x * p
     * </pre>
     * <p>As {@code p -> 0} use of the power function should be avoided as it will
     * propagate the inexact computation of {@code 1 - p}. The implementation can
     * switch to using a rearrangement with the exponential function which avoid
     * computing {@code 1 - p}.
     * <p>See STATISTICS-34.
     *
     * @param p Probability of success
     */
    @ParameterizedTest
    @ValueSource(doubles = {0.5, 0.6658665, 0.75, 0.8125347, 0.9, 0.95, 0.99})
    void testPMF(double p) {
        final GeometricDistribution dist = GeometricDistribution.of(p);
        final int[] x = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20, 30, 40};
        // Reference values computed with the direct pow-based formula: p * (1-p)^k
        final double[] values = Arrays.stream(x).mapToDouble(k -> p * Math.pow(1 - p, k)).toArray();
        // The PMF should be an exact match to the direct implementation with Math.pow.
        testProbability(dist, x, values, DoubleTolerances.equals());
    }
    /**
     * Test the inverse CDF returns the correct x from the CDF result.
     * Cases were identified using various probabilities to discover a mismatch
     * of x != icdf(cdf(x)). This occurs due to rounding errors on the inversion.
     */
    @ParameterizedTest
    @ValueSource(doubles = {
        0.2,
        0.8,
        // icdf(cdf(x)) requires rounding up
        0.07131208016887369,
        0.14441285445326058,
        0.272118157703929,
        0.424656239093432,
        0.00899452845634574,
        // icdf(cdf(x)) requires rounding down
        0.3441320118140774,
        0.5680886873083258,
        0.8738746761971425,
        0.17373328785967923,
        0.09252030895185881,
    })
    void testInverseCDF(double p) {
        final GeometricDistribution dist = GeometricDistribution.of(p);
        final int[] x = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
        // Asserts x == icdf(cdf(x)) at every point in x
        testCumulativeProbabilityInverseMapping(dist, x);
    }
    /**
     * Test the inverse SF returns the correct x from the SF result.
     * Cases were identified using various probabilities to discover a mismatch
     * of x != isf(sf(x)). This occurs due to rounding errors on the inversion.
     */
    @ParameterizedTest
    @ValueSource(doubles = {
        0.2,
        0.8,
        // isf(sf(x)) requires rounding up
        0.9625911263689207,
        0.2858964038911178,
        0.31872883511135996,
        0.46149078212832284,
        0.3701613946505057,
        // isf(sf(x)) requires rounding down
        0.3796493606864414,
        0.1113177920615187,
        0.2587259503484439,
        0.8996839434455458,
        0.450704136259792,
    })
    void testInverseSF(double p) {
        final GeometricDistribution dist = GeometricDistribution.of(p);
        final int[] x = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
        // Asserts x == isf(sf(x)) at every point in x
        testSurvivalProbabilityInverseMapping(dist, x);
    }
    @Test
    void testAdditionalMoments() {
        GeometricDistribution dist;
        final DoubleTolerance tol = DoubleTolerances.ulps(1);
        // Moments checked against the closed forms: mean = (1-p)/p, variance = (1-p)/p^2
        dist = GeometricDistribution.of(0.5);
        TestUtils.assertEquals((1.0d - 0.5d) / 0.5d, dist.getMean(), tol);
        TestUtils.assertEquals((1.0d - 0.5d) / (0.5d * 0.5d), dist.getVariance(), tol);
        dist = GeometricDistribution.of(0.3);
        TestUtils.assertEquals((1.0d - 0.3d) / 0.3d, dist.getMean(), tol);
        TestUtils.assertEquals((1.0d - 0.3d) / (0.3d * 0.3d), dist.getVariance(), tol);
    }
    /**
     * Test the most extreme parameters. Uses a small enough value of p that the distribution is
     * truncated by the maximum integer value. This creates a case where (x+1) will overflow.
     * This occurs in the cumulative and survival function computations.
     */
    @Test
    void testExtremeParameters() {
        final double p = Double.MIN_VALUE;
        final GeometricDistribution dist = GeometricDistribution.of(p);
        final int x = Integer.MAX_VALUE;
        // CDF = 1 - (1-p)^(x+1)
        // Compute with log for accuracy with small p
        // (x + 1.0) forces double arithmetic so x+1 cannot overflow int at Integer.MAX_VALUE
        final double cdf = -Math.expm1(Math.log1p(-p) * (x + 1.0));
        Assertions.assertNotEquals(1.0, cdf);
        Assertions.assertEquals(cdf, dist.cumulativeProbability(x));
        for (int i = 0; i < 5; i++) {
            Assertions.assertEquals(x - i, dist.inverseCumulativeProbability(dist.cumulativeProbability(x - i)));
        }
        // CDF(x=0) = p
        Assertions.assertEquals(p, dist.cumulativeProbability(0));
        Assertions.assertEquals(0, dist.inverseCumulativeProbability(p));
        Assertions.assertEquals(1, dist.inverseCumulativeProbability(Math.nextUp(p)));
        for (int i = 1; i < 5; i++) {
            Assertions.assertEquals(i, dist.inverseCumulativeProbability(dist.cumulativeProbability(i)));
        }
        // SF = (1-p)^(x+1)
        // Compute with log for accuracy with small p
        final double sf = Math.exp(Math.log1p(-p) * (x + 1.0));
        Assertions.assertEquals(1.0 - cdf, sf);
        Assertions.assertEquals(sf, dist.survivalProbability(x));
        // SF is too close to 1 to be able to invert
        Assertions.assertEquals(1.0, sf);
        Assertions.assertEquals(x, dist.inverseSurvivalProbability(Math.nextDown(1.0)));
    }
    /**
     * Test the most extreme parameters. Uses a large enough value of p that the distribution is
     * compacted to x=0.
     *
     * <p>p is one ULP down from 1.0.
     */
    @Test
    void testExtremeParameters2() {
        final double p = Math.nextDown(1.0);
        final GeometricDistribution dist = GeometricDistribution.of(p);
        final int x = 0;
        // CDF = 1 - (1-p)^(x+1)
        // CDF(x=0) = p
        Assertions.assertEquals(p, dist.cumulativeProbability(0));
        Assertions.assertEquals(0, dist.inverseCumulativeProbability(p));
        // CDF is too close to 1 to be able to invert next value
        Assertions.assertEquals(Integer.MAX_VALUE, dist.inverseCumulativeProbability(Math.nextUp(p)));
        // SF = (1-p)^(x+1)
        final double sf = 1 - p;
        Assertions.assertNotEquals(0.0, sf);
        Assertions.assertEquals(sf, dist.survivalProbability(x));
        for (int i = 1; i < 5; i++) {
            Assertions.assertEquals(i, dist.inverseSurvivalProbability(dist.survivalProbability(i)));
        }
    }
    /**
     * Test the most extreme parameters. Uses a large enough value of p that the distribution is
     * compacted to x=0.
     *
     * <p>p is two ULP down from 1.0.
     */
    @Test
    void testExtremeParameters3() {
        final double p = Math.nextDown(Math.nextDown(1.0));
        final GeometricDistribution dist = GeometricDistribution.of(p);
        final int x = 0;
        // CDF = 1 - (1-p)^(x+1)
        // CDF(x=0) = p
        Assertions.assertEquals(p, dist.cumulativeProbability(0));
        Assertions.assertEquals(0, dist.inverseCumulativeProbability(p));
        Assertions.assertEquals(1, dist.inverseCumulativeProbability(Math.nextUp(p)));
        // CDF is too close to 1 to be able to invert next value
        Assertions.assertEquals(Integer.MAX_VALUE, dist.inverseCumulativeProbability(Math.nextUp(Math.nextUp(p))));
        // SF = (1-p)^(x+1)
        final double sf = 1 - p;
        Assertions.assertNotEquals(0.0, sf);
        Assertions.assertEquals(sf, dist.survivalProbability(x));
        for (int i = 1; i < 5; i++) {
            Assertions.assertEquals(i, dist.inverseSurvivalProbability(dist.survivalProbability(i)));
        }
    }
}
|
package app.model;
import java.util.ArrayList;
import java.util.Arrays;
/**
 * Plain data holder that mirrors the application's JSON run configuration.
 * Field names intentionally match the JSON keys and must not be renamed.
 */
public class JsonRunModel {
    public String DEVICE_NAME;
    public String IPCHECK_HOST;
    public String IPCHECK_RESOURCE;
    public String EMAIL_SERVER;
    public String EMAIL_PROTOCOL;
    public int EMAIL_PORT;
    public String EMAIL_USERNAME;
    public String EMAIL_PASSWORD;
    public String EMAIL_SENDER;
    public ArrayList<String> EMAIL_RECIPIENT_LIST;
    public int EMAIL_TIMEOUT;

    /**
     * Creates the run model.
     *
     * @param useDefault when {@code true} every field is pre-populated with an
     *                   example/template value; when {@code false} all fields
     *                   keep their Java default values
     */
    public JsonRunModel(boolean useDefault) {
        if (useDefault) {
            applyDefaults();
        }
    }

    // Fills every field with the template values used for a fresh configuration file.
    private void applyDefaults() {
        DEVICE_NAME = "My Unique Device Name";
        IPCHECK_HOST = "www.google.com";
        IPCHECK_RESOURCE = "/search?q=what+is+my+ip";
        EMAIL_SERVER = "smtp.server.com";
        EMAIL_PROTOCOL = "SSL";
        EMAIL_PORT = 465;
        EMAIL_USERNAME = "sender@address.com";
        EMAIL_PASSWORD = "password";
        EMAIL_SENDER = "My Unique Device Name < sender@address.com >";
        EMAIL_RECIPIENT_LIST = new ArrayList<>(Arrays.asList("recipient-1@address.com", "recipient-2@address.com"));
        EMAIL_TIMEOUT = 30000;
    }
}
|
/*
* Hibernate Validator, declare and validate application constraints
*
* License: Apache License, Version 2.0
* See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
*/
package org.hibernate.validator.test.constraints.annotations.hv;
import static org.hibernate.validator.testutil.ConstraintViolationAssert.assertNoViolations;
import static org.hibernate.validator.testutil.ConstraintViolationAssert.assertThat;
import static org.hibernate.validator.testutil.ConstraintViolationAssert.violationOf;
import java.util.Set;
import javax.validation.ConstraintViolation;
import org.hibernate.validator.constraints.SafeHtml;
import org.hibernate.validator.test.constraints.annotations.AbstractConstrainedTest;
import org.testng.annotations.Test;
/**
* @author Marko Bekhta
*/
public class SafeHtmlConstrainedTest extends AbstractConstrainedTest {
    @Test
    public void testSafeHtmlNumber() {
        // Plain markup without scripts passes @SafeHtml validation
        Foo bean = new Foo( "<div>content</div>" );
        Set<ConstraintViolation<Foo>> result = validator.validate( bean );
        assertNoViolations( result );
    }
    @Test
    public void testSafeHtmlInvalid() {
        // A script tag must produce exactly one @SafeHtml violation
        Foo bean = new Foo( "<script>alert('Doh')</script>" );
        Set<ConstraintViolation<Foo>> result = validator.validate( bean );
        assertThat( result ).containsOnlyViolations(
                violationOf( SafeHtml.class )
        );
    }
    // Minimal bean carrying the constrained HTML property under test.
    private static class Foo {
        @SafeHtml
        private final String html;
        public Foo(String html) {
            this.html = html;
        }
    }
}
|
/*
* (C) Copyright Itude Mobile B.V., The Netherlands
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.itude.mobile.mobbl.core.view.builders.field;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.view.Gravity;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.LinearLayout.LayoutParams;
import android.widget.TextView;
import com.itude.mobile.android.util.DateUtil;
import com.itude.mobile.android.util.StringUtil;
import com.itude.mobile.mobbl.core.controller.MBApplicationController;
import com.itude.mobile.mobbl.core.model.MBDocument;
import com.itude.mobile.mobbl.core.services.MBResourceService;
import com.itude.mobile.mobbl.core.view.MBDateField;
import com.itude.mobile.mobbl.core.view.MBField;
import com.itude.mobile.mobbl.core.view.builders.MBStyleHandler;
public abstract class DateTimeFieldBuilder extends MBBaseFieldBuilder {
  /**
   * Builds a full-width date/time selector row for the given field: a styled
   * label on the left and a clickable value view on the right. Clicking the
   * value triggers the listener supplied by
   * {@link #getOnClickListener(MBField, MBDateField, TextView)}.
   *
   * @param field the field definition to render
   * @return the container view holding the label and the value
   */
  @Override
  public View buildField(MBField field)
  {
    final Context context = MBApplicationController.getInstance().getBaseContext();
    final MBStyleHandler styleHandler = getStyleHandler();
    final MBDocument doc = field.getDocument();
    final String path = field.getPath();
    final MBDateField df = new MBDateField();
    // Create our container which will fill the whole width
    LinearLayout container = new LinearLayout(context);
    container.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));
    container.setGravity(Gravity.CENTER_VERTICAL);
    // Add our label (if one exists)
    TextView label = buildTextViewWithValue(field.getLabel());
    label.setGravity(Gravity.CENTER_VERTICAL);
    label.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT, 50));
    styleHandler.styleLabel(label, field);
    final TextView value = new TextView(context);
    value.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT, 50));
    value.setGravity(Gravity.CENTER_VERTICAL);
    // Find out if we have previously set a time
    String dateTimeString = doc.getValueForPath(path);
    String valueLabelText = "";
    String nillValue = field.getValueIfNil();
    if (StringUtil.isNotBlank(nillValue))
    {
      // Reuse the already-fetched nil placeholder instead of calling
      // field.getValueIfNil() a second time (was a redundant call).
      valueLabelText = nillValue;
    }
    if (StringUtil.isNotBlank(dateTimeString))
    {
      // A stored document value takes precedence over the nil placeholder
      df.setTime(dateTimeString);
      valueLabelText = DateUtil.dateToString(df.getCalender().getTime(), field.getFormatMask());
    }
    if (StringUtil.isNotBlank(valueLabelText))
    {
      value.setText(valueLabelText);
    }
    styleHandler.styleDateOrTimeSelectorValue(value, field);
    String source = field.getSource();
    if (StringUtil.isNotBlank(source))
    {
      // Optional background image configured on the field's source attribute
      Drawable drawable = MBResourceService.getInstance().getImageByID(source);
      value.setBackgroundDrawable(drawable);
    }
    value.setOnClickListener(getOnClickListener(field, df, value));
    container.addView(label);
    container.addView(value);
    return container;
  }

  /**
   * Supplies the click listener that opens the concrete date or time picker
   * for the value view. Implemented by the date- and time-specific subclasses.
   */
  protected abstract View.OnClickListener getOnClickListener(MBField field, MBDateField df, TextView value);
}
|
/*
* (c) Copyright 2021 Micro Focus
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0 which accompany this distribution.
*
* The Apache License is available at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.content.rft.remote_copy.sftp;
import com.jcraft.jsch.UserInfo;
import javax.security.auth.Subject;
/**
 * Pre-configured, non-interactive {@link UserInfo} implementation for JSch SFTP
 * connections: every prompt is answered from a flag or value set in advance via
 * the setters, so no user interaction ever takes place.
 */
public class MyUserInfo implements UserInfo {
    private String passwd;
    private boolean promptYesNo;
    private boolean promptPassphrase;
    private boolean promptPassword;
    // Renamed from 'Passphrase' to follow Java field-naming conventions;
    // the public getter/setter signatures are unchanged.
    private String passphrase;
    private String privateKey;

    /**
     * This callback gets invoked when using GSSAPI-with-MIC authentication with the Kerberos mechanism; when that
     * happens, we must somehow provide a subject on whose behalf the connection is being made.
     *
     * @see com.jcraft.jsch.UserInfo#getSubject()
     */
    public Subject getSubject() {
        // No Kerberos subject is provided by this implementation.
        return null;
    }

    /** @return the password previously configured via {@link #setPasswd(String)} */
    public String getPassword() {
        return passwd;
    }

    /**
     * The prompt text is ignored; the pre-configured flag decides the answer.
     */
    public boolean promptPassword(String arg0) {
        return promptPassword;
    }

    /**
     * The prompt text is ignored; the pre-configured flag decides the answer.
     */
    public boolean promptPassphrase(String arg0) {
        return promptPassphrase;
    }

    /**
     * The prompt text is ignored; the pre-configured flag decides the answer
     * (e.g. for host-key confirmation prompts).
     */
    public boolean promptYesNo(String _prompt) {
        return promptYesNo;
    }

    /**
     * Messages from the SSH layer are discarded: this implementation has no UI.
     */
    public void showMessage(String arg0) {
        // intentionally empty
    }

    public String getPrivateKey() {
        return privateKey;
    }

    public void setPrivateKey(String privateKey) {
        this.privateKey = privateKey;
    }

    public boolean isPromptPassphrase() {
        return promptPassphrase;
    }

    public void setPromptPassphrase(boolean promptPassphrase) {
        this.promptPassphrase = promptPassphrase;
    }

    public boolean isPromptPassword() {
        return promptPassword;
    }

    public void setPromptPassword(boolean promptPassword) {
        this.promptPassword = promptPassword;
    }

    public boolean isPromptYesNo() {
        return promptYesNo;
    }

    public void setPromptYesNo(boolean promptYesNo) {
        this.promptYesNo = promptYesNo;
    }

    public String getPassphrase() {
        return passphrase;
    }

    public void setPassphrase(String passphrase) {
        this.passphrase = passphrase;
    }

    public String getPasswd() {
        return passwd;
    }

    public void setPasswd(String passwd) {
        this.passwd = passwd;
    }
}
|
package jscl.math.operator;
import jscl.math.Generic;
import jscl.math.Variable;
import jscl.math.function.Root;
import jscl.math.polynomial.Polynomial;
import jscl.math.polynomial.UnivariatePolynomial;
import jscl.mathml.MathML;
/**
 * Operator representing {@code solve(expression, variable, subscript)}: the
 * subscript-th root of the expression with respect to the variable.
 */
public class Solve extends Operator {
    public Solve(Generic expression, Generic variable, Generic subscript) {
        super("solve",new Generic[] {expression,variable,subscript});
    }

    /**
     * Evaluates the operator. If the expression is polynomial in the variable,
     * the requested root is constructed and evaluated; otherwise the operator
     * stays symbolic and the unevaluated expression is returned.
     */
    public Generic compute() {
        Variable variable=parameter[1].variableValue();
        int subscript=parameter[2].integerValue().intValue();
        if(parameter[0].isPolynomial(variable)) {
            return new Root((UnivariatePolynomial)Polynomial.factory(variable).valueof(parameter[0]),subscript).evaluate();
        }
        return expressionValue();
    }

    public String toString() {
        // StringBuilder: this is single-threaded local use, so the synchronized
        // StringBuffer is unnecessary overhead.
        StringBuilder buffer=new StringBuilder();
        int n=3;
        // Omit the subscript argument from the printed form when it is zero
        if(parameter[2].signum()==0) n=2;
        buffer.append(name);
        buffer.append("(");
        for(int i=0;i<n;i++) {
            buffer.append(parameter[i]).append(i<n-1?", ":"");
        }
        buffer.append(")");
        return buffer.toString();
    }

    /**
     * Renders the operator as MathML: an optional exponent (taken from
     * {@code data} when it is an Integer) over the name, followed by the
     * fenced argument list.
     */
    public void toMathML(MathML element, Object data) {
        MathML e1;
        int exponent=data instanceof Integer?((Integer)data).intValue():1;
        int n=3;
        // Match toString(): hide the zero subscript argument
        if(parameter[2].signum()==0) n=2;
        if(exponent==1) nameToMathML(element);
        else {
            // Wrap the name in <msup> with the exponent as superscript
            e1=element.element("msup");
            nameToMathML(e1);
            MathML e2=element.element("mn");
            e2.appendChild(element.text(String.valueOf(exponent)));
            e1.appendChild(e2);
            element.appendChild(e1);
        }
        e1=element.element("mfenced");
        for(int i=0;i<n;i++) {
            parameter[i].toMathML(e1,null);
        }
        element.appendChild(e1);
    }

    // Template-method hook: empty instance used by the framework for cloning.
    protected Variable newinstance() {
        return new Solve(null,null,null);
    }
}
|
package ink.ikx.rt.impl.jei.impl.other;
import crafttweaker.CraftTweakerAPI;
import crafttweaker.api.item.IItemStack;
import crafttweaker.mc1120.brackets.BracketHandlerItem;
import ink.ikx.rt.RandomTweaker;
import ink.ikx.rt.api.mods.jei.interfaces.element.JEIElement;
import ink.ikx.rt.api.mods.jei.interfaces.other.JEIBackground;
import ink.ikx.rt.api.mods.jei.interfaces.other.JEIPanel;
import ink.ikx.rt.api.mods.jei.interfaces.other.JEITooltip;
import ink.ikx.rt.api.mods.jei.interfaces.slots.JEISlot;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Default {@link JEIPanel} implementation. Panels are identified by their
 * {@code uid} (used for equality/hashing) and registered globally via
 * {@link #register()}.
 */
public class JEIPanelImpl implements JEIPanel {
    public String uid;
    public String localizationKey;
    public JEIBackground JEIBackground;
    public JEITooltip JEITooltip = null;
    public String modid = RandomTweaker.MODID;
    public List<JEISlot> JEISlots = new ArrayList<>();
    public List<JEIElement> JEIElements = new ArrayList<>();
    public List<IItemStack> recipeCatalysts = new ArrayList<>();
    // Placeholder icon; register() warns when it is never replaced.
    public IItemStack icon = BracketHandlerItem.getItem("minecraft:bedrock", 0);
    public JEIPanelImpl(String uid, String localizationKey) {
        this.uid = uid;
        this.localizationKey = localizationKey;
    }
    @Override
    public String getUid() {
        return this.uid;
    }
    @Override
    public String getLocalizationKey() {
        return this.localizationKey;
    }
    @Override
    public String getModID() {
        return this.modid;
    }
    @Override
    public IItemStack getIcon() {
        return this.icon;
    }
    @Override
    public JEIBackground getJEIBackground() {
        return this.JEIBackground;
    }
    @Override
    public IItemStack[] getRecipeCatalysts() {
        return this.recipeCatalysts.toArray(new IItemStack[0]);
    }
    @Override
    public JEISlot[] getJEISlots() {
        return this.JEISlots.toArray(new JEISlot[0]);
    }
    @Override
    public JEIElement[] getJEIElements() {
        return this.JEIElements.toArray(new JEIElement[0]);
    }
    @Override
    public JEITooltip getJEITooltip() {
        return this.JEITooltip;
    }
    @Override
    public void setJEITooltip(JEITooltip JEITooltip) {
        this.JEITooltip = JEITooltip;
    }
    @Override
    public void setModID(String modid) {
        this.modid = modid;
    }
    @Override
    public void setIcon(IItemStack icon) {
        this.icon = icon;
    }
    @Override
    public void setJEIBackGroup(JEIBackground JEIBackground) {
        this.JEIBackground = JEIBackground;
    }
    @Override
    public void setJEIBackGroup(int width, int heigh) {
        this.JEIBackground = new JEIBackgroundImpl(width, heigh);
    }
    @Override
    public void setJEISlots(JEISlot[] JEISlots) {
        // Copy into a mutable list: Arrays.asList returns a fixed-size view,
        // which made a later addJEISlot(...) throw UnsupportedOperationException.
        this.JEISlots = new ArrayList<>(Arrays.asList(JEISlots));
    }
    @Override
    public void setRecipeCatalysts(IItemStack[] recipeCatalysts) {
        // Mutable copy (see setJEISlots) so addRecipeCatalyst(...) keeps working.
        this.recipeCatalysts = new ArrayList<>(Arrays.asList(recipeCatalysts));
    }
    @Override
    public void setJEIElements(JEIElement[] JEIElements) {
        // Mutable copy (see setJEISlots) so addJEIElement(...) keeps working.
        this.JEIElements = new ArrayList<>(Arrays.asList(JEIElements));
    }
    @Override
    public void addJEISlot(JEISlot JEISlot) {
        this.JEISlots.add(JEISlot);
    }
    @Override
    public void addRecipeCatalyst(IItemStack recipeCatalyst) {
        this.recipeCatalysts.add(recipeCatalyst);
    }
    @Override
    public void addJEIElement(JEIElement JEIElement) {
        this.JEIElements.add(JEIElement);
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        // Panels are considered equal when their uids match (see hashCode).
        JEIPanel other = (JEIPanelImpl) o;
        return this.uid.equals(other.getUid());
    }
    @Override
    public int hashCode() {
        return this.uid.hashCode();
    }
    /**
     * Validates the panel and adds it to the global panel list. Registration is
     * skipped (with an error logged) when the uid is already taken or when the
     * background, recipe catalysts or slots are missing.
     */
    @Override
    public void register() {
        if (RandomTweaker.JEIPanelList.contains(this)) {
            // Was "All Potions must be unique" - a copy/paste leftover; this class registers JEI panels.
            CraftTweakerAPI.getLogger().logError("All JEI panels must be unique. JEIKey:" + this.uid + " is not.", new UnsupportedOperationException());
            return;
        }
        if (JEIBackground == null || recipeCatalysts.isEmpty() || JEISlots.isEmpty()) {
            CraftTweakerAPI.getLogger().logError("Parameters mustn't be empty !!!");
            return;
        } else if (icon.matches(BracketHandlerItem.getItem("minecraft:bedrock", 0))) {
            CraftTweakerAPI.getLogger()
                .logWarning("Please modify icon, even though this is not a requirement");
        }
        RandomTweaker.JEIPanelList.add(this);
    }
}
|
package com.salesmanager.web.entity.catalog;
import java.util.ArrayList;
import java.util.List;
import com.salesmanager.web.entity.catalog.category.ReadableCategory;
/**
* Object representing the results of a search query
* @author Carl Samson
*
*/
public class SearchProductList extends ProductList {
    private static final long serialVersionUID = 1L;

    /** Category facets computed for the search result set; never null. */
    private List<ReadableCategory> categoryFacets = new ArrayList<>();

    public List<ReadableCategory> getCategoryFacets() {
        return categoryFacets;
    }

    public void setCategoryFacets(List<ReadableCategory> categoryFacets) {
        this.categoryFacets = categoryFacets;
    }
}
|
/*-
* ============LICENSE_START==========================================
* ONAP Portal
* ===================================================================
* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
* ===================================================================
*
* Unless otherwise specified, all software contained herein is licensed
* under the Apache License, Version 2.0 (the "License");
* you may not use this software except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Unless otherwise specified, all documentation contained herein is licensed
* under the Creative Commons License, Attribution 4.0 Intl. (the "License");
* you may not use this documentation except in compliance with the License.
* You may obtain a copy of the License at
*
* https://creativecommons.org/licenses/by/4.0/
*
* Unless required by applicable law or agreed to in writing, documentation
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ============LICENSE_END============================================
*
*/
package org.onap.portal.controller;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.onap.portal.annotation.ApiVersion;
import org.onap.portal.domain.dto.PortalRestResponse;
import org.onap.portal.domain.dto.model.ExternalSystemUser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
@Configuration
@EnableAspectJAutoProxy
@ApiVersion
public class RolesApprovalSystemVersionController {

    /** Delegate that performs the actual user-profile operations. */
    private final RolesApprovalSystemController delegate;

    @Autowired
    public RolesApprovalSystemVersionController(
        RolesApprovalSystemController rolesApprovalSystemController) {
        this.delegate = rolesApprovalSystemController;
    }

    /** Creates a user profile; exposed for API versions up to v3. */
    @ApiVersion(max = "v3", service = "/v3/userProfile", min = 0, method = "POST")
    public PortalRestResponse<String> postUserProfile(HttpServletRequest request,
        HttpServletResponse response, ExternalSystemUser extSysUser) {
        return delegate.postUserProfile(request, extSysUser, response);
    }

    /** Updates a user profile; exposed for API versions up to v3. */
    @ApiVersion(max = "v3", service = "/v3/userProfile", min = 0, method = "PUT")
    public PortalRestResponse<String> putUserProfile(HttpServletRequest request,
        HttpServletResponse response, ExternalSystemUser extSysUser) {
        return delegate.putUserProfile(request, extSysUser, response);
    }

    /** Deletes a user profile; exposed for API versions up to v3. */
    @ApiVersion(max = "v3", service = "/v3/userProfile", min = 0, method = "DELETE")
    public PortalRestResponse<String> deleteUserProfile(HttpServletRequest request,
        HttpServletResponse response, ExternalSystemUser extSysUser) {
        return delegate.deleteUserProfile(request, extSysUser, response);
    }
}
|
/*
* MIT License
*
* Copyright (c) 2021 Jose Osuna
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
package com.prottonne.hr;
public class HRApp {

    /** Demo entry point: populates a department and prints roster and salary stats. */
    public static void main(String[] args) {
        System.out.println("HR App Starts");
        Department departmentEducation = new Department("Department Education");
        System.out.println(departmentEducation);
        // Roster is printed once while still empty
        printRoster(departmentEducation);
        departmentEducation.addEmployee(new Employee(101, "Ann", 1234.56));
        departmentEducation.addEmployee(new Employee(102, "Bob", 1200.34));
        departmentEducation.addEmployee(new Employee(103, "Ray", 1122.33));
        // Look up the three existing ids plus one that was never added (104)
        for (int id = 101; id <= 104; id++) {
            departmentEducation.getEmployeeById(id);
        }
        printRoster(departmentEducation);
        System.out.println("Total Salary: " + departmentEducation.getTotalSalary());
        System.out.println("Average Salary: " + departmentEducation.getAverageSalary());
    }

    // Prints every employee currently in the department, one per line.
    private static void printRoster(Department department) {
        for (Employee employee : department.listAllEmployees()) {
            System.out.println(employee);
        }
    }
}
|
/* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* gradle plugin from the resource data it found. It
* should not be modified by hand.
*/
package android.support.constraint;
public final class R {
public static final class attr {
public static final int constraintSet = 0x7f030072;
public static final int layout_constraintBaseline_creator = 0x7f0300e1;
public static final int layout_constraintBaseline_toBaselineOf = 0x7f0300e2;
public static final int layout_constraintBottom_creator = 0x7f0300e3;
public static final int layout_constraintBottom_toBottomOf = 0x7f0300e4;
public static final int layout_constraintBottom_toTopOf = 0x7f0300e5;
public static final int layout_constraintDimensionRatio = 0x7f0300e6;
public static final int layout_constraintEnd_toEndOf = 0x7f0300e7;
public static final int layout_constraintEnd_toStartOf = 0x7f0300e8;
public static final int layout_constraintGuide_begin = 0x7f0300e9;
public static final int layout_constraintGuide_end = 0x7f0300ea;
public static final int layout_constraintGuide_percent = 0x7f0300eb;
public static final int layout_constraintHeight_default = 0x7f0300ec;
public static final int layout_constraintHeight_max = 0x7f0300ed;
public static final int layout_constraintHeight_min = 0x7f0300ee;
public static final int layout_constraintHorizontal_bias = 0x7f0300ef;
public static final int layout_constraintHorizontal_chainStyle = 0x7f0300f0;
public static final int layout_constraintHorizontal_weight = 0x7f0300f1;
public static final int layout_constraintLeft_creator = 0x7f0300f2;
public static final int layout_constraintLeft_toLeftOf = 0x7f0300f3;
public static final int layout_constraintLeft_toRightOf = 0x7f0300f4;
public static final int layout_constraintRight_creator = 0x7f0300f5;
public static final int layout_constraintRight_toLeftOf = 0x7f0300f6;
public static final int layout_constraintRight_toRightOf = 0x7f0300f7;
public static final int layout_constraintStart_toEndOf = 0x7f0300f8;
public static final int layout_constraintStart_toStartOf = 0x7f0300f9;
public static final int layout_constraintTop_creator = 0x7f0300fa;
public static final int layout_constraintTop_toBottomOf = 0x7f0300fb;
public static final int layout_constraintTop_toTopOf = 0x7f0300fc;
public static final int layout_constraintVertical_bias = 0x7f0300fd;
public static final int layout_constraintVertical_chainStyle = 0x7f0300fe;
public static final int layout_constraintVertical_weight = 0x7f0300ff;
public static final int layout_constraintWidth_default = 0x7f030100;
public static final int layout_constraintWidth_max = 0x7f030101;
public static final int layout_constraintWidth_min = 0x7f030102;
public static final int layout_editor_absoluteX = 0x7f030104;
public static final int layout_editor_absoluteY = 0x7f030105;
public static final int layout_goneMarginBottom = 0x7f030106;
public static final int layout_goneMarginEnd = 0x7f030107;
public static final int layout_goneMarginLeft = 0x7f030108;
public static final int layout_goneMarginRight = 0x7f030109;
public static final int layout_goneMarginStart = 0x7f03010a;
public static final int layout_goneMarginTop = 0x7f03010b;
public static final int layout_optimizationLevel = 0x7f03010e;
}
// Generated resource-id constants (aapt output) for ConstraintLayout's
// enum-style attribute values (chain styles, match-constraint modes, etc.).
// Do not edit by hand: values must match the compiled resource table.
public static final class id {
public static final int all = 0x7f090023;
public static final int basic = 0x7f09002d;
public static final int chains = 0x7f090037;
public static final int none = 0x7f09008d;
public static final int packed = 0x7f090092;
public static final int parent = 0x7f090094;
public static final int spread = 0x7f0900db;
public static final int spread_inside = 0x7f0900dc;
public static final int wrap = 0x7f09014d;
}
// Generated styleable index tables (aapt output). Each int[] lists the attr
// resource ids of one declare-styleable; each accompanying constant is the
// index of that attr within the array (used with obtainStyledAttributes).
// Do not edit by hand: values must match the compiled resource table.
public static final class styleable {
// Attr ids accepted by ConstraintLayout and its LayoutParams.
public static final int[] ConstraintLayout_Layout = { 0x010100c4, 0x0101011f, 0x01010120, 0x0101013f, 0x01010140, 0x7f030072, 0x7f0300e1, 0x7f0300e2, 0x7f0300e3, 0x7f0300e4, 0x7f0300e5, 0x7f0300e6, 0x7f0300e7, 0x7f0300e8, 0x7f0300e9, 0x7f0300ea, 0x7f0300eb, 0x7f0300ec, 0x7f0300ed, 0x7f0300ee, 0x7f0300ef, 0x7f0300f0, 0x7f0300f1, 0x7f0300f2, 0x7f0300f3, 0x7f0300f4, 0x7f0300f5, 0x7f0300f6, 0x7f0300f7, 0x7f0300f8, 0x7f0300f9, 0x7f0300fa, 0x7f0300fb, 0x7f0300fc, 0x7f0300fd, 0x7f0300fe, 0x7f0300ff, 0x7f030100, 0x7f030101, 0x7f030102, 0x7f030104, 0x7f030105, 0x7f030106, 0x7f030107, 0x7f030108, 0x7f030109, 0x7f03010a, 0x7f03010b, 0x7f03010e };
public static final int ConstraintLayout_Layout_android_orientation = 0;
public static final int ConstraintLayout_Layout_android_maxWidth = 1;
public static final int ConstraintLayout_Layout_android_maxHeight = 2;
public static final int ConstraintLayout_Layout_android_minWidth = 3;
public static final int ConstraintLayout_Layout_android_minHeight = 4;
public static final int ConstraintLayout_Layout_constraintSet = 5;
public static final int ConstraintLayout_Layout_layout_constraintBaseline_creator = 6;
public static final int ConstraintLayout_Layout_layout_constraintBaseline_toBaselineOf = 7;
public static final int ConstraintLayout_Layout_layout_constraintBottom_creator = 8;
public static final int ConstraintLayout_Layout_layout_constraintBottom_toBottomOf = 9;
public static final int ConstraintLayout_Layout_layout_constraintBottom_toTopOf = 10;
public static final int ConstraintLayout_Layout_layout_constraintDimensionRatio = 11;
public static final int ConstraintLayout_Layout_layout_constraintEnd_toEndOf = 12;
public static final int ConstraintLayout_Layout_layout_constraintEnd_toStartOf = 13;
public static final int ConstraintLayout_Layout_layout_constraintGuide_begin = 14;
public static final int ConstraintLayout_Layout_layout_constraintGuide_end = 15;
public static final int ConstraintLayout_Layout_layout_constraintGuide_percent = 16;
public static final int ConstraintLayout_Layout_layout_constraintHeight_default = 17;
public static final int ConstraintLayout_Layout_layout_constraintHeight_max = 18;
public static final int ConstraintLayout_Layout_layout_constraintHeight_min = 19;
public static final int ConstraintLayout_Layout_layout_constraintHorizontal_bias = 20;
public static final int ConstraintLayout_Layout_layout_constraintHorizontal_chainStyle = 21;
public static final int ConstraintLayout_Layout_layout_constraintHorizontal_weight = 22;
public static final int ConstraintLayout_Layout_layout_constraintLeft_creator = 23;
public static final int ConstraintLayout_Layout_layout_constraintLeft_toLeftOf = 24;
public static final int ConstraintLayout_Layout_layout_constraintLeft_toRightOf = 25;
public static final int ConstraintLayout_Layout_layout_constraintRight_creator = 26;
public static final int ConstraintLayout_Layout_layout_constraintRight_toLeftOf = 27;
public static final int ConstraintLayout_Layout_layout_constraintRight_toRightOf = 28;
public static final int ConstraintLayout_Layout_layout_constraintStart_toEndOf = 29;
public static final int ConstraintLayout_Layout_layout_constraintStart_toStartOf = 30;
public static final int ConstraintLayout_Layout_layout_constraintTop_creator = 31;
public static final int ConstraintLayout_Layout_layout_constraintTop_toBottomOf = 32;
public static final int ConstraintLayout_Layout_layout_constraintTop_toTopOf = 33;
public static final int ConstraintLayout_Layout_layout_constraintVertical_bias = 34;
public static final int ConstraintLayout_Layout_layout_constraintVertical_chainStyle = 35;
public static final int ConstraintLayout_Layout_layout_constraintVertical_weight = 36;
public static final int ConstraintLayout_Layout_layout_constraintWidth_default = 37;
public static final int ConstraintLayout_Layout_layout_constraintWidth_max = 38;
public static final int ConstraintLayout_Layout_layout_constraintWidth_min = 39;
public static final int ConstraintLayout_Layout_layout_editor_absoluteX = 40;
public static final int ConstraintLayout_Layout_layout_editor_absoluteY = 41;
public static final int ConstraintLayout_Layout_layout_goneMarginBottom = 42;
public static final int ConstraintLayout_Layout_layout_goneMarginEnd = 43;
public static final int ConstraintLayout_Layout_layout_goneMarginLeft = 44;
public static final int ConstraintLayout_Layout_layout_goneMarginRight = 45;
public static final int ConstraintLayout_Layout_layout_goneMarginStart = 46;
public static final int ConstraintLayout_Layout_layout_goneMarginTop = 47;
public static final int ConstraintLayout_Layout_layout_optimizationLevel = 48;
// Attr ids readable by ConstraintSet (includes framework android: attrs).
public static final int[] ConstraintSet = { 0x010100c4, 0x010100d0, 0x010100dc, 0x010100f4, 0x010100f5, 0x010100f7, 0x010100f8, 0x010100f9, 0x010100fa, 0x0101031f, 0x01010320, 0x01010321, 0x01010322, 0x01010323, 0x01010324, 0x01010325, 0x01010327, 0x01010328, 0x010103b5, 0x010103b6, 0x010103fa, 0x01010440, 0x7f0300e1, 0x7f0300e2, 0x7f0300e3, 0x7f0300e4, 0x7f0300e5, 0x7f0300e6, 0x7f0300e7, 0x7f0300e8, 0x7f0300e9, 0x7f0300ea, 0x7f0300eb, 0x7f0300ec, 0x7f0300ed, 0x7f0300ee, 0x7f0300ef, 0x7f0300f0, 0x7f0300f1, 0x7f0300f2, 0x7f0300f3, 0x7f0300f4, 0x7f0300f5, 0x7f0300f6, 0x7f0300f7, 0x7f0300f8, 0x7f0300f9, 0x7f0300fa, 0x7f0300fb, 0x7f0300fc, 0x7f0300fd, 0x7f0300fe, 0x7f0300ff, 0x7f030100, 0x7f030101, 0x7f030102, 0x7f030104, 0x7f030105, 0x7f030106, 0x7f030107, 0x7f030108, 0x7f030109, 0x7f03010a, 0x7f03010b };
public static final int ConstraintSet_android_orientation = 0;
public static final int ConstraintSet_android_id = 1;
public static final int ConstraintSet_android_visibility = 2;
public static final int ConstraintSet_android_layout_width = 3;
public static final int ConstraintSet_android_layout_height = 4;
public static final int ConstraintSet_android_layout_marginLeft = 5;
public static final int ConstraintSet_android_layout_marginTop = 6;
public static final int ConstraintSet_android_layout_marginRight = 7;
public static final int ConstraintSet_android_layout_marginBottom = 8;
public static final int ConstraintSet_android_alpha = 9;
public static final int ConstraintSet_android_transformPivotX = 10;
public static final int ConstraintSet_android_transformPivotY = 11;
public static final int ConstraintSet_android_translationX = 12;
public static final int ConstraintSet_android_translationY = 13;
public static final int ConstraintSet_android_scaleX = 14;
public static final int ConstraintSet_android_scaleY = 15;
public static final int ConstraintSet_android_rotationX = 16;
public static final int ConstraintSet_android_rotationY = 17;
public static final int ConstraintSet_android_layout_marginStart = 18;
public static final int ConstraintSet_android_layout_marginEnd = 19;
public static final int ConstraintSet_android_translationZ = 20;
public static final int ConstraintSet_android_elevation = 21;
public static final int ConstraintSet_layout_constraintBaseline_creator = 22;
public static final int ConstraintSet_layout_constraintBaseline_toBaselineOf = 23;
public static final int ConstraintSet_layout_constraintBottom_creator = 24;
public static final int ConstraintSet_layout_constraintBottom_toBottomOf = 25;
public static final int ConstraintSet_layout_constraintBottom_toTopOf = 26;
public static final int ConstraintSet_layout_constraintDimensionRatio = 27;
public static final int ConstraintSet_layout_constraintEnd_toEndOf = 28;
public static final int ConstraintSet_layout_constraintEnd_toStartOf = 29;
public static final int ConstraintSet_layout_constraintGuide_begin = 30;
public static final int ConstraintSet_layout_constraintGuide_end = 31;
public static final int ConstraintSet_layout_constraintGuide_percent = 32;
public static final int ConstraintSet_layout_constraintHeight_default = 33;
public static final int ConstraintSet_layout_constraintHeight_max = 34;
public static final int ConstraintSet_layout_constraintHeight_min = 35;
public static final int ConstraintSet_layout_constraintHorizontal_bias = 36;
public static final int ConstraintSet_layout_constraintHorizontal_chainStyle = 37;
public static final int ConstraintSet_layout_constraintHorizontal_weight = 38;
public static final int ConstraintSet_layout_constraintLeft_creator = 39;
public static final int ConstraintSet_layout_constraintLeft_toLeftOf = 40;
public static final int ConstraintSet_layout_constraintLeft_toRightOf = 41;
public static final int ConstraintSet_layout_constraintRight_creator = 42;
public static final int ConstraintSet_layout_constraintRight_toLeftOf = 43;
public static final int ConstraintSet_layout_constraintRight_toRightOf = 44;
public static final int ConstraintSet_layout_constraintStart_toEndOf = 45;
public static final int ConstraintSet_layout_constraintStart_toStartOf = 46;
public static final int ConstraintSet_layout_constraintTop_creator = 47;
public static final int ConstraintSet_layout_constraintTop_toBottomOf = 48;
public static final int ConstraintSet_layout_constraintTop_toTopOf = 49;
public static final int ConstraintSet_layout_constraintVertical_bias = 50;
public static final int ConstraintSet_layout_constraintVertical_chainStyle = 51;
public static final int ConstraintSet_layout_constraintVertical_weight = 52;
public static final int ConstraintSet_layout_constraintWidth_default = 53;
public static final int ConstraintSet_layout_constraintWidth_max = 54;
public static final int ConstraintSet_layout_constraintWidth_min = 55;
public static final int ConstraintSet_layout_editor_absoluteX = 56;
public static final int ConstraintSet_layout_editor_absoluteY = 57;
public static final int ConstraintSet_layout_goneMarginBottom = 58;
public static final int ConstraintSet_layout_goneMarginEnd = 59;
public static final int ConstraintSet_layout_goneMarginLeft = 60;
public static final int ConstraintSet_layout_goneMarginRight = 61;
public static final int ConstraintSet_layout_goneMarginStart = 62;
public static final int ConstraintSet_layout_goneMarginTop = 63;
// Single android:orientation attr for LinearConstraintLayout.
public static final int[] LinearConstraintLayout = { 0x010100c4 };
public static final int LinearConstraintLayout_android_orientation = 0;
}
}
|
package com.dtstack.taier.develop.vo.develop.result;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
/**
 * Result VO describing the time range of a task's checkpoints.
 * Returned by the develop API; both bounds are epoch timestamps in
 * milliseconds (see the {@code example} values on the fields).
 *
 * @see TaskCheckpointTransfer
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@ToString
public class GetCheckPointTimeRangeResultVO {
// Range start, epoch milliseconds ("开始时间").
@ApiModelProperty(value = "开始时间", example = "1612340806000")
private Long startTime;
// Range end, epoch milliseconds ("结束时间").
@ApiModelProperty(value = "结束时间", example = "1612340806000")
private Long endTime;
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.origin.s3;
import com.amazonaws.AbortedException;
import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.streamsets.pipeline.api.BatchMaker;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.api.base.BaseSource;
import com.streamsets.pipeline.api.impl.Utils;
import com.streamsets.pipeline.lib.io.ObjectLengthException;
import com.streamsets.pipeline.lib.io.OverrunException;
import com.streamsets.pipeline.lib.parser.DataParser;
import com.streamsets.pipeline.lib.parser.DataParserException;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
 * Origin stage that reads objects from an Amazon S3 bucket in the order
 * supplied by the {@link S3Spooler} and parses their contents into records.
 *
 * <p>Progress is tracked through an offset string of the form
 * {@code key::offset::eTag::timestamp} (see {@link S3Offset}). An offset of
 * {@code -1} means "current object fully processed; fetch the next one".
 */
public class AmazonS3Source extends BaseSource {

  private static final Logger LOG = LoggerFactory.getLogger(AmazonS3Source.class);

  private static final String OFFSET_SEPARATOR = "::";
  /** Sentinel offset: the current object has been fully processed. */
  private static final String MINUS_ONE = "-1";
  /** Initial offset for a freshly picked-up object. */
  private static final String ZERO = "0";
  /** Bytes fetched per object in preview mode (1 MiB). */
  private static final long DEFAULT_FETCH_SIZE = 1 * 1024 * 1024;

  private final S3ConfigBean s3ConfigBean;
  private S3Spooler spooler;
  /** Summary of the object currently being processed; null when between objects. */
  private S3ObjectSummary currentObject;
  /** Parser over the current object's content; null when no object is open. */
  private DataParser parser;
  /** The open S3 object backing {@link #parser}; closed together with it. */
  private S3Object object;

  public AmazonS3Source(S3ConfigBean s3ConfigBean) {
    this.s3ConfigBean = s3ConfigBean;
  }

  public S3ObjectSummary getCurrentObject() {
    return currentObject;
  }

  public void setCurrentObject(S3ObjectSummary currentObject) {
    this.currentObject = currentObject;
  }

  @Override
  protected List<ConfigIssue> init() {
    List<ConfigIssue> issues = super.init();
    // init configuration
    s3ConfigBean.init(getContext(), issues);
    // preview settings: don't block for long waiting on new objects
    if (getContext().isPreview()) {
      s3ConfigBean.basicConfig.maxWaitTime = 1000;
    }
    // init spooler only when the configuration validated cleanly
    if (issues.isEmpty()) {
      spooler = new S3Spooler(getContext(), s3ConfigBean);
      spooler.init();
    }
    return issues;
  }

  @Override
  public void destroy() {
    IOUtils.closeQuietly(parser);
    s3ConfigBean.destroy();
    if (spooler != null) {
      spooler.destroy();
    }
    super.destroy();
  }

  /**
   * Produces one batch of records, resuming from {@code lastSourceOffset}.
   *
   * @return the new offset string to be committed for this batch
   * @throws StageException on unrecoverable S3 or parsing errors
   */
  @Override
  public String produce(String lastSourceOffset, int maxBatchSize, BatchMaker batchMaker) throws StageException {
    int batchSize = Math.min(s3ConfigBean.basicConfig.maxBatchSize, maxBatchSize);
    // parse offset string into S3Offset data structure
    S3Offset s3Offset = S3Offset.fromString(lastSourceOffset);
    spooler.postProcessOlderObjectIfNeeded(s3Offset);
    // check if we have an object to produce records from. Otherwise get from spooler.
    if (needToFetchNextObjectFromSpooler(s3Offset)) {
      s3Offset = fetchNextObjectFromSpooler(s3Offset);
      LOG.debug("Object '{}' with offset '{}' fetched from Spooler", s3Offset.getKey(), s3Offset.getOffset());
    } else {
      // check if the current object was modified between batches (ETag change)
      LOG.debug("Checking if Object '{}' has been modified between batches", getCurrentObject().getKey());
      if (!getCurrentObject().getETag().equals(s3Offset.geteTag())) {
        // send the current object to error archive and get next object from spooler
        LOG.debug("Object '{}' has been modified between batches. Sending the object to error",
            getCurrentObject().getKey());
        try {
          spooler.handleCurrentObjectAsError();
        } catch (AmazonClientException e) {
          throw new StageException(Errors.S3_SPOOLDIR_24, e.toString(), e);
        }
        s3Offset = fetchNextObjectFromSpooler(s3Offset);
      }
    }
    if (getCurrentObject() != null) {
      try {
        // we ask for a batch from the currentObject starting at offset
        s3Offset.setOffset(produce(getCurrentObject(), s3Offset.getOffset(), batchSize, batchMaker));
      } catch (BadSpoolObjectException ex) {
        LOG.error(Errors.S3_SPOOLDIR_01.getMessage(), ex.getObject(), ex.getPos(), ex.toString(), ex);
        getContext().reportError(Errors.S3_SPOOLDIR_01, ex.getObject(), ex.getPos(), ex.toString());
        try {
          spooler.handleCurrentObjectAsError();
        } catch (AmazonClientException e) {
          throw new StageException(Errors.S3_SPOOLDIR_24, e.toString(), e);
        }
        // we set the offset to -1 to indicate we are done with the current object and we should fetch a new one
        // from the spooler
        s3Offset.setOffset(MINUS_ONE);
      }
    }
    return s3Offset.toString();
  }

  /**
   * Polls the spooler for the next eligible object and resets the offset to
   * zero when a newer object than the tracked one is returned.
   */
  private S3Offset fetchNextObjectFromSpooler(S3Offset s3Offset) throws StageException {
    setCurrentObject(null);
    try {
      // The next object found in queue is mostly eligible since we process objects in chronological order.
      // However after processing a few files, if the configuration is changed [say relax the prefix] and an older file
      // gets selected for processing, it must be ignored.
      S3ObjectSummary nextAvailObj = null;
      do {
        if (nextAvailObj != null) {
          LOG.warn("Ignoring object '{}' in spool directory as is lesser than offset object '{}'",
              nextAvailObj.getKey(), s3Offset.getKey());
        }
        nextAvailObj = spooler.poolForObject(s3Offset, s3ConfigBean.basicConfig.maxWaitTime, TimeUnit.MILLISECONDS);
      } while (!isEligible(nextAvailObj, s3Offset));
      if (nextAvailObj == null) {
        // no object to process
        LOG.debug("No new object available in spool directory after '{}' secs, producing empty batch",
            s3ConfigBean.basicConfig.maxWaitTime / 1000);
      } else {
        setCurrentObject(nextAvailObj);
        // if the current offset object is null or the object returned by the spooler is greater than the current offset
        // object we take the object returned by the spooler as the new object and set the offset to zero.
        // if not, it means the spooler returned us the current object, we just keep processing it from the last
        // offset we processed (known via offset tracking)
        if (s3Offset.getKey() == null ||
            isLaterThan(nextAvailObj.getKey(), nextAvailObj.getLastModified().getTime(), s3Offset.getKey(),
                Long.parseLong(s3Offset.getTimestamp()))) {
          s3Offset = new S3Offset(getCurrentObject().getKey(), ZERO, getCurrentObject().getETag(),
              String.valueOf(getCurrentObject().getLastModified().getTime()));
        }
      }
    } catch (InterruptedException ex) {
      // the spooler was interrupted while waiting for an object, we log and return, the pipeline agent will invoke us
      // again to wait for an object again
      LOG.warn("Pooling interrupted");
    } catch (AmazonClientException e) {
      // FIX: propagate the cause so the stack trace is not lost (was e.toString() only)
      throw new StageException(Errors.S3_SPOOLDIR_23, e.toString(), e);
    }
    return s3Offset;
  }

  /**
   * Reads up to {@code maxBatchSize} records from {@code s3Object} starting at
   * {@code offset}, adding them to {@code batchMaker}.
   *
   * @return the new offset within the object, or {@code -1} when the object is done
   */
  public String produce(S3ObjectSummary s3Object, String offset, int maxBatchSize, BatchMaker batchMaker) throws StageException,
      BadSpoolObjectException {
    try {
      if (parser == null) {
        // Get S3 object instead of stream because we want to call close on the object when we close the
        // parser (and stream)
        if (getContext().isPreview()) {
          // preview only needs the head of the object; cap the range at DEFAULT_FETCH_SIZE
          long fetchSize = s3Object.getSize() > DEFAULT_FETCH_SIZE ? DEFAULT_FETCH_SIZE : s3Object.getSize();
          if (fetchSize > 0) {
            object = AmazonS3Util.getObjectRange(s3ConfigBean.s3Config.getS3Client(), s3ConfigBean.s3Config.bucket,
                s3Object.getKey(), fetchSize);
          } else {
            LOG.warn("Size of object with key '{}' is 0", s3Object.getKey());
            object = AmazonS3Util.getObject(s3ConfigBean.s3Config.getS3Client(), s3ConfigBean.s3Config.bucket,
                s3Object.getKey());
          }
        } else {
          object = AmazonS3Util.getObject(s3ConfigBean.s3Config.getS3Client(), s3ConfigBean.s3Config.bucket,
              s3Object.getKey());
        }
        String recordId = s3ConfigBean.s3Config.bucket + s3ConfigBean.s3Config.delimiter + s3Object.getKey();
        parser = s3ConfigBean.dataFormatConfig.getParserFactory().getParser(recordId, object.getObjectContent(),
            Long.parseLong(offset));
        // we don't use S3 GetObject range capabilities to skip the already process offset because the parsers cannot
        // pick up from a non root doc depth in the case of a single object with records.
      }
      for (int i = 0; i < maxBatchSize; i++) {
        try {
          Record record = parser.parse();
          if (record != null) {
            batchMaker.addRecord(record);
            offset = parser.getOffset();
          } else {
            // end of object: release parser and object, signal completion with -1
            parser.close();
            parser = null;
            object.close();
            object = null;
            offset = MINUS_ONE;
            break;
          }
        } catch (ObjectLengthException ex) {
          String exOffset = offset;
          offset = MINUS_ONE;
          switch (getContext().getOnErrorRecord()) {
            case DISCARD:
              break;
            case TO_ERROR:
              getContext().reportError(Errors.S3_SPOOLDIR_02, s3Object.getKey(), exOffset);
              break;
            case STOP_PIPELINE:
              throw new StageException(Errors.S3_SPOOLDIR_02, s3Object.getKey(), exOffset);
            default:
              throw new IllegalStateException(Utils.format("It should never happen. OnError '{}'",
                  getContext().getOnErrorRecord(), ex));
          }
        }
      }
    } catch (AmazonClientException e) {
      LOG.error("Error processing object with key '{}' offset '{}'", s3Object.getKey(), offset);
      // FIX: propagate the cause so the stack trace is not lost (was e.toString() only)
      throw new StageException(Errors.S3_SPOOLDIR_25, e.toString(), e);
    } catch (IOException | DataParserException ex) {
      if (ex.getCause() instanceof AbortedException) {
        // If the pipeline was stopped, the amazon s3 client thread catches the interrupt and throws aborted exception
        // do not treat this as an error. Instead produce what ever you have and move on.
      } else {
        offset = MINUS_ONE;
        String exOffset;
        if (ex instanceof OverrunException) {
          exOffset = String.valueOf(((OverrunException) ex).getStreamOffset());
        } else {
          try {
            exOffset = (parser != null) ? parser.getOffset() : MINUS_ONE;
          } catch (IOException ex1) {
            LOG.warn("Could not get the object offset to report with error, reason: {}", ex1.toString(), ex);
            exOffset = MINUS_ONE;
          }
        }
        switch (getContext().getOnErrorRecord()) {
          case DISCARD:
            break;
          case TO_ERROR:
            throw new BadSpoolObjectException(s3Object.getKey(), exOffset, ex);
          case STOP_PIPELINE:
            getContext().reportError(Errors.S3_SPOOLDIR_03, s3Object.getKey(), exOffset, ex.toString());
            throw new StageException(Errors.S3_SPOOLDIR_03, s3Object.getKey(), exOffset, ex.toString(), ex);
          default:
            throw new IllegalStateException(Utils.format("It should never happen. OnError '{}'",
                getContext().getOnErrorRecord(), ex));
        }
      }
    } finally {
      // whenever the object is done (or errored out), release the parser and
      // the underlying S3 object so we don't leak connections
      if (MINUS_ONE.equals(offset)) {
        if (parser != null) {
          try {
            parser.close();
            parser = null;
          } catch (IOException ex) {
            LOG.debug("Exception while closing parser : '{}'", ex.toString());
          }
        }
        if (object != null) {
          try {
            object.close();
            object = null;
          } catch (IOException ex) {
            LOG.debug("Exception while closing S3 object : '{}'", ex.toString());
          }
        }
      }
    }
    return offset;
  }

  private boolean needToFetchNextObjectFromSpooler(S3Offset s3Offset) {
    return
        // we don't have an object half way processed in the current agent execution
        getCurrentObject() == null ||
        // we don't have an object half way processed from a previous agent execution via offset tracking
        s3Offset.getKey() == null ||
        // the current object has been fully processed
        MINUS_ONE.equals(s3Offset.getOffset());
  }

  /** An object is eligible when it is not older than the tracked offset's timestamp. */
  private boolean isEligible(S3ObjectSummary nextAvailObj, S3Offset s3Offset) {
    return (nextAvailObj == null) ||
        (nextAvailObj.getLastModified().getTime() >= Long.parseLong(s3Offset.getTimestamp()));
  }

  /** Orders objects by last-modified time, breaking ties by key. */
  private boolean isLaterThan(String nextKey, long nextTimeStamp, String originalKey, long originalTimestamp) {
    return (nextTimeStamp > originalTimestamp) ||
        (nextTimeStamp == originalTimestamp && nextKey.compareTo(originalKey) > 0);
  }

  /**
   * Immutable-except-offset value object serialized as
   * {@code key::offset::eTag::timestamp}.
   */
  static class S3Offset {
    private final String key;
    private final String eTag;
    private String offset;
    private final String timestamp;

    public S3Offset(String key, String offset, String eTag, String timestamp) {
      this.key = key;
      this.offset = offset;
      this.eTag = eTag;
      this.timestamp = timestamp;
    }

    public String getKey() {
      return key;
    }

    public String geteTag() {
      return eTag;
    }

    public String getOffset() {
      return offset;
    }

    public String getTimestamp() {
      return timestamp;
    }

    public void setOffset(String offset) {
      this.offset = offset;
    }

    @Override
    public String toString() {
      return key + OFFSET_SEPARATOR + offset + OFFSET_SEPARATOR + eTag + OFFSET_SEPARATOR + timestamp;
    }

    /**
     * Parses an offset string; a null input yields a fresh "start from the
     * beginning" offset.
     *
     * @throws StageException if the string does not have exactly 4 fields
     */
    public static S3Offset fromString(String lastSourceOffset) throws StageException {
      if (lastSourceOffset != null) {
        String[] split = lastSourceOffset.split(OFFSET_SEPARATOR);
        if (split.length == 4) {
          return new S3Offset(split[0], split[1], split[2], split[3]);
        } else {
          throw new StageException(Errors.S3_SPOOLDIR_21, lastSourceOffset);
        }
      }
      return new S3Offset(null, ZERO, null, ZERO);
    }
  }
}
|
package havis.llrpservice.server.osgi;
import havis.device.io.IODevice;
import havis.device.rf.RFDevice;
import havis.llrpservice.sbc.service.OSGiServiceFactory;
import havis.llrpservice.sbc.service.ServiceFactory;
import havis.llrpservice.server.service.LLRPServiceManager;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.logging.Level;
import java.util.logging.Logger;
import mockit.Deencapsulation;
import mockit.Delegate;
import mockit.Mocked;
import mockit.NonStrictExpectations;
import mockit.Verifications;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for the OSGi {@code Activator} using JMockit to mock the bundle
 * context, the activator's logger and the {@code LLRPServiceManager}.
 */
public class ActivatorTest {
// Resource directory derived from this test's package name.
private Path BASE_RESOURCE_PATH = Paths.get(ActivatorTest.class
.getPackage().getName().replace('.', '/'));
// Verifies the happy path: bundle start creates and runs the service
// manager with the config base path from bundle.properties, and bundle
// stop shuts it down.
@SuppressWarnings("unchecked")
@Test
public void start(@Mocked final BundleContext bundleContext,
@Mocked final Logger log,
@Mocked final LLRPServiceManager llrpServiceManager)
throws Exception {
// Mutable holder so the mock delegates below can return values that the
// test changes after the expectations are recorded.
class Data {
URL url;
String path;
}
final Data data = new Data();
new NonStrictExpectations() {
{
bundleContext.getBundle().getResource("bundle.properties");
result = new Delegate<Bundle>() {
@SuppressWarnings("unused")
URL getResource(String name) {
return data.url;
}
};
bundleContext.getProperty("havis.llrpservice.config.base.path");
result = new Delegate<BundleContext>() {
@SuppressWarnings("unused")
String getProperty(String key) {
return data.path;
}
};
log.isLoggable(Level.INFO);
result = true;
}
};
// Swap in the mocked logger; restored at the end of the test.
Logger origLog = Deencapsulation.getField(Activator.class, "log");
Deencapsulation.setField(Activator.class, "log", log);
// start the bundle with the required property
// "havis.llrpservice.config.base.path" at both possible locations
Activator activator = new Activator() {
@Override
Path adjust2env(Path path) {
// directly use the relative path starting at the class path
return path;
}
};
data.url = getClass().getClassLoader().getResource(
BASE_RESOURCE_PATH.resolve("bundle.properties").toString());
data.path = "huhu";
activator.start(bundleContext);
new Verifications() {
{
// the LLRP service manager is created with the config base path
// from bundle.properties
String path;
ServiceFactory<RFDevice> rfcServiceFactory;
ServiceFactory<IODevice> gpioServiceFactory;
new LLRPServiceManager(path = withCapture(),
withInstanceOf(OSGiServiceFactory.class),
rfcServiceFactory = withCapture(),
gpioServiceFactory = withCapture());
times = 1;
Assert.assertEquals(path, "conf/havis-llrpservice");
Assert.assertNotNull(rfcServiceFactory);
Assert.assertNotNull(gpioServiceFactory);
// the LLRP service is started and stopped
llrpServiceManager.run();
times = 1;
}
};
// stop the bundle
activator.stop(bundleContext);
new Verifications() {
{
// the LLRP service is stopped
llrpServiceManager.stop();
times = 1;
}
};
Deencapsulation.setField(Activator.class, "log", origLog);
}
// Verifies that starting without the required config property fails with
// MissingPropertyException, and that supplying the property via the OSGi
// container afterwards allows the bundle to start.
@Test
public void startError(@Mocked final BundleContext bundleContext,
@Mocked LLRPServiceManager llrpServerManager) throws Exception {
// Mutable holder for values returned by the mock delegates (see above).
class Data {
URL url;
String path;
}
final Data data = new Data();
new NonStrictExpectations() {
{
bundleContext.getBundle().getResource("bundle.properties");
result = new Delegate<Bundle>() {
@SuppressWarnings("unused")
URL getResource(String name) {
return data.url;
}
};
bundleContext.getProperty("havis.llrpservice.config.base.path");
result = new Delegate<BundleContext>() {
@SuppressWarnings("unused")
String getProperty(String key) {
return data.path;
}
};
}
};
// start the bundle without the required property
// "havis.llrpservice.config.base.path"
Activator activator = new Activator() {
@Override
Path adjust2env(Path path) {
// directly use the relative path starting at the class path
return path;
}
};
data.url = getClass().getClassLoader().getResource(
BASE_RESOURCE_PATH.resolve("bundleMissingProp.properties")
.toString());
try {
activator.start(bundleContext);
Assert.fail();
} catch (MissingPropertyException e) {
Assert.assertTrue(e.getMessage().contains(
"havis.llrpservice.config.base.path"));
}
// set the config.base.path to the properties provided by the OSGi
// container
data.path = "huhu";
activator.start(bundleContext);
}
}
|
package priv.jesse.mall.web.user;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import priv.jesse.mall.entity.User;
import priv.jesse.mall.entity.pojo.ResultBean;
import priv.jesse.mall.service.UserService;
import priv.jesse.mall.service.exception.LoginException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.List;
@Controller
@RequestMapping("/mall/user")
public class UserController {
@Autowired
private UserService userService;
/**
* 打开注册页面
*
* @return
*/
@RequestMapping("/toRegister.html")
public String toRegister() {
return "mall/user/register";
}
/**
* 打开登录页面
*
* @return
*/
@RequestMapping("/toLogin.html")
public String toLogin() {
System.out.println("---------------------login-------------------------");
return "mall/user/login";
}
/**
* 登录
*
* @param username
* @param password
*/
@RequestMapping("/login.do")
public void login(String username,
String password,
HttpServletRequest request,
HttpServletResponse response) throws IOException {
User user = userService.checkLogin(username, password);
if (user != null) {
//登录成功 重定向到首页
request.getSession().setAttribute("user", user);
response.sendRedirect("/index.html");
} else {
throw new LoginException("登录失败! 用户名或者密码错误");
}
}
/**
* 注册
*/
@RequestMapping("/register.do")
public void register(String username,
String password,
String name,
String phone,
String email,
String addr,
HttpServletResponse response) throws IOException {
User user = new User();
user.setUsername(username);
user.setPhone(phone);
user.setPassword(password);
user.setName(name);
user.setEmail(email);
user.setAddr(addr);
userService.create(user);
// 注册完成后重定向到登录页面
response.sendRedirect("/mall/user/toLogin.html");
}
/**
* 登出
*/
@RequestMapping("/logout.do")
public void logout(HttpServletRequest request, HttpServletResponse response) throws IOException {
request.getSession().removeAttribute("user");
response.sendRedirect("/mall/index.html");
}
/**
* 验证用户名是否唯一
* @param username
* @return
*/
@ResponseBody
@RequestMapping("/checkUsername.do")
public ResultBean<Boolean> checkUsername(String username){
List<User> users = userService.findByUsername(username);
if (users==null||users.isEmpty()){
return new ResultBean<>(true);
}
return new ResultBean<>(false);
}
/**
* 如发生错误 转发到这页面
*
* @param response
* @param request
* @return
*/
@RequestMapping("/error.html")
public String error(HttpServletResponse response, HttpServletRequest request) {
return "error";
}
}
|
// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
import io.kaitai.struct.ByteBufferKaitaiStream;
import io.kaitai.struct.KaitaiStruct;
import io.kaitai.struct.KaitaiStream;
import java.io.IOException;
import java.util.Map;
import java.util.HashMap;
import java.util.Arrays;
import java.util.ArrayList;
import java.nio.charset.Charset;
/**
* Allegro library for C (mostly used for game and multimedia apps
* programming) used its own container file format.
*
* In general, it allows storage of arbitrary binary data blocks
* bundled together with some simple key-value style metadata
* ("properties") for every block. Allegro also pre-defines some simple
* formats for bitmaps, fonts, MIDI music, sound samples and
* palettes. Allegro library v4.0+ also support LZSS compression.
*
* This spec applies to Allegro data files for library versions 2.2 up
* to 4.4.
* @see <a href="https://liballeg.org/stabledocs/en/datafile.html">Source</a>
*/
public class AllegroDat extends KaitaiStruct {
/** Convenience factory: opens the given file and parses it as an Allegro datafile. */
public static AllegroDat fromFile(String fileName) throws IOException {
return new AllegroDat(new ByteBufferKaitaiStream(fileName));
}
/**
 * "Pack" magic at the start of the file. 1936484398 is the ASCII bytes
 * "slh." read as a big-endian u4; the only value modeled here marks an
 * unpacked (uncompressed) datafile.
 */
public enum PackEnum {
UNPACKED(1936484398);
private final long id;
PackEnum(long id) { this.id = id; }
public long id() { return id; }
// Reverse lookup from numeric id to enum constant.
private static final Map<Long, PackEnum> byId = new HashMap<Long, PackEnum>(1);
static {
for (PackEnum e : PackEnum.values())
byId.put(e.id(), e);
}
public static PackEnum byId(long id) { return byId.get(id); }
}
// Parse from a stream with no parent (this object becomes the root).
public AllegroDat(KaitaiStream _io) {
this(_io, null, null);
}
public AllegroDat(KaitaiStream _io, KaitaiStruct _parent) {
this(_io, _parent, null);
}
public AllegroDat(KaitaiStream _io, KaitaiStruct _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root == null ? this : _root;
_read();
}
private void _read() {
this.packMagic = PackEnum.byId(this._io.readU4be());
this.datMagic = this._io.readBytes(4);
if (!(Arrays.equals(datMagic(), new byte[] { 65, 76, 76, 46 }))) {
throw new KaitaiStream.ValidationNotEqualError(new byte[] { 65, 76, 76, 46 }, datMagic(), _io(), "/seq/1");
}
this.numObjects = this._io.readU4be();
objects = new ArrayList<DatObject>(((Number) (numObjects())).intValue());
for (int i = 0; i < numObjects(); i++) {
this.objects.add(new DatObject(this._io, this, _root));
}
}
/**
* Simple monochrome monospaced font, 95 characters, 8x16 px
* characters.
*/
public static class DatFont16 extends KaitaiStruct {
public static DatFont16 fromFile(String fileName) throws IOException {
return new DatFont16(new ByteBufferKaitaiStream(fileName));
}
public DatFont16(KaitaiStream _io) {
this(_io, null, null);
}
public DatFont16(KaitaiStream _io, AllegroDat.DatFont _parent) {
this(_io, _parent, null);
}
public DatFont16(KaitaiStream _io, AllegroDat.DatFont _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
chars = new ArrayList<byte[]>(((Number) (95)).intValue());
for (int i = 0; i < 95; i++) {
this.chars.add(this._io.readBytes(16));
}
}
private ArrayList<byte[]> chars;
private AllegroDat _root;
private AllegroDat.DatFont _parent;
public ArrayList<byte[]> chars() { return chars; }
public AllegroDat _root() { return _root; }
public AllegroDat.DatFont _parent() { return _parent; }
}
public static class DatBitmap extends KaitaiStruct {
public static DatBitmap fromFile(String fileName) throws IOException {
return new DatBitmap(new ByteBufferKaitaiStream(fileName));
}
public DatBitmap(KaitaiStream _io) {
this(_io, null, null);
}
public DatBitmap(KaitaiStream _io, AllegroDat.DatObject _parent) {
this(_io, _parent, null);
}
public DatBitmap(KaitaiStream _io, AllegroDat.DatObject _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
this.bitsPerPixel = this._io.readS2be();
this.width = this._io.readU2be();
this.height = this._io.readU2be();
this.image = this._io.readBytesFull();
}
private short bitsPerPixel;
private int width;
private int height;
private byte[] image;
private AllegroDat _root;
private AllegroDat.DatObject _parent;
public short bitsPerPixel() { return bitsPerPixel; }
public int width() { return width; }
public int height() { return height; }
public byte[] image() { return image; }
public AllegroDat _root() { return _root; }
public AllegroDat.DatObject _parent() { return _parent; }
}
public static class DatFont extends KaitaiStruct {
public static DatFont fromFile(String fileName) throws IOException {
return new DatFont(new ByteBufferKaitaiStream(fileName));
}
public DatFont(KaitaiStream _io) {
this(_io, null, null);
}
public DatFont(KaitaiStream _io, AllegroDat.DatObject _parent) {
this(_io, _parent, null);
}
public DatFont(KaitaiStream _io, AllegroDat.DatObject _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
this.fontSize = this._io.readS2be();
switch (fontSize()) {
case 8: {
this.body = new DatFont8(this._io, this, _root);
break;
}
case 16: {
this.body = new DatFont16(this._io, this, _root);
break;
}
case 0: {
this.body = new DatFont39(this._io, this, _root);
break;
}
}
}
private short fontSize;
private KaitaiStruct body;
private AllegroDat _root;
private AllegroDat.DatObject _parent;
public short fontSize() { return fontSize; }
public KaitaiStruct body() { return body; }
public AllegroDat _root() { return _root; }
public AllegroDat.DatObject _parent() { return _parent; }
}
/**
* Simple monochrome monospaced font, 95 characters, 8x8 px
* characters.
*/
public static class DatFont8 extends KaitaiStruct {
public static DatFont8 fromFile(String fileName) throws IOException {
return new DatFont8(new ByteBufferKaitaiStream(fileName));
}
public DatFont8(KaitaiStream _io) {
this(_io, null, null);
}
public DatFont8(KaitaiStream _io, AllegroDat.DatFont _parent) {
this(_io, _parent, null);
}
public DatFont8(KaitaiStream _io, AllegroDat.DatFont _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
chars = new ArrayList<byte[]>(((Number) (95)).intValue());
for (int i = 0; i < 95; i++) {
this.chars.add(this._io.readBytes(8));
}
}
private ArrayList<byte[]> chars;
private AllegroDat _root;
private AllegroDat.DatFont _parent;
public ArrayList<byte[]> chars() { return chars; }
public AllegroDat _root() { return _root; }
public AllegroDat.DatFont _parent() { return _parent; }
}
public static class DatObject extends KaitaiStruct {
public static DatObject fromFile(String fileName) throws IOException {
return new DatObject(new ByteBufferKaitaiStream(fileName));
}
public DatObject(KaitaiStream _io) {
this(_io, null, null);
}
public DatObject(KaitaiStream _io, AllegroDat _parent) {
this(_io, _parent, null);
}
public DatObject(KaitaiStream _io, AllegroDat _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
this.properties = new ArrayList<Property>();
{
Property _it;
int i = 0;
do {
_it = new Property(this._io, this, _root);
this.properties.add(_it);
i++;
} while (!(!(_it.isValid())));
}
this.lenCompressed = this._io.readS4be();
this.lenUncompressed = this._io.readS4be();
switch (type()) {
case "BMP ": {
this._raw_body = this._io.readBytes(lenCompressed());
KaitaiStream _io__raw_body = new ByteBufferKaitaiStream(_raw_body);
this.body = new DatBitmap(_io__raw_body, this, _root);
break;
}
case "RLE ": {
this._raw_body = this._io.readBytes(lenCompressed());
KaitaiStream _io__raw_body = new ByteBufferKaitaiStream(_raw_body);
this.body = new DatRleSprite(_io__raw_body, this, _root);
break;
}
case "FONT": {
this._raw_body = this._io.readBytes(lenCompressed());
KaitaiStream _io__raw_body = new ByteBufferKaitaiStream(_raw_body);
this.body = new DatFont(_io__raw_body, this, _root);
break;
}
default: {
this.body = this._io.readBytes(lenCompressed());
break;
}
}
}
private String type;
public String type() {
if (this.type != null)
return this.type;
this.type = properties().get(properties().size() - 1).magic();
return this.type;
}
private ArrayList<Property> properties;
private int lenCompressed;
private int lenUncompressed;
private Object body;
private AllegroDat _root;
private AllegroDat _parent;
private byte[] _raw_body;
public ArrayList<Property> properties() { return properties; }
public int lenCompressed() { return lenCompressed; }
public int lenUncompressed() { return lenUncompressed; }
public Object body() { return body; }
public AllegroDat _root() { return _root; }
public AllegroDat _parent() { return _parent; }
public byte[] _raw_body() { return _raw_body; }
}
/**
* New bitmap font format introduced since Allegro 3.9: allows
* flexible designation of character ranges, 8-bit colored
* characters, etc.
*/
public static class DatFont39 extends KaitaiStruct {
public static DatFont39 fromFile(String fileName) throws IOException {
return new DatFont39(new ByteBufferKaitaiStream(fileName));
}
public DatFont39(KaitaiStream _io) {
this(_io, null, null);
}
public DatFont39(KaitaiStream _io, AllegroDat.DatFont _parent) {
this(_io, _parent, null);
}
public DatFont39(KaitaiStream _io, AllegroDat.DatFont _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
this.numRanges = this._io.readS2be();
ranges = new ArrayList<Range>(((Number) (numRanges())).intValue());
for (int i = 0; i < numRanges(); i++) {
this.ranges.add(new Range(this._io, this, _root));
}
}
public static class Range extends KaitaiStruct {
public static Range fromFile(String fileName) throws IOException {
return new Range(new ByteBufferKaitaiStream(fileName));
}
public Range(KaitaiStream _io) {
this(_io, null, null);
}
public Range(KaitaiStream _io, AllegroDat.DatFont39 _parent) {
this(_io, _parent, null);
}
public Range(KaitaiStream _io, AllegroDat.DatFont39 _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
this.mono = this._io.readU1();
this.startChar = this._io.readU4be();
this.endChar = this._io.readU4be();
chars = new ArrayList<FontChar>(((Number) (((endChar() - startChar()) + 1))).intValue());
for (int i = 0; i < ((endChar() - startChar()) + 1); i++) {
this.chars.add(new FontChar(this._io, this, _root));
}
}
private int mono;
private long startChar;
private long endChar;
private ArrayList<FontChar> chars;
private AllegroDat _root;
private AllegroDat.DatFont39 _parent;
public int mono() { return mono; }
/**
* First character in range
*/
public long startChar() { return startChar; }
/**
* Last character in range (inclusive)
*/
public long endChar() { return endChar; }
public ArrayList<FontChar> chars() { return chars; }
public AllegroDat _root() { return _root; }
public AllegroDat.DatFont39 _parent() { return _parent; }
}
public static class FontChar extends KaitaiStruct {
public static FontChar fromFile(String fileName) throws IOException {
return new FontChar(new ByteBufferKaitaiStream(fileName));
}
public FontChar(KaitaiStream _io) {
this(_io, null, null);
}
public FontChar(KaitaiStream _io, AllegroDat.DatFont39.Range _parent) {
this(_io, _parent, null);
}
public FontChar(KaitaiStream _io, AllegroDat.DatFont39.Range _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
this.width = this._io.readU2be();
this.height = this._io.readU2be();
this.body = this._io.readBytes((width() * height()));
}
private int width;
private int height;
private byte[] body;
private AllegroDat _root;
private AllegroDat.DatFont39.Range _parent;
public int width() { return width; }
public int height() { return height; }
public byte[] body() { return body; }
public AllegroDat _root() { return _root; }
public AllegroDat.DatFont39.Range _parent() { return _parent; }
}
private short numRanges;
private ArrayList<Range> ranges;
private AllegroDat _root;
private AllegroDat.DatFont _parent;
public short numRanges() { return numRanges; }
public ArrayList<Range> ranges() { return ranges; }
public AllegroDat _root() { return _root; }
public AllegroDat.DatFont _parent() { return _parent; }
}
public static class Property extends KaitaiStruct {
public static Property fromFile(String fileName) throws IOException {
return new Property(new ByteBufferKaitaiStream(fileName));
}
public Property(KaitaiStream _io) {
this(_io, null, null);
}
public Property(KaitaiStream _io, AllegroDat.DatObject _parent) {
this(_io, _parent, null);
}
public Property(KaitaiStream _io, AllegroDat.DatObject _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
this.magic = new String(this._io.readBytes(4), Charset.forName("UTF-8"));
if (isValid()) {
this.type = new String(this._io.readBytes(4), Charset.forName("UTF-8"));
}
if (isValid()) {
this.lenBody = this._io.readU4be();
}
if (isValid()) {
this.body = new String(this._io.readBytes(lenBody()), Charset.forName("UTF-8"));
}
}
private Boolean isValid;
public Boolean isValid() {
if (this.isValid != null)
return this.isValid;
boolean _tmp = (boolean) (magic().equals("prop"));
this.isValid = _tmp;
return this.isValid;
}
private String magic;
private String type;
private Long lenBody;
private String body;
private AllegroDat _root;
private AllegroDat.DatObject _parent;
public String magic() { return magic; }
public String type() { return type; }
public Long lenBody() { return lenBody; }
public String body() { return body; }
public AllegroDat _root() { return _root; }
public AllegroDat.DatObject _parent() { return _parent; }
}
public static class DatRleSprite extends KaitaiStruct {
public static DatRleSprite fromFile(String fileName) throws IOException {
return new DatRleSprite(new ByteBufferKaitaiStream(fileName));
}
public DatRleSprite(KaitaiStream _io) {
this(_io, null, null);
}
public DatRleSprite(KaitaiStream _io, AllegroDat.DatObject _parent) {
this(_io, _parent, null);
}
public DatRleSprite(KaitaiStream _io, AllegroDat.DatObject _parent, AllegroDat _root) {
super(_io);
this._parent = _parent;
this._root = _root;
_read();
}
private void _read() {
this.bitsPerPixel = this._io.readS2be();
this.width = this._io.readU2be();
this.height = this._io.readU2be();
this.lenImage = this._io.readU4be();
this.image = this._io.readBytesFull();
}
private short bitsPerPixel;
private int width;
private int height;
private long lenImage;
private byte[] image;
private AllegroDat _root;
private AllegroDat.DatObject _parent;
public short bitsPerPixel() { return bitsPerPixel; }
public int width() { return width; }
public int height() { return height; }
public long lenImage() { return lenImage; }
public byte[] image() { return image; }
public AllegroDat _root() { return _root; }
public AllegroDat.DatObject _parent() { return _parent; }
}
private PackEnum packMagic;
private byte[] datMagic;
private long numObjects;
private ArrayList<DatObject> objects;
private AllegroDat _root;
private KaitaiStruct _parent;
public PackEnum packMagic() { return packMagic; }
public byte[] datMagic() { return datMagic; }
public long numObjects() { return numObjects; }
public ArrayList<DatObject> objects() { return objects; }
public AllegroDat _root() { return _root; }
public KaitaiStruct _parent() { return _parent; }
}
|
/**
*
*/
package de.chaosbutterfly.smcombat.client.vaadin.window;
import org.vaadin.dialogs.ConfirmDialog;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.Component;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.UI;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.Window;
/**
 * Base class for modal edit dialogs with a Save and a Cancel button.
 *
 * <p>Subclasses provide the edit form via {@link #provideEditComponent()} and
 * may attach additional save behavior via {@link #provideSaveButtonListener()}.
 * After the dialog closes, {@link #getResult()} reports whether the user saved
 * ({@link #RESULT_SAVE}) or cancelled ({@link #RESULT_CANCEL}).
 *
 * @author alters
 */
public abstract class BaseEditDialog extends Window {

    private static final long serialVersionUID = 1L;

    /** Result value before the dialog has been closed. */
    public static final int RESULT_NOT_SET = -1;
    /** Result value when the user cancelled the dialog. */
    public static final int RESULT_CANCEL = 0;
    /** Result value when the user saved the dialog. */
    public static final int RESULT_SAVE = 1;

    private int result = RESULT_NOT_SET;

    protected Button cancelButton;
    protected Button saveButton;
    protected String caption;

    public BaseEditDialog(String caption) {
        this.caption = caption;
        setCaption(caption);
        VerticalLayout content = new VerticalLayout();
        setContent(content);
        // Center it in the browser window
        center();
        // NOTE: template-method call from the constructor — subclass
        // provideEditComponent() must not rely on subclass field initialization.
        content.addComponent(provideEditComponent());
        HorizontalLayout buttonsHLO = new HorizontalLayout();
        buttonsHLO.setMargin(true);
        cancelButton = new Button("Cancel");
        cancelButton.addClickListener(new CancelButtonClicklistener());
        saveButton = new Button("Save");
        // Optional subclass listener runs in addition to the default
        // close-with-RESULT_SAVE behavior below.
        ClickListener saveButtonListener = provideSaveButtonListener();
        if (saveButtonListener != null) {
            saveButton.addClickListener(saveButtonListener);
        }
        saveButton.addClickListener(new ClickListener() {
            private static final long serialVersionUID = 1L;

            @Override
            public void buttonClick(ClickEvent event) {
                result = RESULT_SAVE;
                close();
            }
        });
        buttonsHLO.addComponent(cancelButton);
        buttonsHLO.addComponent(saveButton);
        content.addComponent(buttonsHLO);
        setClosable(false); // not closable because they shall use the buttons!
    }

    /** @return the component holding the actual edit form; shown above the buttons. */
    protected abstract Component provideEditComponent();

    /**
     * Hook for subclasses to attach extra behavior to the Save button.
     *
     * @return an additional listener, or {@code null} for none (the default)
     */
    protected ClickListener provideSaveButtonListener() {
        return null;
    }

    /**
     * Whether the dialog holds unsaved changes. Subclasses should override;
     * the default reports "not dirty", so Cancel closes without confirmation.
     */
    protected boolean isDirty() {
        return false;
    }

    /**
     * Compares an original and an edited string value.
     *
     * @return {@code true} if the two values differ (the field is dirty)
     */
    protected boolean stringDirty(String original, String changed) {
        if (original != null && changed != null) {
            // BUGFIX: previously returned original.equals(changed), which is
            // the inverse — "dirty" means the values differ. The null-handling
            // branch below already followed the correct semantics.
            return !original.equals(changed);
        }
        // Exactly one of the two is null -> dirty; both null -> unchanged.
        return !(original == null && changed == null);
    }

    /**
     * Compares an original and an edited boolean value.
     *
     * @return {@code true} if the two values differ (the field is dirty)
     */
    protected boolean booleanDirty(Boolean original, Boolean changed) {
        if (original != null && changed != null) {
            // BUGFIX: same inversion as stringDirty — see comment there.
            return !original.equals(changed);
        }
        return !(original == null && changed == null);
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + ((caption == null) ? 0 : caption.hashCode());
        return result;
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!super.equals(obj))
            return false;
        if (!(obj instanceof BaseEditDialog))
            return false;
        BaseEditDialog other = (BaseEditDialog) obj;
        if (caption == null) {
            if (other.caption != null)
                return false;
        } else if (!caption.equals(other.caption))
            return false;
        return true;
    }

    /**
     * @return the result
     */
    public int getResult() {
        return result;
    }

    /**
     * @param result
     *            the result to set
     */
    public void setResult(int result) {
        this.result = result;
    }

    /** Closes the dialog with {@link #RESULT_CANCEL}, confirming first when dirty. */
    private class CancelButtonClicklistener implements ClickListener {
        private static final long serialVersionUID = 1L;

        @Override
        public void buttonClick(ClickEvent event) {
            if (isDirty()) {
                // quick confirmation saying unsaved will be lost
                ConfirmDialog.show(UI.getCurrent(), "Are you sure?", new ConfirmDialog.Listener() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public void onClose(ConfirmDialog dialog) {
                        if (dialog.isConfirmed()) {
                            // Confirmed loss of unsaved changes
                            result = RESULT_CANCEL;
                            close();
                        } else {
                            // User did not confirm loss of unsaved changes
                        }
                    }
                });
            } else {
                // not dirty, simply close
                result = RESULT_CANCEL;
                close();
            }
        }
    }
}
|
package org.sosy_lab.cpachecker.cpa.policyiteration;
import org.sosy_lab.cpachecker.cfa.model.CFANode;
import org.sosy_lab.cpachecker.core.interfaces.AbstractState;
import org.sosy_lab.cpachecker.core.interfaces.Graphable;
/**
 * Abstract state for policy iteration: bounds on each expression (from the
 * template), for the given control node.
 *
 * Logic-less container class.
 */
public abstract class PolicyState implements AbstractState, Graphable {

    /** Control-flow node this state belongs to. */
    private final CFANode node;

    protected PolicyState(CFANode pNode) {
        this.node = pNode;
    }

    public CFANode getNode() {
        return node;
    }

    /** @return whether this is an abstracted (as opposed to intermediate) state. */
    public abstract boolean isAbstract();

    /**
     * Cast to subclass.
     * Syntax sugar to avoid ugliness.
     */
    public PolicyIntermediateState asIntermediate() {
        return (PolicyIntermediateState) this;
    }

    /** Downcast counterpart of {@link #asIntermediate()}. */
    public PolicyAbstractedState asAbstracted() {
        return (PolicyAbstractedState) this;
    }

    @Override
    public boolean shouldBeHighlighted() {
        // Policy states are never highlighted in graph output.
        return false;
    }
}
|
package androidsamples.java.tictactoe;
import android.os.Bundle;
import android.util.Log;
import android.view.MenuItem;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import androidx.navigation.Navigation;
import com.google.firebase.auth.FirebaseAuth;
/** Entry activity: hosts the navigation graph and handles the logout menu action. */
public class MainActivity extends AppCompatActivity {
    private static final String TAG = "MainActivity";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Install the layout's toolbar as the action bar.
        Toolbar appBar = findViewById(R.id.toolbar);
        setSupportActionBar(appBar);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Anything other than logout is delegated to the framework.
        if (item.getItemId() != R.id.menu_logout) {
            return super.onOptionsItemSelected(item);
        }
        Log.d(TAG, "logout clicked");
        FirebaseAuth.getInstance().signOut();
        Toast.makeText(MainActivity.this, "Logged out", Toast.LENGTH_SHORT).show();
        // Send the user back to the authentication flow.
        Navigation.findNavController(this, R.id.nav_host_fragment).navigate(R.id.action_need_auth);
        return true;
    }
}
|
package uk.gov.hmcts.reform.idam.web.sso;
import com.google.common.collect.ImmutableMap;
import feign.Request;
import feign.Response;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken;
import org.springframework.security.oauth2.client.web.OAuth2AuthorizedClientRepository;
import org.springframework.web.client.HttpStatusCodeException;
import uk.gov.hmcts.reform.idam.web.client.OidcApi;
import uk.gov.hmcts.reform.idam.web.client.SsoFederationApi;
import uk.gov.hmcts.reform.idam.web.config.properties.StrategicConfigurationProperties;
import uk.gov.hmcts.reform.idam.web.helper.AuthHelper;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.verify;
import static org.springframework.http.HttpHeaders.LOCATION;
import static org.springframework.http.HttpHeaders.SET_COOKIE;
import static uk.gov.hmcts.reform.idam.web.helper.ErrorHelper.restException;
/**
 * Unit tests for {@code SSOAuthenticationSuccessHandler}: verifies the
 * federation-authenticate / authorize flow and its failure modes.
 *
 * Improvements over the previous version: the duplicated feign
 * {@link Response} construction is extracted into {@link #stubFeignResponse},
 * and the test name typo "IsCommittee" is corrected to "IsCommitted".
 */
@RunWith(MockitoJUnitRunner.class)
public class SSOAuthenticationSuccessHandlerTest {

    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private HttpServletRequest request;

    @Mock
    private HttpServletResponse response;

    @Mock
    private OAuth2AuthenticationToken authentication;

    @Mock
    private OAuth2AuthorizedClientRepository repository;

    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private SsoFederationApi federationApi;

    @Mock
    private OidcApi oidcApi;

    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private OAuth2AuthorizedClient client;

    @Mock
    private StrategicConfigurationProperties.Session sessionProperties;

    @Mock
    private HttpSession session;

    @Mock
    private AuthHelper authHelper;

    private SSOAuthenticationSuccessHandler underTest;

    @Before
    public void setUp() {
        given(repository.loadAuthorizedClient(any(), any(), any())).willReturn(client);
        given(client.getAccessToken().getTokenValue()).willReturn("an_access_token");
        given(sessionProperties.getIdamSessionCookie()).willReturn("Idam.Session");
        underTest = new SSOAuthenticationSuccessHandler(repository, federationApi, oidcApi, sessionProperties, authHelper);
    }

    /**
     * Builds a stubbed feign {@link Response} whose underlying request carries
     * {@code requestHeaders} and whose response carries {@code responseHeaders}.
     */
    private Response stubFeignResponse(Map<String, Collection<String>> requestHeaders,
                                       Map<String, Collection<String>> responseHeaders) {
        return Response.builder()
            .request(Request.create(Request.HttpMethod.CONNECT, "some_url", requestHeaders, (Request.Body) null, null))
            .headers(responseHeaders)
            .build();
    }

    @Test
    public void onAuthenticationSuccess_shouldJustWorkBecauseIHaveMockedEverything() throws IOException {
        Map<String, Collection<String>> headers = ImmutableMap.of(SET_COOKIE, List.of("Idam.Session=abcdefg"));
        final Map<String, String[]> paramMap = ImmutableMap.of("some_param", new String[] {"some_value"});
        Map<String, Collection<String>> feignHeaders = ImmutableMap.of(LOCATION, List.of("http://some_url"));
        given(request.getSession()).willReturn(session);
        given(session.getAttribute(anyString())).willReturn(paramMap);
        given(federationApi.federationAuthenticate(anyString())).willReturn(stubFeignResponse(feignHeaders, headers));
        given(oidcApi.oauth2AuthorizePost(any(), any())).willReturn(stubFeignResponse(feignHeaders, feignHeaders));
        underTest.onAuthenticationSuccess(request, response, authentication);
        verify(response, atLeastOnce()).sendRedirect(any());
    }

    @Test
    public void onAuthenticationSuccess_shouldCallCreateInsteadOfUpdate() throws IOException {
        Map<String, Collection<String>> headers = ImmutableMap.of(SET_COOKIE, List.of("Idam.Session=abcdefg"));
        final Map<String, String[]> paramMap = ImmutableMap.of("some_param", new String[] {"some_value"});
        Map<String, Collection<String>> feignHeaders = ImmutableMap.of(LOCATION, List.of("http://some_url"));
        given(request.getSession()).willReturn(session);
        given(session.getAttribute(anyString())).willReturn(paramMap);
        // Update failing with 404 should make the handler fall back to create.
        given(federationApi.updateFederatedUser(anyString()))
            .willThrow(restException("", HttpStatus.NOT_FOUND, new HttpHeaders(), null));
        given(federationApi.federationAuthenticate(anyString())).willReturn(stubFeignResponse(feignHeaders, headers));
        given(oidcApi.oauth2AuthorizePost(any(), any())).willReturn(stubFeignResponse(feignHeaders, feignHeaders));
        underTest.onAuthenticationSuccess(request, response, authentication);
        verify(federationApi, atLeastOnce()).createFederatedUser(anyString());
    }

    @Test(expected = HttpStatusCodeException.class)
    public void onAuthenticationSuccess_shouldThrowExceptionIfResponseIsCommitted() throws IOException {
        Map<String, Collection<String>> headers = ImmutableMap.of(SET_COOKIE, List.of("Idam.Session=abcdefg"));
        final Map<String, String[]> paramMap = ImmutableMap.of("some_param", new String[] {"some_value"});
        Map<String, Collection<String>> feignHeaders = ImmutableMap.of(LOCATION, List.of("http://some_url"));
        given(request.getSession()).willReturn(session);
        given(session.getAttribute(anyString())).willReturn(paramMap);
        given(federationApi.federationAuthenticate(anyString())).willReturn(stubFeignResponse(feignHeaders, headers));
        given(oidcApi.oauth2AuthorizePost(any(), any())).willReturn(stubFeignResponse(feignHeaders, feignHeaders));
        // A committed response cannot be redirected.
        given(response.isCommitted()).willReturn(true);
        underTest.onAuthenticationSuccess(request, response, authentication);
    }

    @Test(expected = HttpStatusCodeException.class)
    public void onAuthenticationSuccess_shouldThrowExceptionIfLocationHeaderIsEmpty() throws IOException {
        Map<String, Collection<String>> headers = ImmutableMap.of(SET_COOKIE, List.of("Idam.Session=abcdefg"));
        final Map<String, String[]> paramMap = ImmutableMap.of("some_param", new String[] {"some_value"});
        Map<String, Collection<String>> feignHeaders = ImmutableMap.of(LOCATION, Collections.emptyList());
        given(request.getSession()).willReturn(session);
        given(session.getAttribute(anyString())).willReturn(paramMap);
        given(federationApi.federationAuthenticate(anyString())).willReturn(stubFeignResponse(feignHeaders, headers));
        given(oidcApi.oauth2AuthorizePost(any(), any())).willReturn(stubFeignResponse(feignHeaders, feignHeaders));
        underTest.onAuthenticationSuccess(request, response, authentication);
    }

    @Test(expected = HttpStatusCodeException.class)
    public void onAuthenticationSuccess_shouldThrowExceptionIfLocationHeaderIsMissing() throws IOException {
        Map<String, Collection<String>> headers = ImmutableMap.of(SET_COOKIE, List.of("Idam.Session=abcdefg"));
        final Map<String, String[]> paramMap = ImmutableMap.of("some_param", new String[] {"some_value"});
        Map<String, Collection<String>> feignHeaders = Collections.emptyMap();
        given(request.getSession()).willReturn(session);
        given(session.getAttribute(anyString())).willReturn(paramMap);
        given(federationApi.federationAuthenticate(anyString())).willReturn(stubFeignResponse(feignHeaders, headers));
        given(oidcApi.oauth2AuthorizePost(any(), any())).willReturn(stubFeignResponse(feignHeaders, feignHeaders));
        underTest.onAuthenticationSuccess(request, response, authentication);
    }

    @Test(expected = HttpStatusCodeException.class)
    public void onAuthenticationSuccess_shouldThrowExceptionIfParamMapIsMissing() throws IOException {
        Map<String, Collection<String>> headers = ImmutableMap.of(SET_COOKIE, List.of("Idam.Session=abcdefg"));
        Map<String, Collection<String>> feignHeaders = Collections.emptyMap();
        given(request.getSession()).willReturn(session);
        given(session.getAttribute(anyString())).willReturn(Collections.emptyMap());
        given(federationApi.federationAuthenticate(anyString())).willReturn(stubFeignResponse(feignHeaders, headers));
        underTest.onAuthenticationSuccess(request, response, authentication);
    }

    @Test(expected = HttpStatusCodeException.class)
    public void onAuthenticationSuccess_shouldThrowExceptionIfNoSessionCookieExists() throws IOException {
        Map<String, Collection<String>> headers = ImmutableMap.of(SET_COOKIE, Collections.emptyList());
        Map<String, Collection<String>> feignHeaders = Collections.emptyMap();
        given(federationApi.federationAuthenticate(anyString())).willReturn(stubFeignResponse(feignHeaders, headers));
        underTest.onAuthenticationSuccess(request, response, authentication);
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.ws.addressing;
/**
 * Encapsulates the WS-Addressing namespace URI (and by implication, also the
 * version).
 */
public interface AddressingType {

    /**
     * Returns the namespace URI identifying the WS-Addressing version in use.
     *
     * @return WS-Addressing namespace URI
     */
    String getNamespaceURI();
}
|
package se.l4.exobytes.internal.streaming;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Base64;
import java.util.OptionalInt;
import se.l4.exobytes.streaming.AbstractStreamingInput;
import se.l4.exobytes.streaming.Token;
import se.l4.exobytes.streaming.ValueConversion;
/**
* Input for JSON. Please note that this class is not intended for general use
* and does not strictly conform to the JSON standard.
*
*/
public class JsonInput
extends AbstractStreamingInput
{
private static final char NULL = 0;
private static final int LEVELS = 20;
private final Reader in;
private final char[] buffer;
private int position;
private int limit;
private int level;
private boolean[] lists;
/**
 * Creates an input reading JSON from the given stream, decoded as UTF-8.
 */
public JsonInput(InputStream in)
    throws IOException
{
    this(new InputStreamReader(in, StandardCharsets.UTF_8));
}

/**
 * Creates an input reading JSON from the given character reader.
 */
public JsonInput(Reader in)
    throws IOException
{
    this.in = in;
    this.buffer = new char[1024];
    this.lists = new boolean[LEVELS];
    // The root level is treated as a list so values can appear at the top level.
    this.lists[0] = true;
}
/**
 * Closes the underlying reader (and thereby any wrapped stream).
 */
@Override
public void close()
    throws IOException
{
    in.close();
}
/**
 * Creates the exception used for malformed input; plain {@link IOException}
 * for this implementation.
 */
@Override
protected IOException raiseException(String message)
{
    return new IOException(message);
}
@Override
protected Token peek0()
throws IOException
{
readWhitespace();
if(limit - position < 1)
{
if(false == readAhead(1)) return Token.END_OF_STREAM;
}
if(limit - position > 0)
{
return toToken(buffer[position]);
}
return Token.END_OF_STREAM;
}
@Override
public Token next0()
throws IOException
{
Token token = peek();
switch(token)
{
case OBJECT_END:
case LIST_END:
{
readNext();
char c = peekChar();
if(c == ',') read();
level--;
return token;
}
case OBJECT_START:
case LIST_START:
readNext();
increaseLevel(token == Token.LIST_START);
return token;
case NULL:
{
String v = readNonString();
if(! "null".equals(v))
{
throw raiseException("Expected null, but encountered malformed null-value: " + v);
}
markValueRead();
return token;
}
default:
return token;
}
}
private void increaseLevel(boolean isList)
{
level++;
if(lists.length == level)
{
// Grow lists when needed
lists = Arrays.copyOf(lists, level * 2);
}
lists[level] = isList;
}
@Override
public OptionalInt getLength()
{
return OptionalInt.empty();
}
@Override
protected void skipValue()
throws IOException
{
char c = peekChar();
if(c == '"')
{
// This is a string
readString(true);
if(peekChar() == ':')
{
char next = readNext();
if(next != ':')
{
throw raiseException("Expected `:`, got `" + next + "`");
}
}
}
else
{
_outer:
while(true)
{
c = peekChar(false);
switch(c)
{
case NULL:
case '}':
case ']':
case ',':
case ':':
break _outer;
default:
if(Character.isWhitespace(c)) break _outer;
}
read();
}
}
markValueRead();
}
private String readNonString()
throws IOException
{
StringBuilder value = new StringBuilder();
_outer:
while(true)
{
char c = peekChar(false);
switch(c)
{
case NULL:
case '}':
case ']':
case ',':
case ':':
break _outer;
default:
if(Character.isWhitespace(c)) break _outer;
}
value.append(read());
}
return value.toString();
}
@Override
protected Object readDynamic0()
throws IOException
{
if(current() == Token.NULL)
{
return null;
}
else if(peekChar() == '"')
{
return readString();
}
else
{
String v = readNonString();
markValueRead();
switch(v)
{
case "null":
return null;
case "false":
return false;
case "true":
return true;
default:
try
{
return Long.parseLong(v);
}
catch(NumberFormatException e)
{
try
{
return (long) Double.parseDouble(v);
}
catch(NumberFormatException e2)
{
throw raiseException("Unable to read dynamic value, was: " + v);
}
}
}
}
}
@Override
public boolean readBoolean()
throws IOException
{
String value = readNonString();
switch(value)
{
case "true":
markValueRead();
return true;
case "false":
markValueRead();
return false;
}
throw raiseException("Expected boolean but found " + value);
}
@Override
public byte readByte()
throws IOException
{
return ValueConversion.toByte(readInt());
}
@Override
public char readChar()
throws IOException
{
if(peekChar() == '"')
{
String s = readString();
if(s.length() != 1)
{
throw raiseException("Expected single character but string value was not a single character");
}
return s.charAt(0);
}
else
{
return ValueConversion.toChar(readInt());
}
}
@Override
public short readShort()
throws IOException
{
return ValueConversion.toShort(readInt());
}
@Override
public int readInt()
throws IOException
{
return ValueConversion.toInt(readLong());
}
@Override
public long readLong()
throws IOException
{
String in = readNonString();
markValueRead();
try
{
return Long.parseLong(in);
}
catch(NumberFormatException e)
{
try
{
return (long) Double.parseDouble(in);
}
catch(NumberFormatException e2)
{
throw raiseException("Unable to read number: " + in);
}
}
}
@Override
public float readFloat()
throws IOException
{
return (float) readDouble();
}
@Override
public double readDouble()
throws IOException
{
String in = readNonString();
markValueRead();
try
{
return Double.parseDouble(in);
}
catch(NumberFormatException e)
{
throw raiseException("Unable to read number: " + in);
}
}
@Override
public String readString()
throws IOException
{
String s = readString(true);
if(peekChar() == ':')
{
readNext();
}
markValueRead();
return s;
}
@Override
public byte[] readByteArray()
throws IOException
{
String value = readString();
return Base64.getDecoder().decode(value);
}
@Override
public InputStream readByteStream()
throws IOException
{
return new ByteArrayInputStream(readByteArray());
}
private String readString(boolean readStart)
throws IOException
{
StringBuilder key = new StringBuilder();
char c = read();
if(readStart)
{
if(c != '"') throw raiseException("Expected \", but got " + c);
c = read();
}
while(c != '"')
{
if(c == '\\')
{
readEscaped(key);
}
else
{
key.append(c);
}
c = read();
}
return key.toString();
}
private void readEscaped(StringBuilder result)
throws IOException
{
char c = read();
switch(c)
{
case '\'':
result.append('\'');
break;
case '"':
result.append('"');
break;
case '\\':
result.append('\\');
break;
case '/':
result.append('/');
break;
case 'r':
result.append('\r');
break;
case 'n':
result.append('\n');
break;
case 't':
result.append('\t');
break;
case 'b':
result.append('\b');
break;
case 'f':
result.append('\f');
break;
case 'u':
// Unicode, read 4 chars and treat as hex
readAhead(4);
String s = new String(buffer, position, 4);
result.append((char) Integer.parseInt(s, 16));
position += 4;
break;
}
}
private char peekChar()
throws IOException
{
return peekChar(true);
}
private char peekChar(boolean ws)
throws IOException
{
if(ws) readWhitespace();
if(limit - position < 1)
{
if(false == readAhead(1))
{
return NULL;
}
}
if(limit - position > 0)
{
return buffer[position];
}
return NULL;
}
@Override
protected void markValueRead()
throws IOException
{
super.markValueRead();
// Check for trailing commas
readWhitespace();
char c = peekChar();
if(c == ',') read();
}
/**
* Read all of the whitespace at the current position.
*
* @throws IOException
*/
private void readWhitespace()
throws IOException
{
if(limit - position > 0 && ! Character.isWhitespace(buffer[position])) return;
while(true)
{
if(limit - position < 1)
{
if(! readAhead(1)) return;
}
char c = buffer[position];
if(Character.isWhitespace(c) || c == ',')
{
position++;
}
else
{
return;
}
}
}
/**
* Reader the next character while also skipping whitespace as necessary.
*
* @return
* @throws IOException
*/
private char readNext()
throws IOException
{
readWhitespace();
return read();
}
/**
* Read a single character at the current position.
*
* @return
* @throws IOException
*/
private char read()
throws IOException
{
if(limit - position < 1)
{
if(! readAhead(1))
{
throw new EOFException();
}
}
return buffer[position++];
}
/**
* Perform a read ahead for the given number of characters. Will read the
* characters into the buffer.
*
* @param minChars
* @return
* @throws IOException
*/
private boolean readAhead(int minChars)
throws IOException
{
if(limit < 0)
{
return false;
}
else if(position + minChars < limit)
{
return true;
}
else if(limit >= position)
{
// If we have characters left we need to keep them in the buffer
int stop = limit - position;
System.arraycopy(buffer, position, buffer, 0, stop);
limit = stop;
}
else
{
limit = 0;
}
int read = read(buffer, limit, buffer.length - limit);
position = 0;
limit += read;
if(read == 0)
{
return false;
}
if(read < minChars)
{
throw raiseException("Needed " + minChars + " but got " + read);
}
return true;
}
/**
* Fully read a number of characters.
*
* @param buffer
* @param offset
* @param length
* @return
* @throws IOException
*/
private int read(char[] buffer, int offset, int length)
throws IOException
{
int result = 0;
while(result < length)
{
int l = in.read(buffer, offset + result, length - result);
if(l == -1) break;
result += l;
}
return result;
}
/**
* Take the current character and turn it into a {@link Token}.
*
* @param c
* @return
*/
private Token toToken(char c)
throws IOException
{
if(c == NULL)
{
return Token.END_OF_STREAM;
}
switch(c)
{
case '{':
return Token.OBJECT_START;
case '}':
return Token.OBJECT_END;
case '[':
return Token.LIST_START;
case ']':
return Token.LIST_END;
case '"':
return Token.VALUE;
case 'n':
return Token.NULL;
case 'f':
case 't':
case '+':
case '-':
return Token.VALUE;
default:
if(c >= '0' && c <= '9')
{
return Token.VALUE;
}
}
throw raiseException("Unexpected JSON input, next character is: " + c);
}
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.plugin.minion.tasks.realtimetoofflinesegments;
import com.google.common.base.Preconditions;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.helix.task.TaskState;
import org.apache.pinot.common.metadata.segment.SegmentZKMetadata;
import org.apache.pinot.common.minion.RealtimeToOfflineSegmentsTaskMetadata;
import org.apache.pinot.common.utils.LLCSegmentName;
import org.apache.pinot.controller.helix.core.minion.ClusterInfoAccessor;
import org.apache.pinot.controller.helix.core.minion.generator.PinotTaskGenerator;
import org.apache.pinot.controller.helix.core.minion.generator.TaskGeneratorUtils;
import org.apache.pinot.core.common.MinionConstants;
import org.apache.pinot.core.common.MinionConstants.RealtimeToOfflineSegmentsTask;
import org.apache.pinot.core.minion.PinotTaskConfig;
import org.apache.pinot.spi.annotations.minion.TaskGenerator;
import org.apache.pinot.spi.config.table.TableConfig;
import org.apache.pinot.spi.config.table.TableTaskConfig;
import org.apache.pinot.spi.config.table.TableType;
import org.apache.pinot.spi.stream.StreamConfig;
import org.apache.pinot.spi.utils.CommonConstants.Segment;
import org.apache.pinot.spi.utils.IngestionConfigUtils;
import org.apache.pinot.spi.utils.TimeUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link PinotTaskGenerator} implementation for generating tasks of type {@link RealtimeToOfflineSegmentsTask}
*
* These will be generated only for REALTIME tables.
* At any given time, only 1 task of this type should be generated for a table.
*
* Steps:
* - The watermarkMs is read from the {@link RealtimeToOfflineSegmentsTaskMetadata} ZNode
* found at MINION_TASK_METADATA/RealtimeToOfflineSegmentsTask/tableNameWithType
* In case of cold-start, no ZNode will exist.
* A new ZNode will be created, with watermarkMs as the smallest time found in the COMPLETED segments
*
* - The execution window for the task is calculated as,
* windowStartMs = watermarkMs, windowEndMs = windowStartMs + bucketTimeMs,
* where bucketTime can be provided in the taskConfigs (default 1d)
*
* - If the execution window is not older than bufferTimeMs, no task will be generated,
* where bufferTime can be provided in the taskConfigs (default 2d)
*
* - Segment metadata is scanned for all COMPLETED segments,
* to pick those containing data in window [windowStartMs, windowEndMs)
*
* - There are some special considerations for using last completed segment of a partition.
* Such segments will be checked for segment endTime, to ensure there's no overflow into CONSUMING segments
*
* - A PinotTaskConfig is created, with segment information, execution window, and any config specific to the task
*/
@TaskGenerator
public class RealtimeToOfflineSegmentsTaskGenerator implements PinotTaskGenerator {
  private static final Logger LOGGER = LoggerFactory.getLogger(RealtimeToOfflineSegmentsTaskGenerator.class);

  // Default size of the execution window and default safety buffer behind the current time
  private static final String DEFAULT_BUCKET_PERIOD = "1d";
  private static final String DEFAULT_BUFFER_PERIOD = "2d";

  // Accessor for cluster state: segment ZK metadata, task states and minion task metadata
  private ClusterInfoAccessor _clusterInfoAccessor;

  @Override
  public void init(ClusterInfoAccessor clusterInfoAccessor) {
    _clusterInfoAccessor = clusterInfoAccessor;
  }

  @Override
  public String getTaskType() {
    return RealtimeToOfflineSegmentsTask.TASK_TYPE;
  }

  /**
   * Generates at most one {@link RealtimeToOfflineSegmentsTask} config per eligible REALTIME table.
   *
   * A table is skipped when: it is not REALTIME, it uses a high-level consumer, an incomplete task
   * of this type already exists, it has no completed segments, some partition has no completed
   * segment yet, the execution window is not yet older than the buffer time, or window data would
   * overflow into CONSUMING segments.
   */
  @Override
  public List<PinotTaskConfig> generateTasks(List<TableConfig> tableConfigs) {
    String taskType = RealtimeToOfflineSegmentsTask.TASK_TYPE;
    List<PinotTaskConfig> pinotTaskConfigs = new ArrayList<>();

    for (TableConfig tableConfig : tableConfigs) {
      String realtimeTableName = tableConfig.getTableName();

      // Only REALTIME tables are eligible for this task
      if (tableConfig.getTableType() != TableType.REALTIME) {
        LOGGER.warn("Skip generating task: {} for non-REALTIME table: {}", taskType, realtimeTableName);
        continue;
      }
      StreamConfig streamConfig = new StreamConfig(realtimeTableName, IngestionConfigUtils.getStreamConfigMap(tableConfig));

      // High-level consumer tables are not supported
      if (streamConfig.hasHighLevelConsumerType()) {
        LOGGER.warn("Skip generating task: {} for HLC REALTIME table: {}", taskType, realtimeTableName);
        continue;
      }
      LOGGER.info("Start generating task configs for table: {} for task: {}", realtimeTableName, taskType);

      // Only schedule 1 task of this type, per table
      Map<String, TaskState> incompleteTasks = TaskGeneratorUtils.getIncompleteTasks(taskType, realtimeTableName, _clusterInfoAccessor);
      if (!incompleteTasks.isEmpty()) {
        LOGGER.warn("Found incomplete tasks: {} for same table: {}. Skipping task generation.", incompleteTasks.keySet(), realtimeTableName);
        continue;
      }

      // Get all segment metadata for completed segments (DONE status).
      List<SegmentZKMetadata> completedSegmentsZKMetadata = new ArrayList<>();
      Map<Integer, String> partitionToLatestCompletedSegmentName = new HashMap<>();
      Set<Integer> allPartitions = new HashSet<>();
      getCompletedSegmentsInfo(realtimeTableName, completedSegmentsZKMetadata, partitionToLatestCompletedSegmentName, allPartitions);
      if (completedSegmentsZKMetadata.isEmpty()) {
        LOGGER.info("No realtime-completed segments found for table: {}, skipping task generation: {}", realtimeTableName, taskType);
        continue;
      }

      // Every partition must have at least one completed segment; otherwise the window
      // could miss data that is still only in CONSUMING segments
      allPartitions.removeAll(partitionToLatestCompletedSegmentName.keySet());
      if (!allPartitions.isEmpty()) {
        LOGGER.info("Partitions: {} have no completed segments. Table: {} is not ready for {}. Skipping task generation.", allPartitions, realtimeTableName,
            taskType);
        continue;
      }
      TableTaskConfig tableTaskConfig = tableConfig.getTaskConfig();
      Preconditions.checkState(tableTaskConfig != null);
      Map<String, String> taskConfigs = tableTaskConfig.getConfigsForTaskType(taskType);
      Preconditions.checkState(taskConfigs != null, "Task config shouldn't be null for table: {}", realtimeTableName);

      // Get the bucket size and buffer
      String bucketTimePeriod = taskConfigs.getOrDefault(RealtimeToOfflineSegmentsTask.BUCKET_TIME_PERIOD_KEY, DEFAULT_BUCKET_PERIOD);
      String bufferTimePeriod = taskConfigs.getOrDefault(RealtimeToOfflineSegmentsTask.BUFFER_TIME_PERIOD_KEY, DEFAULT_BUFFER_PERIOD);
      long bucketMs = TimeUtils.convertPeriodToMillis(bucketTimePeriod);
      long bufferMs = TimeUtils.convertPeriodToMillis(bufferTimePeriod);

      // Get watermark from RealtimeToOfflineSegmentsTaskMetadata ZNode. WindowStart = watermark. WindowEnd = windowStart + bucket.
      long windowStartMs = getWatermarkMs(realtimeTableName, completedSegmentsZKMetadata, bucketMs);
      long windowEndMs = windowStartMs + bucketMs;

      // Check that execution window is older than bufferTime
      if (windowEndMs > System.currentTimeMillis() - bufferMs) {
        LOGGER.info("Window with start: {} and end: {} is not older than buffer time: {} configured as {} ago. Skipping task generation: {}", windowStartMs,
            windowEndMs, bufferMs, bufferTimePeriod, taskType);
        continue;
      }

      // Find all COMPLETED segments with data overlapping execution window: windowStart (inclusive) to windowEnd (exclusive)
      List<String> segmentNames = new ArrayList<>();
      List<String> downloadURLs = new ArrayList<>();
      Set<String> lastCompletedSegmentPerPartition = new HashSet<>(partitionToLatestCompletedSegmentName.values());
      boolean skipGenerate = false;
      for (SegmentZKMetadata segmentZKMetadata : completedSegmentsZKMetadata) {
        String segmentName = segmentZKMetadata.getSegmentName();
        long segmentStartTimeMs = segmentZKMetadata.getStartTimeMs();
        long segmentEndTimeMs = segmentZKMetadata.getEndTimeMs();

        // Check overlap with window
        if (windowStartMs <= segmentEndTimeMs && segmentStartTimeMs < windowEndMs) {
          // If last completed segment is being used, make sure that segment crosses over end of window.
          // In the absence of this check, CONSUMING segments could contain some portion of the window. That data would be skipped forever.
          if (lastCompletedSegmentPerPartition.contains(segmentName) && segmentEndTimeMs < windowEndMs) {
            LOGGER.info("Window data overflows into CONSUMING segments for partition of segment: {}. Skipping task generation: {}", segmentName, taskType);
            skipGenerate = true;
            break;
          }
          segmentNames.add(segmentName);
          downloadURLs.add(segmentZKMetadata.getDownloadUrl());
        }
      }
      if (segmentNames.isEmpty() || skipGenerate) {
        LOGGER.info("Found no eligible segments for task: {} with window [{} - {}). Skipping task generation", taskType, windowStartMs, windowEndMs);
        continue;
      }

      // Assemble the task config: table/segment identifiers plus segment-processor settings
      Map<String, String> configs = new HashMap<>();
      configs.put(MinionConstants.TABLE_NAME_KEY, realtimeTableName);
      configs.put(MinionConstants.SEGMENT_NAME_KEY, StringUtils.join(segmentNames, ","));
      configs.put(MinionConstants.DOWNLOAD_URL_KEY, StringUtils.join(downloadURLs, MinionConstants.URL_SEPARATOR));
      configs.put(MinionConstants.UPLOAD_URL_KEY, _clusterInfoAccessor.getVipUrl() + "/segments");

      // Segment processor configs
      configs.put(RealtimeToOfflineSegmentsTask.WINDOW_START_MS_KEY, String.valueOf(windowStartMs));
      configs.put(RealtimeToOfflineSegmentsTask.WINDOW_END_MS_KEY, String.valueOf(windowEndMs));
      String roundBucketTimePeriod = taskConfigs.get(RealtimeToOfflineSegmentsTask.ROUND_BUCKET_TIME_PERIOD_KEY);
      if (roundBucketTimePeriod != null) {
        configs.put(RealtimeToOfflineSegmentsTask.ROUND_BUCKET_TIME_PERIOD_KEY, roundBucketTimePeriod);
      }

      // NOTE: Check and put both keys for backward-compatibility
      String mergeType = taskConfigs.get(RealtimeToOfflineSegmentsTask.MERGE_TYPE_KEY);
      if (mergeType == null) {
        mergeType = taskConfigs.get(RealtimeToOfflineSegmentsTask.COLLECTOR_TYPE_KEY);
      }
      if (mergeType != null) {
        configs.put(RealtimeToOfflineSegmentsTask.MERGE_TYPE_KEY, mergeType);
        configs.put(RealtimeToOfflineSegmentsTask.COLLECTOR_TYPE_KEY, mergeType);
      }

      // Pass through any per-column aggregation type settings
      for (Map.Entry<String, String> entry : taskConfigs.entrySet()) {
        if (entry.getKey().endsWith(RealtimeToOfflineSegmentsTask.AGGREGATION_TYPE_KEY_SUFFIX)) {
          configs.put(entry.getKey(), entry.getValue());
        }
      }
      String maxNumRecordsPerSegment = taskConfigs.get(RealtimeToOfflineSegmentsTask.MAX_NUM_RECORDS_PER_SEGMENT_KEY);
      if (maxNumRecordsPerSegment != null) {
        configs.put(RealtimeToOfflineSegmentsTask.MAX_NUM_RECORDS_PER_SEGMENT_KEY, maxNumRecordsPerSegment);
      }

      pinotTaskConfigs.add(new PinotTaskConfig(taskType, configs));
      LOGGER.info("Finished generating task configs for table: {} for task: {}", realtimeTableName, taskType);
    }
    return pinotTaskConfigs;
  }

  /**
   * Fetch completed (non-consuming) segment and partition information
   * @param realtimeTableName the realtime table name
   * @param completedSegmentsZKMetadata list for collecting the completed segments ZK metadata
   * @param partitionToLatestCompletedSegmentName map for collecting the partitionId to the latest completed segment name
   * @param allPartitions set for collecting all partition ids
   */
  private void getCompletedSegmentsInfo(String realtimeTableName, List<SegmentZKMetadata> completedSegmentsZKMetadata,
      Map<Integer, String> partitionToLatestCompletedSegmentName, Set<Integer> allPartitions) {
    List<SegmentZKMetadata> segmentsZKMetadata = _clusterInfoAccessor.getSegmentsZKMetadata(realtimeTableName);

    Map<Integer, LLCSegmentName> latestLLCSegmentNameMap = new HashMap<>();
    for (SegmentZKMetadata segmentZKMetadata : segmentsZKMetadata) {
      // NOTE(review): assumes every segment name parses as an LLC segment name - HLC tables
      // are filtered out by the caller before this is invoked
      LLCSegmentName llcSegmentName = new LLCSegmentName(segmentZKMetadata.getSegmentName());
      allPartitions.add(llcSegmentName.getPartitionGroupId());

      if (segmentZKMetadata.getStatus().equals(Segment.Realtime.Status.DONE)) {
        completedSegmentsZKMetadata.add(segmentZKMetadata);

        // Track, per partition group, the completed segment with the highest sequence number
        latestLLCSegmentNameMap.compute(llcSegmentName.getPartitionGroupId(), (partitionGroupId, latestLLCSegmentName) -> {
          if (latestLLCSegmentName == null) {
            return llcSegmentName;
          } else {
            if (llcSegmentName.getSequenceNumber() > latestLLCSegmentName.getSequenceNumber()) {
              return llcSegmentName;
            } else {
              return latestLLCSegmentName;
            }
          }
        });
      }
    }

    for (Map.Entry<Integer, LLCSegmentName> entry : latestLLCSegmentNameMap.entrySet()) {
      partitionToLatestCompletedSegmentName.put(entry.getKey(), entry.getValue().getSegmentName());
    }
  }

  /**
   * Get the watermark from the RealtimeToOfflineSegmentsMetadata ZNode.
   * If the znode is null, computes the watermark using either the start time config or the start time from segment metadata
   *
   * Note: on cold-start this also persists the computed watermark back to ZK as a side effect.
   */
  private long getWatermarkMs(String realtimeTableName, List<SegmentZKMetadata> completedSegmentsZKMetadata, long bucketMs) {
    RealtimeToOfflineSegmentsTaskMetadata realtimeToOfflineSegmentsTaskMetadata =
        _clusterInfoAccessor.getMinionRealtimeToOfflineSegmentsTaskMetadata(realtimeTableName);

    if (realtimeToOfflineSegmentsTaskMetadata == null) {
      // No ZNode exists. Cold-start.
      long watermarkMs;

      // Find the smallest time from all segments
      long minStartTimeMs = Long.MAX_VALUE;
      for (SegmentZKMetadata segmentZKMetadata : completedSegmentsZKMetadata) {
        minStartTimeMs = Math.min(minStartTimeMs, segmentZKMetadata.getStartTimeMs());
      }
      Preconditions.checkState(minStartTimeMs != Long.MAX_VALUE);

      // Round off according to the bucket. This ensures we align the offline segments to proper time boundaries
      // For example, if start time millis is 20200813T12:34:59, we want to create the first segment for window [20200813, 20200814)
      watermarkMs = (minStartTimeMs / bucketMs) * bucketMs;

      // Create RealtimeToOfflineSegmentsTaskMetadata ZNode using watermark calculated above
      realtimeToOfflineSegmentsTaskMetadata = new RealtimeToOfflineSegmentsTaskMetadata(realtimeTableName, watermarkMs);
      _clusterInfoAccessor.setRealtimeToOfflineSegmentsTaskMetadata(realtimeToOfflineSegmentsTaskMetadata);
    }
    return realtimeToOfflineSegmentsTaskMetadata.getWatermarkMs();
  }
}
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.02 at 04:19:10 PM PDT
//
package net.authorize.api.contract.v1;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for ARBGetSubscriptionListSearchTypeEnum.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="ARBGetSubscriptionListSearchTypeEnum">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="cardExpiringThisMonth"/>
* <enumeration value="subscriptionActive"/>
* <enumeration value="subscriptionExpiringThisMonth"/>
* <enumeration value="subscriptionInactive"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "ARBGetSubscriptionListSearchTypeEnum")
@XmlEnum
public enum ARBGetSubscriptionListSearchTypeEnum {

    @XmlEnumValue("cardExpiringThisMonth")
    CARD_EXPIRING_THIS_MONTH("cardExpiringThisMonth"),
    @XmlEnumValue("subscriptionActive")
    SUBSCRIPTION_ACTIVE("subscriptionActive"),
    @XmlEnumValue("subscriptionExpiringThisMonth")
    SUBSCRIPTION_EXPIRING_THIS_MONTH("subscriptionExpiringThisMonth"),
    @XmlEnumValue("subscriptionInactive")
    SUBSCRIPTION_INACTIVE("subscriptionInactive");

    /** The XML lexical value backing this constant. */
    private final String value;

    ARBGetSubscriptionListSearchTypeEnum(String xmlValue) {
        this.value = xmlValue;
    }

    /**
     * Returns the XML lexical value for this constant.
     */
    public String value() {
        return value;
    }

    /**
     * Resolves an XML lexical value to its enum constant.
     *
     * @param v the XML value to look up
     * @return the matching constant
     * @throws IllegalArgumentException if no constant uses the given value
     */
    public static ARBGetSubscriptionListSearchTypeEnum fromValue(String v) {
        for (ARBGetSubscriptionListSearchTypeEnum candidate : ARBGetSubscriptionListSearchTypeEnum.values()) {
            if (candidate.value.equals(v)) {
                return candidate;
            }
        }
        throw new IllegalArgumentException(v);
    }
}
|
/*
* 3D City Database - The Open Source CityGML Database
* https://www.3dcitydb.org/
*
* Copyright 2013 - 2021
* Chair of Geoinformatics
* Technical University of Munich, Germany
* https://www.lrg.tum.de/gis/
*
* The 3D City Database is jointly developed with the following
* cooperation partners:
*
* Virtual City Systems, Berlin <https://vc.systems/>
* M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.citydb.core.operation.exporter.database.content;
import org.citydb.config.Config;
import org.citydb.config.geometry.GeometryObject;
import org.citydb.config.project.exporter.ExportConfig;
import org.citydb.config.project.exporter.OutputFormat;
import org.citydb.config.project.exporter.XLinkConfig;
import org.citydb.config.project.exporter.XLinkFeatureConfig;
import org.citydb.core.ade.ADEExtension;
import org.citydb.core.ade.ADEExtensionManager;
import org.citydb.core.ade.exporter.ADEExportManager;
import org.citydb.core.ade.exporter.CityGMLExportHelper;
import org.citydb.core.database.adapter.AbstractDatabaseAdapter;
import org.citydb.core.database.schema.TableEnum;
import org.citydb.core.database.schema.mapping.*;
import org.citydb.core.operation.common.cache.*;
import org.citydb.core.operation.common.cache.model.CacheTableModel;
import org.citydb.core.operation.common.util.AffineTransformer;
import org.citydb.core.operation.common.xlink.DBXlink;
import org.citydb.core.operation.exporter.CityGMLExportException;
import org.citydb.core.operation.exporter.util.*;
import org.citydb.core.operation.exporter.writer.FeatureWriteException;
import org.citydb.core.operation.exporter.writer.FeatureWriter;
import org.citydb.core.plugin.PluginException;
import org.citydb.core.plugin.PluginManager;
import org.citydb.core.plugin.extension.exporter.FeatureExportExtension;
import org.citydb.core.query.Query;
import org.citydb.core.query.filter.lod.LodFilter;
import org.citydb.core.query.filter.projection.CombinedProjectionFilter;
import org.citydb.core.query.filter.projection.ProjectionFilter;
import org.citydb.core.util.CoreConstants;
import org.citydb.core.util.Util;
import org.citydb.sqlbuilder.expression.IntegerLiteral;
import org.citydb.sqlbuilder.schema.Column;
import org.citydb.sqlbuilder.select.ProjectionToken;
import org.citydb.sqlbuilder.select.projection.Function;
import org.citydb.util.concurrent.WorkerPool;
import org.citydb.util.log.Logger;
import org.citygml4j.builder.jaxb.CityGMLBuilder;
import org.citygml4j.builder.jaxb.CityGMLBuilderException;
import org.citygml4j.builder.jaxb.unmarshal.JAXBUnmarshaller;
import org.citygml4j.model.citygml.ade.binding.ADEModelObject;
import org.citygml4j.model.citygml.ade.generic.ADEGenericElement;
import org.citygml4j.model.citygml.appearance.Appearance;
import org.citygml4j.model.citygml.bridge.*;
import org.citygml4j.model.citygml.building.AbstractBoundarySurface;
import org.citygml4j.model.citygml.building.AbstractOpening;
import org.citygml4j.model.citygml.building.*;
import org.citygml4j.model.citygml.cityfurniture.CityFurniture;
import org.citygml4j.model.citygml.cityobjectgroup.CityObjectGroup;
import org.citygml4j.model.citygml.core.*;
import org.citygml4j.model.citygml.generics.GenericCityObject;
import org.citygml4j.model.citygml.landuse.LandUse;
import org.citygml4j.model.citygml.relief.AbstractReliefComponent;
import org.citygml4j.model.citygml.relief.ReliefFeature;
import org.citygml4j.model.citygml.transportation.AbstractTransportationObject;
import org.citygml4j.model.citygml.transportation.TransportationComplex;
import org.citygml4j.model.citygml.tunnel.*;
import org.citygml4j.model.citygml.vegetation.PlantCover;
import org.citygml4j.model.citygml.vegetation.SolitaryVegetationObject;
import org.citygml4j.model.citygml.waterbody.AbstractWaterBoundarySurface;
import org.citygml4j.model.citygml.waterbody.WaterBody;
import org.citygml4j.model.gml.GMLClass;
import org.citygml4j.model.gml.base.AbstractGML;
import org.citygml4j.model.gml.feature.AbstractFeature;
import org.citygml4j.model.gml.geometry.AbstractGeometry;
import org.citygml4j.model.module.citygml.CityGMLVersion;
import org.citygml4j.util.gmlid.DefaultGMLIdManager;
import org.citygml4j.xml.io.reader.MissingADESchemaException;
import org.w3c.dom.Document;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.Reader;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.*;
public class CityGMLExportManager implements CityGMLExportHelper {
private final Logger log = Logger.getInstance();
private final IdentityHashMap<Class<? extends DBExporter>, DBExporter> exporters = new IdentityHashMap<>();
private final IdentityHashMap<ADEExtension, ADEExportManager> adeExporters = new IdentityHashMap<>();
private final Connection connection;
private final Query query;
private final AbstractDatabaseAdapter databaseAdapter;
private final SchemaMapping schemaMapping;
private final CityGMLBuilder cityGMLBuilder;
private final ADEExtensionManager adeManager;
private final List<FeatureExportExtension> plugins;
private final FeatureWriter featureWriter;
private final WorkerPool<DBXlink> xlinkPool;
private final IdCacheManager idCacheManager;
private final CacheTableManager cacheTableManager;
private final InternalConfig internalConfig;
private final Config config;
private final boolean failOnError;
private final Set<String> localGeometryCache;
private final AttributeValueSplitter attributeValueSplitter;
private final ExportCounter exportCounter;
private final JAXBUnmarshaller jaxbUnmarshaller;
private final boolean hasADESupport;
private GMLConverter gmlConverter;
private LodGeometryChecker lodGeometryChecker;
private AppearanceRemover appearanceRemover;
private AffineTransformer affineTransformer;
private Document document;
public CityGMLExportManager(Connection connection,
Query query,
AbstractDatabaseAdapter databaseAdapter,
SchemaMapping schemaMapping,
CityGMLBuilder cityGMLBuilder,
FeatureWriter featureWriter,
WorkerPool<DBXlink> xlinkPool,
IdCacheManager idCacheManager,
CacheTableManager cacheTableManager,
AffineTransformer affineTransformer,
InternalConfig internalConfig,
Config config) throws CityGMLExportException {
this.connection = connection;
this.query = query;
this.databaseAdapter = databaseAdapter;
this.schemaMapping = schemaMapping;
this.cityGMLBuilder = cityGMLBuilder;
this.featureWriter = featureWriter;
this.xlinkPool = xlinkPool;
this.idCacheManager = idCacheManager;
this.cacheTableManager = cacheTableManager;
this.internalConfig = internalConfig;
this.config = config;
adeManager = ADEExtensionManager.getInstance();
hasADESupport = !adeManager.getEnabledExtensions().isEmpty();
plugins = PluginManager.getInstance().getEnabledExternalPlugins(FeatureExportExtension.class);
failOnError = config.getExportConfig().getGeneralOptions().isFailFastOnErrors();
localGeometryCache = new HashSet<>();
attributeValueSplitter = new AttributeValueSplitter();
exportCounter = new ExportCounter(schemaMapping);
if (!query.getLodFilter().preservesGeometry()) {
lodGeometryChecker = new LodGeometryChecker(this, schemaMapping);
if (config.getExportConfig().getAppearances().isSetExportAppearance())
appearanceRemover = new AppearanceRemover();
}
if (config.getExportConfig().getAffineTransformation().isEnabled()) {
this.affineTransformer = affineTransformer;
}
try {
jaxbUnmarshaller = cityGMLBuilder.createJAXBUnmarshaller();
jaxbUnmarshaller.setThrowMissingADESchema(false);
jaxbUnmarshaller.setParseSchema(false);
} catch (CityGMLBuilderException e) {
throw new CityGMLExportException("Failed to build JAXB unmarshaller.", e);
}
}
/**
 * Exports the object with the given database id and type and runs the
 * common post-processing (batch flush, LoD cleanup, plugin hooks) on it.
 *
 * @return the post-processed object, or {@code null} if the export failed
 *         or a plugin dropped the feature
 */
public AbstractGML exportObject(long objectId, AbstractObjectType<?> objectType) throws CityGMLExportException, SQLException {
    AbstractGML exported = exportObject(objectId, objectType, false);
    if (exported == null) {
        return null;
    }
    return processObject(exported);
}
/**
 * Applies the common post-processing steps to a freshly exported object:
 * flushes batched exports, removes LoD-filtered remains, caches geometry ids
 * for global appearances, invokes export plugins and triggers lazy texture export.
 *
 * @return the (possibly replaced) object, or {@code null} if a plugin dropped it
 */
private AbstractGML processObject(AbstractGML object) throws CityGMLExportException, SQLException {
    try {
        // execute batch export
        executeBatch();

        // remove empty city objects in case we filter LoDs
        if (lodGeometryChecker != null)
            lodGeometryChecker.cleanupCityObjects(object);

        // remove local appearances in case we filter LoDs
        if (appearanceRemover != null)
            appearanceRemover.cleanupAppearances(object);

        // cache geometry ids in case we export global appearances
        if (internalConfig.isExportGlobalAppearances())
            getExporter(DBGlobalAppearance.class).cacheGeometryIds(object);

        if (object instanceof AbstractFeature) {
            AbstractFeature feature = (AbstractFeature) object;

            // invoke export plugins; a plugin may replace the feature or drop it
            // entirely by returning null
            if (!plugins.isEmpty()) {
                for (FeatureExportExtension plugin : plugins) {
                    try {
                        feature = plugin.postprocess(feature);
                        if (feature == null)
                            return null;
                    } catch (PluginException e) {
                        throw new CityGMLExportException("Export plugin " + plugin.getClass().getName() + " threw an exception.", e);
                    }
                }
            }

            // trigger export of textures if required
            if (isLazyTextureExport() && config.getExportConfig().getAppearances().isSetExportAppearance())
                getExporter(DBLocalAppearance.class).triggerLazyTextureExport(feature);
        }

        return object;
    } finally {
        // clear local geometry cache; its scope is a single top-level export
        localGeometryCache.clear();
    }
}
/**
 * Exports a stub of the object with the given id and class id and returns it
 * as the requested type, or {@code null} if the result is not assignable.
 */
@Override
public <T extends AbstractGML> T createObject(long objectId, int objectClassId, Class<T> type) throws CityGMLExportException, SQLException {
    AbstractObjectType<?> targetType = getAbstractObjectType(objectClassId);
    if (targetType == null) {
        throw new CityGMLExportException("Failed to determine object type for "
                + getObjectSignature(objectClassId, objectId) + ". Skipping export.");
    }

    AbstractGML candidate = exportObject(objectId, targetType, true);
    if (!type.isInstance(candidate)) {
        // also covers a null export result, since isInstance(null) is false
        return null;
    }

    return type.cast(candidate);
}
/**
 * Instantiates an empty citygml4j object for the given object class id in the
 * target CityGML version, returning {@code null} when the result does not
 * match the requested type.
 */
protected <T extends AbstractGML> T createObject(int objectClassId, Class<T> type) {
    AbstractGML created = Util.createObject(objectClassId, query.getTargetVersion());
    if (type.isInstance(created)) {
        return type.cast(created);
    }
    return null;
}
/**
 * Instantiates the citygml4j object for the given type and dispatches the
 * database export to the exporter matching the object's class.
 *
 * @param objectId database id of the object
 * @param objectType mapped type of the object
 * @param exportStub if {@code true}, only a stub (no ADE content) is exported
 * @return the exported object, or {@code null} if the responsible exporter reported failure
 * @throws CityGMLExportException if the object cannot be instantiated or its ADE is disabled
 */
private AbstractGML exportObject(long objectId, AbstractObjectType<?> objectType, boolean exportStub) throws CityGMLExportException, SQLException {
    AbstractGML object = Util.createObject(objectType.getObjectClassId(), query.getTargetVersion());
    if (object == null)
        throw new CityGMLExportException("Failed to instantiate citygml4j object for " + getObjectSignature(objectType, objectId) + ". Skipping export.");

    // ADE objects: refuse disabled extensions, and mark stubs so ADE content export is skipped
    if (object instanceof ADEModelObject) {
        ADEExtension extension = adeManager.getExtensionByObjectClassId(objectType.getObjectClassId());
        if (extension != null && !extension.isEnabled())
            throw new CityGMLExportException("ADE extension for object " + getObjectSignature(objectType, objectId) + " is disabled. Skipping export.");

        if (exportStub)
            object.setLocalProperty(CoreConstants.EXPORT_STUB, true);
    }

    // dispatch to the exporter responsible for the concrete object class;
    // the order of the instanceof checks goes from top-level to nested types
    boolean success;

    // top-level feature types
    if (object instanceof AbstractBuilding)
        success = getExporter(DBBuilding.class).doExport((AbstractBuilding)object, objectId, (FeatureType)objectType);
    else if (object instanceof AbstractBridge)
        success = getExporter(DBBridge.class).doExport((AbstractBridge)object, objectId, (FeatureType)objectType);
    else if (object instanceof AbstractTunnel)
        success = getExporter(DBTunnel.class).doExport((AbstractTunnel)object, objectId, (FeatureType)objectType);
    else if (object instanceof CityFurniture)
        success = getExporter(DBCityFurniture.class).doExport((CityFurniture)object, objectId, (FeatureType)objectType);
    else if (object instanceof CityObjectGroup)
        success = getExporter(DBCityObjectGroup.class).doExport((CityObjectGroup)object, objectId, (FeatureType)objectType);
    else if (object instanceof GenericCityObject)
        success = getExporter(DBGenericCityObject.class).doExport((GenericCityObject)object, objectId, (FeatureType)objectType);
    else if (object instanceof LandUse)
        success = getExporter(DBLandUse.class).doExport((LandUse)object, objectId, (FeatureType)objectType);
    else if (object instanceof PlantCover)
        success = getExporter(DBPlantCover.class).doExport((PlantCover)object, objectId, (FeatureType)objectType);
    else if (object instanceof SolitaryVegetationObject)
        success = getExporter(DBSolitaryVegetatObject.class).doExport((SolitaryVegetationObject)object, objectId, (FeatureType)objectType);
    else if (object instanceof ReliefFeature)
        success = getExporter(DBReliefFeature.class).doExport((ReliefFeature)object, objectId, (FeatureType)objectType);
    else if (object instanceof TransportationComplex)
        success = getExporter(DBTransportationComplex.class).doExport((TransportationComplex)object, objectId, (FeatureType)objectType);
    else if (object instanceof WaterBody)
        success = getExporter(DBWaterBody.class).doExport((WaterBody)object, objectId, (FeatureType)objectType);

    // nested feature types
    else if (object instanceof AbstractBoundarySurface)
        success = getExporter(DBThematicSurface.class).doExport((AbstractBoundarySurface)object, objectId, (FeatureType)objectType);
    else if (object instanceof AbstractOpening)
        success = getExporter(DBOpening.class).doExport((AbstractOpening)object, objectId, (FeatureType)objectType);
    else if (object instanceof BuildingInstallation)
        success = getExporter(DBBuildingInstallation.class).doExport((BuildingInstallation)object, objectId, (FeatureType)objectType);
    else if (object instanceof IntBuildingInstallation)
        success = getExporter(DBBuildingInstallation.class).doExport((IntBuildingInstallation)object, objectId, (FeatureType)objectType);
    else if (object instanceof Room)
        success = getExporter(DBRoom.class).doExport((Room)object, objectId, (FeatureType)objectType);
    else if (object instanceof BuildingFurniture)
        success = getExporter(DBBuildingFurniture.class).doExport((BuildingFurniture)object, objectId, (FeatureType)objectType);
    // bridge and tunnel modules reuse class names, hence the fully qualified types below
    else if (object instanceof org.citygml4j.model.citygml.bridge.AbstractBoundarySurface)
        success = getExporter(DBBridgeThematicSurface.class).doExport((org.citygml4j.model.citygml.bridge.AbstractBoundarySurface)object, objectId, (FeatureType)objectType);
    else if (object instanceof org.citygml4j.model.citygml.bridge.AbstractOpening)
        success = getExporter(DBBridgeOpening.class).doExport((org.citygml4j.model.citygml.bridge.AbstractOpening)object, objectId, (FeatureType)objectType);
    else if (object instanceof BridgeConstructionElement)
        success = getExporter(DBBridgeConstrElement.class).doExport((BridgeConstructionElement)object, objectId, (FeatureType)objectType);
    else if (object instanceof BridgeInstallation)
        success = getExporter(DBBridgeInstallation.class).doExport((BridgeInstallation)object, objectId, (FeatureType)objectType);
    else if (object instanceof IntBridgeInstallation)
        success = getExporter(DBBridgeInstallation.class).doExport((IntBridgeInstallation)object, objectId, (FeatureType)objectType);
    else if (object instanceof BridgeRoom)
        success = getExporter(DBBridgeRoom.class).doExport((BridgeRoom)object, objectId, (FeatureType)objectType);
    else if (object instanceof BridgeFurniture)
        success = getExporter(DBBridgeFurniture.class).doExport((BridgeFurniture)object, objectId, (FeatureType)objectType);
    else if (object instanceof org.citygml4j.model.citygml.tunnel.AbstractBoundarySurface)
        success = getExporter(DBTunnelThematicSurface.class).doExport((org.citygml4j.model.citygml.tunnel.AbstractBoundarySurface)object, objectId, (FeatureType)objectType);
    else if (object instanceof org.citygml4j.model.citygml.tunnel.AbstractOpening)
        success = getExporter(DBTunnelOpening.class).doExport((org.citygml4j.model.citygml.tunnel.AbstractOpening)object, objectId, (FeatureType)objectType);
    else if (object instanceof TunnelInstallation)
        success = getExporter(DBTunnelInstallation.class).doExport((TunnelInstallation)object, objectId, (FeatureType)objectType);
    else if (object instanceof IntTunnelInstallation)
        success = getExporter(DBTunnelInstallation.class).doExport((IntTunnelInstallation)object, objectId, (FeatureType)objectType);
    else if (object instanceof HollowSpace)
        success = getExporter(DBTunnelHollowSpace.class).doExport((HollowSpace)object, objectId, (FeatureType)objectType);
    else if (object instanceof TunnelFurniture)
        success = getExporter(DBTunnelFurniture.class).doExport((TunnelFurniture)object, objectId, (FeatureType)objectType);
    else if (object instanceof AbstractReliefComponent)
        success = getExporter(DBReliefComponent.class).doExport((AbstractReliefComponent)object, objectId, (FeatureType)objectType);
    else if (object instanceof AbstractTransportationObject)
        success = getExporter(DBTrafficArea.class).doExport((AbstractTransportationObject)object, objectId, (FeatureType)objectType);
    else if (object instanceof AbstractWaterBoundarySurface)
        success = getExporter(DBWaterBoundarySurface.class).doExport((AbstractWaterBoundarySurface)object, objectId, (FeatureType)objectType);
    else if (object instanceof Address)
        success = getExporter(DBAddress.class).doExport((Address)object, objectId, (FeatureType)objectType);

    // generic fallback for any ADE object
    else {
        getExporter(DBCityObject.class).addBatch(object, objectId, objectType, query.getProjectionFilter(objectType));
        success = true;
    }

    return success ? object : null;
}
/**
 * Exports all features referenced by the given feature property of the parent
 * object, dispatching to the exporter matching the property's feature type,
 * and filters the result down to instances of the requested class.
 *
 * @param featureProperty property describing the nested features to export
 * @param parentId database id of the parent object
 * @param featureClass type the returned features must be instances of
 * @return the exported nested features (empty if the type is unsupported)
 */
@SuppressWarnings("unchecked")
@Override
public <T extends AbstractFeature> Collection<T> exportNestedFeatures(FeatureProperty featureProperty, long parentId, Class<T> featureClass) throws CityGMLExportException, SQLException {
    Collection<? extends AbstractFeature> features;
    FeatureType featureType = featureProperty.getType();

    // building module
    if (featureType.isEqualToOrSubTypeOf(getFeatureType(AbstractBuilding.class)))
        features = getExporter(DBBuilding.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(AbstractBoundarySurface.class)))
        features = getExporter(DBThematicSurface.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(AbstractOpening.class)))
        features = getExporter(DBOpening.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(BuildingInstallation.class)))
        features = getExporter(DBBuildingInstallation.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(IntBuildingInstallation.class)))
        features = getExporter(DBBuildingInstallation.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(Room.class)))
        features = getExporter(DBRoom.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(BuildingFurniture.class)))
        features = getExporter(DBBuildingFurniture.class).doExport(featureProperty, parentId);

    // bridge module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(AbstractBridge.class)))
        features = getExporter(DBBridge.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(org.citygml4j.model.citygml.bridge.AbstractBoundarySurface.class)))
        features = getExporter(DBBridgeThematicSurface.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(org.citygml4j.model.citygml.bridge.AbstractOpening.class)))
        features = getExporter(DBBridgeOpening.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(BridgeConstructionElement.class)))
        features = getExporter(DBBridgeConstrElement.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(BridgeInstallation.class)))
        features = getExporter(DBBridgeInstallation.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(IntBridgeInstallation.class)))
        features = getExporter(DBBridgeInstallation.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(BridgeRoom.class)))
        features = getExporter(DBBridgeRoom.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(BridgeFurniture.class)))
        features = getExporter(DBBridgeFurniture.class).doExport(featureProperty, parentId);

    // city furniture module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(CityFurniture.class)))
        features = getExporter(DBCityFurniture.class).doExport(featureProperty, parentId);

    // generics module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(GenericCityObject.class)))
        features = getExporter(DBGenericCityObject.class).doExport(featureProperty, parentId);

    // land use module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(LandUse.class)))
        features = getExporter(DBLandUse.class).doExport(featureProperty, parentId);

    // vegetation module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(PlantCover.class)))
        features = getExporter(DBPlantCover.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(SolitaryVegetationObject.class)))
        features = getExporter(DBSolitaryVegetatObject.class).doExport(featureProperty, parentId);

    // relief module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(ReliefFeature.class)))
        features = getExporter(DBReliefFeature.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(AbstractReliefComponent.class)))
        features = getExporter(DBReliefComponent.class).doExport(featureProperty, parentId);

    // transportation module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(TransportationComplex.class)))
        features = getExporter(DBTransportationComplex.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(AbstractTransportationObject.class)))
        features = getExporter(DBTrafficArea.class).doExport(featureProperty, parentId);

    // tunnel module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(AbstractTunnel.class)))
        features = getExporter(DBTunnel.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(org.citygml4j.model.citygml.tunnel.AbstractBoundarySurface.class)))
        features = getExporter(DBTunnelThematicSurface.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(org.citygml4j.model.citygml.tunnel.AbstractOpening.class)))
        features = getExporter(DBTunnelOpening.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(TunnelInstallation.class)))
        features = getExporter(DBTunnelInstallation.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(IntTunnelInstallation.class)))
        features = getExporter(DBTunnelInstallation.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(HollowSpace.class)))
        features = getExporter(DBTunnelHollowSpace.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(TunnelFurniture.class)))
        features = getExporter(DBTunnelFurniture.class).doExport(featureProperty, parentId);

    // water body module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(WaterBody.class)))
        features = getExporter(DBWaterBody.class).doExport(featureProperty, parentId);
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(AbstractWaterBoundarySurface.class)))
        features = getExporter(DBWaterBoundarySurface.class).doExport(featureProperty, parentId);

    // core module
    else if (featureType.isEqualToOrSubTypeOf(getFeatureType(Address.class)))
        features = getExporter(DBAddress.class).doExport(featureProperty, parentId);
    else
        features = Collections.emptyList();

    // filter instances according to the provided feature class
    // NOTE(review): removeIf mutates the collection returned by the exporter;
    // this assumes the exporters return mutable collections - confirm
    if (!features.isEmpty())
        features.removeIf(abstractFeature -> !featureClass.isInstance(abstractFeature));

    return (Collection<T>)features;
}
/**
 * Delegates the export of an ADE object to the export manager of the owning
 * ADE extension. Export stubs and non-ADE objects are left untouched.
 */
protected void delegateToADEExporter(AbstractGML object, long objectId, AbstractObjectType<?> objectType, ProjectionFilter projectionFilter) throws CityGMLExportException, SQLException {
    // only fully exported ADE objects are handed over to their ADE exporter
    if (!(object instanceof ADEModelObject) || object.hasLocalProperty(CoreConstants.EXPORT_STUB)) {
        return;
    }

    ADEExportManager adeExporter = getADEExportManager(objectType.getSchema());
    adeExporter.exportObject((ADEModelObject) object, objectId, objectType, projectionFilter);
}
/**
 * Delegates the export of ADE generic application properties to the export
 * manager of the ADE extension owning each hook table.
 */
protected void delegateToADEExporter(List<String> adeHookTables, AbstractFeature parent, long parentId, FeatureType parentType, ProjectionFilter projectionFilter) throws CityGMLExportException, SQLException {
    for (int i = 0; i < adeHookTables.size(); i++) {
        String hookTable = adeHookTables.get(i);
        getADEExportManager(hookTable).exportGenericApplicationProperties(hookTable, parent, parentId, parentType, projectionFilter);
    }
}
/** Returns the configured feature batch size, capped by the database limit. */
protected int getFeatureBatchSize() {
    return getBatchSize(config.getDatabaseConfig().getExportBatching().getFeatureBatchSize());
}

/** Returns the configured geometry batch size, capped by the database limit. */
protected int getGeometryBatchSize() {
    return getBatchSize(config.getDatabaseConfig().getExportBatching().getGeometryBatchSize());
}

/** Caps a configured batch size at the database's maximum IN-operator size. */
private int getBatchSize(int batchSize) {
    int upperLimit = databaseAdapter.getSQLAdapter().getMaximumNumberOfItemsForInOperator();
    return batchSize < upperLimit ? batchSize : upperLimit;
}
@Override
public void executeBatch() throws CityGMLExportException, SQLException {
    // flush the pending batched city object and surface geometry exports
    getExporter(DBCityObject.class).executeBatch();
    getExporter(DBSurfaceGeometry.class).executeBatch();
}

@Override
public SurfaceGeometryExporter getSurfaceGeometryExporter() throws CityGMLExportException, SQLException {
    return getExporter(DBSurfaceGeometry.class);
}

@Override
public ImplicitGeometry createImplicitGeometry(long id, GeometryObject referencePoint, String transformationMatrix) throws CityGMLExportException, SQLException {
    // exports an implicit geometry template together with its anchor point and matrix
    return getExporter(DBImplicitGeometry.class).doExport(id, referencePoint, transformationMatrix);
}

// exports a global (not feature-bound) appearance by its database id
public Appearance exportGlobalAppearance(long appearanceId) throws CityGMLExportException, SQLException {
    return getExporter(DBGlobalAppearance.class).doExport(appearanceId);
}
/**
 * Post-processes and writes the given feature as a separate top-level feature.
 * Only possible when the feature writer supports flat hierarchies.
 *
 * @return {@code true} if the feature was written, {@code false} otherwise
 */
@Override
public boolean exportAsGlobalFeature(AbstractFeature feature) throws CityGMLExportException, SQLException {
    if (featureWriter.supportsFlatHierarchies()) {
        // features outside the requested feature type filter are only written
        // as additional objects, not as regular export results
        if (!query.getFeatureTypeFilter().containsFeatureType(getFeatureType(feature)))
            feature.setLocalProperty(CoreConstants.EXPORT_AS_ADDITIONAL_OBJECT, true);

        AbstractGML object = processObject(feature);
        if (object instanceof AbstractFeature) {
            feature = (AbstractFeature) object;
            try {
                featureWriter.write(feature, -1);
            } catch (FeatureWriteException e) {
                throw new CityGMLExportException("Failed to write global feature with gml:id '" + feature.getId() + "'.", e);
            }

            updateExportCounter(feature);
            return true;
        }
    }

    return false;
}

// global feature export is only available for writers with flat hierarchies
@Override
public boolean supportsExportOfGlobalFeatures() {
    return featureWriter.supportsFlatHierarchies();
}
/**
 * Returns the lazily created GML converter, initialized with the target SRS
 * name (or the database SRS if no target SRS is set) and the optional affine
 * transformer.
 *
 * NOTE(review): lazy initialization is not synchronized - assumes this manager
 * is confined to a single worker thread; confirm before sharing instances.
 */
@Override
public GMLConverter getGMLConverter() {
    if (gmlConverter == null) {
        gmlConverter = new GMLConverter(query.isSetTargetSrs() ?
                query.getTargetSrs().getGMLSrsName() :
                databaseAdapter.getConnectionMetaData().getReferenceSystem().getGMLSrsName(),
                affineTransformer, config);
    }

    return gmlConverter;
}
@Override
public AbstractDatabaseAdapter getDatabaseAdapter() {
    return databaseAdapter;
}

@Override
public CityGMLVersion getTargetCityGMLVersion() {
    return query.getTargetVersion();
}

@Override
public ProjectionFilter getProjectionFilter(AbstractObjectType<?> objectType) {
    return query.getProjectionFilter(objectType);
}

/**
 * Combines the projection filters of all object types mapped to the given
 * table into a single filter.
 */
@Override
public CombinedProjectionFilter getCombinedProjectionFilter(String tableName) {
    List<ProjectionFilter> filters = new ArrayList<>();
    schemaMapping.listAbstractObjectTypesByTable(tableName, true).forEach(type -> filters.add(query.getProjectionFilter(type)));
    return new CombinedProjectionFilter(filters);
}

@Override
public LodFilter getLodFilter() {
    return query.getLodFilter();
}

@Override
public AttributeValueSplitter getAttributeValueSplitter() {
    return attributeValueSplitter;
}

// true if errors abort the export instead of only being logged
@Override
public boolean isFailOnError() {
    return failOnError;
}

@Override
public ExportConfig getExportConfig() {
    return config.getExportConfig();
}

// may be null if the affine transformation is disabled in the config
public AffineTransformer getAffineTransformer() {
    return affineTransformer;
}

public InternalConfig getInternalConfig() {
    return internalConfig;
}

// qualifies a table name with the database schema of the current connection
@Override
public String getTableNameWithSchema(String tableName) {
    return databaseAdapter.getConnectionDetails().getSchema() + '.' + tableName;
}
/**
 * Returns the geometry column as a projection token, wrapping it in the
 * database's transform function when coordinates must be reprojected to the
 * target SRS.
 */
@Override
public ProjectionToken getGeometryColumn(Column column) {
    return (!internalConfig.isTransformCoordinates()) ?
            column :
            new Function(databaseAdapter.getSQLAdapter().resolveDatabaseOperationName("citydb_srs.transform_or_null"),
                    column.getName(), column, new IntegerLiteral(query.getTargetSrs().getSrid()));
}

/** Same as {@link #getGeometryColumn(Column)} but aliases the result column. */
@Override
public ProjectionToken getGeometryColumn(Column column, String asName) {
    return (!internalConfig.isTransformCoordinates()) ?
            new Column(column.getTable(), column.getName(), asName) :
            new Function(databaseAdapter.getSQLAdapter().resolveDatabaseOperationName("citydb_srs.transform_or_null"),
                    asName, column, new IntegerLiteral(query.getTargetSrs().getSrid()));
}

/**
 * String-based variant: returns the plain column name, or the SQL snippet
 * transforming it to the target SRS aliased to the unqualified column name.
 */
@Override
public String getGeometryColumn(String columnName) {
    return (!internalConfig.isTransformCoordinates()) ?
            columnName :
            databaseAdapter.getSQLAdapter().resolveDatabaseOperationName("citydb_srs.transform_or_null") +
            "(" + columnName + ", " + query.getTargetSrs().getSrid() + ") as " + columnName.replaceFirst(".*?\\.", "");
}

/** String-based variant with an explicit result alias. */
@Override
public String getGeometryColumn(String columnName, String asName) {
    return (!internalConfig.isTransformCoordinates()) ?
            columnName + " as " + asName :
            databaseAdapter.getSQLAdapter().resolveDatabaseOperationName("citydb_srs.transform_or_null") +
            "(" + columnName + ", " + query.getTargetSrs().getSrid() + ") as " + asName;
}
@Override
public void logOrThrowErrorMessage(String message) throws CityGMLExportException {
    logOrThrowErrorMessage(message, null);
}

/**
 * Reports an error: in fail-fast mode the error is escalated as an exception,
 * otherwise it is only logged and the export continues.
 */
@Override
public void logOrThrowErrorMessage(String message, Throwable cause) throws CityGMLExportException {
    if (failOnError) {
        throw new CityGMLExportException(message, cause);
    }

    log.error(message, cause);
}
/**
 * Builds a human-readable signature for log and error messages from an object
 * class id, falling back to a generic signature when the id is unknown.
 */
@Override
public String getObjectSignature(int objectClassId, long id) {
    AbstractObjectType<?> objectType = schemaMapping.getAbstractObjectType(objectClassId);
    // use the same "(id: ...)" format as the typed overload below; the previous
    // fallback used an inconsistent "(id : ... )" spelling
    return objectType != null ? getObjectSignature(objectType, id) : "city object (id: " + id + ")";
}

/** Builds a signature of the form {@code prefix:path (id: n)} for a mapped type. */
@Override
public String getObjectSignature(AbstractObjectType<?> objectType, long id) {
    return objectType.getSchema().getXMLPrefix() + ":" + objectType.getPath() + " (id: " + id + ")";
}
// resolves the mapped feature type of a citygml4j feature instance
@Override
public FeatureType getFeatureType(AbstractFeature feature) {
    return schemaMapping.getFeatureType(Util.getObjectClassId(feature.getClass()));
}

@Override
public ObjectType getObjectType(AbstractGML object) {
    return schemaMapping.getObjectType(Util.getObjectClassId(object.getClass()));
}

@Override
public AbstractObjectType<?> getAbstractObjectType(AbstractGML object) {
    return schemaMapping.getAbstractObjectType(Util.getObjectClassId(object.getClass()));
}

// lookups by object class id; may return null for unknown ids
@Override
public FeatureType getFeatureType(int objectClassId) {
    return schemaMapping.getFeatureType(objectClassId);
}

@Override
public ObjectType getObjectType(int objectClassId) {
    return schemaMapping.getObjectType(objectClassId);
}

@Override
public AbstractObjectType<?> getAbstractObjectType(int objectClassId) {
    return schemaMapping.getAbstractObjectType(objectClassId);
}
public String generateFeatureGmlId(AbstractFeature feature) {
    return generateFeatureGmlId(feature, feature.getId());
}

/**
 * Generates a new gml:id for a feature. For CityJSON output a plain UUID is
 * used; for CityGML the configured id prefix is applied, the old id may be
 * appended, and the old id can be preserved as an external reference.
 *
 * @param oldGmlId the feature's previous gml:id, may be {@code null}
 */
public String generateFeatureGmlId(AbstractFeature feature, String oldGmlId) {
    if (internalConfig.getOutputFormat() == OutputFormat.CITYJSON) {
        return DefaultGMLIdManager.getInstance().generateUUID();
    } else {
        XLinkFeatureConfig xlinkOptions = config.getExportConfig().getCityGMLOptions().getXlink().getFeature();
        String gmlId = DefaultGMLIdManager.getInstance().generateUUID(xlinkOptions.getIdPrefix());
        if (oldGmlId != null) {
            if (xlinkOptions.isSetAppendId())
                gmlId = gmlId + "-" + oldGmlId;

            // keep the original id as an external reference so it is not lost
            if (xlinkOptions.isSetKeepGmlIdAsExternalReference() && feature instanceof AbstractCityObject) {
                ExternalReference externalReference = new ExternalReference();
                if (internalConfig.getOutputFile() != null)
                    externalReference.setInformationSystem(internalConfig.getOutputFile().getFile().toString());

                ExternalObject externalObject = new ExternalObject();
                externalObject.setName(oldGmlId);
                externalReference.setExternalObject(externalObject);

                ((AbstractCityObject) feature).addExternalReference(externalReference);
            }
        }

        return gmlId;
    }
}
public String generateGeometryGmlId(AbstractGeometry geometry) {
    return generateGeometryGmlId(geometry.getId());
}

/**
 * Generates a new gml:id for a geometry, using the configured id prefix and
 * optionally keeping the previous id as a suffix.
 *
 * @param oldGmlId the geometry's previous gml:id, may be {@code null}
 */
public String generateGeometryGmlId(String oldGmlId) {
    XLinkConfig xlinkOptions = config.getExportConfig().getCityGMLOptions().getXlink().getGeometry();
    String newGmlId = DefaultGMLIdManager.getInstance().generateUUID(xlinkOptions.getIdPrefix());
    return xlinkOptions.isSetAppendId() && oldGmlId != null
            ? newGmlId + "-" + oldGmlId
            : newGmlId;
}
// forwards an XLink work item to the shared XLink worker pool
public void propagateXlink(DBXlink xlink) {
    xlinkPool.addWork(xlink);
}

/**
 * Registers a gml:id in the object id cache.
 *
 * @return {@code true} if the id was already registered, {@code false} otherwise
 */
@Override
public boolean lookupAndPutObjectId(String gmlId, long id, int objectClassId) {
    IdCache cache = idCacheManager.getCache(IdCacheType.OBJECT);
    return cache != null && cache.lookupAndPut(gmlId, id, objectClassId);
}

// checks whether a gml:id is already known to the object id cache
@Override
public boolean lookupObjectId(String gmlId) {
    IdCache cache = idCacheManager.getCache(IdCacheType.OBJECT);
    return cache != null && cache.get(gmlId) != null;
}

// registers a gml:id in the object id cache without checking for a previous entry
public void putObjectId(String gmlId, long id, int objectClassId) {
    IdCache cache = idCacheManager.getCache(IdCacheType.OBJECT);
    if (cache != null)
        cache.put(gmlId, id, -1, false, null, objectClassId);
}
/**
 * Registers a geometry gml:id in the per-feature local cache and, unless
 * {@code useLocalScope} is set, also in the global geometry id cache.
 *
 * @return {@code true} if the id was already registered in either cache
 */
public boolean lookupAndPutGeometryId(String gmlId, long id, boolean useLocalScope) {
    // add() returns false if the id was already present locally
    boolean isCached = !localGeometryCache.add(gmlId);
    if (!useLocalScope) {
        IdCache cache = idCacheManager.getCache(IdCacheType.GEOMETRY);
        if (cache != null) {
            if (isCached) {
                // already seen locally: just make sure the global cache knows it too
                cache.put(gmlId, id, 0, false, null, MappingConstants.SURFACE_GEOMETRY_OBJECTCLASS_ID);
            } else {
                isCached = cache.lookupAndPut(gmlId, id, MappingConstants.SURFACE_GEOMETRY_OBJECTCLASS_ID);
            }
        }
    }

    return isCached;
}

// checks the per-feature cache first, then falls back to the global geometry id cache
public boolean lookupGeometryId(String gmlId) {
    boolean isCached = localGeometryCache.contains(gmlId);
    if (!isCached) {
        IdCache cache = idCacheManager.getCache(IdCacheType.GEOMETRY);
        isCached = cache != null && cache.get(gmlId) != null;
    }

    return isCached;
}
public String getGeometrySignature(AbstractGeometry geometry, long id) {
    return getGeometrySignature(geometry.getGMLClass(), id);
}

/** Builds a signature of the form {@code gml:Class (ID: n)} for log messages. */
public String getGeometrySignature(GMLClass geometryClass, long id) {
    return String.format("gml:%s (ID: %s)", geometryClass, id);
}

/** Returns the qualified name {@code prefix:path} of a mapped property. */
protected String getPropertyName(AbstractProperty property) {
    return String.format("%s:%s", property.getSchema().getXMLPrefix(), property.getPath());
}
// true if at least one ADE extension is enabled
protected boolean hasADESupport() {
    return hasADESupport;
}

// resolves the mapped feature type of a citygml4j feature class
protected FeatureType getFeatureType(Class<? extends AbstractFeature> featureClass) {
    return schemaMapping.getFeatureType(Util.getObjectClassId(featureClass));
}

protected AbstractObjectType<?> getAbstractObjectType(Class<? extends AbstractGML> objectClass) {
    return schemaMapping.getAbstractObjectType(Util.getObjectClassId(objectClass));
}

// textures are exported lazily whenever LoD filtering may drop geometry
public boolean isLazyTextureExport() {
    return !query.getLodFilter().preservesGeometry();
}
// records the exported feature in the per-worker export statistics
public void updateExportCounter(AbstractFeature feature) {
    exportCounter.updateExportCounter(feature);
}

// returns the accumulated object counts and resets the counter
public Map<Integer, Long> getAndResetObjectCounter() {
    return exportCounter.getAndResetObjectCounter();
}

// returns the accumulated geometry counts and resets the counter
public Map<GMLClass, Long> getAndResetGeometryCounter() {
    return exportCounter.getAndResetGeometryCounter();
}

public CityGMLBuilder getCityGMLBuilder() {
    return cityGMLBuilder;
}

protected SchemaMapping getSchemaMapping() {
    return schemaMapping;
}
/**
 * Creates an ADE generic element wrapping a DOM element with the given
 * namespace URI and local name. The backing DOM document is created lazily
 * and reused for subsequent calls.
 *
 * @throws ParserConfigurationException if the DOM document cannot be created
 */
protected ADEGenericElement createADEGenericElement(String uri, String localName) throws ParserConfigurationException {
    if (document == null)
        document = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();

    ADEGenericElement adeElement = new ADEGenericElement();
    adeElement.setContent(document.createElementNS(uri, localName));

    return adeElement;
}
/**
 * Unmarshals XML content from the given reader into a citygml4j object.
 *
 * NOTE(review): unmarshalling failures are deliberately swallowed and mapped
 * to a {@code null} result; callers must handle the null case.
 *
 * @return the unmarshalled object, or {@code null} if unmarshalling failed
 */
protected Object unmarshal(Reader reader) {
    Object object;
    try {
        // first stage: JAXB binding, second stage: citygml4j object model
        Unmarshaller unmarshaller = cityGMLBuilder.getJAXBContext().createUnmarshaller();
        object = unmarshaller.unmarshal(reader);
        if (object != null)
            object = jaxbUnmarshaller.unmarshal(object);
    } catch (JAXBException | MissingADESchemaException e) {
        object = null;
    }

    return object;
}
/**
 * Closes all exporters and ADE export managers created by this manager and
 * clears both caches so no closed instance can be handed out again.
 */
public void close() throws CityGMLExportException, SQLException {
    for (DBExporter exporter : exporters.values())
        exporter.close();

    for (ADEExportManager adeExporter : adeExporters.values())
        adeExporter.close();

    exporters.clear();
    // also drop the ADE managers; previously only the exporter cache was
    // cleared, so getADEExportManager() could return an already closed manager
    adeExporters.clear();
}
/**
 * Collects the ADE hook tables of all (non-ADE) feature types mapped to the
 * given table.
 */
protected Set<String> getADEHookTables(TableEnum table) {
    Set<String> hookTables = new HashSet<>();
    for (FeatureType featureType : schemaMapping.listFeatureTypesByTable(table.getName(), true)) {
        // ADEs of ADEs are not supported, so features defined by an ADE are skipped
        boolean isAdeFeature = adeManager.getExtensionByObjectClassId(featureType.getObjectClassId()) != null;
        if (!isAdeFeature) {
            hookTables.addAll(getADEHookTables(featureType));
        }
    }

    return hookTables;
}
/**
 * Collects the hook tables of all enabled ADE extensions that inject
 * properties into the given feature type.
 */
protected Set<String> getADEHookTables(FeatureType featureType) {
    Set<String> hookTables = new HashSet<>();
    for (AbstractProperty property : featureType.listProperties(false, true)) {
        if (!(property instanceof InjectedProperty)) {
            continue;
        }

        String hookTable = ((InjectedProperty) property).getBaseJoin().getTable();
        ADEExtension extension = adeManager.getExtensionByTableName(hookTable);
        // only tables of enabled extensions are considered
        if (extension != null && extension.isEnabled()) {
            hookTables.add(hookTable);
        }
    }

    return hookTables;
}
/**
 * Resolves the ADE export manager responsible for the given application
 * schema, failing if the owning extension is missing or disabled.
 */
private ADEExportManager getADEExportManager(AppSchema schema) throws CityGMLExportException, SQLException {
    ADEExtension adeExtension = adeManager.getExtensionBySchema(schema);
    if (adeExtension == null || !adeExtension.isEnabled()) {
        throw new CityGMLExportException("ADE extension for schema " +
                schema.getNamespace(query.getTargetVersion()).getURI() +
                " is disabled. Skipping export.");
    }

    return getADEExportManager(adeExtension);
}

/**
 * Resolves the ADE export manager responsible for the given hook table,
 * failing if the owning extension is missing or disabled.
 */
private ADEExportManager getADEExportManager(String tableName) throws CityGMLExportException, SQLException {
    ADEExtension adeExtension = adeManager.getExtensionByTableName(tableName);
    if (adeExtension == null || !adeExtension.isEnabled()) {
        throw new CityGMLExportException("ADE extension for table '" +
                tableName + "' is disabled. Skipping export.");
    }

    return getADEExportManager(adeExtension);
}

/**
 * Returns the cached export manager of the given extension, creating and
 * initializing it on first use.
 */
private ADEExportManager getADEExportManager(ADEExtension extension) throws CityGMLExportException, SQLException {
    ADEExportManager adeExporter = adeExporters.get(extension);
    if (adeExporter == null) {
        adeExporter = extension.createADEExportManager();
        if (adeExporter == null)
            throw new CityGMLExportException("Failed to create ADE exporter for '" +
                    extension.getMetadata().getIdentifier() + "'");

        adeExporter.init(connection, this);
        adeExporters.put(extension, adeExporter);
    }

    return adeExporter;
}
/**
 * Returns the database exporter of the requested type, constructing and caching
 * it on first use. Exactly one instance per exporter class is kept in
 * {@code exporters} for the lifetime of this manager; the dispatch below maps
 * each exporter class to its constructor, grouped by CityGML module.
 *
 * @param type the exporter class to obtain
 * @return the cached or newly created exporter, cast to the requested type
 * @throws CityGMLExportException if the type is unknown or setup fails
 * @throws SQLException if an exporter's database initialization fails
 */
protected <T extends DBExporter> T getExporter(Class<T> type) throws CityGMLExportException, SQLException {
DBExporter exporter = exporters.get(type);
if (exporter == null) {
// core module
if (type == DBSurfaceGeometry.class)
exporter = new DBSurfaceGeometry(connection, this);
else if (type == DBCityObject.class)
exporter = new DBCityObject(connection, query, this);
else if (type == DBGeneralization.class)
exporter = new DBGeneralization(connection, this);
else if (type == DBCityObjectGenericAttrib.class)
exporter = new DBCityObjectGenericAttrib(connection, this);
else if (type == DBAddress.class)
exporter = new DBAddress(connection, this);
else if (type == DBImplicitGeometry.class)
exporter = new DBImplicitGeometry(connection, this);
// building module
else if (type == DBBuilding.class)
exporter = new DBBuilding(connection, this);
else if (type == DBThematicSurface.class)
exporter = new DBThematicSurface(connection, this);
else if (type == DBOpening.class)
exporter = new DBOpening(connection, this);
else if (type == DBBuildingInstallation.class)
exporter = new DBBuildingInstallation(connection, this);
else if (type == DBRoom.class)
exporter = new DBRoom(connection, this);
else if (type == DBBuildingFurniture.class)
exporter = new DBBuildingFurniture(connection, this);
// bridge module
else if (type == DBBridge.class)
exporter = new DBBridge(connection, this);
else if (type == DBBridgeThematicSurface.class)
exporter = new DBBridgeThematicSurface(connection, this);
else if (type == DBBridgeOpening.class)
exporter = new DBBridgeOpening(connection, this);
else if (type == DBBridgeConstrElement.class)
exporter = new DBBridgeConstrElement(connection, this);
else if (type == DBBridgeInstallation.class)
exporter = new DBBridgeInstallation(connection, this);
else if (type == DBBridgeRoom.class)
exporter = new DBBridgeRoom(connection, this);
else if (type == DBBridgeFurniture.class)
exporter = new DBBridgeFurniture(connection, this);
// city furniture module
else if (type == DBCityFurniture.class)
exporter = new DBCityFurniture(connection, this);
// city object group module
else if (type == DBCityObjectGroup.class)
exporter = new DBCityObjectGroup(connection, this);
// generics module
else if (type == DBGenericCityObject.class)
exporter = new DBGenericCityObject(connection, this);
// land use module
else if (type == DBLandUse.class)
exporter = new DBLandUse(connection, this);
// vegetation module
else if (type == DBPlantCover.class)
exporter = new DBPlantCover(connection, this);
else if (type == DBSolitaryVegetatObject.class)
exporter = new DBSolitaryVegetatObject(connection, this);
// relief module
else if (type == DBReliefFeature.class)
exporter = new DBReliefFeature(connection, this);
else if (type == DBReliefComponent.class)
exporter = new DBReliefComponent(connection, this);
// transportation module
else if (type == DBTransportationComplex.class)
exporter = new DBTransportationComplex(connection, this);
else if (type == DBTrafficArea.class)
exporter = new DBTrafficArea(connection, this);
// tunnel module
else if (type == DBTunnel.class)
exporter = new DBTunnel(connection, this);
else if (type == DBTunnelThematicSurface.class)
exporter = new DBTunnelThematicSurface(connection, this);
else if (type == DBTunnelOpening.class)
exporter = new DBTunnelOpening(connection, this);
else if (type == DBTunnelInstallation.class)
exporter = new DBTunnelInstallation(connection, this);
else if (type == DBTunnelHollowSpace.class)
exporter = new DBTunnelHollowSpace(connection, this);
else if (type == DBTunnelFurniture.class)
exporter = new DBTunnelFurniture(connection, this);
// water body module
else if (type == DBWaterBody.class)
exporter = new DBWaterBody(connection, this);
else if (type == DBWaterBoundarySurface.class)
exporter = new DBWaterBoundarySurface(connection, this);
// appearance module
else if (type == DBGlobalAppearance.class) {
// Global appearances are read from a temporary cache table, not the
// live connection; the table only exists when that export is enabled.
CacheTable cacheTable = null;
if (internalConfig.isExportGlobalAppearances()) {
cacheTable = cacheTableManager.getCacheTable(CacheTableModel.GLOBAL_APPEARANCE);
if (cacheTable == null)
logOrThrowErrorMessage("Failed to access temporary table for global appearances.");
}
// NOTE(review): if logOrThrowErrorMessage only logs, DBGlobalAppearance is
// constructed with a null cacheTable — presumably handled there; confirm.
exporter = new DBGlobalAppearance(cacheTable, this, config);
} else if (type == DBLocalAppearance.class)
exporter = new DBLocalAppearance(connection, query, this, config);
if (exporter == null)
throw new CityGMLExportException("Failed to build database exporter of type " + type.getName() + ".");
exporters.put(type, exporter);
}
return type.cast(exporter);
}
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.spi.predicate;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.trino.spi.block.Block;
import io.trino.spi.type.Type;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodType;
import java.util.Objects;
import static io.trino.spi.function.InvocationConvention.InvocationArgumentConvention.NEVER_NULL;
import static io.trino.spi.function.InvocationConvention.InvocationReturnConvention.FAIL_ON_NULL;
import static io.trino.spi.function.InvocationConvention.InvocationReturnConvention.NULLABLE_RETURN;
import static io.trino.spi.function.InvocationConvention.simpleConvention;
import static io.trino.spi.predicate.Utils.TUPLE_DOMAIN_TYPE_OPERATORS;
import static io.trino.spi.predicate.Utils.handleThrowable;
import static io.trino.spi.predicate.Utils.nativeValueToBlock;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
// TODO: When we move RowExpressions to the SPI, we should get rid of this. This is effectively a ConstantExpression.
/**
 * A (type, value) pair representing a single nullable constant. For comparable
 * types the equality and hash-code operators are resolved once at construction
 * and adapted (via asType) to generic Object signatures so invokeExact can be
 * used; for non-comparable types both handles are left null.
 */
public final class NullableValue
{
private final Type type;
// Null represents SQL NULL of the given type.
private final Object value;
// Null when the type is not comparable.
private final MethodHandle equalOperator;
// Null when the type is not comparable.
private final MethodHandle hashCodeOperator;
/**
 * Creates a nullable value; {@code value} may be null. A non-null value must
 * match the type's Java representation.
 */
public NullableValue(Type type, Object value)
{
requireNonNull(type, "type is null");
if (value != null && !Primitives.wrap(type.getJavaType()).isInstance(value)) {
throw new IllegalArgumentException(format("Object '%s' does not match type %s", value, type.getJavaType()));
}
this.type = type;
this.value = value;
if (type.isComparable()) {
// Pre-adapt the operator handles to Object-typed signatures so the
// invokeExact calls in valueHash/valueEquals match exactly.
this.equalOperator = TUPLE_DOMAIN_TYPE_OPERATORS.getEqualOperator(type, simpleConvention(NULLABLE_RETURN, NEVER_NULL, NEVER_NULL))
.asType(MethodType.methodType(Boolean.class, Object.class, Object.class));
this.hashCodeOperator = TUPLE_DOMAIN_TYPE_OPERATORS.getHashCodeOperator(type, simpleConvention(FAIL_ON_NULL, NEVER_NULL))
.asType(MethodType.methodType(long.class, Object.class));
}
else {
this.equalOperator = null;
this.hashCodeOperator = null;
}
}
/** Creates a non-null value; use {@link #asNull(Type)} for nulls. */
public static NullableValue of(Type type, Object value)
{
requireNonNull(value, "value is null");
return new NullableValue(type, value);
}
/** Creates a null value of the given type. */
public static NullableValue asNull(Type type)
{
return new NullableValue(type, null);
}
// Jackson deserialization only
@JsonCreator
public static NullableValue fromSerializable(@JsonProperty("serializable") Serializable serializable)
{
Type type = serializable.getType();
Block block = serializable.getBlock();
// A null block round-trips back to a null native value.
return new NullableValue(type, block == null ? null : Utils.blockToNativeValue(type, block));
}
// Jackson serialization only
@JsonProperty
public Serializable getSerializable()
{
return new Serializable(type, value == null ? null : Utils.nativeValueToBlock(type, value));
}
/** Encodes the value (including null) as a single-position block. */
public Block asBlock()
{
return Utils.nativeValueToBlock(type, value);
}
public Type getType()
{
return type;
}
public boolean isNull()
{
return value == null;
}
public Object getValue()
{
return value;
}
@Override
public int hashCode()
{
// 31-based mix of the type hash and (when present) the type-specific
// value hash; the final (int) cast truncates deliberately.
long hash = Objects.hash(type);
if (value != null) {
hash = hash * 31 + valueHash();
}
return (int) hash;
}
// NOTE(review): for a non-comparable type with a non-null value this would NPE
// on the null handle — presumably such values never occur here; confirm.
private long valueHash()
{
try {
return (long) hashCodeOperator.invokeExact(value);
}
catch (Throwable throwable) {
throw handleThrowable(throwable);
}
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
NullableValue other = (NullableValue) obj;
// Two nulls of the same type are equal; otherwise defer to the type's
// equality operator.
return Objects.equals(this.type, other.type)
&& (this.value == null) == (other.value == null)
&& (this.value == null || valueEquals(other.value));
}
private boolean valueEquals(Object otherValue)
{
try {
// The operator may return null (indeterminate); treat that as not-equal.
return ((Boolean) equalOperator.invokeExact(value, otherValue)) == Boolean.TRUE;
}
catch (Throwable throwable) {
throw handleThrowable(throwable);
}
}
@Override
public String toString()
{
StringBuilder sb = new StringBuilder("NullableValue{");
sb.append("type=").append(type);
sb.append(", value=").append(type.getObjectValue(ToStringSession.INSTANCE, nativeValueToBlock(type, value), 0));
sb.append('}');
return sb.toString();
}
/** Jackson wire form: the type plus the value encoded as a block (null block = SQL NULL). */
public static class Serializable
{
private final Type type;
private final Block block;
@JsonCreator
public Serializable(
@JsonProperty("type") Type type,
@JsonProperty("block") Block block)
{
this.type = requireNonNull(type, "type is null");
this.block = block;
}
@JsonProperty
public Type getType()
{
return type;
}
@JsonProperty
public Block getBlock()
{
return block;
}
}
}
|
package cn.jsprun.foreg.vo.statistic;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * View object for the "management team" statistics page: an admin summary
 * table, per-forum moderator tables grouped by forum group, the navigation
 * bar, and the paging timestamps of the statistics window.
 */
public class Stats_manageTeamVO {
    private Stats_navbarVO navbar = new Stats_navbarVO();
    // One row per administrator in the admin summary table.
    private List<Map<String, Object>> manageTeamMapList = new ArrayList<>();
    private List<ForumTableGroup> forumTableGroupList = new ArrayList<>();
    private List<String> adminTableTitleList = new ArrayList<>();
    private List<String> forumTableTitleList = new ArrayList<>();
    private String lastTime;
    private String nextTime;

    /** One forum-group section: group id/name plus its moderated forum rows. */
    public static class ForumTableGroup {
        private String groupId;
        private String groupName;
        private List<Forum> forumList = new ArrayList<>();

        /**
         * One moderator row of a forum table. The "Froum" spelling in several
         * accessor names is kept as-is for backward compatibility with callers
         * and view templates.
         */
        public static class Forum {
            private boolean selectFroumName;
            private String rowspan;
            private String uri;
            private String froumName;
            private String uid;
            private String username;
            private String managerName;
            private String lastAccessTime;
            private String offDays;
            private String credits;
            private String posts;
            private String thisMonthPosts;
            private String thisMonthManage;
            private boolean showOnline;
            private String allTimeOnline;
            private String thisMonthTimeOnline;

            public String getAllTimeOnline() {
                return allTimeOnline;
            }

            public void setAllTimeOnline(String allTimeOnline) {
                this.allTimeOnline = allTimeOnline;
            }

            public String getCredits() {
                return credits;
            }

            public void setCredits(String credits) {
                this.credits = credits;
            }

            public String getFroumName() {
                return froumName;
            }

            public void setFroumName(String froumName) {
                this.froumName = froumName;
            }

            public String getLastAccessTime() {
                return lastAccessTime;
            }

            public void setLastAccessTime(String lastAccessTime) {
                this.lastAccessTime = lastAccessTime;
            }

            public String getManagerName() {
                return managerName;
            }

            public void setManagerName(String managerName) {
                this.managerName = managerName;
            }

            public String getOffDays() {
                return offDays;
            }

            public void setOffDays(String offDays) {
                this.offDays = offDays;
            }

            public String getPosts() {
                return posts;
            }

            public void setPosts(String posts) {
                this.posts = posts;
            }

            public String getRowspan() {
                return rowspan;
            }

            public void setRowspan(String rowspan) {
                this.rowspan = rowspan;
            }

            public boolean isShowOnline() {
                return showOnline;
            }

            public void setShowOnline(boolean showOnline) {
                this.showOnline = showOnline;
            }

            public String getThisMonthManage() {
                return thisMonthManage;
            }

            public void setThisMonthManage(String thisMonthManage) {
                this.thisMonthManage = thisMonthManage;
            }

            public String getThisMonthPosts() {
                return thisMonthPosts;
            }

            public void setThisMonthPosts(String thisMonthPosts) {
                this.thisMonthPosts = thisMonthPosts;
            }

            public String getThisMonthTimeOnline() {
                return thisMonthTimeOnline;
            }

            public void setThisMonthTimeOnline(String thisMonthTimeOnline) {
                this.thisMonthTimeOnline = thisMonthTimeOnline;
            }

            public String getUid() {
                return uid;
            }

            public void setUid(String uid) {
                this.uid = uid;
            }

            public String getUri() {
                return uri;
            }

            public void setUri(String uri) {
                this.uri = uri;
            }

            public String getUsername() {
                return username;
            }

            public void setUsername(String username) {
                this.username = username;
            }

            public boolean isSelectFroumName() {
                return selectFroumName;
            }

            public void setSelectFroumName(boolean selectFroumName) {
                this.selectFroumName = selectFroumName;
            }
        }

        public List<Forum> getForumList() {
            return forumList;
        }

        public void setForumList(List<Forum> forumList) {
            this.forumList = forumList;
        }

        public String getGroupId() {
            return groupId;
        }

        public void setGroupId(String groupId) {
            this.groupId = groupId;
        }

        public String getGroupName() {
            return groupName;
        }

        public void setGroupName(String groupName) {
            this.groupName = groupName;
        }
    }

    public String getLastTime() {
        return lastTime;
    }

    public void setLastTime(String lastTime) {
        this.lastTime = lastTime;
    }

    public String getNextTime() {
        return nextTime;
    }

    public void setNextTime(String nextTime) {
        this.nextTime = nextTime;
    }

    /**
     * Whether the current user appears in the admin summary table.
     *
     * @return {@code true} when at least one admin row is present
     */
    public boolean isBeingAdmin() {
        // Idiomatic emptiness check instead of size() > 0.
        return !manageTeamMapList.isEmpty();
    }

    public List<Map<String, Object>> getManageTeamMapList() {
        return manageTeamMapList;
    }

    public List<String> getAdminTableTitleList() {
        return adminTableTitleList;
    }

    public List<String> getForumTableTitleList() {
        return forumTableTitleList;
    }

    public List<ForumTableGroup> getForumTableGroupList() {
        return forumTableGroupList;
    }

    public void setForumTableGroupList(List<ForumTableGroup> forumTableGroupList) {
        this.forumTableGroupList = forumTableGroupList;
    }

    public Stats_navbarVO getNavbar() {
        return navbar;
    }

    public void setNavbar(Stats_navbarVO navbar) {
        this.navbar = navbar;
    }
}
|
package io.github.pleuvoir.domain;
import java.io.Serializable;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class LoginSession implements Serializable {

    private static final long serialVersionUID = 4917752846879171558L;

    private String userName;
    private String nickName;
    private String url;
    private String deviceId;
    private String sKey;
    private String wxSid;
    private String wxUin;
    private String passTicket;
    private String syncKeyStr;
    private Integer inviteStartCount;
    private BaseRequest baseRequest;
    private SyncKey syncKey;

    /**
     * Stores the sync key returned by the server and, in the same step,
     * refreshes {@link #syncKeyStr} with its "key_val|key_val|..." string
     * form so later requests can read it back without re-joining.
     */
    public void setSyncKey(SyncKey syncKey) {
        this.syncKey = syncKey;
        StringBuilder joined = new StringBuilder();
        String separator = "";
        for (SyncKeyPair pair : syncKey.getList()) {
            joined.append(separator)
                    .append(pair.getKey())
                    .append('_')
                    .append(pair.getVal());
            separator = "|";
        }
        // An empty pair list leaves the previous syncKeyStr untouched,
        // matching the original behavior.
        if (joined.length() > 0) {
            this.syncKeyStr = joined.toString();
        }
    }
}
|
package com.packtpub.adfguide.ch6.model.views;
import oracle.jbo.server.ViewObjectImpl;
// ---------------------------------------------------------------------
// --- File generated by Oracle ADF Business Components Design Time.
// --- Thu Dec 01 14:16:26 IST 2011
// --- Custom code may be added to this class.
// --- Warning: Do not modify method signatures of generated methods.
// ---------------------------------------------------------------------
public class DepartmentVOImpl extends ViewObjectImpl {
/**
* This is the default constructor (do not remove).
*/
public DepartmentVOImpl() {
}
// Placeholder for a client-exposed view-object operation; the body is not
// implemented yet. NOTE(review): presumably meant to be published to the
// client interface and bound to a salary-revision action — confirm.
public void reviseSalaryForDepartment() {
}
}
|
/**
* Copyright © 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.sayee.sxsy.modules.sys.dao;
import com.sayee.sxsy.modules.sys.entity.Role;
import com.sayee.sxsy.common.persistence.CrudDao;
import com.sayee.sxsy.common.persistence.annotation.MyBatisDao;
import java.util.List;
/**
 * Role DAO interface (MyBatis mapper) for CRUD on roles plus maintenance of
 * the role-to-menu and role-to-office relation tables.
 * @author ThinkGem
 * @version 2013-12-05
 */
@MyBatisDao
public interface RoleDao extends CrudDao<Role> {
/** Finds a role by its display name. */
public Role getByName(Role role);
/** Finds a role by its English name. */
public Role getByEnname(Role role);
/**
 * Maintains the role-to-menu permission relation.
 * @param role role whose menu links are deleted/inserted
 * @return number of affected rows
 */
public int deleteRoleMenu(Role role);
public int insertRoleMenu(Role role);
/**
 * Maintains the role-to-office (company department) relation.
 * @param role role whose office links are deleted/inserted
 * @return number of affected rows
 */
public int deleteRoleOffice(Role role);
public int insertRoleOffice(Role role);
/**
 * Queries the roles assigned to a user by the user's id.
 */
public List<Role> selectRole (String id);
}
|
/*
* Waltz - Enterprise Architecture
* Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project
* See README.md for more information
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*
*/
package com.khartec.waltz.jobs.generators;
import com.khartec.waltz.common.ListUtilities;
import com.khartec.waltz.common.MapUtilities;
import com.khartec.waltz.common.RandomUtilities;
import com.khartec.waltz.model.person.ImmutablePerson;
import com.khartec.waltz.model.person.PersonKind;
import com.khartec.waltz.service.person.PersonService;
import com.khartec.waltz.service.person_hierarchy.PersonHierarchyService;
import io.codearte.jfairy.Fairy;
import io.codearte.jfairy.producer.person.Person;
import org.springframework.context.ApplicationContext;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import static com.khartec.waltz.common.RandomUtilities.randomPick;
import static com.khartec.waltz.schema.Tables.INVOLVEMENT;
import static com.khartec.waltz.schema.tables.OrganisationalUnit.ORGANISATIONAL_UNIT;
import static com.khartec.waltz.schema.tables.Person.PERSON;
/**
 * Sample-data generator that builds a random person hierarchy (up to
 * {@link #MAX_DEPTH} levels below a CEO root), bulk-saves it, promotes one
 * random person to the "admin" principal, and rebuilds the person hierarchy.
 */
public class PersonDataGenerator implements SampleDataGenerator {

    private static final int MAX_DEPTH = 4;
    // Accumulates every generated person before a single bulkSave call.
    private static final List<ImmutablePerson> peeps = new ArrayList<>();
    private static final Fairy fairy = Fairy.create();
    // Appended to generated emails to keep them unique across the run.
    private static int counter = 0;
    private static final Random rnd = RandomUtilities.getRandom();

    /**
     * Creates the sample people and returns a summary of how many were made.
     *
     * @param ctx Spring context used to look up services and the DSL
     * @return map with a single "created" count
     */
    @Override
    public Map<String, Integer> create(ApplicationContext ctx) {
        // Bug fix: 'peeps' is static, so without this reset a second create()
        // call in the same JVM would re-save every previously generated person
        // on top of the new batch.
        peeps.clear();
        PersonService personService = ctx.getBean(PersonService.class);
        PersonHierarchyService personHierarchyService = ctx.getBean(PersonHierarchyService.class);
        List<Long> ouIds = getDsl(ctx)
                .select(ORGANISATIONAL_UNIT.ID)
                .from(ORGANISATIONAL_UNIT)
                .fetch(ORGANISATIONAL_UNIT.ID);
        Person person = fairy.person();
        // Root of the hierarchy: the CEO, attached to a fixed org unit.
        ImmutablePerson root = ImmutablePerson.builder()
                .employeeId(person.getPassportNumber())
                .personKind(PersonKind.EMPLOYEE)
                .userPrincipalName(person.getUsername())
                .title(randomPick(SampleData.jobTitles[0]))
                .departmentName("CEO")
                .displayName(person.getFullName())
                .email(person.getEmail())
                .organisationalUnitId(10L)
                .isRemoved(false)
                .build();
        peeps.add(root);
        visit(root, 1, ouIds);
        personService.bulkSave(peeps);
        // Promote one random person to the well-known "admin" principal.
        ImmutablePerson admin = randomPick(peeps);
        getDsl(ctx)
                .update(PERSON)
                .set(PERSON.USER_PRINCIPAL_NAME, "admin")
                .set(PERSON.EMAIL, "admin")
                .where(PERSON.EMPLOYEE_ID.eq(admin.employeeId()))
                .execute();
        personHierarchyService.build();
        return MapUtilities.newHashMap("created", peeps.size());
    }

    /** Removes all involvements and people created by this generator. */
    @Override
    public boolean remove(ApplicationContext ctx) {
        log("Removing people");
        // Involvements reference people, so they must be deleted first.
        getDsl(ctx).deleteFrom(INVOLVEMENT).execute();
        getDsl(ctx).deleteFrom(PERSON).execute();
        return true;
    }

    /**
     * Recursively generates direct reports for the given parent until
     * MAX_DEPTH is reached. Level 1 creates one report per second-level job
     * title; deeper levels create a random 2-8 reports.
     */
    private static void visit(ImmutablePerson parent, int level, List<Long> orgUnitIds) {
        if (level > MAX_DEPTH) return;
        int nextLevel = level + 1;
        int siblingCount = level == 1 ? SampleData.jobTitles[1].length : rnd.nextInt(7) + 2;
        for (int i = 0; i < siblingCount; i++) {
            Person person = fairy.person();
            // Clamp the title lookup to the last row of jobTitles for deep levels.
            String jobTitle = level == 1
                    ? SampleData.jobTitles[1][i]
                    : randomPick(
                        SampleData.jobTitles[level >= SampleData.jobTitles.length ? SampleData.jobTitles.length - 1 : level]);
            Long ouId = orgUnitIds.get(rnd.nextInt(orgUnitIds.size()));
            ImmutablePerson p = ImmutablePerson.builder()
                    .managerEmployeeId(parent.employeeId())
                    .employeeId(person.getPassportNumber())
                    .personKind(PersonKind.EMPLOYEE)
                    .userPrincipalName(person.getUsername())
                    .title(jobTitle)
                    .departmentName(randomPick(SampleData.departmentNames))
                    .displayName(person.getFullName())
                    .email((counter++) + person.getEmail())
                    .organisationalUnitId(ouId)
                    .isRemoved(false)
                    .build();
            peeps.add(p);
            visit(p, nextLevel, orgUnitIds);
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.core;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.management.ObjectName;
import javax.naming.NamingException;
import javax.servlet.Filter;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import org.apache.catalina.Context;
import org.apache.catalina.Globals;
import org.apache.catalina.deploy.FilterDef;
import org.apache.catalina.security.SecurityUtil;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.InstanceManager;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.log.SystemLogHandler;
import org.apache.tomcat.util.modeler.Registry;
import org.apache.tomcat.util.modeler.Util;
import org.apache.tomcat.util.res.StringManager;
/**
* Implementation of a <code>javax.servlet.FilterConfig</code> useful in
* managing the filter instances instantiated when a web application is first
* started.
*
* @author Craig R. McClanahan
*/
public final class ApplicationFilterConfig implements FilterConfig, Serializable {
private static final long serialVersionUID = 1L;
static final StringManager sm = StringManager.getManager(Constants.Package);
private static final org.apache.juli.logging.Log log = LogFactory.getLog(ApplicationFilterConfig.class);
/**
* Empty String collection to serve as the basis for empty enumerations.
*/
private static final List<String> emptyString = Collections.emptyList();
// ----------------------------------------------------------- Constructors
/**
* Construct a new ApplicationFilterConfig for the specified filter
* definition.
*
* @param context
* The context with which we are associated
* @param filterDef
* Filter definition for which a FilterConfig is to be
* constructed
*
* @exception ClassCastException
* if the specified class does not implement the
* <code>javax.servlet.Filter</code> interface
* @exception ClassNotFoundException
* if the filter class cannot be found
* @exception IllegalAccessException
* if the filter class cannot be publicly instantiated
* @exception InstantiationException
* if an exception occurs while instantiating the filter
* object
* @exception ServletException
* if thrown by the filter's init() method
* @throws NamingException
* @throws InvocationTargetException
*/
ApplicationFilterConfig(Context context, FilterDef filterDef)
throws ClassCastException, ClassNotFoundException, IllegalAccessException, InstantiationException,
ServletException, InvocationTargetException, NamingException {
super();
this.context = context;
this.filterDef = filterDef;
// Allocate a new filter instance if necessary
if (filterDef.getFilter() == null) {
// No pre-built instance: getFilter() instantiates from the class
// name and runs initFilter() (which also registers with JMX).
getFilter();
} else {
// A pre-built instance was supplied; let the InstanceManager apply
// injection/post-construct processing before init.
this.filter = filterDef.getFilter();
getInstanceManager().newInstance(filter);
initFilter();
}
}
// ----------------------------------------------------- Instance Variables
/**
* The Context with which we are associated.
*/
private transient Context context = null;
/**
* The application Filter we are configured for.
*/
private transient Filter filter = null;
/**
* The <code>FilterDef</code> that defines our associated Filter.
*/
private final FilterDef filterDef;
/**
* the InstanceManager used to create and destroy filter instances.
*/
private transient InstanceManager instanceManager;
/**
* JMX registration name
*/
private ObjectName oname;
// --------------------------------------------------- FilterConfig Methods
/**
* Return the name of the filter we are configuring.
*/
@Override
public String getFilterName() {
return (filterDef.getFilterName());
}
/**
* Return the class of the filter we are configuring.
*/
public String getFilterClass() {
return filterDef.getFilterClass();
}
/**
* Return a <code>String</code> containing the value of the named
* initialization parameter, or <code>null</code> if the parameter does not
* exist.
*
* @param name
* Name of the requested initialization parameter
*/
@Override
public String getInitParameter(String name) {
Map<String, String> map = filterDef.getParameterMap();
if (map == null) {
return (null);
}
return map.get(name);
}
/**
* Return an <code>Enumeration</code> of the names of the initialization
* parameters for this Filter.
*/
@Override
public Enumeration<String> getInitParameterNames() {
Map<String, String> map = filterDef.getParameterMap();
if (map == null) {
return Collections.enumeration(emptyString);
}
return Collections.enumeration(map.keySet());
}
/**
* Return the ServletContext of our associated web application.
*/
@Override
public ServletContext getServletContext() {
return this.context.getServletContext();
}
/**
* Return a String representation of this object.
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder("ApplicationFilterConfig[");
sb.append("name=");
sb.append(filterDef.getFilterName());
sb.append(", filterClass=");
sb.append(filterDef.getFilterClass());
sb.append("]");
return (sb.toString());
}
// --------------------------------------------------------- Public Methods
// NOTE(review): unlike getInitParameter, this does not guard against a null
// parameter map — presumably FilterDef always returns a non-null map; confirm.
public Map<String, String> getFilterInitParameterMap() {
return Collections.unmodifiableMap(filterDef.getParameterMap());
}
// -------------------------------------------------------- Package Methods
/**
* Return the application Filter we are configured for. The instance is
* created lazily on first call and cached; creation also runs the filter's
* init() and registers it with JMX via initFilter().
*
* @exception ClassCastException
* if the specified class does not implement the
* <code>javax.servlet.Filter</code> interface
* @exception ClassNotFoundException
* if the filter class cannot be found
* @exception IllegalAccessException
* if the filter class cannot be publicly instantiated
* @exception InstantiationException
* if an exception occurs while instantiating the filter
* object
* @exception ServletException
* if thrown by the filter's init() method
* @throws NamingException
* @throws InvocationTargetException
*/
Filter getFilter() throws ClassCastException, ClassNotFoundException, IllegalAccessException,
InstantiationException, ServletException, InvocationTargetException, NamingException {
// Return the existing filter instance, if any
if (this.filter != null)
return (this.filter);
// Identify the class loader we will be using
String filterClass = filterDef.getFilterClass();
this.filter = (Filter) getInstanceManager().newInstance(filterClass);
initFilter();
return (this.filter);
}
// Runs the filter's init() (capturing System.out/err when the context
// swallows output) and then exposes the filter via JMX.
private void initFilter() throws ServletException {
if (context instanceof StandardContext && context.getSwallowOutput()) {
try {
SystemLogHandler.startCapture();
filter.init(this);
} finally {
// Forward anything the filter printed during init() to the
// application log instead of the console.
String capturedlog = SystemLogHandler.stopCapture();
if (capturedlog != null && capturedlog.length() > 0) {
getServletContext().log(capturedlog);
}
}
} else {
filter.init(this);
}
// Expose filter via JMX
registerJMX();
}
/**
* Return the filter definition we are configured for.
*/
FilterDef getFilterDef() {
return (this.filterDef);
}
/**
* Release the Filter instance associated with this FilterConfig, if there
* is one. Unregisters from JMX, calls destroy() (under a privileged action
* when the security manager is enabled), lets the InstanceManager run
* pre-destroy processing, and drops the reference.
*/
void release() {
unregisterJMX();
if (this.filter != null) {
try {
if (Globals.IS_SECURITY_ENABLED) {
try {
SecurityUtil.doAsPrivilege("destroy", filter);
} finally {
SecurityUtil.remove(filter);
}
} else {
filter.destroy();
}
} catch (Throwable t) {
// Log but keep releasing; a failing destroy() must not leak the rest.
ExceptionUtils.handleThrowable(t);
context.getLogger().error(sm.getString("applicationFilterConfig.release", filterDef.getFilterName(),
filterDef.getFilterClass()), t);
}
if (!context.getIgnoreAnnotations()) {
try {
((StandardContext) context).getInstanceManager().destroyInstance(this.filter);
} catch (Exception e) {
Throwable t = ExceptionUtils.unwrapInvocationTargetException(e);
ExceptionUtils.handleThrowable(t);
context.getLogger().error(sm.getString("applicationFilterConfig.preDestroy",
filterDef.getFilterName(), filterDef.getFilterClass()), t);
}
}
}
this.filter = null;
}
// -------------------------------------------------------- Private Methods
// Lazily resolves the InstanceManager: the context's own manager for a
// StandardContext, otherwise a default manager on this class's loader.
private InstanceManager getInstanceManager() {
if (instanceManager == null) {
if (context instanceof StandardContext) {
instanceManager = ((StandardContext) context).getInstanceManager();
} else {
instanceManager = new DefaultInstanceManager(null, new HashMap<String, Map<String, String>>(), context,
getClass().getClassLoader());
}
}
return instanceManager;
}
// Builds the JMX ObjectName (domain:j2eeType=Filter,name=...,WebModule=...)
// from the engine/host/context hierarchy and registers this config.
private void registerJMX() {
String parentName = context.getName();
if (!parentName.startsWith("/")) {
parentName = "/" + parentName;
}
String hostName = context.getParent().getName();
hostName = (hostName == null) ? "DEFAULT" : hostName;
// domain == engine name
String domain = context.getParent().getParent().getName();
String webMod = "//" + hostName + parentName;
String onameStr = null;
String filterName = filterDef.getFilterName();
if (Util.objectNameValueNeedsQuote(filterName)) {
filterName = ObjectName.quote(filterName);
}
if (context instanceof StandardContext) {
StandardContext standardContext = (StandardContext) context;
onameStr = domain + ":j2eeType=Filter,name=" + filterName + ",WebModule=" + webMod + ",J2EEApplication="
+ standardContext.getJ2EEApplication() + ",J2EEServer=" + standardContext.getJ2EEServer();
} else {
onameStr = domain + ":j2eeType=Filter,name=" + filterName + ",WebModule=" + webMod;
}
try {
oname = new ObjectName(onameStr);
Registry.getRegistry(null, null).registerComponent(this, oname, null);
} catch (Exception ex) {
// Registration failure is non-fatal; the filter still works without JMX.
log.info(sm.getString("applicationFilterConfig.jmxRegisterFail", getFilterClass(), getFilterName()), ex);
}
}
private void unregisterJMX() {
// unregister this component
if (oname != null) {
try {
Registry.getRegistry(null, null).unregisterComponent(oname);
if (log.isDebugEnabled())
log.debug(sm.getString("applicationFilterConfig.jmxUnregister", getFilterClass(), getFilterName()));
} catch (Exception ex) {
log.error(sm.getString("applicationFilterConfig.jmxUnregisterFail", getFilterClass(), getFilterName()),
ex);
}
}
}
}
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2020 artipie.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.artipie.rpm.pkg;
import com.artipie.rpm.Digest;
import java.io.IOException;
/**
* RPM checksum.
* @since 0.6
*/
public interface Checksum {
/**
* Digest algorithm used to produce this checksum.
* @return Digest
*/
Digest digest();
/**
* Checksum value as a lowercase-hex-style string for the digest above.
* @return Hex string
* @throws IOException On error reading the checksummed content
*/
String hex() throws IOException;
}
|
/**
*/
package top.argumentation.impl;
import java.util.Collection;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.util.EObjectResolvingEList;
import top.argumentation.ArgumentGroup;
import top.argumentation.ArgumentationElement;
import top.argumentation.ArgumentationPackage;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Argument Group</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link top.argumentation.impl.ArgumentGroupImpl#getArgumentationElement <em>Argumentation Element</em>}</li>
* </ul>
*
* @generated
*/
public class ArgumentGroupImpl extends ArgumentationElementImpl implements ArgumentGroup {
	/**
	 * The cached value of the '{@link #getArgumentationElement() <em>Argumentation Element</em>}' reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getArgumentationElement()
	 * @generated
	 * @ordered
	 */
	protected EList<ArgumentationElement> argumentationElement;
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ArgumentGroupImpl() {
		super();
	}
	/**
	 * Identifies this object's metamodel class for the EMF reflective API.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return ArgumentationPackage.Literals.ARGUMENT_GROUP;
	}
	/**
	 * Returns the group's elements, lazily creating the backing list.
	 * EObjectResolvingEList resolves cross-document proxies on access.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<ArgumentationElement> getArgumentationElement() {
		if (argumentationElement == null) {
			// Lazy init: list is only allocated on first access.
			argumentationElement = new EObjectResolvingEList<ArgumentationElement>(ArgumentationElement.class, this, ArgumentationPackage.ARGUMENT_GROUP__ARGUMENTATION_ELEMENT);
		}
		return argumentationElement;
	}
	/**
	 * Reflective feature read used by the EMF framework.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case ArgumentationPackage.ARGUMENT_GROUP__ARGUMENTATION_ELEMENT:
				return getArgumentationElement();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * Reflective feature write: replaces the whole element list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case ArgumentationPackage.ARGUMENT_GROUP__ARGUMENTATION_ELEMENT:
				getArgumentationElement().clear();
				getArgumentationElement().addAll((Collection<? extends ArgumentationElement>)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * Reflective feature reset: empties the element list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case ArgumentationPackage.ARGUMENT_GROUP__ARGUMENTATION_ELEMENT:
				getArgumentationElement().clear();
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * Reflective "is set" check: true only when the list exists and is non-empty.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case ArgumentationPackage.ARGUMENT_GROUP__ARGUMENTATION_ELEMENT:
				return argumentationElement != null && !argumentationElement.isEmpty();
		}
		return super.eIsSet(featureID);
	}
} //ArgumentGroupImpl
|
package rxreddit.model;
import com.google.gson.annotations.SerializedName;
import java.util.List;
public class MoreChildrenResponse {
    @SerializedName("json")
    MoreChildrenResponseJson json;

    /**
     * Extracts the child comment listings from the wrapped response.
     *
     * @return the listings, or {@code null} when the response carried no
     *     payload (missing {@code json} envelope or {@code data} node)
     */
    public List<Listing> getChildComments() {
        // Gson leaves fields null when a JSON node is absent, so guard the
        // envelope itself as well as the data node (original NPE'd on null json).
        if (json == null || json.data == null) return null;
        return json.data.listings;
    }

    /** Envelope matching reddit's {"json": {...}} response wrapper. */
    static class MoreChildrenResponseJson {
        @SerializedName("errors")
        List<String> errors;
        @SerializedName("data")
        MoreChildrenResponseData data;

        /** Payload node holding the returned "things". */
        static class MoreChildrenResponseData {
            @SerializedName("things")
            List<Listing> listings;
        }
    }
}
|
package com.instabug.library;
import com.instabug.library.e.c;
import com.instabug.library.internal.a.b;
import com.instabug.library.internal.module.a;
import com.instabug.library.util.InstabugSDKLogger;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import org.json.JSONException;
import org.json.JSONObject;
// NOTE(review): decompiled/obfuscated source — identifiers like n, a, b, c are
// compiler-generated. Comments below describe only what the visible code does.
public class InstabugFeaturesFetcherService extends n {
    /**
     * Refreshes the feature-availability flags from the backend and caches the
     * raw JSON on disk in "com.instabug.library.settings". The cached file is
     * considered fresh for 24 hours (86400000 ms); within that window no
     * network fetch is attempted.
     */
    protected final void b() throws Exception {
        a aVar = new a();
        final b a = a.a(this);
        // Prefer the external cache dir when present, else internal cache.
        final File file = new File(getExternalCacheDir() != null ? getExternalCacheDir() : getCacheDir(), "com.instabug.library.settings");
        try {
            // Fetch when the cache file is missing or older than 24h.
            if (!file.exists() || Math.abs(System.currentTimeMillis() - file.lastModified()) > 86400000) {
                InstabugSDKLogger.d(this, "Feature file doesn't exist or too old, fetching features again");
                com.instabug.library.e.a.a.a().a(this, new c.a<String, Throwable>(this) {
                    final /* synthetic */ InstabugFeaturesFetcherService c;
                    // Success callback: obj is the response body (a JSON string).
                    public final /* bridge */ /* synthetic */ void b(Object obj) {
                        Exception e;
                        String str = (String) obj;
                        try {
                            InstabugSDKLogger.d(this.c, "Features fetched successfully");
                            JSONObject jSONObject = new JSONObject(str);
                            jSONObject.put("device", a.l());
                            // Each flag defaults to true when absent from the payload.
                            InstabugFeaturesManager.getInstance().updateFeatureAvailability(Feature.CRASH_REPORTING, jSONObject.optBoolean("crash_reporting", true));
                            InstabugFeaturesManager.getInstance().updateFeatureAvailability(Feature.PUSH_NOTIFICATION, jSONObject.optBoolean("push_notifications", true));
                            InstabugFeaturesManager.getInstance().updateFeatureAvailability(Feature.WHITE_LABELING, jSONObject.optBoolean("white_label", true));
                            InstabugFeaturesManager.getInstance().updateFeatureAvailability(Feature.IN_APP_MESSAGING, jSONObject.optBoolean("in_app_messaging", true));
                            InstabugFeaturesManager.getInstance().updateFeatureAvailability(Feature.MULTIPLE_ATTACHMENTS, jSONObject.optBoolean("multiple_attachments", true));
                            InstabugFeaturesManager.getInstance().updateFeatureAvailability(Feature.TRACK_USER_STEPS, jSONObject.optBoolean("user_steps", true));
                            InstabugFeaturesManager.getInstance().updateFeatureAvailability(Feature.CONSOLE_LOGS, jSONObject.optBoolean("console_log", true));
                            InstabugFeaturesManager.getInstance().updateFeatureAvailability(Feature.INSTABUG_LOGS, jSONObject.optBoolean("ibg_log", true));
                            InstabugFeaturesManager.getInstance().updateFeatureAvailability(Feature.USER_DATA, jSONObject.optBoolean("user_data", true));
                            // Persist the (device-augmented) payload for the 24h cache.
                            FileWriter fileWriter = new FileWriter(file);
                            fileWriter.write(jSONObject.toString());
                            fileWriter.close();
                            return;
                        } catch (JSONException e2) {
                            e = e2;
                        } catch (IOException e3) {
                            e = e3;
                        }
                        // Decompiler-flattened catch: both failure paths just print.
                        e.printStackTrace();
                    }
                    // Failure callback: logged only, the stale cache (if any) is kept.
                    public final /* bridge */ /* synthetic */ void a(Object obj) {
                        InstabugSDKLogger.d(this.c, "Something went wrong while fetching features");
                    }
                });
            }
        } catch (Exception e) {
            // Best-effort fetch: any setup failure is logged, never propagated.
            InstabugSDKLogger.d(this, "Something went wrong while fetching features");
        }
    }
}
|
package org.apache.poi.hssf.record.chart;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
import org.apache.poi.util.HexDump;
import org.apache.poi.util.LittleEndianOutput;
public final class SheetPropertiesRecord extends StandardRecord {
public static final short sid = 4164;
private static final BitField chartTypeManuallyFormatted = BitFieldFactory.getInstance(1);
private static final BitField plotVisibleOnly = BitFieldFactory.getInstance(2);
private static final BitField doNotSizeWithWindow = BitFieldFactory.getInstance(4);
private static final BitField defaultPlotDimensions = BitFieldFactory.getInstance(8);
private static final BitField autoPlotArea = BitFieldFactory.getInstance(16);
private int field_1_flags;
private int field_2_empty;
public static final byte EMPTY_NOT_PLOTTED = 0;
public static final byte EMPTY_ZERO = 1;
public static final byte EMPTY_INTERPOLATED = 2;
public SheetPropertiesRecord() {}
public SheetPropertiesRecord(RecordInputStream in) {
this.field_1_flags = in.readUShort();
this.field_2_empty = in.readUShort();
}
public String toString() {
StringBuffer buffer = new StringBuffer();
buffer.append("[SHTPROPS]\n");
buffer.append(" .flags = ").append(HexDump.shortToHex(this.field_1_flags)).append('\n');
buffer.append(" .chartTypeManuallyFormatted= ").append(this.isChartTypeManuallyFormatted()).append('\n');
buffer.append(" .plotVisibleOnly = ").append(this.isPlotVisibleOnly()).append('\n');
buffer.append(" .doNotSizeWithWindow = ").append(this.isDoNotSizeWithWindow()).append('\n');
buffer.append(" .defaultPlotDimensions = ").append(this.isDefaultPlotDimensions()).append('\n');
buffer.append(" .autoPlotArea = ").append(this.isAutoPlotArea()).append('\n');
buffer.append(" .empty = ").append(HexDump.shortToHex(this.field_2_empty)).append('\n');
buffer.append("[/SHTPROPS]\n");
return buffer.toString();
}
public void serialize(LittleEndianOutput out) {
out.writeShort(this.field_1_flags);
out.writeShort(this.field_2_empty);
}
protected int getDataSize() {
return 4;
}
public short getSid() {
return (short)4164;
}
public Object clone() {
SheetPropertiesRecord rec = new SheetPropertiesRecord();
rec.field_1_flags = this.field_1_flags;
rec.field_2_empty = this.field_2_empty;
return rec;
}
public int getFlags() {
return this.field_1_flags;
}
public int getEmpty() {
return this.field_2_empty;
}
public void setEmpty(byte empty) {
this.field_2_empty = empty;
}
public void setChartTypeManuallyFormatted(boolean value) {
this.field_1_flags = chartTypeManuallyFormatted.setBoolean(this.field_1_flags, value);
}
public boolean isChartTypeManuallyFormatted() {
return chartTypeManuallyFormatted.isSet(this.field_1_flags);
}
public void setPlotVisibleOnly(boolean value) {
this.field_1_flags = plotVisibleOnly.setBoolean(this.field_1_flags, value);
}
public boolean isPlotVisibleOnly() {
return plotVisibleOnly.isSet(this.field_1_flags);
}
public void setDoNotSizeWithWindow(boolean value) {
this.field_1_flags = doNotSizeWithWindow.setBoolean(this.field_1_flags, value);
}
public boolean isDoNotSizeWithWindow() {
return doNotSizeWithWindow.isSet(this.field_1_flags);
}
public void setDefaultPlotDimensions(boolean value) {
this.field_1_flags = defaultPlotDimensions.setBoolean(this.field_1_flags, value);
}
public boolean isDefaultPlotDimensions() {
return defaultPlotDimensions.isSet(this.field_1_flags);
}
public void setAutoPlotArea(boolean value) {
this.field_1_flags = autoPlotArea.setBoolean(this.field_1_flags, value);
}
public boolean isAutoPlotArea() {
return autoPlotArea.isSet(this.field_1_flags);
}
}
|
package com.nexus.mall.common.domain;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * @className SwaggerProperties
 * @description Custom Swagger configuration properties
 * @author LiYuan
 * @date 2020/10/28
 **/
@Data
@EqualsAndHashCode(callSuper = false)
@Builder
public class SwaggerProperties {
    /**
     * Base package scanned for API documentation generation
     */
    private String apiBasePackage;
    /**
     * Whether login authentication is enabled for the docs
     */
    private boolean enableSecurity;
    /**
     * Documentation title
     */
    private String title;
    /**
     * Documentation description
     */
    private String description;
    /**
     * Documentation version
     */
    private String version;
    /**
     * Contact person name
     */
    private String contactName;
    /**
     * Contact person URL
     */
    private String contactUrl;
    /**
     * Contact person email
     */
    private String contactEmail;
}
|
/*
* MIT License
*
* Copyright (c) 2019 blombler008
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.github.blombler008.teamspeak3bot.events.handlers.channel;
import com.github.blombler008.teamspeak3bot.Teamspeak3Bot;
import com.github.blombler008.teamspeak3bot.events.Event;
import com.github.theholywaffle.teamspeak3.TS3Api;
import com.github.theholywaffle.teamspeak3.api.event.BaseEvent;
import com.github.theholywaffle.teamspeak3.api.event.ChannelPasswordChangedEvent;
import java.util.Map;
/**
 * Wraps a TeamSpeak channel-password-changed notification so it can be
 * dispatched through the bot's event system.
 */
public class EventChannelPasswordChanged extends Event {

    /**
     * Creates the wrapper for a channel-password-changed notification.
     *
     * @param instance owning bot instance
     * @param e raw key/value payload of the notification
     * @param api TeamSpeak 3 query API handle
     * @param event underlying library event being wrapped
     */
    public EventChannelPasswordChanged(Teamspeak3Bot instance, Map<String, String> e, TS3Api api, BaseEvent event) {
        super(instance, e, api, event);
    }

    /** @return the query API handle associated with this event */
    @Override
    public TS3Api getApi() {
        return api;
    }

    /** @return the wrapped event, narrowed to its concrete type */
    @Override
    public ChannelPasswordChangedEvent getEvent() {
        return (ChannelPasswordChangedEvent) event;
    }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyuncs.ecs.model.v20140526;
import com.aliyuncs.RpcAcsRequest;
import com.aliyuncs.ecs.Endpoint;
/**
* @author auto create
* @version
*/
public class DescribeDeploymentSetsRequest extends RpcAcsRequest<DescribeDeploymentSetsResponse> {
	/**
	 * Builds a DescribeDeploymentSets request for ECS API version 2014-05-26.
	 * The reflection below injects product endpoint metadata into fields of the
	 * AcsRequest base class (a pattern used by this generated SDK).
	 */
	public DescribeDeploymentSetsRequest() {
		super("Ecs", "2014-05-26", "DescribeDeploymentSets", "ecs");
		try {
			com.aliyuncs.AcsRequest.class.getDeclaredField("productEndpointMap").set(this, Endpoint.endpointMap);
			com.aliyuncs.AcsRequest.class.getDeclaredField("productEndpointRegional").set(this, Endpoint.endpointRegionalType);
		} catch (Exception e) { /* ignored: endpoint fields may be absent in older SDK cores */ }
	}
	// Request parameters. Each setter below mirrors its value into the query
	// string via putQueryParameter; null values are simply not sent.
	private Long resourceOwnerId;
	private String resourceOwnerAccount;
	private String ownerAccount;
	private String networkType;
	private String deploymentSetName;
	private Long ownerId;
	private Integer pageNumber;
	private String deploymentSetIds;
	private String granularity;
	private String domain;
	private Integer pageSize;
	private String strategy;
	public Long getResourceOwnerId() {
		return this.resourceOwnerId;
	}
	public void setResourceOwnerId(Long resourceOwnerId) {
		this.resourceOwnerId = resourceOwnerId;
		if(resourceOwnerId != null){
			putQueryParameter("ResourceOwnerId", resourceOwnerId.toString());
		}
	}
	public String getResourceOwnerAccount() {
		return this.resourceOwnerAccount;
	}
	public void setResourceOwnerAccount(String resourceOwnerAccount) {
		this.resourceOwnerAccount = resourceOwnerAccount;
		if(resourceOwnerAccount != null){
			putQueryParameter("ResourceOwnerAccount", resourceOwnerAccount);
		}
	}
	public String getOwnerAccount() {
		return this.ownerAccount;
	}
	public void setOwnerAccount(String ownerAccount) {
		this.ownerAccount = ownerAccount;
		if(ownerAccount != null){
			putQueryParameter("OwnerAccount", ownerAccount);
		}
	}
	public String getNetworkType() {
		return this.networkType;
	}
	public void setNetworkType(String networkType) {
		this.networkType = networkType;
		if(networkType != null){
			putQueryParameter("NetworkType", networkType);
		}
	}
	public String getDeploymentSetName() {
		return this.deploymentSetName;
	}
	public void setDeploymentSetName(String deploymentSetName) {
		this.deploymentSetName = deploymentSetName;
		if(deploymentSetName != null){
			putQueryParameter("DeploymentSetName", deploymentSetName);
		}
	}
	public Long getOwnerId() {
		return this.ownerId;
	}
	public void setOwnerId(Long ownerId) {
		this.ownerId = ownerId;
		if(ownerId != null){
			putQueryParameter("OwnerId", ownerId.toString());
		}
	}
	public Integer getPageNumber() {
		return this.pageNumber;
	}
	public void setPageNumber(Integer pageNumber) {
		this.pageNumber = pageNumber;
		if(pageNumber != null){
			putQueryParameter("PageNumber", pageNumber.toString());
		}
	}
	public String getDeploymentSetIds() {
		return this.deploymentSetIds;
	}
	public void setDeploymentSetIds(String deploymentSetIds) {
		this.deploymentSetIds = deploymentSetIds;
		if(deploymentSetIds != null){
			putQueryParameter("DeploymentSetIds", deploymentSetIds);
		}
	}
	public String getGranularity() {
		return this.granularity;
	}
	public void setGranularity(String granularity) {
		this.granularity = granularity;
		if(granularity != null){
			putQueryParameter("Granularity", granularity);
		}
	}
	// "Biz" accessors are the current names for the Domain parameter; the
	// plain getDomain/setDomain pair below is kept only for compatibility.
	public String getBizDomain() {
		return this.domain;
	}
	public void setBizDomain(String domain) {
		this.domain = domain;
		if(domain != null){
			putQueryParameter("Domain", domain);
		}
	}
	/**
	 * @deprecated use getBizDomain instead of this.
	 */
	@Deprecated
	public String getDomain() {
		return this.domain;
	}
	/**
	 * @deprecated use setBizDomain instead of this.
	 */
	@Deprecated
	public void setDomain(String domain) {
		this.domain = domain;
		if(domain != null){
			putQueryParameter("Domain", domain);
		}
	}
	public Integer getPageSize() {
		return this.pageSize;
	}
	public void setPageSize(Integer pageSize) {
		this.pageSize = pageSize;
		if(pageSize != null){
			putQueryParameter("PageSize", pageSize.toString());
		}
	}
	public String getStrategy() {
		return this.strategy;
	}
	public void setStrategy(String strategy) {
		this.strategy = strategy;
		if(strategy != null){
			putQueryParameter("Strategy", strategy);
		}
	}
	/** Response type used by the SDK to deserialize the API reply. */
	@Override
	public Class<DescribeDeploymentSetsResponse> getResponseClass() {
		return DescribeDeploymentSetsResponse.class;
	}
}
|
/**
* Copyright ${license.git.copyrightYears} the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mbg.test.mb3.dsql;
import static mbg.test.common.util.TestUtilities.blobsAreEqual;
import static mbg.test.common.util.TestUtilities.generateRandomBlob;
import static mbg.test.mb3.generated.dsql.mapper.AwfulTableDynamicSqlSupport.awfulTable;
import static mbg.test.mb3.generated.dsql.mapper.FieldsblobsDynamicSqlSupport.fieldsblobs;
import static mbg.test.mb3.generated.dsql.mapper.FieldsonlyDynamicSqlSupport.fieldsonly;
import static mbg.test.mb3.generated.dsql.mapper.PkblobsDynamicSqlSupport.pkblobs;
import static mbg.test.mb3.generated.dsql.mapper.PkfieldsDynamicSqlSupport.pkfields;
import static mbg.test.mb3.generated.dsql.mapper.PkfieldsblobsDynamicSqlSupport.pkfieldsblobs;
import static mbg.test.mb3.generated.dsql.mapper.PkonlyDynamicSqlSupport.pkonly;
import static org.junit.jupiter.api.Assertions.*;
import static org.mybatis.dynamic.sql.SqlBuilder.*;
import java.util.List;
import org.apache.ibatis.session.SqlSession;
import org.junit.jupiter.api.Test;
import mbg.test.mb3.generated.dsql.mapper.AwfulTableMapper;
import mbg.test.mb3.generated.dsql.mapper.FieldsblobsMapper;
import mbg.test.mb3.generated.dsql.mapper.FieldsonlyMapper;
import mbg.test.mb3.generated.dsql.mapper.PkblobsMapper;
import mbg.test.mb3.generated.dsql.mapper.PkfieldsMapper;
import mbg.test.mb3.generated.dsql.mapper.PkfieldsblobsMapper;
import mbg.test.mb3.generated.dsql.mapper.PkonlyMapper;
import mbg.test.mb3.generated.dsql.model.AwfulTable;
import mbg.test.mb3.generated.dsql.model.Fieldsblobs;
import mbg.test.mb3.generated.dsql.model.Fieldsonly;
import mbg.test.mb3.generated.dsql.model.Pkblobs;
import mbg.test.mb3.generated.dsql.model.Pkfields;
import mbg.test.mb3.generated.dsql.model.Pkfieldsblobs;
import mbg.test.mb3.generated.dsql.model.Pkonly;
/**
*
* @author Jeff Butler
*
*/
public class UpdateByExampleTest extends AbstractTest {
@Test
public void testFieldsOnlyUpdateByExampleSelective() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
FieldsonlyMapper mapper = sqlSession.getMapper(FieldsonlyMapper.class);
Fieldsonly record = new Fieldsonly();
record.setDoublefield(11.22);
record.setFloatfield(33.44);
record.setIntegerfield(5);
mapper.insert(record);
record = new Fieldsonly();
record.setDoublefield(44.55);
record.setFloatfield(66.77);
record.setIntegerfield(8);
mapper.insert(record);
record = new Fieldsonly();
record.setDoublefield(88.99);
record.setFloatfield(100.111);
record.setIntegerfield(9);
mapper.insert(record);
record = new Fieldsonly();
record.setDoublefield(99d);
int rows = mapper.updateByExampleSelective(record)
.where(fieldsonly.integerfield, isGreaterThan(5))
.build()
.execute();
assertEquals(2, rows);
List<Fieldsonly> answer = mapper.selectByExample()
.where(fieldsonly.integerfield, isEqualTo(5))
.build()
.execute();
assertEquals(1, answer.size());
record = answer.get(0);
assertEquals(record.getDoublefield(), 11.22, 0.001);
assertEquals(record.getFloatfield(), 33.44, 0.001);
assertEquals(record.getIntegerfield().intValue(), 5);
answer = mapper.selectByExample()
.where(fieldsonly.integerfield, isEqualTo(8))
.build()
.execute();
assertEquals(1, answer.size());
record = answer.get(0);
assertEquals(record.getDoublefield(), 99d, 0.001);
assertEquals(record.getFloatfield(), 66.77, 0.001);
assertEquals(record.getIntegerfield().intValue(), 8);
answer = mapper.selectByExample()
.where(fieldsonly.integerfield, isEqualTo(9))
.build()
.execute();
assertEquals(1, answer.size());
record = answer.get(0);
assertEquals(record.getDoublefield(), 99d, 0.001);
assertEquals(record.getFloatfield(), 100.111, 0.001);
assertEquals(record.getIntegerfield().intValue(), 9);
} finally {
sqlSession.close();
}
}
@Test
public void testFieldsOnlyUpdateByExample() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
FieldsonlyMapper mapper = sqlSession.getMapper(FieldsonlyMapper.class);
Fieldsonly record = new Fieldsonly();
record.setDoublefield(11.22);
record.setFloatfield(33.44);
record.setIntegerfield(5);
mapper.insert(record);
record = new Fieldsonly();
record.setDoublefield(44.55);
record.setFloatfield(66.77);
record.setIntegerfield(8);
mapper.insert(record);
record = new Fieldsonly();
record.setDoublefield(88.99);
record.setFloatfield(100.111);
record.setIntegerfield(9);
mapper.insert(record);
record = new Fieldsonly();
record.setIntegerfield(22);
int rows = mapper.updateByExample(record)
.where(fieldsonly.integerfield, isEqualTo(5))
.build()
.execute();
assertEquals(1, rows);
List<Fieldsonly> answer = mapper.selectByExample()
.where(fieldsonly.integerfield, isEqualTo(22))
.build()
.execute();
assertEquals(1, answer.size());
record = answer.get(0);
assertNull(record.getDoublefield());
assertNull(record.getFloatfield());
assertEquals(record.getIntegerfield().intValue(), 22);
} finally {
sqlSession.close();
}
}
@Test
public void testPKOnlyUpdateByExampleSelective() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
PkonlyMapper mapper = sqlSession.getMapper(PkonlyMapper.class);
Pkonly key = new Pkonly(1, 3);
mapper.insert(key);
key = new Pkonly(5, 6);
mapper.insert(key);
key = new Pkonly(7, 8);
mapper.insert(key);
key = new Pkonly(null, 3);
int rows = mapper.updateByExampleSelective(key)
.where(pkonly.id, isGreaterThan(4))
.build()
.execute();
assertEquals(2, rows);
long returnedRows = mapper.countByExample()
.where(pkonly.id, isEqualTo(5))
.and(pkonly.seqNum, isEqualTo(3))
.build()
.execute();
assertEquals(1, returnedRows);
returnedRows = mapper.countByExample()
.where(pkonly.id, isEqualTo(7))
.and(pkonly.seqNum, isEqualTo(3))
.build()
.execute();
assertEquals(1, returnedRows);
} finally {
sqlSession.close();
}
}
@Test
public void testPKOnlyUpdateByExample() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
PkonlyMapper mapper = sqlSession.getMapper(PkonlyMapper.class);
Pkonly key = new Pkonly(1, 3);
mapper.insert(key);
key = new Pkonly(5, 6);
mapper.insert(key);
key = new Pkonly(7, 8);
mapper.insert(key);
key = new Pkonly(22, 3);
int rows = mapper.updateByExample(key)
.where(pkonly.id, isEqualTo(7))
.build()
.execute();
assertEquals(1, rows);
long returnedRows = mapper.countByExample()
.where(pkonly.id, isEqualTo(22))
.and(pkonly.seqNum, isEqualTo(3))
.build()
.execute();
assertEquals(1, returnedRows);
} finally {
sqlSession.close();
}
}
@Test
public void testPKFieldsUpdateByExampleSelective() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
PkfieldsMapper mapper = sqlSession.getMapper(PkfieldsMapper.class);
Pkfields record = new Pkfields();
record.setFirstname("Jeff");
record.setLastname("Smith");
record.setId1(1);
record.setId2(2);
mapper.insert(record);
record = new Pkfields();
record.setFirstname("Bob");
record.setLastname("Jones");
record.setId1(3);
record.setId2(4);
mapper.insert(record);
record = new Pkfields();
record.setFirstname("Fred");
int rows = mapper.updateByExampleSelective(record)
.where(pkfields.lastname, isLike("J%"))
.build()
.execute();
assertEquals(1, rows);
long returnedRows = mapper.countByExample()
.where(pkfields.firstname, isEqualTo("Fred"))
.and(pkfields.lastname, isEqualTo("Jones"))
.and(pkfields.id1, isEqualTo(3))
.and(pkfields.id2, isEqualTo(4))
.build()
.execute();
assertEquals(1, returnedRows);
} finally {
sqlSession.close();
}
}
@Test
public void testPKFieldsUpdateByExample() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
PkfieldsMapper mapper = sqlSession.getMapper(PkfieldsMapper.class);
Pkfields record = new Pkfields();
record.setFirstname("Jeff");
record.setLastname("Smith");
record.setId1(1);
record.setId2(2);
mapper.insert(record);
record = new Pkfields();
record.setFirstname("Bob");
record.setLastname("Jones");
record.setId1(3);
record.setId2(4);
mapper.insert(record);
record = new Pkfields();
record.setFirstname("Fred");
record.setId1(3);
record.setId2(4);
int rows = mapper.updateByExample(record)
.where(pkfields.id1, isEqualTo(3))
.and(pkfields.id2, isEqualTo(4))
.build()
.execute();
assertEquals(1, rows);
long returnedRows = mapper.countByExample()
.where(pkfields.firstname, isEqualTo("Fred"))
.and(pkfields.id1, isEqualTo(3))
.and(pkfields.id2, isEqualTo(4))
.build()
.execute();
assertEquals(1, returnedRows);
} finally {
sqlSession.close();
}
}
@Test
public void testPKBlobsUpdateByExampleSelective() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
PkblobsMapper mapper = sqlSession.getMapper(PkblobsMapper.class);
Pkblobs record = new Pkblobs();
record.setId(3);
record.setBlob1(generateRandomBlob());
record.setBlob2(generateRandomBlob());
mapper.insert(record);
record = new Pkblobs();
record.setId(6);
record.setBlob1(generateRandomBlob());
record.setBlob2(generateRandomBlob());
mapper.insert(record);
Pkblobs newRecord = new Pkblobs();
newRecord.setBlob1(generateRandomBlob());
int rows = mapper.updateByExampleSelective(newRecord)
.where(pkblobs.id, isGreaterThan(4))
.build()
.execute();
assertEquals(1, rows);
List<Pkblobs> answer = mapper.selectByExample()
.where(pkblobs.id, isGreaterThan(4))
.build()
.execute();
assertEquals(1, answer.size());
Pkblobs returnedRecord = answer.get(0);
assertEquals(6, returnedRecord.getId().intValue());
assertTrue(blobsAreEqual(newRecord.getBlob1(), returnedRecord.getBlob1()));
assertTrue(blobsAreEqual(record.getBlob2(), returnedRecord.getBlob2()));
} finally {
sqlSession.close();
}
}
@Test
public void testPKBlobsUpdateByExample() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
PkblobsMapper mapper = sqlSession.getMapper(PkblobsMapper.class);
Pkblobs record = new Pkblobs();
record.setId(3);
record.setBlob1(generateRandomBlob());
record.setBlob2(generateRandomBlob());
mapper.insert(record);
record = new Pkblobs();
record.setId(6);
record.setBlob1(generateRandomBlob());
record.setBlob2(generateRandomBlob());
mapper.insert(record);
Pkblobs newRecord = new Pkblobs();
newRecord.setId(8);
int rows = mapper.updateByExample(newRecord)
.where(pkblobs.id, isGreaterThan(4))
.build()
.execute();
assertEquals(1, rows);
List<Pkblobs> answer = mapper.selectByExample()
.where(pkblobs.id, isGreaterThan(4))
.build()
.execute();
assertEquals(1, answer.size());
Pkblobs returnedRecord = answer.get(0);
assertEquals(8, returnedRecord.getId().intValue());
assertNull(returnedRecord.getBlob1());
assertNull(returnedRecord.getBlob2());
} finally {
sqlSession.close();
}
}
@Test
public void testPKFieldsBlobsUpdateByExampleSelective() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
PkfieldsblobsMapper mapper = sqlSession.getMapper(PkfieldsblobsMapper.class);
Pkfieldsblobs record = new Pkfieldsblobs();
record.setId1(3);
record.setId2(4);
record.setFirstname("Jeff");
record.setLastname("Smith");
record.setBlob1(generateRandomBlob());
mapper.insert(record);
record = new Pkfieldsblobs();
record.setId1(5);
record.setId2(6);
record.setFirstname("Scott");
record.setLastname("Jones");
record.setBlob1(generateRandomBlob());
mapper.insert(record);
Pkfieldsblobs newRecord = new Pkfieldsblobs();
newRecord.setFirstname("Fred");
int rows = mapper.updateByExampleSelective(newRecord)
.where(pkfieldsblobs.id1, isNotEqualTo(3))
.build()
.execute();
assertEquals(1, rows);
List<Pkfieldsblobs> answer = mapper.selectByExample()
.where(pkfieldsblobs.id1, isNotEqualTo(3))
.build()
.execute();
assertEquals(1, answer.size());
Pkfieldsblobs returnedRecord = answer.get(0);
assertEquals(record.getId1(), returnedRecord.getId1());
assertEquals(record.getId2(), returnedRecord.getId2());
assertEquals(newRecord.getFirstname(), returnedRecord.getFirstname());
assertEquals(record.getLastname(), returnedRecord.getLastname());
assertTrue(blobsAreEqual(record.getBlob1(), returnedRecord.getBlob1()));
} finally {
sqlSession.close();
}
}
@Test
public void testPKFieldsBlobsUpdateByExample() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
PkfieldsblobsMapper mapper = sqlSession.getMapper(PkfieldsblobsMapper.class);
Pkfieldsblobs record = new Pkfieldsblobs();
record.setId1(3);
record.setId2(4);
record.setFirstname("Jeff");
record.setLastname("Smith");
record.setBlob1(generateRandomBlob());
mapper.insert(record);
record = new Pkfieldsblobs();
record.setId1(5);
record.setId2(6);
record.setFirstname("Scott");
record.setLastname("Jones");
record.setBlob1(generateRandomBlob());
mapper.insert(record);
Pkfieldsblobs newRecord = new Pkfieldsblobs();
newRecord.setId1(3);
newRecord.setId2(8);
newRecord.setFirstname("Fred");
int rows = mapper.updateByExample(newRecord)
.where(pkfieldsblobs.id1, isEqualTo(3))
.build()
.execute();
assertEquals(1, rows);
List<Pkfieldsblobs> answer = mapper.selectByExample()
.where(pkfieldsblobs.id1, isEqualTo(3))
.build()
.execute();
assertEquals(1, answer.size());
Pkfieldsblobs returnedRecord = answer.get(0);
assertEquals(newRecord.getId1(), returnedRecord.getId1());
assertEquals(newRecord.getId2(), returnedRecord.getId2());
assertEquals(newRecord.getFirstname(), returnedRecord.getFirstname());
assertNull(returnedRecord.getLastname());
assertNull(returnedRecord.getBlob1());
} finally {
sqlSession.close();
}
}
@Test
public void testFieldsBlobsUpdateByExampleSelective() {
SqlSession sqlSession = sqlSessionFactory.openSession();
try {
FieldsblobsMapper mapper = sqlSession.getMapper(FieldsblobsMapper.class);
Fieldsblobs record = new Fieldsblobs();
record.setFirstname("Jeff");
record.setLastname("Smith");
record.setBlob1(generateRandomBlob());
record.setBlob2(generateRandomBlob());
mapper.insert(record);
record = new Fieldsblobs();
record.setFirstname("Scott");
record.setLastname("Jones");
record.setBlob1(generateRandomBlob());
record.setBlob2(generateRandomBlob());
mapper.insert(record);
Fieldsblobs newRecord = new Fieldsblobs();
newRecord.setLastname("Doe");
int rows = mapper.updateByExampleSelective(newRecord)
.where(fieldsblobs.firstname, isLike("S%"))
.build()
.execute();
assertEquals(1, rows);
List<Fieldsblobs> answer = mapper.selectByExample()
.where(fieldsblobs.firstname, isLike("S%"))
.build()
.execute();
assertEquals(1, answer.size());
Fieldsblobs returnedRecord = answer.get(0);
assertEquals(record.getFirstname(), returnedRecord.getFirstname());
assertEquals(newRecord.getLastname(), returnedRecord.getLastname());
assertTrue(blobsAreEqual(record.getBlob1(), returnedRecord.getBlob1()));
assertTrue(blobsAreEqual(record.getBlob2(), returnedRecord.getBlob2()));
} finally {
sqlSession.close();
}
}
@Test
public void testFieldsBlobsUpdateByExample() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
        FieldsblobsMapper mapper = sqlSession.getMapper(FieldsblobsMapper.class);

        // Seed one row outside the LIKE filter...
        Fieldsblobs jeffRow = new Fieldsblobs();
        jeffRow.setFirstname("Jeff");
        jeffRow.setLastname("Smith");
        jeffRow.setBlob1(generateRandomBlob());
        jeffRow.setBlob2(generateRandomBlob());
        mapper.insert(jeffRow);

        // ...and one row inside it.
        Fieldsblobs scottRow = new Fieldsblobs();
        scottRow.setFirstname("Scott");
        scottRow.setLastname("Jones");
        scottRow.setBlob1(generateRandomBlob());
        scottRow.setBlob2(generateRandomBlob());
        mapper.insert(scottRow);

        // Non-selective update: every unset column (the blobs) is overwritten with null.
        Fieldsblobs update = new Fieldsblobs();
        update.setFirstname("Scott");
        update.setLastname("Doe");
        int updatedRows = mapper.updateByExample(update)
                .where(fieldsblobs.firstname, isLike("S%"))
                .build()
                .execute();
        assertEquals(1, updatedRows);

        List<Fieldsblobs> matches = mapper.selectByExample()
                .where(fieldsblobs.firstname, isLike("S%"))
                .build()
                .execute();
        assertEquals(1, matches.size());

        // Name columns take the update's values; blob columns were nulled out.
        Fieldsblobs found = matches.get(0);
        assertEquals(update.getFirstname(), found.getFirstname());
        assertEquals(update.getLastname(), found.getLastname());
        assertNull(found.getBlob1());
        assertNull(found.getBlob2());
    } finally {
        sqlSession.close();
    }
}
@Test
public void testAwfulTableUpdateByExampleSelective() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
        AwfulTableMapper mapper = sqlSession.getMapper(AwfulTableMapper.class);

        // Seed one row that the email LIKE filter must NOT match...
        AwfulTable firstRow = new AwfulTable();
        firstRow.seteMail("fred@fred.com");
        firstRow.setEmailaddress("alsofred@fred.com");
        firstRow.setFirstFirstName("fred1");
        firstRow.setFrom("from field");
        firstRow.setId1(1);
        firstRow.setId2(2);
        firstRow.setId5(5);
        firstRow.setId6(6);
        firstRow.setId7(7);
        firstRow.setSecondFirstName("fred2");
        firstRow.setThirdFirstName("fred3");
        mapper.insert(firstRow);

        // ...and one row that it must match.
        AwfulTable secondRow = new AwfulTable();
        secondRow.seteMail("fred2@fred.com");
        secondRow.setEmailaddress("alsofred2@fred.com");
        secondRow.setFirstFirstName("fred11");
        secondRow.setFrom("from from field");
        secondRow.setId1(11);
        secondRow.setId2(22);
        secondRow.setId5(55);
        secondRow.setId6(66);
        secondRow.setId7(77);
        secondRow.setSecondFirstName("fred22");
        secondRow.setThirdFirstName("fred33");
        mapper.insert(secondRow);

        // Selective update: only firstFirstName is set, so only that column should change.
        AwfulTable update = new AwfulTable();
        update.setFirstFirstName("Alonzo");
        int updatedRows = mapper.updateByExampleSelective(update)
                .where(awfulTable.eMail, isLike("fred2@%"))
                .build()
                .execute();
        assertEquals(1, updatedRows);

        List<AwfulTable> matches = mapper.selectByExample()
                .where(awfulTable.eMail, isLike("fred2@%"))
                .build()
                .execute();
        assertEquals(1, matches.size());

        // Every original column survives except the selectively updated firstFirstName.
        AwfulTable found = matches.get(0);
        assertEquals(secondRow.getCustomerId(), found.getCustomerId());
        assertEquals(secondRow.geteMail(), found.geteMail());
        assertEquals(secondRow.getEmailaddress(), found.getEmailaddress());
        assertEquals(update.getFirstFirstName(), found.getFirstFirstName());
        assertEquals(secondRow.getFrom(), found.getFrom());
        assertEquals(secondRow.getId1(), found.getId1());
        assertEquals(secondRow.getId2(), found.getId2());
        assertEquals(secondRow.getId5(), found.getId5());
        assertEquals(secondRow.getId6(), found.getId6());
        assertEquals(secondRow.getId7(), found.getId7());
        assertEquals(secondRow.getSecondFirstName(), found.getSecondFirstName());
        assertEquals(secondRow.getThirdFirstName(), found.getThirdFirstName());
    } finally {
        sqlSession.close();
    }
}
@Test
public void testAwfulTableUpdateByExample() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
        AwfulTableMapper mapper = sqlSession.getMapper(AwfulTableMapper.class);

        // Seed one row that the email LIKE filter must NOT match...
        AwfulTable firstRow = new AwfulTable();
        firstRow.seteMail("fred@fred.com");
        firstRow.setEmailaddress("alsofred@fred.com");
        firstRow.setFirstFirstName("fred1");
        firstRow.setFrom("from field");
        firstRow.setId1(1);
        firstRow.setId2(2);
        firstRow.setId5(5);
        firstRow.setId6(6);
        firstRow.setId7(7);
        firstRow.setSecondFirstName("fred2");
        firstRow.setThirdFirstName("fred3");
        mapper.insert(firstRow);

        // ...and one row that it must match.
        AwfulTable secondRow = new AwfulTable();
        secondRow.seteMail("fred2@fred.com");
        secondRow.setEmailaddress("alsofred2@fred.com");
        secondRow.setFirstFirstName("fred11");
        secondRow.setFrom("from from field");
        secondRow.setId1(11);
        secondRow.setId2(22);
        secondRow.setId5(55);
        secondRow.setId6(66);
        secondRow.setId7(77);
        secondRow.setSecondFirstName("fred22");
        secondRow.setThirdFirstName("fred33");
        mapper.insert(secondRow);

        // Non-selective update: columns left unset on the update record are nulled out.
        AwfulTable update = new AwfulTable();
        update.setFirstFirstName("Alonzo");
        update.setCustomerId(58);
        update.setId1(111);
        update.setId2(222);
        update.setId5(555);
        update.setId6(666);
        update.setId7(777);
        int updatedRows = mapper.updateByExample(update)
                .where(awfulTable.eMail, isLike("fred2@%"))
                .build()
                .execute();
        assertEquals(1, updatedRows);

        // Re-query by the new customer id since eMail was overwritten with null.
        List<AwfulTable> matches = mapper.selectByExample()
                .where(awfulTable.customerId, isEqualTo(58))
                .build()
                .execute();
        assertEquals(1, matches.size());

        AwfulTable found = matches.get(0);
        assertEquals(update.getCustomerId(), found.getCustomerId());
        assertNull(found.geteMail());
        assertNull(found.getEmailaddress());
        assertEquals(update.getFirstFirstName(), found.getFirstFirstName());
        assertNull(found.getFrom());
        assertEquals(update.getId1(), found.getId1());
        assertEquals(update.getId2(), found.getId2());
        assertEquals(update.getId5(), found.getId5());
        assertEquals(update.getId6(), found.getId6());
        assertEquals(update.getId7(), found.getId7());
        assertNull(found.getSecondFirstName());
        assertNull(found.getThirdFirstName());
    } finally {
        sqlSession.close();
    }
}
}
|
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest.action.support;
import org.elasticsearch.common.compress.lzf.LZF;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamOutput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.rest.RestRequest;
import java.io.IOException;
/**
*
*/
public class RestXContentBuilder {

    /**
     * Creates an {@link XContentBuilder} for a REST response, choosing the content type
     * from (in priority order) the request's {@code Content-Type} header, the request
     * body, and finally a JSON default. Honors the {@code pretty} and {@code case}
     * request parameters.
     *
     * @param request the incoming REST request
     * @return a builder backed by a cached stream entry
     * @throws IOException if the builder cannot be created
     */
    public static XContentBuilder restContentBuilder(RestRequest request) throws IOException {
        // Prefer the explicit Content-Type header sent by the client.
        XContentType contentType = XContentType.fromRestContentType(request.header("Content-Type"));
        if (contentType == null && request.hasContent()) {
            // No usable header: try to sniff the type from the request body.
            contentType = XContentFactory.xContentType(request.contentByteArray(), request.contentByteArrayOffset(), request.contentLength());
        }
        if (contentType == null) {
            // Nothing to go on; default to JSON.
            contentType = XContentType.JSON;
        }
        CachedStreamOutput.Entry cachedEntry = CachedStreamOutput.popEntry();
        XContentBuilder builder = new XContentBuilder(XContentFactory.xContent(contentType), cachedEntry.cachedBytes(), cachedEntry);
        if (request.paramAsBoolean("pretty", false)) {
            builder.prettyPrint();
        }
        // equals() on a constant is null-safe, so no separate null check is needed.
        if ("camelCase".equals(request.param("case"))) {
            builder.fieldCaseConversion(XContentBuilder.FieldCaseConversion.CAMELCASE);
        } else {
            // we expect all REST interfaces to write results in underscore casing, so
            // no need for double casing
            builder.fieldCaseConversion(XContentBuilder.FieldCaseConversion.NONE);
        }
        return builder;
    }

    /**
     * Writes an entire stored document source into {@code builder} as the
     * {@code _source} field. Convenience overload covering the whole array.
     */
    public static void restDocumentSource(byte[] source, XContentBuilder builder, ToXContent.Params params) throws IOException {
        restDocumentSource(source, 0, source.length, builder, params);
    }

    /**
     * Writes a stored document source slice into {@code builder} as the {@code _source}
     * field, transparently decompressing LZF-compressed sources. When the source's
     * content type already matches the builder's, the raw bytes are streamed through
     * untouched; otherwise the source is re-serialized via a parser.
     *
     * @param source  buffer holding the document source
     * @param offset  start of the source within {@code source}
     * @param length  length of the source in bytes
     * @param builder response builder to write into
     * @param params  serialization parameters (currently unused here)
     * @throws IOException on read or write failure
     */
    public static void restDocumentSource(byte[] source, int offset, int length, XContentBuilder builder, ToXContent.Params params) throws IOException {
        if (LZF.isCompressed(source, offset, length)) {
            BytesStreamInput siBytes = new BytesStreamInput(source, offset, length, false);
            LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
            XContentType contentType = XContentFactory.xContentType(siLzf);
            // xContentType consumed buffered bytes while sniffing; rewind before reuse.
            siLzf.resetToBufferStart();
            if (contentType == builder.contentType()) {
                builder.rawField("_source", siLzf);
            } else {
                copySourceField(XContentFactory.xContent(contentType).createParser(siLzf), builder);
            }
        } else {
            XContentType contentType = XContentFactory.xContentType(source, offset, length);
            if (contentType == builder.contentType()) {
                builder.rawField("_source", source, offset, length);
            } else {
                copySourceField(XContentFactory.xContent(contentType).createParser(source, offset, length), builder);
            }
        }
    }

    /**
     * Re-serializes a document source through {@code parser} into {@code builder}'s
     * own content type under the {@code _source} field, always closing the parser.
     */
    private static void copySourceField(XContentParser parser, XContentBuilder builder) throws IOException {
        try {
            parser.nextToken();
            builder.field("_source");
            builder.copyCurrentStructure(parser);
        } finally {
            parser.close();
        }
    }
}
|
import java.util.ArrayList;
import java.util.List;
/**
 * A simple generic n-ary tree node. Each node holds a payload, a parent
 * reference (null for the root) and an ordered list of children.
 * Not thread-safe.
 *
 * @param <T> payload type
 */
public class Tree<T> {
    private T data = null;
    private List<Tree<T>> children = new ArrayList<>();
    private Tree<T> parent = null;

    /**
     * Creates a detached node (no parent, no children) holding {@code data}.
     */
    public Tree(T data) {
        this.data = data;
    }

    /** Returns the parent node, or null if this node is a root. */
    public Tree<T> getParent() {
        return parent;
    }

    /** Returns this node's payload. */
    public T getData() {
        return data;
    }

    /**
     * Returns the live (mutable) child list.
     * NOTE(review): callers can corrupt parent links by mutating this list
     * directly; kept as-is for backward compatibility.
     */
    public List<Tree<T>> getChildren() {
        return children;
    }

    /** Replaces this node's payload. */
    public void setData(T data) {
        this.data = data;
    }

    /** Sets the parent link only; does not update the parent's child list. */
    public void setParent(Tree<T> parent) {
        this.parent = parent;
    }

    /** Replaces the child list wholesale; does not update the children's parent links. */
    public void setChildren(List<Tree<T>> children) {
        this.children = children;
    }

    /**
     * Appends {@code child} to this node's children and re-parents it here.
     *
     * @return the added child, to allow chained construction
     */
    public Tree<T> addChild(Tree<T> child) {
        child.setParent(this);
        this.children.add(child);
        return child;
    }

    /** Appends all of {@code children}, re-parenting each to this node. */
    public void addChildren(List<Tree<T>> children) {
        children.forEach(each -> each.setParent(this));
        this.children.addAll(children);
    }

    /**
     * Prints {@code node}'s payload prefixed by {@code appender}, then recurses
     * into its children.
     * NOTE(review): the prefix is doubled at every level (appender + appender),
     * so depth d is indented by 2^d copies of the original appender — confirm
     * whether linear indentation was intended before changing.
     */
    public void printAllNodes(Tree<T> node, String appender) {
        System.out.println(appender + node.getData());
        node.getChildren().forEach(each -> printAllNodes(each, appender + appender));
    }

    /**
     * Returns the root of the tree containing {@code node} by walking its
     * parent chain.
     *
     * Bug fix: the previous implementation returned {@code this} (the receiver)
     * when {@code node} had no parent, and recursed on the receiver's own parent
     * field instead of {@code node}'s — so any call where the receiver differed
     * from the argument could return the wrong node. Behavior is unchanged for
     * the common {@code node.getRootNode(node)} usage.
     */
    public Tree<T> getRootNode(Tree<T> node) {
        if (node.getParent() == null) {
            return node;
        }
        return getRootNode(node.getParent());
    }

    /**
     * Removes this node from the tree, splicing its children into its former
     * position under its parent. If this node is a root, delegates to
     * {@link #deleteRootNode()} (the new root returned there is discarded —
     * callers deleting a root should call {@code deleteRootNode()} directly to
     * keep a reference to the tree).
     */
    public void deleteNode() {
        if (this.parent != null) {
            int index = this.parent.getChildren().indexOf(this);
            this.parent.getChildren().remove(this);
            // Re-parent the orphaned children before splicing them in.
            for (Tree<T> node : getChildren()) {
                node.setParent(this.parent);
            }
            this.parent.getChildren().addAll(index, getChildren());
        } else {
            this.deleteRootNode();
        }
        this.getChildren().clear();
    }

    /**
     * Deletes this root node, promoting its first child to be the new root and
     * re-parenting the remaining children under it.
     *
     * @return the new root (the former first child)
     * @throws IllegalStateException if this node is not a root
     *         NOTE(review): a childless root throws IndexOutOfBoundsException
     *         from {@code get(0)}; preserved for backward compatibility.
     */
    public Tree<T> deleteRootNode() {
        if (this.parent != null) {
            throw new IllegalStateException("deleteRootNode is called for non root");
        }
        final Tree<T> newParent = this.getChildren().get(0);
        newParent.setParent(null);
        this.getChildren().remove(0);
        this.getChildren().forEach(child -> child.setParent(newParent));
        newParent.getChildren().addAll(getChildren());
        this.getChildren().clear();
        return newParent;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.