repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
EvilGreenArmy/tms | src/main/java/com/tms/security/Md5PwdEncoder.java | 3170 | package com.tms.security;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

import org.jasypt.commons.CommonUtils;
import org.springframework.stereotype.Service;
/**
* MD5密码加密
*/
@Service("md5PwdEncoder")
public class Md5PwdEncoder implements PwdEncoder {
public String encodePassword(String rawPass) {
return encodePassword(rawPass, defaultSalt);
}
public String encodePassword(String rawPass, String salt) {
String saltedPass = mergePasswordAndSalt(rawPass, salt, false);
MessageDigest messageDigest = getMessageDigest();
byte[] digest;
try {
digest = messageDigest.digest(saltedPass.getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException("UTF-8 not supported!");
}
return new String(CommonUtils.toHexadecimal(digest));
}
public boolean isPasswordValid(String encPass, String rawPass) {
return isPasswordValid(encPass, rawPass, defaultSalt);
}
public boolean isPasswordValid(String encPass, String rawPass, String salt) {
if (encPass == null) {
return false;
}
String pass2 = encodePassword(rawPass, salt);
return encPass.equals(pass2);
}
protected final MessageDigest getMessageDigest() {
String algorithm = "MD5";
try {
return MessageDigest.getInstance(algorithm);
} catch (NoSuchAlgorithmException e) {
throw new IllegalArgumentException("No such algorithm ["
+ algorithm + "]");
}
}
/**
* Used by subclasses to extract the password and salt from a merged
* <code>String</code> created using
* {@link #mergePasswordAndSalt(String,Object,boolean)}.
* <p>
* The first element in the returned array is the password. The second
* element is the salt. The salt array element will always be present, even
* if no salt was found in the <code>mergedPasswordSalt</code> argument.
* </p>
*
* @param salt
* as generated by <code>mergePasswordAndSalt</code>
*
* @return an array, in which the first element is the password and the
* second the salt
*
* @throws IllegalArgumentException
* if mergedPasswordSalt is null or empty.
*/
protected String mergePasswordAndSalt(String password, Object salt,
boolean strict) {
if (password == null) {
password = "";
}
if (strict && (salt != null)) {
if ((salt.toString().lastIndexOf("{") != -1)
|| (salt.toString().lastIndexOf("}") != -1)) {
throw new IllegalArgumentException(
"Cannot use { or } in salt.toString()");
}
}
if ((salt == null) || "".equals(salt)) {
return password;
} else {
return password + "{" + salt.toString() + "}";
}
}
/**
* 混淆码。防止破解。
*/
private String defaultSalt;
/**
* 获得混淆码
*
* @return
*/
public String getDefaultSalt() {
return defaultSalt;
}
/**
* 设置混淆码
*
* @param defaultSalt
*/
public void setDefaultSalt(String defaultSalt) {
this.defaultSalt = defaultSalt;
}
public static void main(String[] args) {
Md5PwdEncoder p=new Md5PwdEncoder();
System.out.println(p.encodePassword("111111","googlebaidu"));
}
}
| apache-2.0 |
kevinzetterstrom/forecast-android | forecast/src/main/java/android/zetterstrom/com/forecast/models/Alert.java | 2472 | /*
* Copyright 2016 Kevin Zetterstrom
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.zetterstrom.com.forecast.models;
import android.support.annotation.Nullable;
import com.google.gson.annotations.SerializedName;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
/**
* An alert object represents a severe weather warning issued for the requested location by a
* governmental authority
* <p/>
* Not all fields may contain data. A null field is indicative that it was not present in the response.
* <p/>
* <p/>
* Created by Kevin Zetterstrom on 2/10/16.
*/
@SuppressWarnings("unused")
public class Alert implements Serializable {

    private static final long serialVersionUID = -4721384892605656941L;

    @Nullable
    @SerializedName(ModelConstants.FIELD_TITLE)
    private String mTitle;

    @Nullable
    @SerializedName(ModelConstants.FIELD_REGIONS)
    private List<String> mRegions;

    @Nullable
    @SerializedName(ModelConstants.FIELD_SEVERITY)
    private String mSeverity;

    @Nullable
    @SerializedName(ModelConstants.FIELD_DESCRIPTION)
    private String mDescription;

    @Nullable
    @SerializedName(ModelConstants.FIELD_EXPIRES)
    private Date mExpires;

    @Nullable
    @SerializedName(ModelConstants.FIELD_URI)
    private String mUri;

    @Nullable
    @SerializedName(ModelConstants.FIELD_TIME)
    private Date mTime;

    /** @return the alert title, or null if not present in the response */
    @Nullable
    public String getTitle() {
        return mTitle;
    }

    /** @return the regions the alert applies to, or null if not present in the response */
    @Nullable
    public List<String> getRegions() {
        return mRegions;
    }

    /** @return the alert severity, or null if not present in the response */
    @Nullable
    public String getSeverity() {
        return mSeverity;
    }

    /** @return the alert description text, or null if not present in the response */
    @Nullable
    public String getDescription() {
        return mDescription;
    }

    /** @return the expiration time of the alert, or null if not present in the response */
    @Nullable
    public Date getExpires() {
        return mExpires;
    }

    /** @return the alert URI, or null if not present in the response */
    @Nullable
    public String getUri() {
        return mUri;
    }

    /** @return the alert time, or null if not present in the response */
    @Nullable
    public Date getTime() {
        return mTime;
    }
}
| apache-2.0 |
PilarHu/codemirror-jaxb | src/main/java/hu/pilar/cjg/TagInfo.java | 2635 | /*
* Copyright (c) 2006-2014 PILAR Kft. All Rights Reserved.
*
* This software is the confidential and proprietary information of
* PILAR Kft. ("Confidential Information").
* You shall not disclose such Confidential Information and shall use it only in
* accordance with the terms of the license agreement you entered into
* with PILAR Kft.
*
* PILAR MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY OF
* THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
* PARTICULAR PURPOSE, OR NON-INFRINGEMENT. PILAR SHALL NOT BE LIABLE FOR
* ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR
* DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES.tor.
*/
package hu.pilar.cjg;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
/**
 * Metadata describing a single tag: its attributes (with their possible
 * values), the tags allowed directly beneath it, and any substitute tags that
 * may be used in its place.
 *
 * @author cserepj
 */
class TagInfo {

    private final String tag;

    /** All possible attribute values, keyed by attribute name. */
    private final Map<String, Set<String>> attrs = new TreeMap<>();

    /** Tags that can appear beneath this tag. */
    private final Set<String> children = new TreeSet<>();

    /** Other tags that may be used instead of this one. */
    private final Set<String> overrides = new HashSet<>();

    TagInfo(String tag) {
        this.tag = tag;
    }

    /** Copy constructor variant: same data as {@code source} under a new tag name. */
    TagInfo(String tag, TagInfo source) {
        this.tag = tag;
        children.addAll(source.children);
        attrs.putAll(source.attrs);
        overrides.addAll(source.overrides);
    }

    @JsonIgnore
    String getTag() {
        return tag;
    }

    @JsonProperty(value = "attrs")
    Map<String, Set<String>> getAttrs() {
        return attrs;
    }

    @JsonIgnore
    Set<String> getOverrides() {
        return overrides;
    }

    @JsonProperty("children")
    Set<String> getChildren() {
        return children;
    }

    /** Registers an attribute and its possible values; returns this for chaining. */
    TagInfo withAttribute(String name, Set<String> possibleValues) {
        attrs.put(name, possibleValues);
        return this;
    }

    /**
     * Registers {@code child} — and every tag that can substitute for it — as
     * an allowed child of this tag. A null child is ignored; returns this for
     * chaining.
     */
    TagInfo withChild(TagInfo child) {
        if (child == null) {
            return this;
        }
        if (child.tag != null) {
            children.add(child.tag);
        }
        // addAll on an empty overrides set is a no-op, so no emptiness guard needed.
        children.addAll(child.overrides);
        return this;
    }
}
| apache-2.0 |
fml5362/BasketBallDemo | src/com/hyphenate/chatuidemo/circle/mvp/view/ICircleView.java | 842 | package com.hyphenate.chatuidemo.circle.mvp.view;
import com.hyphenate.chatuidemo.circle.bean.CommentConfig;
import com.hyphenate.chatuidemo.circle.bean.CommentItem;
import com.hyphenate.chatuidemo.circle.bean.FavortItem;
/**
 * View-layer contract for the "circle" feed screen: the presenter invokes
 * these callbacks to update the UI after the server responds.
 *
 * @ClassName: ICircleViewUpdateListener
 * @Description: view layer; updates the UI after a server response
 * @author yiw
 * @date 2015-12-28 16:13:04
 */
public interface ICircleView {

    /** Removes the circle item with the given id from the UI. */
    public void update2DeleteCircle(String circleId);

    /** Shows a newly added favorite (like) on the item at the given adapter position. */
    public void update2AddFavorite(int circlePosition, FavortItem addItem);

    /** Removes the favorite with the given id from the item at the given position. */
    public void update2DeleteFavort(int circlePosition, String favortId);

    /** Shows a newly added comment on the item at the given adapter position. */
    public void update2AddComment(int circlePosition, CommentItem addItem);

    /** Removes the comment with the given id from the item at the given position. */
    public void update2DeleteComment(int circlePosition, String commentId);

    /** Shows or hides the comment input area; config carries the reply target — TODO confirm against caller. */
    public void updateEditTextBodyVisible(int visibility, CommentConfig commentConfig);
}
| apache-2.0 |
orcoliver/traccar | src/test/java/org/traccar/protocol/MegastekFrameDecoderTest.java | 2194 | package org.traccar.protocol;
import org.junit.Test;
import org.traccar.ProtocolTest;
/**
 * Frame-decoder tests for the Megastek protocol: each case feeds a raw
 * captured byte sequence to the decoder and checks the exact frame it emits.
 */
public class MegastekFrameDecoderTest extends ProtocolTest {

    @Test
    public void testDecode() throws Exception {

        var decoder = new MegastekFrameDecoder();

        // Length-prefixed frame ("0137" + "$MGV002,..."): emitted unchanged.
        verifyFrame(
                binary("30313337244d47563030322c3335343535303035303239323636392c4756543930302c522c3134313231352c3033313830342c412c2c532c2c452c30302c30332c30302c332e36372c302e3030302c302e30302c3131372e312c302e302c3531302c31302c2c2c2c303030302c303030302c32322c31322c302c202c202c2c312d312c39382c5057204f4e3b21"),
                decoder.decode(null, null, binary("30313337244d47563030322c3335343535303035303239323636392c4756543930302c522c3134313231352c3033313830342c412c2c532c2c452c30302c30332c30302c332e36372c302e3030302c302e30302c3131372e312c302e302c3531302c31302c2c2c2c303030302c303030302c32322c31322c302c202c202c2c312d312c39382c5057204f4e3b21")));

        // "$MGV002,..." frame with no length prefix: trailing CR LF (0d0a) is stripped.
        verifyFrame(
                binary("244d47563030322c3031333737373030373533363433342c2c522c3031303131342c3030303035372c562c303030302e303030302c4e2c30303030302e303030302c452c30302c30302c30302c39392e392c302e3030302c302e30302c302e302c38302e3236332c3531302c38392c323334322c303330422c2c303030302c303030302c3230302c39362c302c202c202c2c2c2c54696d65723b21"),
                decoder.decode(null, null, binary("244d47563030322c3031333737373030373533363433342c2c522c3031303131342c3030303035372c562c303030302e303030302c4e2c30303030302e303030302c452c30302c30302c30302c39392e392c302e3030302c302e30302c302e302c38302e3236332c3531302c38392c323334322c303330422c2c303030302c303030302c3230302c39362c302c202c202c2c2c2c54696d65723b210d0a")));

        // "STX..." style frame: trailing CR LF is stripped as well.
        verifyFrame(
                binary("53545832363034373520202020202020202020024f244750524d432c3133313131302e30302c562c2c2c2c2c2c2c3036303931332c2c2c4e2a37362c3232322c30312c383135412c443435352c31312c39372c303030302c303030312c302c54696d65723b3735"),
                decoder.decode(null, null, binary("53545832363034373520202020202020202020024f244750524d432c3133313131302e30302c562c2c2c2c2c2c2c3036303931332c2c2c4e2a37362c3232322c30312c383135412c443435352c31312c39372c303030302c303030312c302c54696d65723b37350d0a")));
    }
}
| apache-2.0 |
losipiuk/presto | core/trino-main/src/main/java/io/trino/Session.java | 32381 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import io.trino.client.ProtocolHeaders;
import io.trino.connector.CatalogName;
import io.trino.metadata.SessionPropertyManager;
import io.trino.security.AccessControl;
import io.trino.security.SecurityContext;
import io.trino.spi.QueryId;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.security.Identity;
import io.trino.spi.security.SelectedRole;
import io.trino.spi.session.ResourceEstimates;
import io.trino.spi.type.TimeZoneKey;
import io.trino.sql.SqlPath;
import io.trino.sql.tree.Execute;
import io.trino.transaction.TransactionId;
import io.trino.transaction.TransactionManager;
import java.security.Principal;
import java.time.Instant;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.TimeZone;
import java.util.stream.Collectors;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static io.trino.client.ProtocolHeaders.TRINO_HEADERS;
import static io.trino.connector.CatalogName.createInformationSchemaCatalogName;
import static io.trino.connector.CatalogName.createSystemTablesCatalogName;
import static io.trino.spi.StandardErrorCode.NOT_FOUND;
import static io.trino.util.Failures.checkCondition;
import static java.util.Objects.requireNonNull;
public final class Session
{
private final QueryId queryId;
private final Optional<TransactionId> transactionId;
private final boolean clientTransactionSupport;
private final Identity identity;
private final Optional<String> source;
private final Optional<String> catalog;
private final Optional<String> schema;
private final SqlPath path;
private final TimeZoneKey timeZoneKey;
private final Locale locale;
private final Optional<String> remoteUserAddress;
private final Optional<String> userAgent;
private final Optional<String> clientInfo;
private final Optional<String> traceToken;
private final Set<String> clientTags;
private final Set<String> clientCapabilities;
private final ResourceEstimates resourceEstimates;
private final Instant start;
private final Map<String, String> systemProperties;
// TODO use Table
private final Map<CatalogName, Map<String, String>> connectorProperties;
// TODO use Table
private final Map<String, Map<String, String>> unprocessedCatalogProperties;
private final SessionPropertyManager sessionPropertyManager;
private final Map<String, String> preparedStatements;
private final ProtocolHeaders protocolHeaders;
/**
 * Canonical constructor. All reference arguments are null-checked and the
 * collection/map arguments are defensively copied into immutable views.
 *
 * <p>Enforces two invariants: catalog session properties may not be set while
 * a transaction is already open, and a schema may only be set together with a
 * catalog.
 */
public Session(
        QueryId queryId,
        Optional<TransactionId> transactionId,
        boolean clientTransactionSupport,
        Identity identity,
        Optional<String> source,
        Optional<String> catalog,
        Optional<String> schema,
        SqlPath path,
        Optional<String> traceToken,
        TimeZoneKey timeZoneKey,
        Locale locale,
        Optional<String> remoteUserAddress,
        Optional<String> userAgent,
        Optional<String> clientInfo,
        Set<String> clientTags,
        Set<String> clientCapabilities,
        ResourceEstimates resourceEstimates,
        Instant start,
        Map<String, String> systemProperties,
        Map<CatalogName, Map<String, String>> connectorProperties,
        Map<String, Map<String, String>> unprocessedCatalogProperties,
        SessionPropertyManager sessionPropertyManager,
        Map<String, String> preparedStatements,
        ProtocolHeaders protocolHeaders)
{
    this.queryId = requireNonNull(queryId, "queryId is null");
    this.transactionId = requireNonNull(transactionId, "transactionId is null");
    this.clientTransactionSupport = clientTransactionSupport;
    this.identity = requireNonNull(identity, "identity is null");
    this.source = requireNonNull(source, "source is null");
    this.catalog = requireNonNull(catalog, "catalog is null");
    this.schema = requireNonNull(schema, "schema is null");
    this.path = requireNonNull(path, "path is null");
    this.traceToken = requireNonNull(traceToken, "traceToken is null");
    this.timeZoneKey = requireNonNull(timeZoneKey, "timeZoneKey is null");
    this.locale = requireNonNull(locale, "locale is null");
    this.remoteUserAddress = requireNonNull(remoteUserAddress, "remoteUserAddress is null");
    this.userAgent = requireNonNull(userAgent, "userAgent is null");
    this.clientInfo = requireNonNull(clientInfo, "clientInfo is null");
    this.clientTags = ImmutableSet.copyOf(requireNonNull(clientTags, "clientTags is null"));
    this.clientCapabilities = ImmutableSet.copyOf(requireNonNull(clientCapabilities, "clientCapabilities is null"));
    this.resourceEstimates = requireNonNull(resourceEstimates, "resourceEstimates is null");
    this.start = start;
    this.systemProperties = ImmutableMap.copyOf(requireNonNull(systemProperties, "systemProperties is null"));
    this.sessionPropertyManager = requireNonNull(sessionPropertyManager, "sessionPropertyManager is null");
    this.preparedStatements = requireNonNull(preparedStatements, "preparedStatements is null");
    this.protocolHeaders = requireNonNull(protocolHeaders, "protocolHeaders is null");

    // Deep-copy each catalog's property map so the nested maps are immutable too.
    ImmutableMap.Builder<CatalogName, Map<String, String>> catalogPropertiesBuilder = ImmutableMap.builder();
    connectorProperties.entrySet().stream()
            .map(entry -> Maps.immutableEntry(entry.getKey(), ImmutableMap.copyOf(entry.getValue())))
            .forEach(catalogPropertiesBuilder::put);
    this.connectorProperties = catalogPropertiesBuilder.build();

    // Same deep copy for catalog properties not yet resolved to a connector.
    ImmutableMap.Builder<String, Map<String, String>> unprocessedCatalogPropertiesBuilder = ImmutableMap.builder();
    unprocessedCatalogProperties.entrySet().stream()
            .map(entry -> Maps.immutableEntry(entry.getKey(), ImmutableMap.copyOf(entry.getValue())))
            .forEach(unprocessedCatalogPropertiesBuilder::put);
    this.unprocessedCatalogProperties = unprocessedCatalogPropertiesBuilder.build();

    checkArgument(transactionId.isEmpty() || unprocessedCatalogProperties.isEmpty(), "Catalog session properties cannot be set if there is an open transaction");
    checkArgument(catalog.isPresent() || schema.isEmpty(), "schema is set but catalog is not");
}
public QueryId getQueryId()
{
return queryId;
}
public String getUser()
{
return identity.getUser();
}
public Identity getIdentity()
{
return identity;
}
public Optional<String> getSource()
{
return source;
}
public Optional<String> getCatalog()
{
return catalog;
}
public Optional<String> getSchema()
{
return schema;
}
public SqlPath getPath()
{
return path;
}
public TimeZoneKey getTimeZoneKey()
{
return timeZoneKey;
}
public Locale getLocale()
{
return locale;
}
public Optional<String> getRemoteUserAddress()
{
return remoteUserAddress;
}
public Optional<String> getUserAgent()
{
return userAgent;
}
public Optional<String> getClientInfo()
{
return clientInfo;
}
public Set<String> getClientTags()
{
return clientTags;
}
public Set<String> getClientCapabilities()
{
return clientCapabilities;
}
public Optional<String> getTraceToken()
{
return traceToken;
}
public ResourceEstimates getResourceEstimates()
{
return resourceEstimates;
}
public Instant getStart()
{
return start;
}
public Optional<TransactionId> getTransactionId()
{
    return transactionId;
}

/**
 * @return the active transaction id
 * @throws NotInTransactionException if the session has no active transaction
 */
public TransactionId getRequiredTransactionId()
        throws NotInTransactionException
{
    return transactionId.orElseThrow(NotInTransactionException::new);
}

public boolean isClientTransactionSupport()
{
    return clientTransactionSupport;
}

/**
 * Decodes the named system session property to the requested type via the
 * session property manager.
 */
public <T> T getSystemProperty(String name, Class<T> type)
{
    return sessionPropertyManager.decodeSystemPropertyValue(name, systemProperties.get(name), type);
}
public Map<CatalogName, Map<String, String>> getConnectorProperties()
{
return connectorProperties;
}
public Map<String, String> getConnectorProperties(CatalogName catalogName)
{
return connectorProperties.getOrDefault(catalogName, ImmutableMap.of());
}
public Map<String, Map<String, String>> getUnprocessedCatalogProperties()
{
return unprocessedCatalogProperties;
}
public Map<String, String> getSystemProperties()
{
return systemProperties;
}
public Map<String, String> getPreparedStatements()
{
    return preparedStatements;
}

/** Resolves the SQL text for the statement named in an EXECUTE statement. */
public String getPreparedStatementFromExecute(Execute execute)
{
    return getPreparedStatement(execute.getName().getValue());
}

/**
 * Returns the SQL text registered under {@code name}; fails with a NOT_FOUND
 * error (via checkCondition) if no such prepared statement exists.
 */
public String getPreparedStatement(String name)
{
    String sql = preparedStatements.get(name);
    checkCondition(sql != null, NOT_FOUND, "Prepared statement not found: " + name);
    return sql;
}

public ProtocolHeaders getProtocolHeaders()
{
    return protocolHeaders;
}
public Session beginTransactionId(TransactionId transactionId, TransactionManager transactionManager, AccessControl accessControl)
{
requireNonNull(transactionId, "transactionId is null");
checkArgument(this.transactionId.isEmpty(), "Session already has an active transaction");
requireNonNull(transactionManager, "transactionManager is null");
requireNonNull(accessControl, "accessControl is null");
for (Entry<String, String> property : systemProperties.entrySet()) {
// verify permissions
accessControl.checkCanSetSystemSessionProperty(identity, property.getKey());
// validate session property value
sessionPropertyManager.validateSystemSessionProperty(property.getKey(), property.getValue());
}
// Now that there is a transaction, the catalog name can be resolved to a connector, and the catalog properties can be validated
ImmutableMap.Builder<CatalogName, Map<String, String>> connectorProperties = ImmutableMap.builder();
for (Entry<String, Map<String, String>> catalogEntry : unprocessedCatalogProperties.entrySet()) {
String catalogName = catalogEntry.getKey();
Map<String, String> catalogProperties = catalogEntry.getValue();
if (catalogProperties.isEmpty()) {
continue;
}
CatalogName catalog = transactionManager.getOptionalCatalogMetadata(transactionId, catalogName)
.orElseThrow(() -> new TrinoException(NOT_FOUND, "Session property catalog does not exist: " + catalogName))
.getCatalogName();
for (Entry<String, String> property : catalogProperties.entrySet()) {
// verify permissions
accessControl.checkCanSetCatalogSessionProperty(new SecurityContext(transactionId, identity, queryId), catalogName, property.getKey());
// validate session property value
sessionPropertyManager.validateCatalogSessionProperty(catalog, catalogName, property.getKey(), property.getValue());
}
connectorProperties.put(catalog, catalogProperties);
}
ImmutableMap.Builder<String, SelectedRole> connectorRoles = ImmutableMap.builder();
for (Entry<String, SelectedRole> entry : identity.getConnectorRoles().entrySet()) {
String catalogName = entry.getKey();
SelectedRole role = entry.getValue();
CatalogName catalog = transactionManager.getOptionalCatalogMetadata(transactionId, catalogName)
.orElseThrow(() -> new TrinoException(NOT_FOUND, "Catalog for role does not exist: " + catalogName))
.getCatalogName();
if (role.getType() == SelectedRole.Type.ROLE) {
accessControl.checkCanSetCatalogRole(new SecurityContext(transactionId, identity, queryId), role.getRole().orElseThrow(), catalogName);
}
connectorRoles.put(catalog.getCatalogName(), role);
String informationSchemaCatalogName = createInformationSchemaCatalogName(catalog).getCatalogName();
if (transactionManager.getCatalogNames(transactionId).containsKey(informationSchemaCatalogName)) {
connectorRoles.put(informationSchemaCatalogName, role);
}
String systemTablesCatalogName = createSystemTablesCatalogName(catalog).getCatalogName();
if (transactionManager.getCatalogNames(transactionId).containsKey(systemTablesCatalogName)) {
connectorRoles.put(systemTablesCatalogName, role);
}
}
return new Session(
queryId,
Optional.of(transactionId),
clientTransactionSupport,
Identity.from(identity)
.withConnectorRoles(connectorRoles.build())
.build(),
source,
catalog,
schema,
path,
traceToken,
timeZoneKey,
locale,
remoteUserAddress,
userAgent,
clientInfo,
clientTags,
clientCapabilities,
resourceEstimates,
start,
systemProperties,
connectorProperties.build(),
ImmutableMap.of(),
sessionPropertyManager,
preparedStatements,
protocolHeaders);
}
public Session withDefaultProperties(Map<String, String> systemPropertyDefaults, Map<String, Map<String, String>> catalogPropertyDefaults)
{
requireNonNull(systemPropertyDefaults, "systemPropertyDefaults is null");
requireNonNull(catalogPropertyDefaults, "catalogPropertyDefaults is null");
// to remove this check properties must be authenticated and validated as in beginTransactionId
checkState(
this.transactionId.isEmpty() && this.connectorProperties.isEmpty(),
"Session properties cannot be overridden once a transaction is active");
Map<String, String> systemProperties = new HashMap<>();
systemProperties.putAll(systemPropertyDefaults);
systemProperties.putAll(this.systemProperties);
Map<String, Map<String, String>> connectorProperties = catalogPropertyDefaults.entrySet().stream()
.map(entry -> Maps.immutableEntry(entry.getKey(), new HashMap<>(entry.getValue())))
.collect(Collectors.toMap(Entry::getKey, Entry::getValue));
for (Entry<String, Map<String, String>> catalogProperties : this.unprocessedCatalogProperties.entrySet()) {
String catalog = catalogProperties.getKey();
for (Entry<String, String> entry : catalogProperties.getValue().entrySet()) {
connectorProperties.computeIfAbsent(catalog, id -> new HashMap<>())
.put(entry.getKey(), entry.getValue());
}
}
return new Session(
queryId,
transactionId,
clientTransactionSupport,
identity,
source,
catalog,
schema,
path,
traceToken,
timeZoneKey,
locale,
remoteUserAddress,
userAgent,
clientInfo,
clientTags,
clientCapabilities,
resourceEstimates,
start,
systemProperties,
ImmutableMap.of(),
connectorProperties,
sessionPropertyManager,
preparedStatements,
protocolHeaders);
}
/** Creates a ConnectorSession view of this session with no catalog-specific properties. */
public ConnectorSession toConnectorSession()
{
    return new FullConnectorSession(this, identity.toConnectorIdentity());
}

/** Convenience overload taking a plain catalog name string. */
public ConnectorSession toConnectorSession(String catalogName)
{
    return toConnectorSession(new CatalogName(catalogName));
}

/**
 * Creates a ConnectorSession view of this session scoped to the given catalog,
 * carrying that catalog's session properties (empty map if none were set) and
 * a catalog-scoped identity.
 */
public ConnectorSession toConnectorSession(CatalogName catalogName)
{
    requireNonNull(catalogName, "catalogName is null");
    return new FullConnectorSession(
            this,
            identity.toConnectorIdentity(catalogName.getCatalogName()),
            connectorProperties.getOrDefault(catalogName, ImmutableMap.of()),
            catalogName,
            catalogName.getCatalogName(),
            sessionPropertyManager);
}
public SessionRepresentation toSessionRepresentation()
{
return new SessionRepresentation(
queryId.toString(),
transactionId,
clientTransactionSupport,
identity.getUser(),
identity.getGroups(),
identity.getPrincipal().map(Principal::toString),
identity.getEnabledRoles(),
source,
catalog,
schema,
path,
traceToken,
timeZoneKey,
locale,
remoteUserAddress,
userAgent,
clientInfo,
clientTags,
clientCapabilities,
resourceEstimates,
start,
systemProperties,
connectorProperties,
unprocessedCatalogProperties,
identity.getConnectorRoles(),
preparedStatements,
protocolHeaders.getProtocolName());
}
@Override
public String toString()
{
    // Session and catalog property maps are not included; null-valued fields
    // are dropped from the output via omitNullValues().
    return toStringHelper(this)
            .add("queryId", queryId)
            .add("transactionId", transactionId)
            .add("user", getUser())
            .add("principal", getIdentity().getPrincipal().orElse(null))
            .add("source", source.orElse(null))
            .add("catalog", catalog.orElse(null))
            .add("schema", schema.orElse(null))
            .add("path", path)
            .add("traceToken", traceToken.orElse(null))
            .add("timeZoneKey", timeZoneKey)
            .add("locale", locale)
            .add("remoteUserAddress", remoteUserAddress.orElse(null))
            .add("userAgent", userAgent.orElse(null))
            .add("clientInfo", clientInfo.orElse(null))
            .add("clientTags", clientTags)
            .add("clientCapabilities", clientCapabilities)
            .add("resourceEstimates", resourceEstimates)
            .add("start", start)
            .omitNullValues()
            .toString();
}
public static SessionBuilder builder(SessionPropertyManager sessionPropertyManager)
{
return new SessionBuilder(sessionPropertyManager);
}
@VisibleForTesting
public static SessionBuilder builder(Session session)
{
return new SessionBuilder(session);
}
public SecurityContext toSecurityContext()
{
return new SecurityContext(getRequiredTransactionId(), getIdentity(), queryId);
}
public static class SessionBuilder
{
private QueryId queryId;
private TransactionId transactionId;
private boolean clientTransactionSupport;
private Identity identity;
private String source;
private String catalog;
private String schema;
private SqlPath path;
private Optional<String> traceToken = Optional.empty();
private TimeZoneKey timeZoneKey;
private Locale locale;
private String remoteUserAddress;
private String userAgent;
private String clientInfo;
private Set<String> clientTags = ImmutableSet.of();
private Set<String> clientCapabilities = ImmutableSet.of();
private ResourceEstimates resourceEstimates;
private Instant start = Instant.now();
private final Map<String, String> systemProperties = new HashMap<>();
private final Map<String, Map<String, String>> catalogSessionProperties = new HashMap<>();
private final SessionPropertyManager sessionPropertyManager;
private final Map<String, String> preparedStatements = new HashMap<>();
private ProtocolHeaders protocolHeaders = TRINO_HEADERS;
private SessionBuilder(SessionPropertyManager sessionPropertyManager)
{
this.sessionPropertyManager = requireNonNull(sessionPropertyManager, "sessionPropertyManager is null");
}
private SessionBuilder(Session session)
{
requireNonNull(session, "session is null");
checkArgument(session.getTransactionId().isEmpty(), "Session builder cannot be created from a session in a transaction");
this.sessionPropertyManager = session.sessionPropertyManager;
this.queryId = session.queryId;
this.transactionId = session.transactionId.orElse(null);
this.clientTransactionSupport = session.clientTransactionSupport;
this.identity = session.identity;
this.source = session.source.orElse(null);
this.catalog = session.catalog.orElse(null);
this.path = session.path;
this.schema = session.schema.orElse(null);
this.traceToken = requireNonNull(session.traceToken, "traceToken is null");
this.timeZoneKey = session.timeZoneKey;
this.locale = session.locale;
this.remoteUserAddress = session.remoteUserAddress.orElse(null);
this.userAgent = session.userAgent.orElse(null);
this.clientInfo = session.clientInfo.orElse(null);
this.clientTags = ImmutableSet.copyOf(session.clientTags);
this.start = session.start;
this.systemProperties.putAll(session.systemProperties);
session.unprocessedCatalogProperties
.forEach((catalog, properties) -> catalogSessionProperties.put(catalog, new HashMap<>(properties)));
this.preparedStatements.putAll(session.preparedStatements);
this.protocolHeaders = session.protocolHeaders;
}
public SessionBuilder setQueryId(QueryId queryId)
{
this.queryId = requireNonNull(queryId, "queryId is null");
return this;
}
public SessionBuilder setTransactionId(TransactionId transactionId)
{
checkArgument(catalogSessionProperties.isEmpty(), "Catalog session properties cannot be set if there is an open transaction");
this.transactionId = transactionId;
return this;
}
public SessionBuilder setClientTransactionSupport()
{
this.clientTransactionSupport = true;
return this;
}
public SessionBuilder setCatalog(String catalog)
{
this.catalog = catalog;
return this;
}
public SessionBuilder setCatalog(Optional<String> catalog)
{
this.catalog = catalog.orElse(null);
return this;
}
public SessionBuilder setLocale(Locale locale)
{
this.locale = locale;
return this;
}
public SessionBuilder setRemoteUserAddress(String remoteUserAddress)
{
this.remoteUserAddress = remoteUserAddress;
return this;
}
public SessionBuilder setRemoteUserAddress(Optional<String> remoteUserAddress)
{
this.remoteUserAddress = remoteUserAddress.orElse(null);
return this;
}
public SessionBuilder setSchema(String schema)
{
this.schema = schema;
return this;
}
public SessionBuilder setSchema(Optional<String> schema)
{
this.schema = schema.orElse(null);
return this;
}
public SessionBuilder setPath(SqlPath path)
{
this.path = path;
return this;
}
public SessionBuilder setPath(Optional<SqlPath> path)
{
this.path = path.orElse(null);
return this;
}
public SessionBuilder setSource(String source)
{
this.source = source;
return this;
}
public SessionBuilder setSource(Optional<String> source)
{
this.source = source.orElse(null);
return this;
}
public SessionBuilder setTraceToken(Optional<String> traceToken)
{
this.traceToken = requireNonNull(traceToken, "traceToken is null");
return this;
}
public SessionBuilder setStart(Instant start)
{
this.start = start;
return this;
}
/** Sets the session time zone; {@code build()} falls back to the JVM default zone when unset. */
public SessionBuilder setTimeZoneKey(TimeZoneKey timeZoneKey)
{
    this.timeZoneKey = timeZoneKey;
    return this;
}

/** Optional-accepting variant of {@link #setTimeZoneKey(TimeZoneKey)}; absent unwraps to null. */
public SessionBuilder setTimeZoneKey(Optional<TimeZoneKey> timeZoneKey)
{
    this.timeZoneKey = timeZoneKey.orElse(null);
    return this;
}

/** Sets the identity the session executes as. */
public SessionBuilder setIdentity(Identity identity)
{
    this.identity = identity;
    return this;
}

/** Sets the client's user agent string; may be null to leave it unset. */
public SessionBuilder setUserAgent(String userAgent)
{
    this.userAgent = userAgent;
    return this;
}

/** Optional-accepting variant of {@link #setUserAgent(String)}; absent unwraps to null. */
public SessionBuilder setUserAgent(Optional<String> userAgent)
{
    this.userAgent = userAgent.orElse(null);
    return this;
}

/** Sets free-form client info; may be null to leave it unset. */
public SessionBuilder setClientInfo(String clientInfo)
{
    this.clientInfo = clientInfo;
    return this;
}

/** Optional-accepting variant of {@link #setClientInfo(String)}; absent unwraps to null. */
public SessionBuilder setClientInfo(Optional<String> clientInfo)
{
    this.clientInfo = clientInfo.orElse(null);
    return this;
}

/** Sets the client tags; the set is defensively copied into an immutable set. */
public SessionBuilder setClientTags(Set<String> clientTags)
{
    this.clientTags = ImmutableSet.copyOf(clientTags);
    return this;
}

/** Sets the client capabilities; the set is defensively copied into an immutable set. */
public SessionBuilder setClientCapabilities(Set<String> clientCapabilities)
{
    this.clientCapabilities = ImmutableSet.copyOf(clientCapabilities);
    return this;
}

/** Sets the resource estimates; {@code build()} falls back to empty estimates when unset. */
public SessionBuilder setResourceEstimates(ResourceEstimates resourceEstimates)
{
    this.resourceEstimates = resourceEstimates;
    return this;
}
/**
 * Sets a system property for the session. The property name and value must
 * only contain characters from US-ASCII and must not contain '='.
 */
public SessionBuilder setSystemProperty(String propertyName, String propertyValue)
{
    systemProperties.put(propertyName, propertyValue);
    return this;
}

/**
 * Sets system properties, discarding any system properties previously set.
 */
public SessionBuilder setSystemProperties(Map<String, String> systemProperties)
{
    requireNonNull(systemProperties, "systemProperties is null");
    // Replace, not merge: the builder's own map keeps its identity but takes
    // on exactly the entries of the supplied map.
    this.systemProperties.clear();
    this.systemProperties.putAll(systemProperties);
    return this;
}

/**
 * Sets a catalog property for the session. The property name and value must
 * only contain characters from US-ASCII and must not contain '='.
 */
public SessionBuilder setCatalogSessionProperty(String catalogName, String propertyName, String propertyValue)
{
    // Inverse of the guard in setTransactionId: catalog session properties
    // and an externally supplied transaction are mutually exclusive.
    checkArgument(transactionId == null, "Catalog session properties cannot be set if there is an open transaction");
    catalogSessionProperties.computeIfAbsent(catalogName, id -> new HashMap<>()).put(propertyName, propertyValue);
    return this;
}
/**
 * Registers the SQL text of a prepared statement under the given name.
 */
public SessionBuilder addPreparedStatement(String statementName, String query)
{
    this.preparedStatements.put(statementName, query);
    return this;
}

/**
 * Sets the protocol headers for the session; must not be null.
 */
public SessionBuilder setProtocolHeaders(ProtocolHeaders protocolHeaders)
{
    this.protocolHeaders = requireNonNull(protocolHeaders, "protocolHeaders is null");
    return this;
}
/**
 * Builds the immutable {@link Session}. Values that were never set are
 * defaulted here: the SQL path defaults to an empty path, the time zone and
 * locale default to the JVM defaults, and the resource estimates default to
 * empty estimates.
 */
public Session build()
{
    return new Session(
            queryId,
            Optional.ofNullable(transactionId),
            clientTransactionSupport,
            identity,
            Optional.ofNullable(source),
            Optional.ofNullable(catalog),
            Optional.ofNullable(schema),
            path != null ? path : new SqlPath(Optional.empty()),
            traceToken,
            timeZoneKey != null ? timeZoneKey : TimeZoneKey.getTimeZoneKey(TimeZone.getDefault().getID()),
            locale != null ? locale : Locale.getDefault(),
            Optional.ofNullable(remoteUserAddress),
            Optional.ofNullable(userAgent),
            Optional.ofNullable(clientInfo),
            clientTags,
            clientCapabilities,
            // Construct the empty default only when no estimates were supplied;
            // the previous Optional.ofNullable(...).orElse(...) form built a
            // throwaway ResourceEstimates on every call. This also matches the
            // conditional-default style used for path/timeZoneKey/locale above.
            resourceEstimates != null ? resourceEstimates : new ResourceEstimateBuilder().build(),
            start,
            systemProperties,
            ImmutableMap.of(),
            catalogSessionProperties,
            sessionPropertyManager,
            preparedStatements,
            protocolHeaders);
}
}
/**
 * Builder for {@link ResourceEstimates}. Each estimate is optional and remains
 * absent unless its setter is invoked; the setters reject null.
 */
public static class ResourceEstimateBuilder
{
    private Optional<Duration> executionTime = Optional.empty();
    private Optional<Duration> cpuTime = Optional.empty();
    private Optional<DataSize> peakMemory = Optional.empty();

    /** Sets the estimated wall-clock execution time; must not be null. */
    public ResourceEstimateBuilder setExecutionTime(Duration executionTime)
    {
        this.executionTime = Optional.of(executionTime);
        return this;
    }

    /** Sets the estimated CPU time; must not be null. */
    public ResourceEstimateBuilder setCpuTime(Duration cpuTime)
    {
        this.cpuTime = Optional.of(cpuTime);
        return this;
    }

    /** Sets the estimated peak memory usage; must not be null. */
    public ResourceEstimateBuilder setPeakMemory(DataSize peakMemory)
    {
        this.peakMemory = Optional.of(peakMemory);
        return this;
    }

    /** Builds the estimates, converting to java.time/byte-count representations. */
    public ResourceEstimates build()
    {
        // Convert each airlift value to its plain representation, preserving absence.
        Optional<java.time.Duration> execution = executionTime.map(d -> java.time.Duration.ofMillis(d.toMillis()));
        Optional<java.time.Duration> cpu = cpuTime.map(d -> java.time.Duration.ofMillis(d.toMillis()));
        Optional<Long> peakMemoryBytes = peakMemory.map(m -> m.toBytes());
        return new ResourceEstimates(execution, cpu, peakMemoryBytes);
    }
}
}
| apache-2.0 |
vic-ita/RAW | RAW/src/raw/dht/DhtAddress.java | 1694 | /*******************************************************************************
* Copyright 2017 Vincenzo-Maria Cappelleri <vincenzo.cappelleri@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package raw.dht;
import java.io.Serializable;
import java.net.InetAddress;
import java.net.InetSocketAddress;
/**
 * Implementations of this interface wrap the physical (network)
 * address of a DHT node: a single IP address together with separate
 * UDP and TCP port numbers.
 *
 * @author vic
 *
 */
public interface DhtAddress extends Serializable {

    /**
     * @return the {@link InetAddress} of this node
     */
    public InetAddress getAddress();

    /**
     * @return an integer representation of the UDP port number [0 - 65535] of this node.
     */
    public int getUdpPort();

    /**
     * @return {@link InetSocketAddress} combining {@link #getAddress()} and the UDP port number
     */
    public InetSocketAddress getUdpSocketAddress();

    /**
     * @return an integer representation of the TCP port number [0 - 65535] of this node.
     */
    public int getTcpPort();

    /**
     * @return {@link InetSocketAddress} combining {@link #getAddress()} and the TCP port number
     */
    public InetSocketAddress getTcpSocketAddress();
}
| apache-2.0 |
everttigchelaar/camel-svn | camel-core/src/main/java/org/apache/camel/component/file/AntPathMatcherGenericFileFilter.java | 4610 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file;
import java.lang.reflect.Method;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.util.ObjectHelper;
import static org.apache.camel.util.CollectionHelper.collectionAsCommaDelimitedString;
/**
 * File filter using Spring's AntPathMatcher.
 * <p/>
 * Excludes take precedence over includes: if a file matches both an exclude
 * and an include pattern it is regarded as excluded.
 * <p/>
 * The actual matching is delegated via reflection to
 * {@code org.apache.camel.spring.util.SpringAntPathMatcherFileFilter} so that
 * camel-core has no compile-time dependency on camel-spring.
 *
 * @param <T> the file payload type handled by this {@link GenericFileFilter}
 */
public class AntPathMatcherGenericFileFilter<T> implements GenericFileFilter<T>, CamelContextAware {

    private static final String ANTPATHMATCHER_CLASSNAME = "org.apache.camel.spring.util.SpringAntPathMatcherFileFilter";

    private CamelContext context;
    private String[] excludes;
    private String[] includes;

    // Reflective handle on the SpringAntPathMatcherFileFilter instance and its
    // methods; created lazily on first accept() call. Writes happen inside the
    // synchronized block in accept(), and every caller re-acquires that same
    // lock, which safely publishes these fields.
    private Object filter;
    private Method includesMethod;
    private Method excludesMethod;
    private Method acceptsMethod;

    public boolean accept(GenericFile<T> file) {
        try {
            // Lazily create the delegate filter on first use.
            synchronized (this) {
                if (filter == null) {
                    init();
                }
            }

            // Push the current patterns on every call so that setIncludes/setExcludes
            // invoked after initialization still take effect.
            // invoke setIncludes(String); must use the String overload as invoking with String[] does not work
            ObjectHelper.invokeMethod(includesMethod, filter, collectionAsCommaDelimitedString(includes));

            // invoke setExcludes(String); must use the String overload as invoking with String[] does not work
            ObjectHelper.invokeMethod(excludesMethod, filter, collectionAsCommaDelimitedString(excludes));

            // invoke acceptPathName(String)
            String path = file.getRelativeFilePath();
            return (Boolean) ObjectHelper.invokeMethod(acceptsMethod, filter, path);

        } catch (NoSuchMethodException e) {
            throw new TypeNotPresentException(ANTPATHMATCHER_CLASSNAME, e);
        }
    }

    private void init() throws NoSuchMethodException {
        // we must use reflection to invoke the AntPathMatcherFileFilter that resides in camel-spring.jar
        // and we don't want camel-core to have a runtime dependency on camel-spring.jar.
        // use the class resolver from CamelContext to ensure it works with OSGi as well.
        // Class<?> instead of the raw Class removes the need for @SuppressWarnings("unchecked").
        Class<?> clazz = context.getClassResolver().resolveClass(ANTPATHMATCHER_CLASSNAME);
        ObjectHelper.notNull(clazz, ANTPATHMATCHER_CLASSNAME + " not found in classpath. camel-spring.jar is required in the classpath.");

        filter = ObjectHelper.newInstance(clazz);

        includesMethod = filter.getClass().getMethod("setIncludes", String.class);
        excludesMethod = filter.getClass().getMethod("setExcludes", String.class);
        acceptsMethod = filter.getClass().getMethod("acceptPathName", String.class);
    }

    public String[] getExcludes() {
        return excludes;
    }

    public void setExcludes(String[] excludes) {
        this.excludes = excludes;
    }

    public String[] getIncludes() {
        return includes;
    }

    public void setIncludes(String[] includes) {
        this.includes = includes;
    }

    /**
     * Sets excludes using a single string where each element can be separated with comma
     */
    public void setExcludes(String excludes) {
        setExcludes(excludes.split(","));
    }

    /**
     * Sets includes using a single string where each element can be separated with comma
     */
    public void setIncludes(String includes) {
        setIncludes(includes.split(","));
    }

    public void setCamelContext(CamelContext camelContext) {
        this.context = camelContext;
    }

    public CamelContext getCamelContext() {
        return context;
    }
}
| apache-2.0 |
araqne/logdb | araqne-logdb-client/src/main/java/org/araqne/logdb/client/package-info.java | 971 | /**
* Copyright (C) 2014 Eediom, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* 이 패키지는 로그프레소를 제어하는데 필요한 클라이언트 API를 제공합니다. LogDbClient 클래스 인스턴스를 생성하여 쿼리를 실행하거나, 로그 수집 설정, 테이블, 인덱스 설정 등 모든 관리적 작업을 수행할 수 있습니다.
*
* @author xeraph@eediom.com
*
*/
package org.araqne.logdb.client;
| apache-2.0 |
EBISPOT/goci | goci-tools/goci-utils/src/main/java/uk/ac/ebi/spot/goci/service/junidecode/X0f.java | 6446 | /*
* Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.spot.goci.service.junidecode;
/**
 * Character map for Unicode characters with codepoint U+0Fxx (the Unicode
 * Tibetan block). The array index is the low byte of the codepoint; the value
 * is the ASCII transliteration, "[?]" for characters with no mapping, or ""
 * where the character is dropped.
 *
 * NOTE(review): the table holds entries for 0x00 through 0xFE only (255
 * entries); indexing it with 0xFF would throw
 * ArrayIndexOutOfBoundsException — presumably the lookup code never passes
 * U+0FFF; confirm at the call site.
 *
 * @author Giuseppe Cardone
 * @version 0.1
 */
class X0f {

    /** Transliteration table indexed by the low byte of the codepoint. */
    public static final String[] map = new String[]{
        "AUM", // 0x00
        "", // 0x01
        "", // 0x02
        "", // 0x03
        "", // 0x04
        "", // 0x05
        "", // 0x06
        "", // 0x07
        " // ", // 0x08
        " * ", // 0x09
        "", // 0x0a
        "-", // 0x0b
        " / ", // 0x0c
        " / ", // 0x0d
        " // ", // 0x0e
        " -/ ", // 0x0f
        " +/ ", // 0x10
        " X/ ", // 0x11
        " /XX/ ", // 0x12
        " /X/ ", // 0x13
        ", ", // 0x14
        "", // 0x15
        "", // 0x16
        "", // 0x17
        "", // 0x18
        "", // 0x19
        "", // 0x1a
        "", // 0x1b
        "", // 0x1c
        "", // 0x1d
        "", // 0x1e
        "", // 0x1f
        "0", // 0x20
        "1", // 0x21
        "2", // 0x22
        "3", // 0x23
        "4", // 0x24
        "5", // 0x25
        "6", // 0x26
        "7", // 0x27
        "8", // 0x28
        "9", // 0x29
        ".5", // 0x2a
        "1.5", // 0x2b
        "2.5", // 0x2c
        "3.5", // 0x2d
        "4.5", // 0x2e
        "5.5", // 0x2f
        "6.5", // 0x30
        "7.5", // 0x31
        "8.5", // 0x32
        "-.5", // 0x33
        "+", // 0x34
        "*", // 0x35
        "^", // 0x36
        "_", // 0x37
        "", // 0x38
        "~", // 0x39
        "[?]", // 0x3a
        "]", // 0x3b
        "[[", // 0x3c
        "]]", // 0x3d
        "", // 0x3e
        "", // 0x3f
        "k", // 0x40
        "kh", // 0x41
        "g", // 0x42
        "gh", // 0x43
        "ng", // 0x44
        "c", // 0x45
        "ch", // 0x46
        "j", // 0x47
        "[?]", // 0x48
        "ny", // 0x49
        "tt", // 0x4a
        "tth", // 0x4b
        "dd", // 0x4c
        "ddh", // 0x4d
        "nn", // 0x4e
        "t", // 0x4f
        "th", // 0x50
        "d", // 0x51
        "dh", // 0x52
        "n", // 0x53
        "p", // 0x54
        "ph", // 0x55
        "b", // 0x56
        "bh", // 0x57
        "m", // 0x58
        "ts", // 0x59
        "tsh", // 0x5a
        "dz", // 0x5b
        "dzh", // 0x5c
        "w", // 0x5d
        "zh", // 0x5e
        "z", // 0x5f
        "\'", // 0x60
        "y", // 0x61
        "r", // 0x62
        "l", // 0x63
        "sh", // 0x64
        "ssh", // 0x65
        "s", // 0x66
        "h", // 0x67
        "a", // 0x68
        "kss", // 0x69
        "r", // 0x6a
        "[?]", // 0x6b
        "[?]", // 0x6c
        "[?]", // 0x6d
        "[?]", // 0x6e
        "[?]", // 0x6f
        "[?]", // 0x70
        "aa", // 0x71
        "i", // 0x72
        "ii", // 0x73
        "u", // 0x74
        "uu", // 0x75
        "R", // 0x76
        "RR", // 0x77
        "L", // 0x78
        "LL", // 0x79
        "e", // 0x7a
        "ee", // 0x7b
        "o", // 0x7c
        "oo", // 0x7d
        "M", // 0x7e
        "H", // 0x7f
        "i", // 0x80
        "ii", // 0x81
        "", // 0x82
        "", // 0x83
        "", // 0x84
        "", // 0x85
        "", // 0x86
        "", // 0x87
        "", // 0x88
        "", // 0x89
        "", // 0x8a
        "", // 0x8b
        "[?]", // 0x8c
        "[?]", // 0x8d
        "[?]", // 0x8e
        "[?]", // 0x8f
        "k", // 0x90
        "kh", // 0x91
        "g", // 0x92
        "gh", // 0x93
        "ng", // 0x94
        "c", // 0x95
        "ch", // 0x96
        "j", // 0x97
        "[?]", // 0x98
        "ny", // 0x99
        "tt", // 0x9a
        "tth", // 0x9b
        "dd", // 0x9c
        "ddh", // 0x9d
        "nn", // 0x9e
        "t", // 0x9f
        "th", // 0xa0
        "d", // 0xa1
        "dh", // 0xa2
        "n", // 0xa3
        "p", // 0xa4
        "ph", // 0xa5
        "b", // 0xa6
        "bh", // 0xa7
        "m", // 0xa8
        "ts", // 0xa9
        "tsh", // 0xaa
        "dz", // 0xab
        "dzh", // 0xac
        "w", // 0xad
        "zh", // 0xae
        "z", // 0xaf
        "\'", // 0xb0
        "y", // 0xb1
        "r", // 0xb2
        "l", // 0xb3
        "sh", // 0xb4
        "ss", // 0xb5
        "s", // 0xb6
        "h", // 0xb7
        "a", // 0xb8
        "kss", // 0xb9
        "w", // 0xba
        "y", // 0xbb
        "r", // 0xbc
        "[?]", // 0xbd
        "X", // 0xbe
        " :X: ", // 0xbf
        " /O/ ", // 0xc0
        " /o/ ", // 0xc1
        " \\o\\ ", // 0xc2
        " (O) ", // 0xc3
        "", // 0xc4
        "", // 0xc5
        "", // 0xc6
        "", // 0xc7
        "", // 0xc8
        "", // 0xc9
        "", // 0xca
        "", // 0xcb
        "", // 0xcc
        "[?]", // 0xcd
        "[?]", // 0xce
        "", // 0xcf
        "[?]", // 0xd0
        "[?]", // 0xd1
        "[?]", // 0xd2
        "[?]", // 0xd3
        "[?]", // 0xd4
        "[?]", // 0xd5
        "[?]", // 0xd6
        "[?]", // 0xd7
        "[?]", // 0xd8
        "[?]", // 0xd9
        "[?]", // 0xda
        "[?]", // 0xdb
        "[?]", // 0xdc
        "[?]", // 0xdd
        "[?]", // 0xde
        "[?]", // 0xdf
        "[?]", // 0xe0
        "[?]", // 0xe1
        "[?]", // 0xe2
        "[?]", // 0xe3
        "[?]", // 0xe4
        "[?]", // 0xe5
        "[?]", // 0xe6
        "[?]", // 0xe7
        "[?]", // 0xe8
        "[?]", // 0xe9
        "[?]", // 0xea
        "[?]", // 0xeb
        "[?]", // 0xec
        "[?]", // 0xed
        "[?]", // 0xee
        "[?]", // 0xef
        "[?]", // 0xf0
        "[?]", // 0xf1
        "[?]", // 0xf2
        "[?]", // 0xf3
        "[?]", // 0xf4
        "[?]", // 0xf5
        "[?]", // 0xf6
        "[?]", // 0xf7
        "[?]", // 0xf8
        "[?]", // 0xf9
        "[?]", // 0xfa
        "[?]", // 0xfb
        "[?]", // 0xfc
        "[?]", // 0xfd
        "[?]" // 0xfe
    };
}
| apache-2.0 |
radut/Podcast-Server | src/test/java/lan/dk/podcastserver/manager/worker/updater/RSSUpdaterTest.java | 3284 | package lan.dk.podcastserver.manager.worker.updater;
import lan.dk.podcastserver.entity.Item;
import lan.dk.podcastserver.entity.Podcast;
import lan.dk.podcastserver.service.ImageService;
import lan.dk.podcastserver.service.JdomService;
import lan.dk.podcastserver.service.SignatureService;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Set;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.AdditionalMatchers.not;
import static org.mockito.Mockito.*;
/**
 * Created by kevin on 28/06/15 for Podcast Server
 * <p>
 * Unit tests for RSSUpdater. The JdomService mock serves a bundled copy of the
 * "Appload" RSS feed for the known podcast URL and throws a JDOMException for
 * any other URL, so no network access is required.
 */
@RunWith(MockitoJUnitRunner.class)
public class RSSUpdaterTest {

    // Classpath location of the RSS fixture treated as the remote feed.
    public static final String PODCAST_URL = "/remote/podcast/rss.appload.xml";
    // A URL the mocked parser is configured to reject.
    public static final String MOCK_URL = "http://mockUrl.com/";
    // Podcast under test; re-created in beforeEach().
    public static Podcast RSS_APPLOAD;

    @Captor ArgumentCaptor<String> urlArgumentCaptor;

    @Mock SignatureService signatureService;
    @Mock JdomService jdomService;
    @Mock ImageService imageService;
    @InjectMocks RSSUpdater rssUpdater;

    @Before
    public void beforeEach() throws JDOMException, IOException {
        RSS_APPLOAD = new Podcast().setUrl(PODCAST_URL);

        // Parse the bundled fixture for the known URL...
        when(jdomService.jdom2Parse(eq(PODCAST_URL)))
                .then(invocationOnMock -> new SAXBuilder().build(Paths.get(RSSUpdaterTest.class.getResource(PODCAST_URL).toURI()).toFile()));
        // ...and fail for any other URL.
        when(jdomService.jdom2Parse(not(eq(PODCAST_URL))))
                .thenThrow(new JDOMException());
    }

    @Test
    public void should_get_items() throws JDOMException, IOException {
        /* When */ Set<Item> items = rssUpdater.getItems(RSS_APPLOAD);

        /* Then */
        verify(jdomService, times(1)).jdom2Parse(urlArgumentCaptor.capture());
        assertThat(urlArgumentCaptor.getValue()).isEqualTo(PODCAST_URL);
        // 217 is the number of items contained in the fixture file.
        assertThat(items).hasSize(217);
    }

    @Test
    public void should_return_null_if_not_updatable_podcast() {
        // The mock throws JDOMException for this URL; getItems must swallow it
        // and yield an empty set rather than propagating.
        /* Given */ Podcast podcastNotUpdatable = new Podcast().setUrl("http://notUpdatable.com");
        /* When */ Set<Item> items = rssUpdater.getItems(podcastNotUpdatable);
        /* Then */ assertThat(items).isEmpty();
    }

    @Test
    public void should_return_an_empty_set() {
        /* Given */ Podcast podcast = new Podcast().setUrl(MOCK_URL);
        /* When */ Set<Item> items = rssUpdater.getItems(podcast);
        /* Then */ assertThat(items).isEmpty();
    }

    @Test
    public void should_call_signature_from_url() {
        // The signature must be computed from the podcast's URL.
        /* Given */
        /* When */ rssUpdater.signatureOf(RSS_APPLOAD);
        /* Then */
        verify(signatureService, times(1)).generateSignatureFromURL(urlArgumentCaptor.capture());
        assertThat(urlArgumentCaptor.getValue()).isEqualTo(PODCAST_URL);
    }

    @Test
    public void should_return_his_type() {
        AbstractUpdater.Type type = rssUpdater.type();
        assertThat(type.key()).isEqualTo("RSS");
        assertThat(type.name()).isEqualTo("RSS");
    }
}
sparseware/ccp-bellavista | shared/com/sparseware/bellavista/ActionPath.java | 5300 | /*
* Copyright (C) SparseWare Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sparseware.bellavista;
import com.appnativa.util.CharScanner;
import com.appnativa.util.Helper;
import java.util.ArrayList;
import java.util.List;
/**
 * The class manages a path through the application. It is meant to be used to
 * programmatically move through the application. When fully implemented all
 * primary screens in the application will have a unique path and will be able
 * to be navigated to, programmatically, via its path.
 *
 * @author Don DeCoteau
 *
 */
public class ActionPath extends ArrayList<String> {
    /** Arbitrary payload that travels with the path; may be null. */
    protected Object linkedData;

    /**
     * Creates a new empty path.
     */
    public ActionPath() {
        super(5);
    }

    /**
     * Creates a new path containing the specified segments.
     *
     * @param segments the segments to add; ignored when null
     */
    public ActionPath(List<String> segments) {
        super(5);

        if (segments != null) {
            addAll(segments);
        }
    }

    /**
     * Creates a new path from the specified segments.
     *
     * If a single segment is supplied it is parsed for sub-segments separated
     * by a forward slash; otherwise the segments are added as is.
     *
     * @param segments the segments to add; ignored when null
     */
    public ActionPath(String... segments) {
        super(5);
        addSegments(segments);
    }

    /**
     * Adds segments to this path. When exactly one segment is supplied and it
     * contains a forward slash, it is split on '/' into multiple segments;
     * otherwise the segments are appended verbatim.
     *
     * @param segments the segments to add; ignored when null or empty
     */
    public void addSegments(String... segments) {
        if ((segments == null) || (segments.length == 0)) {
            return;
        }

        if (segments.length == 1) {
            String single = segments[0];

            if (single.indexOf('/') == -1) {
                add(single);
            } else {
                CharScanner.getTokens(single, '/', false, this);
            }
        } else {
            for (String segment : segments) {
                add(segment);
            }
        }
    }

    @Override
    public Object clone() {
        return copy();
    }

    /**
     * Returns a new path holding the same segments as this one.
     *
     * @return a copy of this path
     */
    public ActionPath copy() {
        return new ActionPath(this);
    }

    /**
     * Gets any data associated with this path.
     *
     * @return the associated data, or null
     */
    public Object getLinkedData() {
        return linkedData;
    }

    /**
     * Peeks at the last segment of the path without removing it.
     *
     * @return the last segment, or null when the path is empty
     */
    public String peek() {
        return isEmpty() ? null : get(size() - 1);
    }

    /**
     * Removes and returns the last segment of the path.
     *
     * @return the last segment, or null when the path is empty
     */
    public String pop() {
        return isEmpty() ? null : remove(size() - 1);
    }

    /**
     * Appends a segment to the end of the path.
     *
     * @param segment the segment to append
     */
    public void push(String segment) {
        add(segment);
    }

    /**
     * Sets the data associated with this path.
     *
     * @param linkedData the data
     */
    public void setLinkedData(Object linkedData) {
        this.linkedData = linkedData;
    }

    /**
     * Removes and returns the first segment of the path.
     *
     * @return the first segment, or null when the path is empty
     */
    public String shift() {
        return isEmpty() ? null : remove(0);
    }

    /**
     * Peeks at the first segment of the path without removing it.
     *
     * @return the first segment, or null when the path is empty
     */
    public String shiftPeek() {
        return isEmpty() ? null : get(0);
    }

    @Override
    public String toString() {
        return Helper.toString(this, "/");
    }

    /**
     * Prepends a segment to the beginning of the path.
     *
     * @param segment the segment to prepend
     */
    public void unshift(String segment) {
        add(0, segment);
    }

    /**
     * Parses the specified string into an action path, splitting on '/'.
     *
     * @param path the string representing the path
     *
     * @return the new path
     */
    public static ActionPath fromString(String path) {
        ActionPath result = new ActionPath();

        CharScanner.getTokens(path, '/', false, result);

        return result;
    }

    /**
     * An interface for UI handlers that support action paths.
     *
     * @author Don DeCoteau
     *
     */
    interface iActionPathSupporter {
        /**
         * Gets a path for the information currently displayed by the supporter.
         *
         * @return a path for the currently displayed information
         */
        ActionPath getDisplayedActionPath();

        /**
         * Called on an active supporter to handle the specified path.
         *
         * @param path a path that is relative to the supporter
         */
        void handleActionPath(ActionPath path);
    }
}
| apache-2.0 |
VHAINNOVATIONS/ASRCM | srcalc/src/main/java/gov/va/med/srcalc/web/view/admin/SummaryReportRow.java | 5188 | package gov.va.med.srcalc.web.view.admin;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Objects;

import gov.va.med.srcalc.domain.calculation.HistoricalCalculation;
import gov.va.med.srcalc.domain.calculation.SignedResult;

import org.joda.time.DateTime;

import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
/**
 * Encapsulates a single row of data for a Summary Report: one risk-model
 * outcome from a {@link SignedResult}. Immutable.
 */
public final class SummaryReportRow
{
    /**
     * Since {@link SignedResult} is immutable, just store a reference to it instead of
     * copying the properties.
     */
    private final SignedResult fSignedResult;

    /**
     * The name of the risk model for this particular row. This value must be a key in
     * SignedResult.outcomes.
     */
    private final String fRiskModelName;

    /**
     * Constructs an instance representing the given result and a particular outcome
     * within that result.
     * @param signedResult the result to represent
     * @param riskModelName the risk model name of the outcome to represent
     * @throws NullPointerException if signedResult is null
     * @throws IllegalArgumentException if there is no outcome for the given name
     */
    public SummaryReportRow(final SignedResult signedResult, final String riskModelName)
    {
        fSignedResult = signedResult;
        fRiskModelName = riskModelName;
        // Fail-fast on a bad model name.
        if (!fSignedResult.getOutcomes().containsKey(fRiskModelName))
        {
            throw new IllegalArgumentException(
                    "Given riskModelName must be present in SignedResult.");
        }
    }

    /**
     * Makes a {@link SummaryReportRow} for each outcome in the given SignedResult.
     * @return a list in case-insensitive alphabetical order by risk model name
     */
    public static ImmutableList<SummaryReportRow> fromSignedResult(
            final SignedResult result)
    {
        // Sort a copy of the key set rather than building an ImmutableSortedSet
        // ordered by CASE_INSENSITIVE_ORDER: a sorted set whose comparator is
        // inconsistent with equals treats names differing only in case as
        // duplicates and would silently drop outcomes, breaking the
        // "a row for each outcome" contract.
        final ArrayList<String> modelNames =
                new ArrayList<>(result.getOutcomes().keySet());
        Collections.sort(modelNames, String.CASE_INSENSITIVE_ORDER);
        final ArrayList<SummaryReportRow> rows = new ArrayList<>();
        for (final String modelName : modelNames)
        {
            rows.add(new SummaryReportRow(result, modelName));
        }
        return ImmutableList.copyOf(rows);
    }

    /**
     * Returns the result's associated CPT code. Will be an empty string if the result
     * had no associated CPT code.
     * @return never null
     * @see SignedResult#getCptCode()
     */
    public String getCptCode()
    {
        return fSignedResult.getCptCode().or("");
    }

    /**
     * Returns the result's associated Specialty name.
     * @return never null
     * @see HistoricalCalculation#getSpecialtyName()
     */
    public String getSpecialtyName()
    {
        return fSignedResult.getHistoricalCalculation().getSpecialtyName();
    }

    /**
     * Returns the result's associated station number.
     * @return never null
     * @see HistoricalCalculation#getUserStation()
     */
    public String getUserStation()
    {
        return fSignedResult.getHistoricalCalculation().getUserStation();
    }

    /**
     * Returns the Provider Type of the user who ran the calculation. An absent Provider
     * Type is represented by an empty string.
     * @see HistoricalCalculation#getProviderType()
     */
    public String getProviderType()
    {
        return fSignedResult.getHistoricalCalculation().getProviderType().or("");
    }

    /**
     * Returns the Result's signature timestamp.
     * @return never null
     * @see SignedResult#getSignatureTimestamp()
     */
    public DateTime getSignatureTimestamp()
    {
        return fSignedResult.getSignatureTimestamp();
    }

    /**
     * Returns the Risk Model Name of the outcome this object represents.
     * @return never null
     * @see SignedResult#getOutcomes()
     */
    public String getRiskModelName()
    {
        return fRiskModelName;
    }

    /**
     * Returns the actual risk result of the outcome this object represents.
     * @see SignedResult#getOutcomes()
     */
    public float getOutcome()
    {
        return fSignedResult.getOutcomes().get(fRiskModelName);
    }

    @Override
    public String toString()
    {
        return MoreObjects.toStringHelper(this)
                .add("signedResult", fSignedResult)
                .add("riskModel", fRiskModelName)
                .toString();
    }

    /**
     * Returns true if the given object is also a SummaryReportRow for the same
     * SignedResult and outcome, false otherwise.
     */
    @Override
    public boolean equals(final Object obj)
    {
        if (obj instanceof SummaryReportRow)
        {
            final SummaryReportRow other = (SummaryReportRow)obj;
            return Objects.equals(this.fSignedResult, other.fSignedResult) &&
                    Objects.equals(this.fRiskModelName, other.fRiskModelName);
        }
        return false;
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(fSignedResult, fRiskModelName);
    }
}
| apache-2.0 |
deleidos/digitaledge-platform | commons-core/src/main/java/com/deleidos/rtws/commons/dao/type/sql/RealHandler.java | 14251 | /**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright {yyyy} {name of copyright owner}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deleidos.rtws.commons.dao.type.sql;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import com.deleidos.rtws.commons.dao.exception.DataAccessException;
import com.deleidos.rtws.commons.dao.exception.DataRetrievalException;
import com.deleidos.rtws.commons.dao.exception.DataStorageException;
/**
 * {@code SqlTypeHandler} for the SQL {@code REAL} type, exposed to callers as
 * {@link Number}. SQL NULL is mapped to Java {@code null} in both directions.
 */
public class RealHandler extends SqlTypeHandler<Number> {

	/** Registers this handler under the type name "REAL" with target type Number. */
	public RealHandler() {
		super("REAL", Number.class);
	}

	/**
	 * Reads a REAL column by name.
	 * @return the boxed column value, or null when the column was SQL NULL
	 */
	@Override
	public Number get(ResultSet object, String field) {
		try {
			final double value = object.getDouble(field);
			// getDouble() yields 0.0 for SQL NULL, so wasNull() disambiguates.
			return object.wasNull() ? null : Double.valueOf(value);
		} catch (SQLException e) {
			throw new DataRetrievalException(e);
		} catch (Exception e) {
			throw new DataAccessException("Unexpected error.", e);
		}
	}

	/**
	 * Writes a value into an updatable result set by column name;
	 * null updates the column to SQL NULL.
	 */
	@Override
	public void set(ResultSet record, String field, Number value) {
		try {
			if (value != null) {
				record.updateDouble(field, value.doubleValue());
			} else {
				record.updateNull(field);
			}
		} catch (SQLException e) {
			throw new DataStorageException(e);
		} catch (Exception e) {
			throw new DataAccessException("Unexpected error.", e);
		}
	}

	/**
	 * Reads a REAL column by 1-based index.
	 * @return the boxed column value, or null when the column was SQL NULL
	 */
	@Override
	public Number get(ResultSet object, int field) {
		try {
			final double value = object.getDouble(field);
			// Same NULL disambiguation as the by-name variant.
			return object.wasNull() ? null : Double.valueOf(value);
		} catch (SQLException e) {
			throw new DataRetrievalException(e);
		} catch (Exception e) {
			throw new DataAccessException("Unexpected error.", e);
		}
	}

	/**
	 * Writes a value into an updatable result set by 1-based column index;
	 * null updates the column to SQL NULL.
	 */
	@Override
	public void set(ResultSet record, int field, Number value) {
		try {
			if (value != null) {
				record.updateDouble(field, value.doubleValue());
			} else {
				record.updateNull(field);
			}
		} catch (SQLException e) {
			throw new DataStorageException(e);
		} catch (Exception e) {
			throw new DataAccessException("Unexpected error.", e);
		}
	}

	/**
	 * Binds a value to a prepared-statement parameter by 1-based index;
	 * null binds SQL NULL.
	 */
	@Override
	public void set(PreparedStatement record, int field, Number value) {
		try {
			if (value != null) {
				record.setDouble(field, value.doubleValue());
			} else {
				// NOTE(review): uses Types.NUMERIC rather than Types.REAL as the
				// null marker; kept as-is to preserve behavior -- confirm intent.
				record.setNull(field, Types.NUMERIC);
			}
		} catch (SQLException e) {
			throw new DataStorageException(e);
		} catch (Exception e) {
			throw new DataAccessException("Unexpected error.", e);
		}
	}
}
| apache-2.0 |
SAP/openui5 | src/testsuite-utils/src/main/java/com/sap/openui5/ConcatFilter.java | 8337 | package com.sap.openui5;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.GenericServlet;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
/**
* The class <code>ConcatFilter</code> is used to concatenate files like
* sap-ui-core.js, sap-ui-core-nojQuery.js and sap-ui-debug.js.
* <p>
* <i>This class must not be used in productive systems.</i>
*
* @author Peter Muessig
*/
public class ConcatFilter implements Filter {

  /** default prefix for the classpath */
  private static final String CLASSPATH_PREFIX = "META-INF";

  /** bootstrap statement appended after the merged core modules */
  private static final String BOOT_SCRIPT =
      "sap.ui.requireSync(\"sap/ui/core/Core\"); sap.ui.getCore().boot && sap.ui.getCore().boot();";

  /** filter configuration */
  private FilterConfig config;

  /* (non-Javadoc)
   * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
   */
  @Override
  public void init(FilterConfig filterConfig) throws ServletException {
    // keep the filter configuration
    this.config = filterConfig;
  } // method: init

  /* (non-Javadoc)
   * @see javax.servlet.Filter#destroy()
   */
  @Override
  public void destroy() {
    this.config = null;
  } // method: destroy

  /* (non-Javadoc)
   * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
   */
  @Override
  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
    // only http request/response pairs are handled; everything else passes through
    if (request instanceof HttpServletRequest && response instanceof HttpServletResponse) {
      HttpServletRequest httpRequest = (HttpServletRequest) request;
      HttpServletResponse httpResponse = (HttpServletResponse) response;
      String method = httpRequest.getMethod().toUpperCase(); // NOSONAR
      String path = httpRequest.getServletPath() + httpRequest.getPathInfo();
      // only process GET or HEAD requests
      if (method.matches("GET|HEAD")) {
        if ("/resources/sap-ui-core.js".equals(path)) {
          this.log("Merging module: sap-ui-core.js");
          this.serveMergedCore(httpResponse, path, method, true);
          return;
        } else if ("/resources/sap-ui-core-nojQuery.js".equals(path)) {
          this.log("Merging module: sap-ui-core-nojQuery.js");
          this.serveMergedCore(httpResponse, path, method, false);
          return;
        } else if ("/resources/sap-ui-debug.js".equals(path)) {
          this.log("Merging module: sap-ui-debug.js");
          this.prepareResponse(httpResponse, path);
          if ("GET".equals(method)) {
            OutputStream os = httpResponse.getOutputStream();
            this.writeResources(os,
                "/resources/sap/ui/debug/ControlTree.js",
                "/resources/sap/ui/debug/Highlighter.js",
                "/resources/sap/ui/debug/LogViewer.js",
                "/resources/sap/ui/debug/PropertyList.js",
                "/resources/sap/ui/debug/DebugEnv.js");
            os.flush();
            os.close();
          }
          return;
        }
      }
    }
    // proceed in the filter chain
    chain.doFilter(request, response);
  } // method: doFilter

  /**
   * Sets the content type (derived from the path) and a current
   * "Last-Modified" header on the response.
   * @param response the response to prepare
   * @param path the request path used for MIME-type lookup
   */
  private void prepareResponse(HttpServletResponse response, String path) {
    response.setContentType(this.config.getServletContext().getMimeType(path));
    response.addDateHeader("Last-Modified", System.currentTimeMillis());
  } // method: prepareResponse

  /**
   * Writes the merged sap-ui-core bundle followed by the bootstrap statement.
   * For HEAD requests only the headers are written.
   * @param response the response to write to
   * @param path the request path (for MIME-type lookup)
   * @param method the HTTP method ("GET" or "HEAD")
   * @param withJQuery whether the legacy fallback includes the bundled jQuery modules
   */
  private void serveMergedCore(HttpServletResponse response, String path, String method, boolean withJQuery) throws IOException {
    this.prepareResponse(response, path);
    if (!"GET".equals(method)) {
      return; // HEAD: headers only, no body
    }
    OutputStream os = response.getOutputStream();
    // newer runtimes ship the evolved loader; otherwise fall back to the legacy modules
    if (this.findResource("/resources/ui5loader.js") != null) {
      this.writeResources(os,
          "/resources/ui5loader.js",
          "/resources/ui5loader-autoconfig.js");
    } else if (withJQuery) {
      this.writeResources(os,
          "/resources/sap/ui/thirdparty/jquery.js",
          "/resources/sap/ui/thirdparty/jqueryui/jquery-ui-position.js",
          "/resources/sap/ui/Device.js",
          "/resources/sap/ui/thirdparty/URI.js",
          "/resources/jquery.sap.global.js");
    } else {
      this.writeResources(os,
          "/resources/sap/ui/Device.js",
          "/resources/sap/ui/thirdparty/URI.js",
          "/resources/jquery.sap.global.js");
    }
    IOUtils.write(BOOT_SCRIPT, os, "UTF-8");
    // bug fix: the original called IOUtils.closeQuietly(os) and then invoked
    // flush()/close() on the already-closed stream; flush before close instead
    os.flush();
    os.close();
  } // method: serveMergedCore

  /**
   * Concatenates the given resources onto the output stream, in order.
   * @param os the target stream
   * @param paths the resource paths to merge
   */
  private void writeResources(OutputStream os, String... paths) throws IOException {
    for (String resourcePath : paths) {
      IOUtils.write(this.loadResource(resourcePath), os, "UTF-8");
    }
  } // method: writeResources

  /**
   * logs the message prepended by the filter name (copy of {@link GenericServlet#log(String)})
   * @param msg the message
   */
  private void log(String msg) {
    this.config.getServletContext().log(this.config.getFilterName() + ": "+ msg);
  } // method: log

  /**
   * logs the message and <code>Throwable</code> prepended by the filter name (copy of {@link GenericServlet#log(String, Throwable)})
   * @param msg the message
   * @param t the <code>Throwable</code>
   */
  @SuppressWarnings("unused")
  private void log(String msg, Throwable t) {
    this.config.getServletContext().log(this.config.getFilterName() + ": "+ msg, t);
  } // method: log

  /**
   * loads a resource for the specified path; a trailing newline is appended
   * so concatenated modules never run together on one line
   * @param path path of the resource
   * @return the resource content plus a newline ("null\n" if the resource was not found,
   *         matching the original behavior)
   */
  public String loadResource(String path) throws IOException {
    String content = null;
    URL resource = this.findResource(path);
    if (resource != null) {
      InputStream is = resource.openStream();
      content = IOUtils.toString(is, "UTF-8");
      IOUtils.closeQuietly(is);
    }
    return content + "\n";
  } // method: loadResource

  /**
   * finds the resource for the given path: first in the web context, then in
   * the context classloader, finally in the local classloader
   * @param path path of the resource
   * @return URL to the resource or null
   * @throws MalformedURLException
   */
  private URL findResource(String path) throws MalformedURLException {
    // normalize the path (JarURLConnection cannot resolve non-normalized paths)
    String normalizedPath = URI.create(path).normalize().toString();
    // define the classpath for the classloader lookup
    String classPath = CLASSPATH_PREFIX + normalizedPath;
    // first lookup the resource in the web context path
    URL url = this.config.getServletContext().getResource(normalizedPath);
    // lookup the resource in the current threads classloaders
    if (url == null) {
      url = Thread.currentThread().getContextClassLoader().getResource(classPath);
    }
    // lookup the resource in the local classloader
    if (url == null) {
      url = ResourceServlet.class.getClassLoader().getResource(classPath);
    }
    return url;
  } // method: findResource

} // class: ConcatFilter
philyum/pocketpet | src/com/spriteproject/pocketpet/main/gameobjects/GameObject.java | 5156 | package com.spriteproject.pocketpet.main.gameobjects;
import org.json.JSONException;
import org.json.JSONObject;
import android.graphics.BitmapFactory;
import android.util.Log;
import com.spriteproject.pocketpet.R;
import com.spriteproject.pocketpet.assets.Constants;
import com.spriteproject.pocketpet.assets.Constants.Type;
import com.spriteproject.pocketpet.assets.Screen;
import com.spriteproject.pocketpet.main.MainView;
import com.spriteproject.pocketpet.main.Object;
/**
 * Base class for all draggable on-screen game objects (pets, food, eggs, ...).
 * Binds a sprite-sheet bitmap, supports JSON save/restore of position and id,
 * and bounces off the screen edges.
 */
public abstract class GameObject extends Object {

	/**
	 * Creates a game object at the given position from a drawable resource id.
	 * @param view the surface view that owns and renders this object
	 * @param id drawable resource id of the sprite sheet
	 * @param x initial x position (sprite center)
	 * @param y initial y position (sprite center)
	 */
	protected GameObject(MainView view, int id, float x, float y) {
		this.view = view;
		// Bitmap and sprite animation values
		this.posX = x;
		this.posY = y;
		this.id = id;
		init();
		setBitmapParameters(id);
		setSpriteDimensions();
		// Add to surface view
		view.Add(this);
		Log.d(TAG, "Created!");
	}

	/**
	 * Restores a game object from its saved JSON state.
	 * @param view the surface view that owns and renders this object
	 * @param obj saved state produced by {@link #getObjectData()}
	 * @param time elapsed time since the state was saved, added to lifeTime
	 * @throws JSONException if a required key is missing or of the wrong type
	 */
	protected GameObject(MainView view, JSONObject obj, float time) throws JSONException {
		this.view = view;
		init();
		// Bitmap and sprite animation values
		restoreObjectData(obj, time);
		// Add to surface view
		view.Add(this);
		Log.d(TAG, "Created!");
	}

	/** Sets the defaults shared by both constructors (touch + movement state). */
	private void init() {
		// Touch values
		draggable = true;
		renderLayer = 2;
		touched = false;
		// Movement values
		velX = 0;
		velY = 0;
	}

	/**
	 * Restores position, id and sprite parameters from saved JSON state.
	 * @param obj saved state produced by {@link #getObjectData()}
	 * @param time elapsed time since saving, credited to lifeTime unless paused
	 */
	public void restoreObjectData(JSONObject obj, float time) throws JSONException {
		// Bug fix: positions are saved as floats by getObjectData(); the old
		// getLong() calls truncated the fractional part on every restore.
		posX = (float) obj.getDouble("posX");
		posY = (float) obj.getDouble("posY");
		id = obj.getInt("id");
		setBitmapParameters(id);
		setSpriteDimensions();
		if (!view.isGamePaused()) {
			lifeTime += time;
		}
	}

	/**
	 * Serializes the state needed to recreate this object.
	 * @return a JSON object with keys "id", "type", "posX", "posY"
	 */
	public JSONObject getObjectData() throws JSONException {
		JSONObject obj = new JSONObject();
		obj.put("id", id);
		obj.put("type", type.name());
		obj.put("posX", getPosX());
		obj.put("posY", getPosY());
		return obj;
	}

	/** No object-object collision response by default; subclasses override. */
	@Override
	protected void updateCollisionWithObjects(GameObject obj, float collisionDistance) {}

	/**
	 * Bounces the object off the screen edges by reflecting its velocity.
	 * The vertical bounds are inset by Constants.BUTTON_HEIGHT on both sides
	 * to keep objects clear of the button bars.
	 */
	@Override
	protected void updateCollisionWithWalls() {
		// Horizontal walls: only reflect when moving toward the wall, so the
		// object cannot get stuck oscillating inside it.
		if (velX < 0 && getPosX() - getSpriteWidthScaled() / 2f <= 0) {
			velX *= -1;
		}
		else if (velX > 0 && getPosX() + getSpriteWidthScaled() / 2f >= Screen.getWidth()) {
			velX *= -1;
		}
		// Vertical walls, inset by the button bar height.
		if (velY < 0 && getPosY() - getSpriteHeightScaled() / 2f - Constants.BUTTON_HEIGHT <= 0) {
			velY *= -1;
		}
		else if (velY > 0 && getPosY() + getSpriteHeightScaled() / 2f + Constants.BUTTON_HEIGHT >= Screen.getHeight()) {
			velY *= -1;
		}
	}

	/**
	 * Decodes the bitmap for the given drawable resource and configures the
	 * sprite-sheet layout (rows, frames per row, render scale) and gameplay
	 * type. Unknown ids are logged and leave the previous configuration.
	 * @param name drawable resource id of the sprite sheet
	 */
	protected void setBitmapParameters(int name) {
		id = name;
		bitmap = BitmapFactory.decodeResource(view.getResources(), name);
		Log.d(TAG, "Setting bitmap parameters...");
		switch (name) {
		// Pets (rendered at 2x scale).
		case R.drawable.baby_angel:
			sprite(1, 11, 2f, Type.ANGEL);
			break;
		case R.drawable.teen_angel:
		case R.drawable.ghost:
			sprite(1, 13, 2f, Type.ANGEL);
			break;
		case R.drawable.teen_duck:
			sprite(1, 10, 2f, Type.ANGEL);
			break;
		case R.drawable.adult_angel:
		case R.drawable.baby_human:
			sprite(1, 15, 2f, Type.ANGEL);
			break;
		case R.drawable.adult_duck:
			sprite(1, 9, 2f, Type.ANGEL);
			break;
		case R.drawable.adult_human:
			sprite(2, 6, 2f, Type.ANGEL);
			break;
		case R.drawable.adult_unhealthy:
			sprite(2, 4, 2f, Type.ANGEL);
			break;
		case R.drawable.secret_twins:
		case R.drawable.secret_unhealthy:
			sprite(2, 5, 2f, Type.ANGEL);
			break;
		case R.drawable.cat_sprites_blue:
		case R.drawable.cat_sprites_red:
			sprite(10, 4, 4f, Type.CAT);
			break;
		// Not pets
		case R.drawable.poo:
			sprite(1, 4, 2f, Type.POO);
			break;
		case R.drawable.food_oni:
			sprite(3, 3, 0.25f, Type.FOOD);
			break;
		case R.drawable.food_milk:
			sprite(4, 1, 0.50f, Type.FOOD);
			break;
		case R.drawable.egg_white:
		case R.drawable.egg_green:
			sprite(1, 2, 2f, Type.EGG);
			break;
		case R.drawable.pill:
			sprite(1, 1, 2f, Type.MED);
			break;
		case R.drawable.zzz_black:
		case R.drawable.zzz_white:
			sprite(1, 6, 1f, Type.OTHER);
			break;
		case R.drawable.skull:
			sprite(1, 2, 1f, Type.OTHER);
			break;
		default:
			Log.d(TAG, "Unknown enum value! " + name + ". Do nothing");
			break;
		}
	}

	/** Applies one sprite-sheet configuration in a single call. */
	private void sprite(int spriteRows, int spriteFrames, float spriteScale, Type spriteType) {
		rows = spriteRows;
		frames = spriteFrames;
		scale = spriteScale;
		type = spriteType;
	}
}
| apache-2.0 |
golo-lang/golo-netbeans | src/org/gololang/netbeans/structure/VariableStructureItem.java | 1528 | /*
* Copyright 2013 SERLI (www.serli.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.gololang.netbeans.structure;
import fr.insalyon.citi.golo.compiler.parser.ASTLetOrVar;
import java.util.List;
import org.netbeans.modules.csl.api.HtmlFormatter;
import org.netbeans.modules.csl.api.StructureItem;
import org.netbeans.modules.parsing.api.Source;
/**
*
* @author David Festal <david.festal@serli.com>
*/
/**
 * Structure item for a Golo {@code let}/{@code var} declaration node,
 * shown in the NetBeans navigator.
 */
public class VariableStructureItem extends GoloStructureItem<ASTLetOrVar> {

    /** Creates a structure item wrapping the given declaration node. */
    public VariableStructureItem(ASTLetOrVar node, Source source, List<? extends StructureItem> items) {
        super(node, source, items);
    }

    @Override
    public String getHtml(HtmlFormatter hf) {
        // The plain name is displayed without any extra markup.
        final String name = getName();
        return name;
    }

    @Override
    public GoloElementHandle createHandle(ASTLetOrVar node, Source source) {
        return new VariableElementHandle(node, source);
    }

    @Override
    public VariableElementHandle getElementHandle() {
        // The handle was created by createHandle(), so this cast is safe.
        final GoloElementHandle handle = super.getElementHandle();
        return (VariableElementHandle) handle;
    }
}
| apache-2.0 |
zhangda7/cointrade | src/main/java/com/spare/cointrade/model/HuobiAccount.java | 275 | package com.spare.cointrade.model;
import lombok.Data;
import java.util.List;
/**
 * Account summary as returned by the Huobi exchange API.
 * <p>
 * Created by dada on 2017/8/29.
 */
@Data
public class HuobiAccount {
// Account identifier assigned by Huobi.
private Long id;
// Account state string from the API -- presumably values like "working";
// confirm against the Huobi API documentation.
private String state;
// Account type string from the API -- exact values not visible here.
private String type;
// Sub-accounts (per-currency balances, presumably) belonging to this account.
private List<HuobiSubAccount> list;
}
| apache-2.0 |
AnimeROM/android_package_AnimeManager | src/com/animerom/filemanager/commands/DeleteFileExecutable.java | 267 | package com.animerom.filemanager.commands;
/**
 * An executable that deletes a file.
 */
public interface DeleteFileExecutable extends WritableExecutable {
/**
 * Returns the outcome of the delete operation -- presumably {@code true}
 * when the file was deleted; confirm against the
 * {@code WritableExecutable} contract.
 */
@Override
Boolean getResult();
}
| apache-2.0 |
yeastrc/msdapl | MS_LIBRARY/src/org/yeastrc/ms/domain/general/ExperimentSearchCriteria.java | 761 | /**
* ExperimentSearchCriteria.java
* @author Vagisha Sharma
* Aug 9, 2010
*/
package org.yeastrc.ms.domain.general;
import java.sql.Date;
import java.util.List;
/**
 * Criteria bean for searching experiments: an optional list of search
 * database IDs and an optional [startDate, endDate] window. A null field
 * presumably means "no restriction" -- confirm against the query code that
 * consumes this class.
 */
public class ExperimentSearchCriteria {
// IDs of the search databases the search is restricted to; may be null.
private List<Integer> searchDatabaseIds;
// NOTE: java.sql.Date is mutable and date-only (no time-of-day component).
private Date startDate;
private Date endDate;
/** Returns the database IDs (the internal list itself, not a copy); may be null. */
public List<Integer> getSearchDatabaseIds() {
return searchDatabaseIds;
}
/** Stores the given list by reference (no defensive copy). */
public void setSearchDatabaseIds(List<Integer> searchDatabaseIds) {
this.searchDatabaseIds = searchDatabaseIds;
}
/** Returns the lower bound of the date window; may be null. */
public Date getStartDate() {
return startDate;
}
public void setStartDate(Date startDate) {
this.startDate = startDate;
}
/** Returns the upper bound of the date window; may be null. */
public Date getEndDate() {
return endDate;
}
public void setEndDate(Date endDate) {
this.endDate = endDate;
}
}
| apache-2.0 |
ufoscout/jpattern | cache/src/test/java/com/jpattern/cache/simple/CacheTest.java | 4375 | package com.jpattern.cache.simple;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import com.jpattern.cache.BaseTest;
import com.jpattern.cache.ICache;
import com.jpattern.cache.ICacheManager;
/**
*
* @author Francesco Cina
*
* 23 Sep 2011
*/
public class CacheTest extends BaseTest {
private static String CACHE_NAME = "query.ObjectShortTermCache";
private ICacheManager cacheService;
private int cacheSize = 10;
private long timeToLiveSecond = 1;
protected void setUp() throws Exception {
super.setUp();
ICache cache = new Cache(CACHE_NAME, cacheSize, timeToLiveSecond);
List<ICache> caches = new ArrayList<ICache>();
caches.add(cache);
cacheService = new CacheManager(caches);
}
protected void tearDown() throws Exception {
super.tearDown();
}
public void testCache1() throws Exception {
ICache emptyStringCache = cacheService.getCache("");
assertNotNull( emptyStringCache );
assertNull( emptyStringCache.get("hello") );
assertNull( emptyStringCache.get(null) );
emptyStringCache.put("key", "value");
emptyStringCache.put("key", null);
emptyStringCache.put(null, "value");
emptyStringCache.put(null, null);
}
public void testCache2() throws Exception {
ICache nullStringCache = cacheService.getCache("");
assertNotNull( nullStringCache );
assertNull( nullStringCache.get("hello") );
assertNull( nullStringCache.get(null) );
nullStringCache.put("key", "value");
nullStringCache.put("key", null);
nullStringCache.put(null, "value");
nullStringCache.put(null, null);
}
public void testCache3() throws Exception {
ICache cache = cacheService.getCache(CACHE_NAME);
assertNotNull( cache );
String key = "test-key-" + new Date().getTime();
assertNull( cache.get(key) );
assertNull( cache.get(null) );
cache.put(key, "value");
assertNotNull(cache.get(key));
assertEquals( "value" , cache.get(key, String.class) );
cache.clear();
assertNull(cache.get(key));
cache.put("key", null);
cache.put(null, "value");
cache.put(null, null);
cache.clear();
}
public void testCache4() throws Exception {
ICache cache = cacheService.getCache(CACHE_NAME);
assertNotNull( cache );
String key1 = "test-key1-" + new Date().getTime();
String key2 = "test-key2-" + new Date().getTime();
String key3 = "test-key3-" + new Date().getTime();
assertNull( cache.get(key1) );
assertNull( cache.get(key2) );
assertNull( cache.get(key3) );
cache.put(key1, "value1");
assertNotNull(cache.get(key1));
assertEquals( "value1" , (String) cache.get(key1) );
cache.put(key2, "value2");
assertNotNull(cache.get(key2));
assertEquals( "value2" , (String) cache.get(key2) );
cache.put(key3, "value3");
assertNotNull(cache.get(key3));
assertEquals( "value3" , (String) cache.get(key3) );
cache.remove(key2);
assertNotNull(cache.get(key1));
assertNull(cache.get(key2));
assertNotNull(cache.get(key3));
cache.clear();
assertNull(cache.get(key1));
assertNull(cache.get(key2));
assertNull(cache.get(key3));
cache.clear();
}
public void testCacheSize() {
ICache cache = cacheService.getCache(CACHE_NAME);
cache.clear();
String first = "first";
String second = "second";
cache.put(first, "");
cache.put(second, "");
assertNotNull(cache.get(first));
assertNotNull(cache.get(second));
for (int i=0; i<cacheSize-2; i++) {
cache.put("key" + i, "");
}
assertNotNull(cache.get(first));
assertNotNull(cache.get(second));
cache.put("new-key-1", "");
assertNull(cache.get(first));
assertNotNull(cache.get(second));
cache.put("new-key-2", "");
assertNull(cache.get(first));
assertNull(cache.get(second));
}
// Expiration: an entry survives half of its time-to-live and is gone once the
// full TTL has elapsed. The +25ms pads each half-TTL sleep against timer jitter.
public void testTime() {
    ICache cache = cacheService.getCache(CACHE_NAME);
    cache.clear();
    String first = "first";
    String second = "second";
    cache.put(first, "");
    try {
        Thread.sleep((timeToLiveSecond * 500) + 25); // half the TTL of "first"
        cache.put(second, "");
        assertNotNull(cache.get(first));
        assertNotNull(cache.get(second));
        Thread.sleep((timeToLiveSecond * 500) + 25); // "first" is now past its TTL
        assertNull(cache.get(first));
        assertNotNull(cache.get(second));
        Thread.sleep((timeToLiveSecond * 500) + 25); // "second" is now past its TTL
        assertNull(cache.get(first));
        assertNull(cache.get(second));
    } catch (InterruptedException e) {
        // An interrupted sleep invalidates the timing assumptions -> fail.
        assertTrue(false);
    }
}
}
| apache-2.0 |
leapframework/framework | web/api/src/main/java/leap/web/api/meta/DefaultApiMetadataStrategy.java | 3113 | /*
*
* * Copyright 2016 the original author or authors.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package leap.web.api.meta;
import leap.lang.Strings;
import leap.web.api.config.ApiConfig;
import leap.web.api.meta.model.MApiOperationBuilder;
import leap.web.api.meta.model.MApiPathBuilder;
/**
 * Default strategy for assigning ids to API operations.
 *
 * <p>If unique ids are not required, the operation name is used directly.
 * Otherwise a sequence of candidates is tried in order — the name itself,
 * name + "With" + method, name + "In" + tag, and the tag-qualified form
 * suffixed with the method — until one is found that is not yet registered
 * (compared case-insensitively) in the metadata builder.
 */
public class DefaultApiMetadataStrategy implements ApiMetadataStrategy {

    /** Characters stripped from tags before they are embedded in an id. */
    private static final String SIMPLE_NAME_PATTERN = "[^0-9a-zA-Z_]+";

    @Override
    public boolean tryCreateOperationId(ApiConfig c, ApiMetadataBuilder m, MApiPathBuilder p, MApiOperationBuilder op) {
        if (!c.isUniqueOperationId()) {
            // Uniqueness not required: the plain operation name is good enough.
            op.setId(op.getName());
            return true;
        }

        final String opName = op.getName();
        final String httpMethod = op.getMethod().name().toLowerCase();

        if (Strings.contains(opName, "$Lambda$")) {
            // Lambda-backed operations have synthetic names; no id is created.
            return false;
        }

        if (!opName.equalsIgnoreCase(httpMethod)) {
            // Candidate 1: the operation name itself.
            if (trySetUniqueOperationId(m, op, opName)) {
                return true;
            }
            // Candidate 2: name + "With" + Method (skipped when the name
            // already starts with the method).
            if (!Strings.startsWithIgnoreCase(opName, httpMethod)) {
                String candidate = opName + "With" + Strings.upperFirst(httpMethod);
                if (trySetUniqueOperationId(m, op, candidate)) {
                    return true;
                }
            }
        }

        // Candidates derived from the operation's tags.
        for (String tag : op.getTags()) {
            String tagName = Strings.upperCamel(tag.replaceAll(SIMPLE_NAME_PATTERN, ""), '_');
            if (Strings.isEmpty(tagName) || opName.toLowerCase().contains(tagName.toLowerCase())) {
                continue; // tag adds no new information to the name
            }
            // Candidate 3: name + "In" + Tag.
            String candidate = opName + "In" + Strings.upperFirst(tagName);
            if (trySetUniqueOperationId(m, op, candidate)) {
                return true;
            }
            // Candidate 4: the tag-qualified id + "With" + Method.
            if (!Strings.startsWithIgnoreCase(candidate, httpMethod)) {
                candidate = candidate + "With" + Strings.upperFirst(httpMethod);
                if (trySetUniqueOperationId(m, op, candidate)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Registers {@code id} for {@code op} if its lower-cased form is not
     * already taken in the metadata builder.
     *
     * @return true when the id was free and has now been claimed.
     */
    protected boolean trySetUniqueOperationId(ApiMetadataBuilder m, MApiOperationBuilder op, String id) {
        String lowerId = Strings.lowerCase(id);
        if (m.getOperationIds().contains(lowerId)) {
            return false;
        }
        op.setId(id);
        m.getOperationIds().add(lowerId);
        return true;
    }
}
| apache-2.0 |
kongch/OpenCSV-3.0 | OpenCSV/test/au/com/bytecode/opencsv/CSVParserTest.java | 15330 | package au.com.bytecode.opencsv;
/**
* Created by IntelliJ IDEA.
* User: Scott Conway
* Date: Oct 7, 2009
* Time: 9:56:48 PM
*/
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.*;
public class CSVParserTest {
CSVParser csvParser;
@Before
public void setUp() {
    // Fresh default parser (comma separator, double-quote quoting) per test.
    csvParser = new CSVParser();
}

// Splitting does not trim: whitespace after a separator stays in the element.
@Test
public void testParseLine() throws Exception {
    String nextItem[] = csvParser.parseLine("This, is, a, test.");
    assertEquals(4, nextItem.length);
    assertEquals("This", nextItem[0]);
    assertEquals(" is", nextItem[1]);
    assertEquals(" a", nextItem[2]);
    assertEquals(" test.", nextItem[3]);
}

@Test
public void parseSimpleString() throws IOException {
    String[] nextLine = csvParser.parseLine("a,b,c");
    assertEquals(3, nextLine.length);
    assertEquals("a", nextLine[0]);
    assertEquals("b", nextLine[1]);
    assertEquals("c", nextLine[2]);
    // A complete single line leaves no multi-line state pending.
    assertFalse(csvParser.isPending());
}

/**
 * Tests quotes in the middle of an element.
 *
 * @throws IOException if bad things happen
 */
@Test
public void testParsedLineWithInternalQuota() throws IOException {
    String[] nextLine = csvParser.parseLine("a,123\"4\"567,c");
    assertEquals(3, nextLine.length);
    assertEquals("123\"4\"567", nextLine[1]);
}

// Separators inside a quoted element do not split the element.
@Test
public void parseQuotedStringWithCommas() throws IOException {
    String[] nextLine = csvParser.parseLine("a,\"b,b,b\",c");
    assertEquals("a", nextLine[0]);
    assertEquals("b,b,b", nextLine[1]);
    assertEquals("c", nextLine[2]);
    assertEquals(3, nextLine.length);
}

// Same behaviour with a custom separator character.
@Test
public void parseQuotedStringWithDefinedSeperator() throws IOException {
    csvParser = new CSVParser(':');
    String[] nextLine = csvParser.parseLine("a:\"b:b:b\":c");
    assertEquals("a", nextLine[0]);
    assertEquals("b:b:b", nextLine[1]);
    assertEquals("c", nextLine[2]);
    assertEquals(3, nextLine.length);
}

// Custom separator and custom quote character together.
@Test
public void parseQuotedStringWithDefinedSeperatorAndQuote() throws IOException {
    csvParser = new CSVParser(':', '\'');
    String[] nextLine = csvParser.parseLine("a:'b:b:b':c");
    assertEquals("a", nextLine[0]);
    assertEquals("b:b:b", nextLine[1]);
    assertEquals("c", nextLine[2]);
    assertEquals(3, nextLine.length);
}

// Consecutive separators yield empty strings, never nulls.
@Test
public void parseEmptyElements() throws IOException {
    String[] nextLine = csvParser.parseLine(",,");
    assertEquals(3, nextLine.length);
    assertEquals("", nextLine[0]);
    assertEquals("", nextLine[1]);
    assertEquals("", nextLine[2]);
}

// Embedded newlines survive intact inside a quoted element.
@Test
public void parseMultiLinedQuoted() throws IOException {
    String[] nextLine = csvParser.parseLine("a,\"PO Box 123,\nKippax,ACT. 2615.\nAustralia\",d.\n");
    assertEquals(3, nextLine.length);
    assertEquals("a", nextLine[0]);
    assertEquals("PO Box 123,\nKippax,ACT. 2615.\nAustralia", nextLine[1]);
    assertEquals("d.\n", nextLine[2]);
}
// a,"""",c — a doubled quote inside a quoted element is a literal quote char.
@Test
public void testADoubleQuoteAsDataElement() throws IOException {
    String[] nextLine = csvParser.parseLine("a,\"\"\"\",c");// a,"""",c
    assertEquals(3, nextLine.length);
    assertEquals("a", nextLine[0]);
    assertEquals(1, nextLine[1].length());
    assertEquals("\"", nextLine[1]);
    assertEquals("c", nextLine[2]);
}

// Backslash-escaped quotes inside quoted elements become literal quotes.
@Test
public void testEscapedDoubleQuoteAsDataElement() throws IOException {
    String[] nextLine = csvParser.parseLine("\"test\",\"this,test,is,good\",\"\\\"test\\\"\",\"\\\"quote\\\"\""); // "test","this,test,is,good","\"test\",\"quote\""
    assertEquals(4, nextLine.length);
    assertEquals("test", nextLine[0]);
    assertEquals("this,test,is,good", nextLine[1]);
    assertEquals("\"test\"", nextLine[2]);
    assertEquals("\"quote\"", nextLine[3]);
}

//    @Test
//    public void testEscapingSeparator() throws IOException {
//        String[] nextLine = csvParser.parseLine("test,this\\,test\\,is\\,good"); // "test","this,test,is,good","\"test\",\"quote\""
//
//        assertEquals(2, nextLine.length);
//
//        assertEquals("test", nextLine[0]);
//        assertEquals("this,test,is,good", nextLine[1]);
//    }

// Doubled quotes are unescaped to single quotes within an element.
@Test
public void parseQuotedQuoteCharacters() throws IOException {
    String[] nextLine = csvParser.parseLineMulti("\"Glen \"\"The Man\"\" Smith\",Athlete,Developer\n");
    assertEquals(3, nextLine.length);
    assertEquals("Glen \"The Man\" Smith", nextLine[0]);
    assertEquals("Athlete", nextLine[1]);
    assertEquals("Developer\n", nextLine[2]);
}

@Test
public void parseMultipleQuotes() throws IOException {
    String[] nextLine = csvParser.parseLine("\"\"\"\"\"\",\"test\"\n"); // """""","test" representing: "", test
    assertEquals("\"\"", nextLine[0]); // check the tricky situation
    assertEquals("test\n", nextLine[1]); // make sure we didn't ruin the next field..
    assertEquals(2, nextLine.length);
}

// Mixed quoted/unquoted elements with embedded and trailing newlines.
@Test
public void parseTrickyString() throws IOException {
    String[] nextLine = csvParser.parseLine("\"a\nb\",b,\"\nd\",e\n");
    assertEquals(4, nextLine.length);
    assertEquals("a\nb", nextLine[0]);
    assertEquals("b", nextLine[1]);
    assertEquals("\nd", nextLine[2]);
    assertEquals("e\n", nextLine[3]);
}
/**
 * Builds the two-line quoted input used by {@code testAMultiLineInsideQuotes()}.
 */
private String setUpMultiLineInsideQuotes() {
    // The original StringBuffer (pre-sized with CSVParser.INITIAL_READ_SIZE)
    // only ever appended this one literal; returning it directly is equivalent.
    return "Small test,\"This is a test across \ntwo lines.\"";
}
@Test
public void testAMultiLineInsideQuotes() throws IOException {
    String testString = setUpMultiLineInsideQuotes();
    String[] nextLine = csvParser.parseLine(testString);
    assertEquals(2, nextLine.length);
    assertEquals("Small test", nextLine[0]);
    assertEquals("This is a test across \ntwo lines.", nextLine[1]);
    assertFalse(csvParser.isPending());
}

// strictQuotes=true: only characters that appear inside quotes are kept.
@Test
public void testStrictQuoteSimple() throws IOException {
    csvParser = new CSVParser(',', '\"', '\\', true);
    String testString = "\"a\",\"b\",\"c\"";
    String[] nextLine = csvParser.parseLine(testString);
    assertEquals(3, nextLine.length);
    assertEquals("a", nextLine[0]);
    assertEquals("b", nextLine[1]);
    assertEquals("c", nextLine[2]);
}

// Spaces and tabs outside the quotes are discarded in strict mode.
@Test
public void testStrictQuoteWithSpacesAndTabs() throws IOException {
    csvParser = new CSVParser(',', '\"', '\\', true);
    String testString = " \t \"a\",\"b\" \t , \"c\" ";
    String[] nextLine = csvParser.parseLine(testString);
    assertEquals(3, nextLine.length);
    assertEquals("a", nextLine[0]);
    assertEquals("b", nextLine[1]);
    assertEquals("c", nextLine[2]);
}

// Arbitrary garbage outside the quotes is discarded in strict mode.
@Test
public void testStrictQuoteWithGarbage() throws IOException {
    csvParser = new CSVParser(',', '\"', '\\', true);
    String testString = "abc',!@#\",\\\"\" xyz,";
    String[] nextLine = csvParser.parseLine(testString);
    assertEquals(3, nextLine.length);
    assertEquals("", nextLine[0]);
    assertEquals(",\"", nextLine[1]);
    assertEquals("", nextLine[2]);
}
/**
 * Test issue 2263439 where an escaped quote was causing the parse to fail.
 * <p/>
 * Special thanks to Chris Morris for fixing this (id 1979054)
 *
 * @throws IOException
 */
@Test
public void testIssue2263439() throws IOException {
    // Single-quote is the quote character here; \' escapes a literal quote.
    csvParser = new CSVParser(',', '\'');
    String[] nextLine = csvParser.parseLine("865,0,'AmeriKKKa\\'s_Most_Wanted','',294,0,0,0.734338696798625,'20081002052147',242429208,18448");
    assertEquals(11, nextLine.length);
    assertEquals("865", nextLine[0]);
    assertEquals("0", nextLine[1]);
    assertEquals("AmeriKKKa's_Most_Wanted", nextLine[2]);
    assertEquals("", nextLine[3]);
    assertEquals("18448", nextLine[10]);
}

/**
 * Test issue 2859181 where an escaped character before a character
 * that did not need escaping was causing the parse to fail.
 *
 * @throws IOException
 */
@Test
public void testIssue2859181() throws IOException {
    csvParser = new CSVParser(';');
    String[] nextLine = csvParser.parseLine("field1;\\=field2;\"\"\"field3\"\"\""); // field1;\=field2;"""field3"""
    assertEquals(3, nextLine.length);
    assertEquals("field1", nextLine[0]);
    assertEquals("=field2", nextLine[1]);
    assertEquals("\"field3\"", nextLine[2]);
}

/**
 * Test issue 2726363
 * <p/>
 * Data given:
 * <p/>
 * "804503689","London",""London""shop","address","116.453182","39.918884"
 * "453074125","NewYork","brief","address"","121.514683","31.228511"
 */
@Test
public void testIssue2726363() throws IOException {
    String[] nextLine = csvParser.parseLine("\"804503689\",\"London\",\"\"London\"shop\",\"address\",\"116.453182\",\"39.918884\"");
    assertEquals(6, nextLine.length);
    assertEquals("804503689", nextLine[0]);
    assertEquals("London", nextLine[1]);
    assertEquals("\"London\"shop", nextLine[2]);
    assertEquals("address", nextLine[3]);
    assertEquals("116.453182", nextLine[4]);
    assertEquals("39.918884", nextLine[5]);
}
// parseLineMulti carries an unterminated quoted field over to the next call.
@Test
public void parseLineMultiAllowsQuotesAcrossMultipleLines() throws IOException {
    String[] nextLine = csvParser.parseLineMulti("This,\"is a \"good\" line\\\\ to parse");
    assertEquals(1, nextLine.length);
    assertEquals("This", nextLine[0]);
    assertTrue(csvParser.isPending());
    // The continuation closes the quote; both lines merge with a \n between.
    nextLine = csvParser.parseLineMulti("because we are using parseLineMulti.\"");
    assertEquals(1, nextLine.length);
    assertEquals("is a \"good\" line\\ to parse\nbecause we are using parseLineMulti.", nextLine[0]);
    assertFalse(csvParser.isPending());
}

// A plain parseLine() call discards any pending multi-line state.
@Test
public void pendingIsClearedAfterCallToParseLine() throws IOException {
    String[] nextLine = csvParser.parseLineMulti("This,\"is a \"good\" line\\\\ to parse");
    assertEquals(1, nextLine.length);
    assertEquals("This", nextLine[0]);
    assertTrue(csvParser.isPending());
    nextLine = csvParser.parseLine("because we are using parseLineMulti.");
    assertEquals(1, nextLine.length);
    assertEquals("because we are using parseLineMulti.", nextLine[0]);
    assertFalse(csvParser.isPending());
}

// Passing null flushes and returns whatever field is pending.
@Test
public void returnPendingIfNullIsPassedIntoParseLineMulti() throws IOException {
    String[] nextLine = csvParser.parseLineMulti("This,\"is a \"goo\\d\" line\\\\ to parse\\");
    assertEquals(1, nextLine.length);
    assertEquals("This", nextLine[0]);
    assertTrue(csvParser.isPending());
    nextLine = csvParser.parseLineMulti(null);
    assertEquals(1, nextLine.length);
    assertEquals("is a \"good\" line\\ to parse\n", nextLine[0]);
    assertFalse(csvParser.isPending());
}

// Trailing whitespace after the closing quote is dropped in strict mode.
@Test
public void spacesAtEndOfQuotedStringDoNotCountIfStrictQuotesIsTrue() throws IOException {
    CSVParser parser = new CSVParser(Constants.DEFAULT_SEPARATOR, Constants.DEFAULT_QUOTE_CHARACTER, Constants.DEFAULT_ESCAPE_CHARACTER, true);
    String[] nextLine = parser.parseLine("\"Line with\", \"spaces at end\" ");
    assertEquals(2, nextLine.length);
    assertEquals("Line with", nextLine[0]);
    assertEquals("spaces at end", nextLine[1]);
}

@Test
public void returnNullWhenNullPassedIn() throws IOException {
    String[] nextLine = csvParser.parseLine(null);
    assertNull(nextLine);
}
// Fixture for the escapability tests; the runtime value is \\1\2\"\ (9 chars).
private static final String ESCAPE_TEST_STRING = "\\\\1\\2\\\"3\\"; // \\1\2\"\

// Sanity-check the fixture itself before testing escape handling.
@Test
public void validateEscapeStringBeforeRealTest() {
    assertNotNull(ESCAPE_TEST_STRING);
    assertEquals(9, ESCAPE_TEST_STRING.length());
}

// A backslash is escapable only inside quotes (the boolean flag) and only
// when the character following it is another backslash or a quote.
@Test
public void whichCharactersAreEscapable() {
    assertTrue(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, true, 0));
    assertFalse(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, false, 0));
    // Second character is not escapable because there is a non quote or non slash after it.
    assertFalse(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, true, 1));
    assertFalse(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, false, 1));
    // Fourth character is not escapable because there is a non quote or non slash after it.
    assertFalse(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, true, 3));
    assertFalse(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, false, 3));
    assertTrue(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, true, 5));
    assertFalse(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, false, 5));
    // A trailing backslash has nothing after it, so it is never escapable.
    int lastChar = ESCAPE_TEST_STRING.length() - 1;
    assertFalse(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, true, lastChar));
    assertFalse(csvParser.isNextCharacterEscapable(ESCAPE_TEST_STRING, false, lastChar));
}

@Test
public void whitespaceBeforeEscape() throws IOException {
    String[] nextItem = csvParser.parseLine("\"this\", \"is\",\"a test\""); //"this", "is","a test"
    assertEquals("this", nextItem[0]);
    assertEquals("is", nextItem[1]);
    assertEquals("a test", nextItem[2]);
}
/**
 * Issue 2958242: tab-separated input containing quote characters and escaped
 * backslashes must parse without the default quote handling mangling fields.
 */
@Test
public void testIssue2958242WithoutQuotes() throws IOException {
    CSVParser testParser = new CSVParser('\t');
    String[] nextItem = testParser.parseLine("zo\"\"har\"at\t10-04-1980\t29\tC:\\\\foo.txt");
    assertEquals(4, nextItem.length);
    assertEquals("zo\"\"har\"at", nextItem[0]);
    assertEquals("10-04-1980", nextItem[1]);
    assertEquals("29", nextItem[2]);
    // Removed a leftover debug System.out.println(nextItem[3]); the assertion
    // below already covers that element.
    assertEquals("C:\\foo.txt", nextItem[3]);
}
// Constructor validation: separator, quote and escape characters must all be
// distinct, and the separator must be a real character.
@Test(expected = UnsupportedOperationException.class)
public void quoteAndEscapeCannotBeTheSame() {
    new CSVParser(Constants.DEFAULT_SEPARATOR, Constants.DEFAULT_QUOTE_CHARACTER, Constants.DEFAULT_QUOTE_CHARACTER);
}

@Test
public void quoteAndEscapeCanBeTheSameIfNull() {
    // NULL_CHARACTER disables the feature, so the "clash" is permitted.
    new CSVParser(Constants.DEFAULT_SEPARATOR, Constants.NULL_CHARACTER, Constants.NULL_CHARACTER);
}

@Test(expected = UnsupportedOperationException.class)
public void separatorCharacterCannotBeNull() {
    new CSVParser(Constants.NULL_CHARACTER);
}

@Test(expected = UnsupportedOperationException.class)
public void separatorAndEscapeCannotBeTheSame() {
    new CSVParser(Constants.DEFAULT_SEPARATOR, Constants.DEFAULT_QUOTE_CHARACTER, Constants.DEFAULT_SEPARATOR);
}

@Test(expected = UnsupportedOperationException.class)
public void separatorAndQuoteCannotBeTheSame() {
    new CSVParser(Constants.DEFAULT_SEPARATOR, Constants.DEFAULT_SEPARATOR, Constants.DEFAULT_ESCAPE_CHARACTER);
}
} | apache-2.0 |
GwtDomino/domino | domino-test/domino-client-test/src/main/java/org/dominokit/domino/test/api/client/TestServerService.java | 455 | package org.dominokit.domino.test.api.client;
import org.dominokit.domino.api.shared.request.RequestBean;
import org.dominokit.domino.api.shared.request.ResponseBean;
/**
 * Test double for the server side of request execution: an implementation
 * receives the request and completes the supplied response context.
 */
@FunctionalInterface
public interface TestServerService {

    // NOTE(review): this nested callback type is not referenced by
    // executeRequest() below — presumably used by callers elsewhere; confirm
    // before removing.
    interface RequestExecutionCallBack {
        void onSuccess(ResponseBean response);
        void onFailed(ResponseBean response);
    }

    /** Executes {@code request}, reporting the outcome via {@code responseContext}. */
    void executeRequest(RequestBean request, TestResponseContext responseContext);
}
| apache-2.0 |
OSGP/Protocol-Adapter-OSLP | osgp-adapter-protocol-oslp-elster/src/main/java/org/opensmartgridplatform/adapter/protocol/oslp/elster/device/requests/GetPowerUsageHistoryDeviceRequest.java | 1580 | /**
* Copyright 2015 Smart Society Services B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package org.opensmartgridplatform.adapter.protocol.oslp.elster.device.requests;
import org.opensmartgridplatform.adapter.protocol.oslp.elster.device.DeviceRequest;
import org.opensmartgridplatform.dto.valueobjects.PowerUsageHistoryMessageDataContainerDto;
/**
 * Device request asking a device for its power usage history. Carries the
 * history query parameters alongside the common request identifiers
 * (organisation, device, correlation uid, priority) held by {@link DeviceRequest}.
 */
public class GetPowerUsageHistoryDeviceRequest extends DeviceRequest {

    // Immutable payload describing the requested history.
    private final PowerUsageHistoryMessageDataContainerDto powerUsageHistoryContainer;

    public GetPowerUsageHistoryDeviceRequest(final String organisationIdentification, final String deviceIdentification,
            final String correlationUid, final int messagePriority,
            final PowerUsageHistoryMessageDataContainerDto powerUsageHistoryContainer) {
        super(organisationIdentification, deviceIdentification, correlationUid, messagePriority);
        this.powerUsageHistoryContainer = powerUsageHistoryContainer;
    }

    /** Builder-based variant mirroring the explicit-argument constructor. */
    public GetPowerUsageHistoryDeviceRequest(final Builder deviceRequestBuilder,
            final PowerUsageHistoryMessageDataContainerDto powerUsageHistoryContainer) {
        super(deviceRequestBuilder);
        this.powerUsageHistoryContainer = powerUsageHistoryContainer;
    }

    public PowerUsageHistoryMessageDataContainerDto getPowerUsageHistoryContainer() {
        return this.powerUsageHistoryContainer;
    }
}
| apache-2.0 |
kidaa/incubator-geode | gemfire-core/src/test/java/com/gemstone/gemfire/cache/hdfs/internal/RegionWithHDFSBasicDUnitTest.java | 54424 | /*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.cache.hdfs.internal;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.logging.log4j.Logger;
import com.gemstone.gemfire.Delta;
import com.gemstone.gemfire.InvalidDeltaException;
import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.EvictionAction;
import com.gemstone.gemfire.cache.EvictionAttributes;
import com.gemstone.gemfire.cache.PartitionAttributesFactory;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.hdfs.HDFSStoreFactory;
import com.gemstone.gemfire.cache.hdfs.internal.hoplog.AbstractHoplogOrganizer;
import com.gemstone.gemfire.cache.hdfs.internal.hoplog.HDFSRegionDirector;
import com.gemstone.gemfire.internal.cache.DistributedPutAllOperation;
import com.gemstone.gemfire.internal.cache.EntryEventImpl;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import com.gemstone.gemfire.internal.cache.persistence.soplog.SortedOplogStatistics;
import com.gemstone.gemfire.internal.cache.wan.parallel.ConcurrentParallelGatewaySenderQueue;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.internal.cache.wan.AbstractGatewaySender;
import dunit.AsyncInvocation;
import dunit.DistributedTestCase;
import dunit.Host;
import dunit.SerializableCallable;
import dunit.SerializableRunnable;
import dunit.VM;
/**
* A class for testing the basic HDFS functionality
*
* @author Hemant Bhanawat
*/
@SuppressWarnings({ "serial", "rawtypes", "deprecation", "unchecked", "unused" })
public class RegionWithHDFSBasicDUnitTest extends RegionWithHDFSTestBase {
private static final Logger logger = LogService.getLogger();

// Expected-exception markers registered in setUp() and removed in tearDown2().
private ExpectedException ee0;
private ExpectedException ee1;

public RegionWithHDFSBasicDUnitTest(String name) {
    super(name);
}

public void setUp() throws Exception {
    super.setUp();
    // RegionDestroyedException is expected noise across this whole class —
    // presumably from region destruction racing with in-flight ops between
    // DUnit VMs; two registrations cover two test VMs' logs.
    ee0 = DistributedTestCase.addExpectedException("com.gemstone.gemfire.cache.RegionDestroyedException");
    ee1 = DistributedTestCase.addExpectedException("com.gemstone.gemfire.cache.RegionDestroyedException");
}

public void tearDown2() throws Exception {
    ee0.remove();
    ee1.remove();
    super.tearDown2();
}
/**
 * Builds the callable each DUnit VM runs to create the HDFS-backed
 * partitioned region plus its HDFSStore with the given tuning knobs.
 * A value of -1 for either rollover parameter means "leave the store default".
 */
@Override
protected SerializableCallable getCreateRegionCallable(
        final int totalnumOfBuckets, final int batchSizeMB,
        final int maximumEntries, final String folderPath,
        final String uniqueName, final int batchInterval,
        final boolean queuePersistent, final boolean writeonly,
        final long timeForRollover, final long maxFileSize) {
    SerializableCallable createRegion = new SerializableCallable() {
        public Object call() throws Exception {
            AttributesFactory af = new AttributesFactory();
            af.setDataPolicy(DataPolicy.HDFS_PARTITION);
            PartitionAttributesFactory paf = new PartitionAttributesFactory();
            paf.setTotalNumBuckets(totalnumOfBuckets);
            paf.setRedundantCopies(1);
            // Store name doubles as the region name throughout these tests.
            af.setHDFSStoreName(uniqueName);
            af.setPartitionAttributes(paf.create());
            HDFSStoreFactory hsf = getCache().createHDFSStoreFactory();
            // Going two level up to avoid home directories getting created in
            // VM-specific directory. This avoids failures in those tests where
            // datastores are restarted and bucket ownership changes between VMs.
            homeDir = new File(tmpDir + "/../../" + folderPath).getCanonicalPath();
            logger.info("Setting homeDir to {}", homeDir);
            hsf.setHomeDir(homeDir);
            hsf.setBatchSize(batchSizeMB);
            hsf.setBufferPersistent(queuePersistent);
            hsf.setMaxMemory(3);
            hsf.setBatchInterval(batchInterval);
            if (timeForRollover != -1) {
                hsf.setWriteOnlyFileRolloverInterval((int) timeForRollover);
                // Speed up the rollover task so short test intervals take effect.
                System.setProperty("gemfire.HDFSRegionDirector.FILE_ROLLOVER_TASK_INTERVAL_SECONDS", "1");
            }
            if (maxFileSize != -1) {
                hsf.setWriteOnlyFileRolloverSize((int) maxFileSize);
            }
            hsf.create(uniqueName);
            // Evict LRU entries locally once maximumEntries is reached.
            af.setEvictionAttributes(EvictionAttributes.createLRUEntryAttributes(maximumEntries, EvictionAction.LOCAL_DESTROY));
            af.setHDFSWriteOnly(writeonly);
            Region r = createRootRegion(uniqueName, af.create());
            ((LocalRegion) r).setIsTest();
            return 0;
        }
    };
    return createRegion;
}
/** Writes entries K{start}..K{end-1} with matching V-values into the region. */
@Override
protected void doPuts(final String uniqueName, int start, int end) {
    final Region region = getRootRegion(uniqueName);
    for (int n = start; n < end; n++) {
        region.put("K" + n, "V" + n);
    }
}
/** Bulk-loads {@code map} into the region with a single putAll. */
@Override
protected void doPutAll(final String uniqueName, Map map) {
    Region r = getRootRegion(uniqueName);
    r.putAll(map);
}
/** Destroys entries K{start}..K{end-1} from the region. */
@Override
protected void doDestroys(final String uniqueName, int start, int end) {
    final Region region = getRootRegion(uniqueName);
    for (int n = start; n < end; n++) {
        region.destroy("K" + n);
    }
}
/**
 * Verifies via get() that K{start}..K{end-1} map to their V-values when
 * {@code expectValue} is true, or are absent (null) when it is false.
 */
@Override
protected void checkWithGet(String uniqueName, int start, int end, boolean expectValue) {
    Region region = getRootRegion(uniqueName);
    for (int n = start; n < end; n++) {
        String wanted = expectValue ? "V" + n : null;
        assertEquals("Mismatch on key " + n, wanted, region.get("K" + n));
    }
}
/**
 * Fetches all keys in {@code arrayl} with getAll() and checks each returned
 * value is the key with its "K" prefix swapped for "V".
 */
@Override
protected void checkWithGetAll(String uniqueName, ArrayList arrayl) {
    Region region = getRootRegion(uniqueName);
    Map fetched = region.getAll(arrayl);
    logger.info("Read entries {}", fetched.size());
    for (Object key : fetched.keySet()) {
        Object actual = fetched.get(key);
        String expected = key.toString().replaceFirst("K", "V");
        assertTrue("Reading entries failed for key " + key + " where value = " + actual,
                expected.equals(actual));
    }
}
/**
 * Tests if gets go to primary even if the value resides on secondary.
 */
public void testValueFetchedFromLocal() {
    disconnectFromDS();
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    String homeDir = "./testValueFetchedFromLocal";
    createServerRegion(vm0, 7, 1, 50, homeDir, "testValueFetchedFromLocal", 1000);
    createServerRegion(vm1, 7, 1, 50, homeDir, "testValueFetchedFromLocal", 1000);
    // vm0 writes 25 entries.
    vm0.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testValueFetchedFromLocal");
            for (int i = 0; i < 25; i++) {
                r.put("K" + i, "V" + i);
            }
            return null;
        }
    });
    // vm1 reads them back; some reads must have been routed to the primary
    // (i.e. not satisfied from vm1's local copy).
    vm1.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testValueFetchedFromLocal");
            for (int i = 0; i < 25; i++) {
                String s = null;
                String k = "K" + i;
                s = (String) r.get(k);
                String v = "V" + i;
                assertTrue("The expected key " + v + " didn't match the received value " + s, v.equals(s));
            }
            // with only two members and 1 redundant copy, we will have all data locally, make sure that some
            // get operations results in a remote get operation
            assertTrue("gets should always go to primary, ", ((LocalRegion) r).getCountNotFoundInLocal() != 0);
            return null;
        }
    });
    // The writing VM must never have needed a non-local lookup.
    vm0.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testValueFetchedFromLocal");
            assertTrue("HDFS queue or HDFS should not have been accessed. They were accessed " + ((LocalRegion) r).getCountNotFoundInLocal() + " times",
                    ((LocalRegion) r).getCountNotFoundInLocal() == 0);
            return null;
        }
    });
}
// In-memory accounting of the HDFS event queue: after one ~1KB put, the
// primary bucket's queueSizeInBytes must reflect the entry (1024 bytes of
// value plus per-entry overhead, i.e. 1024 < size < 1150) while the
// secondary's counter stays at zero. Checked from both VMs, since either
// may host the primary of the single bucket.
public void testHDFSQueueSizeTest() {
    disconnectFromDS();
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    String homeDir = "./testHDFSQueueSize";
    createServerRegion(vm0, 1, 10, 50, homeDir, "testHDFSQueueSize", 100000);
    createServerRegion(vm1, 1, 10, 50, homeDir, "testHDFSQueueSize", 100000);
    vm0.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testHDFSQueueSize");
            byte[] b = new byte[1024];
            byte[] k = new byte[1];
            for (int i = 0; i < 1; i++) {
                r.put(k, b);
            }
            // Dig out the bucket region queue backing the HDFS event queue.
            ConcurrentParallelGatewaySenderQueue hdfsqueue = (ConcurrentParallelGatewaySenderQueue) ((AbstractGatewaySender) ((PartitionedRegion) r).getHDFSEventQueue().getSender()).getQueue();
            HDFSBucketRegionQueue hdfsBQ = (HDFSBucketRegionQueue) ((PartitionedRegion) hdfsqueue.getRegion()).getDataStore().getLocalBucketById(0);
            if (hdfsBQ.getBucketAdvisor().isPrimary()) {
                assertTrue("size should not as expected on primary " + hdfsBQ.queueSizeInBytes.get(), hdfsBQ.queueSizeInBytes.get() > 1024 && hdfsBQ.queueSizeInBytes.get() < 1150);
            } else {
                assertTrue("size should be 0 on secondary", hdfsBQ.queueSizeInBytes.get() == 0);
            }
            return null;
        }
    });
    vm1.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testHDFSQueueSize");
            ConcurrentParallelGatewaySenderQueue hdfsqueue = (ConcurrentParallelGatewaySenderQueue) ((AbstractGatewaySender) ((PartitionedRegion) r).getHDFSEventQueue().getSender()).getQueue();
            HDFSBucketRegionQueue hdfsBQ = (HDFSBucketRegionQueue) ((PartitionedRegion) hdfsqueue.getRegion()).getDataStore().getLocalBucketById(0);
            if (hdfsBQ.getBucketAdvisor().isPrimary()) {
                assertTrue("size should not as expected on primary " + hdfsBQ.queueSizeInBytes.get(), hdfsBQ.queueSizeInBytes.get() > 1024 && hdfsBQ.queueSizeInBytes.get() < 1150);
            } else {
                assertTrue("size should be 0 on secondary", hdfsBQ.queueSizeInBytes.get() == 0);
            }
            return null;
        }
    });
}
/**
 * Does put for write only HDFS store
 */
public void testBasicPutsForWriteOnlyHDFSStore() {
    disconnectFromDS();
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    String homeDir = "./testPutsForWriteOnlyHDFSStore";
    // NOTE(review): the two trailing booleans presumably map to
    // queuePersistent/writeonly in RegionWithHDFSTestBase.createServerRegion —
    // confirm the parameter order there.
    createServerRegion(vm0, 7, 1, 20, homeDir, "testPutsForWriteOnlyHDFSStore",
            100, true, false);
    createServerRegion(vm1, 7, 1, 20, homeDir, "testPutsForWriteOnlyHDFSStore",
            100, true, false);
    // Do some puts
    vm0.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testPutsForWriteOnlyHDFSStore");
            for (int i = 0; i < 200; i++) {
                r.put("K" + i, "V" + i);
            }
            return null;
        }
    });
    // A disjoint key range from the second member.
    vm1.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testPutsForWriteOnlyHDFSStore");
            for (int i = 200; i < 400; i++) {
                r.put("K" + i, "V" + i);
            }
            return null;
        }
    });
}
/**
 * Exercises delta propagation on an HDFS region: each member creates a range
 * of CustomerDelta entries and applies an address update (a delta) to half of
 * them; reads must then observe the merged state for all four key ranges.
 * (Previous javadoc was copy-pasted from the write-only put test.)
 */
public void testDelta() {
    disconnectFromDS();
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    String homeDir = "./testDelta";
    // Expected from com.gemstone.gemfire.internal.cache.ServerPingMessage.send()
    // Renamed from ee1/ee2: those names shadowed the ee0/ee1 instance fields
    // managed by setUp()/tearDown2().
    ExpectedException interruptedEE0 = DistributedTestCase.addExpectedException("java.lang.InterruptedException");
    ExpectedException interruptedEE1 = DistributedTestCase.addExpectedException("java.lang.InterruptedException");
    createServerRegion(vm0, 7, 1, 20, homeDir, "testDelta", 100);
    createServerRegion(vm1, 7, 1, 20, homeDir, "testDelta", 100);
    // vm0 creates K0..K99 and delta-updates the address of K0..K49.
    vm0.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testDelta");
            for (int i = 0; i < 100; i++) {
                r.put("K" + i, new CustomerDelta("V" + i, "address"));
            }
            for (int i = 0; i < 50; i++) {
                CustomerDelta cd = new CustomerDelta("V" + i, "address");
                cd.setAddress("updated address");
                r.put("K" + i, cd);
            }
            return null;
        }
    });
    // vm1 creates K100..K199 and delta-updates K100..K149.
    vm1.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testDelta");
            for (int i = 100; i < 200; i++) {
                r.put("K" + i, new CustomerDelta("V" + i, "address"));
            }
            for (int i = 100; i < 150; i++) {
                CustomerDelta cd = new CustomerDelta("V" + i, "address");
                cd.setAddress("updated address");
                r.put("K" + i, cd);
            }
            return null;
        }
    });
    // Verify all four ranges from vm1: updated vs. untouched entries.
    vm1.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testDelta");
            for (int i = 0; i < 50; i++) {
                CustomerDelta custDela = new CustomerDelta("V" + i, "updated address");
                String k = "K" + i;
                CustomerDelta s = (CustomerDelta) r.get(k);
                assertTrue("The expected value " + custDela + " didn't match the received value " + s, custDela.equals(s));
            }
            for (int i = 50; i < 100; i++) {
                CustomerDelta custDela = new CustomerDelta("V" + i, "address");
                String k = "K" + i;
                CustomerDelta s = (CustomerDelta) r.get(k);
                assertTrue("The expected value " + custDela + " didn't match the received value " + s, custDela.equals(s));
            }
            for (int i = 100; i < 150; i++) {
                CustomerDelta custDela = new CustomerDelta("V" + i, "updated address");
                String k = "K" + i;
                CustomerDelta s = (CustomerDelta) r.get(k);
                assertTrue("The expected value " + custDela + " didn't match the received value " + s, custDela.equals(s));
            }
            for (int i = 150; i < 200; i++) {
                CustomerDelta custDela = new CustomerDelta("V" + i, "address");
                String k = "K" + i;
                CustomerDelta s = (CustomerDelta) r.get(k);
                assertTrue("The expected value " + custDela + " didn't match the received value " + s, custDela.equals(s));
            }
            return null;
        }
    });
    interruptedEE0.remove();
    interruptedEE1.remove();
}
/**
 * Puts byte arrays and fetches them back to ensure that serialization of byte
 * arrays is proper
 *
 */
public void testByteArrays() {
    disconnectFromDS();
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    String homeDir = "./testByteArrays";
    createServerRegion(vm0, 7, 1, 20, homeDir, "testByteArrays", 100);
    createServerRegion(vm1, 7, 1, 20, homeDir, "testByteArrays", 100);
    // vm0 writes K0..K99, cycling through three arrays of different lengths.
    vm0.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testByteArrays");
            byte[] b1 = { 0x11, 0x44, 0x77 };
            byte[] b2 = { 0x22, 0x55 };
            byte[] b3 = { 0x33 };
            for (int i = 0; i < 100; i++) {
                int x = i % 3;
                if (x == 0) {
                    r.put("K" + i, b1);
                } else if (x == 1) {
                    r.put("K" + i, b2);
                } else {
                    r.put("K" + i, b3);
                }
            }
            return null;
        }
    });
    // vm1 writes K100..K199 with the same 3-way cycle.
    vm1.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testByteArrays");
            byte[] b1 = { 0x11, 0x44, 0x77 };
            byte[] b2 = { 0x22, 0x55 };
            byte[] b3 = { 0x33 };
            for (int i = 100; i < 200; i++) {
                int x = i % 3;
                if (x == 0) {
                    r.put("K" + i, b1);
                } else if (x == 1) {
                    r.put("K" + i, b2);
                } else {
                    r.put("K" + i, b3);
                }
            }
            return null;
        }
    });
    // Read everything back on vm1 and compare by content (Arrays.equals):
    // deserialized arrays are distinct objects from the ones that were put.
    vm1.invoke(new SerializableCallable() {
        public Object call() throws Exception {
            Region r = getRootRegion("testByteArrays");
            byte[] b1 = { 0x11, 0x44, 0x77 };
            byte[] b2 = { 0x22, 0x55 };
            byte[] b3 = { 0x33 };
            for (int i = 0; i < 200; i++) {
                int x = i % 3;
                String k = "K" + i;
                byte[] s = (byte[]) r.get(k);
                if (x == 0) {
                    assertTrue("The expected value didn't match the received value of byte array", Arrays.equals(b1, s));
                } else if (x == 1) {
                    assertTrue("The expected value didn't match the received value of byte array", Arrays.equals(b2, s));
                } else {
                    assertTrue("The expected value didn't match the received value of byte array", Arrays.equals(b3, s));
                }
            }
            return null;
        }
    });
}
/**
 * Test value type implementing GemFire's Delta interface: only the fields
 * modified through the setters are shipped by {@link #toDelta} and applied by
 * {@link #fromDelta}. The two boolean flags track which fields are dirty.
 */
private static class CustomerDelta implements Serializable, Delta {
  private String name;
  private String address;
  // Dirty flags: set only by the setters, consulted by hasDelta()/toDelta().
  private boolean nameChanged;
  private boolean addressChanged;
  /** Copy constructor. */
  public CustomerDelta(CustomerDelta o) {
    this.address = o.address;
    this.name = o.name;
    // Fix: also carry over the dirty flags; previously a copy of a modified
    // instance reported hasDelta() == false and produced an empty delta.
    this.nameChanged = o.nameChanged;
    this.addressChanged = o.addressChanged;
  }
  public CustomerDelta(String name, String address) {
    this.name = name;
    this.address = address;
  }
  /** Applies a delta written by {@link #toDelta}: a boolean flag precedes each field. */
  public void fromDelta(DataInput in) throws IOException,
      InvalidDeltaException {
    boolean nameC = in.readBoolean();
    if (nameC) {
      this.name = in.readUTF();
    }
    boolean addressC = in.readBoolean();
    if (addressC) {
      this.address = in.readUTF();
    }
  }
  public boolean hasDelta() {
    return nameChanged || addressChanged;
  }
  /** Writes only the dirty fields, each preceded by its changed flag. */
  public void toDelta(DataOutput out) throws IOException {
    out.writeBoolean(nameChanged);
    if (this.nameChanged) {
      out.writeUTF(name);
    }
    out.writeBoolean(addressChanged);
    if (this.addressChanged) {
      out.writeUTF(address);
    }
  }
  public void setName(String name) {
    this.nameChanged = true;
    this.name = name;
  }
  public String getName() {
    return name;
  }
  public void setAddress(String address) {
    this.addressChanged = true;
    this.address = address;
  }
  public String getAddress() {
    return address;
  }
  // Equality is based on name and address only; the dirty flags are transient
  // transport state and intentionally excluded.
  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof CustomerDelta)) {
      return false;
    }
    CustomerDelta other = (CustomerDelta) obj;
    return this.name.equals(other.name) && this.address.equals(other.address);
  }
  @Override
  public int hashCode() {
    return this.address.hashCode() + this.name.hashCode();
  }
  @Override
  public String toString() {
    // Fix: the separator between the two fields was missing, producing
    // run-together output like "name=V1address=...".
    return "name=" + this.name + ", address=" + address;
  }
}
/**
 * Clear while entries are still buffered in the async queue: the very large
 * batch interval (100000) keeps data from being flushed to HDFS first.
 */
public void testClearRegionDataInQueue() throws Throwable {
  doTestClearRegion(100000, false);
}
/**
 * Clear after data has reached HDFS: a batch interval of 1 flushes quickly,
 * and the test waits for HDFS files to appear before clearing.
 */
public void testClearRegionDataInHDFS() throws Throwable {
  doTestClearRegion(1, true);
}
/**
 * Drives the clear scenario: create two HDFS-backed server regions, put
 * entries, optionally wait for the data to reach HDFS, simulate a
 * gemfirexd-style clear, and verify both members and HDFS are empty — then
 * restart both members and verify the clear survived recovery.
 *
 * @param batchInterval async-queue batch interval; a large value keeps
 *          entries sitting in the queue, a small one pushes them to HDFS
 * @param waitForWriteToHDFS whether to wait for HDFS files before clearing
 */
public void doTestClearRegion(int batchInterval, boolean waitForWriteToHDFS) throws Throwable {
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  final int numEntries = 400;
  String name = getName();
  final String folderPath = "./" + name;
  // Create some regions. Note that we want a large batch interval
  // so that we will have some entries sitting in the queue when
  // we do a clear.
  final String uniqueName = name;
  createServerRegion(vm0, 7, 31, 40, folderPath, uniqueName, batchInterval,
      false, true);
  createServerRegion(vm1, 7, 31, 40, folderPath, uniqueName, batchInterval,
      false, true);
  doPuts(vm0, uniqueName, numEntries);
  // Make sure some files have been written to hdfs.
  if (waitForWriteToHDFS) {
    verifyDataInHDFS(vm0, uniqueName, true, true, waitForWriteToHDFS, numEntries);
  }
  // Do a clear
  simulateClear(uniqueName, vm0, vm1);
  validateEmpty(vm0, numEntries, uniqueName);
  validateEmpty(vm1, numEntries, uniqueName);
  // Double check that there is no data in hdfs now
  verifyDataInHDFS(vm0, uniqueName, false, false, waitForWriteToHDFS, numEntries);
  verifyDataInHDFS(vm1, uniqueName, false, false, waitForWriteToHDFS, numEntries);
  closeCache(vm0);
  closeCache(vm1);
  // Restart both members in parallel and confirm the region is still empty
  // after recovery from HDFS.
  AsyncInvocation async0 = createServerRegionAsync(vm0, 7, 31, 200, folderPath,
      uniqueName, 100000, false, true);
  AsyncInvocation async1 = createServerRegionAsync(vm1, 7, 31, 200, folderPath,
      uniqueName, 100000, false, true);
  async0.getResult();
  async1.getResult();
  validateEmpty(vm0, numEntries, uniqueName);
  validateEmpty(vm1, numEntries, uniqueName);
}
/**
 * Clears all primary buckets on each of the given VMs concurrently, with the
 * test-only clear simulation flag enabled for the duration.
 */
private void simulateClear(final String name, VM... vms) throws Throwable {
  simulateClearForTests(true);
  try {
    // Gemfire PRs don't support clear.
    // gemfirexd clears by taking gemfirexd ddl locks and then clearing each
    // primary bucket on the primary; emulate that here by clearing all local
    // primaries on every VM. See GemFireContainer.clear.
    SerializableCallable clearPrimaries = new SerializableCallable("clear") {
      public Object call() throws Exception {
        ((PartitionedRegion) getRootRegion(name)).clearLocalPrimaries();
        return null;
      }
    };
    // Kick off the clears concurrently, then collect all results.
    AsyncInvocation[] pending = new AsyncInvocation[vms.length];
    for (int idx = 0; idx < vms.length; idx++) {
      pending[idx] = vms[idx].invokeAsync(clearPrimaries);
    }
    for (AsyncInvocation invocation : pending) {
      invocation.getResult();
    }
  } finally {
    simulateClearForTests(false);
  }
}
/**
 * Toggles LocalRegion's test-only clear-simulation flag in this VM and in
 * every remote VM of the DUnit host.
 *
 * @param isGfxd true to enable the gemfirexd-style clear simulation
 */
protected void simulateClearForTests(final boolean isGfxd) {
  SerializableRunnable setGfxd = new SerializableRunnable() {
    @Override
    public void run() {
      // The previous if/else branches called the same method with literal
      // true/false; pass the flag straight through instead.
      LocalRegion.simulateClearForTests(isGfxd);
    }
  };
  setGfxd.run();
  invokeInEveryVM(setGfxd);
}
/**
 * Test that we can locally destroy a member, without causing problems with
 * the data in HDFS. This was disabled due to ticket 47793.
 *
 * @throws InterruptedException
 */
public void testLocalDestroy() throws InterruptedException {
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  int numEntries = 200;
  final String folderPath = "./testLocalDestroy";
  final String uniqueName = "testLocalDestroy";
  createServerRegion(vm0, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  createServerRegion(vm1, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  doPuts(vm0, uniqueName, numEntries);
  // Make sure some files have been written to hdfs and wait for
  // the queue to drain.
  verifyDataInHDFS(vm0, uniqueName, true, true, true, numEntries);
  validate(vm0, uniqueName, numEntries);
  SerializableCallable localDestroy = new SerializableCallable("local destroy") {
    public Object call() throws Exception {
      Region r = getRootRegion(uniqueName);
      r.localDestroyRegion();
      return null;
    }
  };
  // Local destroy on vm0 must not remove HDFS data; vm1 still sees all entries.
  vm0.invoke(localDestroy);
  verifyNoQOrPR(vm0);
  validate(vm1, uniqueName, numEntries);
  vm1.invoke(localDestroy);
  verifyNoQOrPR(vm1);
  closeCache(vm0);
  closeCache(vm1);
  // Restart vm0 and see if the data is still available from HDFS
  createServerRegion(vm0, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  validate(vm0, uniqueName, numEntries);
}
/**
 * Test that doing a destroyRegion removes all data from HDFS
 * (data has already been flushed to HDFS before the destroy).
 *
 * @throws InterruptedException
 */
public void testGlobalDestroyWithHDFSData() throws InterruptedException {
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  final String folderPath = "./testGlobalDestroyWithHDFSData";
  final String uniqueName = "testGlobalDestroyWithHDFSData";
  int numEntries = 200;
  createServerRegion(vm0, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  createServerRegion(vm1, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  doPuts(vm0, uniqueName, numEntries);
  // Make sure some files have been written to hdfs.
  verifyDataInHDFS(vm0, uniqueName, true, true, false, numEntries);
  SerializableCallable globalDestroy = new SerializableCallable("destroy") {
    public Object call() throws Exception {
      Region r = getRootRegion(uniqueName);
      r.destroyRegion();
      return null;
    }
  };
  vm0.invoke(globalDestroy);
  // make sure data is not in HDFS: the destroy must remove queues, PRs and
  // HDFS files on both members, not just the one that issued it.
  verifyNoQOrPR(vm0);
  verifyNoQOrPR(vm1);
  verifyNoHDFSData(vm0, uniqueName);
  verifyNoHDFSData(vm1, uniqueName);
  closeCache(vm0);
  closeCache(vm1);
  // Restart vm0 and make sure it's still empty
  createServerRegion(vm0, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  createServerRegion(vm1, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  // make sure it's empty
  validateEmpty(vm0, numEntries, uniqueName);
  validateEmpty(vm1, numEntries, uniqueName);
}
/**
 * Test that doing a destroyRegion removes all data from HDFS
 * (here the data is still sitting in the async queue when the destroy runs).
 */
public void testGlobalDestroyWithQueueData() {
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  final String folderPath = "./testGlobalDestroyWithQueueData";
  final String uniqueName = "testGlobalDestroyWithQueueData";
  int numEntries = 200;
  // set a large queue timeout so that data is still in the queue
  createServerRegion(vm0, 7, 31, 40, folderPath, uniqueName, 10000, false,
      true);
  createServerRegion(vm1, 7, 31, 40, folderPath, uniqueName, 10000, false,
      true);
  doPuts(vm0, uniqueName, numEntries);
  SerializableCallable globalDestroy = new SerializableCallable("destroy") {
    public Object call() throws Exception {
      Region r = getRootRegion(uniqueName);
      r.destroyRegion();
      return null;
    }
  };
  vm0.invoke(globalDestroy);
  // make sure data is not in HDFS
  verifyNoQOrPR(vm0);
  verifyNoQOrPR(vm1);
  verifyNoHDFSData(vm0, uniqueName);
  verifyNoHDFSData(vm1, uniqueName);
  closeCache(vm0);
  closeCache(vm1);
  // Restart vm0 and make sure it's still empty
  createServerRegion(vm0, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  createServerRegion(vm1, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  // make sure it's empty
  validateEmpty(vm0, numEntries, uniqueName);
  validateEmpty(vm1, numEntries, uniqueName);
}
/**
 * Make sure all async event queues and PRs are destroyed in a member.
 */
public void verifyNoQOrPR(VM vm) {
  vm.invoke(new SerializableRunnable() {
    @Override
    public void run() {
      GemFireCacheImpl cache = (GemFireCacheImpl) getCache();
      // Both collections must be empty after a (global or local) destroy.
      assertEquals(Collections.EMPTY_SET, cache.getAsyncEventQueues());
      assertEquals(Collections.EMPTY_SET, cache.getPartitionedRegions());
    }
  });
}
/**
 * Make sure all of the data for a region in HDFS is destroyed:
 * the region's directory under the store's home dir must not exist.
 */
public void verifyNoHDFSData(final VM vm, final String uniqueName) {
  vm.invoke(new SerializableCallable() {
    @Override
    public Object call() throws IOException {
      HDFSStoreImpl hdfsStore = (HDFSStoreImpl) ((GemFireCacheImpl)getCache()).findHDFSStore(uniqueName);
      FileSystem fs = hdfsStore.getFileSystem();
      Path path = new Path(hdfsStore.getHomeDir(), uniqueName);
      if (fs.exists(path)) {
        // Dump the leftover files into the log before failing, for diagnosis.
        dumpFiles(vm, uniqueName);
        fail("Found files in " + path);
      }
      return null;
    }
  });
}
/**
 * Asynchronously puts keys K&lt;start&gt;..K&lt;end-1&gt; using the default value
 * prefix "V"; see the six-argument overload for details.
 */
protected AsyncInvocation doAsyncPuts(VM vm, final String regionName,
    final int start, final int end, final String suffix) throws Exception {
  return doAsyncPuts(vm, regionName, start, end, suffix, "");
}
/**
 * Asynchronously puts keys K&lt;start&gt;..K&lt;end-1&gt; into the region, each mapped
 * to &lt;value&gt;&lt;i&gt;&lt;suffix&gt;. An empty {@code value} falls back to the prefix "V".
 */
protected AsyncInvocation doAsyncPuts(VM vm, final String regionName,
    final int start, final int end, final String suffix, final String value)
    throws Exception {
  return vm.invokeAsync(new SerializableCallable() {
    public Object call() throws Exception {
      Region r = getRootRegion(regionName);
      // Use the caller-supplied prefix when present, otherwise default to "V".
      final String prefix = value.equals("") ? "V" : value;
      logger.info("Putting entries ");
      for (int key = start; key < end; key++) {
        r.put("K" + key, prefix + key + suffix);
      }
      return null;
    }
  });
}
/**
 * Verifies that a global destroyRegion issued from an accessor member (no
 * local data) still removes all queues, PRs, and HDFS data on the datastores.
 */
public void testGlobalDestroyFromAccessor() {
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);
  final String folderPath = "./testGlobalDestroyFromAccessor";
  final String uniqueName = "testGlobalDestroyFromAccessor";
  int numEntries = 200;
  createServerRegion(vm0, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  createServerRegion(vm1, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  // vm2 hosts no buckets (localMaxMemory == 0).
  createServerAccessor(vm2, 7, 40, uniqueName);
  doPuts(vm0, uniqueName, numEntries);
  // Make sure some files have been written to hdfs.
  verifyDataInHDFS(vm0, uniqueName, true, true, false, numEntries);
  SerializableCallable globalDestroy = new SerializableCallable("destroy") {
    public Object call() throws Exception {
      Region r = getRootRegion(uniqueName);
      r.destroyRegion();
      return null;
    }
  };
  // Destroy the region from an accessor
  vm2.invoke(globalDestroy);
  // make sure data is not in HDFS
  verifyNoQOrPR(vm0);
  verifyNoQOrPR(vm1);
  verifyNoHDFSData(vm0, uniqueName);
  verifyNoHDFSData(vm1, uniqueName);
  closeCache(vm0);
  closeCache(vm1);
  closeCache(vm2);
  // Restart vm0 and make sure it's still empty
  createServerRegion(vm0, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  createServerRegion(vm1, 7, 31, 40, folderPath, uniqueName, 1, false, true);
  // make sure it's empty
  validateEmpty(vm0, numEntries, uniqueName);
  validateEmpty(vm1, numEntries, uniqueName);
}
/**
 * create a server with maxfilesize as 2 MB. Insert 4 entries of 1 MB each.
 * There should be 2 files with 2 entries each.
 *
 * @throws Throwable
 */
public void testWOFileSizeParam() throws Throwable {
  disconnectFromDS();
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  String homeDir = "./testWOFileSizeParam";
  final String uniqueName = getName();
  String value = "V";
  // Doubling 20 times grows the value to 2^20 chars (~1 MB per entry).
  for (int i = 0; i < 20; i++) {
    value += value;
  }
  createServerRegion(vm0, 1, 1, 500, homeDir, uniqueName, 5, true, false, 2000, 2);
  createServerRegion(vm1, 1, 1, 500, homeDir, uniqueName, 5, true, false, 2000, 2);
  // Ranges overlap deliberately: keys K2/K3 are written by both VMs.
  AsyncInvocation a1 = doAsyncPuts(vm0, uniqueName, 1, 3, "vm0", value);
  AsyncInvocation a2 = doAsyncPuts(vm1, uniqueName, 2, 4, "vm1", value);
  // NOTE(review): join() waits but may not surface remote failures the way
  // getResult() would — confirm this is intentional.
  a1.join();
  a2.join();
  Thread.sleep(4000);
  cacheClose(vm0, false);
  cacheClose(vm1, false);
  // Start the VMs in parallel for the persistent version subclass
  AsyncInvocation async1 = createServerRegionAsync(vm0, 1, 1, 500, homeDir, uniqueName, 5, true, false, 2000, 2);
  AsyncInvocation async2 = createServerRegionAsync(vm1, 1, 1, 500, homeDir, uniqueName, 5, true, false, 2000, 2);
  async1.getResult();
  async2.getResult();
  // There should be two files in bucket 0.
  verifyTwoHDFSFilesWithTwoEntries(vm0, uniqueName, value);
  cacheClose(vm0, false);
  cacheClose(vm1, false);
  disconnectFromDS();
}
/**
 * create server with file rollover time as 2 secs. Insert few entries and
 * then sleep for 2 sec. A file should be created. Do it again. At the end,
 * two files with the inserted entries should have been created.
 *
 * @throws Throwable
 */
public void testWOTimeForRollOverParam() throws Throwable {
  disconnectFromDS();
  Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  String homeDir = "./testWOTimeForRollOverParam";
  final String uniqueName = getName();
  createServerRegion(vm0, 1, 1, 500, homeDir, uniqueName, 5, true, false, 4, 1);
  createServerRegion(vm1, 1, 1, 500, homeDir, uniqueName, 5, true, false, 4, 1);
  // First batch; ranges overlap so keys 4..7 are written from both VMs.
  AsyncInvocation a1 = doAsyncPuts(vm0, uniqueName, 1, 8, "vm0");
  AsyncInvocation a2 = doAsyncPuts(vm1, uniqueName, 4, 10, "vm1");
  a1.join();
  a2.join();
  // Sleep past the rollover time so the first hoplog file is closed.
  Thread.sleep(8000);
  a1 = doAsyncPuts(vm0, uniqueName, 10, 18, "vm0");
  a2 = doAsyncPuts(vm1, uniqueName, 14, 20, "vm1");
  a1.join();
  a2.join();
  Thread.sleep(8000);
  cacheClose(vm0, false);
  cacheClose(vm1, false);
  AsyncInvocation async1 = createServerRegionAsync(vm0, 1, 1, 500, homeDir, uniqueName, 5, true, false, 4, 1);
  AsyncInvocation async2 = createServerRegionAsync(vm1, 1, 1, 500, homeDir, uniqueName, 5, true, false, 4, 1);
  async1.getResult();
  async2.getResult();
  // There should be two files in bucket 0.
  // Each should have entry 1 to 10 and duplicate from 4 to 7
  verifyTwoHDFSFiles(vm0, uniqueName);
  cacheClose(vm0, false);
  cacheClose(vm1, false);
  disconnectFromDS();
}
/**
 * Creates an HDFS_PARTITION region on the given VM configured as a pure
 * accessor: localMaxMemory is 0, so the member hosts no buckets.
 *
 * @param totalnumOfBuckets total bucket count for the PR
 * @param maximumEntries LRU entry limit for the eviction attributes
 * @param uniqueName root region name
 */
private void createServerAccessor(VM vm, final int totalnumOfBuckets,
    final int maximumEntries, final String uniqueName) {
  SerializableCallable createRegion = new SerializableCallable() {
    public Object call() throws Exception {
      AttributesFactory af = new AttributesFactory();
      af.setDataPolicy(DataPolicy.HDFS_PARTITION);
      PartitionAttributesFactory paf = new PartitionAttributesFactory();
      paf.setTotalNumBuckets(totalnumOfBuckets);
      paf.setRedundantCopies(1);
      // make this member an accessor.
      paf.setLocalMaxMemory(0);
      af.setEvictionAttributes(EvictionAttributes.createLRUEntryAttributes(maximumEntries, EvictionAction.LOCAL_DESTROY));
      af.setPartitionAttributes(paf.create());
      Region r = createRootRegion(uniqueName, af.create());
      // Sanity check: an accessor must not be a data store.
      assertTrue(!((PartitionedRegion) r).isDataStore());
      return null;
    }
  };
  vm.invoke(createRegion);
}
@Override
protected void verifyHDFSData(VM vm, String uniqueName) throws Exception {
  // Flatten all per-file entry maps into one map and check that every
  // expected key range (written by vm0..vm3) is present.
  HashMap<String, HashMap<String, String>> filesToEntriesMap = createFilesAndEntriesMap(vm, uniqueName, uniqueName);
  HashMap<String, String> entriesMap = new HashMap<String, String>();
  for (HashMap<String, String> v : filesToEntriesMap.values()) {
    entriesMap.putAll(v);
  }
  verifyInEntriesMap(entriesMap, 1, 50, "vm0");
  verifyInEntriesMap(entriesMap, 40, 100, "vm1");
  verifyInEntriesMap(entriesMap, 40, 100, "vm2");
  verifyInEntriesMap(entriesMap, 90, 150, "vm3");
}
/**
 * Asserts that exactly two hoplog files exist and that the older one (by the
 * timestamp embedded in its file name) holds the first batch of puts while
 * the newer one holds the second batch.
 */
protected void verifyTwoHDFSFiles(VM vm, String uniqueName) throws Exception {
  HashMap<String, HashMap<String, String>> filesToEntriesMap = createFilesAndEntriesMap(vm, uniqueName, uniqueName);
  assertTrue("there should be exactly two files, but there are "
      + filesToEntriesMap.size(), filesToEntriesMap.size() == 2);
  long timestamp = Long.MAX_VALUE;
  String olderFile = null;
  for (Map.Entry<String, HashMap<String, String>> e : filesToEntriesMap
      .entrySet()) {
    // File names look like <prefix>-<timestamp>-<seq><ext>; strip the
    // extension, then parse the timestamp between the two dashes.
    String fileName = e.getKey().substring(
        0,
        e.getKey().length()
            - AbstractHoplogOrganizer.SEQ_HOPLOG_EXTENSION.length());
    long newTimeStamp = Long.parseLong(fileName.substring(
        fileName.indexOf("-") + 1, fileName.lastIndexOf("-")));
    if (newTimeStamp < timestamp) {
      olderFile = e.getKey();
      timestamp = newTimeStamp;
    }
  }
  // Older file: first round of puts; remaining file: second round.
  verifyInEntriesMap(filesToEntriesMap.get(olderFile), 1, 8, "vm0");
  verifyInEntriesMap(filesToEntriesMap.get(olderFile), 4, 10, "vm1");
  filesToEntriesMap.remove(olderFile);
  verifyInEntriesMap(filesToEntriesMap.values().iterator().next(), 10, 18, "vm0");
  verifyInEntriesMap(filesToEntriesMap.values().iterator().next(), 14, 20, "vm1");
}
/**
 * Asserts that exactly two hoplog files exist and that, across both files,
 * the four expected value→key pairs from testWOFileSizeParam are present.
 * Note: the entries map is keyed by value, mapping back to the region key.
 */
protected void verifyTwoHDFSFilesWithTwoEntries(VM vm, String uniqueName,
    String value) throws Exception {
  HashMap<String, HashMap<String, String>> filesToEntriesMap = createFilesAndEntriesMap(vm, uniqueName, uniqueName);
  assertTrue( "there should be exactly two files, but there are " + filesToEntriesMap.size(), filesToEntriesMap.size() == 2);
  HashMap<String, String> entriesMap = new HashMap<String, String>();
  for (HashMap<String, String> v : filesToEntriesMap.values()) {
    entriesMap.putAll(v);
  }
  assertTrue( "Expected key K1 received " + entriesMap.get(value+ "1vm0"), entriesMap.get(value+ "1vm0").equals("K1"));
  assertTrue( "Expected key K2 received " + entriesMap.get(value+ "2vm0"), entriesMap.get(value+ "2vm0").equals("K2"));
  assertTrue( "Expected key K2 received " + entriesMap.get(value+ "2vm1"), entriesMap.get(value+ "2vm1").equals("K2"));
  assertTrue( "Expected key K3 received " + entriesMap.get(value+ "3vm1"), entriesMap.get(value+ "3vm1").equals("K3"));
}
/**
 * verify that a PR accessor can be started: a plain PARTITION region with
 * localMaxMemory 0 is allowed when an hdfsStoreName is set, and an
 * HDFS_PARTITION accessor can always be created.
 */
public void testPRAccessor() {
  Host host = Host.getHost(0);
  VM accessor = host.getVM(0);
  VM datastore1 = host.getVM(1);
  VM datastore2 = host.getVM(2);
  VM accessor2 = host.getVM(3);
  final String regionName = getName();
  final String storeName = "store_" + regionName;
  // Datastore variant: HDFS_PARTITION region that actually hosts data.
  SerializableCallable createRegion = new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      HDFSStoreFactory storefactory = getCache().createHDFSStoreFactory();
      homeDir = new File("../" + regionName).getCanonicalPath();
      storefactory.setHomeDir(homeDir);
      storefactory.create(storeName);
      AttributesFactory<Integer, String> af = new AttributesFactory<Integer, String>();
      af.setDataPolicy(DataPolicy.HDFS_PARTITION);
      af.setHDFSStoreName(storeName);
      Region r = getCache().createRegionFactory(af.create()).create(regionName);
      r.put("key1", "value1");
      return null;
    }
  };
  // Accessor variant: exercises the creation rules for localMaxMemory == 0.
  SerializableCallable createAccessorRegion = new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      HDFSStoreFactory storefactory = getCache().createHDFSStoreFactory();
      homeDir = new File("../" + regionName).getCanonicalPath();
      storefactory.setHomeDir(homeDir);
      storefactory.create(storeName);
      // DataPolicy PARTITION with localMaxMemory 0 cannot be created
      AttributesFactory<Integer, String> af = new AttributesFactory<Integer, String>();
      af.setDataPolicy(DataPolicy.PARTITION);
      PartitionAttributesFactory<Integer, String> paf = new PartitionAttributesFactory<Integer, String>();
      paf.setLocalMaxMemory(0);
      af.setPartitionAttributes(paf.create());
      // DataPolicy PARTITION with localMaxMemory 0 can be created if hdfsStoreName is set
      af.setHDFSStoreName(storeName);
      // No need to check with different storeNames (can never be done in GemFireXD)
      Region r = getCache().createRegionFactory(af.create()).create(regionName);
      r.localDestroyRegion();
      // DataPolicy HDFS_PARTITION with localMaxMemory 0 can be created
      af = new AttributesFactory<Integer, String>();
      af.setDataPolicy(DataPolicy.HDFS_PARTITION);
      af.setPartitionAttributes(paf.create());
      getCache().createRegionFactory(af.create()).create(regionName);
      return null;
    }
  };
  // Interleave datastore and accessor startup to cover both orderings.
  datastore1.invoke(createRegion);
  accessor.invoke(createAccessorRegion);
  datastore2.invoke(createRegion);
  accessor2.invoke(createAccessorRegion);
}
/**
 * verify that PUT dml does not read from hdfs (single-put code path).
 */
public void testPUTDMLSupport() {
  doPUTDMLWork(false);
}
/** Same as {@link #testPUTDMLSupport} but exercising the putAll code path. */
public void testPUTDMLBulkSupport() {
  doPUTDMLWork(true);
}
/**
 * Verifies that PUT-DML style operations do not read existing values from
 * HDFS: regular puts/putAlls drive the HDFS read stat up to 100, while the
 * PUT-DML variants (newPutEntryEvent / newPutAllForPUTDmlOperation) leave it
 * unchanged. Finally confirms all written values are readable.
 *
 * @param isPutAll true to use the bulk putAll path, false for single puts
 */
private void doPUTDMLWork(final boolean isPutAll) {
  Host host = Host.getHost(0);
  VM vm1 = host.getVM(0);
  VM vm2 = host.getVM(1);
  final String regionName = getName();
  createServerRegion(vm1, 7, 1, 50, "./" + regionName, regionName, 1000);
  createServerRegion(vm2, 7, 1, 50, "./" + regionName, regionName, 1000);
  vm1.invoke(new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      Region r = getCache().getRegion(regionName);
      LocalRegion lr = (LocalRegion) r;
      SortedOplogStatistics stats = HDFSRegionDirector.getInstance().getHdfsRegionStats("/" + regionName);
      // Baseline: nothing has been read from HDFS yet.
      long readsFromHDFS = stats.getRead().getCount();
      assertEquals(0, readsFromHDFS);
      if (isPutAll) {
        Map m = new HashMap();
        // map with only one entry
        m.put("key0", "value0");
        DistributedPutAllOperation ev = lr.newPutAllOperation(m, null);
        lr.basicPutAll(m, ev, null);
        m.clear();
        // map with multiple entries
        for (int i = 1; i < 100; i++) {
          m.put("key" + i, "value" + i);
        }
        ev = lr.newPutAllOperation(m, null);
        lr.basicPutAll(m, ev, null);
      } else {
        for (int i = 0; i < 100; i++) {
          r.put("key" + i, "value" + i);
        }
      }
      return null;
    }
  });
  SerializableCallable getHDFSReadCount = new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      SortedOplogStatistics stats = HDFSRegionDirector.getInstance().getHdfsRegionStats("/" + regionName);
      return stats.getRead().getCount();
    }
  };
  // Regular puts drove exactly 100 HDFS reads across the two members.
  long vm1Count = (Long) vm1.invoke(getHDFSReadCount);
  long vm2Count = (Long) vm2.invoke(getHDFSReadCount);
  assertEquals(100, vm1Count + vm2Count);
  pause(10 * 1000);
  vm1.invoke(new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      // do puts using the new api
      LocalRegion lr = (LocalRegion) getCache().getRegion(regionName);
      if (isPutAll) {
        Map m = new HashMap();
        // map with only one entry
        m.put("key0", "value0");
        DistributedPutAllOperation ev = lr.newPutAllForPUTDmlOperation(m, null);
        lr.basicPutAll(m, ev, null);
        m.clear();
        // map with multiple entries
        for (int i = 1; i < 200; i++) {
          m.put("key" + i, "value" + i);
        }
        ev = lr.newPutAllForPUTDmlOperation(m, null);
        lr.basicPutAll(m, ev, null);
      } else {
        for (int i = 0; i < 200; i++) {
          EntryEventImpl ev = lr.newPutEntryEvent("key" + i, "value" + i, null);
          lr.validatedPut(ev, System.currentTimeMillis());
        }
      }
      return null;
    }
  });
  // verify the stat for hdfs reads has not incremented
  vm1Count = (Long) vm1.invoke(getHDFSReadCount);
  vm2Count = (Long) vm2.invoke(getHDFSReadCount);
  assertEquals(100, vm1Count + vm2Count);
  // All 200 values written through either path must be readable.
  vm1.invoke(new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      Region r = getCache().getRegion(regionName);
      for (int i = 0; i < 200; i++) {
        assertEquals("value" + i, r.get("key" + i));
      }
      return null;
    }
  });
}
/**
 * verify that get on operational data does not read from HDFS: reads of
 * evicted entries increment the HDFS read stat, while reads of in-memory
 * entries — and gets invoked with allowReadFromHDFS=false — do not.
 */
public void testGetOperationalData() {
  Host host = Host.getHost(0);
  VM vm1 = host.getVM(0);
  VM vm2 = host.getVM(1);
  final String regionName = getName();
  // LRU limit of 50 entries, so most of the 100 puts are evicted to HDFS.
  createServerRegion(vm1, 7, 1, 50, "./"+regionName, regionName, 1000);
  createServerRegion(vm2, 7, 1, 50, "./"+regionName, regionName, 1000);
  vm1.invoke(new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      Region r = getCache().getRegion(regionName);
      SortedOplogStatistics stats = HDFSRegionDirector.getInstance().getHdfsRegionStats("/" + regionName);
      // Baseline: no HDFS reads before any activity.
      long readsFromHDFS = stats.getRead().getCount();
      assertEquals(0, readsFromHDFS);
      for (int i = 0; i < 100; i++) {
        logger.info("SWAP:DOING PUT:key{}", i);
        r.put("key" + i, "value" + i);
      }
      return null;
    }
  });
  SerializableCallable getHDFSReadCount = new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      SortedOplogStatistics stats = HDFSRegionDirector.getInstance().getHdfsRegionStats("/" + regionName);
      return stats.getRead().getCount();
    }
  };
  long vm1Count = (Long) vm1.invoke(getHDFSReadCount);
  long vm2Count = (Long) vm2.invoke(getHDFSReadCount);
  assertEquals(100, vm1Count + vm2Count);
  pause(10 * 1000);
  // verify that get increments the read stat
  vm1.invoke(new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      Region r = getCache().getRegion(regionName);
      for (int i = 0; i < 200; i++) {
        if (i < 100) {
          // Fix: the format string was missing its {} placeholder, so the
          // key index was silently dropped from the log message.
          logger.info("SWAP:DOING GET:key{}", i);
          assertEquals("value" + i, r.get("key" + i));
        } else {
          // Keys 100..199 were never written.
          assertNull(r.get("key" + i));
        }
      }
      return null;
    }
  });
  vm1Count = (Long) vm1.invoke(getHDFSReadCount);
  vm2Count = (Long) vm2.invoke(getHDFSReadCount);
  // initial 100 + 150 for get (since 50 are in memory)
  assertEquals(250, vm1Count + vm2Count);
  // do gets with readFromHDFS set to false
  vm1.invoke(new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      Region r = getCache().getRegion(regionName);
      LocalRegion lr = (LocalRegion) r;
      int numEntries = 0;
      for (int i = 0; i < 200; i++) {
        // Fix: same missing {} placeholder as above.
        logger.info("SWAP:DOING GET NO READ:key{}", i);
        Object val = lr.get("key"+i, null, true, false, false, null, null, false, false/*allowReadFromHDFS*/);
        if (val != null) {
          numEntries++;
        }
      }
      assertEquals(50, numEntries); // entries in memory
      return null;
    }
  });
  vm1Count = (Long) vm1.invoke(getHDFSReadCount);
  vm2Count = (Long) vm2.invoke(getHDFSReadCount);
  // get should not have incremented
  assertEquals(250, vm1Count + vm2Count);
  /**MergeGemXDHDFSToGFE Have not merged this API as this api is not called by any code*/
  /*
  // do gets using DataView
  SerializableCallable getUsingDataView = new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      Region r = getCache().getRegion(regionName);
      LocalRegion lr = (LocalRegion) r;
      PartitionedRegion pr = (PartitionedRegion) lr;
      long numEntries = 0;
      for (int i=0; i<200; i++) {
        InternalDataView idv = lr.getDataView();
        logger.debug("SWAP:DATAVIEW");
        Object val = idv.getLocally("key"+i, null, PartitionedRegionHelper.getHashKey(pr, "key"+i), lr, true, true, null, null, false, false);
        if (val != null) {
          numEntries++;
        }
      }
      return numEntries;
    }
  };
  vm1Count = (Long) vm1.invoke(getUsingDataView);
  vm2Count = (Long) vm2.invoke(getUsingDataView);
  assertEquals(50 * 2, vm1Count + vm2Count);// both VMs will find 50 entries*/
  vm1Count = (Long) vm1.invoke(getHDFSReadCount);
  vm2Count = (Long) vm2.invoke(getHDFSReadCount);
  // get should not have incremented
  assertEquals(250, vm1Count + vm2Count);
}
/**
 * Verifies that LocalRegion.sizeEstimate() is within 20% of the true entry
 * count when queried from a member other than the one that did the puts.
 */
public void testSizeEstimate() {
  Host host = Host.getHost(0);
  VM vm1 = host.getVM(0);
  VM vm2 = host.getVM(1);
  VM vm3 = host.getVM(2);
  final String regionName = getName();
  createServerRegion(vm1, 7, 1, 50, "./"+regionName, regionName, 1000);
  createServerRegion(vm2, 7, 1, 50, "./"+regionName, regionName, 1000);
  createServerRegion(vm3, 7, 1, 50, "./"+regionName, regionName, 1000);
  final int size = 226;
  vm1.invoke(new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      Region r = getCache().getRegion(regionName);
      // LocalRegion lr = (LocalRegion) r;
      for (int i = 0; i < size; i++) {
        r.put("key" + i, "value" + i);
      }
      // before flush
      // assertEquals(size, lr.sizeEstimate());
      return null;
    }
  });
  // Give the async queue time to flush before estimating from another member.
  pause(10 * 1000);
  vm2.invoke(new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      Region r = getCache().getRegion(regionName);
      LocalRegion lr = (LocalRegion) r;
      logger.debug("SWAP:callingsizeEstimate");
      long estimate = lr.sizeEstimate();
      // Accept up to 20% relative error in the estimate.
      double err = Math.abs(estimate - size) / (double) size;
      System.out.println("SWAP:estimate:" + estimate);
      assertTrue(err < 0.2);
      return null;
    }
  });
}
/** Forces a major compaction asynchronously (maxWaitTime > 0). */
public void testForceAsyncMajorCompaction() throws Exception {
  doForceCompactionTest(true, false);
}
/** Forces a major compaction synchronously (maxWaitTime == 0). */
public void testForceSyncMajorCompaction() throws Exception {
  doForceCompactionTest(true, true);
}
/**
 * Drives a forced HDFS compaction: seeds the region with multiple flushed
 * batches, invokes forceHDFSCompaction on the PR, and verifies the compaction
 * stat and (for major compactions) the lastMajorHDFSCompaction timestamp.
 *
 * @param isMajor true for major compaction stats, false for minor
 * @param isSynchronous true to wait inline (maxWaitTime 0), false to poll
 */
private void doForceCompactionTest(final boolean isMajor, final boolean isSynchronous) throws Exception {
  Host host = Host.getHost(0);
  VM vm1 = host.getVM(0);
  VM vm2 = host.getVM(1);
  VM vm3 = host.getVM(2);
  final String regionName = getName();
  createServerRegion(vm1, 7, 1, 50, "./" + regionName, regionName, 1000);
  createServerRegion(vm2, 7, 1, 50, "./" + regionName, regionName, 1000);
  createServerRegion(vm3, 7, 1, 50, "./" + regionName, regionName, 1000);
  // Precondition: no compaction has happened yet on any member.
  SerializableCallable noCompaction = new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      SortedOplogStatistics stats = HDFSRegionDirector.getInstance().getHdfsRegionStats("/" + regionName);
      if (isMajor) {
        assertEquals(0, stats.getMajorCompaction().getCount());
      } else {
        assertEquals(0, stats.getMinorCompaction().getCount());
      }
      return null;
    }
  };
  vm1.invoke(noCompaction);
  vm2.invoke(noCompaction);
  vm3.invoke(noCompaction);
  vm1.invoke(new SerializableCallable() {
    @Override
    public Object call() throws Exception {
      Region r = getCache().getRegion(regionName);
      for (int i = 0; i < 500; i++) {
        r.put("key" + i, "value" + i);
        if (i % 100 == 0) {
          // wait for flush
          pause(3000);
        }
      }
      pause(3000);
      PartitionedRegion pr = (PartitionedRegion) r;
      // No major compaction recorded before we force one.
      long lastCompactionTS = pr.lastMajorHDFSCompaction();
      assertEquals(0, lastCompactionTS);
      long beforeCompact = System.currentTimeMillis();
      // maxWaitTime 0 means wait inline for completion; 1 returns immediately.
      pr.forceHDFSCompaction(true, isSynchronous ? 0 : 1);
      if (isSynchronous) {
        final SortedOplogStatistics stats = HDFSRegionDirector.getInstance().getHdfsRegionStats("/" + regionName);
        assertTrue(stats.getMajorCompaction().getCount() > 0);
        // The recorded compaction time must be at least our start time.
        assertTrue(pr.lastMajorHDFSCompaction() >= beforeCompact);
      }
      return null;
    }
  });
  if (!isSynchronous) {
    // Async: poll each member until the compaction stat ticks up.
    SerializableCallable verifyCompactionStat = new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        final SortedOplogStatistics stats = HDFSRegionDirector.getInstance().getHdfsRegionStats("/" + regionName);
        waitForCriterion(new WaitCriterion() {
          @Override
          public boolean done() {
            return stats.getMajorCompaction().getCount() > 0;
          }
          @Override
          public String description() {
            return "Major compaction stat not > 0";
          }
        }, 30 * 1000, 1000, true);
        return null;
      }
    };
    vm1.invoke(verifyCompactionStat);
    vm2.invoke(verifyCompactionStat);
    vm3.invoke(verifyCompactionStat);
  } else {
    // Sync: vm1 already verified inline; check the remaining members.
    SerializableCallable verifyCompactionStat = new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        final SortedOplogStatistics stats = HDFSRegionDirector.getInstance().getHdfsRegionStats("/" + regionName);
        assertTrue(stats.getMajorCompaction().getCount() > 0);
        return null;
      }
    };
    vm2.invoke(verifyCompactionStat);
    vm3.invoke(verifyCompactionStat);
  }
}
/** Verifies the HDFS event queue drains after an explicit flush (read/write region). */
public void testFlushQueue() throws Exception {
    doFlushQueue(false);
}

/** Same as {@link #testFlushQueue()} but with the region created HDFS write-only. */
public void testFlushQueueWO() throws Exception {
    doFlushQueue(true);
}
/**
 * Puts 500 entries on one member, forces the HDFS async queue to flush,
 * then asserts the queue reports empty on all three members.
 *
 * @param wo true to create the regions as HDFS write-only
 */
private void doFlushQueue(boolean wo) throws Exception {
    Host host = Host.getHost(0);
    VM vm1 = host.getVM(0);
    VM vm2 = host.getVM(1);
    VM vm3 = host.getVM(2);

    final String regionName = getName();

    // 300000 is presumably the batch time interval in ms (TODO confirm against
    // createServerRegion) — large enough that nothing flushes on its own, so the
    // test exercises the explicit flushHDFSQueue() call below.
    createServerRegion(vm1, 7, 1, 50, "./"+regionName, regionName, 300000, wo, false);
    createServerRegion(vm2, 7, 1, 50, "./"+regionName, regionName, 300000, wo, false);
    createServerRegion(vm3, 7, 1, 50, "./"+regionName, regionName, 300000, wo, false);

    // Load data on vm1 only, then block until the queue has drained to HDFS.
    vm1.invoke(new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(regionName);
        for (int i = 0; i < 500; i++) {
          pr.put("key" + i, "value" + i);
        }
        pr.flushHDFSQueue(0);
        return null;
      }
    });

    // After the flush, every member must see an empty HDFS event queue.
    SerializableCallable verify = new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        PartitionedRegion pr = (PartitionedRegion) getCache().getRegion(regionName);
        assertEquals(0, pr.getHDFSEventQueueStats().getEventQueueSize());
        return null;
      }
    };
    vm1.invoke(verify);
    vm2.invoke(verify);
    vm3.invoke(verify);
}
}
| apache-2.0 |
spring-projects/spring-framework | spring-messaging/src/test/java/org/springframework/messaging/handler/invocation/reactive/EncoderMethodReturnValueHandlerTests.java | 4558 | /*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.messaging.handler.invocation.reactive;
import java.util.Collections;
import io.reactivex.rxjava3.core.Completable;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import org.springframework.core.MethodParameter;
import org.springframework.core.ReactiveAdapterRegistry;
import org.springframework.core.codec.CharSequenceEncoder;
import org.springframework.lang.Nullable;
import org.springframework.messaging.Message;
import org.springframework.messaging.support.GenericMessage;
import static org.springframework.messaging.handler.invocation.ResolvableMethod.on;
/**
* Unit tests for {@link AbstractEncoderMethodReturnValueHandler}.
*
* @author Rossen Stoyanchev
*/
/**
 * Unit tests for {@link AbstractEncoderMethodReturnValueHandler}: return values
 * of various shapes (plain, {@code Flux}, void variants) must be encoded to the
 * expected payloads, and an unencodable type must surface an error.
 *
 * @author Rossen Stoyanchev
 */
public class EncoderMethodReturnValueHandlerTests {

    private final TestEncoderMethodReturnValueHandler handler = new TestEncoderMethodReturnValueHandler(
            Collections.singletonList(CharSequenceEncoder.textPlainOnly()),
            ReactiveAdapterRegistry.getSharedInstance());

    private final Message<?> message = new GenericMessage<>("shouldn't matter");

    @Test
    public void stringReturnValue() {
        MethodParameter returnType = on(TestController.class).resolveReturnType(String.class);
        this.handler.handleReturnValue("foo", returnType, this.message).block();
        verifyContent("foo");
    }

    @Test
    public void objectReturnValue() {
        MethodParameter returnType = on(TestController.class).resolveReturnType(Object.class);
        this.handler.handleReturnValue("foo", returnType, this.message).block();
        verifyContent("foo");
    }

    @Test
    public void fluxStringReturnValue() {
        MethodParameter returnType = on(TestController.class).resolveReturnType(Flux.class, String.class);
        this.handler.handleReturnValue(Flux.just("foo", "bar"), returnType, this.message).block();
        verifyContent("foo", "bar");
    }

    @Test
    public void fluxObjectReturnValue() {
        MethodParameter returnType = on(TestController.class).resolveReturnType(Flux.class, Object.class);
        this.handler.handleReturnValue(Flux.just("foo", "bar"), returnType, this.message).block();
        verifyContent("foo", "bar");
    }

    @Test
    public void voidReturnValue() {
        testVoidReturnType(null, on(TestController.class).resolveReturnType(void.class));
        testVoidReturnType(Mono.empty(), on(TestController.class).resolveReturnType(Mono.class, Void.class));
        testVoidReturnType(Completable.complete(), on(TestController.class).resolveReturnType(Completable.class));
    }

    private void testVoidReturnType(@Nullable Object value, MethodParameter bodyParameter) {
        this.handler.handleReturnValue(value, bodyParameter, this.message).block();
        // No payload is expected for any void-style return type.
        StepVerifier.create(this.handler.getContentAsStrings()).expectComplete().verify();
    }

    @Test
    public void noEncoder() {
        MethodParameter returnType = on(TestController.class).resolveReturnType(Object.class);
        StepVerifier.create(this.handler.handleReturnValue(new Object(), returnType, this.message))
                .expectErrorMessage("No encoder for java.lang.Object, current value type is class java.lang.Object")
                .verify();
    }

    /** Asserts the handler emitted exactly the given payloads, in order, then completed. */
    private void verifyContent(String... expected) {
        StepVerifier.create(this.handler.getContentAsStrings()).expectNext(expected).verifyComplete();
    }


    @SuppressWarnings({"unused", "ConstantConditions"})
    private static class TestController {

        String string() { return null; }

        Object object() { return null; }

        Flux<String> fluxString() { return null; }

        Flux<Object> fluxObject() { return null; }

        void voidReturn() { }

        Mono<Void> monoVoid() { return null; }

        Completable completable() { return null; }
    }
}
| apache-2.0 |
lessthanoptimal/BoofCV | main/boofcv-ip/src/main/java/boofcv/alg/transform/ii/impl/SparseIntegralSample_I32.java | 1299 | /*
* Copyright (c) 2011-2017, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.transform.ii.impl;
import boofcv.alg.transform.ii.IntegralImageOps;
import boofcv.struct.image.GrayS32;
import boofcv.struct.sparse.SparseScaleSample_F64;
/**
* Samples a square region inside an integral image
*
* @author Peter Abeles
*/
public class SparseIntegralSample_I32 extends SparseScaleSample_F64<GrayS32> {

    // Half-width (radius) of the sampled square, in pixels; clamped to >= 1.
    int r;

    /**
     * Sets the size of the sampled square region. The width is rounded to the
     * nearest integer and halved to obtain the radius.
     */
    @Override
    public void setWidth(double width) {
        r = ((int)( width + 0.5 ))/2;
        if( r <= 0 )
            r = 1;
        // The lower corner gets an extra -1 — presumably because integral-image
        // block sums treat (x0,y0) as an exclusive corner; confirm against the
        // IntegralImageOps.block_unsafe contract before changing.
        x0 = y0 = -r-1;
        x1 = y1 = r;
    }

    /**
     * Returns the sum of pixel values in the square centred at (x,y), read
     * from the integral image without bounds checking (caller must ensure the
     * region is inside the image).
     */
    @Override
    public double compute(int x, int y) {
        return IntegralImageOps.block_unsafe(input,x+x0,y+y0,x+x1,y+y1);
    }
}
| apache-2.0 |
luj1985/dionysus | dionysus-webapp/src/main/java/com/huixinpn/dionysus/controller/PassTemplate.java | 198 | package com.huixinpn.dionysus.controller;
import lombok.Data;
/**
 * Request payload for a password-change operation: the user's current password
 * plus the desired replacement. Lombok {@code @Data} generates the getters,
 * setters, {@code equals}/{@code hashCode} and {@code toString}.
 * <p>
 * Created by huanghao on 7/9/15.
 */
@Data
public class PassTemplate {
    // current (old) password, used to verify the change request
    private String oldPass;
    // replacement password to set
    private String newPass;
}
| apache-2.0 |
mouse3150/blooming | joss/src/test/java/org/javaswift/joss/command/impl/identity/KeystoneAuthenticationCommandImplTest.java | 2745 | package org.javaswift.joss.command.impl.identity;
import mockit.Expectations;
import mockit.Mocked;
import org.apache.http.entity.StringEntity;
import org.javaswift.joss.command.impl.core.BaseCommandTest;
import org.javaswift.joss.command.shared.identity.AuthenticationCommand;
import org.javaswift.joss.exception.CommandException;
import org.javaswift.joss.exception.UnauthorizedException;
import org.javaswift.joss.model.Access;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import static junit.framework.Assert.*;
/**
 * Tests for the Keystone authentication command. Each test replays the canned
 * Keystone token response loaded in {@link #setup()}; constructor arguments
 * are (httpClient, url, tenantName, tenantId, username, password).
 */
public class KeystoneAuthenticationCommandImplTest extends BaseCommandTest {

    @Before
    public void setup() throws IOException {
        super.setup();
        // Canned Keystone response used as the HTTP body for every test.
        loadSampleJson("/sample-access.json");
    }

    @Test
    public void getUrl() {
        AuthenticationCommand command = new KeystoneAuthenticationCommandImpl(httpClient, "someurl", null, null, "user", "pwd");
        assertEquals("someurl", command.getUrl());
    }

    // Neither tenant name nor tenant id supplied -> access reports no tenant.
    @Test
    public void noTenantSupplied() throws IOException {
        AuthenticationCommand command = new KeystoneAuthenticationCommandImpl(httpClient, "someurl", null, null, "user", "pwd");
        Access access = command.call();
        assertFalse(access.isTenantSupplied());
    }

    // A tenant id alone is enough for the tenant to count as supplied.
    @Test
    public void noTenantNameSupplied() throws IOException {
        AuthenticationCommand command = new KeystoneAuthenticationCommandImpl(httpClient, "someurl", null, "tenantid", "user", "pwd");
        Access access = command.call();
        assertTrue(access.isTenantSupplied());
    }

    @Test
    public void authenticateSuccessful() throws IOException {
        Access access = new KeystoneAuthenticationCommandImpl(httpClient, "someurl", "sometenant", "tenantid", "user", "pwd").call();
        // Token value comes from sample-access.json.
        assertEquals("a376b74fbdb64a4986cd3234647ff6f8", access.getToken());
    }

    // HTTP 401 must map to UnauthorizedException.
    @Test (expected = UnauthorizedException.class)
    public void authenticateFail() throws IOException {
        checkForError(401, new KeystoneAuthenticationCommandImpl(httpClient, "someurl", "sometenant", "tenantid", "user", "pwd"));
    }

    // Any other HTTP error (here 500) maps to the generic CommandException.
    @Test (expected = CommandException.class)
    public void unknownError() throws IOException {
        checkForError(500, new KeystoneAuthenticationCommandImpl(httpClient, "someurl", "sometenant", "tenantid", "user", "pwd"));
    }

    // An IOException while building the request entity surfaces as CommandException.
    @Test(expected = CommandException.class)
    public void ioException(@Mocked(stubOutClassInitialization = true) StringEntity unused) throws Exception {
        new Expectations() {{
            new StringEntity(anyString);
            result = new IOException();
        }};
        new KeystoneAuthenticationCommandImpl(httpClient, "someurl", "sometenant", "tenantid", "user", "pwd");
    }
}
| apache-2.0 |
t4gedieb/ews-client | src/main/java/ews/client/web/websocket/package-info.java | 89 | /**
* WebSocket services, using Spring Websocket.
*/
package ews.client.web.websocket;
| apache-2.0 |
kebernet/skillz | api/src/test/java/net/kebernet/skillz/util/CoercionTest.java | 2828 | /*
* Copyright (c) 2016 Robert Cooper
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.kebernet.skillz.util;
import org.junit.Test;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class CoercionTest {
private Coercion coercion = new Coercion();
@Test
public void testCoercions() {
HashMap foo = new HashMap<>();
assertTrue( foo == coercion.coerce(HashMap.class, foo, Map.class));
assertEquals(Integer.valueOf(3), coercion.coerce("3", Integer.class));
assertEquals(Integer.valueOf(3), coercion.coerce("03", Integer.class));
assertEquals(Integer.valueOf(3), coercion.coerce("3", int.class));
assertEquals(Double.valueOf(3.1D), coercion.coerce("3.1", Double.class));
assertEquals(Double.valueOf(3.1D), coercion.coerce("0003.1", Double.class));
assertEquals(Double.valueOf(3.1D), coercion.coerce("3.10000", Double.class));
assertEquals(Double.valueOf(3.1D), coercion.coerce("3.1", double.class));
assertEquals(Boolean.TRUE, coercion.coerce("true", Boolean.class));
assertEquals(Boolean.FALSE, coercion.coerce("false", Boolean.class));
assertEquals(Boolean.TRUE, coercion.coerce("yes", Boolean.class));
assertEquals(Boolean.FALSE, coercion.coerce("no", Boolean.class));
//Dates in fixed format.
Calendar cal = Calendar.getInstance();
cal.set(Calendar.YEAR, 2013);
cal.set(Calendar.MONTH, Calendar.FEBRUARY);
cal.set(Calendar.DAY_OF_MONTH, 2);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
assertEquals(cal.getTime(), coercion.coerce("2013-02-02 00:00:00", Date.class));
//Identity for passthrough conversions
Object four = Integer.valueOf(4);
assertTrue(coercion.coerce(four, Integer.class) == four);
assertEquals(TestEnum.FIRST, coercion.coerce("first", TestEnum.class));
assertEquals(TestEnum.SECOND, coercion.coerce(1, TestEnum.class));
}
enum TestEnum {
FIRST,
SECOND
}
} | apache-2.0 |
jpavlich/spring-sandbox | security/secTest1/src/main/java/com/example/demo/ServletInitializer.java | 400 | package com.example.demo;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.support.SpringBootServletInitializer;
/**
 * WAR deployment entry point: wires the Spring Boot application into an
 * external servlet container (instead of the embedded server used when the
 * app is launched from {@code main}).
 */
public class ServletInitializer extends SpringBootServletInitializer {

    /** Registers {@code SecTest1Application} as the configuration source for the container bootstrap. */
    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        return application.sources(SecTest1Application.class);
    }
}
| apache-2.0 |
OpenGamma/Strata | modules/pricer/src/test/java/com/opengamma/strata/pricer/fra/DiscountingFraTradePricerTest.java | 4574 | /*
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.pricer.fra;
import static com.opengamma.strata.basics.index.IborIndices.GBP_LIBOR_3M;
import static com.opengamma.strata.pricer.fra.FraDummyData.FRA;
import static com.opengamma.strata.pricer.fra.FraDummyData.FRA_TRADE;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.time.LocalDate;
import org.junit.jupiter.api.Test;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.currency.CurrencyAmount;
import com.opengamma.strata.basics.currency.MultiCurrencyAmount;
import com.opengamma.strata.basics.index.IborIndexObservation;
import com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeries;
import com.opengamma.strata.pricer.DiscountFactors;
import com.opengamma.strata.pricer.datasets.RatesProviderDataSets;
import com.opengamma.strata.pricer.rate.IborIndexRates;
import com.opengamma.strata.pricer.rate.IborRateSensitivity;
import com.opengamma.strata.pricer.rate.ImmutableRatesProvider;
import com.opengamma.strata.pricer.rate.SimpleRatesProvider;
import com.opengamma.strata.product.TradeInfo;
import com.opengamma.strata.product.fra.FraTrade;
import com.opengamma.strata.product.fra.ResolvedFra;
import com.opengamma.strata.product.fra.ResolvedFraTrade;
import com.opengamma.strata.product.rate.IborRateComputation;
/**
* Test {@link DiscountingFraTradePricer}.
* <p>
* Some of the methods in the trade pricer are comparable to the product pricer methods, thus tested in
* {@link DiscountingFraProductPricerTest}.
*/
public class DiscountingFraTradePricerTest {

    private static final ReferenceData REF_DATA = ReferenceData.standard();
    private static final LocalDate VAL_DATE = LocalDate.of(2014, 1, 22);
    private static final double DISCOUNT_FACTOR = 0.98d;
    private static final double FORWARD_RATE = 0.02;
    private static final DiscountingFraProductPricer PRICER_PRODUCT = DiscountingFraProductPricer.DEFAULT;
    private static final DiscountingFraTradePricer PRICER_TRADE = new DiscountingFraTradePricer(PRICER_PRODUCT);
    private static final ResolvedFraTrade RFRA_TRADE = FRA_TRADE.resolve(REF_DATA);
    private static final ResolvedFra RFRA = FRA.resolve(REF_DATA);
    // Rates provider backed by mocks: a fixed discount factor for the FRA
    // payment date, and a fixed forward rate (with unit point sensitivity)
    // for the FRA's Ibor observation. Wired once for all tests.
    private static final SimpleRatesProvider RATES_PROVIDER;
    static {
        DiscountFactors mockDf = mock(DiscountFactors.class);
        IborIndexRates mockIbor = mock(IborIndexRates.class);
        RATES_PROVIDER = new SimpleRatesProvider(VAL_DATE, mockDf);
        RATES_PROVIDER.setIborRates(mockIbor);
        IborIndexObservation obs = ((IborRateComputation) RFRA.getFloatingRate()).getObservation();
        IborRateSensitivity sens = IborRateSensitivity.of(obs, 1d);
        when(mockIbor.ratePointSensitivity(obs)).thenReturn(sens);
        when(mockIbor.rate(obs)).thenReturn(FORWARD_RATE);
        when(mockDf.discountFactor(RFRA.getPaymentDate())).thenReturn(DISCOUNT_FACTOR);
    }

    //-------------------------------------------------------------------------
    @Test
    public void test_getters() {
        assertThat(DiscountingFraTradePricer.DEFAULT.getProductPricer()).isEqualTo(DiscountingFraProductPricer.DEFAULT);
    }

    //-------------------------------------------------------------------------
    // Currency exposure of a FRA trade is simply its present value.
    @Test
    public void test_currencyExposure() {
        assertThat(PRICER_TRADE.currencyExposure(RFRA_TRADE, RATES_PROVIDER)).isEqualTo(MultiCurrencyAmount.of(PRICER_TRADE.presentValue(RFRA_TRADE, RATES_PROVIDER)));
    }

    // Before the payment date no cash flows, so current cash is zero.
    @Test
    public void test_currentCash_zero() {
        assertThat(PRICER_TRADE.currentCash(RFRA_TRADE, RATES_PROVIDER)).isEqualTo(CurrencyAmount.zero(FRA.getCurrency()));
    }

    // On the payment date the settlement amount is the discounted rate
    // differential: (fixing - fixed) / (1 + fixing * yf) * yf * notional.
    @Test
    public void test_currentCash_onPaymentDate() {
        LocalDate paymentDate = RFRA.getPaymentDate();
        double publishedRate = 0.025;
        ResolvedFraTrade trade = FraTrade.builder()
            .info(TradeInfo.builder().tradeDate(paymentDate).build())
            .product(FRA)
            .build()
            .resolve(REF_DATA);
        // Provide the published fixing via the index time-series.
        ImmutableRatesProvider ratesProvider = RatesProviderDataSets.multiGbp(paymentDate).toBuilder()
            .timeSeries(GBP_LIBOR_3M, LocalDateDoubleTimeSeries.of(paymentDate, publishedRate))
            .build();
        assertThat(PRICER_TRADE.currentCash(trade, ratesProvider)).isEqualTo(CurrencyAmount.of(FRA.getCurrency(),
            (publishedRate - FRA.getFixedRate()) / (1d + publishedRate * RFRA.getYearFraction()) *
                RFRA.getYearFraction() * RFRA.getNotional()));
    }
}
| apache-2.0 |
darkpsy3934/community-plugins | ofswitch/src/java/org/ifsoft/openlink/component/OpenlinkComponent.java | 76911 | package org.ifsoft.openlink.component;
import java.net.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.jivesoftware.openfire.http.HttpBindManager;
import org.jivesoftware.openfire.SessionManager;
import org.jivesoftware.openfire.session.LocalClientSession;
import org.jivesoftware.openfire.session.Session;
import org.jivesoftware.openfire.RoutingTable;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.PrivateStorage;
import org.jivesoftware.openfire.user.UserManager;
import org.jivesoftware.openfire.user.User;
import org.jivesoftware.openfire.vcard.*;
import org.jivesoftware.openfire.cluster.ClusterManager;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.openfire.roster.Roster;
import org.jivesoftware.openfire.roster.RosterItem;
import org.jivesoftware.openfire.roster.RosterManager;
import org.xmpp.component.Component;
import org.xmpp.component.AbstractComponent;
import org.xmpp.component.ComponentException;
import org.xmpp.component.ComponentManager;
import org.xmpp.component.ComponentManagerFactory;
import org.xmpp.packet.IQ;
import org.xmpp.packet.JID;
import org.xmpp.packet.Message;
import org.xmpp.packet.Packet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.ifsoft.openlink.*;
import org.ifsoft.openlink.commands.*;
import net.sf.json.*;
public class OpenlinkComponent extends AbstractTSComponent implements OpenlinkConstants
{
private static final Logger Log = LoggerFactory.getLogger(OpenlinkComponent.class);
private ComponentManager componentManager;
private JID componentJID = null;
private PrivateStorage privateStorage;
private UserManager userManager;
private RosterManager rosterManager;
private SessionManager sessionManager;
private OpenlinkCommandManager openlinkManger;
public Map<String, OpenlinkUserInterest> openlinkInterests;
public Map<String, OpenlinkUser> traderLyncUserTable;
public Map<String, OpenlinkInterest> traderLyncInterests;
public Map<String, OpenlinkInterest> callInterests;
public Map<String, OpenlinkUser> userProfiles;
public TelephoneNumberFormatter telephoneNumberFormatter;
public Site site;
private Vector<OpenlinkUser> sortedProfiles;
private Timer timer = null;
static public OpenlinkComponent self;
//-------------------------------------------------------
//
//
//
//-------------------------------------------------------
/**
 * Creates the component for the given site. The component JID is
 * "traderlynk.&lt;xmpp-domain&gt;", and the instance publishes itself through
 * the static {@code self} reference.
 *
 * @param site the PBX/site configuration this component serves
 */
public OpenlinkComponent(Site site)
{
    // NOTE(review): 16/1000/true are AbstractTSComponent settings — presumably
    // pool size / queue capacity; confirm against the base class.
    super(16, 1000, true);
    this.site = site;
    this.componentJID = new JID(getName() + "." + getDomain());
    sortedProfiles = new Vector<OpenlinkUser>();
    self = this;
    Log.info( "["+ site.getName() + "] OpenlinkComponent Initialised");
}
/**
 * Initialises the component: grabs Openfire singletons, creates the
 * (thread-safe) lookup tables, configures number formatting, loads user
 * profiles, and registers the Openlink ad-hoc commands. Must be called
 * before the component handles any stanza.
 */
public void componentEnable()
{
    // Openfire server singletons.
    privateStorage = XMPPServer.getInstance().getPrivateStorage();
    userManager = XMPPServer.getInstance().getUserManager();
    rosterManager = XMPPServer.getInstance().getRosterManager();
    sessionManager = XMPPServer.getInstance().getSessionManager();
    componentManager = ComponentManagerFactory.getComponentManager();

    openlinkManger = new OpenlinkCommandManager();

    // Shared lookup tables; synchronized because stanzas are processed concurrently.
    openlinkInterests = Collections.synchronizedMap( new HashMap<String, OpenlinkUserInterest>());
    traderLyncUserTable = Collections.synchronizedMap( new HashMap<String, OpenlinkUser>());
    traderLyncInterests = Collections.synchronizedMap( new HashMap<String, OpenlinkInterest>());
    callInterests = Collections.synchronizedMap( new HashMap<String, OpenlinkInterest>());
    userProfiles = Collections.synchronizedMap( new HashMap<String, OpenlinkUser>());

    timer = new Timer();

    Log.info( "["+ site.getName() + "] Creating telephoneNumberFormatter object");
    setupTelephoneNumberFormatter();

    Log.info( "["+ site.getName() + "] Creating user profiles");
    getUserProfiles();

    // Register every Openlink XEP ad-hoc command this component supports.
    openlinkManger.addCommand(new GetProfiles(this));
    openlinkManger.addCommand(new GetProfile(this));
    openlinkManger.addCommand(new GetInterests(this));
    openlinkManger.addCommand(new GetInterest(this));
    openlinkManger.addCommand(new GetFeatures(this));
    openlinkManger.addCommand(new MakeCall(this));
    openlinkManger.addCommand(new IntercomCall(this));
    openlinkManger.addCommand(new RequestAction(this));
    openlinkManger.addCommand(new SetFeature(this));
    openlinkManger.addCommand(new QueryFeatures(this));
    openlinkManger.addCommand(new ManageVoiceBridge(this));
}
/**
 * Shuts the component down: stops the Openlink ad-hoc command manager and
 * cancels the internal timer. Tolerates being called before
 * {@link #componentEnable()} and swallows (but now fully logs) any failure.
 */
public void componentDestroyed()
{
    try {
        // Guard against componentEnable() never having run.
        if (openlinkManger != null) {
            openlinkManger.stop();
        }
        if (timer != null) {
            timer.cancel();
            timer = null;
        }
    }
    catch (Exception e) {
        // Fix: previously logged only e.toString(), discarding the stack trace.
        Log.error("componentDestroyed failed", e);
    }
}
/**
 * Builds the {@link TelephoneNumberFormatter} for this site from the
 * site-scoped Openfire properties (country code, PBX access digits, area
 * code, extension length). Failures are logged and leave the formatter in
 * whatever state it reached.
 */
public void setupTelephoneNumberFormatter()
{
    Log.info( "["+ site.getName() + "] setupTelephoneNumberFormatter");

    try
    {
        // Properties are keyed per site, e.g. "...pbx.country.<sitename>".
        String pname = site.getName().toLowerCase();
        String country = JiveGlobals.getProperty(Properties.Openlink_PBX_COUNTRY_CODE + "." + pname, Locale.getDefault().getCountry());
        String pbxAccessDigits = JiveGlobals.getProperty(Properties.Openlink_PBX_ACCESS_DIGITS + "." + pname, "9");
        String areaCode = JiveGlobals.getProperty(Properties.Openlink_AREA_CODE + "." + pname, "0207");
        String pbxNumberLength = JiveGlobals.getProperty(Properties.Openlink_PBX_NUMBER_LENGTH + "." + pname, "5");

        telephoneNumberFormatter = new TelephoneNumberFormatter(new Locale("en", country));
        telephoneNumberFormatter.setExtensionNumberLength(Integer.parseInt(pbxNumberLength));
        telephoneNumberFormatter.setOutsideAccess(pbxAccessDigits);
        telephoneNumberFormatter.setAreaCode(areaCode);
        telephoneNumberFormatter.setLocale(new Locale("en", country));
    }
    catch (Exception e)
    {
        Log.error( "["+ site.getName() + "] setupTelephoneNumberFormatter " + e);
    }
}
/**
 * Converts dialable digits to their canonical form using the site's
 * formatter. On any formatting failure the error is logged and the input
 * is returned unchanged.
 */
public String formatCanonicalNumber(String dialDigits)
{
    try
    {
        return telephoneNumberFormatter.formatCanonicalNumber(dialDigits);
    }
    catch (Exception e)
    {
        Log.error( "["+ site.getName() + "] formatCanonicalNumber " + e);
        return dialDigits;
    }
}
/**
 * Converts a canonical number into a locally dialable number. If the input
 * is not already canonical (the first attempt throws), it is normalised via
 * {@link #formatCanonicalNumber(String)} and the conversion is retried once.
 * If both attempts fail the error is logged and the original input is
 * returned unchanged.
 */
public String formatDialableNumber(String canonicalNumber)
{
    try
    {
        return telephoneNumberFormatter.formatDialableNumber(canonicalNumber);
    }
    catch (Exception first)
    {
        // Input was presumably not canonical; normalise and retry once.
        // (formatCanonicalNumber itself falls back to its input on failure.)
        try
        {
            return telephoneNumberFormatter.formatDialableNumber(formatCanonicalNumber(canonicalNumber));
        }
        catch (Exception second)
        {
            Log.error( "["+ site.getName() + "] formatDialableNumber " + second);
            return canonicalNumber;
        }
    }
}
//-------------------------------------------------------
//
// Component identity and simple accessors
//
//-------------------------------------------------------

/** Human-readable description shown in XMPP service discovery. */
@Override public String getDescription()
{
    return "TraderLynk Component";
}

/** Sub-domain under which the component is registered. */
@Override public String getName()
{
    return "traderlynk";
}

/** The server's XMPP domain. */
@Override public String getDomain()
{
    return XMPPServer.getInstance().getServerInfo().getXMPPDomain();
}

// No post-start/post-shutdown work is needed by this component.
@Override public void postComponentStart()
{
}

@Override public void postComponentShutdown()
{
}

/** Full component JID, i.e. "traderlynk.&lt;xmpp-domain&gt;". */
public JID getComponentJID()
{
    return new JID(getName() + "." + getDomain());
}

/** Site name, or the empty string when no site is configured. */
public String getSiteName()
{
    if (site == null)
        return "";
    else
        return site.getName();
}

/** Number of known TraderLynk user profiles. */
public int getUserCount()
{
    return traderLyncUserTable.values().size();
}
/**
 * Returns a page of user profiles in natural {@link OpenlinkUser} order.
 * The sorted snapshot is (re)built whenever paging restarts at index 0 or
 * no snapshot exists yet.
 *
 * @param startIndex zero-based index of the first profile to return
 * @param numResults maximum number of profiles to return
 * @return up to {@code numResults} profiles starting at {@code startIndex}
 */
public List<OpenlinkUser> getUsers(int startIndex, int numResults)
{
    List<OpenlinkUser> profiles = new ArrayList<OpenlinkUser>();

    // Rebuild and sort the snapshot at the start of a paging sequence.
    if (startIndex == 0 || sortedProfiles.size() == 0)
    {
        sortedProfiles = new Vector<OpenlinkUser>(traderLyncUserTable.values());
        Collections.sort(sortedProfiles);
    }

    int counter = 0;
    for (OpenlinkUser traderLyncUser : sortedProfiles)
    {
        // Fix: the old bound (counter > startIndex + numResults) let one extra
        // element through, so each page contained numResults + 1 profiles.
        if (counter >= startIndex + numResults)
        {
            break;
        }
        if (counter >= startIndex)
        {
            profiles.add(traderLyncUser);
        }
        counter++;
    }
    return profiles;
}
//-------------------------------------------------------
//
// Stanza handling
//
//-------------------------------------------------------

/** Hook for message interception; currently disabled (delegation commented out). */
public void interceptMessage(Message received)
{
    //traderLyncLinkService.interceptMessage(received);
}

/** Messages addressed to the component are only logged; no reply is sent. */
@Override protected void handleMessage(Message received)
{
    Log.info("["+ site.getName() + "] handleMessage \n"+ received.toString());
}
/**
 * Handles IQ results addressed to the component. Only pubsub#owner
 * subscription listings are acted upon: for each subscription on a node we
 * track as a user interest, the subscriber's state is recorded and enriched
 * with local user details.
 */
@Override protected void handleIQResult(IQ iq)
{
    Log.info("["+ site.getName() + "] handleIQResult \n"+ iq.toString());

    Element element = iq.getChildElement();

    if (element != null)
    {
        String namespace = element.getNamespaceURI();

        if("http://jabber.org/protocol/pubsub#owner".equals(namespace))
        {
            Element subscriptions = element.element("subscriptions");

            if (subscriptions != null)
            {
                // The pubsub node name doubles as the interest key.
                String node = subscriptions.attributeValue("node");
                Log.info("["+ site.getName() + "] handleIQResult found subscription node " + node);

                if (openlinkInterests.containsKey(node))
                {
                    Log.info("["+ site.getName() + "] handleIQResult found user interest " + node);
                    OpenlinkUserInterest traderLyncUserInterest = openlinkInterests.get(node);

                    // Record each subscriber's JID and subscription state.
                    for ( Iterator<Element> i = subscriptions.elementIterator( "subscription" ); i.hasNext(); )
                    {
                        Element subscription = (Element) i.next();
                        JID jid = new JID(subscription.attributeValue("jid"));
                        String sub = subscription.attributeValue("subscription");
                        OpenlinkSubscriber traderLyncSubscriber = traderLyncUserInterest.getSubscriber(jid);
                        traderLyncSubscriber.setSubscription(sub);
                        setSubscriberDetails(jid, traderLyncSubscriber);
                        Log.info("["+ site.getName() + "] handleIQResult added subscriber " + jid);
                    }
                }
            }
        }
    }
}
/**
 * Copies local-user details (display name, full JID) onto the subscriber
 * record when the JID belongs to a registered Openfire user. Unknown or
 * unreadable users are silently skipped.
 */
private void setSubscriberDetails(JID jid, OpenlinkSubscriber traderLyncSubscriber)
{
    if (userManager.isRegisteredUser(jid.getNode()))
    {
        User user = null;

        try {
            user = userManager.getUser(jid.getNode());
        }
        catch(Exception e) { } // deliberately ignored: user stays null and is skipped below

        if (user != null)
        {
            //traderLyncSubscriber.setOnline(presenceManager.isAvailable(user));
            traderLyncSubscriber.setName(user.getName());
            traderLyncSubscriber.setJID(jid); // we need the full JID including resource to get session object
        }
    }
}
/**
 * Logs incoming IQ error stanzas, skipping pubsub node-create errors —
 * presumably "node already exists" responses that would only add noise.
 */
@Override protected void handleIQError(IQ iq)
{
    String xml = iq.toString();

    if (!xml.contains("<create node="))
    {
        Log.info("["+ site.getName() + "] handleIQError \n"+ iq.toString());
    }
}
/**
 * Answers XEP-0030 disco#info queries. With no node, the component
 * advertises itself as an Openlink command list; with a node, it describes
 * that specific command — but only if the sender has permission to run it.
 */
@Override public IQ handleDiscoInfo(IQ iq)
{
    JID jid = iq.getFrom();
    Element child = iq.getChildElement();
    String node = child.attributeValue("node");

    IQ iq1 = IQ.createResultIQ(iq);
    iq1.setType(org.xmpp.packet.IQ.Type.result);
    iq1.setChildElement(iq.getChildElement().createCopy());

    Element queryElement = iq1.getChildElement();
    Element identity = queryElement.addElement("identity");

    // Features every response carries.
    queryElement.addElement("feature").addAttribute("var",NAMESPACE_DISCO_INFO);
    queryElement.addElement("feature").addAttribute("var",NAMESPACE_XMPP_PING);
    identity.addAttribute("category", "component");
    identity.addAttribute("name", "traderLync");

    if (node == null) // Disco discovery of openlink
    {
        identity.addAttribute("type", "command-list");
        queryElement.addElement("feature").addAttribute("var", "http://jabber.org/protocol/commands");
        queryElement.addElement("feature").addAttribute("var", "http://xmpp.org/protocol/openlink:01:00:00");
        queryElement.addElement("feature").addAttribute("var", "http://xmpp.org/protocol/openlink:01:00:00#tsc");

    } else {
        // Disco discovery of Openlink command
        OpenlinkCommand command = openlinkManger.getCommand(node);

        if (command != null && command.hasPermission(jid))
        {
            identity.addAttribute("type", "command-node");
            queryElement.addElement("feature").addAttribute("var", "http://jabber.org/protocol/commands");
            queryElement.addElement("feature").addAttribute("var", "http://xmpp.org/protocol/openlink:01:00:00");
        }
    }

    //Log.info("["+ site.getName() + "] handleDiscoInfo "+ iq1.toString());
    return iq1;
}
/**
 * Answers XEP-0030 disco#items queries. For the XEP-0050 commands node the
 * reply lists every registered Openlink command the requesting JID is
 * allowed to invoke; other nodes get an empty item list.
 */
@Override public IQ handleDiscoItems(IQ iq)
{
    JID jid = iq.getFrom();
    Element child = iq.getChildElement();
    String node = child.attributeValue("node");

    IQ iq1 = IQ.createResultIQ(iq);
    iq1.setType(org.xmpp.packet.IQ.Type.result);
    iq1.setChildElement(iq.getChildElement().createCopy());

    Element queryElement = iq1.getChildElement();
    Element identity = queryElement.addElement("identity");
    identity.addAttribute("category", "component");
    identity.addAttribute("name", "openlink");
    identity.addAttribute("type", "command-list");

    if ("http://jabber.org/protocol/commands".equals(node))
    {
        for (OpenlinkCommand command : openlinkManger.getCommands())
        {
            // Only include commands that the sender can invoke (i.e. has enough permissions)
            if (command.hasPermission(jid))
            {
                Element item = queryElement.addElement("item");
                item.addAttribute("jid", componentJID.toString());
                item.addAttribute("node", command.getCode());
                item.addAttribute("name", command.getLabel());
            }
        }
    }

    //Log.info("["+ site.getName() + "] handleDiscoItems "+ iq1.toString());
    return iq1;
}
/** IQ "get" and "set" are handled identically; both delegate to {@link #handleIQPacket(IQ)}. */
@Override public IQ handleIQGet(IQ iq)
{
    return handleIQPacket(iq);
}

@Override public IQ handleIQSet(IQ iq)
{
    return handleIQPacket(iq);
}
/**
 * Common IQ processing: XEP-0050 ad-hoc command payloads are dispatched to
 * the Openlink command manager; anything else gets an empty result echoing
 * the request's child element.
 */
private IQ handleIQPacket(IQ iq)
{
    Log.info("["+ site.getName() + "] handleIQPacket \n"+ iq.toString());

    Element element = iq.getChildElement();

    // Default reply: a result IQ mirroring the request payload.
    IQ iq1 = IQ.createResultIQ(iq);
    iq1.setType(org.xmpp.packet.IQ.Type.result);
    iq1.setChildElement(iq.getChildElement().createCopy());

    if (element != null)
    {
        String namespace = element.getNamespaceURI();

        if("http://jabber.org/protocol/commands".equals(namespace))
            iq1 = openlinkManger.process(iq);
    }

    return iq1;
}
//-------------------------------------------------------
//
// Outgoing call handling
//
//-------------------------------------------------------

/** Convenience overload: make a call on the user's default profile with no pre-assigned call id. */
public String makeCallDefault(Element newCommand, JID jid, String handset, String privacy, String autoHold, String dialDigits)
{
    return makeCallDefault(newCommand, jid, handset, privacy, autoHold, dialDigits, null);
}
    public String makeCallDefault(Element newCommand, JID jid, String handset, String privacy, String autoHold, String dialDigits, String callId)
    {
        // Places an outgoing call using the sender's default profile and default
        // interest. Returns null on success, otherwise a human-readable error.
        Log.info( "["+ site.getName() + "] makeCallDefault "+ jid + " " + handset + " " + dialDigits + " " + privacy);
        String errorMessage = "No default profile found";
        try {
            // Normalise the destination first; reject anything undialable.
            if (dialDigits != null && !"".equals(dialDigits))
            {
                dialDigits = makeDialableNumber(dialDigits);
                if (dialDigits == null || "".equals(dialDigits))
                {
                    errorMessage = "Destination is not a dialable number";
                    return errorMessage;
                }
            }
            boolean foundDefaultProfile = false;
            String userName = jid.getNode();
            Iterator<OpenlinkUser> it = traderLyncUserTable.values().iterator();
            while( it.hasNext() )
            {
                OpenlinkUser traderLyncUser = (OpenlinkUser)it.next();
                // Match on the bare username and the profile flagged as default.
                if (userName.equals(traderLyncUser.getUserId()) && "true".equals(traderLyncUser.getDefault()))
                {
                    foundDefaultProfile = true;
                    // Fall back to the profile's stored preferences when the caller
                    // did not supply explicit values. NOTE(review): privacy and
                    // autoHold are resolved here but never forwarded to
                    // makeOutgoingCall — confirm whether that is intentional.
                    handset = handset == null ? traderLyncUser.getHandsetNo() : handset;
                    privacy = privacy == null ? traderLyncUser.getLastPrivacy() : privacy;
                    autoHold = autoHold == null ? (traderLyncUser.autoHold() ? "true" : "false") : autoHold;
                    if (traderLyncUser.getDefaultInterest() != null)
                    {
                        traderLyncUser.setWaitingInterest(null);
                        errorMessage = makeOutgoingCall(traderLyncUser.getDefaultInterest().getUserInterests().get(traderLyncUser.getUserNo()), dialDigits, handset, callId);
                    } else errorMessage = "no default interest found";
                    break;
                }
            }
            if (foundDefaultProfile == false)
                errorMessage = "no default profile found";
        }
        catch(Exception e) {
            Log.error("makeCallDefault " + e);
            e.printStackTrace();
            errorMessage = "Internal error - " + e.toString();
        }
        return errorMessage;
    }
public String makeCall(Element newCommand, String userInterest, String handset, String privacy, String autoHold, String dialDigits)
{
return makeCall(newCommand, userInterest, handset, privacy, autoHold, dialDigits, null);
}
    public String makeCall(Element newCommand, String userInterest, String handset, String privacy, String autoHold, String dialDigits, String callId)
    {
        // Places an outgoing call on a specific user interest. Interest type "D"
        // (directory number) dials the supplied digits; type "L" (line) dials the
        // interest's own stored value. Returns null on success or an error text.
        Log.info( "["+ site.getName() + "] makeCall "+ userInterest + " " + dialDigits + " " + callId);
        String errorMessage = "Interest not found";
        try {
            if (openlinkInterests.containsKey(userInterest))
            {
                OpenlinkUserInterest traderLyncUserInterest = openlinkInterests.get(userInterest);
                OpenlinkUser traderLyncUser = traderLyncUserInterest.getUser();
                // Default to the profile's stored handset/privacy when not given.
                // NOTE(review): privacy is resolved but never forwarded to
                // makeOutgoingCall — confirm whether that is intentional.
                handset = handset == null ? traderLyncUser.getHandsetNo() : handset;
                privacy = privacy == null ? traderLyncUser.getLastPrivacy() : privacy;
                if ("D".equals(traderLyncUserInterest.getInterest().getInterestType()))
                {
                    if (dialDigits != null && !"".equals(dialDigits))
                    {
                        dialDigits = makeDialableNumber(dialDigits);
                        if (dialDigits == null || "".equals(dialDigits))
                        {
                            errorMessage = "Destination is not a dialable number";
                            return errorMessage;
                        }
                    }
                    errorMessage = makeOutgoingCall(traderLyncUserInterest, dialDigits, handset, callId);
                }
                else if ("L".equals(traderLyncUserInterest.getInterest().getInterestType()))
                {
                    // Line interests carry their own destination; strip a tel: prefix.
                    String digits = traderLyncUserInterest.getInterest().getInterestValue();
                    if (digits.startsWith("tel:")) digits = digits.substring(4);
                    errorMessage = makeOutgoingCall(traderLyncUserInterest, digits, handset, callId);
                }
            }
        }
        catch(Exception e) {
            Log.error("makeCall " + e);
            e.printStackTrace();
            errorMessage = "Internal error - " + e.toString();
        }
        return errorMessage;
    }
private String makeOutgoingCall(OpenlinkUserInterest traderLyncUserInterest, String dialDigits, String handset, String callId)
{
Log.info( "["+ site.getName() + "] makeOutgoingCall "+ traderLyncUserInterest.getInterest().getInterestValue() + " " + dialDigits + " " + callId);
String deviceNo = traderLyncUserInterest.getUser().getDeviceNo();
String username = traderLyncUserInterest.getUser().getUserId();
String errorMessage = null;
try {
if (callId == null) callId = username + "-" + System.currentTimeMillis();
errorMessage = null; //sendSkype4BRequest(traderLyncUserInterest, "startPhoneAudio", username, dialDigits, callId, handset);
if (errorMessage == null)
{
callInterests.put(callId, traderLyncUserInterest.getInterest());
if (deviceNo != null)
{
//connectHandsetLine(deviceNo, callId);
}
outgoingCallNotification(username, callId, true, dialDigits, traderLyncUserInterest.getInterest().getInterestLabel());
}
}
catch(Exception e) {
Log.error("makeOutgoingCall ", e);
errorMessage = "Internal error - " + e.toString();
}
return errorMessage;
}
public String intercomCall(Element newCommand, String profileID, JID to, String groupID)
{
Log.info( "["+ site.getName() + "] intercomCall "+ profileID + " -> " + to + " => " + groupID);
String errorMessage = null;
try {
OpenlinkUser fromUser = getOpenlinkProfile(profileID);
if (groupID == null)
{
OpenlinkUser toUser = getOpenlinkUser(to);
//traderLyncLinkService.platformIntercomCall(fromUser.getDeviceNo(), toUser.getUserNo());
} else {
//traderLyncLinkService.groupIntercomCall(fromUser.getDeviceNo(), groupID);
}
//errorMessage = waitForFirstEvent(newCommand, fromUser, true, "0");
}
catch(Exception e) {
Log.error("["+ site.getName() + "] intercomCall " + e);
e.printStackTrace();
errorMessage = "Internal error - " + e.toString();
}
return errorMessage;
}
private boolean isValidAction(OpenlinkCall traderLyncCall, String validAction)
{
boolean valid = false;
Iterator it4 = traderLyncCall.getValidActions().iterator();
while( it4.hasNext() )
{
String action = (String)it4.next();
if (action.equals(validAction))
{
valid = true;
break;
}
}
return valid;
}
    /**
     * Dispatches a call-control action (answer, hold, clear, transfer, DTMF,
     * voice-drop, ...) against an existing call on a user interest.
     *
     * Validates that the interest exists, the call id is known, and the action is
     * currently permitted on that call, then performs the action. Most platform
     * calls are stubbed out (commented), but the local call-state bookkeeping and
     * pub-sub event publication still run. Returns null on success; otherwise an
     * error description, which is also appended to {@code command} as an error
     * note when {@code command} is non-null.
     */
    public String processUserAction(Element command, String userInterest, String action, String callID, String value1)
    {
        Log.info( "["+ site.getName() + "] processUserAction " + userInterest + " " + action + " " + callID + " " + value1);
        String errorMessage = null;
        try {
            if (openlinkInterests.containsKey(userInterest))
            {
                OpenlinkUserInterest traderLyncUserInterest = openlinkInterests.get(userInterest);
                OpenlinkInterest traderLyncInterest = traderLyncUserInterest.getInterest();
                OpenlinkUser traderLyncUser = traderLyncUserInterest.getUser();
                OpenlinkCall traderLyncCall = traderLyncUserInterest.getCallById(callID);
                if (traderLyncCall != null)
                {
                    traderLyncCall.published = true;
                    Log.info( "[" + site.getName() + "] processUserAction");
                    if (isValidAction(traderLyncCall, action))
                    {
                        // --- Answer / join / retrieve: connect the acting user, mark everyone else busy.
                        if ("AnswerCall".equals(action) || "JoinCall".equals(action) || "RetrieveCall".equals(action))
                        {
                            if (traderLyncUser.getDeviceNo() != null && !"0.0.0.0".equals(traderLyncUser.getDeviceNo()))
                            {
                                if ("D".equals(traderLyncInterest.getInterestType()))
                                {
                                    errorMessage = null; //sendSkype4BRequest(traderLyncUserInterest, "resumePhoneAudio", traderLyncUser.getUserNo(), null, callID, null);
                                    if (errorMessage == null)
                                    {
                                        Iterator it = traderLyncInterest.getUserInterests().values().iterator();
                                        while( it.hasNext() )
                                        {
                                            OpenlinkUserInterest eachInterest = (OpenlinkUserInterest)it.next();
                                            if (eachInterest.getUser().getUserNo().equals(traderLyncUser.getUserNo()))
                                            {
                                                eachInterest.handleCallConnected(callID);
                                            } else {
                                                eachInterest.handleCallBusy(callID);
                                            }
                                        }
                                        publishOpenlinkCallEvent(traderLyncInterest);
                                    }
                                }
                                else
                                if ("L".equals(traderLyncInterest.getInterestType()))
                                {
                                    // Line interests are (re)dialled rather than resumed.
                                    makeCall(null, userInterest, traderLyncUser.getHandsetNo(), null, null, userInterest);
                                }
                                if (traderLyncUser.autoPrivate())
                                {
                                    //Thread.sleep(500);
                                    //traderLyncLinkService.privateCall(traderLyncUser.getDeviceNo(), traderLyncUser.getHandsetNo(), "Y");
                                }
                            } else {
                                errorMessage = "User device not online";
                            }
                        }
                        // --- Clear this user's connection and purge the call's events.
                        if ("ClearConnection".equals(action))
                        {
                            errorMessage = null; //sendSkype4BRequest(traderLyncUserInterest, "stopPhoneAudio", traderLyncUser.getUserNo(), null, callID, null);
                            if (errorMessage == null)
                            {
                                Iterator it = traderLyncInterest.getUserInterests().values().iterator();
                                while( it.hasNext() )
                                {
                                    OpenlinkUserInterest eachInterest = (OpenlinkUserInterest)it.next();
                                    eachInterest.handleConnectionCleared(callID);
                                }
                                publishOpenlinkCallEvent(traderLyncInterest);
                            }
                            deleteEvents(traderLyncInterest, callID);
                        }
                        if ("PrivateCall".equals(action))
                        {
                            //traderLyncLinkService.privateCall(traderLyncCall.getConsole(), traderLyncCall.getHandset(), "Y");
                        }
                        if ("PublicCall".equals(action))
                        {
                            //traderLyncLinkService.privateCall(traderLyncCall.getConsole(), traderLyncCall.getHandset(), "N");
                        }
                        // --- DTMF: whole strings or a single digit.
                        if ("SendDigits".equals(action))
                        {
                            value1 = makeDialableNumber(value1);
                            if (value1 == null || "".equals(value1))
                            {
                                errorMessage = "A dialable number is required for SendDigits";
                            } else {
                                //traderLyncLinkService.dialDigits(traderLyncCall.getLine(), value1);
                            }
                        }
                        if ("SendDigit".equals(action))
                        {
                            if (value1 != null && value1.length() > 0)
                            {
                                //traderLyncLinkService.dialDigit(traderLyncCall.getConsole(), traderLyncCall.getHandset(), value1.substring(0, 1));
                                //traderLyncLinkService.publishOpenlinkUserCallEvent(traderLyncUserInterest); // no event, so we force pub-sub of current event
                            } else errorMessage = "A dialable digit must be provided for SendDigit action";
                        }
                        // --- Clear the whole call for every participant, then purge events.
                        if ("ClearCall".equals(action))
                        {
                            errorMessage = null; //sendSkype4BRequest(traderLyncUserInterest, "stopPhoneAudio", traderLyncUser.getUserNo(), null, callID, null);
                            if (errorMessage == null)
                            {
                                Iterator it = traderLyncInterest.getUserInterests().values().iterator();
                                while( it.hasNext() )
                                {
                                    OpenlinkUserInterest eachInterest = (OpenlinkUserInterest)it.next();
                                    eachInterest.handleConnectionCleared(callID);
                                }
                                publishOpenlinkCallEvent(traderLyncInterest);
                            }
                            deleteEvents(traderLyncInterest, callID);
                        }
                        if ("ConferenceCall".equals(action))
                        {
                            //traderLyncLinkService.joinELC(traderLyncCall.getConsole());
                        }
                        if ("ClearConference".equals(action))
                        {
                            //traderLyncLinkService.clearELC(traderLyncCall.getConsole());
                            //traderLyncLinkService.clearLine(traderLyncCall.getLine());
                        }
                        if ("IntercomTransfer".equals(action))
                        {
                            try {
                                // value1 must be a resolvable user JID for this site.
                                OpenlinkUser newOpenlinkUser = getOpenlinkUser(new JID(value1));
                                if (newOpenlinkUser != null)
                                {
                                    //traderLyncLinkService.traderLyncTransferCall(traderLyncCall.getConsole(), traderLyncCall.getHandset(), traderLyncUser.getUserNo());
                                } else errorMessage = value1 + " is either not a valid user or logged into a device";
                            } catch (Exception e) {
                                errorMessage = value1 + " is not a valid user identity";
                            }
                        }
                        // --- Consultation/transfer is a two-step toggle tracked by transferFlag:
                        //     first invocation starts the consultation (saving the old called
                        //     number), second cancels it and restores the number on every leg.
                        if ("ConsultationCall".equals(action))
                        {
                            if (!traderLyncCall.transferFlag)
                            {
                                value1 = makeDialableNumber(value1);
                                if (value1 == null || "".equals(value1))
                                {
                                    errorMessage = "A dialable number must be provided for ConsultationCall action";
                                } else {
                                    traderLyncCall.previousCalledNumber = traderLyncCall.proceedingDigits; // store old called number.
                                    traderLyncCall.previousCalledLabel = traderLyncCall.proceedingDigitsLabel;
                                    //traderLyncLinkService.transferCall(traderLyncCall.getConsole(), traderLyncCall.getHandset(), traderLyncCall.getLine(), value1);
                                    traderLyncCall.transferFlag = true;
                                }
                            } else {
                                //traderLyncLinkService.ringRecall(traderLyncCall.getConsole(), traderLyncCall.getHandset()); // terminate current ConsultationCall
                                traderLyncCall.transferFlag = false;
                                if (traderLyncCall.previousCalledNumber != null)
                                {
                                    Iterator<OpenlinkUserInterest> it3 = traderLyncUserInterest.getInterest().getUserInterests().values().iterator();
                                    while( it3.hasNext() )
                                    {
                                        OpenlinkUserInterest theUserInterest = (OpenlinkUserInterest)it3.next();
                                        OpenlinkCall theCall = theUserInterest.getCallByLine(traderLyncCall.getLine());
                                        if (theCall != null)
                                        {
                                            theCall.proceedingDigits = traderLyncCall.previousCalledNumber;
                                            theCall.proceedingDigitsLabel = traderLyncCall.previousCalledLabel;
                                        }
                                    }
                                }
                            }
                            traderLyncCall.setValidActions();
                            //traderLyncLinkService.publishOpenlinkUserCallEvent(traderLyncUserInterest); // no event, so we force pub-sub of current event
                        }
                        if ("TransferCall".equals(action))
                        {
                            if (traderLyncCall.transferFlag)
                            {
                                //traderLyncLinkService.clearCall(traderLyncCall.getConsole(), traderLyncCall.getHandset());
                                traderLyncCall.transferFlag = false;
                            } else errorMessage = "ConsultationCall must be done before TransferCall";
                        }
                        if ("SingleStepTransfer".equals(action))
                        {
                            value1 = makeDialableNumber(value1);
                            if (value1 == null || "".equals(value1))
                            {
                                errorMessage = "A dialable number must be provided for a SingleStepTransfer action";
                            } else {
                                //traderLyncLinkService.transferCall(traderLyncCall.getConsole(), traderLyncCall.getHandset(), traderLyncCall.getLine(), value1);
                                //traderLyncLinkService.clearCall(traderLyncCall.getConsole(), traderLyncCall.getHandset());
                            }
                        }
                        if ("AddThirdParty".equals(action))
                        {
                            value1 = makeDialableNumber(value1);
                            if (value1 == null || "".equals(value1))
                            {
                                errorMessage = "A dialable number must be provided for a AddThirdParty action";
                            } else {
                                //errorMessage = traderLyncLinkService.addExternalCall(traderLyncCall.getLine(), value1);
                            }
                        }
                        if ("RemoveThirdParty".equals(action))
                        {
                            //errorMessage = traderLyncLinkService.removeExternalCall(traderLyncCall.getLine(), makeDialableNumber(value1));
                        }
                        // --- Hold: mark the call held for every participant and publish.
                        if ("HoldCall".equals(action))
                        {
                            if (traderLyncUser.getDeviceNo() != null && !"0.0.0.0".equals(traderLyncUser.getDeviceNo()))
                            {
                                errorMessage = null; //sendSkype4BRequest(traderLyncUserInterest, "holdPhoneAudio", traderLyncUser.getUserNo(), null, callID, null);
                                if (errorMessage == null)
                                {
                                    Iterator it = traderLyncInterest.getUserInterests().values().iterator();
                                    while( it.hasNext() )
                                    {
                                        OpenlinkUserInterest eachInterest = (OpenlinkUserInterest)it.next();
                                        eachInterest.handleCallHeld(callID);
                                    }
                                    publishOpenlinkCallEvent(traderLyncInterest);
                                }
                            } else {
                                errorMessage = "User device not online";
                            }
                        }
                        // --- Voice drop actions are fully stubbed out at present.
                        if ("StartVoiceDrop".equals(action))
                        {
                            //VMessage message = getVMId(traderLyncUser, value1);
                            //if (message == null)
                            //{
                            //    errorMessage = "A valid voice message feature Id must be provided for a StartVoiceDrop action";
                            //} else {
                            //String exten = traderLyncVmsService.getVMExtenToDial(traderLyncUser, message.getId(), message.getName());
                            //errorMessage = traderLyncLinkService.addExternalCall(traderLyncCall.getLine(), makeDialableNumber(exten));
                            //}
                        }
                        if ("StopVoiceDrop".equals(action))
                        {
                            //Message message = getVMId(traderLyncUser, value1);
                            //if (message == null)
                            //{
                            //    errorMessage = "A valid voice message feature Id must be provided for a StartVoiceDrop action";
                            //} else {
                            //String exten = traderLyncVmsService.getVMExtenToDial(traderLyncUser, message.getId(), message.getName());
                            //errorMessage = traderLyncLinkService.removeExternalCall(traderLyncCall.getLine(), makeDialableNumber(exten));
                            //}
                        }
                    } else errorMessage = "Action is not valid";
                } else errorMessage = "Call id not found";
            } else errorMessage = "Interest not found";
        }
        catch(Exception e) {
            Log.error("["+ site.getName() + "] processUserAction " + e);
            e.printStackTrace();
            errorMessage = "Request Action internal error - " + e.toString();
        }
        // Surface the failure to the ad-hoc command response as an error note.
        if (errorMessage != null && command != null)
        {
            Element note = command.addElement("note");
            note.addAttribute("type", "error");
            note.setText("Request Action - " + errorMessage);
        }
        return errorMessage;
    }
public String setFeature(Element newCommand, String profileID, String featureID, String value1, String value2)
{
Log.info( "["+ site.getName() + "] setFeature " + profileID + " " + featureID + " " + value1 + " " + value2);
String errorMessage = null;
try {
if (value1 != null && value1.length() > 0)
{
OpenlinkUser traderLyncUser = getOpenlinkProfile(profileID);
if (traderLyncUser != null)
{
if ("hs_1".equals(featureID))
{
if (validateTrueFalse(value1))
traderLyncUser.setHandsetNo("true".equals(value1.toLowerCase()) ? "1" : "2");
else
errorMessage = "value1 is not true or false";
}
else if ("hs_2".equals(featureID))
{
if (validateTrueFalse(value1))
traderLyncUser.setHandsetNo("true".equals(value1.toLowerCase()) ? "2" : "1");
else
errorMessage = "value1 is not true or false";
}
else if ("priv_1".equals(featureID))
{
if (validateTrueFalse(value1))
traderLyncUser.setAutoPrivate("true".equals(value1.toLowerCase()));
else
errorMessage = "value1 is not true or false";
}
else if ("hold_1".equals(featureID))
{
if (validateTrueFalse(value1))
traderLyncUser.setAutoHold("true".equals(value1.toLowerCase()));
else
errorMessage = "value1 is not true or false";
}
else if ("callback_1".equals(featureID))
{
if (validateTrueFalse(value1))
{
if ("true".equals(value1.toLowerCase()))
{
if (value2 != null && !"".equals(value2))
{
String dialableNumber = makeDialableNumber(value2);
if (dialableNumber != null && !"".equals(dialableNumber))
{
traderLyncUser.setCallback(dialableNumber);
OpenlinkCallback traderLyncCallback = null; //traderLyncLinkService.allocateCallback(traderLyncUser);
if (traderLyncCallback == null)
errorMessage = "unable to allocate a virtual turret";
} else errorMessage = "value2 is not a dialable number";
} else {
if (traderLyncUser.getCallback() != null)
{
OpenlinkCallback traderLyncCallback = null; //traderLyncLinkService.allocateCallback(traderLyncUser);
if (traderLyncCallback == null)
errorMessage = "unable to allocate a callback";
} else errorMessage = "calback destination is missing";
}
} else {
//traderLyncLinkService.freeCallback(traderLyncUser.getUserNo());
traderLyncUser.setPhoneCallback(null);
}
}
else errorMessage = "value1 is not true or false";
}
else if ("fwd_1".equals(featureID)) // call forward
{
if (openlinkInterests.containsKey(value1)) // value is interest id
{
OpenlinkUserInterest traderLyncUserInterest = openlinkInterests.get(value1);
if (traderLyncUser.getUserNo().equals(traderLyncUser.getUserNo()))
{
if ("D".equals(traderLyncUserInterest.getInterest().getInterestType()))
{
String pname = site.getName().toLowerCase();
String pbxFWDCodePrefix = JiveGlobals.getProperty(Properties.Openlink_PBX_FWD_CODE_PREFIX + "." + pname, "*41");
String pbxFWDCodeSuffix = JiveGlobals.getProperty(Properties.Openlink_PBX_FWD_CODE_SUFFIX + "." + pname, "");
String pbxFWDCodeCancel = JiveGlobals.getProperty(Properties.Openlink_PBX_FWD_CODE_CANCEL + "." + pname, "*41");
String dialDigits = null;
if (value2 == null || "".equals(value2))
{
dialDigits = pbxFWDCodeCancel;
errorMessage = doCallForward(dialDigits, traderLyncUserInterest, newCommand);
if (errorMessage == null)
{
Iterator<OpenlinkUserInterest> iter2 = traderLyncUserInterest.getInterest().getUserInterests().values().iterator();
while( iter2.hasNext() )
{
OpenlinkUserInterest theUserInterest = (OpenlinkUserInterest)iter2.next();
theUserInterest.setCallFWD("false");
}
traderLyncUser.setLastCallForward("");
}
} else {
String dialableNumber = makeDialableNumber(value2);
if (dialableNumber != null && !"".equals(dialableNumber))
{
dialDigits = pbxFWDCodePrefix + dialableNumber + pbxFWDCodeSuffix;
errorMessage = doCallForward(dialDigits, traderLyncUserInterest, newCommand);
if (errorMessage == null)
{
Iterator<OpenlinkUserInterest> iter2 = traderLyncUserInterest.getInterest().getUserInterests().values().iterator();
while( iter2.hasNext() )
{
OpenlinkUserInterest theUserInterest = (OpenlinkUserInterest)iter2.next();
theUserInterest.setCallFWD("true");
theUserInterest.setCallFWDDigits(value2);
}
traderLyncUser.setLastCallForwardInterest(value1);
traderLyncUser.setLastCallForward(value2);
}
} else errorMessage = "value2 is not a dialable number";
}
} else errorMessage = "CallForward requires a directory number interest";
} else errorMessage = "Interest does not belong to this profile";
} else errorMessage = "Interest not found";
}
else errorMessage = "Feature not found";
} else errorMessage = "Profile not found";
} else errorMessage = "Input1 is missing";
}
catch(Exception e) {
Log.error("["+ site.getName() + "] setFeature " + e);
e.printStackTrace();
errorMessage = "Internal error - " + e.toString();
}
return errorMessage;
}
    // Dials a PBX call-forward feature code on behalf of the given interest.
    // Currently a stub: the platform calls are commented out, so this always
    // returns null (success). Kept so setFeature's flow stays intact.
    private String doCallForward(String dialDigits, OpenlinkUserInterest traderLyncUserInterest, Element newCommand)
    {
        String errorMessage = null;
        //traderLyncUserInterest.getUser().selectCallset(this, traderLyncUserInterest.getInterest().getCallset(), traderLyncUserInterest.getUser().getHandsetNo(), "true", "true", dialDigits);
        //errorMessage = waitForFirstEvent(newCommand, traderLyncUserInterest.getUser(), false, traderLyncUserInterest.getUser().getHandsetNo());
        //traderLyncLinkService.clearCall(traderLyncUserInterest.getUser().getDeviceNo(), traderLyncUserInterest.getUser().getHandsetNo());
        return errorMessage;
    }
public String manageVoiceBridge(Element newCommand, JID userJID, List<Object[]> actions)
{
Log.info( "["+ site.getName() + "] manageVoiceMessage " + userJID + " ");
String errorMessage = "";
List<String> actionList = new ArrayList<String>();
try {
if (actions != null && actions.size() > 0)
{
Iterator it = actions.iterator();
while( it.hasNext() )
{
Object[] action = (Object[])it.next();
String name = (String) action[0];
String value1 = (String) action[1];
String value2 = (String) action[2];
String thisErrorMessage = null; //traderLyncLinkService.manageCallParticipant(userJID, value1, name, value2);
if (thisErrorMessage == null)
{
if ("MakeCall".equalsIgnoreCase(name))
{
actionList.add(value1);
}
} else {
errorMessage = errorMessage + thisErrorMessage + "; ";
}
}
if (actionList.size() > 0)
{
//traderLyncLinkService.handlePostBridge(actionList);
}
} else errorMessage = "Voice message features are missing";
}
catch(Exception e) {
Log.error("["+ site.getName() + "] manageVoiceBridge " + e);
e.printStackTrace();
errorMessage = "Internal error - " + e.toString();
}
return errorMessage.length() == 0 ? null : errorMessage;
}
public String manageVoiceMessage(Element newCommand, String profileID, String featureId, String action, String value1)
{
Log.info( "["+ site.getName() + "] manageVoiceMessage " + profileID + " " + featureId + " " + action + " " + value1);
String errorMessage = null;
try {
if (action != null && action.length() > 0)
{
OpenlinkUser traderLyncUser = getOpenlinkProfile(profileID);
if (traderLyncUser != null)
{
action = action.toLowerCase();
if ("record".equals(action))
{
}
else if ("edit".equals(action))
{
}
else if ("playback".equals(action))
{
}
else if ("delete".equals(action))
{
}
else if ("save".equals(action))
{
}
else if ("archive".equals(action))
{
} else errorMessage = "Action not supported";
} else errorMessage = "Profile not found";
} else errorMessage = "Action is missing";
}
catch(Exception e) {
Log.error("["+ site.getName() + "] manageVoiceMessage " + e);
e.printStackTrace();
errorMessage = "Internal error - " + e.toString();
}
return errorMessage;
}
private void addVoiceMessageExtension(Element newCommand, String exten, OpenlinkUser traderLyncUser, String msgId)
{
Element iodata = newCommand.addElement("iodata", "urn:xmpp:tmp:io-data");
iodata.addAttribute("type","output");
Element devicestatus = iodata.addElement("out").addElement("devicestatus", "http://xmpp.org/protocol/openlink:01:00:00#device-status");
devicestatus.addElement("profile").setText(traderLyncUser.getProfileName());
Element feature = devicestatus.addElement("features").addElement("feature").addAttribute("id", msgId);
Element voicemessage = feature.addElement("voicemessage").addAttribute("xmlns", "http://xmpp.org/protocol/openlink:01:00:00/features#voice-message");
voicemessage.addElement("msglen");
voicemessage.addElement("status").setText("ok");
voicemessage.addElement("statusdescriptor");
voicemessage.addElement("state");
if (exten == null || exten.length() == 0)
voicemessage.addElement("exten");
else
voicemessage.addElement("exten").setText(exten);
}
//-------------------------------------------------------
//
//
//
//-------------------------------------------------------
public List<OpenlinkUser> getOpenlinkProfiles(JID jid)
{
List<OpenlinkUser> traderLyncUsers = new ArrayList();
String userName = jid.getNode();
if (jid.getDomain().indexOf(getDomain()) > -1)
{
Iterator<OpenlinkUser> it = traderLyncUserTable.values().iterator();
while( it.hasNext() )
{
OpenlinkUser traderLyncUser = (OpenlinkUser)it.next();
if (userName.equals(traderLyncUser.getUserId()))
{
traderLyncUsers.add(traderLyncUser);
}
}
}
return traderLyncUsers;
}
public OpenlinkUser getOpenlinkUser(JID jid)
{
return getOpenlinkUser(jid.getNode());
}
public OpenlinkUser getOpenlinkUser(String userName)
{
Iterator<OpenlinkUser> it = traderLyncUserTable.values().iterator();
while( it.hasNext() )
{
OpenlinkUser traderLyncUser = (OpenlinkUser)it.next();
if (userName.equals(traderLyncUser.getUserId()) && !"0.0.0.0".equals(traderLyncUser.getDeviceNo()))
{
return traderLyncUser;
}
}
return null;
}
public OpenlinkUser getOpenlinkProfile(String profileID)
{
OpenlinkUser traderLyncUser = null;
if (traderLyncUserTable.containsKey(profileID))
{
traderLyncUser = traderLyncUserTable.get(profileID);
}
return traderLyncUser;
}
public OpenlinkUserInterest getOpenlinkInterest(String userInterest)
{
OpenlinkUserInterest traderLyncUserInterest = null;
if (openlinkInterests.containsKey(userInterest))
{
traderLyncUserInterest = openlinkInterests.get(userInterest);
}
return traderLyncUserInterest;
}
public String getSiteID()
{
return String.valueOf(site.getSiteID());
}
public void sendPacket(Packet packet)
{
try {
componentManager.sendPacket(this, packet);
} catch (Exception e) {
Log.error("Exception occured while sending packet." + e);
}
}
public void getInterestSubscriptions()
{
Log.info( "["+ site.getName() + "] getInterestSubscriptions");
try {
Iterator<OpenlinkUser> iter = traderLyncUserTable.values().iterator();
while(iter.hasNext())
{
OpenlinkUser traderLyncUser = (OpenlinkUser)iter.next();
Iterator<OpenlinkInterest> iter2 = traderLyncUser.getInterests().values().iterator();
while( iter2.hasNext() )
{
OpenlinkInterest traderLyncInterest = (OpenlinkInterest)iter2.next();
getInterestSubscriptions(traderLyncInterest, traderLyncUser.getUserNo());
}
}
}
catch(Exception e) {
Log.error("["+ site.getName() + "] getInterestSubscriptions " + e);
}
}
//-------------------------------------------------------
//
//
//
//-------------------------------------------------------
public boolean validateTrueFalse(String value1)
{
boolean valid = false;
String flag = value1.toLowerCase();
if ("true".equals(flag) || "false".equals(flag))
{
valid = true;
}
return valid;
}
public String makeDialableNumber(String digits)
{
String dialableNumber = null;
if ((digits != null && !"".equals(digits)) || digits.startsWith("sip:") || digits.startsWith("tel:"))
{
dialableNumber = digits;
/*
String cononicalNumber = formatCanonicalNumber(convertAlpha(digits));
if (cononicalNumber != null && !"".equals(cononicalNumber))
{
dialableNumber = formatDialableNumber(cononicalNumber);
}
*/
Log.info( "["+ site.getName() + "] makeDialableNumber " + digits + "=>" + dialableNumber);
}
return dialableNumber;
}
private String convertAlpha(String input)
{
int inputlength = input.length();
input = input.toLowerCase();
String phonenumber = "";
for (int i = 0; i < inputlength; i++) {
int character = input.charAt(i);
switch(character) {
case '+': phonenumber+="+";break;
case '*': phonenumber+="*";break;
case '#': phonenumber+="#";break;
case '0': phonenumber+="0";break;
case '1': phonenumber+="1";break;
case '2': phonenumber+="2";break;
case '3': phonenumber+="3";break;
case '4': phonenumber+="4";break;
case '5': phonenumber+="5";break;
case '6': phonenumber+="6";break;
case '7': phonenumber+="7";break;
case '8': phonenumber+="8";break;
case '9': phonenumber+="9";break;
case 'a': case 'b': case 'c': phonenumber+="2";break;
case 'd': case 'e': case 'f': phonenumber+="3";break;
case 'g': case 'h': case 'i': phonenumber+="4";break;
case 'j': case 'k': case 'l': phonenumber+="5";break;
case 'm': case 'n': case 'o': phonenumber+="6";break;
case 'p': case 'q': case 'r': case 's': phonenumber+="7";break;
case 't': case 'u': case 'v': phonenumber+="8";break;
case 'w': case 'x': case 'y': case 'z': phonenumber+="9";break;
}
}
return (phonenumber);
}
public boolean isComponent(JID jid) {
final RoutingTable routingTable = XMPPServer.getInstance().getRoutingTable();
if (routingTable != null)
{
return routingTable.hasComponentRoute(jid);
}
return false;
}
    // Placeholder: intended to (re)configure the cache refresh interval, but the
    // try block is currently empty, so this only logs its invocation.
    public void setRefreshCacheInterval()
    {
        Log.info( "["+ site.getName() + "] setRefreshCacheInterval ");
        try {
        }
        catch (Exception e)
        {
            Log.error("["+ site.getName() + "] setRefreshCacheInterval " + e);
        }
    }
//-------------------------------------------------------
//
//
//
//-------------------------------------------------------
    // Bootstraps Openlink profiles from the server's user directory: for every
    // eligible user (not the configured default account, and whose phone
    // property carries none of the ";wl="/";ddi="/";lid=" markers) it creates an
    // OpenlinkUser, a default "D" interest, line ("L") interests derived from
    // roster contacts that DO carry those markers, and the user's pub-sub node.
    // NOTE(review): the phone property encoding (";fsu=", ";wl=", ";ddi=",
    // ";lid=") is assumed from the string checks below — confirm against the
    // provisioning format.
    private void getUserProfiles()
    {
        try
        {
            Collection<User> users = userManager.getUsers();
            Iterator it = users.iterator();
            while( it.hasNext() )
            {
                User user = (User)it.next();
                String userEmail = user.getEmail();
                // Phone source: explicit property, else the email when it embeds ";fsu=".
                String userPhone = user.getProperties().get("wirelynk.phone.other") != null ? user.getProperties().get("wirelynk.phone.other") : (userEmail != null && userEmail.indexOf(";fsu=") > -1 ? userEmail : "");
                String userId = user.getUsername(); //getUserNo(user.getEmail());
                if(userId != null && userId.equals(JiveGlobals.getProperty("wirelynk.default.username", "wirelynk")) == false && userPhone.indexOf(";wl=") == -1 && userPhone.indexOf(";ddi=") == -1 && userPhone.indexOf(";lid=") == -1)
                {
                    Log.info( "["+ site.getName() + "] getUserProfiles - user profile " + user.getUsername());
                    OpenlinkUser traderLyncUser = new OpenlinkUser();
                    traderLyncUser.setUserName(user.getName());
                    traderLyncUser.setUserId(userId);
                    traderLyncUser.setUserNo(user.getUsername());
                    traderLyncUser.setSiteName(getName());
                    traderLyncUser.setSiteID(1);
                    traderLyncUser.setHandsetNo("1");
                    //traderLyncUser.setDeviceType("wirelynk");
                    // Every user gets a default directory-number interest keyed by their id.
                    createInterest(traderLyncUser, userId, "D", user.getName(), "true", userId);
                    if (userProfiles.containsKey(userId) == false)
                    {
                        traderLyncUser.setDefault("true");
                        userProfiles.put(userId, traderLyncUser);
                    }
                    traderLyncUserTable.put(traderLyncUser.getUserNo(), traderLyncUser);
                    // Roster contacts flagged as lines become additional "L" interests.
                    Roster roster = rosterManager.getRoster(user.getUsername());
                    List<RosterItem> rosterItems = new ArrayList<RosterItem>(roster.getRosterItems());
                    Collections.sort(rosterItems, new RosterItemComparator());
                    for (RosterItem rosterItem : rosterItems)
                    {
                        try {
                            String interestNode = rosterItem.getJid().getNode();
                            User itemUser = userManager.getUser(interestNode);
                            String itemEmail = itemUser.getEmail();
                            String phone = itemUser.getProperties().get("wirelynk.phone.other") != null ? itemUser.getProperties().get("wirelynk.phone.other") : (itemEmail != null && itemEmail.indexOf(";fsu=") > -1 ? itemEmail : "");
                            if (phone.indexOf(";wl=") > -1 && (phone.indexOf(";ddi=") > -1 || phone.indexOf(";lid=") > -1) && userId.equals(interestNode) == false && interestNode.equals(JiveGlobals.getProperty("wirelynk.default.username", "wirelynk")) == false)
                            {
                                createInterest(traderLyncUser, interestNode, "L", rosterItem.getNickname(), "false", phone);
                            }
                        } catch (Exception e) {
                            Log.error( "["+ site.getName() + "] " + "Error in getProfiles ",e);
                        }
                    }
                    createPubsubNode(user.getUsername() + "@" + getDomain());
                }
            }
        }
        catch (Exception e)
        {
            Log.error( "["+ site.getName() + "] " + "Error in getProfiles ",e);
        }
    }
    // Creates (or reuses) the shared OpenlinkInterest for interestNode, links it
    // to the given user, registers the pair in the shared maps, provisions the
    // per-user pub-sub node and refreshes its subscriptions. When
    // defaultInterest is "true" the interest also becomes the user's default.
    private void createInterest(OpenlinkUser traderLyncUser, String interestNode, String interestType, String nickname, String defaultInterest, String interestValue)
    {
        Log.info( "["+ site.getName() + "] createInterest " + interestNode);
        OpenlinkInterest traderLyncInterest = null;
        // Interests are shared across users: reuse an existing one when present.
        if (traderLyncInterests.containsKey(interestNode))
        {
            traderLyncInterest = traderLyncInterests.get(interestNode);
        } else {
            traderLyncInterest = new OpenlinkInterest(interestNode);
        }
        // These setters run even for a reused interest, refreshing its metadata.
        traderLyncInterest.setInterestType(interestType);
        traderLyncInterest.setSiteName(getName());
        traderLyncInterest.setInterestLabel(nickname);
        traderLyncInterest.setInterestValue(interestValue);
        traderLyncInterests.put(interestNode, traderLyncInterest);
        if (defaultInterest.equals("true"))
        {
            traderLyncUser.setDefaultInterest(traderLyncInterest);
        }
        OpenlinkUserInterest traderLyncUserInterest = traderLyncInterest.addUserInterest(traderLyncUser, defaultInterest);
        traderLyncUser.addInterest(traderLyncInterest);
        // Per-user interest key is interest id + user number.
        openlinkInterests.put(interestNode + traderLyncUser.getUserNo(), traderLyncUserInterest);
        createPubsubNode(traderLyncInterest.getInterestId() + traderLyncUser.getUserNo());
        getInterestSubscriptions(traderLyncInterest, traderLyncUser.getUserNo());
    }
//-------------------------------------------------------
//
//
//
//-------------------------------------------------------
/**
 * Extracts the local part (user number) from an e-mail style address:
 * "trader1@host" yields "trader1". Input without an '@' is returned
 * unchanged; null input yields null.
 */
private String getUserNo(String email)
{
    if (email == null) {
        return null;
    }
    int at = email.indexOf('@');
    return (at < 0) ? email : email.substring(0, at);
}
/**
 * Scans the vCard's TEL entries for one carrying both qualifier child
 * elements (e.g. "WORK" and "VOICE") and returns the text of its NUMBER
 * child, or null when no matching entry exists.
 */
private String getTelVoiceNumber(Element vCard, String work, String voice)
{
    for (Iterator i = vCard.elementIterator( "TEL" ); i.hasNext(); )
    {
        Element tel = (Element) i.next();
        if (tel.element(work) == null || tel.element(voice) == null) {
            continue;   // not the flavour of number we are looking for
        }
        Element number = tel.element("NUMBER");
        if (number != null) {
            return number.getText();   // first match wins
        }
    }
    return null;
}
//-------------------------------------------------------
//
//
//
//-------------------------------------------------------
/**
 * Pushes outgoing-call state to every user monitoring the call. When the
 * call is connected, the originating user receives the full
 * originated -> delivered -> established sequence while all other users
 * are marked busy; when not connected, everyone gets connection-cleared.
 * The updated call status is then published.
 */
public void outgoingCallNotification(String requester, String callId, boolean connected, String dialDigits, String label)
{
    Log.info( "["+ site.getName() + "] outgoingCallNotification " + requester + " " + callId);
    try {
        if (!callInterests.containsKey(callId)) {
            return;   // unknown call — nothing to notify
        }
        OpenlinkInterest callInterest = callInterests.get(callId);
        for (OpenlinkUserInterest userInterest : callInterest.getUserInterests().values())
        {
            if (!connected)
            {
                userInterest.handleConnectionCleared(callId);
            }
            else if (requester.equals(userInterest.getUser().getUserNo()))
            {
                userInterest.handleCallOutgoing("CallOriginated", callId, dialDigits, label);
                userInterest.handleCallOutgoing("CallDelivered", callId, dialDigits, label);
                userInterest.handleCallOutgoing("CallEstablished", callId, dialDigits, label);
            }
            else
            {
                userInterest.handleCallOutgoing("CallBusy", callId, dialDigits, label);
            }
        }
        publishOpenlinkCallEvent(callInterest);
    } catch (Exception e) {
        Log.error( "["+ site.getName() + "] " + "Error in outgoingCallNotification ", e);
    }
}
/*
public void notifyConferenceMonitors(ConferenceEvent conferenceEvent)
{
if ("IncomingCallsConference".equals(conferenceEvent.getConferenceId()) == false)
{
Log.info( "["+ site.getName() + "] notifyConferenceMonitors " + conferenceEvent.toString());
try {
if (conferenceEvent.equals(ConferenceEvent.MEMBER_LEFT) || conferenceEvent.equals(ConferenceEvent.MEMBER_JOINED))
{
Log.info( "["+ site.getName() + "] notifyConferenceMonitors looking for call " + conferenceEvent.getCallId() + " " + conferenceEvent.getMemberCount());
CallHandler callHandler = CallHandler.findCall(conferenceEvent.getCallId());
if (callHandler != null)
{
Log.info( "["+ site.getName() + "] notifyConferenceMonitors found call handler " + callHandler);
CallParticipant callParticipant = callHandler.getCallParticipant();
if (callParticipant != null && callParticipant.getDeviceAddress() != null)
{
Log.info( "["+ site.getName() + "] notifyConferenceMonitors found device " + callParticipant.getDeviceAddress());
Iterator<OpenlinkUser> iter = traderLyncUserTable.values().iterator();
while(iter.hasNext())
{
OpenlinkUser traderLyncUser = (OpenlinkUser)iter.next();
if (callParticipant.getDeviceAddress().equals(traderLyncUser.getDeviceNo()))
{
if (conferenceEvent.equals(ConferenceEvent.MEMBER_JOINED))
{
Log.info( "["+ site.getName() + "] notifyConferenceMonitors setting device callid " + traderLyncUser.getUserNo() + " " + conferenceEvent.getCallId());
traderLyncUser.setHandsetCallId(conferenceEvent.getCallId());
} else {
Log.info( "["+ site.getName() + "] notifyConferenceMonitors resetting device callid " + traderLyncUser.getUserNo() + " " + conferenceEvent.getCallId());
traderLyncUser.setHandsetCallId(null);
}
break;
}
}
// set call state
if (callInterests.containsKey(conferenceEvent.getConferenceId()))
{
// conf id is our far party call id
OpenlinkInterest callInterest = callInterests.get(conferenceEvent.getConferenceId());
Log.info( "["+ site.getName() + "] notifyConferenceMonitors found far party " + callInterest.getInterestValue());
Iterator it = callInterest.getUserInterests().values().iterator();
while( it.hasNext() )
{
OpenlinkUserInterest traderLyncUserInterest = (OpenlinkUserInterest)it.next();
handleCallState(conferenceEvent, traderLyncUserInterest);
}
publishOpenlinkCallEvent(callInterest);
if (conferenceEvent.getMemberCount() == 1 && conferenceEvent.equals(ConferenceEvent.MEMBER_LEFT) && ConferenceManager.getLastMember(conferenceEvent.getConferenceId()).isConferenceMuted() == false)
{
// held callers are muted. orphan participant wil NOT be muted and should be disconnected
Log.info( "["+ site.getName() + "] notifyConferenceMonitors tearing down " + conferenceEvent.getConferenceId());
CallHandler.hangup(conferenceEvent.getConferenceId(), "User requested call termination");
deleteEvents(callInterest, conferenceEvent.getConferenceId());
ConferenceManager.endConference(conferenceEvent.getConferenceId());
}
}
}
}
}
} catch (Exception e) {
Log.error( "["+ site.getName() + "] " + "Error in notifyConferenceMonitors " + e);
e.printStackTrace();
}
}
}
*/
//-------------------------------------------------------
//
//
//
//-------------------------------------------------------
/*
private void handleCallState(ConferenceEvent conferenceEvent, OpenlinkUserInterest traderLyncUserInterest)
{
Log.info( "["+ site.getName() + "] handleCallState " + traderLyncUserInterest.getUser().getUserNo() + " " + conferenceEvent.getMemberCount());
OpenlinkInterest traderLyncInterest = traderLyncUserInterest.getInterest();
OpenlinkUser traderLyncUser = traderLyncUserInterest.getUser();
CallHandler callHandlerFarParty = CallHandler.findCall(conferenceEvent.getConferenceId()); // get far party call objects
if (callHandlerFarParty != null && callHandlerFarParty.getCallParticipant() != null)
{
CallParticipant callParticipantFarParty = callHandlerFarParty.getCallParticipant();
String activeUser = callParticipantFarParty.getRequester();
if (conferenceEvent.getMemberCount() == 2)
{
Log.info( "["+ site.getName() + "] handleCallState 2 participants " + activeUser);
// far party and a single user left, find user and set as connected, everyone else busy
if (conferenceEvent.equals(ConferenceEvent.MEMBER_JOINED))
{
if (activeUser.equals(traderLyncUser.getUserNo()))
{
traderLyncUserInterest.handleCallConnected(conferenceEvent.getConferenceId());
} else {
traderLyncUserInterest.handleCallBusy(conferenceEvent.getConferenceId());
}
} else { // participant left, find last remaining participant
if (activeUser.equals(traderLyncUser.getUserNo()) == false && traderLyncUser.getHandsetCallId() != null)
{
traderLyncUserInterest.handleCallConnected(conferenceEvent.getConferenceId());
} else {
traderLyncUserInterest.handleCallBusy(conferenceEvent.getConferenceId());
}
}
} else if (conferenceEvent.getMemberCount() == 1) {
Log.info( "["+ site.getName() + "] handleCallState 1 participant " + traderLyncUser.getHandsetCallId());
if (conferenceEvent.equals(ConferenceEvent.MEMBER_JOINED) && traderLyncUser.getHandsetCallId() != null)
{
// new joiner is a device (handset/speaker), we should be connected
traderLyncUserInterest.handleCallConnected(conferenceEvent.getConferenceId());
}
} else { // just change state of new joiner, everyone else keep old state
Log.info( "["+ site.getName() + "] handleCallState multiple participants " + traderLyncUser.getUserNo());
if (conferenceEvent.equals(ConferenceEvent.MEMBER_JOINED) && activeUser.equals(traderLyncUser.getUserNo()))
{
traderLyncUserInterest.handleCallConferenced(conferenceEvent.getConferenceId());
}
}
}
}
*/
/**
 * Copies recording metadata from a device event JSON onto the matching
 * call record (line/ddi/CLI fields) and writes the call to the log.
 * Does nothing when the interest has no call with the given id.
 */
public void logRecordEvent(OpenlinkUserInterest userInterest, JSONObject eventJSON, String callId, String direction)
{
    Log.info("logRecordEvent " + callId + "\n" + eventJSON);
    OpenlinkCall call = userInterest.getCallById(callId);
    if (call == null) {
        return;
    }
    call.line = eventJSON.has("recording") ? eventJSON.getString("recording") : "";
    call.direction = direction;
    if (eventJSON.has("name")) {
        call.ddi = eventJSON.getString("name");
    }
    if (eventJSON.has("label")) {
        call.ddiLabel = eventJSON.getString("label");
    }
    if (eventJSON.has("number")) {
        // The same number is used as both the CLI and its display label.
        String conference = eventJSON.getString("number");
        call.setCLI(conference);
        call.setCLILabel(conference);
    }
    userInterest.logCall(call, getDomain(), 0);
}
/**
 * Applies a private-wire state event to the named user's interest and
 * publishes the resulting Openlink call status.
 *
 * The event JSON must carry "state", "name" (the private wire) and
 * "label"; "from"/"to" are optional and only consulted for ringing
 * events. Recognised states: ringing, originated, connected, answered,
 * conferenced, busy, idle, held — anything else is ignored.
 */
public void sendUserInterestEvent(String username, JSONObject eventJSON)
{
    Log.info("sendUserInterestEvent " + username + "\n" + eventJSON);
    String state = eventJSON.getString("state");
    String privateWire = eventJSON.getString("name");
    String label = eventJSON.getString("label");
    String callId = privateWire + username;   // synthetic id: one call per wire+user
    if (traderLyncInterests.containsKey(privateWire))
    {
        OpenlinkInterest callInterest = traderLyncInterests.get(privateWire);
        if (callInterests.containsKey(callId) == false)
        {
            callInterests.put(callId, callInterest);
        }
        if (callInterest.getUserInterests().containsKey(username))
        {
            OpenlinkUserInterest userInterest = callInterest.getUserInterests().get(username);
            boolean processed = false;
            if (state.equals("ringing"))
            {
                String from = privateWire;
                String to = privateWire;
                if (eventJSON.has("from")) from = eventJSON.getString("from");
                // BUG FIX: this previously assigned the "to" value to 'from',
                // clobbering the caller address and leaving 'to' at its default.
                if (eventJSON.has("to")) to = eventJSON.getString("to");
                userInterest.handleCallIncoming(callId, from, to);
                logRecordEvent(userInterest, eventJSON, callId, "Incoming");
                processed = true;
            }
            if (state.equals("originated"))
            {
                String dialDigits = privateWire;
                userInterest.handleCallOutgoing("CallOriginated", callId, dialDigits, label);
                userInterest.handleCallOutgoing("CallDelivered", callId, dialDigits, label);
                processed = true;
            }
            if (state.equals("connected") || state.equals("answered"))
            {
                userInterest.handleCallConnected(callId);
                // Only fully "connected" events are written to the call log.
                if (state.equals("connected")) logRecordEvent(userInterest, eventJSON, callId, "Outgoing");
                processed = true;
            }
            if (state.equals("conferenced"))
            {
                userInterest.handleCallConferenced(callId);
                processed = true;
            }
            if (state.equals("busy"))
            {
                userInterest.handleCallBusy(callId);
                processed = true;
            }
            if (state.equals("idle"))
            {
                userInterest.handleConnectionCleared(callId);
                processed = true;
            }
            if (state.equals("held"))
            {
                userInterest.handleCallHeld(callId);
                processed = true;
            }
            if (processed)
            {
                publishOpenlinkUserCallEvent(userInterest);
            }
            // Drop the call record only after the cleared state was published.
            if (state.equals("idle"))
            {
                userInterest.removeCallById(callId);
            }
        }
    }
}
/**
 * Routes an incoming private-wire call to every user monitoring the wire
 * and publishes the updated call status. Silently ignores wires that are
 * not known to this site.
 */
public void handleCallIncomingPW(String callId, String privateWire, String from, String to)
{
    OpenlinkInterest lineInterest = getInterest(callId, privateWire);
    if (lineInterest == null) {
        return;
    }
    for (OpenlinkUserInterest userInterest : lineInterest.getUserInterests().values())
    {
        userInterest.handleCallIncoming(callId, from, to);
    }
    publishOpenlinkCallEvent(lineInterest);
}
/**
 * Resolves the interest that owns a call. The call-id mapping is tried
 * first; on a miss, the interest is looked up by private wire and the
 * call-id mapping is cached for next time. Returns null when neither
 * lookup succeeds.
 */
private OpenlinkInterest getInterest(String callId, String privateWire)
{
    Log.info( "["+ site.getName() + "] getInterest " + callId + " " + privateWire);
    // Fast path: this call was seen before.
    if (callInterests.containsKey(callId)) {
        return callInterests.get(callId);
    }
    OpenlinkInterest interest = null;
    if (traderLyncInterests.containsKey(privateWire)) {
        interest = traderLyncInterests.get(privateWire);
    }
    if (interest != null) {
        // Remember the association so future events resolve directly.
        callInterests.put(callId, interest);
    }
    return interest;
}
/**
 * Clears a finished call from every monitoring user: each removed call
 * record is written to the call log, then the call-to-interest mapping
 * itself is dropped.
 */
private void deleteEvents(OpenlinkInterest callInterest, String callId)
{
    Log.info( "["+ site.getName() + "] deleteEvents " + callId);
    for (OpenlinkUserInterest userInterest : callInterest.getUserInterests().values())
    {
        OpenlinkCall removedCall = userInterest.removeCallById(callId);
        if (removedCall != null)
        {
            userInterest.logCall(removedCall, getDomain(), 0);
        }
    }
    callInterests.remove(callId);
}
//-------------------------------------------------------
//
//
//
//-------------------------------------------------------
/**
 * Publishes the current call state of every user attached to the given
 * interest. In a cluster only the senior member publishes; a standalone
 * server always does.
 */
public synchronized void publishOpenlinkCallEvent(OpenlinkInterest traderLyncInterest)
{
    boolean mayPublish = !ClusterManager.isClusteringEnabled() || ClusterManager.isSeniorClusterMember();
    if (!mayPublish)
    {
        return;
    }
    Log.info( "["+ site.getName() + "] publishOpenlinkCallEvent - interest " + traderLyncInterest.getInterestId());
    for (OpenlinkUserInterest userInterest : traderLyncInterest.getUserInterests().values())
    {
        publishOpenlinkUserCallEvent(userInterest);
    }
}
/**
 * Publishes one user interest's call state and refreshes its cached
 * content. Skips disabled users; in a cluster only the senior member
 * publishes. The pubsub event itself is gated by canPublish().
 */
public synchronized void publishOpenlinkUserCallEvent(OpenlinkUserInterest traderLyncUserInterest)
{
    boolean mayPublish = !ClusterManager.isClusteringEnabled() || ClusterManager.isSeniorClusterMember();
    if (!mayPublish)
    {
        return;
    }
    Log.info( "["+ site.getName() + "] publishOpenlinkUserEvent - user interest " + traderLyncUserInterest.getInterestName() + " enabled: " + traderLyncUserInterest.getUser().enabled());
    if (!traderLyncUserInterest.getUser().enabled())
    {
        return;
    }
    if (traderLyncUserInterest.canPublish(this))
    {
        publishOpenlinkUserInterestEvent(traderLyncUserInterest.getInterest(), traderLyncUserInterest);
    }
    updateCacheContent(traderLyncUserInterest);
}
/**
 * Builds and sends the Openlink call-status pubsub publish IQ for one
 * user/interest pair. Users whose device number is the placeholder
 * "0.0.0.0" are skipped. The IQ is only sent when the callstatus element
 * actually contains call children (or a busy/fwd attribute plus calls).
 */
private void publishOpenlinkUserInterestEvent(OpenlinkInterest traderLyncInterest, OpenlinkUserInterest traderLyncUserInterest)
{
Log.info( "["+ site.getName() + "] publishOpenlinkUserInterestEvent - scan user interest " + traderLyncUserInterest.getUser().getUserId() + " " + traderLyncUserInterest.getInterest().getInterestId());
// "0.0.0.0" marks a user with no real device — nothing to publish.
if (!"0.0.0.0".equals(traderLyncUserInterest.getUser().getDeviceNo()))
{
Log.info( "["+ site.getName() + "] publishOpenlinkUserInterestEvent - publish user interest " + traderLyncUserInterest.getUser().getUserId() + " " + traderLyncUserInterest.getInterest().getInterestId());
// Node id = interest id + user number, matching createPubsubNode().
String interestNode = traderLyncInterest.getInterestId() + traderLyncUserInterest.getUser().getUserNo();
IQ iq = new IQ(IQ.Type.set);
iq.setFrom(getName() + "." + getDomain());
iq.setTo("pubsub." + getDomain());
Element pubsub = iq.setChildElement("pubsub", "http://jabber.org/protocol/pubsub");
Element publish = pubsub.addElement("publish").addAttribute("node", interestNode);
Element item = publish.addElement("item").addAttribute("id", interestNode);
Element calls = item.addElement("callstatus", "http://xmpp.org/protocol/openlink:01:00:00#call-status");
boolean busy = traderLyncUserInterest.getBusyStatus();
calls.addAttribute("busy", busy ? "true" : "false");
// Advertise call forwarding only when it is active ("true").
if ("true".equals(traderLyncUserInterest.getCallFWD()))
{
calls.addAttribute("fwd", traderLyncUserInterest.getCallFWDDigits());
}
addOpenlinkCallsEvents(traderLyncInterest, traderLyncUserInterest, calls);
// Suppress empty status updates: publish only when call elements exist.
if (calls.nodeCount() > 0)
{
sendPacket(iq);
}
// NOTE(review): the session lookup below feeds an empty if-block — dead
// code. Confirm whether a direct-to-session delivery was intended here.
JID profileJID = new JID(traderLyncUserInterest.getUser().getUserNo() + "@" + getDomain() + "/traderlync");
Session session = (LocalClientSession) XMPPServer.getInstance().getSessionManager().getSession(profileJID);
if (session != null)
{
}
}
}
/**
 * Placeholder for pushing a user interest's call state into a cache.
 * The previous implementation iterated the user's calls into an unused
 * local variable — a no-op; that dead loop has been removed while the
 * log line is kept so the call flow stays visible.
 */
private void updateCacheContent(OpenlinkUserInterest traderLyncUserInterest)
{
    Log.info( "["+ site.getName() + "] updateCacheContent - user interest " + traderLyncUserInterest.getInterestName());
    // TODO: populate the cache from traderLyncUserInterest.getCalls() once
    // cache replication is actually implemented.
}
/**
 * Appends one <call> element per reportable call of the user interest to
 * the given callstatus element. Calls in the "Unknown" state or flagged
 * as deleted are skipped.
 */
private void addOpenlinkCallsEvents(OpenlinkInterest traderLyncInterest, OpenlinkUserInterest traderLyncUserInterest, Element calls)
{
    Log.info( "["+ site.getName() + "] addOpenlinkCallsEvents - user interest " + traderLyncUserInterest.getInterestName());
    for (Object entry : traderLyncUserInterest.getCalls().values())
    {
        OpenlinkCall call = (OpenlinkCall) entry;
        boolean reportable = !"Unknown".equals(call.getState()) && !call.deleted;
        if (reportable)
        {
            addOpenlinkCallEvents(traderLyncInterest, traderLyncUserInterest, calls.addElement("call"), call);
        }
    }
}
/**
 * Serialises one call into its Openlink <call> XML representation:
 * identity, state, caller/called parties, duration, the set of valid
 * actions, feature flags and the participant list. Element order follows
 * the Openlink call-status schema and must not be changed.
 */
public synchronized void addOpenlinkCallEvents(OpenlinkInterest traderLyncInterest, OpenlinkUserInterest traderLyncUserInterest, Element call, OpenlinkCall traderLyncCall)
{
Log.info( "["+ site.getName() + "] addOpenlinkCallEvents - user interest " + traderLyncUserInterest.getInterestName() + " " + traderLyncCall.getCallID());
call.addElement("id").setText(traderLyncCall.getCallID());
call.addElement("profile").setText(traderLyncUserInterest.getUser().getProfileName());
call.addElement("interest").setText(traderLyncUserInterest.getInterestName());
call.addElement("changed").setText(traderLyncCall.getStatus());
call.addElement("state").setText(traderLyncCall.getState());
call.addElement("direction").setText(traderLyncCall.getDirection());
// Caller/called rendering depends on the interest type (e.g. line vs wire).
Element caller = call.addElement("caller");
caller.addElement("number").setText(traderLyncCall.getCallerNumber(traderLyncInterest.getInterestType()));
caller.addElement("name").setText(traderLyncCall.getCallerName(traderLyncInterest.getInterestType()));
Element called = call.addElement("called");
called.addElement("number").setText(traderLyncCall.getCalledNumber(traderLyncInterest.getInterestType()));
called.addElement("name").setText(traderLyncCall.getCalledName(traderLyncInterest.getInterestType()));
call.addElement("duration").setText(String.valueOf(traderLyncCall.getDuration()));
// Each valid action becomes an empty child element named after the action.
Element actions = call.addElement("actions");
Iterator it4 = traderLyncCall.getValidActions().iterator();
while( it4.hasNext() )
{
String action = (String)it4.next();
actions.addElement(action);
}
// Feature flags: privacy and which handset (1 or 2) is in use.
Element features = call.addElement("features");
addFeature(features, "priv_1", "Y".equals(traderLyncCall.getPrivacy()) ? "true" : "false");
addFeature(features, "hs_1", "1".equals(traderLyncCall.getHandset()) ? "true" : "false");
addFeature(features, "hs_2", "2".equals(traderLyncCall.getHandset()) ? "true" : "false");
/*
if (traderLyncUserInterest.getUser().getCallback() != null && isCallbackAvailable() && traderLyncUserInterest.getUser().getCallbackActive())
{
addFeature(features, "callback_1", traderLyncUserInterest.getUser().getCallback());
}
*/
// Participants: every user on the same interest whose device is real
// ("0.0.0.0" is the no-device placeholder) and who has a call on this line.
Element participants = call.addElement("participants");
Iterator it3 = traderLyncInterest.getUserInterests().values().iterator();
while( it3.hasNext() )
{
OpenlinkUserInterest traderLyncParticipant = (OpenlinkUserInterest)it3.next();
if (!"0.0.0.0".equals(traderLyncParticipant.getUser().getDeviceNo()))
{
OpenlinkCall participantCall = traderLyncParticipant.getCallByLine(traderLyncCall.getLine());
if (participantCall != null)
{
Element participant = participants.addElement("participant");
participant.addAttribute("jid", traderLyncParticipant.getUser().getUserId() + "@" + getDomain());
participant.addAttribute("type", participantCall.getParticipation());
participant.addAttribute("direction", participantCall.getDirection());
// firstTimeStamp == 0 means the call never started — omit the attribute.
if (participantCall.firstTimeStamp != 0)
{
participant.addAttribute("timestamp", String.valueOf(new Date(participantCall.firstTimeStamp)));
}
}
}
}
}
/**
 * Appends a single <feature id="...">value</feature> child to the given
 * features element.
 */
private void addFeature(Element features, String id, String value)
{
    features.addElement("feature")
            .addAttribute("id", id)
            .setText(value);
}
/**
 * Publishes a devicestatus pubsub item (currently just the intercom
 * feature flag) on the user's default-interest node. Skipped for users
 * with the "0.0.0.0" placeholder device or without a default interest.
 */
public synchronized void publishOpenlinkUserDeviceEvent(OpenlinkUser traderLyncUser)
{
Log.info( "["+ site.getName() + "] publishOpenlinkUserDeviceEvent - " + traderLyncUser.getUserId());
// "0.0.0.0" marks a user with no real device — nothing to publish.
if (!"0.0.0.0".equals(traderLyncUser.getDeviceNo()))
{
OpenlinkInterest traderLyncInterest = traderLyncUser.getDefaultInterest();
if (traderLyncInterest != null)
{
// Node id = interest id + user number, matching createPubsubNode().
String interestNode = traderLyncInterest.getInterestId() + traderLyncUser.getUserNo();
IQ iq = new IQ(IQ.Type.set);
iq.setFrom(getName() + "." + getDomain());
iq.setTo("pubsub." + getDomain());
Element pubsub = iq.setChildElement("pubsub", "http://jabber.org/protocol/pubsub");
Element publish = pubsub.addElement("publish").addAttribute("node", interestNode);
Element item = publish.addElement("item").addAttribute("id", interestNode);
Element device = item.addElement("devicestatus", "http://xmpp.org/protocol/openlink:01:00:00#device-status");
Element features = device.addElement("features");
addFeature(features, "icom_1", traderLyncUser.intercom() ? "true" : "false");
sendPacket(iq);
}
}
}
/**
 * Creates (and configures) a pubsub node for the given node id. The
 * configure form sets pubsub#max_items to 1 so only the latest published
 * item is retained. Creating an already-existing node is expected to be
 * rejected by the pubsub service; the result is not inspected here.
 */
public void createPubsubNode(String interestNode)
{
//Log.info("["+site.getName()+"] createPubsubNode - " + interestNode);
String domain = getDomain();
IQ iq1 = new IQ(IQ.Type.set);
iq1.setFrom(getName() + "." + domain);
iq1.setTo("pubsub." + domain);
Element pubsub1 = iq1.setChildElement("pubsub", "http://jabber.org/protocol/pubsub");
Element create = pubsub1.addElement("create").addAttribute("node", interestNode);
Element configure = pubsub1.addElement("configure");
// Submit-type data form carrying the node configuration.
Element x = configure.addElement("x", "jabber:x:data").addAttribute("type", "submit");
Element field1 = x.addElement("field");
field1.addAttribute("var", "FORM_TYPE");
field1.addAttribute("type", "hidden");
field1.addElement("value").setText("http://jabber.org/protocol/pubsub#node_config");
//Element field2 = x.addElement("field");
//field2.addAttribute("var", "pubsub#persist_items");
//field2.addElement("value").setText("1");
// Keep only the most recent item on the node.
Element field3 = x.addElement("field");
field3.addAttribute("var", "pubsub#max_items");
field3.addElement("value").setText("1");
Log.info("createPubsubNode " + iq1.toString());
sendPacket(iq1);
}
/**
 * Asks the pubsub service (owner namespace) for the subscription list of
 * the node belonging to this interest/user pair. The reply is handled
 * asynchronously elsewhere; this method only sends the request.
 */
public void getInterestSubscriptions(OpenlinkInterest traderLyncInterest, String userNo)
{
    String interestNode = traderLyncInterest.getInterestId() + userNo;
    String domain = getDomain();
    Log.info("["+site.getName()+"] getInterestSubscriptions - " + interestNode);
    IQ request = new IQ(IQ.Type.get);
    request.setFrom(getName() + "." + domain);
    request.setTo("pubsub." + domain);
    request.setChildElement("pubsub", "http://jabber.org/protocol/pubsub#owner")
           .addElement("subscriptions")
           .addAttribute("node", interestNode);
    Log.info("subscriptions " + request.toString());
    sendPacket(request);
}
/**
 * Orders roster items alphabetically by their bare JID (user@domain,
 * resource ignored).
 */
class RosterItemComparator implements Comparator<RosterItem>
{
public int compare(RosterItem itemA, RosterItem itemB)
{
// toBareJID() drops the resource, so comparison is per-contact.
return itemA.getJid().toBareJID().compareTo(itemB.getJid().toBareJID());
}
}
//-------------------------------------------------------
//
//
//
//-------------------------------------------------------
/**
 * Restores a user's stored device profile from XMPP private storage
 * (namespace http://xmpp.org/protocol/traderlync#user-profile-device),
 * logs its contents, and looks up the matching Openlink profile.
 * Any parse/lookup failure is caught and logged.
 */
public void loadProfile(String userName)
{
Log.info("["+site.getName()+"] loadProfile - " + userName);
try {
// Template element used purely as the private-storage lookup key.
Document document = DocumentHelper.parseText("<traderlyncprofile xmlns=\"http://xmpp.org/protocol/traderlync#user-profile-device\"></traderlyncprofile>");
Element searchElement = document.getRootElement();
Element foundElement = XMPPServer.getInstance().getPrivateStorage().get(userName, searchElement);
if (foundElement != null)
{
if (foundElement.element("ipaddress") != null)
{
String ipAddress = foundElement.element("ipaddress").getText();
// NOTE(review): only "ipaddress" is null-checked; a stored profile that
// lacks any of the elements below throws an NPE, which the catch turns
// into an error log. Confirm whether all fields are always present.
String hostName = foundElement.element("host").getText();
String macAddress = foundElement.element("mac").getText();
String userAgent = foundElement.element("agent").getText();
String expansionMod1 = foundElement.element("exten1").getText();
String expansionMod2 = foundElement.element("exten2").getText();
String expansionMod3 = foundElement.element("exten3").getText();
Log.info("["+site.getName()+"] loadProfile - " + userName + " " + macAddress + " " + hostName + " " + ipAddress + " " + userAgent + " " + expansionMod1 + " " + expansionMod2 + " " + expansionMod3);
// NOTE(review): result is unused — presumably getOpenlinkProfile() has
// side effects worth keeping; verify, otherwise drop the call.
OpenlinkUser traderLyncUser = getOpenlinkProfile(userName);
}
}
} catch (Exception e) {
Log.error("["+site.getName()+"] loadProfile - " + e);
}
}
/**
 * Counterpart of {@link #loadProfile}. Currently only logs the request —
 * no profile state is released. NOTE(review): confirm whether cleanup
 * was intended here.
 */
public void unloadProfile(String userName)
{
Log.info("["+site.getName()+"] unloadProfile - " + userName);
}
}
| apache-2.0 |
NotFound403/WePay | src/main/java/cn/felord/wepay/ali/sdk/api/response/AlipayMobileBeaconDeviceAddResponse.java | 1077 | package cn.felord.wepay.ali.sdk.api.response;
import cn.felord.wepay.ali.sdk.api.internal.mapping.ApiField;
import cn.felord.wepay.ali.sdk.api.AlipayResponse;
/**
 * ALIPAY API: alipay.mobile.beacon.device.add response.
 *
 * Carries the result of registering a beacon device: a status code and a
 * human-readable processing message.
 *
 * @author auto create
 * @version $Id: $Id
 */
public class AlipayMobileBeaconDeviceAddResponse extends AlipayResponse {
private static final long serialVersionUID = 1455918553194294212L;
/**
* Whether the request operation succeeded; 200 means success.
*/
@ApiField("code")
private String code;
/**
* The processing result of the request.
*/
@ApiField("msg")
private String msg;
/** {@inheritDoc} */
public void setCode(String code) {
this.code = code;
}
/**
* <p>Getter for the field <code>code</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getCode( ) {
return this.code;
}
/** {@inheritDoc} */
public void setMsg(String msg) {
this.msg = msg;
}
/**
* <p>Getter for the field <code>msg</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getMsg( ) {
return this.msg;
}
}
| apache-2.0 |
pengrad/java-telegram-bot-api | library/src/main/java/com/pengrad/telegrambot/model/botcommandscope/BotCommandsScopeChat.java | 423 | package com.pengrad.telegrambot.model.botcommandscope;
/**
 * Bot-command scope covering a single specific chat (Telegram scope type
 * "chat").
 */
public class BotCommandsScopeChat extends BotCommandScope {
// Either a numeric chat id or a "@supergroupusername" String, per the
// constructor contract below.
private Object chat_id;
/**
*
* @param chatId Unique identifier for the target chat or username of the target supergroup (in the format @supergroupusername)
*/
public BotCommandsScopeChat(Object chatId) {
this.type = "chat";
this.chat_id = chatId;
}
}
| apache-2.0 |
lletsica/my_test_repo | rave-components/rave-core/src/main/java/org/apache/rave/portal/service/PlayerHasTournamentService.java | 135 | package org.apache.rave.portal.service;
/**
 * Service-layer contract for player–tournament association operations.
 * Created by fhernandez on 25/09/14.
 * NOTE(review): no methods are declared yet — define the intended
 * operations or remove this interface.
 */
public interface PlayerHasTournamentService {
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-eventbridge/src/main/java/com/amazonaws/services/eventbridge/model/CreateArchiveResult.java | 8714 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.eventbridge.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/eventbridge-2015-10-07/CreateArchive" target="_top">AWS API
 * Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
// NOTE: generated code — change the AWS code generator, not this file.
public class CreateArchiveResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* The ARN of the archive that was created.
* </p>
*/
private String archiveArn;
/**
* <p>
* The state of the archive that was created.
* </p>
*/
private String state;
/**
* <p>
* The reason that the archive is in the state.
* </p>
*/
private String stateReason;
/**
* <p>
* The time at which the archive was created.
* </p>
*/
private java.util.Date creationTime;
/**
* <p>
* The ARN of the archive that was created.
* </p>
*
* @param archiveArn
* The ARN of the archive that was created.
*/
public void setArchiveArn(String archiveArn) {
this.archiveArn = archiveArn;
}
/**
* <p>
* The ARN of the archive that was created.
* </p>
*
* @return The ARN of the archive that was created.
*/
public String getArchiveArn() {
return this.archiveArn;
}
/**
* <p>
* The ARN of the archive that was created.
* </p>
*
* @param archiveArn
* The ARN of the archive that was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateArchiveResult withArchiveArn(String archiveArn) {
setArchiveArn(archiveArn);
return this;
}
/**
* <p>
* The state of the archive that was created.
* </p>
*
* @param state
* The state of the archive that was created.
* @see ArchiveState
*/
public void setState(String state) {
this.state = state;
}
/**
* <p>
* The state of the archive that was created.
* </p>
*
* @return The state of the archive that was created.
* @see ArchiveState
*/
public String getState() {
return this.state;
}
/**
* <p>
* The state of the archive that was created.
* </p>
*
* @param state
* The state of the archive that was created.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ArchiveState
*/
public CreateArchiveResult withState(String state) {
setState(state);
return this;
}
/**
* <p>
* The state of the archive that was created.
* </p>
*
* @param state
* The state of the archive that was created.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ArchiveState
*/
public CreateArchiveResult withState(ArchiveState state) {
this.state = state.toString();
return this;
}
/**
* <p>
* The reason that the archive is in the state.
* </p>
*
* @param stateReason
* The reason that the archive is in the state.
*/
public void setStateReason(String stateReason) {
this.stateReason = stateReason;
}
/**
* <p>
* The reason that the archive is in the state.
* </p>
*
* @return The reason that the archive is in the state.
*/
public String getStateReason() {
return this.stateReason;
}
/**
* <p>
* The reason that the archive is in the state.
* </p>
*
* @param stateReason
* The reason that the archive is in the state.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateArchiveResult withStateReason(String stateReason) {
setStateReason(stateReason);
return this;
}
/**
* <p>
* The time at which the archive was created.
* </p>
*
* @param creationTime
* The time at which the archive was created.
*/
public void setCreationTime(java.util.Date creationTime) {
this.creationTime = creationTime;
}
/**
* <p>
* The time at which the archive was created.
* </p>
*
* @return The time at which the archive was created.
*/
public java.util.Date getCreationTime() {
return this.creationTime;
}
/**
* <p>
* The time at which the archive was created.
* </p>
*
* @param creationTime
* The time at which the archive was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateArchiveResult withCreationTime(java.util.Date creationTime) {
setCreationTime(creationTime);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getArchiveArn() != null)
sb.append("ArchiveArn: ").append(getArchiveArn()).append(",");
if (getState() != null)
sb.append("State: ").append(getState()).append(",");
if (getStateReason() != null)
sb.append("StateReason: ").append(getStateReason()).append(",");
if (getCreationTime() != null)
sb.append("CreationTime: ").append(getCreationTime());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CreateArchiveResult == false)
return false;
CreateArchiveResult other = (CreateArchiveResult) obj;
// Each field pair differs when exactly one side is null (the '^' XOR
// check) or when both are non-null but unequal.
if (other.getArchiveArn() == null ^ this.getArchiveArn() == null)
return false;
if (other.getArchiveArn() != null && other.getArchiveArn().equals(this.getArchiveArn()) == false)
return false;
if (other.getState() == null ^ this.getState() == null)
return false;
if (other.getState() != null && other.getState().equals(this.getState()) == false)
return false;
if (other.getStateReason() == null ^ this.getStateReason() == null)
return false;
if (other.getStateReason() != null && other.getStateReason().equals(this.getStateReason()) == false)
return false;
if (other.getCreationTime() == null ^ this.getCreationTime() == null)
return false;
if (other.getCreationTime() != null && other.getCreationTime().equals(this.getCreationTime()) == false)
return false;
return true;
}
@Override
public int hashCode() {
// Standard 31-based hash over the same fields compared in equals().
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getArchiveArn() == null) ? 0 : getArchiveArn().hashCode());
hashCode = prime * hashCode + ((getState() == null) ? 0 : getState().hashCode());
hashCode = prime * hashCode + ((getStateReason() == null) ? 0 : getStateReason().hashCode());
hashCode = prime * hashCode + ((getCreationTime() == null) ? 0 : getCreationTime().hashCode());
return hashCode;
}
@Override
public CreateArchiveResult clone() {
try {
return (CreateArchiveResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| apache-2.0 |
leitoaxl/shower | app/src/androidTest/java/com/shower/shower2/ApplicationTest.java | 349 | package com.shower.shower2;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
    // Boilerplate instrumentation test shell: exercises nothing beyond
    // Application construction.
    // NOTE(review): ApplicationTestCase is deprecated in current Android
    // tooling; consider migrating to AndroidX test APIs — TODO confirm.
    public ApplicationTest() {
        super(Application.class);
    }
}
dkhwangbo/druid | indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java | 15193 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.firehose;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import org.apache.commons.io.FileUtils;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.JSONParseSpec;
import org.apache.druid.data.input.impl.MapInputRowParser;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.indexing.common.SegmentLoaderFactory;
import org.apache.druid.indexing.common.TaskLock;
import org.apache.druid.indexing.common.TaskLockType;
import org.apache.druid.indexing.common.TaskToolboxFactory;
import org.apache.druid.indexing.common.TestUtils;
import org.apache.druid.indexing.common.actions.LockAcquireAction;
import org.apache.druid.indexing.common.actions.SegmentListUsedAction;
import org.apache.druid.indexing.common.actions.TaskAction;
import org.apache.druid.indexing.common.actions.TaskActionClient;
import org.apache.druid.indexing.common.actions.TaskActionClientFactory;
import org.apache.druid.indexing.common.config.TaskConfig;
import org.apache.druid.indexing.common.task.NoopTask;
import org.apache.druid.indexing.common.task.NoopTestTaskFileWriter;
import org.apache.druid.indexing.common.task.Task;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.JodaUtils;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.filter.TrueDimFilter;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.IndexMergerV9;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.incremental.IndexSizeExceededException;
import org.apache.druid.segment.loading.SegmentLoaderConfig;
import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager;
import org.apache.druid.segment.loading.StorageLocationConfig;
import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory;
import org.apache.druid.segment.transform.TransformSpec;
import org.apache.druid.server.metrics.NoopServiceEmitter;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.easymock.EasyMock;
import org.joda.time.Interval;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
/**
 * Parameterized timeline test for {@link IngestSegmentFirehoseFactory}.
 * Each case persists a set of adjacent/overlapping segments into a temp
 * directory, asks the firehose for a given interval, and asserts the replayed
 * row count and metric sum — i.e. that version/partition resolution over the
 * segment timeline picks the right rows.
 */
@RunWith(Parameterized.class)
public class IngestSegmentFirehoseFactoryTimelineTest
{
  private static final String DATA_SOURCE = "foo";
  private static final String TIME_COLUMN = "t";
  private static final String[] DIMENSIONS = new String[]{"d1"};
  private static final String[] METRICS = new String[]{"m1"};
  // Must decorate the parser, since IngestSegmentFirehoseFactory will undecorate it.
  private static final InputRowParser<Map<String, Object>> ROW_PARSER = TransformSpec.NONE.decorate(
      new MapInputRowParser(
          new JSONParseSpec(
              new TimestampSpec(TIME_COLUMN, "auto", null),
              new DimensionsSpec(
                  DimensionsSpec.getDefaultSchemas(Arrays.asList(DIMENSIONS)),
                  null,
                  null
              ),
              null,
              null
          )
      )
  );
  private final IngestSegmentFirehoseFactory factory;
  private final File tmpDir;
  private final int expectedCount;
  private final long expectedSum;
  private static final ObjectMapper MAPPER;
  private static final IndexIO INDEX_IO;
  private static final IndexMergerV9 INDEX_MERGER_V9;
  // Shared (de)serialization and index machinery, built once for all cases.
  static {
    TestUtils testUtils = new TestUtils();
    MAPPER = IngestSegmentFirehoseFactoryTest.setupInjectablesInObjectMapper(testUtils.getTestObjectMapper());
    INDEX_IO = testUtils.getTestIndexIO();
    INDEX_MERGER_V9 = testUtils.getTestIndexMergerV9();
  }
  /**
   * Invoked by the Parameterized runner with one row from
   * {@link #constructorFeeder()}; {@code name} exists only for display.
   */
  public IngestSegmentFirehoseFactoryTimelineTest(
      String name,
      IngestSegmentFirehoseFactory factory,
      File tmpDir,
      int expectedCount,
      long expectedSum
  )
  {
    this.factory = factory;
    this.tmpDir = tmpDir;
    this.expectedCount = expectedCount;
    this.expectedSum = expectedSum;
  }
  // Drains the firehose and checks row count and the sum of metric m1.
  @Test
  public void testSimple() throws Exception
  {
    int count = 0;
    long sum = 0;
    try (final Firehose firehose = factory.connect(ROW_PARSER, null)) {
      while (firehose.hasMore()) {
        final InputRow row = firehose.nextRow();
        count++;
        sum += row.getMetric(METRICS[0]).longValue();
      }
    }
    Assert.assertEquals("count", expectedCount, count);
    Assert.assertEquals("sum", expectedSum, sum);
  }
  @After
  public void tearDown() throws Exception
  {
    FileUtils.deleteDirectory(tmpDir);
  }
  // Test-case factory: persists each segment maker into a fresh temp dir.
  private static TestCase TC(
      String intervalString,
      int expectedCount,
      long expectedSum,
      DataSegmentMaker... segmentMakers
  )
  {
    final File tmpDir = Files.createTempDir();
    final Set<DataSegment> segments = Sets.newHashSet();
    for (DataSegmentMaker segmentMaker : segmentMakers) {
      segments.add(segmentMaker.make(tmpDir));
    }
    return new TestCase(
        tmpDir,
        Intervals.of(intervalString),
        expectedCount,
        expectedSum,
        segments
    );
  }
  // Shorthand for a DataSegmentMaker with the given interval/version/partition.
  private static DataSegmentMaker DS(
      String intervalString,
      String version,
      int partitionNum,
      InputRow... rows
  )
  {
    return new DataSegmentMaker(Intervals.of(intervalString), version, partitionNum, Arrays.asList(rows));
  }
  // Shorthand for a one-dimension, one-metric input row at the given time.
  private static InputRow IR(String timeString, long metricValue)
  {
    return new MapBasedInputRow(
        DateTimes.of(timeString).getMillis(),
        Arrays.asList(DIMENSIONS),
        ImmutableMap.of(
            TIME_COLUMN, DateTimes.of(timeString).toString(),
            DIMENSIONS[0], "bar",
            METRICS[0], metricValue
        )
    );
  }
  /**
   * Persists the rows into a new on-disk segment under {@code tmpDir} and
   * returns the "local" loadSpec pointing at it.
   */
  private static Map<String, Object> persist(File tmpDir, InputRow... rows)
  {
    final File persistDir = new File(tmpDir, UUID.randomUUID().toString());
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withMinTimestamp(JodaUtils.MIN_INSTANT)
        .withDimensionsSpec(ROW_PARSER)
        .withMetrics(new LongSumAggregatorFactory(METRICS[0], METRICS[0]))
        .build();
    final IncrementalIndex index = new IncrementalIndex.Builder()
        .setIndexSchema(schema)
        .setMaxRowCount(rows.length)
        .buildOnheap();
    for (InputRow row : rows) {
      try {
        index.add(row);
      }
      catch (IndexSizeExceededException e) {
        throw Throwables.propagate(e);
      }
    }
    try {
      INDEX_MERGER_V9.persist(index, persistDir, new IndexSpec(), null);
    }
    catch (IOException e) {
      throw Throwables.propagate(e);
    }
    return ImmutableMap.of(
        "type", "local",
        "path", persistDir.getAbsolutePath()
    );
  }
  /**
   * Builds the parameterized cases: each TC() describes the queried interval,
   * the expected count/sum, and the segment layout; then wires up a stubbed
   * task toolbox whose TaskActionClient serves exactly those segments.
   */
  @Parameterized.Parameters(name = "{0}")
  public static Collection<Object[]> constructorFeeder()
  {
    final List<TestCase> testCases = ImmutableList.of(
        TC(
            "2000/2000T02", 3, 7,
            DS("2000/2000T01", "v1", 0, IR("2000", 1), IR("2000T00:01", 2)),
            DS("2000T01/2000T02", "v1", 0, IR("2000T01", 4))
        ) /* Adjacent segments */,
        TC(
            "2000/2000T02", 3, 7,
            DS("2000/2000T02", "v1", 0, IR("2000", 1), IR("2000T00:01", 2), IR("2000T01", 8)),
            DS("2000T01/2000T02", "v2", 0, IR("2000T01:01", 4))
        ) /* 1H segment overlaid on top of 2H segment */,
        TC(
            "2000/2000-01-02", 4, 23,
            DS("2000/2000-01-02", "v1", 0, IR("2000", 1), IR("2000T00:01", 2), IR("2000T01", 8), IR("2000T02", 16)),
            DS("2000T01/2000T02", "v2", 0, IR("2000T01:01", 4))
        ) /* 1H segment overlaid on top of 1D segment */,
        TC(
            "2000/2000T02", 4, 15,
            DS("2000/2000T02", "v1", 0, IR("2000", 1), IR("2000T00:01", 2), IR("2000T01", 8)),
            DS("2000/2000T02", "v1", 1, IR("2000T01:01", 4))
        ) /* Segment set with two segments for the same interval */,
        TC(
            "2000T01/2000T02", 1, 2,
            DS("2000/2000T03", "v1", 0, IR("2000", 1), IR("2000T01", 2), IR("2000T02", 4))
        ) /* Segment wider than desired interval */,
        TC(
            "2000T02/2000T04", 2, 12,
            DS("2000/2000T03", "v1", 0, IR("2000", 1), IR("2000T01", 2), IR("2000T02", 4)),
            DS("2000T03/2000T04", "v1", 0, IR("2000T03", 8))
        ) /* Segment intersecting desired interval */
    );
    final List<Object[]> constructors = Lists.newArrayList();
    for (final TestCase testCase : testCases) {
      // Stub action client: returns the case's segments for the expected
      // interval and a dummy lock; everything else is unsupported.
      final TaskActionClient taskActionClient = new TaskActionClient()
      {
        @Override
        public <RetType> RetType submit(TaskAction<RetType> taskAction)
        {
          if (taskAction instanceof SegmentListUsedAction) {
            // Expect the interval we asked for
            final SegmentListUsedAction action = (SegmentListUsedAction) taskAction;
            if (action.getIntervals().equals(ImmutableList.of(testCase.interval))) {
              return (RetType) ImmutableList.copyOf(testCase.segments);
            } else {
              throw new IllegalArgumentException("WTF");
            }
          } else if (taskAction instanceof LockAcquireAction) {
            return (RetType) new TaskLock(TaskLockType.EXCLUSIVE, null, DATA_SOURCE, Intervals.of("2000/2001"), "v1", 0);
          } else {
            throw new UnsupportedOperationException();
          }
        }
      };
      SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
      EasyMock.replay(notifierFactory);
      SegmentLoaderConfig segmentLoaderConfig = new SegmentLoaderConfig()
      {
        @Override
        public List<StorageLocationConfig> getLocations()
        {
          return Lists.newArrayList();
        }
      };
      // Minimal toolbox: only the pieces the firehose factory touches are
      // non-null (positional nulls are labeled inline).
      final TaskToolboxFactory taskToolboxFactory = new TaskToolboxFactory(
          new TaskConfig(testCase.tmpDir.getAbsolutePath(), null, null, 50000, null, false, null, null),
          new TaskActionClientFactory()
          {
            @Override
            public TaskActionClient create(Task task)
            {
              return taskActionClient;
            }
          },
          new NoopServiceEmitter(),
          null, // segment pusher
          null, // segment killer
          null, // segment mover
          null, // segment archiver
          null, // segment announcer,
          null,
          notifierFactory,
          null, // query runner factory conglomerate corporation unionized collective
          null, // query executor service
          null, // monitor scheduler
          new SegmentLoaderFactory(
              new SegmentLoaderLocalCacheManager(null, segmentLoaderConfig, MAPPER)
          ),
          MAPPER,
          INDEX_IO,
          null,
          null,
          null,
          INDEX_MERGER_V9,
          null,
          null,
          null,
          null,
          new NoopTestTaskFileWriter()
      );
      final IngestSegmentFirehoseFactory factory = new IngestSegmentFirehoseFactory(
          DATA_SOURCE,
          testCase.interval,
          new TrueDimFilter(),
          Arrays.asList(DIMENSIONS),
          Arrays.asList(METRICS),
          INDEX_IO
      );
      factory.setTaskToolbox(taskToolboxFactory.build(NoopTask.create(DATA_SOURCE)));
      constructors.add(
          new Object[]{
              testCase.toString(),
              factory,
              testCase.tmpDir,
              testCase.expectedCount,
              testCase.expectedSum
          }
      );
    }
    return constructors;
  }
  // Immutable description of one parameterized case.
  private static class TestCase
  {
    final File tmpDir;
    final Interval interval;
    final int expectedCount;
    final long expectedSum;
    final Set<DataSegment> segments;
    public TestCase(
        File tmpDir,
        Interval interval,
        int expectedCount,
        long expectedSum,
        Set<DataSegment> segments
    )
    {
      this.tmpDir = tmpDir;
      this.interval = interval;
      this.expectedCount = expectedCount;
      this.expectedSum = expectedSum;
      this.segments = segments;
    }
    @Override
    public String toString()
    {
      final List<String> segmentIdentifiers = Lists.newArrayList();
      for (DataSegment segment : segments) {
        segmentIdentifiers.add(segment.getIdentifier());
      }
      return "TestCase{" +
             "interval=" + interval +
             ", expectedCount=" + expectedCount +
             ", expectedSum=" + expectedSum +
             ", segments=" + segmentIdentifiers +
             '}';
    }
  }
  // Deferred segment builder: persists its rows and wraps the result in a
  // DataSegment with the given interval/version/partition.
  private static class DataSegmentMaker
  {
    final Interval interval;
    final String version;
    final int partitionNum;
    final List<InputRow> rows;
    public DataSegmentMaker(
        Interval interval,
        String version,
        int partitionNum,
        List<InputRow> rows
    )
    {
      this.interval = interval;
      this.version = version;
      this.partitionNum = partitionNum;
      this.rows = rows;
    }
    public DataSegment make(File tmpDir)
    {
      final Map<String, Object> loadSpec = persist(tmpDir, Iterables.toArray(rows, InputRow.class));
      return new DataSegment(
          DATA_SOURCE,
          interval,
          version,
          loadSpec,
          Arrays.asList(DIMENSIONS),
          Arrays.asList(METRICS),
          new LinearShardSpec(partitionNum),
          -1,
          0L
      );
    }
  }
}
| apache-2.0 |
pili-engineering/PLDroidShortVideo | ShortVideoUIDemo/app/src/main/java/com/qiniu/shortvideo/app/activity/VideoRecordActivity.java | 35340 | package com.qiniu.shortvideo.app.activity;
import android.animation.ValueAnimator;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.constraint.Group;
import android.support.v7.app.AppCompatActivity;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.style.AbsoluteSizeSpan;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.faceunity.FURenderer;
import com.qiniu.pili.droid.shortvideo.PLBuiltinFilter;
import com.qiniu.pili.droid.shortvideo.PLCaptureFrameListener;
import com.qiniu.pili.droid.shortvideo.PLVideoFrame;
import com.qiniu.shortvideo.app.R;
import com.qiniu.shortvideo.app.adapter.FilterItemAdapter;
import com.qiniu.shortvideo.app.faceunity.BeautyControlView;
import com.qiniu.shortvideo.app.model.AudioFile;
import com.qiniu.shortvideo.app.utils.Config;
import com.qiniu.shortvideo.app.utils.MediaUtils;
import com.qiniu.shortvideo.app.utils.RecordSettings;
import com.qiniu.shortvideo.app.utils.Utils;
import com.qiniu.shortvideo.app.utils.ViewOperator;
import com.qiniu.shortvideo.app.view.ListBottomView;
import com.qiniu.shortvideo.app.view.SectionProgressBar;
import com.qiniu.shortvideo.app.utils.ToastUtils;
import com.qiniu.pili.droid.shortvideo.PLAudioEncodeSetting;
import com.qiniu.pili.droid.shortvideo.PLCameraPreviewListener;
import com.qiniu.pili.droid.shortvideo.PLCameraSetting;
import com.qiniu.pili.droid.shortvideo.PLFaceBeautySetting;
import com.qiniu.pili.droid.shortvideo.PLFocusListener;
import com.qiniu.pili.droid.shortvideo.PLMicrophoneSetting;
import com.qiniu.pili.droid.shortvideo.PLRecordSetting;
import com.qiniu.pili.droid.shortvideo.PLRecordStateListener;
import com.qiniu.pili.droid.shortvideo.PLShortVideoRecorder;
import com.qiniu.pili.droid.shortvideo.PLVideoEncodeSetting;
import com.qiniu.pili.droid.shortvideo.PLVideoFilterListener;
import com.qiniu.pili.droid.shortvideo.PLVideoSaveListener;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Stack;
/**
* 视频录制模块,实现了基础的短视频录制、添加音乐等功能,同时加入了相芯美颜
*/
public class VideoRecordActivity extends AppCompatActivity implements
PLRecordStateListener,
PLVideoSaveListener,
PLVideoFilterListener,
PLCameraPreviewListener,
PLFocusListener {
private static final String TAG = "VideoRecordActivity";
public static final String PREVIEW_SIZE_RATIO = "PreviewSizeRatio";
public static final String PREVIEW_SIZE_LEVEL = "PreviewSizeLevel";
public static final String ENCODING_MODE = "EncodingMode";
public static final String ENCODING_SIZE_LEVEL = "EncodingSizeLevel";
public static final String ENCODING_BITRATE_LEVEL = "EncodingBitrateLevel";
public static final String AUDIO_CHANNEL_NUM = "AudioChannelNum";
private static final int FLING_MIN_DISTANCE = 350;
private static final int CHOOSE_MUSIC_REQUEST_CODE = 0;
private static final int RECORD = 0;
private static final int CAPTURE = 1;
private static final int FLING_MIN_DISTANCE_SWITCH_MODE = 20;// 移动最小距离
private View mDecorView;
private Button mRecordBtn;
private Button mConcatBtn;
private TextView mSwitchCameraBtn;
private TextView mSwitchFlashBtn;
private ImageButton mDeleteBtn;
private TextView mSpeedTextView;
private Group mRecordBtns;
// 模式切换相关
private TextView mRecordModeBtn;
private TextView mCaptureModeBtn;
/** 模式按键切换动画 */
private ValueAnimator valueAnimator;
/** 录制按键模式 */
private RelativeLayout mRecordModeLayout;
private int mRecordMode = RECORD;
private float mPosX, mCurPosX;
private float mOffset = Float.MIN_VALUE;
private int mSectionCount;
private GLSurfaceView mPreview;
private PLShortVideoRecorder mShortVideoRecorder;
private SectionProgressBar mSectionProgressBar;
private ListBottomView mFilterBottomView;
private FilterItemAdapter mFilterItemAdapter;
private boolean mIsSelectingFilter;
private String mCurrentFilter;
/** 特效描述信息控件 */
private TextView mEffectDescription;
private TextView mCurrentDescriptionText;
private PLCameraSetting mCameraSetting;
private PLMicrophoneSetting mMicrophoneSetting;
private PLRecordSetting mRecordSetting;
private PLVideoEncodeSetting mVideoEncodeSetting;
private PLAudioEncodeSetting mAudioEncodeSetting;
private PLFaceBeautySetting mFaceBeautySetting;
private GestureDetector mGestureDetector;
private boolean mSectionBegan;
private boolean mFlashEnabled = false;
private double mRecordSpeed = 1;
private long mSectionBeginTSMs;
private Stack<Long> mDurationRecordStack = new Stack();
private Stack<Double> mDurationVideoStack = new Stack();
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_video_record);
        // --- View lookups ---
        mDecorView = getWindow().getDecorView();
        mPreview = findViewById(R.id.preview);
        mSectionProgressBar = findViewById(R.id.section_progress_bar);
        mRecordBtn = findViewById(R.id.record_btn);
        mConcatBtn = findViewById(R.id.next_btn);
        mSwitchCameraBtn = findViewById(R.id.switch_camera_btn);
        mSwitchFlashBtn = findViewById(R.id.flash_light_btn);
        mDeleteBtn = findViewById(R.id.delete_section_btn);
        mSpeedTextView = findViewById(R.id.normal_speed_btn);
        // NOTE(review): mFaceUnityControlView is assigned again further down
        // with the same id; one of the two lookups is redundant — confirm.
        mFaceUnityControlView = findViewById(R.id.face_unity_panel);
        TextView speedText = findViewById(R.id.normal_speed_btn);
        speedText.setTextColor(getResources().getColor(R.color.colorAccent));
        mRecordBtns = findViewById(R.id.record_btn_group);
        mEffectDescription = findViewById(R.id.effect_description);
        mRecordModeLayout = findViewById(R.id.record_mode_layout);
        mRecordModeBtn = findViewById(R.id.record_mode_video);
        mCaptureModeBtn = findViewById(R.id.record_mode_picture);
        mRecordModeBtn.setOnTouchListener(onModeBarTouchListener);
        mCaptureModeBtn.setOnTouchListener(onModeBarTouchListener);
        // --- Recorder settings, derived from the launching Intent extras ---
        mShortVideoRecorder = new PLShortVideoRecorder();
        int previewSizeRatioPos = getIntent().getIntExtra(PREVIEW_SIZE_RATIO, 0);
        int previewSizeLevelPos = getIntent().getIntExtra(PREVIEW_SIZE_LEVEL, 0);
        int encodingModePos = getIntent().getIntExtra(ENCODING_MODE, 0);
        int encodingSizeLevelPos = getIntent().getIntExtra(ENCODING_SIZE_LEVEL, 0);
        int encodingBitrateLevelPos = getIntent().getIntExtra(ENCODING_BITRATE_LEVEL, 0);
        int audioChannelNumPos = getIntent().getIntExtra(AUDIO_CHANNEL_NUM, 0);
        mCameraSetting = new PLCameraSetting();
        PLCameraSetting.CAMERA_FACING_ID facingId = chooseCameraFacingId();
        mCameraSetting.setCameraId(facingId);
        mCameraSetting.setCameraPreviewSizeRatio(RecordSettings.PREVIEW_SIZE_RATIO_ARRAY[previewSizeRatioPos]);
        mCameraSetting.setCameraPreviewSizeLevel(RecordSettings.PREVIEW_SIZE_LEVEL_ARRAY[previewSizeLevelPos]);
        mMicrophoneSetting = new PLMicrophoneSetting();
        mMicrophoneSetting.setChannelConfig(RecordSettings.AUDIO_CHANNEL_NUM_ARRAY[audioChannelNumPos] == 1 ?
                AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
        mVideoEncodeSetting = new PLVideoEncodeSetting(this);
        mVideoEncodeSetting.setEncodingSizeLevel(RecordSettings.ENCODING_SIZE_LEVEL_ARRAY[encodingSizeLevelPos]);
        mVideoEncodeSetting.setEncodingBitrate(RecordSettings.ENCODING_BITRATE_LEVEL_ARRAY[encodingBitrateLevelPos]);
        // Position 0 selects hardware codecs for both audio and video.
        mVideoEncodeSetting.setHWCodecEnabled(encodingModePos == 0);
        mVideoEncodeSetting.setConstFrameRateEnabled(true);
        mAudioEncodeSetting = new PLAudioEncodeSetting();
        mAudioEncodeSetting.setHWCodecEnabled(encodingModePos == 0);
        mAudioEncodeSetting.setChannels(RecordSettings.AUDIO_CHANNEL_NUM_ARRAY[audioChannelNumPos]);
        mRecordSetting = new PLRecordSetting();
        mRecordSetting.setMaxRecordDuration(RecordSettings.DEFAULT_MAX_RECORD_DURATION);
        mRecordSetting.setRecordSpeedVariable(true);
        mRecordSetting.setVideoCacheDir(Config.VIDEO_STORAGE_DIR);
        mRecordSetting.setVideoFilepath(Config.RECORD_FILE_PATH);
        mFaceBeautySetting = new PLFaceBeautySetting(1.0f, 0.5f, 0.5f);
        // Face beauty is passed as null here; beautification is handled by
        // the FaceUnity renderer instead — mFaceBeautySetting appears unused.
        mShortVideoRecorder.prepare(mPreview, mCameraSetting, mMicrophoneSetting, mVideoEncodeSetting,
                mAudioEncodeSetting, null, mRecordSetting);
        mShortVideoRecorder.setRecordStateListener(this);
        mShortVideoRecorder.setFocusListener(this);
        mShortVideoRecorder.setVideoFilterListener(this);
        mShortVideoRecorder.setCameraPreviewListener(this);
        mSectionProgressBar.setFirstPointTime(RecordSettings.DEFAULT_MIN_RECORD_DURATION);
        mShortVideoRecorder.setRecordSpeed(mRecordSpeed);
        mSectionProgressBar.setProceedingSpeed(mRecordSpeed);
        mSectionProgressBar.setTotalTime(this, mRecordSetting.getMaxRecordDuration());
        initBuiltInFilters();
        // init faceUnity engine
        mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
        mInputProp = getCameraOrientation(mCameraId);
        mFURenderer = new FURenderer
                .Builder(this)
                .inputPropOrientation(mInputProp)
                .build();
        mFaceUnityEffectDescription = findViewById(R.id.face_unity_effect_description);
        mFaceUnityControlView = findViewById(R.id.face_unity_panel);
        mFaceUnityControlView.setOnFUControlListener(mFURenderer);
        mFaceUnityControlView.setOnDescriptionShowListener(new BeautyControlView.OnDescriptionShowListener() {
            @Override
            public void onDescriptionShowListener(int str) {
                showDescription(str, 1500);
            }
        });
        // Shutter button: photo capture in CAPTURE mode, begin/end section in
        // RECORD mode.
        mRecordBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mRecordMode == CAPTURE) {
                    mShortVideoRecorder.captureFrame(new PLCaptureFrameListener() {
                        @Override
                        public void onFrameCaptured(PLVideoFrame plVideoFrame) {
                            if (plVideoFrame == null) {
                                Log.e(TAG, "capture frame failed");
                                return;
                            }
                            Log.i(TAG, "captured frame width: " + plVideoFrame.getWidth() + " height: " + plVideoFrame.getHeight() + " timestamp: " + plVideoFrame.getTimestampMs());
                            try {
                                // Save the captured frame as a JPEG, then toast
                                // the destination path on the UI thread.
                                FileOutputStream fos = new FileOutputStream(Config.CAPTURED_FRAME_FILE_PATH);
                                plVideoFrame.toBitmap().compress(Bitmap.CompressFormat.JPEG, 100, fos);
                                fos.close();
                                runOnUiThread(new Runnable() {
                                    @Override
                                    public void run() {
                                        ToastUtils.s(VideoRecordActivity.this, "截帧已保存到路径:" + Config.CAPTURED_FRAME_FILE_PATH);
                                    }
                                });
                            } catch (FileNotFoundException e) {
                                e.printStackTrace();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    });
                } else {
                    if (mSectionBegan) {
                        endSectionInternal();
                    } else {
                        beginSectionInternal();
                    }
                }
            }
        });
        // Horizontal fling over the preview cycles through built-in filters.
        // NOTE(review): the right-fling threshold is a hard-coded -120 while
        // the left-fling uses FLING_MIN_DISTANCE (350) — asymmetry looks
        // unintentional; confirm before changing.
        mGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
            @Override
            public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
                if (e1 == null || e2 == null) {
                    return false;
                }
                if (e1.getX() - e2.getX() > FLING_MIN_DISTANCE) {
                    mFilterItemAdapter.changeToNextFilter();
                    return true;
                } else if (e1.getX() - e2.getX() < -120) {
                    mFilterItemAdapter.changeToLastFilter();
                    return true;
                }
                return false;
            }
        });
        // Touching the preview dismisses whichever panel is open (beauty or
        // filter chooser); otherwise the event feeds the fling detector.
        mPreview.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                if (mFaceUnityControlView.getVisibility() == View.VISIBLE) {
                    ViewOperator.startDisappearAnimY(mFaceUnityControlView);
                    mFaceUnityControlView.setVisibility(View.GONE);
                    mRecordBtns.setVisibility(View.VISIBLE);
                    return true;
                }
                if (mIsSelectingFilter) {
                    ViewOperator.startDisappearAnimY(mFilterBottomView);
                    mFilterBottomView.setVisibility(View.GONE);
                    mRecordBtns.setVisibility(View.VISIBLE);
                    mIsSelectingFilter = false;
                }
                mGestureDetector.onTouchEvent(event);
                return true;
            }
        });
    }
@Override
protected void onStart() {
super.onStart();
if (Build.VERSION.SDK_INT >= 19 && Utils.checkDeviceHasNavigationBar(this)) {
int flag = View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION // hide
| View.SYSTEM_UI_FLAG_FULLSCREEN // hide status bar
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
mDecorView.setSystemUiVisibility(flag);
}
}
    @Override
    protected void onResume() {
        super.onResume();
        Log.i(TAG, "onResume");
        // Keep the shutter disabled until the recorder is ready again
        // (presumably re-enabled by a recorder-ready callback elsewhere in
        // this class — verify).
        mRecordBtn.setEnabled(false);
        mShortVideoRecorder.resume();
        // Re-apply the last chosen built-in filter after resume; the GL
        // context may have been recreated while paused.
        mShortVideoRecorder.setBuiltinFilter(mCurrentFilter);
        mFaceUnityControlView.onResume();
    }
    @Override
    protected void onPause() {
        super.onPause();
        Log.i(TAG, "onPause");
        // End any in-flight section first so duration bookkeeping stays
        // consistent before the recorder itself is paused.
        if (mSectionBegan) {
            endSectionInternal();
        }
        mShortVideoRecorder.pause();
    }
    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release the camera/microphone/encoder resources held by the recorder.
        mShortVideoRecorder.destroy();
    }
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK && requestCode == CHOOSE_MUSIC_REQUEST_CODE) {
AudioFile audioFile = (AudioFile) data.getSerializableExtra(ChooseMusicActivity.SELECTED_MUSIC_FILE);
long startTime = data.getLongExtra(ChooseMusicActivity.START_TIME, 0);
if (audioFile != null) {
mShortVideoRecorder.setMusicFile(audioFile.getFilePath());
mShortVideoRecorder.setMusicPosition((int) startTime);
} else {
mShortVideoRecorder.setMusicFile(null);
}
}
}
    public void onClickBack(View v) {
        // Layout onClick handler for the back button: just close this screen.
        finish();
    }
public void onClickDeleteLastSection(View v) {
if (!mShortVideoRecorder.deleteLastSection()) {
ToastUtils.s(this, "回删视频段失败");
}
}
public void onClickSwitchFlash(View v) {
if (!mShortVideoRecorder.isFlashSupport()) {
return;
}
mFlashEnabled = !mFlashEnabled;
mShortVideoRecorder.setFlashEnabled(mFlashEnabled);
mSwitchFlashBtn.setActivated(mFlashEnabled);
Drawable drawable= getResources().getDrawable(mFlashEnabled ? R.mipmap.qn_flash_on : R.mipmap.qn_flash_off);
drawable.setBounds(0, 0, drawable.getMinimumWidth(), drawable.getMinimumHeight());
mSwitchFlashBtn.setCompoundDrawables(null, drawable, null, null);
}
    public void onClickSwitchCamera(View v) {
        mShortVideoRecorder.switchCamera();
        // Mirror the recorder's camera switch in our own camera-id state.
        if (mCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
        } else {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
            // Front camera has no torch, so reset the flash icon to "off".
            // NOTE(review): mFlashEnabled itself is not reset here, so the
            // icon and the flag can disagree after switching back — confirm.
            Drawable drawable= getResources().getDrawable(R.mipmap.qn_flash_off);
            drawable.setBounds(0, 0, drawable.getMinimumWidth(), drawable.getMinimumHeight());
            mSwitchFlashBtn.setCompoundDrawables(null, drawable, null, null);
        }
        // Inform the FaceUnity renderer of the new camera orientation so the
        // beauty effects are rendered with the right rotation.
        mInputProp = getCameraOrientation(mCameraId);
        Log.d("mInputProp", mInputProp + "");
        mFURenderer.onCameraChange(getCameraType(mCameraId), getCameraOrientation(mCameraId), mInputProp);
    }
public void onClickChooseMusic(View v) {
if (mSectionCount > 0) {
ToastUtils.s(this, getString(R.string.can_not_add_music_tips));
return;
}
Intent intent = new Intent(VideoRecordActivity.this, ChooseMusicActivity.class);
intent.putExtra("videoDurationMs", mRecordSetting.getMaxRecordDuration());
startActivityForResult(intent, CHOOSE_MUSIC_REQUEST_CODE);
}
public void onClickConcatSections(View v) {
if (mSectionBegan) {
ToastUtils.s(VideoRecordActivity.this, "当前正在拍摄,无法拼接!");
return;
}
mShortVideoRecorder.concatSections(VideoRecordActivity.this);
}
    public void onClickStickers(View v) {
        // Toggle the FaceUnity beauty/sticker panel; the record controls
        // yield the screen space while the panel is shown.
        if (mFaceUnityControlView.getVisibility() == View.GONE) {
            mFaceUnityControlView.setVisibility(View.VISIBLE);
            mRecordBtns.setVisibility(View.INVISIBLE);
            ViewOperator.startAppearAnimY(mFaceUnityControlView);
        } else {
            ViewOperator.startDisappearAnimY(mFaceUnityControlView);
            mFaceUnityControlView.setVisibility(View.GONE);
            mRecordBtns.setVisibility(View.VISIBLE);
        }
    }
    public void onClickFilterSelect(View v) {
        // Show the built-in filter chooser in place of the record controls;
        // dismissal happens in the preview's touch listener.
        mRecordBtns.setVisibility(View.INVISIBLE);
        mFilterBottomView.setVisibility(View.VISIBLE);
        ViewOperator.startAppearAnimY(mFilterBottomView);
        mIsSelectingFilter = true;
    }
    public void onSpeedClicked(View view) {
        // Speed selector (0.25x .. 4x). Unavailable in capture (photo) mode.
        if (mRecordMode == CAPTURE) {
            ToastUtils.s(this, "拍照模式下无法修改拍摄倍数!");
            return;
        }
        // In variable-frame-rate mode, speed cannot change mid-recording.
        if (!mVideoEncodeSetting.IsConstFrameRateEnabled() || !mRecordSetting.IsRecordSpeedVariable()) {
            if (mSectionProgressBar.isRecorded()) {
                ToastUtils.s(this, "变帧率模式下,无法在拍摄中途修改拍摄倍数!");
                return;
            }
        }
        // Un-highlight the previously selected label, highlight the new one.
        if (mSpeedTextView != null) {
            mSpeedTextView.setTextColor(Color.WHITE);
        }
        TextView textView = (TextView) view;
        textView.setTextColor(getResources().getColor(R.color.colorAccent));
        mSpeedTextView = textView;
        switch (view.getId()) {
            case R.id.super_slow_speed_btn:
                mRecordSpeed = 0.25;
                break;
            case R.id.slow_speed_btn:
                mRecordSpeed = 0.5;
                break;
            case R.id.normal_speed_btn:
                mRecordSpeed = 1;
                break;
            case R.id.fast_speed_btn:
                mRecordSpeed = 2;
                break;
            case R.id.super_fast_speed_btn:
                mRecordSpeed = 4;
                break;
        }
        mShortVideoRecorder.setRecordSpeed(mRecordSpeed);
        // Const-frame-rate + variable-speed: the progress bar advances at the
        // chosen speed while the duration limits stay fixed. Otherwise the
        // duration limits themselves are scaled by the speed factor.
        if (mRecordSetting.IsRecordSpeedVariable() && mVideoEncodeSetting.IsConstFrameRateEnabled()) {
            mSectionProgressBar.setProceedingSpeed(mRecordSpeed);
            mRecordSetting.setMaxRecordDuration(RecordSettings.DEFAULT_MAX_RECORD_DURATION);
            mSectionProgressBar.setFirstPointTime(RecordSettings.DEFAULT_MIN_RECORD_DURATION);
        } else {
            mRecordSetting.setMaxRecordDuration((long) (RecordSettings.DEFAULT_MAX_RECORD_DURATION * mRecordSpeed));
            mSectionProgressBar.setFirstPointTime((long) (RecordSettings.DEFAULT_MIN_RECORD_DURATION * mRecordSpeed));
        }
        mSectionProgressBar.setTotalTime(this, mRecordSetting.getMaxRecordDuration());
    }
    private void initBuiltInFilters() {
        // Builds the filter chooser from the recorder's built-in filter list,
        // paired with the human-readable names from resources. Selecting a
        // filter applies it immediately and flashes its description.
        mFilterBottomView = findViewById(R.id.filter_select_view);
        mFilterItemAdapter = new FilterItemAdapter(this,
                new ArrayList<PLBuiltinFilter>(Arrays.asList(mShortVideoRecorder.getBuiltinFilterList())),
                new ArrayList<String>(Arrays.asList(getResources().getStringArray(R.array.built_in_filters))));
        mFilterItemAdapter.setOnFilterSelectListener(new FilterItemAdapter.OnFilterSelectListener() {
            @Override
            public void onFilterSelected(String filterName, String description) {
                // Remember the filter so onResume() can re-apply it.
                mCurrentFilter = filterName;
                mShortVideoRecorder.setBuiltinFilter(filterName);
                showDescription(description, 1500, false);
            }
        });
        mFilterBottomView.init(mFilterItemAdapter);
    }
/**
 * Picks the camera to open: prefer an external (3rd) camera, then the front
 * camera, and fall back to the back camera when neither is available.
 */
private PLCameraSetting.CAMERA_FACING_ID chooseCameraFacingId() {
    final PLCameraSetting.CAMERA_FACING_ID[] preferenceOrder = {
            PLCameraSetting.CAMERA_FACING_ID.CAMERA_FACING_3RD,
            PLCameraSetting.CAMERA_FACING_ID.CAMERA_FACING_FRONT,
    };
    for (PLCameraSetting.CAMERA_FACING_ID candidate : preferenceOrder) {
        if (PLCameraSetting.hasCameraFacing(candidate)) {
            return candidate;
        }
    }
    return PLCameraSetting.CAMERA_FACING_ID.CAMERA_FACING_BACK;
}
/**
 * Refreshes the UI widgets that depend on how many recorded sections exist.
 * Safe to call from any thread; the widget updates run on the UI thread.
 *
 * @param count     number of recorded sections
 * @param totalTime total recorded duration in milliseconds
 */
private void onSectionCountChanged(final int count, final long totalTime) {
    mSectionCount = count;
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            // Deleting a section only makes sense when at least one exists.
            mDeleteBtn.setEnabled(count > 0);
            mDeleteBtn.setImageResource(count > 0 ? R.mipmap.qn_delete_section_active : R.mipmap.qn_delete_section_inactive);
            // Concatenation requires the minimum total duration to be reached.
            mConcatBtn.setEnabled(totalTime >= (RecordSettings.DEFAULT_MIN_RECORD_DURATION));
            // Hide the record/capture mode switcher once a section exists.
            mRecordModeLayout.setVisibility(count > 0 ? View.INVISIBLE : View.VISIBLE);
        }
    });
}
/**
 * Puts the buttons into recording/idle state: the camera-switch button is
 * disabled while recording and the record button icon reflects the state.
 */
private void updateRecordingBtns(boolean isRecording) {
    mSwitchCameraBtn.setEnabled(!isRecording);
    mRecordBtn.setBackgroundResource(isRecording ? R.mipmap.qn_shooting : R.mipmap.qn_video);
}
/**
 * Starts recording a new video section. On success the begin timestamp is
 * remembered, the progress bar switches to START and the buttons enter
 * "recording" mode; on failure a toast informs the user.
 */
private void beginSectionInternal() {
    if (mShortVideoRecorder.beginSection()) {
        mSectionBegan = true;
        mSectionBeginTSMs = System.currentTimeMillis();
        mSectionProgressBar.setCurrentState(SectionProgressBar.State.START);
        updateRecordingBtns(true);
    } else {
        ToastUtils.s(VideoRecordActivity.this, "无法开始视频段录制");
    }
}
/**
 * Ends the video section started by {@link #beginSectionInternal()}: pushes
 * the new accumulated wall-clock and video durations onto the bookkeeping
 * stacks, adds a break point to the progress bar and stops the recorder.
 */
private void endSectionInternal() {
    // Wall-clock time spent recording this section.
    long sectionRecordDurationMs = System.currentTimeMillis() - mSectionBeginTSMs;
    long totalRecordDurationMs = sectionRecordDurationMs + (mDurationRecordStack.isEmpty() ? 0 : mDurationRecordStack.peek().longValue());
    // At record speed s the produced video lasts 1/s of the wall-clock time.
    double sectionVideoDurationMs = sectionRecordDurationMs / mRecordSpeed;
    double totalVideoDurationMs = sectionVideoDurationMs + (mDurationVideoStack.isEmpty() ? 0 : mDurationVideoStack.peek().doubleValue());
    // Fix: use valueOf instead of the boxed-type constructors
    // (new Long(...) / new Double(...)), which are deprecated since Java 9.
    mDurationRecordStack.push(Long.valueOf(totalRecordDurationMs));
    mDurationVideoStack.push(Double.valueOf(totalVideoDurationMs));
    if (mRecordSetting.IsRecordSpeedVariable()) {
        // Variable-speed mode tracks progress in produced-video time.
        mSectionProgressBar.addBreakPointTime((long) totalVideoDurationMs);
    } else {
        mSectionProgressBar.addBreakPointTime(totalRecordDurationMs);
    }
    mSectionProgressBar.setCurrentState(SectionProgressBar.State.PAUSE);
    mShortVideoRecorder.endSection();
    mSectionBegan = false;
}
// Touch listener for the record/capture mode bar: a horizontal swipe whose
// travel exceeds FLING_MIN_DISTANCE_SWITCH_MODE switches modes; a short tap
// selects the tapped mode button directly.
private View.OnTouchListener onModeBarTouchListener = new View.OnTouchListener() {
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                mPosX = event.getX();
                mCurPosX = 0;
                return true;
            case MotionEvent.ACTION_MOVE:
                mCurPosX = event.getX();
                // Swipe handling: compare the travel distance to the threshold.
                if (mCurPosX - mPosX > 0 && (Math.abs(mCurPosX - mPosX) > FLING_MIN_DISTANCE_SWITCH_MODE)) {
                    // Positive delta: finger dragged toward larger X.
                    // NOTE(review): the original comment labeled this branch
                    // "swipe left" and the next one "swipe right"; the delta
                    // signs suggest the opposite — confirm intended wording.
                    if(mRecordMode == CAPTURE) {
                        switchCameraModeButton(RECORD);
                    }
                    return false;
                } else if (mCurPosX - mPosX < 0 && (Math.abs(mCurPosX - mPosX) > FLING_MIN_DISTANCE_SWITCH_MODE)) {
                    // Negative delta: finger dragged toward smaller X.
                    if(mRecordMode == RECORD)
                    {
                        switchCameraModeButton(CAPTURE);
                    }
                    return false;
                }
                return true;
            case MotionEvent.ACTION_UP:
                // Tap handling: short travel (or no MOVE event at all, in
                // which case mCurPosX is still 0) counts as a click.
                if(Math.abs(mCurPosX - mPosX) < FLING_MIN_DISTANCE_SWITCH_MODE || mCurPosX == 0){
                    switch (v.getId()){
                        // Capture (photo) mode button tapped.
                        case R.id.record_mode_picture:
                            switchCameraModeButton(CAPTURE);
                            break;
                        // Record (video) mode button tapped.
                        case R.id.record_mode_video:
                            switchCameraModeButton(RECORD);
                            break;
                    }
                    return false;
                }
        }
        return false;
    }
};
/**
 * Switches the bottom bar between record and capture mode: animates both
 * mode buttons horizontally, updates label colors and the record button
 * icon. No-op while the previous animation is still running or when the
 * requested mode is already active.
 *
 * @param index target mode (record appears to be 0 and capture 1, matching
 *              the color logic below — NOTE(review): confirm the RECORD and
 *              CAPTURE constant values)
 */
private void switchCameraModeButton(int index)
{
    if(valueAnimator != null && valueAnimator.isRunning() || mRecordMode == index) return;
    // Highlight the selected mode's label and dim the other one.
    mRecordModeBtn.setTextColor(index == 0 ? getResources().getColor(R.color.white) : getResources().getColor(R.color.alpha_white_66));
    mCaptureModeBtn.setTextColor(index == 1 ? getResources().getColor(R.color.white) : getResources().getColor(R.color.alpha_white_66));
    final float[] Xs = getModeButtonWidth();
    float offSet = 0;
    // Lazily measure the horizontal distance between the two button centers.
    if (mOffset == Float.MIN_VALUE) {
        mOffset = mCaptureModeBtn.getX() + (float) mCaptureModeBtn.getMeasuredWidth() / 2
                - (mRecordModeBtn.getX() + (float) mRecordModeBtn.getMeasuredWidth() / 2);
    }
    if(mRecordMode == 0 && index == 1) {
        offSet = -mOffset;
    } else if(mRecordMode == 1 && index == 0) {
        offSet = mOffset;
    }
    // Slide both buttons by the computed offset over 200 ms.
    valueAnimator = ValueAnimator.ofFloat(0, offSet);
    valueAnimator.setDuration(200);
    valueAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener()
    {
        @Override
        public void onAnimationUpdate(ValueAnimator animation)
        {
            float offSet = (float) animation.getAnimatedValue();
            mRecordModeBtn.setX(Xs[0] + offSet);
            mCaptureModeBtn.setX(Xs[1] + offSet);
        }
    });
    valueAnimator.start();
    // The record button's background follows the selected mode.
    mRecordBtn.setBackground(getResources().getDrawable(index == CAPTURE ? R.mipmap.qn_photo : R.mipmap.qn_video));
    mRecordMode = index;
}
/**
 * Snapshots the current X positions of the two bottom-bar mode buttons:
 * index 0 is the record button, index 1 the capture button.
 */
private float[] getModeButtonWidth()
{
    return new float[] { mRecordModeBtn.getX(), mCaptureModeBtn.getX() };
}
// Focus-state callbacks from the camera; this activity does not react to
// focus changes.
@Override
public void onManualFocusStart(boolean b) {
}
@Override
public void onManualFocusStop(boolean b) {
}
@Override
public void onManualFocusCancel() {
}
@Override
public void onAutoFocusStart() {
}
@Override
public void onAutoFocusStop() {
}
/**
 * Invoked when the recorder finished preparing: enables the record button
 * on the UI thread and notifies the user.
 */
@Override
public void onReady() {
    Log.i(TAG, "onReady");
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            mRecordBtn.setEnabled(true);
            ToastUtils.s(VideoRecordActivity.this, "可以开始拍摄咯");
        }
    });
}
// Recorder error callback; only logged for now.
@Override
public void onError(int i) {
    Log.i(TAG, "onError : " + i);
}
// Remaining recorder callbacks that need no handling in this activity.
@Override
public void onDurationTooShort() {
}
@Override
public void onRecordStarted() {
}
@Override
public void onSectionRecording(long sectionDurationMs, long videoDurationMs, int sectionCount) {
}
/** Restores the idle button state once recording has actually stopped. */
@Override
public void onRecordStopped() {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            updateRecordingBtns(false);
        }
    });
}
/**
 * Called when a section was appended. Reads the accumulated video duration
 * from the bookkeeping stack (assumes endSectionInternal() already pushed
 * the new total — TODO confirm ordering), clamps it to the configured
 * maximum and refreshes the section-count UI.
 */
@Override
public void onSectionIncreased(long incDuration, long totalDuration, int sectionCount) {
    double videoSectionDuration = mDurationVideoStack.isEmpty() ? 0 : mDurationVideoStack.peek().doubleValue();
    if ((videoSectionDuration + incDuration / mRecordSpeed) >= mRecordSetting.getMaxRecordDuration()) {
        videoSectionDuration = mRecordSetting.getMaxRecordDuration();
    }
    Log.d(TAG, "videoSectionDuration: " + videoSectionDuration + "; incDuration: " + incDuration);
    onSectionCountChanged(sectionCount, (long) videoSectionDuration);
}
/**
 * Called when the last section was removed: pops the matching entries from
 * both duration stacks and rolls the progress bar back one break point.
 */
@Override
public void onSectionDecreased(long decDuration, long totalDuration, int sectionCount) {
    mSectionProgressBar.removeLastBreakPoint();
    if (!mDurationVideoStack.isEmpty()) {
        mDurationVideoStack.pop();
    }
    if (!mDurationRecordStack.isEmpty()) {
        mDurationRecordStack.pop();
    }
    double currentDuration = mDurationVideoStack.isEmpty() ? 0 : mDurationVideoStack.peek().doubleValue();
    onSectionCountChanged(sectionCount, (long) currentDuration);
}
/**
 * Invoked when the configured maximum total duration has been reached:
 * finalizes the progress bar and informs the user.
 */
@Override
public void onRecordCompleted() {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            mSectionBegan = false;
            mSectionProgressBar.addBreakPointTime(mRecordSetting.getMaxRecordDuration());
            mSectionProgressBar.setCurrentState(SectionProgressBar.State.PAUSE);
            ToastUtils.s(VideoRecordActivity.this, "已达到拍摄总时长");
        }
    });
}
/**
 * Called when the concatenated video was saved: registers it with the
 * device media store, then opens the edit screen on the UI thread.
 */
@Override
public void onSaveVideoSuccess(String s) {
    MediaUtils.storeVideo(this, new File(s), Config.MIME_TYPE_VIDEO);
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            VideoEditActivity.start(VideoRecordActivity.this, Config.RECORD_FILE_PATH);
        }
    });
}
// Save/progress callbacks that need no handling here.
@Override
public void onSaveVideoFailed(int i) {
}
@Override
public void onSaveVideoCanceled() {
}
@Override
public void onProgressUpdate(float v) {
}
/**
 * Caches the raw camera frame for the FaceUnity renderer. Returning false
 * lets the SDK continue its normal frame processing.
 */
@Override
public boolean onPreviewFrame(byte[] data, int width, int height, int rotation, int fmt, long tsInNanoTime) {
    mCameraData = data;
    return false;
}
// GL surface lifecycle: load FaceUnity resources when the surface appears...
@Override
public void onSurfaceCreated() {
    mFURenderer.loadItems();
}
@Override
public void onSurfaceChanged(int width, int height) {
}
// ...and release them (plus the cached frame) when it is destroyed.
@Override
public void onSurfaceDestroy() {
    mFURenderer.destroyItems();
    mCameraData = null;
}
/**
 * Per-frame GL hook: when a raw camera frame is available, let the
 * FaceUnity renderer produce the processed frame and return its texture id;
 * otherwise pass the original texture through unchanged.
 */
@Override
public int onDrawFrame(int texId, int texWidth, int texHeight, long timeStampNs, float[] transformMatrix) {
    if (mCameraData != null) {
        return mFURenderer.onDrawFrameByFBO(mCameraData, texId, texWidth, texHeight);
    }
    return texId;
}
//----- FaceUnity SDK related state -----//
private FURenderer mFURenderer;
// Camera id and input orientation/property handed to the renderer.
private int mCameraId;
private int mInputProp;
private BeautyControlView mFaceUnityControlView;
private TextView mFaceUnityEffectDescription;
// Latest raw preview frame captured in onPreviewFrame (format not shown
// here — presumably the camera's preview format; confirm). Reset to null
// when the GL surface is destroyed.
private byte[] mCameraData;
/**
 * Returns the mounting orientation (degrees) reported by the platform for
 * the given camera id, logging the value for debugging.
 */
public int getCameraOrientation(int cameraId) {
    final Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, cameraInfo);
    Log.d("orientation", String.valueOf(cameraInfo.orientation));
    return cameraInfo.orientation;
}
/**
 * Returns the facing constant (front/back) reported by the platform for the
 * given camera id, logging the value for debugging.
 */
public int getCameraType(int cameraId) {
    final Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, cameraInfo);
    Log.d("facing", String.valueOf(cameraInfo.facing));
    return cameraInfo.facing;
}
// Clears and hides the currently shown effect/filter description label;
// scheduled via postDelayed from showDescription().
private Runnable effectDescriptionHide = new Runnable() {
    @Override
    public void run() {
        mCurrentDescriptionText.setText("");
        mCurrentDescriptionText.setVisibility(View.INVISIBLE);
    }
};
/**
 * Shows the FaceUnity effect description identified by a string resource id
 * for the given number of milliseconds. A resource id of 0 means "nothing
 * to show" and is ignored.
 */
private void showDescription(int str, int time) {
    if (str != 0) {
        showDescription(getString(str), time, true);
    }
}
/**
 * Shows a transient description label, hiding it again after {@code time}
 * milliseconds.
 *
 * @param description text to display
 * @param time        display duration in milliseconds
 * @param isFaceUnity true targets the FaceUnity label; false targets the
 *                    built-in filter label, which also appends a swipe hint
 *                    in a smaller font
 */
private void showDescription(String description, int time, boolean isFaceUnity) {
    SpannableString descriptionText;
    if (isFaceUnity) {
        descriptionText = new SpannableString(description);
    } else {
        descriptionText = new SpannableString(String.format("%s%n<<左右滑动切换滤镜>>", description));
        // Filter name at 30sp, the swipe hint below it at 14sp.
        descriptionText.setSpan(new AbsoluteSizeSpan(30, true), 0, description.length(), Spanned.SPAN_INCLUSIVE_EXCLUSIVE);
        descriptionText.setSpan(new AbsoluteSizeSpan(14, true), description.length(), descriptionText.length(), Spanned.SPAN_INCLUSIVE_EXCLUSIVE);
    }
    mCurrentDescriptionText = isFaceUnity ? mFaceUnityEffectDescription : mEffectDescription;
    // Cancel any pending auto-hide before (re)showing the label.
    mCurrentDescriptionText.removeCallbacks(effectDescriptionHide);
    mCurrentDescriptionText.setVisibility(View.VISIBLE);
    mCurrentDescriptionText.setText(descriptionText);
    mCurrentDescriptionText.postDelayed(effectDescriptionHide, time);
}
}
| apache-2.0 |
wspeirs/sop4j-base | src/main/java/com/sop4j/base/joda/time/chrono/BasicFixedMonthChronology.java | 5921 | /*
* Copyright 2001-2005 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sop4j.base.joda.time.chrono;
import com.sop4j.base.joda.time.Chronology;
import com.sop4j.base.joda.time.DateTimeConstants;
/**
* Abstract implementation of a calendar system based around fixed length months.
* <p>
* As the month length is fixed various calculations can be optimised.
* This implementation assumes any additional days after twelve
* months fall into a thirteenth month.
* <p>
* BasicFixedMonthChronology is thread-safe and immutable, and all
* subclasses must be as well.
*
* @author Brian S O'Neill
* @author Stephen Colebourne
* @since 1.2, refactored from CopticChronology
*/
abstract class BasicFixedMonthChronology extends BasicChronology {

    /** Serialization lock */
    private static final long serialVersionUID = 261387371998L;

    /** The length of every ordinary month, in days. */
    static final int MONTH_LENGTH = 30;

    /** The typical millis per year (based on a 365.25-day year). */
    static final long MILLIS_PER_YEAR =
        (long) (365.25 * DateTimeConstants.MILLIS_PER_DAY);

    /** The length of the month in millis. */
    static final long MILLIS_PER_MONTH = ((long) MONTH_LENGTH) * DateTimeConstants.MILLIS_PER_DAY;

    //-----------------------------------------------------------------------
    /**
     * Restricted constructor.
     *
     * @param base the base chronology
     * @param param the init parameter
     * @param minDaysInFirstWeek the minimum days in the first week
     */
    BasicFixedMonthChronology(Chronology base, Object param, int minDaysInFirstWeek) {
        super(base, param, minDaysInFirstWeek);
    }

    //-----------------------------------------------------------------------
    /**
     * Sets the year of the instant, preserving day-of-year and millis-of-day
     * where possible.
     */
    long setYear(long instant, int year) {
        // optimised implementation of set, due to fixed months
        int thisYear = getYear(instant);
        int dayOfYear = getDayOfYear(instant, thisYear);
        int millisOfDay = getMillisOfDay(instant);
        if (dayOfYear > 365) {
            // Current year is leap, and day is leap.
            if (!isLeapYear(year)) {
                // Moving to a non-leap year, leap day doesn't exist.
                dayOfYear--;
            }
        }
        instant = getYearMonthDayMillis(year, 1, dayOfYear);
        instant += millisOfDay;
        return instant;
    }

    //-----------------------------------------------------------------------
    /**
     * Returns the whole-year difference between the two instants, rounding
     * toward zero by comparing the within-year remainders.
     */
    long getYearDifference(long minuendInstant, long subtrahendInstant) {
        // optimised implementation of getDifference, due to fixed months
        int minuendYear = getYear(minuendInstant);
        int subtrahendYear = getYear(subtrahendInstant);
        // Inlined remainder method to avoid duplicate calls to get.
        long minuendRem = minuendInstant - getYearMillis(minuendYear);
        long subtrahendRem = subtrahendInstant - getYearMillis(subtrahendYear);
        int difference = minuendYear - subtrahendYear;
        if (minuendRem < subtrahendRem) {
            difference--;
        }
        return difference;
    }

    //-----------------------------------------------------------------------
    // With fixed-length months the offset into the year is a simple multiple.
    long getTotalMillisByYearMonth(int year, int month) {
        return ((month - 1) * MILLIS_PER_MONTH);
    }

    //-----------------------------------------------------------------------
    int getDayOfMonth(long millis) {
        // optimised for fixed months
        return (getDayOfYear(millis) - 1) % MONTH_LENGTH + 1;
    }

    //-----------------------------------------------------------------------
    // A year is leap when (year & 3) == 3, i.e. every fourth year in this
    // calendar system (pattern used by the Coptic-style chronologies).
    boolean isLeapYear(int year) {
        return (year & 3) == 3;
    }

    //-----------------------------------------------------------------------
    // Months 1-12 always have 30 days; the 13th month has 5, or 6 in a leap
    // year, to absorb the remaining days of the year.
    int getDaysInYearMonth(int year, int month) {
        return (month != 13) ? MONTH_LENGTH : (isLeapYear(year) ? 6 : 5);
    }

    //-----------------------------------------------------------------------
    int getDaysInMonthMax() {
        return MONTH_LENGTH;
    }

    //-----------------------------------------------------------------------
    // Maximum day count per month: 30 for months 1-12, 6 for month 13.
    int getDaysInMonthMax(int month) {
        return (month != 13 ? MONTH_LENGTH : 6);
    }

    //-----------------------------------------------------------------------
    int getMonthOfYear(long millis) {
        return (getDayOfYear(millis) - 1) / MONTH_LENGTH + 1;
    }

    //-----------------------------------------------------------------------
    // Faster overload when the year is already known: divide the millis
    // elapsed since the start of the year by the fixed month length.
    int getMonthOfYear(long millis, int year) {
        long monthZeroBased = (millis - getYearMillis(year)) / MILLIS_PER_MONTH;
        return ((int) monthZeroBased) + 1;
    }

    //-----------------------------------------------------------------------
    int getMaxMonth() {
        return 13;
    }

    //-----------------------------------------------------------------------
    long getAverageMillisPerYear() {
        return MILLIS_PER_YEAR;
    }

    //-----------------------------------------------------------------------
    long getAverageMillisPerYearDividedByTwo() {
        return MILLIS_PER_YEAR / 2;
    }

    //-----------------------------------------------------------------------
    long getAverageMillisPerMonth() {
        return MILLIS_PER_MONTH;
    }

}
| apache-2.0 |
Syncleus/aparapi | src/test/java/com/aparapi/codegen/test/VarargsForEach.java | 1081 | /**
* Copyright (c) 2016 - 2018 Syncleus, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aparapi.codegen.test;
/**
 * Code-generation test kernel: {@code run()} relies on a varargs method and
 * a for-each loop over the varargs array. Per the marker comment at the end
 * of this file, parsing this class is expected to raise
 * {@code ClassParseException}, so the exact constructs used here are the
 * point of the test — do not "modernize" them.
 */
public class VarargsForEach {
    // Kernel output slot.
    int out[] = new int[1];
    /**
     * Returns the largest of the given values, or 0 when none are given.
     */
    public static int max(int... values) {
        if (values.length == 0) {
            return 0;
        }
        int max = Integer.MIN_VALUE;
        for (int i : values) {
            if (i > max)
                max = i;
        }
        return max;
    }
    public void run() {
        out[0] = max(1, 4, 5, 9, 3);
    }
}
/**{Throws{ClassParseException}Throws}**/
| apache-2.0 |
mattyb149/incubator-drill | exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/PageReader.java | 15272 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet.columnreaders;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.DrillBuf;
import io.netty.buffer.Unpooled;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.exec.store.parquet.ColumnDataReader;
import org.apache.drill.exec.store.parquet.ParquetFormatPlugin;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import parquet.bytes.BytesInput;
import parquet.column.Dictionary;
import parquet.column.ValuesType;
import parquet.column.page.DictionaryPage;
import parquet.column.page.Page;
import parquet.column.values.ValuesReader;
import parquet.column.values.dictionary.DictionaryValuesReader;
import parquet.format.PageHeader;
import parquet.format.PageType;
import parquet.format.Util;
import parquet.hadoop.metadata.ColumnChunkMetaData;
import parquet.hadoop.metadata.CompressionCodecName;
import parquet.schema.PrimitiveType;
// class to keep track of the read position of variable length columns
final class PageReader {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PageReader.class);

  // Column reader this page reader feeds; also supplies the codec factory
  // and the operator context used for decompression and buffer allocation.
  private final ColumnReader parentColumnReader;
  // Positioned reader over this column chunk's bytes in the file.
  private final ColumnDataReader dataReader;
  // store references to the pages that have been uncompressed, but not copied to ValueVectors yet
  Page currentPage;
  // buffer to store bytes of current page
  DrillBuf pageDataByteArray;
  // for variable length data we need to keep track of our current position in the page data
  // as the values and lengths are intermixed, making random access to the length data impossible
  long readyToReadPosInBytes;
  // read position in the current page, stored in the ByteBuf in ParquetRecordReader called bufferWithAllData
  long readPosInBytes;
  // bit shift needed for the next page if the last one did not line up with a byte boundary
  int bitShift;
  // storage space for extra bits at the end of a page if they did not line up with a byte boundary
  // prevents the need to keep the entire last page, as these pageDataByteArray need to be added to the next batch
  //byte extraBits;
  // used for columns where the number of values that will fit in a vector is unknown
  // currently used for variable length
  // TODO - reuse this when compressed vectors are added, where fixed length values will take up a
  // variable amount of space
  // For example: if nulls are stored without extra space left in the data vector
  // (this is currently simplifying random access to the data during processing, but increases the size of the vectors)
  int valuesReadyToRead;
  // the number of values read out of the last page
  int valuesRead;
  int byteLength;
  //int rowGroupIndex;
  ValuesReader definitionLevels;
  ValuesReader repetitionLevels;
  ValuesReader valueReader;
  // Two readers over the same dictionary-encoded data: one pass to size the
  // values, a second pass to copy them (see next()).
  ValuesReader dictionaryLengthDeterminingReader;
  ValuesReader dictionaryValueReader;
  Dictionary dictionary;
  PageHeader pageHeader = null;
  // Per-page buffers, released before each new page via clearBuffers().
  List<ByteBuf> allocatedBuffers;
  // These need to be held throughout reading of the entire column chunk
  List<ByteBuf> allocatedDictionaryBuffers;

  /**
   * Opens the column chunk and, when the chunk declares a dictionary page
   * offset, eagerly reads and decodes that dictionary.
   * NOTE(review): if an IOException is thrown after fs.open succeeds, the
   * FSDataInputStream is not closed here — confirm whether ColumnDataReader
   * takes ownership or this is a handle leak on the error path.
   */
  PageReader(ColumnReader parentStatus, FileSystem fs, Path path, ColumnChunkMetaData columnChunkMetaData)
    throws ExecutionSetupException{
    this.parentColumnReader = parentStatus;
    allocatedBuffers = new ArrayList<ByteBuf>();
    allocatedDictionaryBuffers = new ArrayList<ByteBuf>();
    long totalByteLength = columnChunkMetaData.getTotalUncompressedSize();
    long start = columnChunkMetaData.getFirstDataPageOffset();
    try {
      FSDataInputStream f = fs.open(path);
      this.dataReader = new ColumnDataReader(f, start, totalByteLength);
      if (columnChunkMetaData.getDictionaryPageOffset() > 0) {
        // Seek to and decode the chunk's dictionary page up front.
        f.seek(columnChunkMetaData.getDictionaryPageOffset());
        PageHeader pageHeader = Util.readPageHeader(f);
        assert pageHeader.type == PageType.DICTIONARY_PAGE;
        BytesInput bytesIn;
        ByteBuf uncompressedData=allocateBuffer(pageHeader.getUncompressed_page_size());
        allocatedDictionaryBuffers.add(uncompressedData);
        if(parentColumnReader.columnChunkMetaData.getCodec()==CompressionCodecName.UNCOMPRESSED) {
          // No codec: the page bytes can be used as-is.
          dataReader.getPageAsBytesBuf(uncompressedData, pageHeader.compressed_page_size);
          bytesIn=parentColumnReader.parentReader.getCodecFactoryExposer().getBytesInput(uncompressedData,
            pageHeader.getUncompressed_page_size());
        }else{
          // Read the compressed bytes into a temporary buffer, decompress
          // into the long-lived dictionary buffer, then release the temp.
          ByteBuf compressedData=allocateBuffer(pageHeader.compressed_page_size);
          dataReader.getPageAsBytesBuf(compressedData, pageHeader.compressed_page_size);
          bytesIn = parentColumnReader.parentReader.getCodecFactoryExposer()
            .decompress(parentColumnReader.columnChunkMetaData.getCodec(),
              compressedData,
              uncompressedData,
              pageHeader.compressed_page_size,
              pageHeader.getUncompressed_page_size());
          compressedData.release();
        }
        DictionaryPage page = new DictionaryPage(
          bytesIn,
          pageHeader.uncompressed_page_size,
          pageHeader.dictionary_page_header.num_values,
          parquet.column.Encoding.valueOf(pageHeader.dictionary_page_header.encoding.name())
        );
        this.dictionary = page.getEncoding().initDictionary(parentStatus.columnDescriptor, page);
      }
    } catch (IOException e) {
      throw new ExecutionSetupException("Error opening or reading metadata for parquet file at location: "
        + path.getName(), e);
    }
  }

  /**
   * Grab the next page.
   *
   * @return - if another page was present
   * @throws java.io.IOException
   */
  public boolean next() throws IOException {
    currentPage = null;
    valuesRead = 0;
    valuesReadyToRead = 0;
    // TODO - the metadata for total size appears to be incorrect for impala generated files, need to find cause
    // and submit a bug report
    if(!dataReader.hasRemainder() || parentColumnReader.totalValuesRead == parentColumnReader.columnChunkMetaData.getValueCount()) {
      return false;
    }
    clearBuffers();
    // next, we need to decompress the bytes
    // TODO - figure out if we need multiple dictionary pages, I believe it may be limited to one
    // I think we are clobbering parts of the dictionary if there can be multiple pages of dictionary
    // Skip over any dictionary pages that precede the next data page,
    // (re)initializing the dictionary from each one encountered.
    do {
      pageHeader = dataReader.readPageHeader();
      if (pageHeader.getType() == PageType.DICTIONARY_PAGE) {
        //TODO: Handle buffer allocation exception
        BytesInput bytesIn;
        ByteBuf uncompressedData=allocateBuffer(pageHeader.getUncompressed_page_size());
        allocatedDictionaryBuffers.add(uncompressedData);
        if( parentColumnReader.columnChunkMetaData.getCodec()== CompressionCodecName.UNCOMPRESSED) {
          dataReader.getPageAsBytesBuf(uncompressedData, pageHeader.compressed_page_size);
          bytesIn=parentColumnReader.parentReader.getCodecFactoryExposer().getBytesInput(uncompressedData,
            pageHeader.getUncompressed_page_size());
        }else{
          ByteBuf compressedData=allocateBuffer(pageHeader.compressed_page_size);
          dataReader.getPageAsBytesBuf(compressedData, pageHeader.compressed_page_size);
          bytesIn = parentColumnReader.parentReader.getCodecFactoryExposer()
            .decompress(parentColumnReader.columnChunkMetaData.getCodec(),
              compressedData,
              uncompressedData,
              pageHeader.compressed_page_size,
              pageHeader.getUncompressed_page_size());
          compressedData.release();
        }
        DictionaryPage page = new DictionaryPage(
          bytesIn,
          pageHeader.uncompressed_page_size,
          pageHeader.dictionary_page_header.num_values,
          parquet.column.Encoding.valueOf(pageHeader.dictionary_page_header.encoding.name())
        );
        this.dictionary = page.getEncoding().initDictionary(parentColumnReader.columnDescriptor, page);
      }
    } while (pageHeader.getType() == PageType.DICTIONARY_PAGE);
    // pageHeader now describes a data page: read and, if needed, decompress
    // its bytes exactly as for the dictionary page above.
    //TODO: Handle buffer allocation exception
    BytesInput bytesIn;
    ByteBuf uncompressedData=allocateBuffer(pageHeader.getUncompressed_page_size());
    allocatedBuffers.add(uncompressedData);
    if(parentColumnReader.columnChunkMetaData.getCodec()==CompressionCodecName.UNCOMPRESSED) {
      dataReader.getPageAsBytesBuf(uncompressedData, pageHeader.compressed_page_size);
      bytesIn=parentColumnReader.parentReader.getCodecFactoryExposer().getBytesInput(uncompressedData,
        pageHeader.getUncompressed_page_size());
    }else{
      ByteBuf compressedData=allocateBuffer(pageHeader.compressed_page_size);
      dataReader.getPageAsBytesBuf(compressedData, pageHeader.compressed_page_size);
      bytesIn = parentColumnReader.parentReader.getCodecFactoryExposer()
        .decompress(parentColumnReader.columnChunkMetaData.getCodec(),
          compressedData,
          uncompressedData,
          pageHeader.compressed_page_size,
          pageHeader.getUncompressed_page_size());
      compressedData.release();
    }
    currentPage = new Page(
      bytesIn,
      pageHeader.data_page_header.num_values,
      pageHeader.uncompressed_page_size,
      ParquetFormatPlugin.parquetMetadataConverter.getEncoding(pageHeader.data_page_header.repetition_level_encoding),
      ParquetFormatPlugin.parquetMetadataConverter.getEncoding(pageHeader.data_page_header.definition_level_encoding),
      ParquetFormatPlugin.parquetMetadataConverter.getEncoding(pageHeader.data_page_header.encoding)
    );
    byteLength = pageHeader.uncompressed_page_size;
    // NOTE(review): currentPage was unconditionally assigned just above, so
    // this null check can never trigger; kept for byte-identical behavior.
    if (currentPage == null) {
      return false;
    }
    pageDataByteArray = DrillBuf.wrapByteBuffer(currentPage.getBytes().toByteBuffer());
    allocatedBuffers.add(pageDataByteArray);
    readPosInBytes = 0;
    if (parentColumnReader.getColumnDescriptor().getMaxRepetitionLevel() > 0) {
      repetitionLevels = currentPage.getRlEncoding().getValuesReader(parentColumnReader.columnDescriptor, ValuesType.REPETITION_LEVEL);
      repetitionLevels.initFromPage(currentPage.getValueCount(), pageDataByteArray.nioBuffer(), (int) readPosInBytes);
      // we know that the first value will be a 0, at the end of each list of repeated values we will hit another 0 indicating
      // a new record, although we don't know the length until we hit it (and this is a one way stream of integers) so we
      // read the first zero here to simplify the reading processes, and start reading the first value the same as all
      // of the rest. Effectively we are 'reading' the non-existent value in front of the first allowing direct access to
      // the first list of repetition levels
      readPosInBytes = repetitionLevels.getNextOffset();
      repetitionLevels.readInteger();
    }
    if (parentColumnReader.columnDescriptor.getMaxDefinitionLevel() != 0){
      parentColumnReader.currDefLevel = -1;
      definitionLevels = currentPage.getDlEncoding().getValuesReader(parentColumnReader.columnDescriptor, ValuesType.DEFINITION_LEVEL);
      definitionLevels.initFromPage(currentPage.getValueCount(), pageDataByteArray.nioBuffer(), (int) readPosInBytes);
      readPosInBytes = definitionLevels.getNextOffset();
      if ( ! currentPage.getValueEncoding().usesDictionary()) {
        valueReader = currentPage.getValueEncoding().getValuesReader(parentColumnReader.columnDescriptor, ValuesType.VALUES);
        valueReader.initFromPage(currentPage.getValueCount(), pageDataByteArray.nioBuffer(), (int) readPosInBytes);
      }
    }
    if (parentColumnReader.columnDescriptor.getType() == PrimitiveType.PrimitiveTypeName.BOOLEAN) {
      valueReader = currentPage.getValueEncoding().getValuesReader(parentColumnReader.columnDescriptor, ValuesType.VALUES);
      valueReader.initFromPage(currentPage.getValueCount(), pageDataByteArray.nioBuffer(), (int) readPosInBytes);
    }
    if (currentPage.getValueEncoding().usesDictionary()) {
      // initialize two of the dictionary readers, one is for determining the lengths of each value, the second is for
      // actually copying the values out into the vectors
      dictionaryLengthDeterminingReader = new DictionaryValuesReader(dictionary);
      dictionaryLengthDeterminingReader.initFromPage(currentPage.getValueCount(), pageDataByteArray.nioBuffer(), (int) readPosInBytes);
      dictionaryValueReader = new DictionaryValuesReader(dictionary);
      dictionaryValueReader.initFromPage(currentPage.getValueCount(), pageDataByteArray.nioBuffer(), (int) readPosInBytes);
      parentColumnReader.usingDictionary = true;
    } else {
      parentColumnReader.usingDictionary = false;
    }
    // readPosInBytes is used for actually reading the values after we determine how many will fit in the vector
    // readyToReadPosInBytes serves a similar purpose for the vector types where we must count up the values that will
    // fit one record at a time, such as for variable length data. Both operations must start in the same location after the
    // definition and repetition level data which is stored alongside the page data itself
    readyToReadPosInBytes = readPosInBytes;
    return true;
  }

  // Releases the per-page buffers; called before reading each new page and
  // again from clear().
  public void clearBuffers() {
    for (ByteBuf b : allocatedBuffers) {
      b.release();
    }
    allocatedBuffers.clear();
  }

  // Releases the dictionary buffers, which live for the whole column chunk.
  public void clearDictionaryBuffers() {
    for (ByteBuf b : allocatedDictionaryBuffers) {
      b.release();
    }
    allocatedDictionaryBuffers.clear();
  }

  public void clear(){
    this.dataReader.clear();
    // Free all memory, including fixed length types. (Data is being copied for all types not just var length types)
    //if(!this.parentColumnReader.isFixedLength) {
    clearBuffers();
    clearDictionaryBuffers();
    //}
  }

  /*
   Allocate direct memory to read data into
   */
  private ByteBuf allocateBuffer(int size) {
    ByteBuf b;
    try {
      b = parentColumnReader.parentReader.getOperatorContext().getAllocator().buffer(size);
      //b = UnpooledByteBufAllocator.DEFAULT.heapBuffer(size);
    }catch(Exception e){
      throw new DrillRuntimeException("Unable to allocate "+size+" bytes of memory in the Parquet Reader."+
        "[Exception: "+e.getMessage()+"]"
      );
    }
    return b;
  }
}
| apache-2.0 |
lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/sub1/Class_1385.java | 151 | package fr.javatronic.blog.massive.annotation1.sub1;
import fr.javatronic.blog.processor.Annotation_001;
@Annotation_001
/**
 * One of many identical empty classes annotated with {@code @Annotation_001},
 * generated to stress-test annotation processing with a massive number of
 * annotated classes (see the enclosing "massive" package).
 */
public class Class_1385 {
}
| apache-2.0 |
kelemen/JTrim | subprojects/jtrim-image-transform/src/main/java/org/jtrim2/image/transform/SerialImagePointTransformer.java | 5887 | package org.jtrim2.image.transform;
import java.awt.geom.NoninvertibleTransformException;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import org.jtrim2.utils.ExceptionHelper;
/**
* Defines a coordinate transformation based on a list of other coordinate
* transformations. That is, this coordinate transformation simply applies the
* transformations in the order the transformations were specified at
* construction time.
*
* <h2>Thread safety</h2>
* Instances of this class are safe to be accessed from multiple threads
* concurrently.
*
* <h3>Synchronization transparency</h3>
* Methods of this interface are not <I>synchronization transparent</I> and
* calling them while holding a lock should be avoided.
*/
public final class SerialImagePointTransformer implements ImagePointTransformer {
    private final ImagePointTransformer[] transformers;

    /**
     * Returns a transformation that applies each of the given transformations
     * one after the other, in the order given.
     * <P>
     * The returned instance is not necessarily a
     * {@code SerialImagePointTransformer}: a single transformation is returned
     * as is, and an empty array yields an identity transformation.
     *
     * @param transformers the transformations to apply, in application order.
     *   Cannot be {@code null} and cannot contain {@code null} elements. May
     *   be an empty array, in which case an identity transformation is
     *   returned.
     * @return the combined transformation. Never {@code null}.
     *
     * @throws NullPointerException thrown if the specified array or any of its
     *   elements is {@code null}
     */
    public static ImagePointTransformer combine(ImagePointTransformer... transformers) {
        return combine(Arrays.asList(transformers));
    }

    /**
     * Returns a transformation that applies each of the given transformations
     * one after the other, in the order given.
     * <P>
     * The returned instance is not necessarily a
     * {@code SerialImagePointTransformer}: a single transformation is returned
     * as is, and an empty list yields an identity transformation.
     *
     * @param transformers the transformations to apply, in application order.
     *   Cannot be {@code null} and cannot contain {@code null} elements. May
     *   be an empty list, in which case an identity transformation is
     *   returned.
     * @return the combined transformation. Never {@code null}.
     *
     * @throws NullPointerException thrown if the specified list or any of its
     *   elements is {@code null}
     */
    public static ImagePointTransformer combine(List<? extends ImagePointTransformer> transformers) {
        ImagePointTransformer[] flattened = unfold(transformers);
        if (flattened.length == 0) {
            return AffineImagePointTransformer.IDENTITY;
        }
        return flattened.length == 1
                ? flattened[0]
                : new SerialImagePointTransformer(flattened);
    }

    // Flattens nested SerialImagePointTransformer instances and drops identity
    // transformations. For inputs of at most one element the element itself
    // (or the identity) is returned unchanged in a one-element array.
    private static ImagePointTransformer[] unfold(
            List<? extends ImagePointTransformer> transformers) {
        int count = transformers.size();
        if (count <= 1) {
            ImagePointTransformer single = count == 0
                    ? AffineImagePointTransformer.IDENTITY
                    : transformers.get(0);
            Objects.requireNonNull(single, "transformers[0]");
            return new ImagePointTransformer[]{single};
        }

        List<ImagePointTransformer> flattened = new ArrayList<>();
        for (ImagePointTransformer current: transformers) {
            if (current.getClass() == SerialImagePointTransformer.class) {
                ImagePointTransformer[] nested = ((SerialImagePointTransformer) current).transformers;
                flattened.addAll(Arrays.asList(nested));
            } else if (current != AffineImagePointTransformer.IDENTITY) {
                flattened.add(current);
            }
        }
        return flattened.toArray(new ImagePointTransformer[0]);
    }

    private SerialImagePointTransformer(ImagePointTransformer... transformers) {
        this.transformers = transformers.clone();
        ExceptionHelper.checkNotNullElements(this.transformers, "transformers");
    }

    /**
     * {@inheritDoc }
     * <P>
     * <B>Implementation note</B>: simply invokes
     * {@link ImagePointTransformer#transformSrcToDest(Point2D, Point2D) transformSrcToDest}
     * of every wrapped transformation, in construction order.
     */
    @Override
    public void transformSrcToDest(Point2D src, Point2D dest) {
        dest.setLocation(src);
        for (int i = 0; i < transformers.length; i++) {
            // Transforming in place is safe: each step reads dest before writing it.
            transformers[i].transformSrcToDest(dest, dest);
        }
    }

    /**
     * {@inheritDoc }
     * <P>
     * <B>Implementation note</B>: simply invokes
     * {@link ImagePointTransformer#transformDestToSrc(Point2D, Point2D) transformDestToSrc}
     * of every wrapped transformation, in reverse construction order.
     */
    @Override
    public void transformDestToSrc(Point2D dest, Point2D src) throws NoninvertibleTransformException {
        src.setLocation(dest);
        int i = transformers.length;
        while (--i >= 0) {
            transformers[i].transformDestToSrc(src, src);
        }
    }
}
| apache-2.0 |
SINCConcept/sanalytics | filter-proxy/src/test/java/at/ac/tuwien/dsg/sanalytics/filterproxy/MetricsServletTestWithPromClientTest.java | 255 | package at.ac.tuwien.dsg.sanalytics.filterproxy;
/**
 * Placeholder for a test of the metrics servlet using the Prometheus Java
 * client; no test methods are implemented yet. The link below points at the
 * upstream example this test was apparently meant to follow.
 */
public class MetricsServletTestWithPromClientTest {
    //https://github.com/prometheus/client_java/blob/master/simpleclient_jetty/src/test/java/io/prometheus/client/jetty/JettyStatisticsCollectorTest.java
}
| apache-2.0 |
masukuma/addresslookup | client/LookupAddress/src/com/nobledesignlabs/entities/Address.java | 1960 | package com.nobledesignlabs.entities;
import java.io.Serializable;
public class Address implements Serializable {
private static final long serialVersionUID = 2310640779987082782L;
private double latitude;
private double longitude;
private double altitude;
private int typeofaddress;
private String name;
private String locationname;
private boolean notifywhenqueried;
private String userid;
private String registrationID;
public Address(){
}
public Address(double lati,double longi){
this.latitude=lati;
this.longitude=longi;
}
public Address(double lati,double longi, double altitude ){
this.latitude=lati;
this.longitude=longi;
this.altitude=altitude;
}
public double getLatitude() {
return latitude;
}
public void setLatitude(double latitude) {
this.latitude = latitude;
}
public double getLongitude() {
return longitude;
}
public void setLongitude(double longitude) {
this.longitude = longitude;
}
public double getAltitude() {
return altitude;
}
public void setAltitude(double altitude) {
this.altitude = altitude;
}
public int getTypeofaddress() {
return typeofaddress;
}
public void setTypeofaddress(int typeofaddress) {
this.typeofaddress = typeofaddress;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getLocationname() {
return locationname;
}
public void setLocationname(String locationname) {
this.locationname = locationname;
}
public String getUserid() {
return userid;
}
public void setUserid(String userid) {
this.userid = userid;
}
public boolean isNotifywhenqueried() {
return notifywhenqueried;
}
public void setNotifywhenqueried(boolean notifywhenqueried) {
this.notifywhenqueried = notifywhenqueried;
}
public String getRegistrationID() {
return registrationID;
}
public void setRegistrationID(String registrationID) {
this.registrationID = registrationID;
}
}
| apache-2.0 |
748251120/bainian_audit01 | audit3/src/java/com/gbicc/cfl/util/MinDate.java | 14580 | /**
* Prophet 2.5
* 版权所有:(C)2003-2010 吉贝克
*
*
*/
package com.gbicc.cfl.util;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
/**
* 功能描述: 日期类 精确到分钟
* 特别说明:
* 作 者: 谢立锋
* 创建时间: 2008年8月26日 星期二
*/
public class MinDate {
    private int y;   // year
    private int m;   // month of year, 1..12
    private int d;   // day of month, 1..31
    private int h;   // hour of day, 0..23
    private int min; // minute of hour, 0..59
    private long totalMins; // last result computed by getTotalMins()

    /**
     * Parses a date of the exact fixed-position form {@code "yyyy-MM-dd HH:mm"}.
     * Separator characters at positions 4, 7, 10 and 13 are not validated.
     *
     * @throws NumberFormatException if any numeric field cannot be parsed
     * @throws IndexOutOfBoundsException if the string is shorter than 16 chars
     */
    public MinDate(String date) {
        totalMins = 0;
        // Integer.parseInt accepts leading zeros ("06" -> 6), so no manual
        // zero-stripping is needed.
        y = Integer.parseInt(date.substring(0, 4));
        m = Integer.parseInt(date.substring(5, 7));
        d = Integer.parseInt(date.substring(8, 10));
        h = Integer.parseInt(date.substring(11, 13));
        min = Integer.parseInt(date.substring(14, 16));
    }

    /** Creates a date holding the current system time, truncated to minutes. */
    public MinDate() {
        this(new SimpleDateFormat("yyyy-MM-dd HH:mm").format(new Date()));
    }

    /** @return {@code true} if the stored year is a Gregorian leap year */
    public boolean isLeapYear() {
        return isLeap(y);
    }

    // Gregorian leap-year rule: divisible by 4 but not 100, or divisible by 400.
    private static boolean isLeap(int year) {
        return (year % 4 == 0 && year % 100 != 0) || year % 400 == 0;
    }

    // Number of days in the given month of the given year. Months outside 1..12
    // fall through to 31, matching the original rollover behavior for
    // degenerate inputs such as month 0 (used by the demo in main()).
    private static int daysInMonth(int year, int month) {
        switch (month) {
            case 4:
            case 6:
            case 9:
            case 11:
                return 30;
            case 2:
                return isLeap(year) ? 29 : 28;
            default:
                return 31;
        }
    }

    /** Moves this date back by {@code mins} minutes. */
    public void minusOneMins(int mins) {
        for (int i = 0; i < mins; i++) {
            minusOneMin();
        }
    }

    /**
     * Moves this date back by one minute, rolling hours/days/months/years as
     * needed.
     * <p>
     * Fix: the original rollover table omitted month 6, so stepping back from
     * the first minute of June left the date at "June 0" instead of May 31.
     * Delegating to {@link #minusOneDay()} (which handles every month) removes
     * the duplicated, inconsistent tables.
     */
    public void minusOneMin() {
        min--;
        if (min < 0) {
            min = 59;
            minusOneHour();
        }
    }

    /** Moves this date back by {@code hours} hours. */
    public void minusOneHours(int hours) {
        for (int i = 0; i < hours; i++) {
            minusOneHour();
        }
    }

    /**
     * Moves this date back by one hour (same June-rollover fix as
     * {@link #minusOneMin()}).
     */
    public void minusOneHour() {
        h--;
        if (h < 0) {
            h = 23;
            minusOneDay();
        }
    }

    /** Moves this date back by {@code days} days. */
    public void minusOneDays(int days) {
        for (int i = 0; i < days; i++) {
            minusOneDay();
        }
    }

    /** Moves this date back by one day, rolling months/years as needed. */
    public void minusOneDay() {
        d--;
        if (d == 0) {
            m--;
            if (m == 0) {
                m = 12;
                y--;
            }
            d = daysInMonth(y, m);
        }
    }

    /** Moves this date back by {@code months} months. */
    public void minusOneMonths(int months) {
        for (int i = 0; i < months; i++) {
            minusOneMonth();
        }
    }

    /**
     * Moves this date back by one month. The day of month is kept as is even
     * if it exceeds the length of the target month (original behavior).
     */
    public void minusOneMonth() {
        m--;
        if (m == 0) {
            m = 12;
            y--;
        }
    }

    /** Moves this date back by {@code years} years. */
    public void minusOneYears(int years) {
        for (int i = 0; i < years; i++) {
            minusOneYear();
        }
    }

    /** Moves this date back by one year. Feb 29 is not adjusted (original behavior). */
    public void minusOneYear() {
        y--;
    }

    /** Moves this date forward by {@code mins} minutes. */
    public void addMins(int mins) {
        for (int i = 0; i < mins; i++) {
            addOneMin();
        }
    }

    /** Moves this date forward by one minute, rolling larger fields as needed. */
    public void addOneMin() {
        min++;
        if (min == 60) {
            min = 0;
            addHour();
        }
    }

    /** Moves this date forward by {@code hour} hours. */
    public void addHours(int hour) {
        for (int i = 0; i < hour; i++) {
            addHour();
        }
    }

    /** Moves this date forward by one hour, rolling days/months/years as needed. */
    public void addHour() {
        h++;
        if (h == 24) {
            h = 0;
            addDay();
        }
    }

    /** Moves this date forward by {@code day} days. */
    public void addDays(int day) {
        for (int i = 0; i < day; i++) {
            addDay();
        }
    }

    /** Moves this date forward by one day, rolling months/years as needed. */
    public void addDay() {
        d++;
        if (d > daysInMonth(y, m)) {
            d = 1;
            m++;
            if (m == 13) {
                m = 1;
                y++;
            }
        }
    }

    /** Moves this date forward by {@code month} months. */
    public void addMonths(int month) {
        for (int i = 0; i < month; i++) {
            addMonth();
        }
    }

    /**
     * Moves this date forward by one month. The day of month is kept as is
     * even if it exceeds the length of the target month (original behavior).
     */
    public void addMonth() {
        m++;
        if (m == 13) {
            m = 1;
            y++;
        }
    }

    /** Moves this date forward by {@code year} years. */
    public void addYears(int year) {
        for (int i = 0; i < year; i++) {
            addYear();
        }
    }

    /** Moves this date forward by one year. Feb 29 is not adjusted (original behavior). */
    public void addYear() {
        y++;
    }

    /**
     * @return this date formatted as {@code "y-MM-dd HH:mm"}; the year is not
     *   zero-padded, all other fields are two digits
     */
    public String toString() {
        StringBuilder str = new StringBuilder().append(y).append('-');
        appendTwoDigits(str, m);
        str.append('-');
        appendTwoDigits(str, d);
        str.append(' ');
        appendTwoDigits(str, h);
        str.append(':');
        appendTwoDigits(str, min);
        return str.toString();
    }

    // Appends v zero-padded to two digits (values >= 10 are appended as is).
    private static void appendTwoDigits(StringBuilder sb, int v) {
        if (v < 10) {
            sb.append('0');
        }
        sb.append(v);
    }

    public int getD() {
        return d;
    }

    public void setD(int d) {
        this.d = d;
    }

    public int getM() {
        return m;
    }

    public void setM(int m) {
        this.m = m;
    }

    public int getY() {
        return y;
    }

    public void setY(int y) {
        this.y = y;
    }

    public int getH() {
        return h;
    }

    public void setH(int h) {
        this.h = h;
    }

    public int getMin() {
        return min;
    }

    public void setMin(int min) {
        this.min = min;
    }

    /**
     * Total minutes relative to the (nonexistent) instant 0000-00-00 00:00.
     * Only differences between two values are meaningful. Recomputed on every
     * call.
     * <p>
     * Fix: the multiplication is now done in {@code long} arithmetic so the
     * result no longer overflows {@code int} for years above ~4000.
     */
    public long getTotalMins() {
        // Count leap days in years strictly before the stored year.
        int leapDays = 0;
        for (int i = 0; i < y; i++) {
            if (isLeap(i)) {
                leapDays++;
            }
        }
        // Plus the current year's leap day once it has passed (month > 2).
        if (isLeap(y) && m > 2) {
            leapDays++;
        }
        // Cumulative day count at the start of each month, non-leap year.
        int c[] = { 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365 };
        totalMins = (y * 365L + c[m - 1] + d + leapDays) * 24 * 60 + h * 60 + min;
        return totalMins;
    }

    /**
     * Current date formatted as {@code yyyyMMdd} in GMT+8.
     * <p>
     * NOTE(review): mutates the JVM-wide default time zone as a side effect;
     * kept for compatibility with existing callers.
     */
    public static String getNowDate() {
        TimeZone.setDefault(TimeZone.getTimeZone("GMT+8:00"));
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
        return sdf.format(new Date());
    }

    /**
     * Yesterday's date formatted as {@code yyyyMMdd} in GMT+8 (same
     * default-time-zone caveat as {@link #getNowDate()}).
     */
    public static String getYestardayDate() {
        TimeZone.setDefault(TimeZone.getTimeZone("GMT+8:00"));
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
        Calendar c = Calendar.getInstance();
        c.add(Calendar.DAY_OF_MONTH, -1); // one day back
        return sdf.format(c.getTime());
    }

    /** Ad-hoc demo/test driver (kept for compatibility). */
    public static void main(String[] args) {
        MinDate dd = new MinDate("0000-00-00 00:00");
        for (int i = 1; i < 43603; i++) {
            dd.addOneMin();
            if (dd.getM() > 0) {
                System.out.println(i);
                System.out.println(dd.toString());
                break;
            }
        }
        dd.addOneMin();
        System.out.println(dd.toString());
        dd.addMins(34600);
        System.out.println(dd.toString());
    }
}
| apache-2.0 |
xuraylei/Andorfin_Controller | src/main/java/org/andorfin/protocol/action/ModFlowTableType.java | 156 | package org.andorfin.protocol.action;
/**
 * Byte constants used by flow-table modification actions to mark a table as
 * enabled or disabled (presumably the on-wire protocol values — confirm
 * against the Andorfin protocol specification).
 */
public class ModFlowTableType {
    /** Action value: enable the flow table. */
    public static final byte ENABLE = 0x01;
    /** Action value: disable the flow table. */
    public static final byte DISABLE = 0x02;
}
| apache-2.0 |
T-Nilsson/AndroidAssignment1 | app/src/main/java/dv606/tn222ei/assignment1/AddCountry.java | 1577 | package dv606.tn222ei.assignment1;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
public class AddCountry extends AppCompatActivity {
EditText year;
EditText countryName;
TextView a;
String oneCountry;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_add_country);
Button buttonAddCountry = (Button)findViewById(R.id. button_addCountry);
buttonAddCountry.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
year = (EditText) findViewById(R.id.text_enterYear);
countryName = (EditText) findViewById(R.id.text_enterCountry);
if(countryName.getText().toString().trim().length() == 0 || year.getText().toString().trim().length() == 0 ){
Toast.makeText(v.getContext(), "Fill in fields", Toast.LENGTH_LONG).show();
return;}
oneCountry = (year.getText().toString() + " " + countryName.getText().toString());
Intent result = new Intent(AddCountry.this, MyCountries.class);
result.putExtra("result", oneCountry); // Add key/value pair
setResult(RESULT_OK, result);
finish();
} });
}} | apache-2.0 |
Gigaspaces/xap-openspaces | src/main/java/org/openspaces/admin/pu/elastic/ElasticStatelessProcessingUnitDeployment.java | 8226 | /*******************************************************************************
*
* Copyright (c) 2012 GigaSpaces Technologies Ltd. All rights reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.openspaces.admin.pu.elastic;
import java.io.File;
import org.openspaces.admin.internal.pu.dependency.ProcessingUnitDetailedDependencies;
import org.openspaces.admin.internal.pu.elastic.AbstractElasticProcessingUnitDeployment;
import org.openspaces.admin.pu.dependency.ProcessingUnitDependency;
import org.openspaces.admin.pu.dependency.ProcessingUnitDeploymentDependenciesConfigurer;
import org.openspaces.admin.pu.elastic.config.AutomaticCapacityScaleConfig;
import org.openspaces.admin.pu.elastic.config.EagerScaleConfig;
import org.openspaces.admin.pu.elastic.config.ElasticStatelessProcessingUnitConfig;
import org.openspaces.admin.pu.elastic.config.ManualCapacityPerZonesScaleConfig;
import org.openspaces.admin.pu.elastic.config.ManualCapacityScaleConfig;
import org.openspaces.admin.pu.elastic.topology.ElasticStatelessDeploymentTopology;
import org.openspaces.core.util.MemoryUnit;
import com.gigaspaces.security.directory.UserDetails;
/**
* Defines an elastic processing unit deployment that does not contain a space.
*
* @author itaif
*/
public class ElasticStatelessProcessingUnitDeployment
    extends AbstractElasticProcessingUnitDeployment
    implements ElasticStatelessDeploymentTopology<ProcessingUnitDependency> {

    /**
     * Constructs a stateless processing unit deployment based on the specified processing unit name
     * (should exist under the <code>[GS ROOT]/deploy</code> directory).
     */
    public ElasticStatelessProcessingUnitDeployment(String processingUnit) {
        super(new ElasticStatelessProcessingUnitConfig());
        getConfig().setProcessingUnit(processingUnit);
    }

    /**
     * Constructs a stateless processing unit deployment based on the specified processing unit file path
     * (points either to a processing unit jar/zip file or a directory).
     */
    public ElasticStatelessProcessingUnitDeployment(File processingUnit) {
        this(processingUnit.getAbsolutePath());
    }

    // The scale(...) overrides below only narrow the return type to this class
    // so calls can be chained fluently; behavior is in the superclass.

    @Override
    public ElasticStatelessProcessingUnitDeployment scale(ManualCapacityPerZonesScaleConfig strategy) {
        return (ElasticStatelessProcessingUnitDeployment) super.scale(strategy);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment scale(ManualCapacityScaleConfig strategy) {
        return (ElasticStatelessProcessingUnitDeployment) super.scale(strategy);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment scale(EagerScaleConfig strategy) {
        return (ElasticStatelessProcessingUnitDeployment) super.scale(strategy);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment scale(AutomaticCapacityScaleConfig strategy) {
        return (ElasticStatelessProcessingUnitDeployment) super.scale(strategy);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment name(String name) {
        return (ElasticStatelessProcessingUnitDeployment) super.name(name);
    }

    /**
     * @see #addContextProperty(String,String)
     * @deprecated kept only for backwards compatibility; delegates to
     *   {@link #addContextProperty(String, String)}.
     */
    @Deprecated
    public ElasticStatelessProcessingUnitDeployment setContextProperty(String key, String value) {
        return (ElasticStatelessProcessingUnitDeployment) addContextProperty(key, value);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment secured(boolean secured) {
        return (ElasticStatelessProcessingUnitDeployment) super.secured(secured);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment userDetails(UserDetails userDetails) {
        return (ElasticStatelessProcessingUnitDeployment) super.userDetails(userDetails);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment userDetails(String userName, String password) {
        return (ElasticStatelessProcessingUnitDeployment) super.userDetails(userName, password);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment useScriptToStartContainer() {
        return (ElasticStatelessProcessingUnitDeployment) super.useScriptToStartContainer();
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment overrideCommandLineArguments() {
        return (ElasticStatelessProcessingUnitDeployment) super.overrideCommandLineArguments();
    }

    // NOTE(review): commandLineArgument()/environmentVariable() delegate to
    // their add*() counterparts, which in turn call the super implementations;
    // both spellings funnel through the same superclass logic.

    @Override
    public ElasticStatelessProcessingUnitDeployment commandLineArgument(String vmInputArgument) {
        return addCommandLineArgument(vmInputArgument);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment environmentVariable(String name, String value) {
        return addEnvironmentVariable(name, value);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment addCommandLineArgument(String vmInputArgument) {
        return (ElasticStatelessProcessingUnitDeployment) super.commandLineArgument(vmInputArgument);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment addEnvironmentVariable(String name, String value) {
        return (ElasticStatelessProcessingUnitDeployment) super.environmentVariable(name, value);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment addContextProperty(String key, String value) {
        return (ElasticStatelessProcessingUnitDeployment) super.addContextProperty(key, value);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment dedicatedMachineProvisioning(ElasticMachineProvisioningConfig config) {
        return (ElasticStatelessProcessingUnitDeployment) super.dedicatedMachineProvisioning(config);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment publicMachineProvisioning(ElasticMachineProvisioningConfig config) {
        return (ElasticStatelessProcessingUnitDeployment) super.publicMachineProvisioning(config);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment sharedMachineProvisioning(String sharingId, ElasticMachineProvisioningConfig config) {
        if (sharingId == null) {
            throw new IllegalArgumentException("sharingId can't be null");
        }
        // Note the reversed argument order of the superclass method.
        return (ElasticStatelessProcessingUnitDeployment) super.sharedMachineProvisioning(config, sharingId);
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment memoryCapacityPerContainer(int memoryCapacityPerContainer, MemoryUnit unit) {
        super.memoryCapacityPerContainer(memoryCapacityPerContainer,unit);
        return this;
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment memoryCapacityPerContainer(String memoryCapacityPerContainer) {
        super.memoryCapacityPerContainer(memoryCapacityPerContainer);
        return this;
    }

    /**
     * Adds a dependency on another processing unit: this deployment waits until
     * the named processing unit is deployed.
     */
    @Override
    public ElasticStatelessProcessingUnitDeployment addDependency(String requiredProcessingUnitName) {
        addDependencies(new ProcessingUnitDeploymentDependenciesConfigurer().dependsOnDeployed(requiredProcessingUnitName).create());
        return this;
    }

    @Override
    public ElasticStatelessProcessingUnitDeployment addDependencies(ProcessingUnitDetailedDependencies<? extends ProcessingUnitDependency> detailedDependencies) {
        super.addDependencies(detailedDependencies);
        return this;
    }

    @Override
    protected ElasticStatelessProcessingUnitConfig getConfig() {
        return (ElasticStatelessProcessingUnitConfig) super.getConfig();
    }

    /** @return the accumulated deployment configuration */
    @Override
    public ElasticStatelessProcessingUnitConfig create() {
        return getConfig();
    }
}
| apache-2.0 |
diorcety/intellij-community | platform/core-api/src/com/intellij/openapi/vfs/impl/ArchiveHandler.java | 5199 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vfs.impl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileAttributes;
import com.intellij.openapi.util.io.FileSystemUtil;
import com.intellij.reference.SoftReference;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.lang.ref.Reference;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
public abstract class ArchiveHandler {
  /** Length reported for the archive root and newly created directory entries. */
  public static final long DEFAULT_LENGTH = 0L;
  /** Timestamp reported when none is known (-1 means "unknown"). */
  public static final long DEFAULT_TIMESTAMP = -1L;

  /** Immutable description of a single entry (file or directory) inside the archive. */
  protected static class EntryInfo {
    public final EntryInfo parent;    // parent directory entry; null only for the root
    public final String shortName;    // name within the parent, no path separators
    public final boolean isDirectory;
    public final long length;
    public final long timestamp;

    public EntryInfo(EntryInfo parent, @NotNull String shortName, boolean isDirectory, long length, long timestamp) {
      this.parent = parent;
      this.shortName = shortName;
      this.isDirectory = isDirectory;
      this.length = length;
      this.timestamp = timestamp;
    }
  }

  private final String myPath;
  private final Object myLock = new Object();
  // Soft-referenced cache of relative-path -> entry; rebuilt on demand when the
  // GC has reclaimed it.
  private volatile Reference<Map<String, EntryInfo>> myEntries = new SoftReference<Map<String, EntryInfo>>(null);
  // Latched to true after createEntriesMap() fails once; guarded by myLock.
  // Subsequent calls then return an empty map instead of retrying.
  private boolean myCorrupted = false;

  protected ArchiveHandler(@NotNull String path) {
    myPath = path;
  }

  @NotNull
  public File getFile() {
    return new File(myPath);
  }

  /**
   * Returns attributes for the entry at the given archive-relative path, or
   * {@code null} if it does not exist. The empty path denotes the archive root,
   * which is reported as a directory with default length/timestamp (provided
   * the archive file itself exists).
   */
  @Nullable
  public FileAttributes getAttributes(@NotNull String relativePath) {
    if (relativePath.isEmpty()) {
      FileAttributes attributes = FileSystemUtil.getAttributes(myPath);
      return attributes != null ? new FileAttributes(true, false, false, false, DEFAULT_LENGTH, DEFAULT_TIMESTAMP, false) : null;
    }
    else {
      EntryInfo entry = getEntryInfo(relativePath);
      return entry != null ? new FileAttributes(entry.isDirectory, false, false, false, entry.length, entry.timestamp, false) : null;
    }
  }

  /**
   * Returns the child names of the directory at the given archive-relative
   * path; empty if the path is missing or not a directory.
   */
  @NotNull
  public String[] list(@NotNull String relativePath) {
    EntryInfo entry = getEntryInfo(relativePath);
    if (entry == null || !entry.isDirectory) return ArrayUtil.EMPTY_STRING_ARRAY;

    // Linear scan over all entries; children are those whose parent is this entry.
    Set<String> names = new HashSet<String>();
    for (EntryInfo info : getEntriesMap().values()) {
      if (info.parent == entry) {
        names.add(info.shortName);
      }
    }
    return ArrayUtil.toStringArray(names);
  }

  @Nullable
  protected EntryInfo getEntryInfo(@NotNull String relativePath) {
    return getEntriesMap().get(relativePath);
  }

  /**
   * Returns the (unmodifiable) path-to-entry map, building it lazily. Uses
   * double-checked locking over a volatile soft reference; a failed build marks
   * the archive corrupted and an empty map is returned from then on.
   */
  @NotNull
  protected Map<String, EntryInfo> getEntriesMap() {
    Map<String, EntryInfo> map = SoftReference.dereference(myEntries);
    if (map == null) {
      synchronized (myLock) {
        // Re-check under the lock: another thread may have built the map already.
        map = SoftReference.dereference(myEntries);

        if (map == null) {
          if (myCorrupted) {
            map = Collections.emptyMap();
          }
          else {
            try {
              map = Collections.unmodifiableMap(createEntriesMap());
            }
            catch (Exception e) {
              // Remember the failure so we do not re-parse a broken archive on
              // every access.
              myCorrupted = true;
              Logger.getInstance(getClass()).warn(e.getMessage() + ": " + myPath, e);
              map = Collections.emptyMap();
            }
          }

          myEntries = new SoftReference<Map<String, EntryInfo>>(map);
        }
      }
    }
    return map;
  }

  /** Parses the archive and returns its full path-to-entry map. */
  @NotNull
  protected abstract Map<String, EntryInfo> createEntriesMap() throws IOException;

  @NotNull
  protected EntryInfo createRootEntry() {
    return new EntryInfo(null, "", true, DEFAULT_LENGTH, DEFAULT_TIMESTAMP);
  }

  /**
   * Returns the entry for the given name, creating it (and, recursively, its
   * missing ancestors) as a directory entry with default length/timestamp.
   */
  @NotNull
  protected EntryInfo getOrCreate(@NotNull Map<String, EntryInfo> map, @NotNull String entryName) {
    EntryInfo entry = map.get(entryName);

    if (entry == null) {
      Pair<String, String> path = splitPath(entryName);
      EntryInfo parentEntry = getOrCreate(map, path.first);
      entry = new EntryInfo(parentEntry, path.second, true, DEFAULT_LENGTH, DEFAULT_TIMESTAMP);
      map.put(entryName, entry);
    }

    return entry;
  }

  /**
   * Splits an entry name at its last '/' into (parent path, short name);
   * a name without '/' yields an empty parent (the root).
   */
  @NotNull
  protected Pair<String, String> splitPath(@NotNull String entryName) {
    int p = entryName.lastIndexOf('/');
    String parentName = p > 0 ? entryName.substring(0, p) : "";
    String shortName = p > 0 ? entryName.substring(p + 1) : entryName;
    return Pair.create(parentName, shortName);
  }

  /** Reads and returns the full contents of the entry at the given relative path. */
  @NotNull
  public abstract byte[] contentsToByteArray(@NotNull String relativePath) throws IOException;
}
| apache-2.0 |
dremio/dremio-oss | services/accelerator/src/main/java/com/dremio/service/reflection/ReflectionStatus.java | 4846 | /*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.service.reflection;
/**
* Reflection Status computed from the reflection goal, entry, and existing materializations
*/
public class ReflectionStatus {
  /**
   * Overall status that describes the general state of the reflection,
   * derived from the config, refresh and availability statuses.
   */
  public enum COMBINED_STATUS {
    NONE,
    CAN_ACCELERATE,
    CAN_ACCELERATE_WITH_FAILURES,
    REFRESHING,
    FAILED,      // refresh gave up (REFRESH_STATUS.GIVEN_UP)
    EXPIRED,     // latest materialization expired
    DISABLED,    // reflection is not enabled
    INVALID,     // reflection config is invalid
    INCOMPLETE,  // latest materialization has missing data
    CANNOT_ACCELERATE_SCHEDULED,
    CANNOT_ACCELERATE_MANUAL
  }

  /**
   * Reflection config (definition) validity status
   */
  public enum CONFIG_STATUS {
    OK,
    INVALID
  }

  /**
   * Reflection refresh status
   */
  public enum REFRESH_STATUS {
    MANUAL, // reflection is setup to refresh manually
    SCHEDULED, // next reflection refresh will occur according to the refresh policy
    RUNNING, // reflection refresh currently running
    GIVEN_UP // reflection is in failed state, no more refresh
  }

  /**
   * Reflection availability status
   */
  public enum AVAILABILITY_STATUS {
    NONE, // reflection has no materialization at all
    INCOMPLETE, // reflection has no valid materialization, and latest materialization has missing pdfs data nodes
    EXPIRED, // reflection has no valid materialization, and latest materialization expired
    AVAILABLE // reflection has a valid materialization
  }

  private final CONFIG_STATUS configStatus;
  private final REFRESH_STATUS refreshStatus;
  private final AVAILABILITY_STATUS availabilityStatus;
  private final COMBINED_STATUS combinedStatus; // derived once in the constructor
  private final int numFailures;
  private final long lastDataFetch; // presumably epoch millis — TODO confirm
  private final long expiresAt;     // presumably epoch millis — TODO confirm

  /**
   * @param reflectionEnabled  whether the reflection goal is enabled
   * @param configStatus       validity of the reflection definition
   * @param refreshStatus      current refresh state
   * @param availabilityStatus materialization availability
   * @param numFailures        number of refresh failures recorded
   * @param lastDataFetch      time of the last data fetch
   * @param expiresAt          time at which the materialization expires
   */
  public ReflectionStatus(boolean reflectionEnabled, CONFIG_STATUS configStatus, REFRESH_STATUS refreshStatus,
      AVAILABILITY_STATUS availabilityStatus, int numFailures, long lastDataFetch, long expiresAt) {
    this.configStatus = configStatus;
    this.refreshStatus = refreshStatus;
    this.availabilityStatus = availabilityStatus;
    this.combinedStatus = computeCombinedStatus(reflectionEnabled, configStatus, refreshStatus, availabilityStatus, numFailures > 0);
    this.numFailures = numFailures;
    this.lastDataFetch = lastDataFetch;
    this.expiresAt = expiresAt;
  }

  public CONFIG_STATUS getConfigStatus() {
    return configStatus;
  }

  public REFRESH_STATUS getRefreshStatus() {
    return refreshStatus;
  }

  public AVAILABILITY_STATUS getAvailabilityStatus() {
    return availabilityStatus;
  }

  public COMBINED_STATUS getCombinedStatus() {
    return combinedStatus;
  }

  public int getNumFailures() {
    return numFailures;
  }

  public long getLastDataFetch() {
    return lastDataFetch;
  }

  public long getExpiresAt() {
    return expiresAt;
  }
private static COMBINED_STATUS computeCombinedStatus(boolean reflectionEnabled, final CONFIG_STATUS configStatus,
REFRESH_STATUS refreshStatus, AVAILABILITY_STATUS availabilityStatus, boolean hasFailures) {
if (!reflectionEnabled) {
return COMBINED_STATUS.DISABLED;
}
if (configStatus == CONFIG_STATUS.INVALID) {
return COMBINED_STATUS.INVALID;
}
if (refreshStatus == REFRESH_STATUS.GIVEN_UP) {
return COMBINED_STATUS.FAILED;
} else if (availabilityStatus == AVAILABILITY_STATUS.INCOMPLETE) {
return COMBINED_STATUS.INCOMPLETE;
} else if (availabilityStatus == AVAILABILITY_STATUS.EXPIRED) {
return COMBINED_STATUS.EXPIRED;
} else if (refreshStatus == REFRESH_STATUS.RUNNING) {
if (availabilityStatus == AVAILABILITY_STATUS.AVAILABLE) {
return COMBINED_STATUS.CAN_ACCELERATE;
} else {
return COMBINED_STATUS.REFRESHING;
}
} else if (availabilityStatus == AVAILABILITY_STATUS.AVAILABLE) {
if (hasFailures) {
return COMBINED_STATUS.CAN_ACCELERATE_WITH_FAILURES;
} else {
return COMBINED_STATUS.CAN_ACCELERATE;
}
} else if (refreshStatus == REFRESH_STATUS.SCHEDULED) {
return COMBINED_STATUS.CANNOT_ACCELERATE_SCHEDULED;
} else if (refreshStatus == REFRESH_STATUS.MANUAL) {
return COMBINED_STATUS.CANNOT_ACCELERATE_MANUAL;
}
return COMBINED_STATUS.NONE; // we should never reach this
}
}
| apache-2.0 |
redox/OrientDB | server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/OHttpRequestWrapper.java | 3038 | /*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.server.network.protocol.http;
import java.util.Map;
/**
 * Wrapper to use the HTTP request in functions and scripts. This class mimics the J2EE HTTPRequest class.
 *
 * @author Luca Garulli (l.garulli--at--orientechnologies.com)
 *
 */
public class OHttpRequestWrapper {
  private final OHttpRequest request;

  public OHttpRequestWrapper(final OHttpRequest request) {
    this.request = request;
  }

  /**
   * Returns the request's content.
   *
   * @return The content in form of String
   */
  public String getContent() {
    return request.content;
  }

  /**
   * Gets the request's user name, i.e. the part of the authorization string before the ':' separator.
   *
   * @return The user name in form of String, or null if no authorization was provided
   */
  public String getUser() {
    final String authorization = request.authorization;
    if (authorization == null)
      return null;
    // FIX: the previous code called substring(0, indexOf(":")) unconditionally, which threw
    // StringIndexOutOfBoundsException (substring(0, -1)) when the authorization string
    // contained no ':' separator. Fall back to the whole string in that case.
    final int separator = authorization.indexOf(':');
    return separator > -1 ? authorization.substring(0, separator) : authorization;
  }

  /**
   * Returns the request's content type.
   *
   * @return The content type in form of String
   */
  public String getContentType() {
    return request.contentType;
  }

  /**
   * Return the request's HTTP version.
   *
   * @return The HTTP method version in form of String
   */
  public String getHttpVersion() {
    return request.httpVersion;
  }

  /**
   * Return the request's HTTP method called.
   *
   * @return The HTTP method name in form of String
   */
  public String getHttpMethod() {
    return request.httpMethod;
  }

  /**
   * Return the request's IF-MATCH header.
   *
   * @return The if-match header in form of String
   */
  public String getIfMatch() {
    return request.ifMatch;
  }

  /**
   * Returns if the request has multipart content.
   *
   * @return true if is multipart, otherwise false
   */
  public boolean getisMultipart() {
    return request.isMultipart;
  }

  /**
   * Returns the request's parameters.
   *
   * @return The parameters as a Map&lt;String, String&gt;
   */
  public Map<String, String> getParameters() {
    return request.parameters;
  }

  /**
   * Return the session-id.
   *
   * @return The session-id in form of String
   */
  public String getSessionId() {
    return request.sessionId;
  }

  /**
   * Return the request's URL.
   *
   * @return The URL requested in form of String
   */
  public String getURL() {
    return request.url;
  }
}
| apache-2.0 |
MICommunity/psi-jami | jami-xml/src/test/java/psidev/psi/mi/jami/xml/io/writer/elements/impl/extended/expanded/xml25/XmlModelledBinaryInteractionWriterTest.java | 65629 | package psidev.psi.mi.jami.xml.io.writer.elements.impl.extended.expanded.xml25;
import junit.framework.Assert;
import org.junit.Ignore;
import org.junit.Test;
import psidev.psi.mi.jami.binary.ModelledBinaryInteraction;
import psidev.psi.mi.jami.exception.IllegalRangeException;
import psidev.psi.mi.jami.model.*;
import psidev.psi.mi.jami.model.impl.*;
import psidev.psi.mi.jami.utils.CvTermUtils;
import psidev.psi.mi.jami.utils.InteractorUtils;
import psidev.psi.mi.jami.utils.RangeUtils;
import psidev.psi.mi.jami.xml.cache.PsiXmlObjectCache;
import psidev.psi.mi.jami.xml.cache.InMemoryIdentityObjectCache;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.AbstractXmlWriterTest;
import psidev.psi.mi.jami.xml.model.extension.ExtendedPsiXmlInteraction;
import psidev.psi.mi.jami.xml.model.extension.binary.xml25.XmlModelledBinaryInteraction;
import javax.xml.stream.XMLStreamException;
import java.io.IOException;
import java.math.BigDecimal;
/**
* Unit tester for XmlModelledBinaryInteractionWriter
*
* @author Marine Dumousseau (marine@ebi.ac.uk)
* @version $Id$
* @since <pre>26/11/13</pre>
*/
public class XmlModelledBinaryInteractionWriterTest extends AbstractXmlWriterTest {
private String interaction = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_complex = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactionRef>4</interactionRef>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_complexAsInteractor ="<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>test complex</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>complex</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0314\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_shortName ="<interaction id=\"1\">\n" +
" <names>\n" +
" <shortLabel>interaction test</shortLabel>\n"+
" </names>\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_fullName ="<interaction id=\"1\">\n" +
" <names>\n" +
" <fullName>interaction test</fullName>\n"+
" </names>\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_aliases ="<interaction id=\"1\">\n" +
" <names>\n" +
" <alias type=\"synonym\">interaction synonym</alias>\n"+
" <alias>test</alias>\n"+
" </names>\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_identifier = "<interaction id=\"1\">\n" +
" <xref>\n" +
" <primaryRef db=\"intact\" id=\"EBI-xxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" <secondaryRef db=\"test\" id=\"xxxx1\"/>\n"+
" </xref>\n"+
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_xref = "<interaction id=\"1\">\n" +
" <xref>\n" +
" <primaryRef db=\"test2\" id=\"xxxx2\"/>\n" +
" <secondaryRef db=\"test\" id=\"xxxx1\"/>\n"+
" </xref>\n"+
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_inferred = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" <featureList>\n" +
" <feature id=\"5\">\n" +
" <featureType>\n" +
" <names>\n" +
" <shortLabel>biological feature</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0252\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </featureType>\n" +
" <featureRangeList>\n" +
" <featureRange>\n" +
" <startStatus>\n" +
" <names>\n" +
" <shortLabel>certain</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </startStatus>\n" +
" <begin position=\"1\"/>\n"+
" <endStatus>\n" +
" <names>\n" +
" <shortLabel>certain</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </endStatus>\n" +
" <end position=\"4\"/>\n"+
" </featureRange>\n"+
" </featureRangeList>\n" +
" </feature>\n"+
" </featureList>\n" +
" </participant>\n"+
" <participant id=\"6\">\n" +
" <interactor id=\"7\">\n" +
" <names>\n" +
" <shortLabel>protein test2</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" <featureList>\n" +
" <feature id=\"8\">\n" +
" <featureType>\n" +
" <names>\n" +
" <shortLabel>biological feature</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0252\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </featureType>\n" +
" <featureRangeList>\n" +
" <featureRange>\n" +
" <startStatus>\n" +
" <names>\n" +
" <shortLabel>certain</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </startStatus>\n" +
" <begin position=\"1\"/>\n"+
" <endStatus>\n" +
" <names>\n" +
" <shortLabel>certain</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </endStatus>\n" +
" <end position=\"4\"/>\n"+
" </featureRange>\n"+
" </featureRangeList>\n" +
" </feature>\n"+
" </featureList>\n" +
" </participant>\n"+
" </participantList>\n" +
" <inferredInteractionList>\n" +
" <inferredInteraction>\n" +
" <participant>\n" +
" <participantFeatureRef>5</participantFeatureRef>\n" +
" </participant>\n"+
" <participant>\n" +
" <participantFeatureRef>8</participantFeatureRef>\n" +
" </participant>\n"+
" </inferredInteraction>\n"+
" </inferredInteractionList>\n" +
"</interaction>";
private String interaction_type = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
" <interactionType>\n" +
" <names>\n" +
" <shortLabel>association</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0914\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactionType>\n" +
"</interaction>";
private String interaction_attributes = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
" <attributeList>\n" +
" <attribute name=\"test2\"/>\n"+
" <attribute name=\"test3\"/>\n"+
" <attribute name=\"spoke expansion\" nameAc=\"MI:1060\"/>\n"+
" </attributeList>\n"+
"</interaction>";
private String interaction_registered = "<interaction id=\"2\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"3\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"4\">\n" +
" <interactor id=\"5\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_confidence = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
" <confidenceList>\n" +
" <confidence>\n" +
" <unit>\n" +
" <names>\n" +
" <shortLabel>intact-miscore</shortLabel>\n"+
" </names>\n"+
" </unit>\n" +
" <value>0.8</value>\n" +
" </confidence>\n"+
" </confidenceList>\n" +
"</interaction>";
private String interaction_parameter = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
" <parameterList>\n" +
" <parameter term=\"kd\" base=\"10\" exponent=\"0\" factor=\"5\">\n" +
" <experimentRef>2</experimentRef>\n" +
" </parameter>\n"+
" </parameterList>\n" +
"</interaction>";
private String interaction_preAssembly = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
" <attributeList>\n" +
" <attribute name=\"pre-assembly\" nameAc=\"MI:1158\"/>\n" +
" <attribute name=\"positive cooperative effect\" nameAc=\"MI:1154\"/>\n" +
" <attribute name=\"configurational pre-organization\" nameAc=\"MI:1174\"/>\n"+
" <attribute name=\"affected interaction\" nameAc=\"MI:1150\">5</attribute>\n" +
" </attributeList>\n" +
"</interaction>";
private String interaction_allostery = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
" <attributeList>\n" +
" <attribute name=\"allostery\" nameAc=\"MI:1157\"/>\n" +
" <attribute name=\"allosteric molecule\" nameAc=\"MI:1159\">3</attribute>\n" +
" <attribute name=\"allosteric effector\" nameAc=\"MI:1160\">5</attribute>\n" +
" <attribute name=\"heterotropic allostery\" nameAc=\"MI:1168\"/>\n" +
" <attribute name=\"allosteric change in structure\" nameAc=\"MI:1165\"/>\n" +
" <attribute name=\"positive cooperative effect\" nameAc=\"MI:1154\"/>\n" +
" <attribute name=\"allosteric v-type response\" nameAc=\"MI:1163\"/>\n" +
" <attribute name=\"affected interaction\" nameAc=\"MI:1150\">6</attribute>\n" +
" </attributeList>\n" +
"</interaction>";
private String interaction_intra = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
" <intraMolecular>true</intraMolecular>\n" +
"</interaction>";
    // Shared identity/id cache used by the XML writers; every test clears it first so ids are deterministic.
    private PsiXmlObjectCache elementCache = new InMemoryIdentityObjectCache();
@Test
public void test_write_interaction() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction, output.toString());
}
@Test
public void test_write_participant_complex() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
Complex complex = new DefaultComplex("test complex");
complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein")));
ModelledParticipant participant = new DefaultModelledParticipant(complex);
interaction.addParticipant(participant);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_complex, output.toString());
}
@Test
public void test_write_participant_complex_as_interactor() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
Complex complex = new DefaultComplex("test complex");
complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein")));
ModelledParticipant participant = new DefaultModelledParticipant(complex);
interaction.addParticipant(participant);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.setComplexAsInteractor(true);
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_complexAsInteractor, output.toString());
}
@Test
public void test_write_participant_complex_no_participants() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
Complex complex = new DefaultComplex("test complex");
ModelledParticipant participant = new DefaultModelledParticipant(complex);
interaction.addParticipant(participant);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_complexAsInteractor, output.toString());
}
@Test
public void test_write_interaction_shortName() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction("interaction test");
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_shortName, output.toString());
}
@Test
public void test_write_interaction_fullName() throws XMLStreamException, IOException, IllegalRangeException {
NamedInteraction interaction = new XmlModelledBinaryInteraction();
interaction.setFullName("interaction test");
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write((ModelledBinaryInteraction)interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_fullName, output.toString());
}
@Test
public void test_write_interaction_alias() throws XMLStreamException, IOException, IllegalRangeException {
NamedInteraction interaction = new XmlModelledBinaryInteraction();
interaction.getAliases().add(new DefaultAlias(new DefaultCvTerm("synonym"), "interaction synonym"));
interaction.getAliases().add(new DefaultAlias("test"));
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write((ModelledBinaryInteraction)interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_aliases, output.toString());
}
@Test
public void test_write_interaction_identifier() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
interaction.getIdentifiers().add(new DefaultXref(new DefaultCvTerm("intact"), "EBI-xxx"));
interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1"));
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_identifier, output.toString());
}
@Test
public void test_write_interaction_xref() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test2"), "xxxx2"));
interaction.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1"));
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_xref, output.toString());
}
    @Test
    @Ignore // NOTE(review): disabled in the original source; reason not recorded here
    public void test_write_interaction_inferred() throws XMLStreamException, IOException, IllegalRangeException {
        ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
        ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
        ModelledParticipant participant2 = new DefaultModelledParticipant(new DefaultProtein("protein test2"));
        // One inferred-interaction pair: feature f1 (on participant) is linked to
        // feature f2 (on participant2), both covering range 1-4.
        ModelledFeature f1 = new DefaultModelledFeature();
        f1.getRanges().add(RangeUtils.createRangeFromString("1-4"));
        ModelledFeature f2 = new DefaultModelledFeature();
        f2.getRanges().add(RangeUtils.createRangeFromString("1-4"));
        f1.getLinkedFeatures().add(f2);
        f2.getLinkedFeatures().add(f1);
        participant.addFeature(f1);
        participant2.addFeature(f2);
        interaction.addParticipant(participant);
        interaction.addParticipant(participant2);
        elementCache.clear();
        XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
        writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
        writer.write(interaction);
        streamWriter.flush();
        Assert.assertEquals(this.interaction_inferred, output.toString());
    }
@Test
public void test_write_interaction_type() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
interaction.setInteractionType(CvTermUtils.createMICvTerm("association", "MI:0914"));
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_type, output.toString());
}
@Test
public void test_write_interaction_attributes() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
interaction.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test2")));
interaction.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test3")));
interaction.setComplexExpansion(CvTermUtils.createMICvTerm("spoke expansion", "MI:1060"));
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_attributes, output.toString());
}
@Test
public void test_write_interaction_registered() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
elementCache.clear();
elementCache.extractIdForInteraction(new DefaultInteraction());
elementCache.extractIdForInteraction(interaction);
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_registered, output.toString());
}
@Test
public void test_write_interaction_parameter() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
interaction.getModelledParameters().add(new DefaultModelledParameter(new DefaultCvTerm("kd"), new ParameterValue(new BigDecimal(5))));
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_parameter, output.toString());
}
@Test
public void test_write_interaction_confidence() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
interaction.getModelledConfidences().add(new DefaultModelledConfidence(new DefaultCvTerm("intact-miscore"), "0.8"));
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_confidence, output.toString());
}
@Test
public void test_write_interaction_preassembly() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
Preassembly assembly = new DefaultPreassemby(CvTermUtils.createMICvTerm("positive cooperative effect", "MI:1154"));
assembly.setResponse(CvTermUtils.createMICvTerm("configurational pre-organization", "MI:1174"));
assembly.getAffectedInteractions().add(new DefaultModelledInteraction());
interaction.getCooperativeEffects().add(assembly);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_preAssembly, output.toString());
}
@Test
public void test_write_interaction_preassembly_defaultExperiment() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
Preassembly assembly = new DefaultPreassemby(CvTermUtils.createMICvTerm("positive cooperative effect", "MI:1154"));
assembly.setResponse(CvTermUtils.createMICvTerm("configurational pre-organization", "MI:1174"));
assembly.getAffectedInteractions().add(new DefaultModelledInteraction());
assembly.getCooperativityEvidences().add(new DefaultCooperativityEvidence(new DefaultPublication("xxxxxx")));
interaction.getCooperativeEffects().add(assembly);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("12345")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_preAssembly, output.toString());
}
@Test
public void test_write_interaction_allostery() throws XMLStreamException, IOException, IllegalRangeException {
ModelledBinaryInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
Allostery allostery = new DefaultAllostery(CvTermUtils.createMICvTerm("positive cooperative effect", "MI:1154"),
participant, new DefaultMoleculeEffector(new DefaultModelledParticipant(InteractorUtils.createUnknownBasicInteractor())));
allostery.setResponse(CvTermUtils.createMICvTerm("allosteric v-type response", "MI:1163"));
allostery.getAffectedInteractions().add(new DefaultModelledInteraction());
allostery.setAllostericMechanism(CvTermUtils.createMICvTerm("allosteric change in structure", "MI:1165"));
allostery.setAllosteryType(CvTermUtils.createMICvTerm("heterotropic allostery", "MI:1168"));
interaction.getCooperativeEffects().add(allostery);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write(interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_allostery, output.toString());
}
@Test
public void test_write_interaction_intraMolecular() throws XMLStreamException, IOException, IllegalRangeException {
ExtendedPsiXmlInteraction interaction = new XmlModelledBinaryInteraction();
ModelledParticipant participant = new DefaultModelledParticipant(new DefaultProtein("protein test"));
interaction.addParticipant(participant);
interaction.setIntraMolecular(true);
elementCache.clear();
XmlModelledBinaryInteractionWriter writer = new XmlModelledBinaryInteractionWriter(createStreamWriter(), this.elementCache);
writer.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
writer.write((ModelledBinaryInteraction)interaction);
streamWriter.flush();
Assert.assertEquals(this.interaction_intra, output.toString());
}
}
| apache-2.0 |
chrisnappin/home-media-microservices | tvlistings-service/src/main/java/com/nappin/homemedia/tvlisting/dao/impl/ListingRepository.java | 521 | package com.nappin.homemedia.tvlisting.dao.impl;
import org.springframework.data.mongodb.repository.MongoRepository;
/**
 * Entity repository used to retrieve listings from MongoDB.
 * <p>
 * Spring Data derives the query implementation from the method name at
 * runtime; no implementation class is required.
 */
public interface ListingRepository extends MongoRepository<ProgrammeListing, String> {
    /**
     * Finds a programme listing for the specified date.
     * <p>
     * The query matches the {@code listingDate} property of
     * {@code ProgrammeListing}. The expected date format is whatever the
     * stored documents use (not visible here — confirm against callers).
     *
     * @param listingDate The date
     * @return The programme listing, or <code>null</code> if not found.
     */
    ProgrammeListing findByListingDate(String listingDate);
}
| apache-2.0 |
jefperito/nfe | src/main/java/com/fincatto/documentofiscal/nfe310/classes/lote/consulta/NFLoteConsultaRetorno.java | 3499 | package com.fincatto.documentofiscal.nfe310.classes.lote.consulta;
import java.util.List;
import java.time.LocalDateTime;
import org.simpleframework.xml.*;
import com.fincatto.documentofiscal.DFAmbiente;
import com.fincatto.documentofiscal.DFBase;
import com.fincatto.documentofiscal.DFUnidadeFederativa;
import com.fincatto.documentofiscal.nfe310.classes.NFProtocolo;
@Root(name = "retConsReciNFe")
@Namespace(reference = "http://www.portalfiscal.inf.br/nfe")
public class NFLoteConsultaRetorno extends DFBase {
private static final long serialVersionUID = -4164491132370082153L;
@Attribute(name = "versao", required = true)
private String versao;
@Element(name = "tpAmb", required = true)
private DFAmbiente ambiente;
@Element(name = "verAplic", required = true)
private String versaoAplicacao;
@Element(name = "nRec", required = false)
private String numeroRecibo;
@Element(name = "cStat", required = true)
private String status;
@Element(name = "dhRecbto", required = true)
private LocalDateTime dataHoraRecebimento;
@Element(name = "xMotivo", required = true)
private String motivo;
@Element(name = "cUF", required = true)
private DFUnidadeFederativa uf;
@Element(name = "cMsg", required = false)
private String codigoMessage;
@Element(name = "xMsg", required = false)
private String mensagem;
@ElementList(entry = "protNFe", inline = true, required = false)
protected List<NFProtocolo> protocolos;
public String getVersao() {
return this.versao;
}
public void setVersao(final String versao) {
this.versao = versao;
}
public DFAmbiente getAmbiente() {
return this.ambiente;
}
public void setAmbiente(final DFAmbiente ambiente) {
this.ambiente = ambiente;
}
public String getVersaoAplicacao() {
return this.versaoAplicacao;
}
public void setVersaoAplicacao(final String versaoAplicacao) {
this.versaoAplicacao = versaoAplicacao;
}
public String getNumeroRecibo() {
return this.numeroRecibo;
}
public void setNumeroRecibo(final String numeroRecibo) {
this.numeroRecibo = numeroRecibo;
}
public String getStatus() {
return this.status;
}
public void setStatus(final String status) {
this.status = status;
}
public String getMotivo() {
return this.motivo;
}
public void setMotivo(final String motivo) {
this.motivo = motivo;
}
public DFUnidadeFederativa getUf() {
return this.uf;
}
public void setUf(final DFUnidadeFederativa uf) {
this.uf = uf;
}
public List<NFProtocolo> getProtocolos() {
return this.protocolos;
}
public void setProtocolos(final List<NFProtocolo> protocolos) {
this.protocolos = protocolos;
}
public String getCodigoMessage() {
return this.codigoMessage;
}
public void setCodigoMessage(final String codigoMessage) {
this.codigoMessage = codigoMessage;
}
public String getMensagem() {
return this.mensagem;
}
public void setMensagem(final String mensagem) {
this.mensagem = mensagem;
}
public LocalDateTime getDataHoraRecebimento() {
return this.dataHoraRecebimento;
}
public void setDataHoraRecebimento(final LocalDateTime dataHoraRecebimento) {
this.dataHoraRecebimento = dataHoraRecebimento;
}
} | apache-2.0 |
hbz/metafacture-core | metafacture-mangling/src/main/java/org/metafacture/mangling/StreamEventDiscarder.java | 7775 | /*
* Copyright 2016 Christoph Böhme
*
* Licensed under the Apache License, Version 2.0 the "License";
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.metafacture.mangling;
import java.util.EnumSet;
import org.metafacture.framework.FluxCommand;
import org.metafacture.framework.StreamPipe;
import org.metafacture.framework.StreamReceiver;
import org.metafacture.framework.annotations.In;
import org.metafacture.framework.annotations.Out;
/**
 * Discards stream events by type. The type of a stream event is one of the
 * {@link EventType} values: <i>record</i>, <i>entity</i>, <i>literal</i>,
 * <i>reset-stream</i> or <i>close-stream</i>. Every event whose type is not
 * discarded is forwarded unchanged to the next module.
 * <p>
 * The set of discarded types is configured through
 * {@link #setDiscardedEvents(EnumSet)} or the per-type convenience setters.
 * <p>
 * A typical use is stripping the <i>start-record</i>/<i>end-record</i> events
 * from a record so its contents can be embedded in another record.
 *
 * @author Christoph Böhme
 */
@In(StreamReceiver.class)
@Out(StreamReceiver.class)
@FluxCommand("discard-events")
public class StreamEventDiscarder implements StreamPipe<StreamReceiver> {

    // Downstream module; all non-suppressed events are forwarded to it.
    private StreamReceiver downstream;

    // Event types currently filtered out of the stream.
    private EnumSet<EventType> suppressed = EnumSet.noneOf(EventType.class);

    @Override
    public <R extends StreamReceiver> R setReceiver(final R nextReceiver) {
        downstream = nextReceiver;
        return nextReceiver;
    }

    /**
     * Sets whether {@linkplain EventType#RECORD record} events are discarded.
     * They are not discarded by default.
     * <p>
     * Must not be changed while a stream is being processed; doing so may
     * leave <i>start-record</i> and <i>end-record</i> events unbalanced.
     *
     * @param discard if true, record events are discarded instead of forwarded
     */
    public void setDiscardRecordEvents(final boolean discard) {
        toggleSuppression(EventType.RECORD, discard);
    }

    /**
     * Sets whether {@linkplain EventType#ENTITY entity} events are discarded.
     * They are not discarded by default.
     * <p>
     * Must not be changed while a stream is being processed; doing so may
     * leave <i>start-entity</i> and <i>end-entity</i> events unbalanced.
     *
     * @param discard if true, entity events are discarded instead of forwarded
     */
    public void setDiscardEntityEvents(final boolean discard) {
        toggleSuppression(EventType.ENTITY, discard);
    }

    /**
     * Sets whether {@linkplain EventType#LITERAL literal} events are
     * discarded. They are not discarded by default.
     * <p>
     * Must not be changed while a stream is being processed.
     *
     * @param discard if true, literal events are discarded instead of
     *                forwarded
     */
    public void setDiscardLiteralEvents(final boolean discard) {
        toggleSuppression(EventType.LITERAL, discard);
    }

    /**
     * Sets whether the {@linkplain EventType#RESET_STREAM reset-stream} and
     * {@linkplain EventType#CLOSE_STREAM close-stream} lifecycle events are
     * discarded. They are not discarded by default.
     * <p>
     * Must not be changed while a stream is being processed.
     *
     * @param discard if true, lifecycle events are discarded instead of
     *                forwarded
     */
    public void setDiscardLifecycleEvents(final boolean discard) {
        toggleSuppression(EventType.RESET_STREAM, discard);
        toggleSuppression(EventType.CLOSE_STREAM, discard);
    }

    // Adds or removes a single event type from the suppression set.
    private void toggleSuppression(final EventType type, final boolean discard) {
        if (discard) {
            suppressed.add(type);
        }
        else {
            suppressed.remove(type);
        }
    }

    /**
     * Returns the event types which are currently discarded.
     *
     * @return a defensive copy of the set; modifying it does not affect this
     *         module
     */
    public EnumSet<EventType> getDiscardedEvents() {
        return EnumSet.copyOf(suppressed);
    }

    /**
     * Sets the event types to discard. Nothing is discarded by default.
     * <p>
     * Must not be changed while a stream is being processed; doing so may
     * leave start and end events unbalanced.
     *
     * @param discardedEvents the types to discard; the set is copied, so
     *                        later changes to it do not affect this module
     */
    public void setDiscardedEvents(final EnumSet<EventType> discardedEvents) {
        suppressed = EnumSet.copyOf(discardedEvents);
    }

    @Override
    public void startRecord(final String identifier) {
        if (suppressed.contains(EventType.RECORD)) {
            return;
        }
        downstream.startRecord(identifier);
    }

    @Override
    public void endRecord() {
        if (suppressed.contains(EventType.RECORD)) {
            return;
        }
        downstream.endRecord();
    }

    @Override
    public void startEntity(final String name) {
        if (suppressed.contains(EventType.ENTITY)) {
            return;
        }
        downstream.startEntity(name);
    }

    @Override
    public void endEntity() {
        if (suppressed.contains(EventType.ENTITY)) {
            return;
        }
        downstream.endEntity();
    }

    @Override
    public void literal(final String name, final String value) {
        if (suppressed.contains(EventType.LITERAL)) {
            return;
        }
        downstream.literal(name, value);
    }

    @Override
    public void resetStream() {
        if (suppressed.contains(EventType.RESET_STREAM)) {
            return;
        }
        downstream.resetStream();
    }

    @Override
    public void closeStream() {
        if (suppressed.contains(EventType.CLOSE_STREAM)) {
            return;
        }
        downstream.closeStream();
    }

    /**
     * Types representing stream and lifecycle events.
     */
    public enum EventType {
        /** <i>start-record</i> and <i>end-record</i> stream events. */
        RECORD,
        /** <i>start-entity</i> and <i>end-entity</i> stream events. */
        ENTITY,
        /** <i>literal</i> stream events. */
        LITERAL,
        /** The <i>reset-stream</i> lifecycle event. */
        RESET_STREAM,
        /** The <i>close-stream</i> lifecycle event. */
        CLOSE_STREAM
    }
}
| apache-2.0 |
felipenami/SISP | Implementação/projectmanager/src/main/java/br/edu/udc/projectmanager/controller/HomeController.java | 2779 | package br.edu.udc.projectmanager.controller;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.ModelAndView;
import br.edu.udc.projectmanager.entity.Activity;
import br.edu.udc.projectmanager.entity.Project;
import br.edu.udc.projectmanager.entity.User;
import br.edu.udc.projectmanager.service.ProjectService;
/**
 * REST controller exposing the project, activity and user endpoints consumed
 * by the front end.
 * <p>
 * Note: {@code @RestController} already implies {@code @Controller}, so the
 * previously duplicated annotation has been removed.
 */
@RestController
public class HomeController {

    /** Business service for projects, activities and users. */
    @Autowired
    private ProjectService projectService;

    /**
     * Serves the application entry page.
     *
     * @return the static index view
     */
    @RequestMapping("/")
    public ModelAndView menu() {
        // ModelAndView return values are resolved as views even on a @RestController.
        return new ModelAndView("/index.html");
    }

    /*-------------------------------------------------------------------
     * PROJECT
     *-------------------------------------------------------------------*/

    /**
     * Lists every registered project.
     *
     * @return all projects
     */
    @RequestMapping("/projectList")
    public List<Project> projectList() {
        return projectService.findAll();
    }

    /**
     * Lists every registered user.
     *
     * @return all users
     */
    @RequestMapping("/UserList")
    public List<User> userList() {
        return projectService.findAllUsers();
    }

    /**
     * Persists a new project.
     *
     * @param project the project to store
     * @return the stored project
     */
    @RequestMapping("/projectSave")
    public Project save(@RequestBody Project project) {
        return projectService.insertProjeto(project);
    }

    /**
     * Persists a new activity.
     *
     * @param activity the activity to store
     * @return the stored activity
     */
    @RequestMapping("/activitySave")
    public Activity save(@RequestBody Activity activity) {
        return projectService.insertAtividade(activity);
    }

    /**
     * Looks up a project by its identifier.
     *
     * @param id the project id
     * @return the matching project, or {@code null} if the service finds none
     */
    @RequestMapping("/projectFindById")
    public Project findById(@RequestBody Long id) {
        return projectService.findById(id);
    }

    /**
     * Updates an existing project.
     *
     * @param project the project carrying the updated values
     * @return the updated project
     */
    @RequestMapping("/updateProject")
    public Project updateProject(@RequestBody Project project) {
        return projectService.updateProject(project);
    }

    /**
     * Removes a project.
     *
     * @param projectId id of the project to remove
     */
    @RequestMapping("/deleteProject")
    public void deleteProject(@RequestBody Long projectId) {
        // fixed: the original statement ended in a stray double semicolon
        projectService.removeProject(projectId);
    }

    /*-------------------------------------------------------------------
     * ACTIVITY
     *-------------------------------------------------------------------*/

    /**
     * Lists the activities belonging to a project.
     *
     * @param projectId id of the owning project
     * @return the project's activities
     */
    @RequestMapping("/findActivityByProjectId")
    public List<Activity> findActivityByProjectId(@RequestBody Long projectId) {
        return projectService.findActivityByProjectId(projectId);
    }
}
| apache-2.0 |
pghazal/NoMoreLine | Java/src/main/java/fr/ece/pfe_project/panel/ToolbarEntityPanel.java | 10187 | package fr.ece.pfe_project.panel;
import fr.ece.pfe_project.interfaces.ToolbarEntityListener;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JToggleButton;
import fr.ece.pfe_project.panel.MainPanel.ToolbarsListener;
/**
 * Left-hand toolbar holding a set of mutually exclusive entity toggle
 * buttons; at most one button is selected at a time and each selection
 * change is reported to the {@link ToolbarsListener}.
 *
 * @author pierreghazal
 */
public class ToolbarEntityPanel extends javax.swing.JPanel implements ActionListener {

    /** Callback notified whenever the active entity changes. */
    private final ToolbarsListener toolbarsListener;

    /**
     * Creates new form ToolbarEntity.
     *
     * @param listener callback notified of entity selection changes
     */
    public ToolbarEntityPanel(ToolbarsListener listener) {
        initComponents();
        this.toolbarsListener = listener;
        this.cameraButton.addActionListener(this);
        this.excelButton.addActionListener(this);
        this.listingVolsButton.addActionListener(this);
        this.carnetAdressesButton.addActionListener(this);
        this.planButton.addActionListener(this);
    }

    /**
     * Dispatches a button click to the shared exclusive-toggle logic.
     * Clicking a button that has just been deselected reports
     * {@code ENTITY.NONE}; otherwise the clicked button becomes the sole
     * selection and its entity is reported.
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        final Object source = e.getSource();
        if (source == this.cameraButton) {
            toggleExclusive(this.cameraButton, ToolbarEntityListener.ENTITY.CAMERA);
        } else if (source == this.excelButton) {
            toggleExclusive(this.excelButton, ToolbarEntityListener.ENTITY.EXCELROW);
        } else if (source == this.listingVolsButton) {
            toggleExclusive(this.listingVolsButton, ToolbarEntityListener.ENTITY.LISTINGVOLS);
        } else if (source == this.carnetAdressesButton) {
            toggleExclusive(this.carnetAdressesButton, ToolbarEntityListener.ENTITY.CARNETADRESSE);
        } else if (source == this.planButton) {
            toggleExclusive(this.planButton, ToolbarEntityListener.ENTITY.PLAN);
        }
    }

    /**
     * Common toggle behaviour that was previously duplicated five times in
     * {@code actionPerformed}: when {@code button} has just been deselected,
     * report {@code ENTITY.NONE}; otherwise make it the single selected
     * button and report {@code entity}.
     *
     * @param button the toggle button that fired the event
     * @param entity the entity represented by that button
     */
    private void toggleExclusive(JToggleButton button, ToolbarEntityListener.ENTITY entity) {
        if (!button.isSelected()) {
            button.setSelected(false);
            this.toolbarsListener.entityHasChange(ToolbarEntityListener.ENTITY.NONE);
        } else {
            this.cameraButton.setSelected(button == this.cameraButton);
            this.excelButton.setSelected(button == this.excelButton);
            this.planButton.setSelected(button == this.planButton);
            this.listingVolsButton.setSelected(button == this.listingVolsButton);
            this.carnetAdressesButton.setSelected(button == this.carnetAdressesButton);
            this.toolbarsListener.entityHasChange(entity);
        }
    }

    /** Deselects every toggle button in this panel and reports {@code ENTITY.NONE}. */
    public void resetToggleButtons() {
        this.toolbarsListener.entityHasChange(ToolbarEntityListener.ENTITY.NONE);
        // Unselect every JToggleButton in the Container
        for (Component button : this.getComponents()) {
            if (button instanceof JToggleButton) {
                ((JToggleButton) button).setSelected(false);
            }
        }
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        cameraButton = new javax.swing.JToggleButton();
        excelButton = new javax.swing.JToggleButton();
        listingVolsButton = new javax.swing.JToggleButton();
        carnetAdressesButton = new javax.swing.JToggleButton();
        planButton = new javax.swing.JToggleButton();
        setMaximumSize(new java.awt.Dimension(90, 32767));
        setPreferredSize(new java.awt.Dimension(90, 600));
        cameraButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/nomoreline/img/video.png"))); // NOI18N
        cameraButton.setToolTipText("Système de détection par caméra");
        excelButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/nomoreline/img/excel.png"))); // NOI18N
        excelButton.setToolTipText("Fréquentations journalières");
        excelButton.setMaximumSize(new java.awt.Dimension(104, 29));
        excelButton.setMinimumSize(new java.awt.Dimension(104, 29));
        excelButton.setPreferredSize(new java.awt.Dimension(104, 29));
        listingVolsButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/nomoreline/img/plane.png"))); // NOI18N
        listingVolsButton.setToolTipText("Liste des vols");
        carnetAdressesButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/nomoreline/img/directory.png"))); // NOI18N
        carnetAdressesButton.setToolTipText("Carnet d'adresses");
        planButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/nomoreline/img/map.png"))); // NOI18N
        planButton.setToolTipText("Plan de l'aéroport");
        planButton.setMaximumSize(new java.awt.Dimension(64, 72));
        planButton.setMinimumSize(new java.awt.Dimension(64, 72));
        planButton.setPreferredSize(new java.awt.Dimension(64, 72));
        planButton.setSize(new java.awt.Dimension(64, 72));
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                    .addComponent(cameraButton, javax.swing.GroupLayout.DEFAULT_SIZE, 79, Short.MAX_VALUE)
                    .addComponent(listingVolsButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(carnetAdressesButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(planButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addComponent(excelButton, javax.swing.GroupLayout.PREFERRED_SIZE, 79, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(40, 40, 40)
                .addComponent(cameraButton, javax.swing.GroupLayout.PREFERRED_SIZE, 75, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(planButton, javax.swing.GroupLayout.PREFERRED_SIZE, 74, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(excelButton, javax.swing.GroupLayout.PREFERRED_SIZE, 68, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(2, 2, 2)
                .addComponent(carnetAdressesButton, javax.swing.GroupLayout.PREFERRED_SIZE, 70, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(listingVolsButton, javax.swing.GroupLayout.PREFERRED_SIZE, 70, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(183, Short.MAX_VALUE))
        );
    }// </editor-fold>//GEN-END:initComponents

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JToggleButton cameraButton;
    private javax.swing.JToggleButton carnetAdressesButton;
    private javax.swing.JToggleButton excelButton;
    private javax.swing.JToggleButton listingVolsButton;
    private javax.swing.JToggleButton planButton;
    // End of variables declaration//GEN-END:variables
}
| apache-2.0 |
qiqitangtang/gradleDemo | src/main/java/org/effectivejava/examples/chapter06/item35/ExceptionTest.java | 543 | // Annotation type with an array parameter - Page 173
package org.effectivejava.examples.chapter06.item35;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Indicates that the annotated method is a test method that must throw any
 * of the designated exceptions to succeed.
 * <p>
 * Retained at runtime so a test runner can discover it reflectively; only
 * applicable to methods.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface ExceptionTest {
    // Exception types, any one of which the test method must throw to pass.
    Class<? extends Exception>[] value();
}
| apache-2.0 |
AlexeyOs/Alexey | chapter_004/src/main/java/ru/osetsky/stream/Student.java | 496 | package ru.osetsky.stream;
/**
 * Simple mutable value holder pairing a student's surname with a score.
 */
public class Student {

    // Student's family name.
    private String surname;
    // Score achieved by the student.
    private int score;

    /**
     * Creates a student.
     *
     * @param surname the student's family name
     * @param score   the student's score
     */
    public Student(String surname, int score) {
        this.score = score;
        this.surname = surname;
    }

    public String getSurname() {
        return this.surname;
    }

    public void setSurname(String surname) {
        this.surname = surname;
    }

    public int getScore() {
        return this.score;
    }

    public void setScore(int score) {
        this.score = score;
    }
}
| apache-2.0 |
PkayJava/FAClient | src/main/java/com/angkorteam/finance/server/widget/ApplicationFeedbackPanel.java | 2027 | package com.angkorteam.finance.server.widget;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Page;
import org.apache.wicket.feedback.FeedbackMessage;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
/**
 * Created by socheatkhauv on 4/8/17.
 *
 * Feedback panel that renders only messages whose severity matches the
 * configured {@link Level}, and styles the message list with the matching
 * "callout" CSS class.
 */
public class ApplicationFeedbackPanel extends FeedbackPanel {

    private Level level;

    public ApplicationFeedbackPanel(String id, Level level) {
        super(id, message -> {
            // Only messages reported by a form, a page, or with no reporter pass.
            Object reporter = message.getReporter();
            boolean fromFormOrPage = reporter == null || reporter instanceof Form || reporter instanceof Page;
            if (!fromFormOrPage) {
                return false;
            }
            int severity = message.getLevel();
            switch (level) {
                case Danger:
                    return severity == FeedbackMessage.ERROR
                            || severity == FeedbackMessage.DEBUG
                            || severity == FeedbackMessage.FATAL;
                case Info:
                    return severity == FeedbackMessage.INFO;
                case Success:
                    return severity == FeedbackMessage.SUCCESS;
                case Warning:
                    return severity == FeedbackMessage.WARNING;
                default:
                    return false;
            }
        });
        this.level = level;
    }

    @Override
    protected void onBeforeRender() {
        super.onBeforeRender();
        WebMarkupContainer feedbackul = (WebMarkupContainer) get("feedbackul");
        feedbackul.add(AttributeModifier.replace("class", this.level.getCssClass()));
    }

    public enum Level {
        Danger("callout callout-danger"),
        Info("callout callout-info"),
        Warning("callout callout-warning"),
        Success("callout callout-success");

        private final String cssClass;

        Level(String cssClass) {
            this.cssClass = cssClass;
        }

        public String getCssClass() {
            return this.cssClass;
        }
    }
}
| apache-2.0 |
jexp/idea2 | plugins/groovy/src/org/jetbrains/plugins/groovy/intentions/conversions/IndexingMethodConversionIntention.java | 2523 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.intentions.conversions;
import com.intellij.psi.PsiElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.groovy.intentions.base.*;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.path.GrMethodCallExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.arguments.GrArgumentList;
/**
 * Intention that converts explicit index-accessor method calls
 * ({@code getAt}/{@code get} and their setter counterparts) into Groovy
 * index expressions ({@code a[b]} and {@code a[b] = c}).
 */
public class IndexingMethodConversionIntention extends Intention {

    @NotNull
    public PsiElementPredicate getElementPredicate() {
        return new IndexingMethodConversionPredicate();
    }

    public void processIntention(@NotNull PsiElement element)
            throws IncorrectOperationException {
        final GrMethodCallExpression callExpression = (GrMethodCallExpression) element;
        final GrArgumentList argList = callExpression.getArgumentList();
        final GrExpression[] arguments = argList.getExpressionArguments();
        final GrReferenceExpression methodExpression =
                (GrReferenceExpression) callExpression.getInvokedExpression();
        final IElementType referenceType = methodExpression.getDotTokenType();
        final String methodName = methodExpression.getName();
        final GrExpression qualifier = methodExpression.getQualifierExpression();

        // Readers become "q[arg]"; everything else is a writer "q[arg]=value".
        final String replacement;
        if ("getAt".equals(methodName) || "get".equals(methodName)) {
            replacement = qualifier.getText() + '[' + arguments[0].getText() + ']';
        } else {
            replacement = qualifier.getText() + '[' + arguments[0].getText() + "]=" + arguments[1].getText();
        }
        IntentionUtils.replaceExpression(replacement, callExpression);
    }
}
| apache-2.0 |
fugeritaetas/morozko-lib | java14-morozko/org.morozko.java.core/src/org/morozko/java/core/jvfs/fun/JFileFunSafe.java | 1820 | /*****************************************************************
<copyright>
Morozko Java Library org.morozko.java.core
Copyright (c) 2006 Morozko
All rights reserved. This program and the accompanying materials
are made available under the terms of the Apache License v2.0
which accompanies this distribution, and is available at
http://www.apache.org/licenses/
(txt version : http://www.apache.org/licenses/LICENSE-2.0.txt
html version : http://www.apache.org/licenses/LICENSE-2.0.html)
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
</copyright>
*****************************************************************/
/*
* @(#)JFileFunSafe.java
*
* @project : org.morozko.java.core
* @package : org.morozko.java.core.jvfs.fun
* @creation : 17-gen-2005 0.13.57
* @release : xxxx.xx.xx
*/
package org.morozko.java.core.jvfs.fun;
import java.io.IOException;
import org.morozko.java.core.jvfs.JFile;
import org.morozko.java.core.jvfs.JFileFun;
import org.morozko.java.core.lang.ExHandler;
/**
 * <p>A {@code JFileFunWrapper} decorator that makes the wrapped function
 * "safe": any {@link IOException} thrown by the delegate is caught and
 * routed to an {@link ExHandler} instead of propagating to the caller.</p>
 *
 * @author Matteo Franci aka TUX2
 */
public class JFileFunSafe extends JFileFunWrapper {

    // Receives any IOException thrown by the wrapped function.
    // Made final: it is assigned only in the constructor.
    private final ExHandler handler;

    /**
     * <p>Creates a new JFileFunSafe.</p>
     *
     * @param fileFun the file function to wrap
     * @param handler the handler notified of I/O errors
     */
    public JFileFunSafe(JFileFun fileFun, ExHandler handler) {
        super(fileFun);
        this.handler = handler;
    }

    /* (non-Javadoc)
     * @see org.morozko.java.core.jvfs.JFileFun#handle(org.morozko.java.core.jvfs.JFile)
     */
    public void handle(JFile file) throws IOException {
        try {
            super.handle(file);
        } catch (IOException ioe) {
            // Intentionally swallow the exception and delegate it to the
            // configured handler; this is the "safe" behaviour of this wrapper.
            this.handler.error(ioe);
        }
    }
}
| apache-2.0 |
dbarentine/totalconnect | totalconnect/src/main/java/com/barentine/totalconnect/ws/GetEventMetaDataExResponse.java | 1772 |
package com.barentine.totalconnect.ws;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="GetEventMetaDataExResult" type="{https://services.alarmnet.com/TC2/}EventMetaDataExResult" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE(review): appears to be wsimport/JAXB-generated; prefer regenerating
// from the WSDL over hand edits.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "getEventMetaDataExResult"
})
@XmlRootElement(name = "GetEventMetaDataExResponse")
public class GetEventMetaDataExResponse {

    // Payload of the SOAP response; null when the service returned no result.
    @XmlElement(name = "GetEventMetaDataExResult")
    protected EventMetaDataExResult getEventMetaDataExResult;

    /**
     * Gets the value of the getEventMetaDataExResult property.
     *
     * @return
     *     possible object is
     *     {@link EventMetaDataExResult }
     *
     */
    public EventMetaDataExResult getGetEventMetaDataExResult() {
        return getEventMetaDataExResult;
    }

    /**
     * Sets the value of the getEventMetaDataExResult property.
     *
     * @param value
     *     allowed object is
     *     {@link EventMetaDataExResult }
     *
     */
    public void setGetEventMetaDataExResult(EventMetaDataExResult value) {
        this.getEventMetaDataExResult = value;
    }
}
| apache-2.0 |
jbrasileiro/resume-online | resume-online-backend/resume-online-commons/src/main/java/resumeonline/commons/ResourceThreadLoader.java | 1040 | package resumeonline.commons;
import java.io.InputStream;
import java.net.URL;
import resumeonline.commons.exeception.NoNewInstanceAllowed;
/**
 * Resource-loading helpers based on the current thread's context class loader.
 *
 * @deprecated
 * Class will be removed.
 * Use resumeonline.commons.classloader.ResourceLoaderCL
 */
@Deprecated
public final class ResourceThreadLoader {

    // Utility class: instantiation is forbidden, even via reflection.
    private ResourceThreadLoader() {
        super();
        throw new NoNewInstanceAllowed(getClass());
    }

    /**
     * Resolves a resource URL using the current thread's context class loader.
     *
     * @param name the resource name
     * @return the resource URL as reported by the class loader
     */
    public static URL getResource(
        final String name) {
        return getResource(Thread.currentThread(), name);
    }

    /**
     * Resolves a resource URL using the given thread's context class loader.
     *
     * @param thread the thread whose context class loader is used
     * @param name the resource name
     * @return the resource URL as reported by the class loader
     */
    public static URL getResource(
        final Thread thread,
        final String name) {
        ClassLoader classLoader = thread.getContextClassLoader();
        return ResourceClassLoader.getResource(classLoader, name);
    }

    /**
     * Convenience overload mirroring {@link #getResource(String)}: opens a
     * resource stream using the current thread's context class loader.
     * (Added for API symmetry — the single-argument form was missing.)
     *
     * @param name the resource name
     * @return the resource stream as reported by the class loader
     */
    public static InputStream getResourceAsStream(
        final String name) {
        return getResourceAsStream(Thread.currentThread(), name);
    }

    /**
     * Opens a resource stream using the given thread's context class loader.
     *
     * @param thread the thread whose context class loader is used
     * @param name the resource name
     * @return the resource stream as reported by the class loader
     */
    public static InputStream getResourceAsStream(
        final Thread thread,
        final String name) {
        ClassLoader classLoader = thread.getContextClassLoader();
        return ResourceClassLoader.getResourceAsStream(classLoader, name);
    }
}
| apache-2.0 |
lettuce-io/lettuce-core | src/main/java/io/lettuce/core/dynamic/support/AnnotationParameterNameDiscoverer.java | 2335 | /*
* Copyright 2011-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lettuce.core.dynamic.support;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import io.lettuce.core.dynamic.annotation.Param;
/**
 * {@link ParameterNameDiscoverer} based on {@link Param} annotations to resolve parameter names.
 *
 * @author Mark Paluch
 */
public class AnnotationParameterNameDiscoverer implements ParameterNameDiscoverer {

    @Override
    public String[] getParameterNames(Method method) {
        if (method.getParameterCount() == 0) {
            return new String[0];
        }
        return doGetParameterNames(method.getParameterAnnotations());
    }

    @Override
    public String[] getParameterNames(Constructor<?> ctor) {
        if (ctor.getParameterCount() == 0) {
            return new String[0];
        }
        return doGetParameterNames(ctor.getParameterAnnotations());
    }

    /**
     * Resolves one name per parameter from its {@link Param} annotation.
     *
     * @param parameterAnnotations the annotations declared on each parameter
     * @return the discovered names, or {@code null} if any parameter lacks a
     *         {@link Param} annotation (meaning names cannot be determined)
     */
    protected String[] doGetParameterNames(Annotation[][] parameterAnnotations) {
        List<String> names = new ArrayList<>(parameterAnnotations.length);
        for (Annotation[] annotations : parameterAnnotations) {
            String name = resolveParamName(annotations);
            if (name == null) {
                // One unannotated parameter invalidates the whole signature.
                return null;
            }
            names.add(name);
        }
        return names.toArray(new String[0]);
    }

    /**
     * Returns the value of the first {@link Param} annotation in the array,
     * or {@code null} when none is present.
     */
    private static String resolveParamName(Annotation[] annotations) {
        for (Annotation annotation : annotations) {
            if (annotation.annotationType().equals(Param.class)) {
                return ((Param) annotation).value();
            }
        }
        return null;
    }
}
| apache-2.0 |
McLeodMoores/starling | projects/master/src/main/java/com/opengamma/master/position/ManageablePosition.java | 27645 | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.position;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.DerivedProperty;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.google.common.collect.Maps;
import com.opengamma.core.position.impl.SimplePosition;
import com.opengamma.core.security.Security;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.id.MutableUniqueIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.UniqueId;
import com.opengamma.id.UniqueIdentifiable;
import com.opengamma.master.security.ManageableSecurityLink;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.JdkUtils;
import com.opengamma.util.PublicSPI;
/**
* A position held in a position master.
* <p>
* A position is fundamentally a quantity of a security.
* For example, a position might be 50 shares of OpenGamma.
* <p>
* Positions are formed from a set of trades, however trade data may not always be available or complete.
* Even if trade data is available, the position details cannot necessarily be derived from the trades.
* Therefore the position holds the quantity and security reference directly, separately
* from the underlying trades.
* <p>
* Positions are logically attached to nodes in the portfolio tree, however they are
* stored and returned separately from the position master.
*/
@PublicSPI
@BeanDefinition
public class ManageablePosition extends DirectBean
implements MutableUniqueIdentifiable, UniqueIdentifiable, Serializable {
/** Serialization version. */
private static final long serialVersionUID = 1L;
/**
* The unique identifier of the position.
* This must be null when adding to a master and not null when retrieved from a master.
*/
@PropertyDefinition(overrideGet = true, overrideSet = true)
private UniqueId _uniqueId;
/**
* The number of units in the position.
* This field must not be null for the object to be valid.
*/
@PropertyDefinition(validate = "notNull")
private BigDecimal _quantity;
/**
* The link referencing the security, not null.
* This may also hold the resolved security.
*/
@PropertyDefinition(validate = "notNull")
private ManageableSecurityLink _securityLink;
/**
* The trades that the make up the position, not null.
* An empty list usually means that trade data is unavailable.
*/
@PropertyDefinition
private final List<ManageableTrade> _trades = new ArrayList<>();
/**
* The general purpose position attributes.
* These can be used to add arbitrary additional information to the object
* and for aggregating in portfolios.
*/
@PropertyDefinition(validate = "notNull")
private final Map<String, String> _attributes = Maps.newHashMap();
/**
* The provider external identifier for the data.
* This optional field can be used to capture the identifier used by the data provider.
* This can be useful when receiving updates from the same provider.
*/
@PropertyDefinition
private ExternalId _providerId;
/**
 * Construct an empty instance that must be populated via setters.
 */
public ManageablePosition() {
    // Start with an empty (unresolved) security link so the field is never null.
    _securityLink = new ManageableSecurityLink();
}
/**
 * Creates a position from an amount of a security.
 *
 * @param quantity the amount of the position, not null
 * @param securityId the security identifier, not null
 */
public ManageablePosition(final BigDecimal quantity, final ExternalId securityId) {
    // Validate before assigning any state.
    ArgumentChecker.notNull(quantity, "quantity");
    ArgumentChecker.notNull(securityId, "securityId");
    _quantity = quantity;
    // Wrap the single identifier in an (unresolved) security link.
    _securityLink = new ManageableSecurityLink(securityId);
}
/**
 * Creates a position from an amount of a security.
 *
 * @param quantity the amount of the position, not null
 * @param securityId the security bundle, not null
 */
public ManageablePosition(final BigDecimal quantity, final ExternalIdBundle securityId) {
    // Validate before assigning any state.
    ArgumentChecker.notNull(quantity, "quantity");
    ArgumentChecker.notNull(securityId, "securityId");
    _quantity = quantity;
    // Wrap the identifier bundle in an (unresolved) security link.
    _securityLink = new ManageableSecurityLink(securityId);
}
/**
 * Creates a deep copy of the specified position.
 *
 * @param copyFrom the position to copy from, not null
 */
public ManageablePosition(final ManageablePosition copyFrom) {
    ArgumentChecker.notNull(copyFrom, "position");
    // Scalar/identifier state is shared directly.
    _uniqueId = copyFrom.getUniqueId();
    _quantity = copyFrom.getQuantity();
    _providerId = copyFrom.getProviderId();
    // The security link may carry mutable state, so clone rather than share it.
    _securityLink = JodaBeanUtils.clone(copyFrom.getSecurityLink());
    if (copyFrom.getAttributes() != null) {
        for (final Entry<String, String> entry : copyFrom.getAttributes().entrySet()) {
            addAttribute(entry.getKey(), entry.getValue());
        }
    }
    // Trades are cloned one by one so the copy is fully independent.
    if (copyFrom.getTrades() != null) {
        for (final ManageableTrade trade : copyFrom.getTrades()) {
            addTrade(JodaBeanUtils.clone(trade));
        }
    }
}
/**
 * Creates a populated instance (no trades or attributes).
 *
 * @param uniqueId the position unique identifier, may be null
 * @param quantity the amount of the position, not null
 * @param securityId the security bundle, not null
 */
public ManageablePosition(final UniqueId uniqueId, final BigDecimal quantity, final ExternalIdBundle securityId) {
    ArgumentChecker.notNull(quantity, "quantity");
    ArgumentChecker.notNull(securityId, "securityId");
    // NOTE(review): unlike the other constructors, this one assigns via the
    // setters rather than the fields directly — presumably intentional; confirm.
    setUniqueId(uniqueId);
    setQuantity(quantity);
    _securityLink = new ManageableSecurityLink(securityId);
}
//-------------------------------------------------------------------------
/**
 * Adds a trade to the list.
 *
 * @param trade the trade to add, not null
 */
public void addTrade(final ManageableTrade trade) {
    ArgumentChecker.notNull(trade, "trade");
    // Quantity is intentionally not adjusted: per the class contract, quantity
    // and the trade list are stored independently.
    getTrades().add(trade);
}
/**
 * Removes a given trade from the set of trades.
 *
 * @param trade the trade to remove, null ignored
 * @return true if the set of trades contained the specified trade
 */
public boolean removeTrade(final ManageableTrade trade) {
    // List.remove matches via ManageableTrade.equals — presumably value-based
    // (Joda bean); confirm if identity-based removal is ever required.
    return getTrades().remove(trade);
}
/**
 * Gets a suitable name for the position.
 *
 * @return the name, not null
 */
@DerivedProperty
public String getName() {
    // Prefer "quantity x security" when both parts are available.
    final String securityName = getSecurityLink().getBestName();
    if (getQuantity() != null && securityName.length() > 0) {
        return JdkUtils.stripTrailingZeros(getQuantity()).toPlainString() + " x " + securityName;
    }
    // Fall back to the object identifier, or an empty string when unsaved.
    if (getUniqueId() != null) {
        return getUniqueId().getObjectId().toString();
    }
    return "";
}
//-------------------------------------------------------------------------
/**
 * Gets a trade from the list by object identifier.
 *
 * @param tradeObjectId the trade object identifier, not null
 * @return the trade with the identifier, null if not found
 */
public ManageableTrade getTrade(final ObjectIdentifiable tradeObjectId) {
    ArgumentChecker.notNull(tradeObjectId, "tradeObjectId");
    final ObjectId targetId = tradeObjectId.getObjectId();
    // Linear scan: trade lists are expected to be small.
    for (final ManageableTrade candidate : getTrades()) {
        if (candidate.getUniqueId().equalObjectId(targetId)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Checks if any trade object identifier matches one in the specified list.
 *
 * @param objectIds the object identifiers to match against, not null
 * @return true if at least one identifier matches
 */
public boolean matchesAnyTrade(final Collection<ObjectId> objectIds) {
    ArgumentChecker.notNull(objectIds, "objectIds");
    for (final ManageableTrade candidate : getTrades()) {
        final ObjectId candidateId = candidate.getUniqueId().getObjectId();
        if (objectIds.contains(candidateId)) {
            return true;
        }
    }
    return false;
}
/**
 * Checks if any trade provider key matches.
 *
 * @param tradeProviderId the trade provider key to match against, not null
 * @return true if the key matches
 */
public boolean matchesAnyTradeProviderId(final ExternalId tradeProviderId) {
    ArgumentChecker.notNull(tradeProviderId, "tradeProviderId");
    for (final ManageableTrade candidate : getTrades()) {
        // equals is called on the argument so a null provider id is harmless.
        if (tradeProviderId.equals(candidate.getProviderId())) {
            return true;
        }
    }
    return false;
}
//-------------------------------------------------------------------------
/**
 * Adds a key value pair to attributes.
 * <p>
 * Any existing value for the key is silently replaced (Map.put semantics).
 *
 * @param key the key to add, not null
 * @param value the value to add, not null
 */
public void addAttribute(final String key, final String value) {
    ArgumentChecker.notNull(key, "key");
    ArgumentChecker.notNull(value, "value");
    _attributes.put(key, value);
}
//-------------------------------------------------------------------------
/**
 * Gets the resolved security from the link.
 *
 * @return the security from the link, null if not resolved
 */
public Security getSecurity() {
    return _securityLink.getTarget();
}
//-------------------------------------------------------------------------
/**
 * Converts this position to an object implementing the Position interface.
 * <p>
 * The interface contains different data to this class due to database design.
 *
 * @return an equivalent {@code SimplePosition}, not null
 */
public SimplePosition toPosition() {
    final SimplePosition sp = new SimplePosition();
    sp.setQuantity(this.getQuantity());
    sp.setSecurityLink(this.getSecurityLink());
    sp.getTrades().addAll(getTrades());
    sp.setAttributes(this.getAttributes());
    // Workaround for PLAT-2371 until PLAT-2286
    // The provider id is surfaced as an attribute keyed by the Joda-Beans
    // property name so it survives the conversion.
    if (this.getProviderId() != null) {
        sp.addAttribute(this.providerId().name(), this.getProviderId().toString());
    }
    if (this.getUniqueId() != null) { // may not have an id yet
        sp.setUniqueId(this.getUniqueId());
    }
    return sp;
}
//-----------------------------------------------------------------------
/**
 * Creates a deep copy of this position.
 * <p>
 * Delegates to the copy constructor instead of {@code Object.clone()}.
 *
 * @return the cloned position, not null
 */
@Override
public ManageablePosition clone() {
    return new ManageablePosition(this);
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code ManageablePosition}.
* @return the meta-bean, not null
*/
public static ManageablePosition.Meta meta() {
return ManageablePosition.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(ManageablePosition.Meta.INSTANCE);
}
@Override
public ManageablePosition.Meta metaBean() {
return ManageablePosition.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
/**
* Gets the unique identifier of the position.
* This must be null when adding to a master and not null when retrieved from a master.
* @return the value of the property
*/
@Override
public UniqueId getUniqueId() {
return _uniqueId;
}
/**
* Sets the unique identifier of the position.
* This must be null when adding to a master and not null when retrieved from a master.
* @param uniqueId the new value of the property
*/
@Override
public void setUniqueId(UniqueId uniqueId) {
this._uniqueId = uniqueId;
}
/**
* Gets the the {@code uniqueId} property.
* This must be null when adding to a master and not null when retrieved from a master.
* @return the property, not null
*/
public final Property<UniqueId> uniqueId() {
return metaBean().uniqueId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the number of units in the position.
* This field must not be null for the object to be valid.
* @return the value of the property, not null
*/
public BigDecimal getQuantity() {
return _quantity;
}
/**
* Sets the number of units in the position.
* This field must not be null for the object to be valid.
* @param quantity the new value of the property, not null
*/
public void setQuantity(BigDecimal quantity) {
JodaBeanUtils.notNull(quantity, "quantity");
this._quantity = quantity;
}
/**
* Gets the the {@code quantity} property.
* This field must not be null for the object to be valid.
* @return the property, not null
*/
public final Property<BigDecimal> quantity() {
return metaBean().quantity().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the link referencing the security, not null.
* This may also hold the resolved security.
* @return the value of the property, not null
*/
public ManageableSecurityLink getSecurityLink() {
return _securityLink;
}
/**
* Sets the link referencing the security, not null.
* This may also hold the resolved security.
* @param securityLink the new value of the property, not null
*/
public void setSecurityLink(ManageableSecurityLink securityLink) {
JodaBeanUtils.notNull(securityLink, "securityLink");
this._securityLink = securityLink;
}
/**
* Gets the the {@code securityLink} property.
* This may also hold the resolved security.
* @return the property, not null
*/
public final Property<ManageableSecurityLink> securityLink() {
return metaBean().securityLink().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the trades that the make up the position, not null.
* An empty list usually means that trade data is unavailable.
* @return the value of the property, not null
*/
public List<ManageableTrade> getTrades() {
return _trades;
}
/**
* Sets the trades that the make up the position, not null.
* An empty list usually means that trade data is unavailable.
* @param trades the new value of the property, not null
*/
public void setTrades(List<ManageableTrade> trades) {
JodaBeanUtils.notNull(trades, "trades");
this._trades.clear();
this._trades.addAll(trades);
}
/**
* Gets the the {@code trades} property.
* An empty list usually means that trade data is unavailable.
* @return the property, not null
*/
public final Property<List<ManageableTrade>> trades() {
return metaBean().trades().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the general purpose position attributes.
* These can be used to add arbitrary additional information to the object
* and for aggregating in portfolios.
* @return the value of the property, not null
*/
public Map<String, String> getAttributes() {
return _attributes;
}
/**
* Sets the general purpose position attributes.
* These can be used to add arbitrary additional information to the object
* and for aggregating in portfolios.
* @param attributes the new value of the property, not null
*/
public void setAttributes(Map<String, String> attributes) {
JodaBeanUtils.notNull(attributes, "attributes");
this._attributes.clear();
this._attributes.putAll(attributes);
}
/**
* Gets the the {@code attributes} property.
* These can be used to add arbitrary additional information to the object
* and for aggregating in portfolios.
* @return the property, not null
*/
public final Property<Map<String, String>> attributes() {
return metaBean().attributes().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the provider external identifier for the data.
* This optional field can be used to capture the identifier used by the data provider.
* This can be useful when receiving updates from the same provider.
* @return the value of the property
*/
public ExternalId getProviderId() {
return _providerId;
}
/**
* Sets the provider external identifier for the data.
* This optional field can be used to capture the identifier used by the data provider.
* This can be useful when receiving updates from the same provider.
* @param providerId the new value of the property
*/
public void setProviderId(ExternalId providerId) {
this._providerId = providerId;
}
/**
* Gets the the {@code providerId} property.
* This optional field can be used to capture the identifier used by the data provider.
* This can be useful when receiving updates from the same provider.
* @return the property, not null
*/
public final Property<ExternalId> providerId() {
return metaBean().providerId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the the {@code name} property.
*
* @return the property, not null
*/
public final Property<String> name() {
return metaBean().name().createProperty(this);
}
//-----------------------------------------------------------------------
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
ManageablePosition other = (ManageablePosition) obj;
return JodaBeanUtils.equal(getUniqueId(), other.getUniqueId()) &&
JodaBeanUtils.equal(getQuantity(), other.getQuantity()) &&
JodaBeanUtils.equal(getSecurityLink(), other.getSecurityLink()) &&
JodaBeanUtils.equal(getTrades(), other.getTrades()) &&
JodaBeanUtils.equal(getAttributes(), other.getAttributes()) &&
JodaBeanUtils.equal(getProviderId(), other.getProviderId());
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(getUniqueId());
hash = hash * 31 + JodaBeanUtils.hashCode(getQuantity());
hash = hash * 31 + JodaBeanUtils.hashCode(getSecurityLink());
hash = hash * 31 + JodaBeanUtils.hashCode(getTrades());
hash = hash * 31 + JodaBeanUtils.hashCode(getAttributes());
hash = hash * 31 + JodaBeanUtils.hashCode(getProviderId());
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(224);
buf.append("ManageablePosition{");
int len = buf.length();
toString(buf);
if (buf.length() > len) {
buf.setLength(buf.length() - 2);
}
buf.append('}');
return buf.toString();
}
protected void toString(StringBuilder buf) {
buf.append("uniqueId").append('=').append(JodaBeanUtils.toString(getUniqueId())).append(',').append(' ');
buf.append("quantity").append('=').append(JodaBeanUtils.toString(getQuantity())).append(',').append(' ');
buf.append("securityLink").append('=').append(JodaBeanUtils.toString(getSecurityLink())).append(',').append(' ');
buf.append("trades").append('=').append(JodaBeanUtils.toString(getTrades())).append(',').append(' ');
buf.append("attributes").append('=').append(JodaBeanUtils.toString(getAttributes())).append(',').append(' ');
buf.append("providerId").append('=').append(JodaBeanUtils.toString(getProviderId())).append(',').append(' ');
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code ManageablePosition}.
*/
public static class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code uniqueId} property.
*/
private final MetaProperty<UniqueId> _uniqueId = DirectMetaProperty.ofReadWrite(
this, "uniqueId", ManageablePosition.class, UniqueId.class);
/**
* The meta-property for the {@code quantity} property.
*/
private final MetaProperty<BigDecimal> _quantity = DirectMetaProperty.ofReadWrite(
this, "quantity", ManageablePosition.class, BigDecimal.class);
/**
* The meta-property for the {@code securityLink} property.
*/
private final MetaProperty<ManageableSecurityLink> _securityLink = DirectMetaProperty.ofReadWrite(
this, "securityLink", ManageablePosition.class, ManageableSecurityLink.class);
/**
* The meta-property for the {@code trades} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<ManageableTrade>> _trades = DirectMetaProperty.ofReadWrite(
this, "trades", ManageablePosition.class, (Class) List.class);
/**
* The meta-property for the {@code attributes} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<Map<String, String>> _attributes = DirectMetaProperty.ofReadWrite(
this, "attributes", ManageablePosition.class, (Class) Map.class);
/**
* The meta-property for the {@code providerId} property.
*/
private final MetaProperty<ExternalId> _providerId = DirectMetaProperty.ofReadWrite(
this, "providerId", ManageablePosition.class, ExternalId.class);
/**
* The meta-property for the {@code name} property.
*/
private final MetaProperty<String> _name = DirectMetaProperty.ofDerived(
this, "name", ManageablePosition.class, String.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"uniqueId",
"quantity",
"securityLink",
"trades",
"attributes",
"providerId",
"name");
/**
* Restricted constructor.
*/
protected Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -294460212: // uniqueId
return _uniqueId;
case -1285004149: // quantity
return _quantity;
case 807992154: // securityLink
return _securityLink;
case -865715313: // trades
return _trades;
case 405645655: // attributes
return _attributes;
case 205149932: // providerId
return _providerId;
case 3373707: // name
return _name;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends ManageablePosition> builder() {
return new DirectBeanBuilder<ManageablePosition>(new ManageablePosition());
}
@Override
public Class<? extends ManageablePosition> beanType() {
return ManageablePosition.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code uniqueId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueId> uniqueId() {
return _uniqueId;
}
/**
* The meta-property for the {@code quantity} property.
* @return the meta-property, not null
*/
public final MetaProperty<BigDecimal> quantity() {
return _quantity;
}
/**
* The meta-property for the {@code securityLink} property.
* @return the meta-property, not null
*/
public final MetaProperty<ManageableSecurityLink> securityLink() {
return _securityLink;
}
/**
* The meta-property for the {@code trades} property.
* @return the meta-property, not null
*/
public final MetaProperty<List<ManageableTrade>> trades() {
return _trades;
}
/**
* The meta-property for the {@code attributes} property.
* @return the meta-property, not null
*/
public final MetaProperty<Map<String, String>> attributes() {
return _attributes;
}
/**
* The meta-property for the {@code providerId} property.
* @return the meta-property, not null
*/
public final MetaProperty<ExternalId> providerId() {
return _providerId;
}
/**
* The meta-property for the {@code name} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> name() {
return _name;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case -294460212: // uniqueId
return ((ManageablePosition) bean).getUniqueId();
case -1285004149: // quantity
return ((ManageablePosition) bean).getQuantity();
case 807992154: // securityLink
return ((ManageablePosition) bean).getSecurityLink();
case -865715313: // trades
return ((ManageablePosition) bean).getTrades();
case 405645655: // attributes
return ((ManageablePosition) bean).getAttributes();
case 205149932: // providerId
return ((ManageablePosition) bean).getProviderId();
case 3373707: // name
return ((ManageablePosition) bean).getName();
}
return super.propertyGet(bean, propertyName, quiet);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
switch (propertyName.hashCode()) {
case -294460212: // uniqueId
((ManageablePosition) bean).setUniqueId((UniqueId) newValue);
return;
case -1285004149: // quantity
((ManageablePosition) bean).setQuantity((BigDecimal) newValue);
return;
case 807992154: // securityLink
((ManageablePosition) bean).setSecurityLink((ManageableSecurityLink) newValue);
return;
case -865715313: // trades
((ManageablePosition) bean).setTrades((List<ManageableTrade>) newValue);
return;
case 405645655: // attributes
((ManageablePosition) bean).setAttributes((Map<String, String>) newValue);
return;
case 205149932: // providerId
((ManageablePosition) bean).setProviderId((ExternalId) newValue);
return;
case 3373707: // name
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: name");
}
super.propertySet(bean, propertyName, newValue, quiet);
}
@Override
protected void validate(Bean bean) {
JodaBeanUtils.notNull(((ManageablePosition) bean)._quantity, "quantity");
JodaBeanUtils.notNull(((ManageablePosition) bean)._securityLink, "securityLink");
JodaBeanUtils.notNull(((ManageablePosition) bean)._trades, "trades");
JodaBeanUtils.notNull(((ManageablePosition) bean)._attributes, "attributes");
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| apache-2.0 |
racker/java-service-registry-client | service-registry-curator/src/main/java/com/rackspacecloud/client/service_registry/curator/ServiceTracker.java | 5706 | package com.rackspacecloud.client.service_registry.curator;
import com.netflix.curator.x.discovery.ServiceInstance;
import com.rackspacecloud.client.service_registry.Client;
import com.rackspacecloud.client.service_registry.HeartBeater;
import com.rackspacecloud.client.service_registry.ServiceCreateResponse;
import com.rackspacecloud.client.service_registry.events.client.ClientEvent;
import com.rackspacecloud.client.service_registry.events.client.HeartbeatAckEvent;
import com.rackspacecloud.client.service_registry.events.client.HeartbeatErrorEvent;
import com.rackspacecloud.client.service_registry.events.client.HeartbeatEventListener;
import com.rackspacecloud.client.service_registry.events.client.HeartbeatStoppedEvent;
import com.rackspacecloud.client.service_registry.objects.Service;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * This class is responsible for managing the lifecycle of an RSR service.
 * <p>
 * It registers a Curator {@link ServiceInstance} with the service registry, keeps the
 * registration alive through a {@link HeartBeater}, and re-registers when the heartbeat
 * terminates with an error.
 * todo: Provide a lifecycle strategy. this will be responsible for deciding how/if/when to register and deal with
 * various events.
 */
class ServiceTracker<T> {
    // Metadata key holding the discovery type tag.
    public static final String DISCOVERY = "discovery";
    // Tag marking registrations created through curator-x-discovery.
    public static final String CURATOR_TAG = "curator-x-discovery";
    public static final String NAME = "name";
    private static final String ADDRESS = "address";
    private static final String PORT = "port";
    private static final String REG_TIME = "regtime";
    private static final String SVC_TYPE = "svcType";
    private static final String SSL_PORT = "sslPort";
    private static final String URI_SPEC = "uriSpec";
    // Written under "this" lock but read from heartbeat callbacks, hence volatile.
    private volatile Service service = null;
    private volatile HeartBeater heartbeater = null;
    private final String typeTag;
    private final ServiceInstance<T> instance;
    private final Client client;
    private final HeartbeatEventListener heartbeatListener;
    /**
     * @param client the RSR client used for registration
     * @param instance the Curator service instance to mirror into the registry
     * @param typeTag discovery type tag stored in the service metadata
     */
    public ServiceTracker(Client client, ServiceInstance<T> instance, String typeTag) {
        this.client = client;
        this.instance = instance;
        this.typeTag = typeTag;
        this.heartbeatListener = new CuratorHeartbeatEventListener();
    }
    /**
     * Registers the instance with the service registry and starts heartbeating.
     * Idempotent: a call while already registered returns the existing service.
     *
     * @return the registered service
     * @throws Exception if the registry call fails
     */
    public synchronized Service register() throws Exception {
        if (this.service == null) {
            List<String> tags = new ArrayList<String>();
            tags.add(typeTag);
            tags.add(this.instance.getName());
            tags.add(CURATOR_TAG);
            ServiceCreateResponse res = client.getServicesClient().create(
                    this.instance.getId(),
                    30,
                    tags,
                    getMetadata(this.instance, this.typeTag));
            heartbeater = res.getHeartbeater();
            heartbeater.addEventListener(this.heartbeatListener);
            heartbeater.start();
            this.service = res.getService();
        }
        return service;
    }
    /** Stops heartbeating; the resulting heartbeat event performs the cleanup. */
    public synchronized void stop() {
        if (this.heartbeater != null) {
            this.heartbeater.stop();
            // heartbeat event eventually causes everything to get cleaned up.
        }
    }
    /**
     * Builds the RSR metadata map for a Curator service instance: the standard
     * instance fields plus, when a payload is present, its reflectively discovered
     * meta fields.
     */
    private static Map<String, String> getMetadata(ServiceInstance service, String typeTag) {
        Map<String, String> map = new HashMap<String, String>();
        map.put(DISCOVERY, typeTag);
        map.put(NAME, service.getName());
        map.put(ADDRESS, service.getAddress());
        if (service.getPort() != null)
            map.put(PORT, service.getPort().toString());
        map.put(REG_TIME, Long.toString(service.getRegistrationTimeUTC()));
        map.put(SVC_TYPE, service.getServiceType().name());
        if (service.getSslPort() != null)
            map.put(SSL_PORT, service.getSslPort().toString());
        if (service.getUriSpec() != null)
            map.put(URI_SPEC, service.getUriSpec().build());
        // what else?
        // The payload is optional on ServiceInstance; the previous version dereferenced
        // it unconditionally and threw NullPointerException for payload-less instances.
        Object payload = service.getPayload();
        if (payload != null) {
            for (Field f : Utils.getMetaFields(payload.getClass())) {
                try {
                    f.setAccessible(true);
                    Object value = f.get(payload);
                    if (value != null) {
                        map.put(f.getName(), value.toString());
                    }
                } catch (Exception ex) {
                    // todo: log -- reflective access is best-effort; skip unreadable fields.
                }
            }
        }
        return map;
    }
    /**
     * Reacts to heartbeat lifecycle events: tears down local state when the heartbeat
     * stops or errors, and attempts re-registration after error conditions.
     */
    private class CuratorHeartbeatEventListener extends HeartbeatEventListener {
        /** Detaches this listener, stops the heartbeat thread and clears local state. */
        private void resetService(ClientEvent event) {
            ((HeartBeater)event.getSource()).removeEventListener(this);
            ServiceTracker.this.service = null;
            try {
                // make sure the thread is stopped.
                ServiceTracker.this.heartbeater.stop();
            } catch (Exception ignore) {
                // todo: log maybe?
            }
            ServiceTracker.this.heartbeater = null;
        }
        @Override
        public void onAck(HeartbeatAckEvent ack) {
            // do nothing.
        }
        @Override
        public void onStopped(HeartbeatStoppedEvent stopped) {
            // service was stopped cleanly.
            resetService(stopped);
            if (stopped.isError()) {
                try {
                    register();
                } catch (Exception ex) {
                    // depends on what the exception policy is.
                }
            }
        }
        @Override
        public void onError(HeartbeatErrorEvent error) {
            resetService(error);
            if (error.isError()) {
                try {
                    register();
                } catch (Exception ex) {
                    // depends on what the exception policy is.
                }
            }
        }
    }
}
| apache-2.0 |
stelar7/L4J8 | src/main/java/no/stelar7/api/r4j/pojo/val/content/Content.java | 4175 | package no.stelar7.api.r4j.pojo.val.content;
import java.io.Serializable;
import java.util.*;
/**
 * Read-only value object holding versioned game-content catalogues, one list of
 * {@link ContentItem} per content category. There are deliberately no setters;
 * instances are presumably populated by JSON deserialization -- verify against
 * the API client that produces them.
 */
public class Content implements Serializable
{
    private static final long serialVersionUID = 4439671460876034863L;
    // Game-client version string this content snapshot corresponds to.
    private String version;
    // One catalogue list per content category.
    private List<ContentItem> characters;
    private List<ContentItem> maps;
    private List<ContentItem> chromas;
    private List<ContentItem> skins;
    private List<ContentItem> skinLevels;
    private List<ContentItem> equips;
    private List<ContentItem> gameModes;
    private List<ContentItem> sprays;
    private List<ContentItem> sprayLevels;
    private List<ContentItem> charms;
    private List<ContentItem> charmLevels;
    private List<ContentItem> playerCards;
    private List<ContentItem> playerTitles;
    // Plain accessors; the lists are returned as-is (not defensively copied).
    public String getVersion()
    {
        return version;
    }
    public List<ContentItem> getCharacters()
    {
        return characters;
    }
    public List<ContentItem> getMaps()
    {
        return maps;
    }
    public List<ContentItem> getChromas()
    {
        return chromas;
    }
    public List<ContentItem> getSkins()
    {
        return skins;
    }
    public List<ContentItem> getSkinLevels()
    {
        return skinLevels;
    }
    public List<ContentItem> getEquips()
    {
        return equips;
    }
    public List<ContentItem> getGameModes()
    {
        return gameModes;
    }
    public List<ContentItem> getSprays()
    {
        return sprays;
    }
    public List<ContentItem> getSprayLevels()
    {
        return sprayLevels;
    }
    public List<ContentItem> getCharms()
    {
        return charms;
    }
    public List<ContentItem> getCharmLevels()
    {
        return charmLevels;
    }
    public List<ContentItem> getPlayerCards()
    {
        return playerCards;
    }
    public List<ContentItem> getPlayerTitles()
    {
        return playerTitles;
    }
    // Equality compares every field; same-class check (not instanceof) keeps it
    // symmetric with potential subclasses.
    @Override
    public boolean equals(Object o)
    {
        if (this == o)
        {
            return true;
        }
        if (o == null || getClass() != o.getClass())
        {
            return false;
        }
        Content content = (Content) o;
        return Objects.equals(version, content.version) &&
               Objects.equals(characters, content.characters) &&
               Objects.equals(maps, content.maps) &&
               Objects.equals(chromas, content.chromas) &&
               Objects.equals(skins, content.skins) &&
               Objects.equals(skinLevels, content.skinLevels) &&
               Objects.equals(equips, content.equips) &&
               Objects.equals(gameModes, content.gameModes) &&
               Objects.equals(sprays, content.sprays) &&
               Objects.equals(sprayLevels, content.sprayLevels) &&
               Objects.equals(charms, content.charms) &&
               Objects.equals(charmLevels, content.charmLevels) &&
               Objects.equals(playerCards, content.playerCards) &&
               Objects.equals(playerTitles, content.playerTitles);
    }
    // Hash over the same fields as equals, keeping the contract consistent.
    @Override
    public int hashCode()
    {
        return Objects.hash(version, characters, maps, chromas, skins, skinLevels, equips, gameModes, sprays, sprayLevels, charms, charmLevels, playerCards, playerTitles);
    }
    @Override
    public String toString()
    {
        return "Content{" +
               "version='" + version + '\'' +
               ", characters=" + characters +
               ", maps=" + maps +
               ", chromas=" + chromas +
               ", skins=" + skins +
               ", skinLevels=" + skinLevels +
               ", equips=" + equips +
               ", gameModes=" + gameModes +
               ", sprays=" + sprays +
               ", sprayLevels=" + sprayLevels +
               ", charms=" + charms +
               ", charmLevels=" + charmLevels +
               ", playerCards=" + playerCards +
               ", playerTitles=" + playerTitles +
               '}';
    }
}
| apache-2.0 |
emmartins/wildfly-server-migration | core/src/main/java/org/jboss/migration/core/task/component/LeafTask.java | 1910 | /*
* Copyright 2017 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.migration.core.task.component;
import org.jboss.migration.core.task.ServerMigrationTask;
import org.jboss.migration.core.task.ServerMigrationTaskName;
/**
 * A leaf migration task: a {@link ComponentTask} whose behaviour is fully specified
 * by a single {@link TaskRunnable}, with no child tasks of its own.
 *
 * @author emmartins
 */
public class LeafTask extends ComponentTask {
    protected LeafTask(ServerMigrationTaskName name, TaskRunnable taskRunnable) {
        super(name, taskRunnable);
    }
    /**
     * Shared builder plumbing for leaf tasks: records the {@link TaskRunnable.Builder}
     * that will produce the runnable executed by the built task.
     *
     * @param <P> the build parameters type
     * @param <T> the concrete builder type, for fluent chaining via {@code getThis()}
     */
    protected abstract static class BaseBuilder<P extends BuildParameters, T extends BaseBuilder<P, T>> extends ComponentTask.Builder<P, T> implements LeafTaskBuilder<P, T> {
        // Builder producing the runnable executed by the built task.
        private TaskRunnable.Builder<? super P> runnableBuilder;
        @Override
        public T runBuilder(TaskRunnable.Builder<? super P> builder) {
            this.runnableBuilder = builder;
            return getThis();
        }
        @Override
        public TaskRunnable.Builder<? super P> getRunnableBuilder() {
            return runnableBuilder;
        }
    }
    /**
     * Default concrete builder, producing {@link LeafTask} instances.
     *
     * @param <P> the build parameters type
     */
    public static class Builder<P extends BuildParameters> extends BaseBuilder<P, Builder<P>> {
        @Override
        protected Builder<P> getThis() {
            return this;
        }
        @Override
        protected ServerMigrationTask buildTask(ServerMigrationTaskName name, TaskRunnable taskRunnable) {
            return new LeafTask(name, taskRunnable);
        }
    }
}
| apache-2.0 |
rurikovich/GeoHashInterpolation | src/main/java/org/rurik/geohash/geojson/GeoJsonExporter.java | 2334 | package org.rurik.geohash.geojson;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.impl.CoordinateArraySequence;
import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.geojson.feature.FeatureJSON;
import org.geotools.geometry.jts.GeometryBuilder;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.rurik.geohash.GeoPoint;
import org.rurik.geohash.GreatCircleArc;
import java.io.IOException;
import java.io.StringWriter;
import java.util.List;
/**
 * Serializes a list of {@link GeoPoint}s as a GeoJSON Feature containing a single
 * LineString geometry.
 */
public class GeoJsonExporter {
    // Feature schema: one LineString geometry attribute named "Location".
    private final SimpleFeatureType TYPE;
    public GeoJsonExporter() throws SchemaException {
        TYPE = DataUtilities.createType("GreatCircleArc", "Location:LineString");
    }
    /**
     * Builds a GeoJSON Feature whose geometry is the LineString through the given points.
     *
     * @param geoPoints the points forming the line, in order
     * @return the GeoJSON encoding of the feature
     * @throws IOException if JSON serialization fails
     */
    public String geoData(List<GeoPoint> geoPoints) throws IOException {
        int size = geoPoints.size();
        Coordinate[] coordinates = new Coordinate[size];
        for (int i = 0; i < size; i++) {
            GeoPoint p = geoPoints.get(i);
            // GeoJSON (RFC 7946) positions are [longitude, latitude] and JTS Coordinate
            // is (x, y) with x = longitude, so longitude must come first. The previous
            // version swapped the axes, producing transposed coordinates in the output.
            coordinates[i] = new Coordinate(p.getLongitude(), p.getLatitude());
        }
        GeometryFactory geometryFactory = new GeometryFactory();
        // Use the supported factory method instead of the LineString constructor,
        // which is internal API in JTS.
        LineString lineString = geometryFactory.createLineString(new CoordinateArraySequence(coordinates));
        SimpleFeatureBuilder fBuild = new SimpleFeatureBuilder(TYPE);
        SimpleFeature feature = fBuild.buildFeature(null);
        feature.setDefaultGeometry(lineString);
        StringWriter writer = new StringWriter();
        FeatureJSON fjson = new FeatureJSON();
        fjson.writeFeature(feature, writer);
        return writer.toString();
    }
    // Ad-hoc smoke test: prints the GeoJSON for a sample great-circle arc.
    public static void main(String[] args) throws SchemaException, IOException {
        GeoJsonExporter test = new GeoJsonExporter();
        GreatCircleArc arc = new GreatCircleArc(-80, 0, 40, 70);
        System.out.println(test.geoData(arc.generateArcPoints(1000)));
    }
}
| apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-pinpoint/src/main/java/com/amazonaws/services/pinpoint/model/transform/CreateSegmentRequestProtocolMarshaller.java | 2610 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.pinpoint.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.pinpoint.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* CreateSegmentRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class CreateSegmentRequestProtocolMarshaller implements Marshaller<Request<CreateSegmentRequest>, CreateSegmentRequest> {
    // Static description of the REST binding: POST /v1/apps/{application-id}/segments,
    // REST-JSON protocol, with an explicit payload member.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON)
            .requestUri("/v1/apps/{application-id}/segments").httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(true).hasPayloadMembers(true)
            .serviceName("AmazonPinpoint").build();
    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;
    public CreateSegmentRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }
    /**
     * Marshalls the given request into an HTTP request using the binding above.
     *
     * @param createSegmentRequest the request to marshall, must not be null
     * @return the marshalled HTTP request
     * @throws SdkClientException if the argument is null or JSON marshalling fails
     */
    public Request<CreateSegmentRequest> marshall(CreateSegmentRequest createSegmentRequest) {
        if (createSegmentRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            final ProtocolRequestMarshaller<CreateSegmentRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    createSegmentRequest);
            protocolMarshaller.startMarshalling();
            CreateSegmentRequestMarshaller.getInstance().marshall(createSegmentRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| apache-2.0 |
moreus/hadoop | hadoop-0.23.10/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java | 16490 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo;
import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.hdfs.server.datanode.BlockMetadataHeader;
import org.apache.hadoop.hdfs.util.DirectBufferPool;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.DataChecksum;
/**
* BlockReaderLocal enables local short circuited reads. If the DFS client is on
* the same machine as the datanode, then the client can read files directly
* from the local file system rather than going through the datanode for better
* performance. <br>
* {@link BlockReaderLocal} works as follows:
* <ul>
* <li>The client performing short circuit reads must be configured at the
* datanode.</li>
* <li>The client gets the path to the file where block is stored using
* {@link org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol#getBlockLocalPathInfo(ExtendedBlock, Token)}
* RPC call</li>
* <li>Client uses kerberos authentication to connect to the datanode over RPC,
* if security is enabled.</li>
* </ul>
*/
class BlockReaderLocal implements BlockReader {
private static final Log LOG = LogFactory.getLog(DFSClient.class);
//Stores the cache and proxy for a local datanode.
private static class LocalDatanodeInfo {
private ClientDatanodeProtocol proxy = null;
private final Map<ExtendedBlock, BlockLocalPathInfo> cache;
LocalDatanodeInfo() {
final int cacheSize = 10000;
final float hashTableLoadFactor = 0.75f;
int hashTableCapacity = (int) Math.ceil(cacheSize / hashTableLoadFactor) + 1;
cache = Collections
.synchronizedMap(new LinkedHashMap<ExtendedBlock, BlockLocalPathInfo>(
hashTableCapacity, hashTableLoadFactor, true) {
private static final long serialVersionUID = 1;
@Override
protected boolean removeEldestEntry(
Map.Entry<ExtendedBlock, BlockLocalPathInfo> eldest) {
return size() > cacheSize;
}
});
}
private synchronized ClientDatanodeProtocol getDatanodeProxy(
DatanodeInfo node, Configuration conf, int socketTimeout)
throws IOException {
if (proxy == null) {
proxy = DFSUtil.createClientDatanodeProtocolProxy(node, conf,
socketTimeout);
}
return proxy;
}
private synchronized void resetDatanodeProxy() {
if (null != proxy) {
RPC.stopProxy(proxy);
proxy = null;
}
}
private BlockLocalPathInfo getBlockLocalPathInfo(ExtendedBlock b) {
return cache.get(b);
}
private void setBlockLocalPathInfo(ExtendedBlock b, BlockLocalPathInfo info) {
cache.put(b, info);
}
private void removeBlockLocalPathInfo(ExtendedBlock b) {
cache.remove(b);
}
}
// Multiple datanodes could be running on the local machine. Store proxies in
// a map keyed by the ipc port of the datanode.
private static Map<Integer, LocalDatanodeInfo> localDatanodeInfoMap = new HashMap<Integer, LocalDatanodeInfo>();
private final FileInputStream dataIn; // reader for the data file
private FileInputStream checksumIn; // reader for the checksum file
private int offsetFromChunkBoundary;
private byte[] skipBuf = null;
private ByteBuffer dataBuff = null;
private ByteBuffer checksumBuff = null;
private DataChecksum checksum;
private final boolean verifyChecksum;
private static DirectBufferPool bufferPool = new DirectBufferPool();
private int bytesPerChecksum;
private int checksumSize;
/** offset in block where reader wants to actually read */
private long startOffset;
private final String filename;
/**
* The only way this object can be instantiated.
*/
static BlockReaderLocal newBlockReader(Configuration conf, String file,
ExtendedBlock blk, Token<BlockTokenIdentifier> token, DatanodeInfo node,
int socketTimeout, long startOffset, long length) throws IOException {
LocalDatanodeInfo localDatanodeInfo = getLocalDatanodeInfo(node
.getIpcPort());
// check the cache first
BlockLocalPathInfo pathinfo = localDatanodeInfo.getBlockLocalPathInfo(blk);
if (pathinfo == null) {
pathinfo = getBlockPathInfo(blk, node, conf, socketTimeout, token);
}
// check to see if the file exists. It may so happen that the
// HDFS file has been deleted and this block-lookup is occurring
// on behalf of a new HDFS file. This time, the block file could
// be residing in a different portion of the fs.data.dir directory.
// In this case, we remove this entry from the cache. The next
// call to this method will re-populate the cache.
FileInputStream dataIn = null;
FileInputStream checksumIn = null;
BlockReaderLocal localBlockReader = null;
boolean skipChecksumCheck = skipChecksumCheck(conf);
try {
// get a local file system
File blkfile = new File(pathinfo.getBlockPath());
dataIn = new FileInputStream(blkfile);
if (LOG.isDebugEnabled()) {
LOG.debug("New BlockReaderLocal for file " + blkfile + " of size "
+ blkfile.length() + " startOffset " + startOffset + " length "
+ length + " short circuit checksum " + skipChecksumCheck);
}
if (!skipChecksumCheck) {
// get the metadata file
File metafile = new File(pathinfo.getMetaPath());
checksumIn = new FileInputStream(metafile);
// read and handle the common header here. For now just a version
BlockMetadataHeader header = BlockMetadataHeader
.readHeader(new DataInputStream(checksumIn));
short version = header.getVersion();
if (version != BlockMetadataHeader.VERSION) {
LOG.warn("Wrong version (" + version + ") for metadata file for "
+ blk + " ignoring ...");
}
DataChecksum checksum = header.getChecksum();
long firstChunkOffset = startOffset
- (startOffset % checksum.getBytesPerChecksum());
localBlockReader = new BlockReaderLocal(conf, file, blk, token,
startOffset, length, pathinfo, checksum, true, dataIn,
firstChunkOffset, checksumIn);
} else {
localBlockReader = new BlockReaderLocal(conf, file, blk, token,
startOffset, length, pathinfo, dataIn);
}
} catch (IOException e) {
// remove from cache
localDatanodeInfo.removeBlockLocalPathInfo(blk);
DFSClient.LOG.warn("BlockReaderLocal: Removing " + blk
+ " from cache because local file " + pathinfo.getBlockPath()
+ " could not be opened.");
throw e;
} finally {
if (localBlockReader == null) {
if (dataIn != null) {
dataIn.close();
}
if (checksumIn != null) {
checksumIn.close();
}
}
}
return localBlockReader;
}
private static synchronized LocalDatanodeInfo getLocalDatanodeInfo(int port) {
LocalDatanodeInfo ldInfo = localDatanodeInfoMap.get(port);
if (ldInfo == null) {
ldInfo = new LocalDatanodeInfo();
localDatanodeInfoMap.put(port, ldInfo);
}
return ldInfo;
}
private static BlockLocalPathInfo getBlockPathInfo(ExtendedBlock blk,
DatanodeInfo node, Configuration conf, int timeout,
Token<BlockTokenIdentifier> token) throws IOException {
LocalDatanodeInfo localDatanodeInfo = getLocalDatanodeInfo(node.ipcPort);
BlockLocalPathInfo pathinfo = null;
ClientDatanodeProtocol proxy = localDatanodeInfo.getDatanodeProxy(node,
conf, timeout);
try {
// make RPC to local datanode to find local pathnames of blocks
pathinfo = proxy.getBlockLocalPathInfo(blk, token);
if (pathinfo != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Cached location of block " + blk + " as " + pathinfo);
}
localDatanodeInfo.setBlockLocalPathInfo(blk, pathinfo);
}
} catch (IOException e) {
localDatanodeInfo.resetDatanodeProxy(); // Reset proxy on error
throw e;
}
return pathinfo;
}
private static boolean skipChecksumCheck(Configuration conf) {
return conf.getBoolean(
DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_SKIP_CHECKSUM_KEY,
DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_SKIP_CHECKSUM_DEFAULT);
}
private BlockReaderLocal(Configuration conf, String hdfsfile,
ExtendedBlock block, Token<BlockTokenIdentifier> token, long startOffset,
long length, BlockLocalPathInfo pathinfo, FileInputStream dataIn)
throws IOException {
this(conf, hdfsfile, block, token, startOffset, length, pathinfo,
DataChecksum.newDataChecksum(DataChecksum.Type.NULL, 4), false,
dataIn, startOffset, null);
}
private BlockReaderLocal(Configuration conf, String hdfsfile,
ExtendedBlock block, Token<BlockTokenIdentifier> token, long startOffset,
long length, BlockLocalPathInfo pathinfo, DataChecksum checksum,
boolean verifyChecksum, FileInputStream dataIn, long firstChunkOffset,
FileInputStream checksumIn) throws IOException {
this.filename = hdfsfile;
this.checksum = checksum;
this.verifyChecksum = verifyChecksum;
this.startOffset = Math.max(startOffset, 0);
bytesPerChecksum = this.checksum.getBytesPerChecksum();
checksumSize = this.checksum.getChecksumSize();
this.dataIn = dataIn;
this.checksumIn = checksumIn;
this.offsetFromChunkBoundary = (int) (startOffset-firstChunkOffset);
dataBuff = bufferPool.getBuffer(bytesPerChecksum*64);
checksumBuff = bufferPool.getBuffer(checksumSize*64);
//Initially the buffers have nothing to read.
dataBuff.flip();
checksumBuff.flip();
long toSkip = firstChunkOffset;
while (toSkip > 0) {
long skipped = dataIn.skip(toSkip);
if (skipped == 0) {
throw new IOException("Couldn't initialize input stream");
}
toSkip -= skipped;
}
if (checksumIn != null) {
long checkSumOffset = (firstChunkOffset / bytesPerChecksum)
* checksumSize;
while (checkSumOffset > 0) {
long skipped = checksumIn.skip(checkSumOffset);
if (skipped == 0) {
throw new IOException("Couldn't initialize checksum input stream");
}
checkSumOffset -= skipped;
}
}
}
private int readIntoBuffer(FileInputStream stream, ByteBuffer buf)
throws IOException {
int bytesRead = stream.getChannel().read(buf);
if (bytesRead < 0) {
//EOF
return bytesRead;
}
while (buf.remaining() > 0) {
int n = stream.getChannel().read(buf);
if (n < 0) {
//EOF
return bytesRead;
}
bytesRead += n;
}
return bytesRead;
}
@Override
public synchronized int read(byte[] buf, int off, int len) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.info("read off " + off + " len " + len);
}
if (!verifyChecksum) {
return dataIn.read(buf, off, len);
} else {
int dataRead = -1;
if (dataBuff.remaining() == 0) {
dataBuff.clear();
checksumBuff.clear();
dataRead = readIntoBuffer(dataIn, dataBuff);
readIntoBuffer(checksumIn, checksumBuff);
checksumBuff.flip();
dataBuff.flip();
checksum.verifyChunkedSums(dataBuff, checksumBuff, filename,
this.startOffset);
} else {
dataRead = dataBuff.remaining();
}
if (dataRead > 0) {
int nRead = Math.min(dataRead - offsetFromChunkBoundary, len);
if (offsetFromChunkBoundary > 0) {
dataBuff.position(offsetFromChunkBoundary);
// Its either end of file or dataRead is greater than the
// offsetFromChunkBoundary
offsetFromChunkBoundary = 0;
}
if (nRead > 0) {
dataBuff.get(buf, off, nRead);
return nRead;
} else {
return 0;
}
} else {
return -1;
}
}
}
@Override
public synchronized long skip(long n) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("skip " + n);
}
if (n <= 0) {
return 0;
}
if (!verifyChecksum) {
return dataIn.skip(n);
}
// caller made sure newPosition is not beyond EOF.
int remaining = dataBuff.remaining();
int position = dataBuff.position();
int newPosition = position + (int)n;
// if the new offset is already read into dataBuff, just reposition
if (n <= remaining) {
assert offsetFromChunkBoundary == 0;
dataBuff.position(newPosition);
return n;
}
// for small gap, read through to keep the data/checksum in sync
if (n - remaining <= bytesPerChecksum) {
dataBuff.position(position + remaining);
if (skipBuf == null) {
skipBuf = new byte[bytesPerChecksum];
}
int ret = read(skipBuf, 0, (int)(n - remaining));
return ret;
}
// optimize for big gap: discard the current buffer, skip to
// the beginning of the appropriate checksum chunk and then
// read to the middle of that chunk to be in sync with checksums.
this.offsetFromChunkBoundary = newPosition % bytesPerChecksum;
long toskip = n - remaining - this.offsetFromChunkBoundary;
dataBuff.clear();
checksumBuff.clear();
long dataSkipped = dataIn.skip(toskip);
if (dataSkipped != toskip) {
throw new IOException("skip error in data input stream");
}
long checkSumOffset = (dataSkipped / bytesPerChecksum) * checksumSize;
if (checkSumOffset > 0) {
long skipped = checksumIn.skip(checkSumOffset);
if (skipped != checkSumOffset) {
throw new IOException("skip error in checksum input stream");
}
}
// read into the middle of the chunk
if (skipBuf == null) {
skipBuf = new byte[bytesPerChecksum];
}
assert skipBuf.length == bytesPerChecksum;
assert this.offsetFromChunkBoundary < bytesPerChecksum;
int ret = read(skipBuf, 0, this.offsetFromChunkBoundary);
if (ret == -1) { // EOS
return toskip;
} else {
return (toskip + ret);
}
}
@Override
public synchronized void close() throws IOException {
dataIn.close();
if (checksumIn != null) {
checksumIn.close();
}
if (dataBuff != null) {
bufferPool.returnBuffer(dataBuff);
dataBuff = null;
}
if (checksumBuff != null) {
bufferPool.returnBuffer(checksumBuff);
checksumBuff = null;
}
startOffset = -1;
checksum = null;
}
@Override
public int readAll(byte[] buf, int offset, int len) throws IOException {
return BlockReaderUtil.readAll(this, buf, offset, len);
}
@Override
public void readFully(byte[] buf, int off, int len) throws IOException {
BlockReaderUtil.readFully(this, buf, off, len);
}
@Override
public Socket takeSocket() {
return null;
}
@Override
public boolean hasSentStatusCode() {
return false;
}
} | apache-2.0 |
DJBiokinetix/Hub-Plugin | Hub.java | 13980 | package me.DJBiokinetix;
import java.util.HashMap;
import java.util.Random;
import org.bukkit.ChatColor;
import org.bukkit.Color;
import org.bukkit.Effect;
import org.bukkit.FireworkEffect;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Firework;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerKickEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.inventory.meta.FireworkMeta;
import org.bukkit.plugin.java.JavaPlugin;
public class Hub extends JavaPlugin implements Listener{
final HashMap<Long, Long> Timer = new HashMap<Long, Long>();
public HashMap<String, Long> cooldowns = new HashMap<String, Long>();
public void onEnable(){
saveDefaultConfig();
getServer().getPluginManager().registerEvents(this, this);
}
@SuppressWarnings("deprecation")
public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args){
Player jugador = (Player)sender;
int cdt = Integer.parseInt(getConfig().getString("Time"));
String launch = getConfig().getString("Firework message");
String notallowed = getConfig().getString("No permission");
String wrong = getConfig().getString("Incorrect usage");
String wait = getConfig().getString("Wait message");
String leave = getConfig().getString("S Fake").replaceAll("&", "§").replaceAll("%user%", jugador.getPlayer().getName());
String entry = getConfig().getString("E Fake").replaceAll("&", "§").replaceAll("%user%", jugador.getPlayer().getName());
if(cmd.getName().equalsIgnoreCase("rconfig")){
if(jugador.getPlayer().hasPermission("rconfig.hub")){
sender.sendMessage(ChatColor.GREEN + "Configuration reloaded");
saveDefaultConfig();
reloadConfig();
}}
if(cmd.getName().equalsIgnoreCase("hub")){
jugador.sendMessage(ChatColor.DARK_GRAY + "======[" + ChatColor.GOLD + "Hub" + ChatColor.DARK_GRAY + "]======");
jugador.sendMessage(ChatColor.GRAY + "Plugin by: DJBiokinetix");
jugador.sendMessage(ChatColor.GRAY + "Version: 3.9");
if (args.length == 1) {
if (args[0].equalsIgnoreCase("help")){
jugador.sendMessage(ChatColor.GREEN + "Command: /fw - Launch fireworks!");
jugador.sendMessage(ChatColor.GREEN + "Command: /fake - Fake message 'leave'");
jugador.sendMessage(ChatColor.GREEN + "Command: /entry - Fake message 'join'");
jugador.sendMessage(ChatColor.GREEN + "Command: /rconfig - Reload the config files!");
jugador.sendMessage(ChatColor.GREEN + "Command: /music - menu of the disco mode...");
}
}
}
if(cmd.getName().equalsIgnoreCase("music")){
jugador.sendMessage(ChatColor.DARK_GRAY + "======[" + ChatColor.GOLD + "Music" + ChatColor.DARK_GRAY + "]======");
jugador.sendMessage(ChatColor.RED + "/13 - Play record 13");
jugador.sendMessage(ChatColor.RED + "/cat - Play record cat");
jugador.sendMessage(ChatColor.RED + "/blocks - Play record blocks");
jugador.sendMessage(ChatColor.RED + "/chirp - Play record chirp");
jugador.sendMessage(ChatColor.RED + "/far - Play record far");
jugador.sendMessage(ChatColor.RED + "/mall - Play record mall");
jugador.sendMessage(ChatColor.RED + "/mellohi - Play record mellohi");
jugador.sendMessage(ChatColor.RED + "/stal - Play record stal");
jugador.sendMessage(ChatColor.RED + "/strad - Play record strad");
jugador.sendMessage(ChatColor.RED + "/ward - Play record ward");
jugador.sendMessage(ChatColor.RED + "/11 - Play record 11");
jugador.sendMessage(ChatColor.RED + "/wait - Play record wait");
jugador.sendMessage(ChatColor.DARK_GRAY + "===============");
}
if(cmd.getName().equalsIgnoreCase("fake")){
if(jugador.getPlayer().hasPermission("fake.hub")){
jugador.getServer().broadcastMessage(leave);
}}
if(cmd.getName().equalsIgnoreCase("entry")){
if(jugador.getPlayer().hasPermission("fake.hub")){
jugador.getServer().broadcastMessage(entry);
}}
if(cmd.getName().equalsIgnoreCase("13")){
if(jugador.getPlayer().hasPermission("13.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2256);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - 13");
}}
if(cmd.getName().equalsIgnoreCase("Cat")){
if(jugador.getPlayer().hasPermission("cat.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2257);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Cat");
}}
if(cmd.getName().equalsIgnoreCase("Blocks")){
if(jugador.getPlayer().hasPermission("blocks.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2258);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Blocks");
}}
if(cmd.getName().equalsIgnoreCase("Chirp")){
if(jugador.getPlayer().hasPermission("chirp.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2259);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Chirp");
}}
if(cmd.getName().equalsIgnoreCase("Far")){
if(jugador.getPlayer().hasPermission("far.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2260);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Far");
}}
if(cmd.getName().equalsIgnoreCase("Mall")){
if(jugador.getPlayer().hasPermission("mall.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2261);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Mall");
}}
if(cmd.getName().equalsIgnoreCase("Mellohi")){
if(jugador.getPlayer().hasPermission("mellohi.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2262);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Mellohi");
}}
if(cmd.getName().equalsIgnoreCase("Stal")){
if(jugador.getPlayer().hasPermission("stal.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2263);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Stal");
}}
if(cmd.getName().equalsIgnoreCase("Strad")){
if(jugador.getPlayer().hasPermission("strad.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2264);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Strad");
}}
if(cmd.getName().equalsIgnoreCase("Ward")){
if(jugador.getPlayer().hasPermission("ward.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2265);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Ward");
}}
if(cmd.getName().equalsIgnoreCase("11")){
if(jugador.getPlayer().hasPermission("11.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2266);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - 11");
}}
if(cmd.getName().equalsIgnoreCase("Wait")){
if(jugador.getPlayer().hasPermission("wait.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 2267);
jugador.sendMessage(ChatColor.GREEN + "Now playing: Record - Wait");
}}
if(cmd.getName().equalsIgnoreCase("stopmusic")){
if(jugador.getPlayer().hasPermission("music.hub")){
jugador.playEffect(jugador.getLocation(), Effect.RECORD_PLAY, 0);
jugador.sendMessage(ChatColor.RED + "The music has been stoped!");
}}
if ((cmd.getName().equalsIgnoreCase("fw")) || (cmd.getName().equalsIgnoreCase("firework"))){
if (args.length == 0){
if (jugador.hasPermission("firework.hub")){
if (jugador.hasPermission("firework.admin")){
shootFirework(jugador);
jugador.sendMessage(formatVariables(launch, jugador));
} else {
int cooldownTime = cdt;
if (this.cooldowns.containsKey(sender.getName())){
long secondsLeft = ((Long)this.cooldowns.get(sender.getName())).longValue() / 1000L + cooldownTime - System.currentTimeMillis() / 1000L;
if (secondsLeft > 0L){
sender.sendMessage(formatVariables(wait, jugador));
return true;
}
}
this.cooldowns.put(sender.getName(), Long.valueOf(System.currentTimeMillis()));
shootFirework(jugador);
jugador.sendMessage(formatVariables(launch, jugador));
}
} else {
jugador.sendMessage(formatVariables(notallowed, jugador));
}
} else if (args.length == 3){
if (jugador.hasPermission("firework.coord")){
double x = Integer.parseInt(args[0]);
double y = Integer.parseInt(args[1]);
double z = Integer.parseInt(args[2]);
shootFirework2(jugador, x, y, z);
jugador.sendMessage(formatVariables(launch, jugador));
}
}
else if (args.length == 1) {
if (args[0].equalsIgnoreCase("reload")){
if (jugador.hasPermission("firework.admin")){
reloadConfig();
jugador.sendMessage(ChatColor.GREEN + "Configuration reloaded");
}
} else {
jugador.sendMessage(formatVariables(wrong, jugador));
}
}
}
return false;
}
@EventHandler
public void Join(PlayerJoinEvent e){
if (e.getPlayer().hasPermission("join.hub")){
e.setJoinMessage(getConfig().getString("Join").replaceAll("&", "§").replaceAll("%user%", e.getPlayer().getName()));
shootFirework(e.getPlayer());
return;
}
e.setJoinMessage(null);
}
@EventHandler
public void Leave(PlayerQuitEvent e){
if (e.getPlayer().hasPermission("leave.hub")){
e.setQuitMessage(getConfig().getString("Leave").replaceAll("&", "§").replaceAll("%user%", e.getPlayer().getName()));
return;
}
e.setQuitMessage(null);
}
@EventHandler
public void Kick(PlayerKickEvent e){
e.setLeaveMessage(null);
}
public String formatVariables(String string, Player player){
int cdt = Integer.parseInt(getConfig().getString("Time"));
return ChatColor.translateAlternateColorCodes("&".charAt(0), string).replace("%time", String.valueOf(cdt));
}
private void shootFirework(Player player){
Firework firework = (Firework)player.getWorld().spawnEntity(player.getLocation(), EntityType.FIREWORK);
FireworkMeta fm = firework.getFireworkMeta();
Random r = new Random();
FireworkEffect.Type type = null;
int fType = r.nextInt(5) + 1;
switch (fType){
case 1:
default:
type = FireworkEffect.Type.BALL;
break;
case 2:
type = FireworkEffect.Type.BALL_LARGE;
break;
case 3:
type = FireworkEffect.Type.BURST;
break;
case 4:
type = FireworkEffect.Type.CREEPER;
break;
case 5:
type = FireworkEffect.Type.STAR;
}
int c1i = r.nextInt(16) + 1;
int c2i = r.nextInt(16) + 1;
Color c1 = getColor(c1i);
Color c2 = getColor(c2i);
FireworkEffect effect = FireworkEffect.builder().flicker(r.nextBoolean()).withColor(c1).withFade(c2).with(type).trail(r.nextBoolean()).build();
fm.addEffect(effect);
int power = r.nextInt(2) + 1;
fm.setPower(power);
firework.setFireworkMeta(fm);
}
private void shootFirework2(Player player, double x, double y, double z){
World world = player.getWorld();
double x1 = x;
double y1 = y;
double z1 = z;
Location loc = new Location(world, x1, y1, z1);
Firework firework = (Firework)player.getWorld().spawnEntity(loc, EntityType.FIREWORK);
FireworkMeta fm = firework.getFireworkMeta();
Random r = new Random();
FireworkEffect.Type type = null;
int fType = r.nextInt(5) + 1;
switch (fType){
case 1:
default:
type = FireworkEffect.Type.BALL;
break;
case 2:
type = FireworkEffect.Type.BALL_LARGE;
break;
case 3:
type = FireworkEffect.Type.BURST;
break;
case 4:
type = FireworkEffect.Type.CREEPER;
break;
case 5:
type = FireworkEffect.Type.STAR;
}
int c1i = r.nextInt(16) + 1;
int c2i = r.nextInt(16) + 1;
Color c1 = getColor(c1i);
Color c2 = getColor(c2i);
FireworkEffect effect = FireworkEffect.builder().flicker(r.nextBoolean()).withColor(c1).withFade(c2).with(type).trail(r.nextBoolean()).build();
fm.addEffect(effect);
int power = r.nextInt(2) + 1;
fm.setPower(power);
firework.setFireworkMeta(fm);
}
public Color getColor(int c){
switch (c){
case 1:
default:
return Color.AQUA;
case 2:
return Color.BLACK;
case 3:
return Color.BLUE;
case 4:
return Color.FUCHSIA;
case 5:
return Color.GRAY;
case 6:
return Color.GREEN;
case 7:
return Color.LIME;
case 8:
return Color.MAROON;
case 9:
return Color.NAVY;
case 10:
return Color.OLIVE;
case 11:
return Color.ORANGE;
case 12:
return Color.PURPLE;
case 13:
return Color.RED;
case 14:
return Color.SILVER;
case 15:
return Color.TEAL;
case 16:
return Color.WHITE;
}
}
}
| apache-2.0 |
ruslan-sverchkov/aho-corasick | core/src/main/java/org/rsverchk/ahocorasick/Node.java | 11975 | package org.rsverchk.ahocorasick;
import gnu.trove.list.TCharList;
import gnu.trove.list.array.TCharArrayList;
import gnu.trove.map.hash.TCharObjectHashMap;
import gnu.trove.procedure.TCharObjectProcedure;
import org.apache.commons.lang3.Validate;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.text.MessageFormat;
import java.util.concurrent.atomic.AtomicReference;
/**
* Trie node.
*
* @param <T> payload type
* @author Ruslan Sverchkov
*/
public class Node<T> {
private final Node<T> parent;
private final int level;
private TCharObjectHashMap<Node<T>> children;
private Node<T> suffix;
private T payload;
private Node<T> terminalSuffix;
/**
* Create a root node.
*
* @param <T> payload type
* @return a root node
*/
public static <T> Node<T> root() {
return new Node<>(null, 0);
}
/**
* Check if the node is root
*
* @return whether the node is root
*/
public boolean isRoot() {
return parent == null;
}
/**
* Check if the node is terminal
*
* @return whether the node is terminal
*/
public boolean isTerminal() {
return payload != null;
}
/**
* Get node level (length of the shortest path between the node and root).
*
* @return node level
*/
public int getLevel() {
return level;
}
/**
* Get suffix.
*
* @return suffix
* @throws IllegalArgumentException if suffix is null, it means that the method is called before the trie is
* initialized, which makes no sense
*/
@Nonnull
public Node<T> getSuffix() {
if (suffix == null) {
throw new IllegalStateException("suffix is not set, call setSuffix() first");
}
return suffix;
}
/**
* Get terminal suffix.
*
* @return terminal suffix, can be null
*/
@Nullable
public Node<T> getTerminalSuffix() {
return terminalSuffix;
}
/**
* Get payload.
*
* @return payload, can be null if the node is not terminal
*/
@Nullable
public T getPayload() {
return payload;
}
/**
* Set payload.
*
* @param payload payload to set
* @throws NullPointerException if payload is null
*/
public void setPayload(@Nonnull T payload) {
Validate.notNull(payload);
this.payload = payload;
}
/**
* Set suffix, terminal suffix and compact the node.
*/
public void init() {
Node<T> suffix = findSuffix();
setSuffix(suffix);
Node<T> terminalSuffix = findTerminalSuffix();
if (terminalSuffix != null) {
this.terminalSuffix = terminalSuffix;
}
if (children != null) {
children.compact();
}
}
/**
* Create a child for the node.
*
* @param key a character corresponding to the child
* @return the child
* @throws IllegalArgumentException if the node already has a child corresponding to the specified character
*/
public Node<T> createChild(char key) {
if (children != null) {
Validate.isTrue(!children.containsKey(key), MessageFormat.format("child [{0}] already exists",
key));
} else {
children = new TCharObjectHashMap<>(1);
}
Node<T> child = new Node<>(this, level + 1);
children.put(key, child);
return child;
}
/**
* Get a child corresponding to the specified character.
*
* @param key character
* @return a child corresponding to the specified character if exists, null otherwise
*/
@Nullable
public Node<T> getChild(char key) {
return children == null ? null : children.get(key);
}
/**
* Call the specified procedure for each child of current node.
*
* @param procedure procedure to call
* @throws NullPointerException if procedure is null
*/
public void forEachChild(@Nonnull TCharObjectProcedure<? super Node<T>> procedure) {
Validate.notNull(procedure);
if (children == null) {
return;
}
children.forEachEntry(procedure);
}
/**
* A position corresponding to the current node has been found in an input string, traverse the trie starting from
* the current node and call the specified handler for each terminal node that will be encountered.
*
* Implementation comment:
* recursive implementation is good looking but this one is stack overflow free
*
* @param index a position in an input string corresponding to the current node
* @param handler a match handler
* @return whether to continue matching or not
* @throws NullPointerException if handler is null
* @throws IllegalArgumentException if index + 1 is lesser than the node level (it means that the match has been
* found in a substring that is shorter than the match itself which obviously
* cannot happen without programming errors)
*/
public boolean handleMatch(int index, @Nonnull MatchHandler<T> handler) {
int endOfWordExclusive = index + 1;
Validate.isTrue(endOfWordExclusive >= level);
Validate.notNull(handler);
Node<T> current = this;
while (current != null) {
if (current.isTerminal()) {
if (!handler.handle(endOfWordExclusive - current.getLevel(), endOfWordExclusive,
current.getPayload())) {
return false;
}
}
current = current.getTerminalSuffix();
}
return true;
}
@Override
public String toString() {
TCharList list = new TCharArrayList(1);
Node<T> current = this;
while (!current.isRoot()) {
list.add(current.getKey());
current = current.getParent();
}
StringBuilder sb = new StringBuilder();
sb.append("Node {root}");
for (int i = 0; i < list.size(); i++) {
sb.append("\n");
int level = i + 1;
for (int k = 0; k < level; k++) {
sb.append(" ");
}
char c = list.get(i);
sb.append("Node {key: [")
.append(c)
.append("], level: [")
.append(level)
.append("]}");
}
return sb.toString();
}
/**
* Find suffix node for current node. Pay attention that by the time we look for suffix of the node on level
* N, all suffixes of all nodes on levels from 1 to N-1 must be set. This can be ensured by breadth-first traversal
* of the trie.
*
* @return suffix node for the specified node
* @throws NullPointerException if node is null
* @throws IllegalStateException if the method is called before all suffixes of all nodes on higher levels are set
*/
@Nonnull
protected Node<T> findSuffix() {
if (isRoot()) {
return this;
}
if (parent.isRoot()) {
return parent; // for direct descendants of the root it is the suffix
}
char key = getKey();
Node<T> parentSuffix = parent.getSuffix(); // get parent suffix
Node<T> suffixChild = parentSuffix.getChild(key); // get suffix child corresponding to the key
while (suffixChild == null) { // if no such child
if (parentSuffix.isRoot()) { // and we're in the root
return parentSuffix; // return root
}
parentSuffix = parentSuffix.getSuffix(); // if we're not in the root, get current node's suffix
suffixChild = parentSuffix.getChild(key); // get suffix child corresponding to the key
}
return suffixChild; // a suitable node is found, return it
}
/**
* Find the nearest terminal suffix for current node. Pay attention that by the time we look for a terminal
* suffix, all suffixes of all nodes on levels from 1 to N-1 must be set must be set. This can be ensured by
* breadth-first traversal of the trie.
*
* @return the nearest terminal suffix for the specified node, null if there are no candidates
* @throws IllegalStateException if the method is called before all suffixes of all nodes on higher levels are set
*/
@Nullable
protected Node<T> findTerminalSuffix() {
Node<T> currentSuffix = getSuffix();
while (true) {
if (currentSuffix.isRoot()) {
return null;
}
if (currentSuffix.isTerminal()) {
return currentSuffix;
}
Node<T> current = currentSuffix;
currentSuffix = current.getSuffix();
}
}
/**
* Get a key corresponding to current node. Useful for logging.
*
* @return a key corresponding to current node
* @throws IllegalStateException if current node is root, the operation makes no sense for root
*/
protected char getKey() {
if (isRoot()) {
throw new IllegalStateException("makes no sense for root");
}
return parent.getChildKey(this);
}
/**
* Find a character corresponding to the specified child. The character is useful for logging but it's not stored
* as a node field for memory efficiency's sake, so the only way to determine it is to use parent's children map.
*
* @param child the child to find a character for
* @return a character corresponding to the specified child
* @throws NullPointerException if child is null
* @throws IllegalArgumentException if the specified node is not a child of current node
*/
protected char getChildKey(@Nonnull Node<T> child) {
Validate.notNull(child);
AtomicReference<Character> ref = new AtomicReference<>();
forEachChild((char key, Node<T> node) -> {
if (node.equals(child)) {
ref.set(key);
return false;
}
return true;
});
if (ref.get() == null) {
throw new IllegalArgumentException("no such child");
}
return ref.get();
}
/*
Implementation comment:
The methods are useful for testing purposes. Even though the class is very simple, it's still a full-fledged state
machine and for white box testing of a state machine we need an ability to set its state.
*/
@Nullable
public Node<T> getParent() {
    // Null only for the root node.
    return parent;
}
/** Raw children map; null when the node has no children. Exposed for white-box tests. */
@Nullable
protected TCharObjectHashMap<Node<T>> getChildren() {
    return children;
}
/** Replaces the children map wholesale; intended for test setup. */
public void setChildren(@Nullable TCharObjectHashMap<Node<T>> children) {
    this.children = children;
}
/** Sets the suffix link; intended for trie construction and test setup. */
protected void setSuffix(@Nullable Node<T> suffix) {
    this.suffix = suffix;
}
/** Sets the nearest terminal suffix link; intended for trie construction and test setup. */
protected void setTerminalSuffix(@Nullable Node<T> terminalSuffix) {
    this.terminalSuffix = terminalSuffix;
}
/**
* Create an instance of Node. If parent is null and level is 0, the node is considered to be a root node.
* If parent is not null and level is > 0, the node is considered to be a simple node.
*
* @param parent node parent
* @param level node level
* @throws IllegalArgumentException if:
* * parent is null and level is not 0
* * parent is not null and level is not > 0
*/
protected Node(@Nullable Node<T> parent, int level) {
    // A root node has no parent and level 0; any other node must have a parent
    // and a strictly positive level.
    Validate.isTrue(parent == null ? level == 0 : level > 0);
    this.parent = parent;
    this.level = level;
}
} | apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-ram/src/main/java/com/amazonaws/services/ram/model/transform/AcceptResourceShareInvitationRequestProtocolMarshaller.java | 2846 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ram.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.ram.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
* AcceptResourceShareInvitationRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class AcceptResourceShareInvitationRequestProtocolMarshaller implements
        Marshaller<Request<AcceptResourceShareInvitationRequest>, AcceptResourceShareInvitationRequest> {
    // Static description of the operation: REST-JSON protocol, POST to a fixed URI,
    // payload built from the request's member fields rather than a single explicit payload.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON)
            .requestUri("/acceptresourceshareinvitation").httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .serviceName("AWSRAM").build();
    // Factory supplied by the client; creates the low-level JSON protocol marshaller.
    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;
    public AcceptResourceShareInvitationRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }
    /**
     * Marshalls the given request into an HTTP {@link Request} using the REST-JSON binding above.
     * Any marshalling failure is rethrown as an {@link SdkClientException}.
     */
    public Request<AcceptResourceShareInvitationRequest> marshall(AcceptResourceShareInvitationRequest acceptResourceShareInvitationRequest) {
        if (acceptResourceShareInvitationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            final ProtocolRequestMarshaller<AcceptResourceShareInvitationRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(
                    SDK_OPERATION_BINDING, acceptResourceShareInvitationRequest);
            protocolMarshaller.startMarshalling();
            // Delegate field-by-field marshalling to the generated member marshaller.
            AcceptResourceShareInvitationRequestMarshaller.getInstance().marshall(acceptResourceShareInvitationRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| apache-2.0 |
zslinandljxin/ArticleReader | app/src/main/java/zsl/zhaoqing/com/utility/AsynImageLoader.java | 2883 | package zsl.zhaoqing.com.utility;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.os.Message;
import android.util.LruCache;
import java.io.InputStream;
import java.net.URL;
/**
* 图片下载工具类,用于下载及缓存图片
*/
/**
 * Utility for downloading images asynchronously and caching them in memory.
 */
public class AsynImageLoader {
    // Maps an image URL to its downloaded drawable; bounded LRU sized in kilobytes.
    private LruCache<String, Drawable> cacheDrawable;

    public AsynImageLoader() {
        // Dedicate one eighth of the VM's max memory (expressed in KB) to the cache.
        long maxcache = Runtime.getRuntime().maxMemory() / 1024;
        cacheDrawable = new LruCache<String, Drawable>((int) (maxcache / 8)) {
            @Override
            protected int sizeOf(String key, Drawable value) {
                // Charge each entry its bitmap size in KB so the bound is byte-based.
                return ((BitmapDrawable) value).getBitmap().getByteCount() / 1024;
            }
        };
    }

    /**
     * Returns the cached drawable for {@code imageUrl} if present. Otherwise starts a
     * background download, returns {@code null} immediately, and later delivers the
     * image through {@code listener} on the calling thread's looper.
     *
     * @param imageUrl the URL of the image to load
     * @param listener callback invoked once the image has been downloaded
     * @return the cached drawable, or {@code null} if a download was started
     */
    public Drawable imageLoad(final String imageUrl, final FinishLoadingListener listener) {
        // Serve from cache when possible.
        Drawable drawable = getDrawableFromCache(imageUrl);
        if (drawable != null) {
            return drawable;
        }
        // Handler bound to the caller's looper: marshals the result back to it.
        final Handler handler = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                listener.loadImage(imageUrl, (Drawable) msg.obj);
            }
        };
        // Download off the calling thread, cache the result, then post it to the handler.
        new Thread(new Runnable() {
            @Override
            public void run() {
                Drawable loaded = loadImageFromUrl(imageUrl);
                if (loaded == null) {
                    return;
                }
                addDrawableToCache(imageUrl, loaded);
                Message msg = handler.obtainMessage(0, loaded);
                handler.sendMessage(msg);
            }
        }).start();
        return null;
    }

    /**
     * Downloads an image synchronously from the given URL.
     *
     * @param imageUrl the URL to fetch
     * @return the decoded drawable, or {@code null} on any failure
     */
    public static Drawable loadImageFromUrl(String imageUrl) {
        InputStream in = null;
        Drawable drawable = null;
        try {
            URL m = new URL(imageUrl);
            in = (InputStream) m.getContent();
            drawable = Drawable.createFromStream(in, "src");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Fix: the stream was previously never closed, leaking the connection.
            if (in != null) {
                try {
                    in.close();
                } catch (Exception ignored) {
                    // Nothing useful to do if close itself fails.
                }
            }
        }
        return drawable;
    }

    /** Adds a drawable to the cache unless an entry for the key already exists. */
    public void addDrawableToCache(String key, Drawable drawable) {
        if (getDrawableFromCache(key) == null) {
            cacheDrawable.put(key, drawable);
        }
    }

    /** Returns the cached drawable for {@code key}, or {@code null} if absent. */
    public Drawable getDrawableFromCache(String key) {
        return cacheDrawable.get(key);
    }

    /** Callback invoked once an image download has completed. */
    public interface FinishLoadingListener {
        public void loadImage(String imageUrl, Drawable drawable);
    }
}
| apache-2.0 |
lucasponce/alerting-reactor | hawkular-alerts-netty/src/main/java/org/hawkular/alerts/netty/handlers/EventsHandler.java | 20030 | package org.hawkular.alerts.netty.handlers;
import static io.netty.handler.codec.http.HttpMethod.DELETE;
import static io.netty.handler.codec.http.HttpMethod.GET;
import static io.netty.handler.codec.http.HttpMethod.POST;
import static io.netty.handler.codec.http.HttpMethod.PUT;
import static io.netty.handler.codec.http.HttpResponseStatus.OK;
import static org.hawkular.alerts.api.json.JsonUtil.collectionFromJson;
import static org.hawkular.alerts.api.json.JsonUtil.fromJson;
import static org.hawkular.alerts.api.json.JsonUtil.toJson;
import static org.hawkular.alerts.netty.HandlersManager.TENANT_HEADER_NAME;
import static org.hawkular.alerts.netty.util.ResponseUtil.badRequest;
import static org.hawkular.alerts.netty.util.ResponseUtil.checkTags;
import static org.hawkular.alerts.netty.util.ResponseUtil.extractPaging;
import static org.hawkular.alerts.netty.util.ResponseUtil.handleExceptions;
import static org.hawkular.alerts.netty.util.ResponseUtil.isEmpty;
import static org.hawkular.alerts.netty.util.ResponseUtil.ok;
import static org.hawkular.alerts.netty.util.ResponseUtil.paginatedOk;
import static org.hawkular.alerts.netty.util.ResponseUtil.parseTagQuery;
import static org.hawkular.alerts.netty.util.ResponseUtil.parseTags;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hawkular.alerts.api.model.event.Event;
import org.hawkular.alerts.api.model.paging.Page;
import org.hawkular.alerts.api.model.paging.Pager;
import org.hawkular.alerts.api.services.AlertsService;
import org.hawkular.alerts.api.services.EventsCriteria;
import org.hawkular.alerts.engine.StandaloneAlerts;
import org.hawkular.alerts.log.MsgLogger;
import org.hawkular.alerts.netty.RestEndpoint;
import org.hawkular.alerts.netty.RestHandler;
import org.hawkular.alerts.netty.handlers.EventsWatcher.EventsListener;
import org.hawkular.alerts.netty.util.ResponseUtil;
import org.hawkular.alerts.netty.util.ResponseUtil.BadRequestException;
import org.hawkular.alerts.netty.util.ResponseUtil.InternalServerException;
import org.jboss.logging.Logger;
import org.reactivestreams.Publisher;
import io.netty.handler.codec.http.HttpMethod;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
import reactor.ipc.netty.http.server.HttpServerRequest;
import reactor.ipc.netty.http.server.HttpServerResponse;
/**
* @author Jay Shaughnessy
* @author Lucas Ponce
*/
@RestEndpoint(path = "/events")
public class EventsHandler implements RestHandler {
    private static final MsgLogger log = Logger.getMessageLogger(MsgLogger.class, EventsHandler.class.getName());
    // Sub-path constants for routing.
    private static final String ROOT = "/";
    private static final String DATA = "/data";
    private static final String TAGS = "/tags";
    private static final String WATCH = "/watch";
    private static final String _DELETE = "/delete";
    private static final String EVENT = "/event";
    // Query-parameter names.
    private static final String PARAM_START_TIME = "startTime";
    private static final String PARAM_END_TIME = "endTime";
    private static final String PARAM_EVENT_IDS = "eventIds";
    private static final String PARAM_TRIGGER_IDS = "triggerIds";
    private static final String PARAM_CATEGORIES = "categories";
    private static final String PARAM_TAGS = "tags";
    private static final String PARAM_TAG_QUERY = "tagQuery";
    private static final String PARAM_THIN = "thin";
    private static final String PARAM_WATCH_INTERVAL = "watchInterval";
    private static final String PARAM_TAG_NAMES = "tagNames";
    AlertsService alertsService;

    public EventsHandler() {
        alertsService = StandaloneAlerts.getAlertsService();
    }

    /**
     * Routes an incoming request under {@code /events} to the matching handler
     * method based on HTTP method and sub-path. Requests without a tenant header
     * and requests with an unknown method/path combination get a 400 response.
     */
    @Override
    public Publisher<Void> process(HttpServerRequest req,
                                   HttpServerResponse resp,
                                   String tenantId,
                                   String subpath,
                                   Map<String, List<String>> params) {
        HttpMethod method = req.method();
        if (isEmpty(tenantId)) {
            return badRequest(resp, TENANT_HEADER_NAME + " header is required");
        }
        // POST /
        if (method == POST && subpath.equals(ROOT)) {
            return createEvent(req, resp, tenantId);
        }
        // POST /data
        if (method == POST && subpath.equals(DATA)) {
            return sendEvents(req, resp, tenantId);
        }
        // PUT /tags
        if (method == PUT && subpath.equals(TAGS)) {
            return addTags(req, resp, tenantId, params);
        }
        // DELETE /tags
        if (method == DELETE && subpath.equals(TAGS)) {
            return removeTags(req, resp, tenantId, params);
        }
        // GET /
        if (method == GET && subpath.equals(ROOT)) {
            return findEvents(req, resp, tenantId, params, req.uri());
        }
        // GET /watch
        if (method == GET && subpath.equals(WATCH)) {
            return watchEvents(resp, tenantId, params);
        }
        // PUT /delete
        if (method == PUT && subpath.equals(_DELETE)) {
            return deleteEvents(req, resp, tenantId, null, params);
        }
        String[] tokens = subpath.substring(1).split(ROOT);
        // DELETE /{eventId}
        if (method == DELETE && tokens.length == 1) {
            return deleteEvents(req, resp, tenantId, tokens[0], params);
        }
        // GET /event/{eventId}
        if (method == GET && subpath.startsWith(EVENT) && tokens.length == 2) {
            return getEvent(req, resp, tenantId, tokens[1], params);
        }
        return badRequest(resp, "Wrong path " + method + " " + subpath);
    }

    /**
     * Creates a single event from the request body. Rejects events without an id
     * or category, and events whose id already exists for the tenant.
     */
    Publisher<Void> createEvent(HttpServerRequest req, HttpServerResponse resp, String tenantId) {
        return req
                .receive()
                .aggregate()
                .asString()
                .publishOn(Schedulers.elastic())
                .map(json -> {
                    Event parsed;
                    try {
                        parsed = fromJson(json, Event.class);
                        return parsed;
                    } catch (Exception e) {
                        log.errorf(e, "Error parsing Event json: %s. Reason: %s", json, e.toString());
                        throw new BadRequestException(e.toString());
                    }
                })
                .flatMap(event -> {
                    if (event == null) {
                        throw new BadRequestException("Event null.");
                    }
                    if (isEmpty(event.getId())) {
                        throw new BadRequestException("Event with id null.");
                    }
                    if (isEmpty(event.getCategory())) {
                        throw new BadRequestException("Event with category null.");
                    }
                    // The tenant always comes from the header, never from the payload.
                    event.setTenantId(tenantId);
                    if (!checkTags(event)) {
                        throw new BadRequestException("Tags " + event.getTags() + " must be non empty.");
                    }
                    Event found;
                    try {
                        found = alertsService.getEvent(tenantId, event.getId(), true);
                    } catch (IllegalArgumentException e) {
                        throw new BadRequestException("Bad arguments: " + e.getMessage());
                    } catch (Exception e) {
                        throw new InternalServerException(e.toString());
                    }
                    if (found != null) {
                        throw new BadRequestException("Event with ID [" + event.getId() + "] exists.");
                    }
                    try {
                        alertsService.addEvents(Collections.singletonList(event));
                        return ok(resp, event);
                    } catch (IllegalArgumentException e) {
                        throw new BadRequestException("Bad arguments: " + e.getMessage());
                    } catch (Exception e) {
                        throw new InternalServerException(e.toString());
                    }
                })
                .onErrorResumeWith(e -> handleExceptions(resp, e));
    }

    /**
     * Sends a collection of events through the full alerting pipeline
     * (as opposed to {@link #createEvent}, which only persists).
     */
    Publisher<Void> sendEvents(HttpServerRequest req, HttpServerResponse resp, String tenantId) {
        return req
                .receive()
                .aggregate()
                .asString()
                .publishOn(Schedulers.elastic())
                .map(json -> {
                    Collection<Event> parsed;
                    try {
                        parsed = collectionFromJson(json, Event.class);
                        return parsed;
                    } catch (Exception e) {
                        log.errorf(e, "Error parsing Event json: %s. Reason: %s", json, e.toString());
                        throw new BadRequestException(e.toString());
                    }
                })
                .flatMap(events -> {
                    if (isEmpty(events)) {
                        throw new BadRequestException("Events is empty");
                    }
                    try {
                        events.stream().forEach(ev -> ev.setTenantId(tenantId));
                        alertsService.sendEvents(events);
                        // Fix: format string previously lacked the %s placeholder,
                        // so the events were never written to the log.
                        log.debugf("Events: %s", events);
                        return ok(resp);
                    } catch (IllegalArgumentException e) {
                        throw new BadRequestException("Bad arguments: " + e.getMessage());
                    } catch (Exception e) {
                        throw new InternalServerException(e.toString());
                    }
                })
                .onErrorResumeWith(e -> handleExceptions(resp, e));
    }

    /**
     * Adds tags (comma-separated {@code eventIds} + {@code tags} query params)
     * to existing events. Both parameters are required.
     */
    Publisher<Void> addTags(HttpServerRequest req, HttpServerResponse resp, String tenantId, Map<String, List<String>> params) {
        return req
                .receive()
                .publishOn(Schedulers.elastic())
                .thenMany(Mono.fromSupplier(() -> {
                    String eventIds = null;
                    String tags = null;
                    if (params.get(PARAM_EVENT_IDS) != null) {
                        eventIds = params.get(PARAM_EVENT_IDS).get(0);
                    }
                    if (params.get(PARAM_TAGS) != null) {
                        tags = params.get(PARAM_TAGS).get(0);
                    }
                    if (isEmpty(eventIds) || isEmpty(tags)) {
                        throw new BadRequestException("EventIds and Tags required for adding tags");
                    }
                    try {
                        List<String> eventIdList = Arrays.asList(eventIds.split(","));
                        Map<String, String> tagsMap = parseTags(tags);
                        alertsService.addEventTags(tenantId, eventIdList, tagsMap);
                        log.debugf("Tagged eventIds:%s, %s", eventIdList, tagsMap);
                        return tagsMap;
                    } catch (IllegalArgumentException e) {
                        throw new BadRequestException("Bad arguments: " + e.getMessage());
                    } catch (Exception e) {
                        throw new InternalServerException(e.toString());
                    }
                }))
                .flatMap(tagsMap -> ok(resp))
                .onErrorResumeWith(e -> handleExceptions(resp, e));
    }

    /**
     * Removes tags (comma-separated {@code eventIds} + {@code tagNames} query
     * params) from existing events. Both parameters are required.
     */
    Publisher<Void> removeTags(HttpServerRequest req, HttpServerResponse resp, String tenantId, Map<String, List<String>> params) {
        return req
                .receive()
                .publishOn(Schedulers.elastic())
                .thenMany(Mono.fromSupplier(() -> {
                    String eventIds = null;
                    String tagNames = null;
                    if (params.get(PARAM_EVENT_IDS) != null) {
                        eventIds = params.get(PARAM_EVENT_IDS).get(0);
                    }
                    if (params.get(PARAM_TAG_NAMES) != null) {
                        tagNames = params.get(PARAM_TAG_NAMES).get(0);
                    }
                    if (isEmpty(eventIds) || isEmpty(tagNames)) {
                        throw new BadRequestException("EventIds and Tags required for removing tags");
                    }
                    try {
                        Collection<String> ids = Arrays.asList(eventIds.split(","));
                        Collection<String> tags = Arrays.asList(tagNames.split(","));
                        alertsService.removeEventTags(tenantId, ids, tags);
                        log.debugf("Untagged eventsIds:%s, %s", ids, tags);
                        return tags;
                    } catch (IllegalArgumentException e) {
                        throw new BadRequestException("Bad arguments: " + e.getMessage());
                    } catch (Exception e) {
                        throw new InternalServerException(e.toString());
                    }
                }))
                .flatMap(tags -> ok(resp))
                .onErrorResumeWith(e -> handleExceptions(resp, e));
    }

    /**
     * Finds events matching the criteria built from the query parameters,
     * returning a (possibly paginated) page of results.
     */
    Publisher<Void> findEvents(HttpServerRequest req, HttpServerResponse resp, String tenantId, Map<String, List<String>> params, String uri) {
        return req
                .receive()
                .publishOn(Schedulers.elastic())
                .thenMany(Mono.fromSupplier(() -> {
                    try {
                        Pager pager = extractPaging(params);
                        EventsCriteria criteria = buildCriteria(params);
                        Page<Event> eventPage = alertsService.getEvents(tenantId, criteria, pager);
                        log.debugf("Events: %s", eventPage);
                        return eventPage;
                    } catch (IllegalArgumentException e) {
                        throw new BadRequestException("Bad arguments: " + e.getMessage());
                    } catch (Exception e) {
                        log.debug(e.getMessage(), e);
                        throw new InternalServerException(e.toString());
                    }
                }))
                .flatMap(eventPage -> {
                    if (isEmpty(eventPage)) {
                        return ok(resp, eventPage);
                    }
                    return paginatedOk(req, resp, eventPage, uri);
                })
                .onErrorResumeWith(e -> handleExceptions(resp, e));
    }

    /**
     * Streams matching events to the client as newline-delimited JSON until the
     * client disconnects. The watcher is disposed when the flux is cancelled.
     */
    Publisher<Void> watchEvents(HttpServerResponse resp, String tenantId, Map<String, List<String>> params) {
        EventsCriteria criteria = buildCriteria(params);
        Flux<String> watcherFlux = Flux.create(sink -> {
            Long watchInterval = null;
            if (params.get(PARAM_WATCH_INTERVAL) != null) {
                watchInterval = Long.valueOf(params.get(PARAM_WATCH_INTERVAL).get(0));
            }
            EventsListener listener = event -> {
                sink.next(toJson(event) + "\r\n");
            };
            String channelId = resp.context().channel().id().asShortText();
            EventsWatcher watcher = new EventsWatcher(channelId, listener, Collections.singleton(tenantId), criteria, watchInterval);
            sink.onCancel(() -> watcher.dispose());
            watcher.start();
        });
        resp.status(OK);
        // The watcher sends events one by one, so the flux is split into windows of one element
        // to flush each event to the client as soon as it arrives.
        return watcherFlux.window(1).concatMap(w -> resp.sendString(w));
    }

    /**
     * Deletes events matching the criteria, or a single event when {@code eventId}
     * is non-null. Responds 404 when a specific event id matched nothing.
     */
    Publisher<Void> deleteEvents(HttpServerRequest req, HttpServerResponse resp, String tenantId, String eventId, Map<String, List<String>> params) {
        return req
                .receive()
                .publishOn(Schedulers.elastic())
                .thenMany(Mono.fromSupplier(() -> {
                    int numDeleted = -1;
                    try {
                        EventsCriteria criteria = buildCriteria(params);
                        criteria.setEventId(eventId);
                        numDeleted = alertsService.deleteEvents(tenantId, criteria);
                        // Fix: format string previously lacked the %s placeholder,
                        // so the count was never written to the log.
                        log.debugf("Events deleted: %s", numDeleted);
                    } catch (IllegalArgumentException e) {
                        throw new BadRequestException("Bad arguments: " + e.getMessage());
                    } catch (Exception e) {
                        log.debug(e.getMessage(), e);
                        throw new InternalServerException(e.toString());
                    }
                    if (numDeleted <= 0 && eventId != null) {
                        throw new ResponseUtil.NotFoundException("Event " + eventId + " doesn't exist for delete");
                    }
                    Map<String, Integer> deleted = new HashMap<>();
                    // Integer.valueOf over the deprecated Integer constructor.
                    deleted.put("deleted", Integer.valueOf(numDeleted));
                    return deleted;
                }))
                .flatMap(deleted -> ok(resp, deleted))
                .onErrorResumeWith(e -> handleExceptions(resp, e));
    }

    /**
     * Fetches a single event by id; honors the {@code thin} query parameter.
     * Responds 404 when the event does not exist.
     */
    Publisher<Void> getEvent(HttpServerRequest req, HttpServerResponse resp, String tenantId, String eventId, Map<String, List<String>> params) {
        return req
                .receive()
                .publishOn(Schedulers.elastic())
                .thenMany(Mono.fromSupplier(() -> {
                    boolean thin = false;
                    if (params.get(PARAM_THIN) != null) {
                        thin = Boolean.valueOf(params.get(PARAM_THIN).get(0));
                    }
                    Event found;
                    try {
                        found = alertsService.getEvent(tenantId, eventId, thin);
                        if (found != null) {
                            return found;
                        }
                    } catch (IllegalArgumentException e) {
                        throw new BadRequestException("Bad arguments: " + e.getMessage());
                    } catch (Exception e) {
                        log.debug(e.getMessage(), e);
                        throw new InternalServerException(e.toString());
                    }
                    throw new ResponseUtil.NotFoundException("eventId: " + eventId + " not found");
                }))
                .flatMap(found -> ok(resp, found))
                .onErrorResumeWith(e -> handleExceptions(resp, e));
    }

    /**
     * Builds an {@link EventsCriteria} from the supported query parameters.
     * When both {@code tags} (legacy) and {@code tagQuery} are supplied, the
     * legacy {@code tags} value wins and is converted into a tag query.
     */
    EventsCriteria buildCriteria(Map<String, List<String>> params) {
        Long startTime = null;
        Long endTime = null;
        String eventIds = null;
        String triggerIds = null;
        String categories = null;
        String tags = null;
        String tagQuery = null;
        boolean thin = false;
        if (params.get(PARAM_START_TIME) != null) {
            startTime = Long.valueOf(params.get(PARAM_START_TIME).get(0));
        }
        if (params.get(PARAM_END_TIME) != null) {
            endTime = Long.valueOf(params.get(PARAM_END_TIME).get(0));
        }
        if (params.get(PARAM_EVENT_IDS) != null) {
            eventIds = params.get(PARAM_EVENT_IDS).get(0);
        }
        if (params.get(PARAM_TRIGGER_IDS) != null) {
            triggerIds = params.get(PARAM_TRIGGER_IDS).get(0);
        }
        if (params.get(PARAM_CATEGORIES) != null) {
            categories = params.get(PARAM_CATEGORIES).get(0);
        }
        if (params.get(PARAM_TAGS) != null) {
            tags = params.get(PARAM_TAGS).get(0);
        }
        if (params.get(PARAM_TAG_QUERY) != null) {
            tagQuery = params.get(PARAM_TAG_QUERY).get(0);
        }
        String unifiedTagQuery;
        if (!isEmpty(tags)) {
            unifiedTagQuery = parseTagQuery(parseTags(tags));
        } else {
            unifiedTagQuery = tagQuery;
        }
        if (params.get(PARAM_THIN) != null) {
            thin = Boolean.valueOf(params.get(PARAM_THIN).get(0));
        }
        return new EventsCriteria(startTime, endTime, eventIds, triggerIds, categories, unifiedTagQuery, thin);
    }
}
| apache-2.0 |
enableiot/iotanalytics-gearpump-rule-engine | src/main/java/com/intel/ruleengine/gearpump/graph/ParallelismDefinition.java | 1398 | package com.intel.ruleengine.gearpump.graph;
/*
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
 * Defines the parallelism (number of processor instances) for each stage of the
 * rule-engine graph. Every stage currently runs as a single instance.
 */
class ParallelismDefinition {

    /** Shared default: one instance per processor type. */
    private static final int SINGLE_INSTANCE = 1;

    public int getSendAlertsProcessorsNumber() {
        return SINGLE_INSTANCE;
    }

    public int getKafkaSourceProcessorsNumber() {
        return SINGLE_INSTANCE;
    }

    public int getCheckObservationInRulesProcessorsNumber() {
        return SINGLE_INSTANCE;
    }

    public int getDownloadRulesProcessorsNumber() {
        return SINGLE_INSTANCE;
    }

    public int getPersistRulesProcessorsNumber() {
        return SINGLE_INSTANCE;
    }

    public int getPersistComponentAlertsProccesorsNumber() {
        return SINGLE_INSTANCE;
    }

    public int getCheckRulesProcessorsNumber() {
        return SINGLE_INSTANCE;
    }

    public int getRulesForComponentProcessorsNumber() {
        return SINGLE_INSTANCE;
    }

    public int getPersistObservationProcessorsNumber() {
        return SINGLE_INSTANCE;
    }
}
| apache-2.0 |
ashishbhandari/AndroidTabbedDialog | app/src/main/java/com/androidifygeeks/tabdialogs/MainActivity.java | 4540 | package com.androidifygeeks.tabdialogs;
import android.support.v4.app.Fragment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import com.androidifygeeks.library.fragment.PageFragment;
import com.androidifygeeks.library.fragment.TabDialogFragment;
import com.androidifygeeks.library.iface.IFragmentListener;
import com.androidifygeeks.library.iface.ISimpleDialogCancelListener;
import com.androidifygeeks.library.iface.ISimpleDialogListener;
import java.util.HashSet;
import java.util.Set;
/**
 * Demo activity: shows a tabbed dialog on a button press and reacts to the
 * dialog's button/cancel callbacks, populating each tab's content when the
 * corresponding page fragment's view is created.
 */
public class MainActivity extends AppCompatActivity implements ISimpleDialogListener, ISimpleDialogCancelListener, IFragmentListener {
    // Request code used to correlate dialog callbacks with the dialog we opened.
    private static final int REQUEST_TABBED_DIALOG = 42;
    private static final String TAG = MainActivity.class.getSimpleName();
    // Page fragments currently attached; tracked via the IFragmentListener callbacks.
    private final Set<Fragment> mMyScheduleFragments = new HashSet<>();
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Open a two-tab dialog with OK / Cancel / Neutral buttons when the button is tapped.
        findViewById(R.id.testbutton).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                TabDialogFragment.createBuilder(MainActivity.this, getSupportFragmentManager())
                        .setTitle("Title")
                        .setSubTitle("Subtitle")
                        .setTabButtonText(new CharSequence[]{"Tab1", "Tab2"})
                        .setPositiveButtonText("Ok")
                        .setNegativeButtonText("Cancel")
                        .setNeutralButtonText("Neutral")
                        .setRequestCode(REQUEST_TABBED_DIALOG)
                        .show();
            }
        });
    }
    // Dialog dismissed without a button press.
    @Override
    public void onCancelled(int requestCode) {
        switch (requestCode) {
            case REQUEST_TABBED_DIALOG:
                Toast.makeText(MainActivity.this, "Dialog cancelled", Toast.LENGTH_SHORT).show();
                break;
            default:
                break;
        }
    }
    @Override
    public void onNegativeButtonClicked(int requestCode) {
        if (requestCode == REQUEST_TABBED_DIALOG) {
            Toast.makeText(MainActivity.this, "Negative button clicked", Toast.LENGTH_SHORT).show();
        }
    }
    @Override
    public void onNeutralButtonClicked(int requestCode) {
        if (requestCode == REQUEST_TABBED_DIALOG) {
            Toast.makeText(MainActivity.this, "Neutral button clicked", Toast.LENGTH_SHORT).show();
        }
    }
    @Override
    public void onPositiveButtonClicked(int requestCode) {
        if (requestCode == REQUEST_TABBED_DIALOG) {
            Toast.makeText(MainActivity.this, "Positive button clicked", Toast.LENGTH_SHORT).show();
        }
    }
    /**
     * Called once a tab page's view exists; fills the page's root container with
     * the content for the tab the fragment represents.
     */
    @Override
    public void onFragmentViewCreated(Fragment fragment) {
        // The tab index was stashed in the fragment's arguments by the dialog library.
        int selectedTabPosition = fragment.getArguments().getInt(PageFragment.ARG_DAY_INDEX, 0);
        View rootContainer = fragment.getView().findViewById(R.id.root_container);
        Log.i(TAG, "Position: " + selectedTabPosition);
        switch (selectedTabPosition) {
            case 0:
                selectedTabPositionZeroCase(rootContainer);
                break;
            case 1:
                selectedTabPositionOneCase(rootContainer);
                break;
            default:
                break;
        }
    }
    private void selectedTabPositionZeroCase(View rootContainer) {
        // add view in container for first tab
        View tabProductDetailLayout = getLayoutInflater().inflate(R.layout.tab_one_layout, (ViewGroup) rootContainer);
        TextView textView = (TextView) tabProductDetailLayout.findViewById(R.id.text_view);
        textView.setText("hello: tab1");
    }
    private void selectedTabPositionOneCase(View rootContainer) {
        // add view in container for second tab
        View tabProductDetailLayout2 = getLayoutInflater().inflate(R.layout.tab_one_layout, (ViewGroup) rootContainer);
        TextView textView1 = (TextView) tabProductDetailLayout2.findViewById(R.id.text_view);
        textView1.setText("hello: tab2");
    }
    // Keep the attached-fragment set in sync with the fragment lifecycle.
    @Override
    public void onFragmentAttached(Fragment fragment) {
        mMyScheduleFragments.add(fragment);
    }
    @Override
    public void onFragmentDetached(Fragment fragment) {
        mMyScheduleFragments.remove(fragment);
    }
}
| apache-2.0 |
lantunes/fixd | src/main/java/org/bigtesting/fixd/session/RequestParamSessionHandler.java | 1073 | /*
* Copyright (C) 2015 BigTesting.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bigtesting.fixd.session;
import java.util.Set;
import org.bigtesting.fixd.request.HttpRequest;
/**
*
* @author Luis Antunes
*/
/**
 * Session handler that seeds a newly created session with every request
 * parameter, keyed by the parameter's own name.
 */
public class RequestParamSessionHandler implements SessionHandler {

    public void onCreate(HttpRequest request) {
        for (String name : request.getRequestParameterNames()) {
            request.getSession().set(name, request.getRequestParameter(name));
        }
    }
}
| apache-2.0 |
efsavage/ajah | ajah-user/src/main/java/com/ajah/user/email/data/EmailVerificationNotFoundException.java | 1796 | /*
* Copyright 2014 Eric F. Savage, code@efsavage.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ajah.user.email.data;
import com.ajah.user.email.EmailVerification;
import com.ajah.user.email.EmailVerificationId;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
* Thrown when an {@link EmailVerification} was expected to be found, but was
* not.
*
* @author <a href="http://efsavage.com">Eric F. Savage</a>, <a
* href="mailto:code@efsavage.com">code@efsavage.com</a>.
*/
@Data
@EqualsAndHashCode(callSuper = true)
public class EmailVerificationNotFoundException extends Exception {

    /** The internal ID that was sought; null when the lookup was by code. */
    private final EmailVerificationId id;
    /** The code that was sought; null when the lookup was by internal ID. */
    private final String code;

    /**
     * Thrown when an {@link EmailVerification} could not be found by it's
     * internal ID.
     *
     * @param id
     *            The internal ID that was sought.
     */
    public EmailVerificationNotFoundException(final EmailVerificationId id) {
        super("ID: " + id);
        this.id = id;
        this.code = null;
    }

    /**
     * Thrown when an {@link EmailVerification} could not be found by it's code.
     *
     * @param code
     *            The code that was sought.
     */
    public EmailVerificationNotFoundException(final String code) {
        // Fix: this constructor previously set no exception message, unlike the
        // ID-based constructor; keep the two consistent for diagnostics.
        super("Code: " + code);
        this.id = null;
        this.code = code;
    }
}
| apache-2.0 |
McLeodMoores/starling | projects/util/src/main/java/com/opengamma/util/result/Result.java | 15092 | /**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.util.result;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.helpers.MessageFormatter;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableSet;
import com.opengamma.util.ArgumentChecker;
/**
* The immutable result from a calculation containing an indication
* of whether a value has been calculated.
* Results can be generated using the factory methods on this class.
*
* @param <T> the type of the underlying result for a successful invocation
*/
public abstract class Result<T> {
/**
* Indicates if this result represents a successful call and has a result available.
*
* @return true if the result represents a success and a value is available
*/
public abstract boolean isSuccess();
/**
* Return the actual result value if calculated successfully.
* <p>
* If it has not been calculated then an IllegalStateException will be thrown.
* To avoid this, check the result status using {@link #isSuccess()} or
* {@link #getStatus()} first.
*
* @return the value if calculated successfully, not null
* @throws IllegalArgumentException if called when the result has not been successfully calculated
*/
public abstract T getValue();
/**
* Indicates the status of this result.
* <p>
* It is up to the client to decide if it is able to handle the status or
* decline to handle. In general it is easier to call {@link #isSuccess()}.
*
* @return the status of this function result
*/
public abstract ResultStatus getStatus();
/**
* Return the message associated with a failure event.
* <p>
* If the calculation was actually successful then an an IllegalStateException will be thrown.
* To avoid this, check the result status using {@link #isSuccess()}
* or {@link #getStatus()} first.
*
* @return the failure message if calculation was unsuccessful, not null
* @throws IllegalStateException if called on a success result
*/
public abstract String getFailureMessage();
/**
* Gets the collection of failure instances that are associated with this result.
* <p>
* If the calculation was actually successful then an an IllegalStateException will be thrown.
* To avoid this, check the result status using {@link #isSuccess()}
* or {@link #getStatus()} first.
*
* @return the failures associated with a failure result, empty if successful
*/
public abstract ImmutableSet<Failure> getFailures();
/**
 * Applies a function to a result's value if the result is a success. If the result is a failure then a failure is returned without applying the function.
 * Useful for applying logic to a successful result using Java 8 lambdas without having to check the status.
 *
 * <pre>
 * result = ...
 * return result.ifSuccess(value -> doSomething(value));
 * </pre>
 *
 * Identical to {@link #flatMap}, which simply delegates to this method.
 *
 * @param <U>
 *          the required type of the new result object
 * @param function
 *          the function to transform the value with, not null
 * @return the new result, not null
 */
public abstract <U> Result<U> ifSuccess(Function<T, Result<U>> function);
/**
 * Applies a function to a result's value if the result is a success. If the result is a failure then a failure is returned without applying the function.
 * Useful for applying logic to a successful result using Java 8 lambdas without having to check the status.
 *
 * <pre>
 * result = ...
 * return result.flatMap(value -> doSomething(value));
 * </pre>
 *
 * Identical to {@link #ifSuccess}.
 *
 * @param <U>
 *          the required type of the new result object
 * @param function
 *          the function to transform the value with, not null
 * @return the new result, not null
 */
public <U> Result<U> flatMap(final Function<T, Result<U>> function) {
  // alias for the abstract ifSuccess hook implemented by subclasses
  return ifSuccess(function);
}
/**
 * Combines this result's value with another result's value using a binary function if both are successes. If either result is a failure then a failure is
 * returned without applying the function. Useful for applying logic to successful results using Java 8 lambdas without having to check the statuses.
 *
 * <pre>
 * result1 = ...
 * result2 = ...
 * return result1.combineWith(result2, (value1, value2) -> doSomething(value1, value2));
 * </pre>
 *
 * @param other
 *          another result
 * @param function
 *          a function for combining values from two results
 * @param <U>
 *          the type of the other result's value
 * @param <V>
 *          the type of the value in the returned result
 * @return the result of combining the result values, or a failure if either result is a failure
 */
public abstract <U, V> Result<V> combineWith(Result<U> other, Function2<T, U, Result<V>> function);
//-------------------------------------------------------------------------
/**
 * Indicates if there is a result value available from this instance.
 * <p>
 * This generally means that the calculation has been successfully performed,
 * but for calculations that may return partial results (e.g. market data
 * requests) this method will also return true. To distinguish between these
 * cases, check the result status using {@link #getStatus()}.
 *
 * @return true if a value is available
 * @deprecated use {@link #isSuccess()}
 */
@Deprecated
public boolean isValueAvailable() {
  return isSuccess();
}
/**
 * Propagate a failure result, ensuring that its generic type signature
 * matches the one required.
 *
 * @param <U> the required type of the new result object
 * @return the new function result object, not null
 * @throws IllegalArgumentException if invoked on a successful result
 *         (thrown by the delegate {@link #failure(Result)})
 * @deprecated use {@link #failure(Result)}
 */
@Deprecated
public <U> Result<U> propagateFailure() {
  return Result.failure(this);
}
/**
 * Applies a function to a result's value if the result is a success.
 * If the result is a failure then a failure is returned without applying the function.
 *
 * @param <U> the required type of the new result object
 * @param function the mapper object to transform the value with, not null
 * @return the new result, not null
 * @deprecated use {@link #ifSuccess(Function)} or {@link #flatMap(Function)}
 */
@Deprecated
public <U> Result<U> map(final ResultMapper<T, U> function) {
  return flatMap(new Function<T, Result<U>>() {
    @Override
    public Result<U> apply(final T input) {
      // use the supplied input rather than re-fetching via getValue();
      // flatMap only invokes this callback on success, where input is the value
      return function.map(input);
    }
  });
}
//-------------------------------------------------------------------------
/**
 * Creates a successful result wrapping a value.
 *
 * @param value the result value
 * @param <U> the type of the value
 * @return a successful result wrapping the value, not null
 */
public static <U> Result<U> success(final U value) {
  return new SuccessResult<>(value);
}
/**
 * Creates a failed result.
 * <p>
 * Formatting of the error message uses placeholders as per SLF4J.
 * Each {} in the message is replaced by the next message argument.
 *
 * @param status the result status
 * @param message a message explaining the failure, uses the SLF4J message format for inserting {@code messageArgs}
 * @param messageArgs the arguments for the message
 * @param <U> the expected type of the result
 * @return a failure result carrying a single {@link Failure}, not null
 */
public static <U> Result<U> failure(final FailureStatus status, final String message, final Object... messageArgs) {
  return FailureResult.of(new Failure(status, formatMessage(message, messageArgs)));
}
/**
 * Creates a failed result caused by an exception.
 * <p>
 * Formatting of the error message uses placeholders as per SLF4J.
 * Each {} in the message is replaced by the next message argument.
 *
 * @param exception the cause of the failure
 * @param message a message explaining the failure, uses the SLF4J message format for inserting {@code messageArgs}
 * @param messageArgs the arguments for the message
 * @param <U> the expected type of the result
 * @return a failure result carrying a single {@link Failure}, not null
 */
public static <U> Result<U> failure(final Exception exception, final String message, final Object... messageArgs) {
  return FailureResult.of(new Failure(exception, formatMessage(message, messageArgs)));
}
/**
 * Creates a failed result caused by an exception.
 *
 * @param exception the cause of the failure
 * @param <U> the expected type of the result
 * @return a failure result carrying a single {@link Failure}, not null
 */
public static <U> Result<U> failure(final Exception exception) {
  return FailureResult.of(new Failure(exception));
}
/**
 * Creates a failed result caused by an exception with a specified status.
 *
 * @param status the result status
 * @param exception the cause of the failure
 * @param <U> the expected type of the result
 * @return a failure result carrying a single {@link Failure}, not null
 */
public static <U> Result<U> failure(final FailureStatus status, final Exception exception) {
  return FailureResult.of(new Failure(status, exception));
}
/**
 * Creates a failed result caused by an exception with a specified status and message.
 * <p>
 * Formatting of the error message uses placeholders as per SLF4J.
 * Each {} in the message is replaced by the next message argument.
 *
 * @param status the result status
 * @param exception the cause of the failure
 * @param message a message explaining the failure, uses the SLF4J message format for inserting {@code messageArgs}
 * @param messageArgs the arguments for the message
 * @param <U> the expected type of the result
 * @return a failure result carrying a single {@link Failure}, not null
 */
public static <U> Result<U> failure(final FailureStatus status, final Exception exception, final String message, final Object... messageArgs) {
  return FailureResult.of(new Failure(status, formatMessage(message, messageArgs), exception));
}
/**
 * Formats the message using the SLF4J {@code MessageFormatter}, replacing each
 * {} placeholder with the next argument in order.
 *
 * @param message the message pattern, may contain {} placeholders
 * @param messageArgs the arguments for the message
 * @return the formatted message
 */
private static String formatMessage(final String message, final Object[] messageArgs) {
  return MessageFormatter.arrayFormat(message, messageArgs).getMessage();
}
/**
 * Returns a failed result from another failed result.
 * This method ensures the result type matches the expected type.
 *
 * @param result a failure result
 * @param <U> the expected result type
 * @return a failure result of the expected type
 * @throws IllegalArgumentException if the result is a success
 */
@SuppressWarnings("unchecked")
public static <U> Result<U> failure(final Result<?> result) {
  if (result.isSuccess()) {
    throw new IllegalArgumentException("Result must be a failure");
  }
  // cast is safe: a failure never exposes a value of its type parameter
  // (getValue throws on failure results, per the contract above)
  return (Result<U>) result;
}
/**
 * Creates a failed result that aggregates the failures of several other results.
 * <p>
 * The inputs may be successes or failures; only the failures contribute to the
 * created result. Intended to be used with {@link #anyFailures(Result[])}:
 * <code>
 * if (Result.anyFailures(result1, result2, result3)) {
 *   return Result.failure(result1, result2, result3);
 * }
 * </code>
 *
 * @param result1 the first result, not null
 * @param result2 the second result, not null
 * @param results any further results
 * @param <U> the expected type of the result
 * @return a failed result wrapping the failures of all failed inputs
 * @throws IllegalArgumentException if every input is a success
 */
public static <U> Result<U> failure(final Result<?> result1, final Result<?> result2, final Result<?>... results) {
  ArgumentChecker.notNull(result1, "result1");
  ArgumentChecker.notNull(result2, "result2");
  final List<Result<?>> candidates = new ArrayList<>();
  candidates.add(result1);
  candidates.add(result2);
  for (final Result<?> extra : results) {
    candidates.add(extra);
  }
  final List<Failure> collected = new ArrayList<>();
  for (final Result<?> candidate : candidates) {
    if (candidate.isSuccess()) {
      continue;
    }
    collected.addAll(candidate.getFailures());
  }
  if (collected.isEmpty()) {
    throw new IllegalArgumentException("All results were successes");
  }
  return FailureResult.of(collected);
}
/**
 * Creates a failed result that aggregates the failures of several other results.
 * <p>
 * The inputs may be successes or failures; only the failures contribute to the
 * created result. Intended to be used with {@link #anyFailures(Iterable)}:
 * <code>
 * if (Result.anyFailures(results)) {
 *   return Result.failure(results);
 * }
 * </code>
 *
 * @param results multiple results, of which at least one must be a failure, not empty
 * @param <U> the expected type of the result
 * @return a failed result wrapping the failures of all failed inputs
 * @throws IllegalArgumentException if results is empty or contains nothing but successes
 */
public static <U> Result<U> failure(final Iterable<Result<?>> results) {
  ArgumentChecker.notEmpty(results, "results");
  final List<Failure> collected = new ArrayList<>();
  for (final Result<?> candidate : results) {
    if (candidate.isSuccess()) {
      continue;
    }
    collected.addAll(candidate.getFailures());
  }
  if (collected.isEmpty()) {
    throw new IllegalArgumentException("All results were successes");
  }
  return FailureResult.of(collected);
}
//-------------------------------------------------------------------------
/**
 * Checks whether every supplied result is a success.
 *
 * @param results the results to examine
 * @return true if no result is a failure (vacuously true when no results are given)
 */
public static boolean allSuccessful(final Result<?>... results) {
  for (final Result<?> candidate : results) {
    if (candidate.isSuccess()) {
      continue;
    }
    return false;
  }
  return true;
}
/**
 * Checks whether every result in the collection is a success.
 *
 * @param results the results to examine
 * @return true if no result is a failure (vacuously true for an empty iterable)
 */
public static boolean allSuccessful(final Iterable<? extends Result<?>> results) {
  for (final Result<?> candidate : results) {
    if (candidate.isSuccess()) {
      continue;
    }
    return false;
  }
  return true;
}
/**
 * Checks if any of the results are failures.
 *
 * @param results the results to check
 * @return true if any of the results are failures
 */
public static boolean anyFailures(final Result<?>... results) {
  // simple negation of the all-success check
  return !allSuccessful(results);
}
/**
 * Checks if any of the results are failures.
 *
 * @param results the results to check
 * @return true if any of the results are failures
 */
public static boolean anyFailures(final Iterable<? extends Result<?>> results) {
  // simple negation of the all-success check
  return !allSuccessful(results);
}
}
| apache-2.0 |
culmat/gitblit | src/main/java/com/gitblit/utils/MessageProcessor.java | 5228 | /*
* Copyright 2013 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit.utils;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.Repository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gitblit.IStoredSettings;
import com.gitblit.Keys;
import com.gitblit.models.RepositoryModel;
import com.syntevo.bugtraq.BugtraqConfig;
import com.syntevo.bugtraq.BugtraqFormatter;
import com.syntevo.bugtraq.BugtraqFormatter.OutputHandler;
public class MessageProcessor {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    private final IStoredSettings settings;

    /**
     * @param settings the Gitblit settings used to look up regex substitution keys
     */
    public MessageProcessor(IStoredSettings settings) {
        this.settings = settings;
    }

    /**
     * Returns an html version of the commit message with any global or
     * repository-specific regular expression substitution applied.
     *
     * This method uses the preferred renderer to transform the commit message.
     * If markdown rendering fails, the message falls back to plain-text rendering.
     *
     * @param repository
     * @param model
     * @param text
     * @return html version of the commit message
     */
    public String processCommitMessage(Repository repository, RepositoryModel model, String text) {
        switch (model.commitMessageRenderer) {
        case MARKDOWN:
            try {
                String prepared = processCommitMessageRegex(repository, model.name, text);
                return MarkdownUtils.transformMarkdown(prepared);
            } catch (Exception e) {
                // fall through to the plain-text path below
                logger.error("Failed to render commit message as markdown", e);
            }
            break;
        default:
            // noop
            break;
        }
        return processPlainCommitMessage(repository, model.name, text);
    }

    /**
     * Returns an html version of the commit message with any global or
     * repository-specific regular expression substitution applied.
     *
     * This method assumes the commit message is plain text.
     *
     * @param repository
     * @param repositoryName
     * @param text
     * @return html version of the commit message
     */
    public String processPlainCommitMessage(Repository repository, String repositoryName, String text) {
        // escape first so that substitutions operate on html-safe text
        String html = StringUtils.escapeForHtml(text, false);
        html = processCommitMessageRegex(repository, repositoryName, html);
        return StringUtils.breakLinesForHtml(html);
    }

    /**
     * Apply globally or per-repository specified regex substitutions to the
     * commit message.
     *
     * Each substitution key holds a "match!!!replacement" definition; keys that
     * do not contain the !!! separator are logged and skipped.
     *
     * @param repository
     * @param repositoryName
     * @param text
     * @return the processed commit message
     */
    protected String processCommitMessageRegex(Repository repository, String repositoryName, String text) {
        Map<String, String> map = new HashMap<>();

        // global regex keys
        if (settings.getBoolean(Keys.regex.global, false)) {
            for (String key : settings.getAllKeys(Keys.regex.global)) {
                if (!key.equals(Keys.regex.global)) {
                    String subKey = key.substring(key.lastIndexOf('.') + 1);
                    map.put(subKey, settings.getString(key, ""));
                }
            }
        }

        // repository-specific regex keys are added last so they override
        // global definitions with the same sub-key
        List<String> keys = settings.getAllKeys(Keys.regex._ROOT + "."
                + repositoryName.toLowerCase());
        for (String key : keys) {
            String subKey = key.substring(key.lastIndexOf('.') + 1);
            map.put(subKey, settings.getString(key, ""));
        }

        for (Entry<String, String> entry : map.entrySet()) {
            String definition = entry.getValue().trim();
            String[] chunks = definition.split("!!!");
            if (chunks.length == 2) {
                text = text.replaceAll(chunks[0], chunks[1]);
            } else {
                logger.warn(entry.getKey()
                        + " improperly formatted. Use !!! to separate match from replacement: "
                        + definition);
            }
        }

        try {
            // parse bugtraq repo config
            BugtraqConfig config = BugtraqConfig.read(repository);
            if (config != null) {
                BugtraqFormatter formatter = new BugtraqFormatter(config);
                StringBuilder sb = new StringBuilder();
                formatter.formatLogMessage(text, new BugtraqOutputHandler(sb));
                text = sb.toString();
            }
        } catch (IOException | ConfigInvalidException e) {
            // both failure modes receive identical handling, so use multi-catch
            logger.error(MessageFormat.format("Bugtraq config for {0} is invalid!", repositoryName), e);
        }

        return text;
    }

    /**
     * Collects Bugtraq formatter output into a StringBuilder, rendering
     * recognized issue references as html anchor tags.
     */
    private static class BugtraqOutputHandler implements OutputHandler {

        final StringBuilder sb;

        BugtraqOutputHandler(StringBuilder sb) {
            this.sb = sb;
        }

        @Override
        public void appendText(String text) {
            sb.append(text);
        }

        @Override
        public void appendLink(String name, String target) {
            sb.append(MessageFormat.format("<a class=\"bugtraq\" href=\"{1}\">{0}</a>", name, target));
        }
    }
}
| apache-2.0 |
terma/gigaspace-routine | src/main/java/com/github/terma/gigaspaceroutine/sources/Source.java | 1345 | /*
Copyright 2015 Artem Stasiuk
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.github.terma.gigaspaceroutine.sources;
import com.gigaspaces.document.SpaceDocument;
import com.gigaspaces.query.IdsQuery;
import com.j_spaces.core.client.SQLQuery;
import org.openspaces.core.GigaSpace;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
abstract public class Source implements Serializable {
    /**
     * Creates a source backed by an in-memory list.
     * <p>
     * NOTE(review): this factory is an unimplemented stub — it ignores its
     * argument and always returns {@code null}, which will surface as an NPE
     * at the call site. Confirm whether a list-backed Source implementation
     * is missing, or remove this method.
     */
    public static Source fromList(List list) {
        return null;
    }
    /**
     * Creates a source that fetches documents by their ids.
     */
    public static Source fromQueryByIds(IdsQuery query) {
        return new IdsSource(query);
    }
    /**
     * Creates a source that fetches documents matching an SQL query.
     * NOTE(review): despite the name, this overload wraps a {@code QuerySource},
     * not an id lookup — confirm whether it should be named {@code fromQuery}.
     */
    public static Source fromQueryByIds(SQLQuery<SpaceDocument> query) {
        return new QuerySource(query);
    }
    // Fetches all documents this source describes from the given space.
    public abstract ArrayList<SpaceDocument> fetch(GigaSpace gigaSpace);
    // Narrows this source to the given document uids.
    public abstract Source toIds(List<Serializable> uids);
}
| apache-2.0 |
alyphen/atherna | src/main/java/com/seventh_root/atherna/stat/AthernaStatManager.java | 2358 | package com.seventh_root.atherna.stat;
import com.seventh_root.atherna.Atherna;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import static java.util.logging.Level.SEVERE;
/**
 * Loads {@code AthernaStat} rows from the plugin's database.
 */
public class AthernaStatManager {

    /** SQL fetching a single stat row by its primary key. */
    private static final String SELECT_BY_ID =
            "SELECT id, name FROM atherna_stat WHERE id = ? LIMIT 1";

    /** SQL fetching all stat rows. */
    private static final String SELECT_ALL = "SELECT id, name FROM atherna_stat";

    private Atherna plugin;

    public AthernaStatManager(Atherna plugin) {
        this.plugin = plugin;
    }

    /**
     * Looks up a single stat by its database id.
     *
     * @param id the stat id
     * @return the stat, or null if it does not exist or the lookup failed
     */
    public AthernaStat getById(int id) {
        Connection connection = plugin.getDatabaseConnection();
        if (connection == null) {
            plugin.getLogger().log(SEVERE, "Database connection is not available. Cannot load stat.");
            return null;
        }
        // try-with-resources closes the statement (and its result set) even on
        // error; the original leaked both. The shared connection is not closed.
        try (PreparedStatement statement = connection.prepareStatement(SELECT_BY_ID)) {
            statement.setInt(1, id);
            try (ResultSet resultSet = statement.executeQuery()) {
                if (resultSet.next()) {
                    return readStat(resultSet);
                }
            }
        } catch (SQLException exception) {
            plugin.getLogger().log(SEVERE, "An SQL exception occurred while attempting to retrieve a stat", exception);
        }
        return null;
    }

    /**
     * Loads all stats from the database.
     *
     * @return the stats, or null if the database is unavailable or the query failed
     */
    public List<AthernaStat> getStats() {
        Connection connection = plugin.getDatabaseConnection();
        if (connection == null) {
            plugin.getLogger().log(SEVERE, "Database connection is not available. Cannot load stat.");
            return null;
        }
        try (PreparedStatement statement = connection.prepareStatement(SELECT_ALL);
                ResultSet resultSet = statement.executeQuery()) {
            List<AthernaStat> stats = new ArrayList<>();
            while (resultSet.next()) {
                stats.add(readStat(resultSet));
            }
            return stats;
        } catch (SQLException exception) {
            plugin.getLogger().log(SEVERE, "An SQL exception occurred while attempting to retrieve a stat", exception);
        }
        return null;
    }

    /** Maps the current row of the result set to an {@code AthernaStat}. */
    private AthernaStat readStat(ResultSet resultSet) throws SQLException {
        return new AthernaStat(resultSet.getInt("id"), resultSet.getString("name"));
    }
}
| apache-2.0 |
vam-google/google-cloud-java | google-api-grpc/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/AudioConfigProto.java | 5976 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/audio_config.proto
package com.google.cloud.dialogflow.v2beta1;
public final class AudioConfigProto {
  // NOTE(review): this class is generated by protoc from
  // google/cloud/dialogflow/v2beta1/audio_config.proto; do not hand-edit —
  // regenerate from the .proto instead. Comments here are review notes only.
  private AudioConfigProto() {}

  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}

  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }

  // Descriptor / field-accessor pairs for each message type in the proto file,
  // populated by the static initializer below.
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dialogflow_v2beta1_VoiceSelectionParams_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dialogflow_v2beta1_VoiceSelectionParams_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dialogflow_v2beta1_SynthesizeSpeechConfig_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dialogflow_v2beta1_SynthesizeSpeechConfig_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dialogflow_v2beta1_OutputAudioConfig_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dialogflow_v2beta1_OutputAudioConfig_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }

  private static com.google.protobuf.Descriptors.FileDescriptor descriptor;

  static {
    // Serialized FileDescriptorProto for audio_config.proto, chunked into
    // string literals by the code generator.
    java.lang.String[] descriptorData = {
      "\n2google/cloud/dialogflow/v2beta1/audio_"
          + "config.proto\022\037google.cloud.dialogflow.v2"
          + "beta1\032\034google/api/annotations.proto\"k\n\024V"
          + "oiceSelectionParams\022\014\n\004name\030\001 \001(\t\022E\n\013ssm"
          + "l_gender\030\002 \001(\01620.google.cloud.dialogflow"
          + ".v2beta1.SsmlVoiceGender\"\270\001\n\026SynthesizeS"
          + "peechConfig\022\025\n\rspeaking_rate\030\001 \001(\001\022\r\n\005pi"
          + "tch\030\002 \001(\001\022\026\n\016volume_gain_db\030\003 \001(\001\022\032\n\022eff"
          + "ects_profile_id\030\005 \003(\t\022D\n\005voice\030\004 \001(\01325.g"
          + "oogle.cloud.dialogflow.v2beta1.VoiceSele"
          + "ctionParams\"\327\001\n\021OutputAudioConfig\022L\n\016aud"
          + "io_encoding\030\001 \001(\01624.google.cloud.dialogf"
          + "low.v2beta1.OutputAudioEncoding\022\031\n\021sampl"
          + "e_rate_hertz\030\002 \001(\005\022Y\n\030synthesize_speech_"
          + "config\030\003 \001(\01327.google.cloud.dialogflow.v"
          + "2beta1.SynthesizeSpeechConfig*\215\001\n\017SsmlVo"
          + "iceGender\022!\n\035SSML_VOICE_GENDER_UNSPECIFI"
          + "ED\020\000\022\032\n\026SSML_VOICE_GENDER_MALE\020\001\022\034\n\030SSML"
          + "_VOICE_GENDER_FEMALE\020\002\022\035\n\031SSML_VOICE_GEN"
          + "DER_NEUTRAL\020\003*\244\001\n\023OutputAudioEncoding\022%\n"
          + "!OUTPUT_AUDIO_ENCODING_UNSPECIFIED\020\000\022#\n\037"
          + "OUTPUT_AUDIO_ENCODING_LINEAR_16\020\001\022\035\n\031OUT"
          + "PUT_AUDIO_ENCODING_MP3\020\002\022\"\n\036OUTPUT_AUDIO"
          + "_ENCODING_OGG_OPUS\020\003B\256\001\n#com.google.clou"
          + "d.dialogflow.v2beta1B\020AudioConfigProtoP\001"
          + "ZIgoogle.golang.org/genproto/googleapis/"
          + "cloud/dialogflow/v2beta1;dialogflow\370\001\001\242\002"
          + "\002DF\252\002\037Google.Cloud.Dialogflow.V2beta1b\006p"
          + "roto3"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
          public com.google.protobuf.ExtensionRegistry assignDescriptors(
              com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
        descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          com.google.api.AnnotationsProto.getDescriptor(),
        },
        assigner);
    // Wire up per-message descriptors and field accessor tables.
    internal_static_google_cloud_dialogflow_v2beta1_VoiceSelectionParams_descriptor =
        getDescriptor().getMessageTypes().get(0);
    internal_static_google_cloud_dialogflow_v2beta1_VoiceSelectionParams_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_dialogflow_v2beta1_VoiceSelectionParams_descriptor,
            new java.lang.String[] {
              "Name", "SsmlGender",
            });
    internal_static_google_cloud_dialogflow_v2beta1_SynthesizeSpeechConfig_descriptor =
        getDescriptor().getMessageTypes().get(1);
    internal_static_google_cloud_dialogflow_v2beta1_SynthesizeSpeechConfig_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_dialogflow_v2beta1_SynthesizeSpeechConfig_descriptor,
            new java.lang.String[] {
              "SpeakingRate", "Pitch", "VolumeGainDb", "EffectsProfileId", "Voice",
            });
    internal_static_google_cloud_dialogflow_v2beta1_OutputAudioConfig_descriptor =
        getDescriptor().getMessageTypes().get(2);
    internal_static_google_cloud_dialogflow_v2beta1_OutputAudioConfig_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_dialogflow_v2beta1_OutputAudioConfig_descriptor,
            new java.lang.String[] {
              "AudioEncoding", "SampleRateHertz", "SynthesizeSpeechConfig",
            });
    com.google.api.AnnotationsProto.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
| apache-2.0 |
tomas-pluskal/jmzml | src/main/java/uk/ac/ebi/jmzml/xml/io/MzMLMarshaller.java | 6024 | /*
* Date: 22/7/2008
* Author: rcote
* File: uk.ac.ebi.jmzml.xml.io.MzMLMarshaller
*
* jmzml is Copyright 2008 The European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*
*/
package uk.ac.ebi.jmzml.xml.io;
import com.sun.xml.txw2.output.IndentingXMLStreamWriter;
import org.apache.log4j.Logger;
import uk.ac.ebi.jmzml.model.mzml.MzML;
import uk.ac.ebi.jmzml.model.mzml.MzMLObject;
import uk.ac.ebi.jmzml.model.mzml.utilities.ModelConstants;
import uk.ac.ebi.jmzml.xml.Constants;
import uk.ac.ebi.jmzml.xml.jaxb.marshaller.MarshallerFactory;
import uk.ac.ebi.jmzml.xml.util.EscapingXMLStreamWriter;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.StringWriter;
import java.io.Writer;
/**
 * Marshalls jmzML model objects to XML using JAXB.
 */
public class MzMLMarshaller {

    private static final Logger logger = Logger.getLogger(MzMLMarshaller.class);

    // jaxb.fragment - value must be a java.lang.Boolean
    // This property determines whether or not document level events will be
    // generated by the Marshaller (default: false). When set to true:
    //   * marshal(Object,ContentHandler)  - no startDocument()/endDocument() calls
    //   * marshal(Object,Node)            - no effect
    //   * marshal(Object,OutputStream)    - no xml declaration generated
    //   * marshal(Object,Writer)          - no xml declaration generated
    //   * marshal(Object,Result)          - depends on the kind of Result object
    //   * marshal(Object,XMLEventWriter)  - no START_DOCUMENT/END_DOCUMENT events
    //   * marshal(Object,XMLStreamWriter) - no START_DOCUMENT/END_DOCUMENT events

    /**
     * Marshalls the given object to an XML string.
     *
     * @param object the object to marshall, not null
     * @param <T> the mzML object type
     * @return the XML representation of the object
     */
    public <T extends MzMLObject> String marshall(T object) {
        StringWriter sw = new StringWriter();
        this.marshall(object, sw);
        return sw.toString();
    }

    /**
     * Marshalls the given object to an output stream.
     *
     * @param object the object to marshall, not null
     * @param os the stream to write the XML to
     * @param <T> the mzML object type
     */
    public <T extends MzMLObject> void marshall(T object, OutputStream os) {
        this.marshall(object, new OutputStreamWriter(os));
    }

    /**
     * Marshalls the given object to a writer.
     * <p>
     * Objects other than {@link MzML} are written as document fragments
     * (no XML declaration) since they carry no root-element annotation.
     *
     * @param object the object to marshall, not null
     * @param out the writer to marshall to
     * @param <T> the mzML object type
     * @throws IllegalArgumentException if the object is null
     * @throws IllegalStateException if marshalling fails; the underlying
     *         JAXB/StAX exception is attached as the cause
     */
    public <T extends MzMLObject> void marshall(T object, Writer out) {
        if (object == null) {
            throw new IllegalArgumentException("Cannot marshall a NULL object");
        }

        try {
            Marshaller marshaller = MarshallerFactory.getInstance().initializeMarshaller();

            // Set JAXB_FRAGMENT_PROPERTY to true for all objects that do not have
            // a @XmlRootElement annotation
            // ToDo: add handling of indexedmzML (-> add flag to control treatment as fragment or not)
            if (!(object instanceof MzML)) {
                marshaller.setProperty(Constants.JAXB_FRAGMENT_PROPERTY, true);
                // note: log messages were previously swapped between the branches
                if (logger.isDebugEnabled()) logger.debug("Object '" + object.getClass().getName() +
                        "' will be treated as fragment.");
            } else {
                if (logger.isDebugEnabled()) logger.debug("Object '" + object.getClass().getName() +
                        "' will be treated as root element.");
            }

            QName aQName = ModelConstants.getQNameForClass(object.getClass());

            // before marshalling out, wrap in a Custom XMLStreamWriter
            // to fix a JAXB bug: http://java.net/jira/browse/JAXB-614
            // also wrapping in IndentingXMLStreamWriter to generate formatted XML
            // (force the JDK StAX implementation so escaping behaves consistently)
            System.setProperty("javax.xml.stream.XMLOutputFactory", "com.sun.xml.internal.stream.XMLOutputFactoryImpl");
            XMLOutputFactory factory = XMLOutputFactory.newFactory();
            XMLStreamWriter xmlStreamWriter = factory.createXMLStreamWriter(out);
            IndentingXMLStreamWriter writer = new IndentingXMLStreamWriter(new EscapingXMLStreamWriter(xmlStreamWriter));

            marshaller.marshal(new JAXBElement(aQName, object.getClass(), object), writer);
        } catch (JAXBException e) {
            logger.error("MzMLMarshaller.marshall", e);
            // chain the original exception so callers can diagnose the root cause
            throw new IllegalStateException("Error while marshalling object:" + object.toString(), e);
        } catch (XMLStreamException e) {
            logger.error("MzMLMarshaller.marshall", e);
            throw new IllegalStateException("Error while marshalling object:" + object.toString(), e);
        }
    }

    // ToDo: default marshaller can only cope with mzML or sub-elements
    // ToDo: ?? new marshal method to create indexedmzML (with parameter specifying the elements to index)
}
| apache-2.0 |
SalmanTKhan/MyAnimeViewer | recyclerview-fastscroll/src/main/java/com/simplecityapps/recyclerview_fastscroll/views/FastScroller.java | 14387 | /*
* Copyright (c) 2016 Tim Malseed
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.simplecityapps.recyclerview_fastscroll.views;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.support.annotation.ColorInt;
import android.support.annotation.IntDef;
import android.support.annotation.Keep;
import android.support.v4.view.animation.FastOutLinearInInterpolator;
import android.support.v4.view.animation.LinearOutSlowInInterpolator;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.ViewConfiguration;
import com.simplecityapps.recyclerview_fastscroll.R;
import com.simplecityapps.recyclerview_fastscroll.interfaces.OnFastScrollStateChangeListener;
import com.simplecityapps.recyclerview_fastscroll.utils.Utils;
import java.lang.annotation.Retention;
import static java.lang.annotation.RetentionPolicy.SOURCE;
public class FastScroller {
// Default delay (ms) before the scrollbar auto-hides after interaction stops.
private static final int DEFAULT_AUTO_HIDE_DELAY = 1500;
// Host view and the section-name popup drawn alongside the thumb.
private FastScrollRecyclerView mRecyclerView;
private FastScrollPopup mPopup;
// Scrollbar geometry in pixels (thumb height, track/thumb width).
private int mThumbHeight;
private int mWidth;
// Paints for the draggable thumb and the track behind it.
private Paint mThumb;
private Paint mTrack;
// Reusable scratch rects (avoids allocating during drawing/invalidation).
private Rect mTmpRect = new Rect();
private Rect mInvalidateRect = new Rect();
private Rect mInvalidateTmpRect = new Rect();
// The inset is the buffer around which a point will still register as a click on the scrollbar
private int mTouchInset;
// This is the offset from the top of the scrollbar when the user first starts touching. To
// prevent jumping, this offset is applied as the user scrolls.
private int mTouchOffset;
// Current thumb position and drawing offset.
// NOTE(review): public fields — presumably read directly by companion classes; confirm.
public Point mThumbPosition = new Point(-1, -1);
public Point mOffset = new Point(0, 0);
// True while the user is dragging the thumb.
private boolean mIsDragging;
// Auto-hide animation state and configuration.
private Animator mAutoHideAnimator;
boolean mAnimatingShow;
private int mAutoHideDelay = DEFAULT_AUTO_HIDE_DELAY;
private boolean mAutoHideEnabled = true;
private final Runnable mHideRunnable;
@Retention(SOURCE)
@IntDef({FastScrollerPopupPosition.ADJACENT, FastScrollerPopupPosition.CENTER})
public @interface FastScrollerPopupPosition {
int ADJACENT = 0;
int CENTER = 1;
}
public FastScroller(Context context, FastScrollRecyclerView recyclerView, AttributeSet attrs) {
Resources resources = context.getResources();
mRecyclerView = recyclerView;
mPopup = new FastScrollPopup(resources, recyclerView);
mThumbHeight = Utils.toPixels(resources, 48);
mWidth = Utils.toPixels(resources, 8);
mTouchInset = Utils.toPixels(resources, -24);
mThumb = new Paint(Paint.ANTI_ALIAS_FLAG);
mTrack = new Paint(Paint.ANTI_ALIAS_FLAG);
TypedArray typedArray = context.getTheme().obtainStyledAttributes(
attrs, R.styleable.FastScrollRecyclerView, 0, 0);
try {
mAutoHideEnabled = typedArray.getBoolean(R.styleable.FastScrollRecyclerView_fastScrollAutoHide, true);
mAutoHideDelay = typedArray.getInteger(R.styleable.FastScrollRecyclerView_fastScrollAutoHideDelay, DEFAULT_AUTO_HIDE_DELAY);
int trackColor = typedArray.getColor(R.styleable.FastScrollRecyclerView_fastScrollTrackColor, 0x1f000000);
int thumbColor = typedArray.getColor(R.styleable.FastScrollRecyclerView_fastScrollThumbColor, 0xff000000);
int popupBgColor = typedArray.getColor(R.styleable.FastScrollRecyclerView_fastScrollPopupBgColor, 0xff000000);
int popupTextColor = typedArray.getColor(R.styleable.FastScrollRecyclerView_fastScrollPopupTextColor, 0xffffffff);
int popupTextSize = typedArray.getDimensionPixelSize(R.styleable.FastScrollRecyclerView_fastScrollPopupTextSize, Utils.toScreenPixels(resources, 56));
int popupBackgroundSize = typedArray.getDimensionPixelSize(R.styleable.FastScrollRecyclerView_fastScrollPopupBackgroundSize, Utils.toPixels(resources, 88));
@FastScrollerPopupPosition int popupPosition = typedArray.getInteger(R.styleable.FastScrollRecyclerView_fastScrollPopupPosition, FastScrollerPopupPosition.ADJACENT);
mTrack.setColor(trackColor);
mThumb.setColor(thumbColor);
mPopup.setBgColor(popupBgColor);
mPopup.setTextColor(popupTextColor);
mPopup.setTextSize(popupTextSize);
mPopup.setBackgroundSize(popupBackgroundSize);
mPopup.setPopupPosition(popupPosition);
} finally {
typedArray.recycle();
}
mHideRunnable = new Runnable() {
@Override
public void run() {
if (!mIsDragging) {
if (mAutoHideAnimator != null) {
mAutoHideAnimator.cancel();
}
mAutoHideAnimator = ObjectAnimator.ofInt(FastScroller.this, "offsetX", (Utils.isRtl(mRecyclerView.getResources()) ? -1 : 1) * mWidth);
mAutoHideAnimator.setInterpolator(new FastOutLinearInInterpolator());
mAutoHideAnimator.setDuration(200);
mAutoHideAnimator.start();
}
}
};
mRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
super.onScrolled(recyclerView, dx, dy);
if (!mRecyclerView.isInEditMode()) {
show();
}
}
});
if (mAutoHideEnabled) {
postAutoHideDelayed();
}
}
public int getThumbHeight() {
return mThumbHeight;
}
public int getWidth() {
return mWidth;
}
public boolean isDragging() {
return mIsDragging;
}
/**
* Handles the touch event and determines whether to show the fast scroller (or updates it if
* it is already showing).
*/
public void handleTouchEvent(MotionEvent ev, int downX, int downY, int lastY,
OnFastScrollStateChangeListener stateChangeListener) {
ViewConfiguration config = ViewConfiguration.get(mRecyclerView.getContext());
int action = ev.getAction();
int y = (int) ev.getY();
switch (action) {
case MotionEvent.ACTION_DOWN:
if (isNearPoint(downX, downY)) {
mTouchOffset = downY - mThumbPosition.y;
}
break;
case MotionEvent.ACTION_MOVE:
// Check if we should start scrolling
if (!mIsDragging && isNearPoint(downX, downY) &&
Math.abs(y - downY) > config.getScaledTouchSlop()) {
mRecyclerView.getParent().requestDisallowInterceptTouchEvent(true);
mIsDragging = true;
mTouchOffset += (lastY - downY);
mPopup.animateVisibility(true);
if (stateChangeListener != null) {
stateChangeListener.onFastScrollStart();
}
}
if (mIsDragging) {
// Update the fastscroller section name at this touch position
int top = 0;
int bottom = mRecyclerView.getHeight() - mThumbHeight;
float boundedY = (float) Math.max(top, Math.min(bottom, y - mTouchOffset));
String sectionName = mRecyclerView.scrollToPositionAtProgress((boundedY - top) / (bottom - top));
mPopup.setSectionName(sectionName);
mPopup.animateVisibility(!sectionName.isEmpty());
mRecyclerView.invalidate(mPopup.updateFastScrollerBounds(mRecyclerView, mThumbPosition.y));
}
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_CANCEL:
mTouchOffset = 0;
if (mIsDragging) {
mIsDragging = false;
mPopup.animateVisibility(false);
if (stateChangeListener != null) {
stateChangeListener.onFastScrollStop();
}
}
break;
}
}
public void draw(Canvas canvas) {
if (mThumbPosition.x < 0 || mThumbPosition.y < 0) {
return;
}
//Background
canvas.drawRect(mThumbPosition.x + mOffset.x, mThumbHeight / 2 + mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y - mThumbHeight / 2, mTrack);
//Handle
canvas.drawRect(mThumbPosition.x + mOffset.x, mThumbPosition.y + mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mThumbPosition.y + mOffset.y + mThumbHeight, mThumb);
//Popup
mPopup.draw(canvas);
}
/**
* Returns whether the specified points are near the scroll bar bounds.
*/
private boolean isNearPoint(int x, int y) {
mTmpRect.set(mThumbPosition.x, mThumbPosition.y, mThumbPosition.x + mWidth,
mThumbPosition.y + mThumbHeight);
mTmpRect.inset(mTouchInset, mTouchInset);
return mTmpRect.contains(x, y);
}
public void setThumbPosition(int x, int y) {
if (mThumbPosition.x == x && mThumbPosition.y == y) {
return;
}
// do not create new objects here, this is called quite often
mInvalidateRect.set(mThumbPosition.x + mOffset.x, mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y);
mThumbPosition.set(x, y);
mInvalidateTmpRect.set(mThumbPosition.x + mOffset.x, mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y);
mInvalidateRect.union(mInvalidateTmpRect);
mRecyclerView.invalidate(mInvalidateRect);
}
public void setOffset(int x, int y) {
if (mOffset.x == x && mOffset.y == y) {
return;
}
// do not create new objects here, this is called quite often
mInvalidateRect.set(mThumbPosition.x + mOffset.x, mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y);
mOffset.set(x, y);
mInvalidateTmpRect.set(mThumbPosition.x + mOffset.x, mOffset.y, mThumbPosition.x + mOffset.x + mWidth, mRecyclerView.getHeight() + mOffset.y);
mInvalidateRect.union(mInvalidateTmpRect);
mRecyclerView.invalidate(mInvalidateRect);
}
// Setter/getter for the popup alpha for animations
@Keep
public void setOffsetX(int x) {
setOffset(x, mOffset.y);
}
@Keep
public int getOffsetX() {
return mOffset.x;
}
public void show() {
if (!mAnimatingShow) {
if (mAutoHideAnimator != null) {
mAutoHideAnimator.cancel();
}
mAutoHideAnimator = ObjectAnimator.ofInt(this, "offsetX", 0);
mAutoHideAnimator.setInterpolator(new LinearOutSlowInInterpolator());
mAutoHideAnimator.setDuration(150);
mAutoHideAnimator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationCancel(Animator animation) {
super.onAnimationCancel(animation);
mAnimatingShow = false;
}
@Override
public void onAnimationEnd(Animator animation) {
super.onAnimationEnd(animation);
mAnimatingShow = false;
}
});
mAnimatingShow = true;
mAutoHideAnimator.start();
}
if (mAutoHideEnabled) {
postAutoHideDelayed();
} else {
cancelAutoHide();
}
}
protected void postAutoHideDelayed() {
if (mRecyclerView != null) {
cancelAutoHide();
mRecyclerView.postDelayed(mHideRunnable, mAutoHideDelay);
}
}
protected void cancelAutoHide() {
if (mRecyclerView != null) {
mRecyclerView.removeCallbacks(mHideRunnable);
}
}
public void setThumbColor(@ColorInt int color) {
mThumb.setColor(color);
mRecyclerView.invalidate(mInvalidateRect);
}
public void setTrackColor(@ColorInt int color) {
mTrack.setColor(color);
mRecyclerView.invalidate(mInvalidateRect);
}
public void setPopupBgColor(@ColorInt int color) {
mPopup.setBgColor(color);
}
public void setPopupTextColor(@ColorInt int color) {
mPopup.setTextColor(color);
}
public void setPopupTypeface(Typeface typeface) {
mPopup.setTypeface(typeface);
}
public void setPopupTextSize(int size) {
mPopup.setTextSize(size);
}
public int getPopupTextSize() {
return mPopup.getTextSize();
}
public void setAutoHideDelay(int hideDelay) {
mAutoHideDelay = hideDelay;
if (mAutoHideEnabled) {
postAutoHideDelayed();
}
}
public void setAutoHideEnabled(boolean autoHideEnabled) {
mAutoHideEnabled = autoHideEnabled;
if (autoHideEnabled) {
postAutoHideDelayed();
} else {
cancelAutoHide();
}
}
public void setPopupPosition(@FastScrollerPopupPosition int popupPosition) {
mPopup.setPopupPosition(popupPosition);
}
}
| apache-2.0 |
dremio/dremio-oss | common/src/test/java/com/dremio/io/file/TestMorePosixFilePermissions.java | 4442 | /*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.io.file;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.nio.file.attribute.PosixFilePermission;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import com.dremio.test.DremioTest;
/**
* Test class for {@code MorePosixFilePermissions}
*/
/**
 * Test class for {@code MorePosixFilePermissions}
 */
public class TestMorePosixFilePermissions extends DremioTest {

  /**
   * Test class for {@code MorePosixFilePermissions#fromOctalMode(int)}.
   *
   * <p>Enumerates the full power set of the nine POSIX permission bits and verifies that
   * both the int and the String octal representations decode to the expected set.
   */
  @RunWith(Parameterized.class)
  public static class TestFromOctalMode extends DremioTest {
    private final Set<PosixFilePermission> expected;
    private final int mode;

    @Parameterized.Parameters
    public static Iterable<Object[]> getTestCases() {
      // Brute forcing to list all cases: build the power set of all permissions.
      List<Set<PosixFilePermission>> combinations = new ArrayList<>();
      combinations.add(EnumSet.noneOf(PosixFilePermission.class));
      for (PosixFilePermission permission: PosixFilePermission.values()) {
        // For each combination generated so far, derive a new one that also contains the
        // current permission. Each derived combination must be an independent copy:
        // the previous implementation copied only the list (not the sets) and then
        // mutated the shared Set instances, collapsing the whole power set into
        // aliases of the all-permissions set.
        List<Set<PosixFilePermission>> newCombinations = new ArrayList<>(combinations.size());
        for (Set<PosixFilePermission> combination: combinations) {
          Set<PosixFilePermission> newCombination = EnumSet.noneOf(PosixFilePermission.class);
          newCombination.addAll(combination);
          newCombination.add(permission);
          newCombinations.add(newCombination);
        }
        combinations.addAll(newCombinations);
      }
      // Compute the octal mode corresponding to each permission set.
      List<Object[]> testCases = new ArrayList<>(combinations.size());
      for(Set<PosixFilePermission> combination: combinations) {
        int mode = 0;
        for (PosixFilePermission permission: combination) {
          switch(permission) {
          case OWNER_READ:
            mode += 0400;
            break;
          case OWNER_WRITE:
            mode += 0200;
            break;
          case OWNER_EXECUTE:
            mode += 0100;
            break;
          case GROUP_READ:
            mode += 0040;
            break;
          case GROUP_WRITE:
            mode += 0020;
            break;
          case GROUP_EXECUTE:
            mode += 0010;
            break;
          case OTHERS_READ:
            mode += 0004;
            break;
          case OTHERS_WRITE:
            mode += 0002;
            break;
          case OTHERS_EXECUTE:
            mode += 0001;
            break;
          }
        }
        testCases.add(new Object[] {combination, mode});
      }
      return testCases;
    }

    public TestFromOctalMode(Set<PosixFilePermission> expected, int mode) {
      this.expected = expected;
      this.mode = mode;
    }

    @Test
    public void checkFromOctalModeInt() {
      assertThat(MorePosixFilePermissions.fromOctalMode(mode), is(equalTo(expected)));
    }

    @Test
    public void checkFromOctalModeString() {
      String s = Integer.toOctalString(mode);
      assertThat(MorePosixFilePermissions.fromOctalMode(s), is(equalTo(expected)));
    }
  }

  /** Out-of-range and non-numeric modes must be rejected with IllegalArgumentException. */
  @Test
  public void testFromOctalModeWithIllegalMode() {
    assertFails(() -> MorePosixFilePermissions.fromOctalMode(-1));
    assertFails(() -> MorePosixFilePermissions.fromOctalMode(512));
    assertFails(() -> MorePosixFilePermissions.fromOctalMode(Integer.MIN_VALUE));
    assertFails(() -> MorePosixFilePermissions.fromOctalMode(Integer.MAX_VALUE));
    assertFails(() -> MorePosixFilePermissions.fromOctalMode("-1"));
    assertFails(() -> MorePosixFilePermissions.fromOctalMode("8"));
    assertFails(() -> MorePosixFilePermissions.fromOctalMode(""));
    assertFails(() -> MorePosixFilePermissions.fromOctalMode("foo"));
  }

  /** Runs {@code r} and passes only if it throws {@link IllegalArgumentException}. */
  public void assertFails(Runnable r) {
    try {
      r.run();
      fail("Expected method to fail");
    } catch (IllegalArgumentException e) {
      // expected
    }
  }
}
| apache-2.0 |
kevinearls/camel | components/camel-leveldb/src/test/java/org/apache/camel/component/leveldb/LevelDBAggregateRecoverDeadLetterChannelFailedTest.java | 5025 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.leveldb;
import java.util.concurrent.TimeUnit;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.AggregationStrategy;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Before;
import org.junit.Test;
/**
 * Verifies that the LevelDB aggregation repository recovers failed exchanges and, once the
 * maximum redeliveries are exhausted, moves them to the dead letter channel — and keeps
 * retrying the move when the dead letter channel itself fails.
 */
public class LevelDBAggregateRecoverDeadLetterChannelFailedTest extends CamelTestSupport {

    private LevelDBAggregationRepository repo;

    @Override
    @Before
    public void setUp() throws Exception {
        deleteDirectory("target/data");
        repo = new LevelDBAggregationRepository("repo1", "target/data/leveldb.dat");
        // enable recovery
        repo.setUseRecovery(true);
        // exhaust after at most 2 attempts
        repo.setMaximumRedeliveries(2);
        // and move to this dead letter channel
        repo.setDeadLetterUri("direct:dead");
        // check faster
        repo.setRecoveryInterval(1000, TimeUnit.MILLISECONDS);
        super.setUp();
    }

    @Test
    public void testLevelDBAggregateRecoverDeadLetterChannelFailed() throws Exception {
        // should fail all times
        getMockEndpoint("mock:result").expectedMessageCount(0);
        getMockEndpoint("mock:aggregated").expectedMessageCount(3);
        // it should keep sending to DLC if it failed, so test for min 2 attempts
        getMockEndpoint("mock:dead").expectedMinimumMessageCount(2);
        // all the details should be the same about redelivered and redelivered 2 times
        getMockEndpoint("mock:dead").message(0).header(Exchange.REDELIVERED).isEqualTo(Boolean.TRUE);
        getMockEndpoint("mock:dead").message(0).header(Exchange.REDELIVERY_COUNTER).isEqualTo(2);
        getMockEndpoint("mock:dead").message(1).header(Exchange.REDELIVERY_COUNTER).isEqualTo(2);
        getMockEndpoint("mock:dead").message(1).header(Exchange.REDELIVERED).isEqualTo(Boolean.TRUE);
        template.sendBodyAndHeader("direct:start", "A", "id", 123);
        template.sendBodyAndHeader("direct:start", "B", "id", 123);
        template.sendBodyAndHeader("direct:start", "C", "id", 123);
        template.sendBodyAndHeader("direct:start", "D", "id", 123);
        template.sendBodyAndHeader("direct:start", "E", "id", 123);
        assertMockEndpointsSatisfied(30, TimeUnit.SECONDS);
        // all the details should be the same about redelivered and redelivered 2 times
        Exchange first = getMockEndpoint("mock:dead").getReceivedExchanges().get(0);
        assertEquals(true, first.getIn().getHeader(Exchange.REDELIVERED));
        assertEquals(2, first.getIn().getHeader(Exchange.REDELIVERY_COUNTER));
        Exchange second = getMockEndpoint("mock:dead").getReceivedExchanges().get(1);
        assertEquals(true, second.getIn().getHeader(Exchange.REDELIVERED));
        // Fixed copy-paste bug: this previously re-asserted "first"'s counter instead of
        // verifying the second exchange's redelivery counter.
        assertEquals(2, second.getIn().getHeader(Exchange.REDELIVERY_COUNTER));
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Aggregation always throws after completing, forcing recovery; the DLC
                // route also throws, so exchanges keep bouncing back to the repository.
                from("direct:start")
                        .aggregate(header("id"), new MyAggregationStrategy())
                        .completionSize(5).aggregationRepository(repo)
                        .log("aggregated exchange id ${exchangeId} with ${body}")
                        .to("mock:aggregated")
                        .throwException(new IllegalArgumentException("Damn"))
                        .to("mock:result")
                        .end();
                from("direct:dead")
                        .to("mock:dead")
                        .throwException(new IllegalArgumentException("We are dead"));
            }
        };
    }

    /** Simple strategy that concatenates the string bodies of the exchanges. */
    public static class MyAggregationStrategy implements AggregationStrategy {
        public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
            if (oldExchange == null) {
                // First exchange of the group; nothing to merge yet.
                return newExchange;
            }
            String body1 = oldExchange.getIn().getBody(String.class);
            String body2 = newExchange.getIn().getBody(String.class);
            oldExchange.getIn().setBody(body1 + body2);
            return oldExchange;
        }
    }
} | apache-2.0 |
visallo/visallo | core/core/src/main/java/org/visallo/core/model/properties/types/IntegerVisalloExtendedData.java | 517 | package org.visallo.core.model.properties.types;
import org.vertexium.ExtendedDataRow;
/**
 * Typed accessor for integer-valued extended-data columns.
 */
public class IntegerVisalloExtendedData extends IdentityVisalloExtendedData<Integer> {
    public IntegerVisalloExtendedData(String tableName, String propertyName) {
        super(tableName, propertyName);
    }

    /**
     * Reads this column from the given row.
     *
     * @param defaultValue value to return when the row has no value for this column
     * @return the stored integer, or {@code defaultValue} when absent
     */
    public int getValue(ExtendedDataRow row, int defaultValue) {
        Integer stored = getValue(row);
        return stored == null ? defaultValue : stored;
    }
}
| apache-2.0 |
integrated/jakarta-slide-server | projector/src/java/org/apache/slide/projector/store/Cache.java | 761 | package org.apache.slide.projector.store;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
 * In-memory store backed by a plain {@link Map}. A process-wide instance is available
 * through {@link #getInstance()}; the backing map can be swapped via {@link #setMap(Map)}.
 */
public class Cache extends AbstractStore {

    /** Shared instance returned by {@link #getInstance()}. */
    private static Cache cache = new Cache();

    /** Backing key/value storage. */
    private Map entries;

    public static Cache getInstance() {
        return cache;
    }

    public Cache() {
        this.entries = new HashMap();
    }

    /** Replaces the backing map wholesale. */
    public void setMap(Map map) {
        this.entries = map;
    }

    /** Exposes the backing map directly. */
    public Map getMap() {
        return this.entries;
    }

    /** Stores {@code value} under {@code key}. */
    public void put(String key, Object value) throws IOException {
        this.entries.put(key, value);
    }

    /** Returns the value stored under {@code key}, or {@code null} when absent. */
    public Object get(String key) throws IOException {
        return this.entries.get(key);
    }

    /** Removes the entry for {@code key}, if any. */
    public void dispose(String key) throws IOException {
        this.entries.remove(key);
    }
} | apache-2.0 |
ruttosan86/apache-kafka-tests | src/test/java/it/itsoftware/chartx/kafka/tests/data/old/SimulatedTickDataSourceTest.java | 1535 | /*
* Copyright 2016 Davide Soldi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.itsoftware.chartx.kafka.tests.data.old;
import static org.junit.Assert.*;
import java.util.logging.Logger;
import org.junit.Test;
import it.itsoftware.chartx.kafka.tests.data.Tick;
import it.itsoftware.chartx.kafka.tests.data.source.SimulatedTickSource;
import it.itsoftware.chartx.kafka.tests.data.source.TickSource;
/**
 * Smoke test for {@link SimulatedTickSource}: open, drain up to 100 ticks, close.
 */
public class SimulatedTickDataSourceTest {

    final static Logger logger = Logger.getLogger("SimulatedTickDataSourceTest");

    @Test
    public void test() {
        TickSource source = new SimulatedTickSource(10);
        // Guard-clause form: bail out (failing the test) if the source cannot be opened.
        if (!source.open()) {
            logger.severe("Failed to open source");
            fail();
            return;
        }
        // Pull up to 100 ticks; every tick produced must be non-null.
        for (int attempt = 0; attempt < 100; attempt++) {
            if (!source.hasNext()) {
                continue;
            }
            Tick next = source.next();
            assertNotNull(next);
            logger.info(next.toString());
        }
        if (!source.close()) {
            logger.severe("Failed to close source");
            fail();
            return;
        }
        logger.info("Tick source closed.");
    }
}
| apache-2.0 |
rkapsi/daap | src/main/java/org/ardverk/daap/chunks/ContentCodesResponseImpl.java | 9190 | /*
* Digital Audio Access Protocol (DAAP) Library
* Copyright (C) 2004-2010 Roger Kapsi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ardverk.daap.chunks;
import org.ardverk.daap.chunks.impl.ContentCodesResponse;
import org.ardverk.daap.chunks.impl.Status;
/**
 * Canned response body for the DAAP {@code /content-codes} request.
 *
 * <p>Each entry maps a four-character content code (the hex constant, spelled out in the
 * trailing comment) to its canonical dotted name plus a numeric value-type id.
 * NOTE(review): the type-id meanings (e.g. 9 on string-like codes, 12 on container-like
 * codes) are inferred from usage here — confirm against the {@code ContentCode} chunk
 * type constants before relying on them.
 */
public final class ContentCodesResponseImpl extends ContentCodesResponse {

    public ContentCodesResponseImpl() {
        super();
        // Overall request status (HTTP-style 200 = OK).
        add(new Status(200));
        add(new ContentCode(0x6D736175, "dmap.authenticationmethod", 1)); // msau
        add(new ContentCode(0x6D736173, "dmap.authenticationschemes", 1)); // msas
        add(new ContentCode(0x6D62636C, "dmap.bag", 12)); // mbcl
        add(new ContentCode(0x6162706C, "daap.baseplaylist", 1)); // abpl
        add(new ContentCode(0x6162616C, "daap.browsealbumlisting", 12)); // abal
        add(new ContentCode(0x61626172, "daap.browseartistlisting", 12)); // abar
        add(new ContentCode(0x61626370, "daap.browsecomposerlisting", 12)); // abcp
        add(new ContentCode(0x6162676E, "daap.browsegenrelisting", 12)); // abgn
        add(new ContentCode(0x6D636F6E, "dmap.container", 12)); // mcon
        add(new ContentCode(0x6D637463, "dmap.containercount", 5)); // mctc
        add(new ContentCode(0x6D637469, "dmap.containeritemid", 5)); // mcti
        add(new ContentCode(0x6D636E61, "dmap.contentcodesname", 9)); // mcna
        add(new ContentCode(0x6D636E6D, "dmap.contentcodesnumber", 5)); // mcnm
        add(new ContentCode(0x6D636372, "dmap.contentcodesresponse", 12)); // mccr
        add(new ContentCode(0x6D637479, "dmap.contentcodestype", 3)); // mcty
        add(new ContentCode(0x6170726F, "daap.protocolversion", 11)); // apro
        add(new ContentCode(0x6162726F, "daap.databasebrowse", 12)); // abro
        add(new ContentCode(0x6D736463, "dmap.databasescount", 5)); // msdc
        add(new ContentCode(0x61706C79, "daap.databaseplaylists", 12)); // aply
        add(new ContentCode(0x61646273, "daap.databasesongs", 12)); // adbs
        add(new ContentCode(0x6D75646C, "dmap.deletedidlisting", 12)); // mudl
        add(new ContentCode(0x6D64636C, "dmap.dictionary", 12)); // mdcl
        add(new ContentCode(0x6D70726F, "dmap.protocolversion", 11)); // mpro
        // NOTE(review): second byte is 0x8D (non-ASCII), hence the "f?ch" mnemonic;
        // this appears intentional wire data — do not "fix" it to ASCII.
        add(new ContentCode(0x668D6368, "dmap.haschildcontainers", 1)); // f?ch
        add(new ContentCode(0x61654856, "com.apple.itunes.has-video", 1)); // aeHV
        add(new ContentCode(0x6D696D63, "dmap.itemcount", 5)); // mimc
        add(new ContentCode(0x6D696964, "dmap.itemid", 5)); // miid
        add(new ContentCode(0x6D696B64, "dmap.itemkind", 1)); // mikd
        add(new ContentCode(0x6D696E6D, "dmap.itemname", 9)); // minm
        add(new ContentCode(0x61654149, "com.apple.itunes.itms-artistid", 5)); // aeAI
        add(new ContentCode(0x61654349, "com.apple.itunes.itms-composerid", 5)); // aeCI
        add(new ContentCode(0x61654749, "com.apple.itunes.itms-genreid", 5)); // aeGI
        add(new ContentCode(0x61655049, "com.apple.itunes.itms-playlistid", 5)); // aePI
        add(new ContentCode(0x61655349, "com.apple.itunes.itms-songid", 5)); // aeSI
        add(new ContentCode(0x61655346, "com.apple.itunes.itms-storefrontid", 5)); // aeSF
        add(new ContentCode(0x6D6C636C, "dmap.listing", 12)); // mlcl
        add(new ContentCode(0x6D6C6974, "dmap.listingitem", 12)); // mlit
        add(new ContentCode(0x6D736C72, "dmap.loginrequired", 1)); // mslr
        add(new ContentCode(0x6D6C6F67, "dmap.loginresponse", 12)); // mlog
        add(new ContentCode(0x61655356, "com.apple.itunes.music-sharing-version", 5)); // aeSV
        add(new ContentCode(0x61654E56, "com.apple.itunes.norm-volume", 5)); // aeNV
        add(new ContentCode(0x6D70636F, "dmap.parentcontainerid", 5)); // mpco
        add(new ContentCode(0x6D706572, "dmap.persistentid", 7)); // mper
        add(new ContentCode(0x6170726D, "daap.playlistrepeatmode", 1)); // aprm
        add(new ContentCode(0x6170736D, "daap.playlistshufflemode", 1)); // apsm
        add(new ContentCode(0x6170736F, "daap.playlistsongs", 12)); // apso
        add(new ContentCode(0x61655043, "com.apple.itunes.is-podcast", 1)); // aePC
        add(new ContentCode(0x61655050, "com.apple.itunes.is-podcast-playlist", 1)); // aePP
        add(new ContentCode(0x61727376, "daap.resolve", 12)); // arsv
        add(new ContentCode(0x61726966, "daap.resolveinfo", 12)); // arif
        add(new ContentCode(0x6D72636F, "dmap.returnedcount", 5)); // mrco
        add(new ContentCode(0x61766462, "daap.serverdatabases", 12)); // avdb
        add(new ContentCode(0x6D737276, "dmap.serverinforesponse", 12)); // msrv
        add(new ContentCode(0x6D757372, "dmap.serverrevision", 5)); // musr
        add(new ContentCode(0x6D6C6964, "dmap.sessionid", 5)); // mlid
        add(new ContentCode(0x61655350, "com.apple.itunes.smart-playlist", 1)); // aeSP
        // Per-song metadata codes.
        add(new ContentCode(0x6173616C, "daap.songalbum", 9)); // asal
        add(new ContentCode(0x61736172, "daap.songartist", 9)); // asar
        add(new ContentCode(0x61736274, "daap.songbeatsperminute", 3)); // asbt
        add(new ContentCode(0x61736272, "daap.songbitrate", 3)); // asbr
        add(new ContentCode(0x61736374, "daap.songcategory", 9)); // asct
        add(new ContentCode(0x61736373, "daap.songcodecsubtype", 5)); // ascs
        add(new ContentCode(0x61736364, "daap.songcodectype", 5)); // ascd
        add(new ContentCode(0x6173636D, "daap.songcomment", 9)); // ascm
        add(new ContentCode(0x6173636F, "daap.songcompilation", 1)); // asco
        add(new ContentCode(0x61736370, "daap.songcomposer", 9)); // ascp
        add(new ContentCode(0x6173636E, "daap.songcontentdescription", 9)); // ascn
        add(new ContentCode(0x61736372, "daap.songcontentrating", 1)); // ascr
        add(new ContentCode(0x6173646B, "daap.songdatakind", 1)); // asdk
        add(new ContentCode(0x6173756C, "daap.songdataurl", 9)); // asul
        add(new ContentCode(0x61736461, "daap.songdateadded", 10)); // asda
        add(new ContentCode(0x6173646D, "daap.songdatemodified", 10)); // asdm
        add(new ContentCode(0x61736474, "daap.songdescription", 9)); // asdt
        add(new ContentCode(0x61736462, "daap.songdisabled", 1)); // asdb
        add(new ContentCode(0x61736463, "daap.songdisccount", 3)); // asdc
        add(new ContentCode(0x6173646E, "daap.songdiscnumber", 3)); // asdn
        add(new ContentCode(0x61736571, "daap.songeqpreset", 9)); // aseq
        add(new ContentCode(0x6173666D, "daap.songformat", 9)); // asfm
        add(new ContentCode(0x6173676E, "daap.songgenre", 9)); // asgn
        add(new ContentCode(0x61677270, "daap.songgrouping", 9)); // agrp
        add(new ContentCode(0x61736B79, "daap.songkeywords", 9)); // asky
        add(new ContentCode(0x61736C63, "daap.songlongcontentdescription", 9)); // aslc
        add(new ContentCode(0x61737276, "daap.songrelativevolume", 2)); // asrv
        add(new ContentCode(0x61737372, "daap.songsamplerate", 5)); // assr
        add(new ContentCode(0x6173737A, "daap.songsize", 5)); // assz
        add(new ContentCode(0x61737374, "daap.songstarttime", 5)); // asst
        add(new ContentCode(0x61737370, "daap.songstoptime", 5)); // assp
        add(new ContentCode(0x6173746D, "daap.songtime", 5)); // astm
        add(new ContentCode(0x61737463, "daap.songtrackcount", 3)); // astc
        add(new ContentCode(0x6173746E, "daap.songtracknumber", 3)); // astn
        add(new ContentCode(0x61737572, "daap.songuserrating", 1)); // asur
        add(new ContentCode(0x61737972, "daap.songyear", 3)); // asyr
        add(new ContentCode(0x6D74636F, "dmap.specifiedtotalcount", 5)); // mtco
        add(new ContentCode(0x6D737474, "dmap.status", 5)); // mstt
        add(new ContentCode(0x6D737473, "dmap.statusstring", 9)); // msts
        // Server capability flags.
        add(new ContentCode(0x6D73616C, "dmap.supportsautologout", 1)); // msal
        add(new ContentCode(0x6D736272, "dmap.supportsbrowse", 1)); // msbr
        add(new ContentCode(0x6D736578, "dmap.supportsextensions", 1)); // msex
        add(new ContentCode(0x6D736978, "dmap.supportsindex", 1)); // msix
        add(new ContentCode(0x6D737069, "dmap.supportspersistentids", 1)); // mspi
        add(new ContentCode(0x6D737179, "dmap.supportsquery", 1)); // msqy
        add(new ContentCode(0x6D737273, "dmap.supportsresolve", 1)); // msrs
        add(new ContentCode(0x6D737570, "dmap.supportsupdate", 1)); // msup
        add(new ContentCode(0x6D73746D, "dmap.timeoutinterval", 5)); // mstm
        add(new ContentCode(0x6D757064, "dmap.updateresponse", 12)); // mupd
        add(new ContentCode(0x6D757479, "dmap.updatetype", 1)); // muty
    }
}
| apache-2.0 |
kiritbasu/datacollector | jdbc-lib/src/main/java/com/streamsets/pipeline/lib/jdbc/MicrosoftJdbcRecordWriter.java | 7900 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.lib.jdbc;
import com.google.common.base.Joiner;
import com.streamsets.pipeline.api.Batch;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.api.base.OnRecordErrorException;
import com.streamsets.pipeline.stage.destination.jdbc.Errors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* Record writer implementation that resolves a Microsoft change data capture log
* to the queries required to replicate the table to another destination.
*/
public class MicrosoftJdbcRecordWriter implements JdbcRecordWriter {
private static final Logger LOG = LoggerFactory.getLogger(MicrosoftJdbcRecordWriter.class);
private static final int DELETE = 1;
private static final int INSERT = 2;
private static final int BEFORE_UPDATE = 3;
private static final int AFTER_UPDATE = 4;
public static final ChangeLogFormat FORMAT = ChangeLogFormat.MSSQL;
public static final String OP_FIELD = "/__$operation";
private final String connectionString;
private final String tableName;
private final DataSource dataSource;
private List<String> primaryKeyColumns;
/**
 * Creates a writer for the given table, resolving the table's primary key columns up
 * front (the generated UPDATE/DELETE statements address rows by primary key).
 *
 * @param connectionString JDBC connection string (kept for reference/reporting)
 * @param dataSource pooled data source used for all JDBC work
 * @param tableName fully qualified target table name
 * @throws StageException if the table's primary keys cannot be determined
 */
public MicrosoftJdbcRecordWriter(
    String connectionString,
    DataSource dataSource,
    String tableName) throws StageException {
    this.connectionString = connectionString;
    this.dataSource = dataSource;
    this.tableName = tableName;
    lookupPrimaryKeys();
}
private void lookupPrimaryKeys() throws StageException {
Connection connection = null;
try {
connection = dataSource.getConnection();
primaryKeyColumns = JdbcUtil.getPrimaryKeys(connection, tableName);
} catch (SQLException e) {
String formattedError = JdbcUtil.formatSqlException(e);
LOG.error(formattedError);
LOG.debug(formattedError, e);
throw new StageException(Errors.JDBCDEST_17, tableName, formattedError);
} finally {
if (connection != null) {
try {
connection.close();
} catch (SQLException e) {
String formattedError = JdbcUtil.formatSqlException(e);
LOG.error(formattedError);
LOG.debug(formattedError, e);
}
}
}
if (primaryKeyColumns.isEmpty()) {
throw new StageException(Errors.JDBCDEST_17, tableName);
}
}
/** {@inheritDoc} */
@Override
public List<OnRecordErrorException> writeBatch(Batch batch) throws StageException {
List<OnRecordErrorException> errorRecords = new LinkedList<>();
Connection connection = null;
try {
connection = dataSource.getConnection();
Iterator<Record> recordIterator = batch.getRecords();
recordLoop:
while (recordIterator.hasNext()) {
Record record = recordIterator.next();
String query;
int i;
Map<String, Object> columnMappings = getColumnMappings(record);
if (record.has(OP_FIELD)) {
int operation = record.get(OP_FIELD).getValueAsInteger();
switch (operation) {
case INSERT:
query = String.format(
"INSERT INTO %s (%s) VALUES (%s)",
tableName,
Joiner.on(", ").join(columnMappings.keySet()),
Joiner.on(", ").join(Collections.nCopies(columnMappings.size(), "?"))
);
break;
case BEFORE_UPDATE:
continue;
case AFTER_UPDATE:
query = String.format(
"UPDATE %s SET %s = ? WHERE %s = ?",
tableName,
Joiner.on(" = ?, ").join(columnMappings.keySet()),
Joiner.on(" = ? AND ").join(primaryKeyColumns)
);
break;
case DELETE:
query = String.format(
"DELETE FROM %s WHERE %s = ?",
tableName,
Joiner.on(" = ? AND ").join(primaryKeyColumns)
);
break;
default:
errorRecords.add(new OnRecordErrorException(record, Errors.JDBCDEST_09, operation, FORMAT));
continue;
}
LOG.debug("Query: {}", query);
PreparedStatement statement = connection.prepareStatement(query);
i = 1;
if (operation != DELETE) {
for (Object value : columnMappings.values()) {
statement.setObject(i, value);
++i;
}
}
if (operation == AFTER_UPDATE || operation == DELETE) {
// Also bind the primary keys for the where clause
for (String key : primaryKeyColumns) {
if (!columnMappings.containsKey(key)) {
errorRecords.add(new OnRecordErrorException(record, Errors.JDBCDEST_19, key));
continue recordLoop;
}
statement.setObject(i, columnMappings.get(key));
}
}
// Since we must commit all the changes in the same transaction, if one fails,
// we should abort the entire transaction (batch).
statement.execute();
statement.close();
} else {
errorRecords.add(new OnRecordErrorException(record, Errors.JDBCDEST_08, OP_FIELD, FORMAT));
}
}
connection.commit();
} catch (SQLException e) {
String formattedError = JdbcUtil.formatSqlException(e);
LOG.error(formattedError);
LOG.debug(formattedError, e);
// Whole batch fails
errorRecords.clear();
Iterator<Record> records = batch.getRecords();
while (records.hasNext()) {
errorRecords.add(new OnRecordErrorException(records.next(), Errors.JDBCDEST_14, formattedError));
}
} finally {
if (connection != null) {
try {
connection.rollback();
} catch (SQLException e) {
String formattedError = JdbcUtil.formatSqlException(e);
LOG.error(formattedError);
LOG.debug(formattedError, e);
} finally {
try {
connection.close();
} catch (SQLException e) {
String formattedError = JdbcUtil.formatSqlException(e);
LOG.error(formattedError);
LOG.debug(formattedError, e);
}
}
}
}
return errorRecords;
}
private Map<String, Object> getColumnMappings(Record record) {
Map<String, Object> mappings = new HashMap<>(record.getFieldPaths().size());
for (String fieldPath : record.getFieldPaths()) {
if (fieldPath.isEmpty()) {
continue;
}
final String fieldName = fieldPath.substring(1);
if (!fieldName.startsWith("__")) {
mappings.put(fieldPath.substring(1), record.get(fieldPath).getValue());
}
}
return mappings;
}
}
| apache-2.0 |