repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
hortonworks/cloudbreak | orchestrator-salt/src/main/java/com/sequenceiq/cloudbreak/orchestrator/salt/SaltErrorResolver.java | 2954 | package com.sequenceiq.cloudbreak.orchestrator.salt;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.yaml.snakeyaml.Yaml;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.sequenceiq.cloudbreak.util.FileReaderUtils;
@Component
public class SaltErrorResolver {

    private static final Logger LOGGER = LoggerFactory.getLogger(SaltErrorResolver.class);

    // Known error-name fragment -> human readable replacement message
    // (loaded from salt/errormessages.yaml at startup).
    private Map<String, String> errorMessages;

    // Name fragments of salt commands whose real failure cause must be read from
    // the "Stderr" attribute (loaded from salt/stderrcommands.yaml at startup).
    private List<String> commandsWithStderrFailures;

    /**
     * Loads both translation tables from the classpath. Runs once at startup; a missing
     * or unreadable YAML resource aborts application start with a RuntimeException.
     */
    @PostConstruct
    public void init() {
        try {
            String file = FileReaderUtils.readFileFromClasspath("salt/errormessages.yaml");
            errorMessages = new Yaml().load(file);
            LOGGER.info("Error messages for salt: {}", errorMessages);
            file = FileReaderUtils.readFileFromClasspath("salt/stderrcommands.yaml");
            commandsWithStderrFailures = new Yaml().load(file);
            LOGGER.info("Salt commands that will pull the failure from stderr: {}", commandsWithStderrFailures);
        } catch (IOException e) {
            // FIX: corrected typo in the original exception message ("messsages" -> "messages")
            throw new RuntimeException("Can't load salt error messages", e);
        }
    }

    /**
     * Resolves a human readable reason for every missing node.
     *
     * @param missingNodesWithReason node name mapped to the raw error attributes reported by salt
     * @return node name mapped to the resolved, human readable reason
     */
    public Multimap<String, String> resolveErrorMessages(Multimap<String, Map<String, String>> missingNodesWithReason) {
        LOGGER.info("Original missing nodes: {}", missingNodesWithReason);
        Multimap<String, String> missingTargetsWithReplacedReasons = ArrayListMultimap.create();
        missingNodesWithReason.entries().forEach(entry ->
                missingTargetsWithReplacedReasons.put(entry.getKey(), resolveMessageIfAvailable(entry.getValue())));
        LOGGER.info("Missing nodes after replace: {}", missingTargetsWithReplacedReasons);
        return missingTargetsWithReplacedReasons;
    }

    /**
     * Picks the best message from the raw error attributes: an entry keyed "Name" is translated
     * via the lookup tables, a single-entry map is unwrapped to its sole value, anything else
     * falls back to the map's own string form.
     */
    private String resolveMessageIfAvailable(Map<String, String> value) {
        if (value.containsKey("Name")) {
            return resolveErrorMessage(value);
        }
        if (value.size() == 1) {
            return value.values().iterator().next();
        }
        return value.toString();
    }

    /**
     * Translates a "Name"-keyed error: first via the curated {@link #errorMessages} table,
     * then - for commands known to report on stderr - via the raw "Stderr" attribute,
     * finally falling back to a generic "Failed to execute" message.
     */
    private String resolveErrorMessage(Map<String, String> errors) {
        String name = errors.get("Name");
        Optional<String> found = errorMessages.keySet().stream().filter(name::contains).findFirst();
        if (found.isPresent()) {
            return errorMessages.get(found.get());
        }
        found = commandsWithStderrFailures.stream().filter(name::contains).findFirst();
        if (found.isPresent() && errors.containsKey("Stderr")) {
            return errors.get("Stderr");
        }
        return "Failed to execute: " + errors;
    }
}
| apache-2.0 |
arnaud-deprez/camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/format/LocalDatePatternFormat.java | 2642 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.bindy.format;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Locale;
import org.apache.camel.dataformat.bindy.PatternFormat;
import org.apache.camel.util.ObjectHelper;
public class LocalDatePatternFormat implements PatternFormat<LocalDate> {

    // Date pattern understood by java.time.format.DateTimeFormatter; required before format/parse.
    private String pattern;

    // Optional locale for the formatter; null means the formatter's default locale.
    private Locale locale;

    public LocalDatePatternFormat() {
    }

    public LocalDatePatternFormat(String pattern, Locale locale) {
        this.pattern = pattern;
        this.locale = locale;
    }

    /**
     * Formats the given date using the configured pattern.
     *
     * @param object the date to format
     * @return the formatted date string
     * @throws Exception if the pattern has not been configured
     */
    public String format(LocalDate object) throws Exception {
        ObjectHelper.notNull(this.pattern, "pattern");
        return this.getDateFormat().format(object);
    }

    /**
     * Parses the given string using the configured pattern.
     *
     * @param string the text to parse
     * @return the parsed date
     * @throws FormatException if the string is longer than the pattern
     * @throws Exception if the pattern has not been configured or parsing fails
     */
    public LocalDate parse(String string) throws Exception {
        // FIX: validate the pattern BEFORE building the formatter, so a missing pattern is
        // reported with a clear message instead of failing inside DateTimeFormatter.ofPattern(null).
        ObjectHelper.notNull(this.pattern, "pattern");
        if (doesStringFitLengthOfPattern(string)) {
            return LocalDate.parse(string, this.getDateFormat());
        }
        throw new FormatException("Date provided does not fit the pattern defined");
    }

    // Crude sanity check: input longer than the pattern cannot possibly match it.
    private boolean doesStringFitLengthOfPattern(String string) {
        return string.length() <= this.pattern.length();
    }

    // Builds the formatter, applying the locale only when one was configured.
    protected DateTimeFormatter getDateFormat() {
        DateTimeFormatter result;
        if (locale != null) {
            result = DateTimeFormatter.ofPattern(pattern, locale);
        } else {
            result = DateTimeFormatter.ofPattern(pattern);
        }
        return result;
    }

    public String getPattern() {
        return pattern;
    }

    /**
     * Sets the pattern
     *
     * @param pattern the pattern
     */
    public void setPattern(String pattern) {
        this.pattern = pattern;
    }
}
| apache-2.0 |
crazysunj/Android-PickerView | android-pickerdialog/src/main/java/com/sunjian/android_pickview_lib/view/SmoothScrollTimerTask.java | 2075 | package com.sunjian.android_pickview_lib.view;
import java.util.TimerTask;
/**
 * Timer task that animates the wheel to its final resting position: each tick moves the
 * wheel one tenth of the remaining distance, producing a decelerating "smooth scroll",
 * then notifies the wheel's handler to redraw or to report the selected item.
 */
final class SmoothScrollTimerTask extends TimerTask {

    int realTotalOffset;   // remaining distance to scroll; Integer.MAX_VALUE is the "not yet initialized" sentinel
    int realOffset;        // distance applied during the current tick
    int offset;            // total distance requested when the task was scheduled
    final WheelView loopView;

    SmoothScrollTimerTask(WheelView loopview, int offset) {
        this.loopView = loopview;
        this.offset = offset;
        realTotalOffset = Integer.MAX_VALUE;  // first run() call picks up 'offset'
        realOffset = 0;
    }

    @Override
    public final void run() {
        if (realTotalOffset == Integer.MAX_VALUE) {
            realTotalOffset = offset;
        }
        // Split the remaining scroll distance into tenths and redraw one tenth per tick.
        realOffset = (int) ((float) realTotalOffset * 0.1F);
        if (realOffset == 0) {
            // Never let the step round down to zero (the animation would stall); keep the sign.
            if (realTotalOffset < 0) {
                realOffset = -1;
            } else {
                realOffset = 1;
            }
        }
        if (Math.abs(realTotalOffset) <= 1) {
            // Close enough: stop the scheduled task and report the selected item.
            loopView.cancelFuture();
            loopView.handler.sendEmptyMessage(MessageHandler.WHAT_ITEM_SELECTED);
        } else {
            loopView.totalScrollY = loopView.totalScrollY + realOffset;
            // When not in loop mode, a tap on an empty area must roll back,
            // otherwise item -1 could end up selected.
            if (!loopView.isLoop) {
                float itemHeight = loopView.itemHeight;
                float top = (float) (-loopView.initPosition) * itemHeight;
                float bottom = (float) (loopView.getItemsCount() - 1 - loopView.initPosition) * itemHeight;
                if (loopView.totalScrollY <= top || loopView.totalScrollY >= bottom) {
                    // Out of bounds: undo this tick's movement and finish immediately.
                    loopView.totalScrollY = loopView.totalScrollY - realOffset;
                    loopView.cancelFuture();
                    loopView.handler.sendEmptyMessage(MessageHandler.WHAT_ITEM_SELECTED);
                    return;
                }
            }
            loopView.handler.sendEmptyMessage(MessageHandler.WHAT_INVALIDATE_LOOP_VIEW);
            realTotalOffset = realTotalOffset - realOffset;
        }
    }
}
| apache-2.0 |
strator-dev/greenpepper | greenpepper/greenpepper-client/src/main/java/com/greenpepper/server/rpc/runner/report/HtmlReport.java | 3703 |
/**
* Copyright (c) 2008 Pyxis Technologies inc.
*
* This is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA,
* or see the FSF site: http://www.fsf.org.
*
* @author oaouattara
* @version $Id: $Id
*/
package com.greenpepper.server.rpc.runner.report;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import com.greenpepper.server.domain.Execution;
import com.greenpepper.server.rpc.runner.XmlRpcRemoteRunner;
import com.greenpepper.util.ExceptionImposter;
import com.greenpepper.util.ExceptionUtils;
public class HtmlReport
        implements Report
{
    private final String name;

    // Result of the last generate() call; rendered as HTML by printTo().
    private Execution execution;

    // Error reported via renderException(); takes precedence over the execution in printTo().
    private Throwable exception;

    /**
     * <p>newInstance.</p>
     *
     * @param name a {@link java.lang.String} object.
     * @return a {@link com.greenpepper.server.rpc.runner.report.HtmlReport} object.
     */
    public static HtmlReport newInstance(String name)
    {
        return new HtmlReport(name);
    }

    /**
     * <p>Constructor for HtmlReport.</p>
     *
     * @param name a {@link java.lang.String} object.
     */
    public HtmlReport(String name)
    {
        this.name = name;
    }

    /**
     * <p>Getter for the field <code>name</code>.</p>
     *
     * @return a {@link java.lang.String} object.
     */
    public String getName()
    {
        return name;
    }

    /**
     * <p>getType.</p>
     *
     * @return a {@link java.lang.String} object.
     */
    public String getType()
    {
        return "html";
    }

    /** {@inheritDoc} */
    public void printTo(Writer writer)
            throws IOException
    {
        // A rendered exception wins over any execution result.
        if (exception != null)
        {
            writer.write(ExceptionUtils.stackTrace(exception, "\n"));
            writer.flush();
            return;
        }
        if (execution != null)
        {
            writer.write(toHtml(execution, true));
            writer.flush();
        }
    }

    /** {@inheritDoc} */
    public void renderException(Throwable t)
    {
        this.exception = t;
    }

    /** {@inheritDoc} */
    public void generate(Execution execution)
    {
        this.execution = execution;
    }

    /**
     * Wraps the execution results in a full HTML page (with the embedded style sheet)
     * when includeStyle is true, otherwise returns the raw results.
     */
    private String toHtml(Execution execution, boolean includeStyle)
            throws IOException
    {
        StringBuilder html = new StringBuilder();
        String results = execution.getResults();
        if (includeStyle)
        {
            html.append("<html>\n")
                    .append(" <head>\n")
                    .append(" <title>").append(getName()).append("</title>\n")
                    .append("<style>\n")
                    .append(getStyleContent())
                    .append("\n</style>\n")
                    .append("</head>\n")
                    .append("<body>\n")
                    .append("<div id=\"Content\" style=\"text-align:left; padding: 5px;\">")
                    // Strip the nested <html> tags from the embedded results to keep the page valid.
                    .append(results.replace("<html>", "").replace("</html>", ""))
                    .append("</div>\n")
                    .append("</body>\n")
                    .append("</html>");
        }
        else
        {
            html.append(results);
        }
        return html.toString();
    }

    /**
     * Reads the bundled style.css from the runner's jar.
     * <p>
     * FIX: the original relied on {@code InputStream.available()} for the total length (which may
     * under-report and truncate the file) and never closed the stream. The stream is now read to
     * exhaustion and closed in a finally block.
     */
    private String getStyleContent()
    {
        InputStream is = null;
        try
        {
            is = XmlRpcRemoteRunner.class.getResource("style.css").openStream();
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            byte[] chunk = new byte[4096];
            int read;
            while ((read = is.read(chunk)) != -1)
            {
                buffer.write(chunk, 0, read);
            }
            if (buffer.size() == 0)
            {
                throw new Exception("Cannot read style.css resource from jar");
            }
            return buffer.toString();
        }
        catch (Exception ex)
        {
            throw ExceptionImposter.imposterize(ex);
        }
        finally
        {
            if (is != null)
            {
                try
                {
                    is.close();
                }
                catch (IOException ignored)
                {
                    // best-effort close; the content has already been read
                }
            }
        }
    }
}
| apache-2.0 |
nmalesic/LocoMaps | src/com/locompas/edd/bl/model/bd/session/SessionConnection.java | 2053 | package com.locompas.edd.bl.model.bd.session;
import java.util.ArrayList;
import java.util.HashMap;
import javax.servlet.http.HttpSession;
import com.locomaps.edd.bl.model.User;
import com.locomaps.edd.bl.model.db.Persistance;
import com.locomaps.edd.bl.model.db.PersistanceManager;
import com.locomaps.edd.bl.model.db.PersistanceParameter;
/**
 * Placeholder for the session-scoped persistence implementation.
 * <p>
 * The original {@code Persistance} implementation backed by {@code HttpSession}
 * (user lookup/add/change against a session-stored map) was fully commented out and has
 * been removed from this file; recover it from version control if session-based
 * persistence is needed again. The class itself is kept so existing references compile.
 */
public class SessionConnection {
}
| apache-2.0 |
suninformation/ymate-platform-v2 | ymate-platform-persistence-jdbc/src/main/java/net/ymate/platform/persistence/jdbc/base/impl/DefaultProcedureOperator.java | 8131 | /*
* Copyright 2007-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ymate.platform.persistence.jdbc.base.impl;
import net.ymate.platform.commons.util.ExpressionUtils;
import net.ymate.platform.core.persistence.base.Type;
import net.ymate.platform.persistence.jdbc.IDatabaseConnectionHolder;
import net.ymate.platform.persistence.jdbc.IDatabaseDataSourceConfig;
import net.ymate.platform.persistence.jdbc.base.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.sql.CallableStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Default database stored-procedure operator implementation.
 *
 * @param <T> element type of the rows produced by the procedure's result sets
 * @author 刘镇 (suninformation@163.com) on 16/12/8 上午1:04
 */
public class DefaultProcedureOperator<T> extends AbstractOperator implements IProcedureOperator<T> {

    private static final Log LOG = LogFactory.getLog(DefaultProcedureOperator.class);

    /**
     * SQL types (java.sql.Types values) of the procedure's OUT parameters,
     * registered positionally after all IN parameters.
     */
    private final List<Integer> outParams = new ArrayList<>();

    // Callback invoked with each OUT parameter value when the procedure returns no result set.
    // NOTE(review): may still be null when execute() is called directly - confirm callers always set one.
    private IOutResultProcessor resultProcessor;

    // Handler that converts each JDBC ResultSet into a List<T>.
    // NOTE(review): may still be null when execute() is called directly - confirm callers always set one.
    private IResultSetHandler<T> resultSetHandler;

    // One List<T> per result set returned by the procedure, in order.
    private final List<List<T>> resultSets = new ArrayList<>();

    public DefaultProcedureOperator(String sql, IDatabaseConnectionHolder connectionHolder) {
        super(sql, connectionHolder);
    }

    public DefaultProcedureOperator(String sql, IDatabaseConnectionHolder connectionHolder, IAccessorConfig accessorConfig) {
        super(sql, connectionHolder, accessorConfig);
    }

    /**
     * Executes the procedure exactly once, timing the call and (optionally) logging the
     * executed SQL with its parameters.
     */
    @Override
    public void execute() throws Exception {
        if (!this.executed) {
            StopWatch time = new StopWatch();
            time.start();
            try {
                doExecute();
                // Mark as executed only when no exception occurred, preventing re-execution.
                this.executed = true;
            } finally {
                time.stop();
                this.expenseTime = time.getTime();
                // Optionally log the executed SQL, its parameters and the elapsed time.
                if (LOG.isInfoEnabled()) {
                    IDatabaseDataSourceConfig dataSourceConfig = this.getConnectionHolder().getDataSourceConfig();
                    if (dataSourceConfig.isShowSql()) {
                        String logStr = ExpressionUtils.bind("[${sql}]${param}[${count}][${time}]")
                                .set("sql", StringUtils.defaultIfBlank(this.sql, "@NULL"))
                                .set("param", serializeParameters())
                                .set("count", "N/A")
                                .set("time", this.expenseTime + "ms").getResult();
                        if (dataSourceConfig.isStackTraces()) {
                            StringBuilder stackBuilder = new StringBuilder(logStr);
                            doAppendStackTraces(dataSourceConfig, stackBuilder);
                            LOG.info(stackBuilder.toString());
                        } else {
                            LOG.info(logStr);
                        }
                    }
                }
            }
        }
    }

    /** Convenience: sets the result-set handler, then executes. */
    @Override
    public IProcedureOperator<T> execute(IResultSetHandler<T> resultSetHandler) throws Exception {
        this.resultSetHandler = resultSetHandler;
        this.execute();
        return this;
    }

    /** Convenience: sets the OUT-parameter processor, then executes. */
    @Override
    public IProcedureOperator<T> execute(IOutResultProcessor resultProcessor) throws Exception {
        this.resultProcessor = resultProcessor;
        this.execute();
        return this;
    }

    /**
     * Builds the CALL statement, binds IN parameters, registers OUT parameters, then executes.
     * When the procedure yields result sets they are collected via the resultSetHandler;
     * otherwise the OUT parameter values are fed to the resultProcessor.
     *
     * @return always -1 (no update count is tracked for procedures)
     */
    @Override
    protected int doExecute() throws Exception {
        CallableStatement statement = null;
        AccessorEventContext eventContext = null;
        boolean hasEx = false;
        try {
            IAccessor accessor = new BaseAccessor(this.getAccessorConfig());
            statement = accessor.getCallableStatement(this.getConnectionHolder().getConnection(), doBuildCallSql());
            doSetParameters(statement);
            doRegisterOutParams(statement);
            if (this.getAccessorConfig() != null) {
                eventContext = new AccessorEventContext(statement, Type.OPT.PROCEDURE);
                this.getAccessorConfig().beforeStatementExecution(eventContext);
            }
            // true: the first result is a ResultSet; false: OUT parameters / update counts only.
            boolean flag = statement.execute();
            if (flag) {
                do {
                    ResultSet resultSet = statement.getResultSet();
                    if (resultSet != null) {
                        resultSets.add(resultSetHandler.handle(resultSet));
                        resultSet.close();
                    }
                } while (statement.getMoreResults());
            } else {
                // OUT parameters start right after the last IN parameter (JDBC positions are 1-based).
                int idx = this.getParameters().size() + 1;
                for (Integer paramType : outParams) {
                    resultProcessor.process(idx, paramType, statement.getObject((idx)));
                    idx++;
                }
            }
            return -1;
        } catch (Exception ex) {
            hasEx = true;
            throw ex;
        } finally {
            // Fire the after-execution hook only for successful executions.
            if (!hasEx && this.getAccessorConfig() != null && eventContext != null) {
                this.getAccessorConfig().afterStatementExecution(eventContext);
            }
            if (statement != null) {
                statement.close();
            }
        }
    }

    /**
     * Builds the procedure CALL statement; subclasses may override for database-specific syntax.
     *
     * @return the CALL statement, e.g. {@code {CALL proc(?,?)}}
     */
    protected String doBuildCallSql() {
        List<String> params = new ArrayList<>();
        for (int i = 0; i < this.getParameters().size() + this.outParams.size(); i++) {
            params.add("?");
        }
        this.sql = String.format("{CALL %s%s}", this.getSQL(), params.isEmpty() ? "()" : String.format("(%s)", StringUtils.join(params, ',')));
        return this.sql;
    }

    /**
     * Registers the procedure's OUT parameters, starting after the last IN parameter;
     * subclasses may override for database-specific behavior.
     *
     * @param statement CallableStatement
     * @throws SQLException if parameter registration fails
     */
    protected void doRegisterOutParams(CallableStatement statement) throws SQLException {
        int idx = this.getParameters().size() + 1;
        for (Integer type : outParams) {
            statement.registerOutParameter(idx++, type);
        }
    }

    @Override
    @SuppressWarnings("unchecked")
    public IProcedureOperator<T> addParameter(SQLParameter parameter) {
        return (IProcedureOperator<T>) super.addParameter(parameter);
    }

    @Override
    @SuppressWarnings("unchecked")
    public IProcedureOperator<T> addParameter(Object parameter) {
        return (IProcedureOperator<T>) super.addParameter(parameter);
    }

    @Override
    public IProcedureOperator<T> addOutParameter(Integer sqlParamType) {
        this.outParams.add(sqlParamType);
        return this;
    }

    @Override
    public IProcedureOperator<T> setOutResultProcessor(IOutResultProcessor outResultProcessor) {
        resultProcessor = outResultProcessor;
        return this;
    }

    @Override
    public IProcedureOperator<T> setResultSetHandler(IResultSetHandler<T> resultSetHandler) {
        this.resultSetHandler = resultSetHandler;
        return this;
    }

    @Override
    public List<List<T>> getResultSets() {
        // Unmodifiable view so callers cannot mutate the collected results.
        return Collections.unmodifiableList(resultSets);
    }
}
| apache-2.0 |
SES-fortiss/SmartGridCoSimulation | core/cim15/src/CIM15/IEC61968/AssetModels/ConductorUsageKind.java | 5925 | /**
*/
package CIM15.IEC61968.AssetModels;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.eclipse.emf.common.util.Enumerator;
/**
 * <!-- begin-user-doc -->
 * A representation of the literals of the enumeration '<em><b>Conductor Usage Kind</b></em>',
 * and utility methods for working with them.
 * <p>
 * NOTE(review): every member below is tagged {@code @generated} - this file appears to be
 * EMF-generated; prefer regenerating from the model over hand edits.
 * <!-- end-user-doc -->
 * @see CIM15.IEC61968.AssetModels.AssetModelsPackage#getConductorUsageKind()
 * @generated
 */
public enum ConductorUsageKind implements Enumerator {
    /**
     * The '<em><b>Secondary</b></em>' literal object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #SECONDARY_VALUE
     * @generated
     * @ordered
     */
    SECONDARY(0, "secondary", "secondary"),

    /**
     * The '<em><b>Other</b></em>' literal object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #OTHER_VALUE
     * @generated
     * @ordered
     */
    OTHER(1, "other", "other"),

    /**
     * The '<em><b>Distribution</b></em>' literal object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #DISTRIBUTION_VALUE
     * @generated
     * @ordered
     */
    DISTRIBUTION(2, "distribution", "distribution"),

    /**
     * The '<em><b>Transmission</b></em>' literal object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #TRANSMISSION_VALUE
     * @generated
     * @ordered
     */
    TRANSMISSION(3, "transmission", "transmission");

    /**
     * The '<em><b>Secondary</b></em>' literal value.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of '<em><b>Secondary</b></em>' literal object isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     * @see #SECONDARY
     * @generated
     * @ordered
     */
    public static final int SECONDARY_VALUE = 0;

    /**
     * The '<em><b>Other</b></em>' literal value.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of '<em><b>Other</b></em>' literal object isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     * @see #OTHER
     * @generated
     * @ordered
     */
    public static final int OTHER_VALUE = 1;

    /**
     * The '<em><b>Distribution</b></em>' literal value.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of '<em><b>Distribution</b></em>' literal object isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     * @see #DISTRIBUTION
     * @generated
     * @ordered
     */
    public static final int DISTRIBUTION_VALUE = 2;

    /**
     * The '<em><b>Transmission</b></em>' literal value.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of '<em><b>Transmission</b></em>' literal object isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     * @see #TRANSMISSION
     * @generated
     * @ordered
     */
    public static final int TRANSMISSION_VALUE = 3;

    /**
     * An array of all the '<em><b>Conductor Usage Kind</b></em>' enumerators.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private static final ConductorUsageKind[] VALUES_ARRAY =
        new ConductorUsageKind[] {
            SECONDARY,
            OTHER,
            DISTRIBUTION,
            TRANSMISSION,
        };

    /**
     * A public read-only list of all the '<em><b>Conductor Usage Kind</b></em>' enumerators.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public static final List<ConductorUsageKind> VALUES = Collections.unmodifiableList(Arrays.asList(VALUES_ARRAY));

    /**
     * Returns the '<em><b>Conductor Usage Kind</b></em>' literal with the specified literal value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public static ConductorUsageKind get(String literal) {
        for (int i = 0; i < VALUES_ARRAY.length; ++i) {
            ConductorUsageKind result = VALUES_ARRAY[i];
            if (result.toString().equals(literal)) {
                return result;
            }
        }
        return null;
    }

    /**
     * Returns the '<em><b>Conductor Usage Kind</b></em>' literal with the specified name.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public static ConductorUsageKind getByName(String name) {
        for (int i = 0; i < VALUES_ARRAY.length; ++i) {
            ConductorUsageKind result = VALUES_ARRAY[i];
            if (result.getName().equals(name)) {
                return result;
            }
        }
        return null;
    }

    /**
     * Returns the '<em><b>Conductor Usage Kind</b></em>' literal with the specified integer value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public static ConductorUsageKind get(int value) {
        switch (value) {
            case SECONDARY_VALUE: return SECONDARY;
            case OTHER_VALUE: return OTHER;
            case DISTRIBUTION_VALUE: return DISTRIBUTION;
            case TRANSMISSION_VALUE: return TRANSMISSION;
        }
        return null;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private final int value;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private final String name;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private final String literal;

    /**
     * Only this class can construct instances.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private ConductorUsageKind(int value, String name, String literal) {
        this.value = value;
        this.name = name;
        this.literal = literal;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public int getValue() {
        return value;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getName() {
        return name;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getLiteral() {
        return literal;
    }

    /**
     * Returns the literal value of the enumerator, which is its string representation.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        return literal;
    }
} //ConductorUsageKind
| apache-2.0 |
brant-hwang/armeria | src/main/java/com/linecorp/armeria/client/http/SimpleHttpRequest.java | 2958 | /*
* Copyright 2015 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client.http;
import java.net.URI;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;
/**
 * Immutable value holder describing an HTTP request to send. Unlike {@link FullHttpRequest},
 * the body is carried as a plain byte array so callers need not manage reference counting.
 */
public class SimpleHttpRequest {

    private final URI uri;
    private final HttpMethod method;
    private final HttpHeaders headers;
    private final byte[] content;

    SimpleHttpRequest(URI uri, HttpMethod method, HttpHeaders headers,
                      byte[] content) {
        this.uri = uri;
        this.method = method;
        this.headers = new ImmutableHttpHeaders(headers);
        this.content = content;
    }

    /**
     * Returns this request's URI.
     */
    public URI uri() {
        return uri;
    }

    /**
     * Returns this request's HTTP method.
     */
    public HttpMethod method() {
        return method;
    }

    /**
     * Returns this request's HTTP headers (wrapped as immutable).
     */
    public HttpHeaders headers() {
        return headers;
    }

    /**
     * Returns the length of this request's content in bytes.
     */
    public int contentLength() {
        return content.length;
    }

    /**
     * Copies this request's content into {@code dst}, starting at {@code offset}.
     */
    public void readContent(byte[] dst, int offset, int length) {
        System.arraycopy(content, 0, dst, offset, length);
    }

    byte[] content() {
        return content;
    }

    @Override
    public String toString() {
        return toString(uri, method, headers, content);
    }

    static String toString(URI uri, HttpMethod method, HttpHeaders headers,
                           byte[] content) {
        StringBuilder sb = new StringBuilder()
                .append('(')
                .append("uri: ").append(uri)
                .append(", method: ").append(method)
                .append(", headers: ").append(headers)
                .append(", content: ");
        if (content.length == 0) {
            sb.append("<none>");
        } else {
            sb.append("<length: ").append(content.length).append('>');
        }
        return sb.append(')').toString();
    }
}
| apache-2.0 |
dumitru-petrusca/gosu-lang | gosu-core/src/main/java/gw/internal/gosu/parser/JavaMethodInfo.java | 23088 | /*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser;
import gw.internal.ext.org.objectweb.asm.Opcodes;
import gw.internal.gosu.parser.java.classinfo.JavaSourceDefaultValue;
import gw.lang.Deprecated;
import gw.lang.GosuShop;
import gw.lang.PublishedName;
import gw.lang.javadoc.IClassDocNode;
import gw.lang.javadoc.IDocRef;
import gw.lang.javadoc.IExceptionNode;
import gw.lang.javadoc.IMethodNode;
import gw.lang.javadoc.IParamNode;
import gw.lang.parser.GosuParserTypes;
import gw.lang.parser.TypeVarToTypeMap;
import gw.lang.reflect.IAnnotationInfo;
import gw.lang.reflect.IExceptionInfo;
import gw.lang.reflect.IFeatureInfo;
import gw.lang.reflect.IMethodCallHandler;
import gw.lang.reflect.IParameterInfo;
import gw.lang.reflect.IScriptabilityModifier;
import gw.lang.reflect.IType;
import gw.lang.reflect.SimpleParameterInfo;
import gw.lang.reflect.TypeInfoUtil;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.gs.IGenericTypeVariable;
import gw.lang.reflect.gs.IGosuClass;
import gw.lang.reflect.java.ClassInfoUtil;
import gw.lang.reflect.java.IJavaAnnotatedElement;
import gw.lang.reflect.java.IJavaClassGenericArrayType;
import gw.lang.reflect.java.IJavaClassInfo;
import gw.lang.reflect.java.IJavaClassMethod;
import gw.lang.reflect.java.IJavaClassParameterizedType;
import gw.lang.reflect.java.IJavaClassType;
import gw.lang.reflect.java.IJavaClassTypeVariable;
import gw.lang.reflect.java.IJavaClassWildcardType;
import gw.lang.reflect.java.IJavaMethodDescriptor;
import gw.lang.reflect.java.IJavaMethodInfo;
import gw.lang.reflect.java.JavaExceptionInfo;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
/**
*/
public class JavaMethodInfo extends JavaBaseFeatureInfo implements IJavaMethodInfo
{
private static final int UNINITED = 0;
private static final int TRUE_ENC = 1;
private static final int FALSE_ENC = 2;
private IJavaMethodDescriptor _md;
private boolean _forceHidden;
private IParameterInfo[] _paramsWithoutTypeVars;
private IParameterInfo[] _paramsWithTypeVars;
private IType _retTypeWithTypeVars;
private IType _retTypeWithoutTypeVars;
private IMethodCallHandler _callHandler;
private IGenericTypeVariable[] _typeVars;
private int _staticCache = UNINITED;
private List<IExceptionInfo> _exceptions;
private IDocRef<IMethodNode> _methodDocs = new IDocRef<IMethodNode>() {
@Override
public IMethodNode get() {
if (getContainer() instanceof JavaTypeInfo) {
IClassDocNode classDocs = ((JavaTypeInfo) getContainer()).getDocNode().get();
return classDocs == null ? null : classDocs.getMethod(_md);
} else {
return null;
}
}
};
private String _name;
private String _signature;
/**
* @param container Typically this will be the containing ITypeInfo
* @param md The method descriptor (from BeanInfo)
*/
public JavaMethodInfo(IFeatureInfo container, IJavaMethodDescriptor md, boolean forceHidden) {
super(container);
_md = md;
_forceHidden = forceHidden;
_name = _md.getName();
if (_md.getMethod().isAnnotationPresent( PublishedName.class)) {
_name = (String) _md.getMethod().getAnnotation( PublishedName.class).getFieldValue("value");
}
_signature = makeSignature();
}
@Override
public IParameterInfo[] getGenericParameters()
{
return getParameters( true );
}
@Override
public IParameterInfo[] getParameters()
{
IType ownerType = getOwnersType();
return getParameters( !ownerType.isGenericType() || ownerType.isParameterizedType() );
}
private IParameterInfo[] getParameters( boolean bKeepTypeVars )
{
if( bKeepTypeVars )
{
if( _paramsWithTypeVars == null )
{
_paramsWithTypeVars = convertParameterDescriptors( bKeepTypeVars );
}
return _paramsWithTypeVars;
}
else
{
if( _paramsWithoutTypeVars == null )
{
_paramsWithoutTypeVars = convertParameterDescriptors( bKeepTypeVars );
}
return _paramsWithoutTypeVars;
}
}
@Override
public IType getGenericReturnType()
{
return getReturnType( true );
}
@Override
public IType getReturnType()
{
IType ownerType = getOwnersType();
return getReturnType( !ownerType.isGenericType() || ownerType.isParameterizedType() );
}
private IType getReturnType( boolean bKeepTypeVars )
{
return bKeepTypeVars ? getReturnTypeWithTypeVars() : getReturnTypeWithoutTypeVars();
}
/**
 * Computes (and caches) the return type with type variables replaced by
 * their bounding types, resolved against the owner's actual type parameters.
 * Returns null when the resolved type has been deleted from the type system.
 */
private IType getReturnTypeWithoutTypeVars()
{
if( _retTypeWithoutTypeVars != null )
{
return _retTypeWithoutTypeVars;
}
IType declaringClass = _md.getMethod().getEnclosingClass().getJavaType();
// Map class-level type vars to the owner's actuals, then fold in type vars
// from enclosing (non-static) types and from this method itself.
TypeVarToTypeMap actualParamByVarName = TypeLord.mapTypeByVarName( getOwnersType(), declaringClass, false );
actualParamByVarName = addEnclosingTypeParams( declaringClass, actualParamByVarName );
for( IGenericTypeVariable tv : getTypeVariables() )
{
// Replace an empty map before mutating — presumably mapTypeByVarName can
// return a shared empty instance (cf. TypeVarToTypeMap.EMPTY_MAP usage below).
if( actualParamByVarName.isEmpty() )
{
actualParamByVarName = new TypeVarToTypeMap();
}
// In this variant, method-level type vars resolve to their bounding types.
actualParamByVarName.put( tv.getTypeVariableDefinition().getType(), tv.getBoundingType() );
}
IType retType = ClassInfoUtil.getActualReturnType( _md.getMethod().getGenericReturnType(), actualParamByVarName, false );
if( TypeSystem.isDeleted( retType ) )
{
// Deliberately not cached so a later call can re-resolve.
return null;
}
// Default raw generic results, e.g. List -> default-parameterized List.
if( retType.isGenericType() && !retType.isParameterizedType() )
{
retType = TypeLord.getDefaultParameterizedType( retType );
}
retType = ClassInfoUtil.getPublishedType(retType, _md.getMethod().getEnclosingClass());
_retTypeWithoutTypeVars = retType;
return retType;
}
/**
 * Computes (and caches) the return type with method/class type variables
 * preserved. Mirrors getReturnTypeWithoutTypeVars() except method-level type
 * vars map to themselves rather than to their bounding types.
 */
private IType getReturnTypeWithTypeVars()
{
if( _retTypeWithTypeVars != null )
{
return _retTypeWithTypeVars;
}
IType declaringClass = _md.getMethod().getEnclosingClass().getJavaType();
TypeVarToTypeMap actualParamByVarName = TypeLord.mapTypeByVarName( getOwnersType(), declaringClass, true );
actualParamByVarName = addEnclosingTypeParams( declaringClass, actualParamByVarName );
for( IGenericTypeVariable tv : getTypeVariables() )
{
if( actualParamByVarName.isEmpty() )
{
actualParamByVarName = new TypeVarToTypeMap();
}
// NOTE(review): the null check below is ineffective — the key expression on
// the previous line already dereferences getTypeVariableDefinition(), so the
// TypeVariableType fallback branch is unreachable without an NPE first.
actualParamByVarName.put( tv.getTypeVariableDefinition().getType(),
tv.getTypeVariableDefinition() != null
? tv.getTypeVariableDefinition().getType()
: new TypeVariableType( getOwnersType(), tv ) );
}
IType retType = ClassInfoUtil.getActualReturnType( _md.getMethod().getGenericReturnType(), actualParamByVarName, true );
if( TypeSystem.isDeleted( retType ) )
{
// Deliberately not cached so a later call can re-resolve.
return null;
}
if( retType.isGenericType() && !retType.isParameterizedType() )
{
retType = TypeLord.getDefaultParameterizedType( retType );
}
retType = ClassInfoUtil.getPublishedType(retType, _md.getMethod().getEnclosingClass());
_retTypeWithTypeVars = retType;
return retType;
}
/**
 * Walks up the chain of non-static enclosing types and adds each enclosing
 * type's type variables (mapped to themselves) to the given map. Returns the
 * possibly-replaced map (a fresh map is allocated before mutating an empty one).
 */
public static TypeVarToTypeMap addEnclosingTypeParams( IType declaringClass, TypeVarToTypeMap actualParamByVarName )
{
// Static nested types do not inherit enclosing type parameters, so stop there.
while( declaringClass.getEnclosingType() != null && !Modifier.isStatic( declaringClass.getModifiers() ) )
{
declaringClass = declaringClass.getEnclosingType();
IGenericTypeVariable[] typeVariables = declaringClass.getGenericTypeVariables();
if( typeVariables != null )
{
for( IGenericTypeVariable typeVariable : typeVariables )
{
if( actualParamByVarName.isEmpty() )
{
actualParamByVarName = new TypeVarToTypeMap();
}
// NOTE(review): same ineffective null check as getReturnTypeWithTypeVars():
// the key expression already dereferences getTypeVariableDefinition().
actualParamByVarName.put( typeVariable.getTypeVariableDefinition().getType(),
typeVariable.getTypeVariableDefinition() != null
? typeVariable.getTypeVariableDefinition().getType()
: new TypeVariableType( declaringClass, typeVariable ) );
}
}
}
return actualParamByVarName;
}
/**
 * Returns the declared annotations, adding a synthesized Deprecated
 * annotation when this method's javadoc marks it deprecated.
 */
@Override
public List<IAnnotationInfo> getDeclaredAnnotations() {
  List<IAnnotationInfo> annotations = super.getDeclaredAnnotations();
  // Resolve the doc node once: each IDocRef.get() call performs a fresh
  // class-doc lookup (see the _methodDocs field), and the original resolved
  // it three times.
  IMethodNode docs = getMethodDocs().get();
  if (docs != null && docs.isDeprecated()) {
    annotations.add(GosuShop.getAnnotationInfoFactory().createJavaAnnotation(makeDeprecated(docs.getDeprecated()), this));
  }
  return annotations;
}
/** Lazily builds and caches the type variables declared by this method. */
@Override
public IGenericTypeVariable[] getTypeVariables()
{
if( _typeVars == null )
{
_typeVars = _md.getMethod().getTypeVariables(this);
}
return _typeVars;
}
/**
 * Resolves the generic return type against explicit type arguments supplied
 * for this method's type variables, in declaration order.
 */
@Override
public IType getParameterizedReturnType( IType... typeParams )
{
  TypeVarToTypeMap map =
    TypeLord.mapTypeByVarName( getOwnersType(), _md.getMethod().getEnclosingClass().getJavaType(), true );
  int idx = 0;
  for( IGenericTypeVariable tv : getTypeVariables() )
  {
    if( map.isEmpty() )
    {
      map = new TypeVarToTypeMap();
    }
    map.putByString( tv.getName(), typeParams[idx] );
    idx++;
  }
  return _md.getMethod().getGenericReturnType().getActualType( map, true );
}
/** Convenience for getParameterizedParameterTypes2 using this info's owner type. */
public IType[] getParameterizedParameterTypes( IType... typeParams )
{
return getParameterizedParameterTypes2( null, typeParams );
}
/**
 * Resolves the generic parameter types against explicit type arguments for
 * this method's type variables.
 *
 * @param ownersType owner to resolve against; null means this info's owner
 */
public IType[] getParameterizedParameterTypes2( IGosuClass ownersType, IType... typeParams )
{
  IType owner = ownersType != null ? ownersType : getOwnersType();
  TypeVarToTypeMap map =
    TypeLord.mapTypeByVarName( owner, _md.getMethod().getEnclosingClass().getJavaType(), true );
  int idx = 0;
  for( IGenericTypeVariable tv : getTypeVariables() )
  {
    if( map.isEmpty() )
    {
      map = new TypeVarToTypeMap();
    }
    map.putByString( tv.getName(), typeParams[idx++] );
  }
  return ClassInfoUtil.getActualTypes( _md.getMethod().getGenericParameterTypes(), map, true );
}
// Example of what this handles:
// <T extends CharSequence> T[] foo( ArrayList<? extends T>[] s ) { return null; }
/** Convenience for inferTypeParametersFromArgumentTypes2 using this info's owner type. */
@Override
public TypeVarToTypeMap inferTypeParametersFromArgumentTypes( IType... argTypes )
{
return inferTypeParametersFromArgumentTypes2( null, argTypes );
}
/**
 * Infers concrete types for this method's type variables from the given
 * argument types, clamping each inferred type to its bounding type.
 *
 * @param ownersType owner to resolve against; null means this info's owner
 * @return a map from type variable to inferred type (possibly empty)
 */
@Override
public TypeVarToTypeMap inferTypeParametersFromArgumentTypes2( IGosuClass ownersType, IType... argTypes )
{
IJavaClassType[] genParamTypes = _md.getMethod().getGenericParameterTypes();
IType ot = ownersType == null ? getOwnersType() : ownersType;
TypeVarToTypeMap actualParamByVarName = TypeLord.mapTypeByVarName( ot, _md.getMethod().getEnclosingClass().getJavaType(), true );
IGenericTypeVariable[] typeVars = getTypeVariables();
for( IGenericTypeVariable tv : typeVars )
{
// Replace an empty map before mutating it.
if( actualParamByVarName.isEmpty() )
{
actualParamByVarName = new TypeVarToTypeMap();
}
// For recursive bounds (e.g. T extends Comparable<T>) record the pure
// generic bounding type instead, avoiding self-referential resolution.
if( !TypeLord.isRecursiveType( tv.getTypeVariableDefinition().getType(), tv.getBoundingType() ) )
{
actualParamByVarName.put( tv.getTypeVariableDefinition().getType(), tv.getBoundingType() );
}
else
{
actualParamByVarName.put( tv.getTypeVariableDefinition().getType(), TypeLord.getPureGenericType( tv.getBoundingType() ) );
}
}
TypeVarToTypeMap map = new TypeVarToTypeMap();
for( int i = 0; i < argTypes.length; i++ )
{
// Extra args beyond the declared parameter count are ignored.
if( genParamTypes.length > i )
{
IType argType = argTypes[i];
IJavaClassType genParamType = genParamTypes[i];
inferTypeVariableTypesFromGenParamTypeAndConcreteType( genParamType, argType, map );
ensureInferredTypeAssignableToBoundingType( actualParamByVarName, map );
}
}
return map;
}
/**
 * Clamps inferred type arguments to their declared bounds: when an inferred
 * type is not assignable to its type variable's bounding type, the bounding
 * type wins.
 */
private void ensureInferredTypeAssignableToBoundingType( TypeVarToTypeMap actualParamByVarName, TypeVarToTypeMap map )
{
  for( Object s : map.keySet() )
  {
    IType inferredType = map.getRaw( s );
    IType boundingType = actualParamByVarName.getRaw( s );
    // Guard against a type var inferred from a nested position that has no
    // entry in the bounds map; the original dereferenced null here.
    if( boundingType != null && !boundingType.isAssignableFrom( inferredType ) )
    {
      map.putRaw( s, boundingType );
    }
  }
}
/**
 * Recursively matches a generic parameter type against a concrete argument
 * type, recording inferred bindings for any type variables encountered.
 * NULL_TYPE arguments contribute nothing.
 */
private void inferTypeVariableTypesFromGenParamTypeAndConcreteType( IJavaClassType genParamType, IType argType, TypeVarToTypeMap map )
{
if( argType == GosuParserTypes.NULL_TYPE() )
{
return;
}
if( genParamType instanceof IJavaClassGenericArrayType)
{
// Array case: recurse on the component types.
//## todo: DON'T allow a null component type here; we do it now as a hack that enables gosu arrays to be compatible with java arrays
//## todo: same as TypeLord.inferTypeVariableTypesFromGenParamTypeAndConcreteType()
if( argType.getComponentType() == null || !argType.getComponentType().isPrimitive() )
{
inferTypeVariableTypesFromGenParamTypeAndConcreteType(
((IJavaClassGenericArrayType)genParamType).getGenericComponentType(), argType.getComponentType(), map );
}
}
else if( genParamType instanceof IJavaClassParameterizedType)
{
// Parameterized case: re-express the argument in terms of the parameter's
// raw type, then recurse pairwise over the type arguments.
IJavaClassParameterizedType parameterizedType = (IJavaClassParameterizedType)genParamType;
IType argTypeInTermsOfParamType = TypeLord.findParameterizedType( argType, genParamType.getActualType( TypeVarToTypeMap.EMPTY_MAP ) );
if( argTypeInTermsOfParamType == null )
{
return;
}
IType[] concreteTypeParams = argTypeInTermsOfParamType.getTypeParameters();
if( concreteTypeParams != null && concreteTypeParams.length > 0 )
{
int i = 0;
for( IJavaClassType typeArg : parameterizedType.getActualTypeArguments() )
{
inferTypeVariableTypesFromGenParamTypeAndConcreteType( typeArg, concreteTypeParams[i++], map );
}
}
}
else if( genParamType instanceof IJavaClassTypeVariable)
{
String strTypeVarName = genParamType.getName();
IType type = map.getByString( strTypeVarName );
// Only bind when unbound or bound to another type variable; an existing
// concrete binding is kept.
if( type == null || type instanceof TypeVariableType )
{
// Infer the type
map.putByString( strTypeVarName, argType );
}
}
else if( genParamType instanceof IJavaClassWildcardType)
{
// Wildcard case: match against the upper bound.
IJavaClassWildcardType wildcardType = (IJavaClassWildcardType)genParamType;
inferTypeVariableTypesFromGenParamTypeAndConcreteType(
wildcardType.getUpperBound(), argType, map );
}
}
/**
 * Lazily creates the reflective call handler. Returns null when the backing
 * method is not backed by a real java.lang.reflect.Method.
 */
@Override
public IMethodCallHandler getCallHandler()
{
  if( _callHandler == null )
  {
    IJavaClassMethod method = _md.getMethod();
    if( !(method instanceof MethodJavaClassMethod) )
    {
      return null;
    }
    _callHandler = new MethodCallAdapter( ((MethodJavaClassMethod)method).getJavaMethod() );
  }
  return _callHandler;
}
/** Returns the javadoc @return description, or "" when there are no docs. */
@Override
public String getReturnDescription()
{
return getMethodDocs().get() == null ? "" : getMethodDocs().get().getReturnDescription();
}
/**
 * Lazily builds exception infos from the method's declared exception types,
 * each carrying a lazy reference to its javadoc @throws node.
 * NOTE(review): the lazy init is unsynchronized; concurrent first calls may
 * build the list more than once.
 */
@Override
public List<IExceptionInfo> getExceptions()
{
if( _exceptions == null )
{
IJavaClassMethod method = _md.getMethod();
IJavaClassInfo[] classes = method.getExceptionTypes();
_exceptions = new ArrayList<IExceptionInfo>();
for (int i = 0; i < classes.length; i++) {
final IJavaClassInfo exceptionClass = classes[i];
_exceptions.add(new JavaExceptionInfo(this, exceptionClass, new IDocRef<IExceptionNode>() {
@Override
public IExceptionNode get() {
return getMethodDocs().get() == null ? null : getMethodDocs().get().getException(exceptionClass);
}
}));
}
}
// merge in methods exceptions with the annotations
return _exceptions;
}
/** Returns the full display signature (not just the method name); see makeSignature(). */
@Override
public String getName() {
return _signature;
}
/**
 * Builds the display signature used by getName(), e.g.
 * "foo( java.lang.String, int )". The padding spaces inside non-empty
 * parentheses are significant: isDefaultEnumFeature() matches
 * "valueOf( java.lang.String )" verbatim.
 */
private String makeSignature() {
  // StringBuilder instead of repeated string concatenation in the loop.
  StringBuilder sb = new StringBuilder(getDisplayName());
  sb.append(TypeInfoUtil.getTypeVarList(this, true));
  sb.append('(');
  IParameterInfo[] parameterInfos = getGenericParameters();
  if (parameterInfos.length > 0) {
    sb.append(' ');
    for (int i = 0; i < parameterInfos.length; i++) {
      if (i != 0) {
        sb.append(", ");
      }
      sb.append(parameterInfos[i].getFeatureType().getName());
    }
    sb.append(' ');
  }
  sb.append(')');
  return sb.toString();
}
/** Returns the display name (method name, or the @PublishedName override). */
@Override
public String getDisplayName()
{
return _name;
}
/** Returns the javadoc description, or null when there are no docs. */
@Override
public String getShortDescription()
{
return getMethodDocs().get() == null ? null : getMethodDocs().get().getDescription();
}
/** Returns the javadoc description, or null when there are no docs. */
@Override
public String getDescription()
{
return getMethodDocs().get() == null ? null : getMethodDocs().get().getDescription();
}
/** Hidden when forced via the constructor flag, or per the superclass rules. */
@Override
public boolean isHidden() {
return _forceHidden || super.isHidden();
}
/**
 * True for the implicit static enum features values() and
 * valueOf(java.lang.String) on an enum owner.
 */
@Override
protected boolean isDefaultEnumFeature()
{
  // Guard clauses replace the original nested if/else.
  if( !getOwnersType().isEnum() || !isStatic() )
  {
    return false;
  }
  String name = getName();
  return name.equals( "values()" ) || name.equals( "valueOf( java.lang.String )" );
}
/** Never visible when force-hidden; otherwise defers to the superclass. */
@Override
public boolean isVisible(IScriptabilityModifier constraint) {
return !_forceHidden && super.isVisible(constraint);
}
/**
 * Lazily computes and caches whether the backing method is static.
 * NOTE(review): double-checked locking on a non-volatile int cache; worst
 * case is a benign recomputation since the write is an atomic, idempotent
 * int assignment.
 */
@Override
public boolean isStatic()
{
if( _staticCache == UNINITED )
{
synchronized( this )
{
if( _staticCache == UNINITED )
{
if( Modifier.isStatic( _md.getMethod().getModifiers() ) )
{
_staticCache = TRUE_ENC;
}
else
{
_staticCache = FALSE_ENC;
}
}
}
}
return _staticCache == TRUE_ENC;
}
/** True when the backing method has the private modifier. */
@Override
public boolean isPrivate()
{
return Modifier.isPrivate( _md.getMethod().getModifiers() );
}
/** True for package-private (default) access: not private, public, or protected. */
@Override
public boolean isInternal()
{
return !isPrivate() && !isPublic() && !isProtected();
}
/** True when the backing method has the protected modifier. */
@Override
public boolean isProtected()
{
return Modifier.isProtected( _md.getMethod().getModifiers() );
}
/** True when the backing method has the public modifier. */
@Override
public boolean isPublic()
{
return Modifier.isPublic( _md.getMethod().getModifiers() );
}
/** True when the backing method has the abstract modifier. */
@Override
public boolean isAbstract()
{
return Modifier.isAbstract( _md.getMethod().getModifiers() );
}
/** True when the backing method has the final modifier. */
@Override
public boolean isFinal()
{
return Modifier.isFinal( _md.getMethod().getModifiers() );
}
/**
 * Deprecated when flagged via javadoc (class-file deprecation bit), the
 * superclass rules, the imported Deprecated annotation (presumably Gosu's —
 * distinct from java.lang.Deprecated, which is also checked explicitly).
 */
@Override
public boolean isDeprecated()
{
return isJavadocDeprecated() || super.isDeprecated() || getMethod().isAnnotationPresent( Deprecated.class ) || getMethod().isAnnotationPresent( java.lang.Deprecated.class );
}
/** True when the class-file ACC_DEPRECATED bit (set from javadoc @deprecated) is present. */
private boolean isJavadocDeprecated()
{
return (getModifiers() & Opcodes.ACC_DEPRECATED) > 0;
}
/**
 * Returns the deprecation reason, falling back to the "value" field of the
 * (non-java.lang) Deprecated annotation when the superclass provides none.
 */
@Override
public String getDeprecatedReason() {
String deprecated = super.getDeprecatedReason();
if (isDeprecated() && deprecated == null) {
IAnnotationInfo gwDeprecated = getMethod().getAnnotation( Deprecated.class );
return gwDeprecated == null ? null : (String) gwDeprecated.getFieldValue( "value" );
}
return deprecated;
}
/**
 * True when the method declares a non-null default value; the parsed
 * JavaSourceDefaultValue.NULL sentinel does not count.
 */
@Override
public boolean hasAnnotationDefault() {
Object defaultValue = getMethod().getDefaultValue();
return defaultValue != null &&
defaultValue != JavaSourceDefaultValue.NULL;
}
/** Returns the method's declared default value, or null when there is none. */
@Override
public Object getAnnotationDefault() {
return getMethod().getDefaultValue();
}
/**
 * Converts the descriptor's generic parameter types into IParameterInfos,
 * building a type-var resolution map from the owner's actuals, the enclosing
 * types, and this method's own type variables.
 *
 * @param bKeepTypeVars preserve type variables (true) or substitute their
 *                      bounding types (false)
 */
private IParameterInfo[] convertParameterDescriptors( boolean bKeepTypeVars )
{
IType declaringClass = _md.getMethod().getEnclosingClass().getJavaType();
TypeVarToTypeMap actualParamByVarName =
TypeLord.mapTypeByVarName( getOwnersType(), declaringClass, bKeepTypeVars );
actualParamByVarName = addEnclosingTypeParams( declaringClass, actualParamByVarName );
for( IGenericTypeVariable tv : getTypeVariables() )
{
// Replace an empty map before mutating it.
if( actualParamByVarName.isEmpty() )
{
actualParamByVarName = new TypeVarToTypeMap();
}
if( bKeepTypeVars )
{
// NOTE(review): the null check below is ineffective — the key expression
// already dereferences getTypeVariableDefinition() (same pattern as
// getReturnTypeWithTypeVars()).
actualParamByVarName.put( tv.getTypeVariableDefinition().getType(),
tv.getTypeVariableDefinition() != null
? tv.getTypeVariableDefinition().getType()
: new TypeVariableType( getOwnersType(), tv ) );
}
else
{
actualParamByVarName.put( tv.getTypeVariableDefinition().getType(), tv.getBoundingType() );
}
}
IJavaClassType[] paramTypes = _md.getMethod().getGenericParameterTypes();
return convertGenericParameterTypes( this, actualParamByVarName, paramTypes, bKeepTypeVars, _md.getMethod().getEnclosingClass());
}
/**
 * Converts generic parameter types into IParameterInfos, resolving type
 * variables via the supplied map. Entries that cannot be resolved become the
 * error type. Returns null when paramTypes is null.
 */
static IParameterInfo[] convertGenericParameterTypes( IFeatureInfo container,
                                                      TypeVarToTypeMap actualParamByVarName,
                                                      IJavaClassType[] paramTypes,
                                                      boolean bKeepTypeVars,
                                                      IJavaClassInfo declaringClass )
{
  if( paramTypes == null )
  {
    return null;
  }
  IParameterInfo[] result = new IParameterInfo[paramTypes.length];
  for( int i = 0; i < paramTypes.length; i++ )
  {
    IJavaClassType rawType = paramTypes[i];
    IType actualType = rawType == null ? null : rawType.getActualType( actualParamByVarName, bKeepTypeVars );
    if( actualType == null )
    {
      actualType = TypeSystem.getErrorType();
    }
    actualType = ClassInfoUtil.getPublishedType( actualType, declaringClass );
    result[i] = new SimpleParameterInfo( container, actualType, i );
  }
  return result;
}
/** Returns the backing IJavaClassMethod from the descriptor. */
@Override
public IJavaClassMethod getMethod()
{
return _md.getMethod();
}
/** Renders as the full display signature (same as getName()). */
@Override
public String toString()
{
return getName();
}
/** The backing method is the element annotations are read from. */
@Override
protected IJavaAnnotatedElement getAnnotatedElement()
{
return _md.getMethod();
}
/** Delegates visibility to the method descriptor. */
@Override
protected boolean isVisibleViaFeatureDescriptor(IScriptabilityModifier constraint) {
return _md.isVisibleViaFeatureDescriptor(constraint);
}
/** Delegates hidden-ness to the method descriptor. */
@Override
protected boolean isHiddenViaFeatureDescriptor() {
return _md.isHiddenViaFeatureDescriptor();
}
/**
 * Returns a lazy reference to the javadoc @param node at the given index;
 * the reference yields null when docs are missing or the index is out of range.
 */
@Override
public IDocRef<IParamNode> getDocsForParam(final int paramIndex) {
  return new IDocRef<IParamNode>() {
    @Override
    public IParamNode get() {
      IMethodNode docs = getMethodDocs().get();
      if (docs == null) {
        return null;
      }
      List<? extends IParamNode> params = docs.getParams();
      return paramIndex < params.size() ? params.get(paramIndex) : null;
    }
  };
}
/** Returns the lazy reference to this method's javadoc node. */
@Override
public IDocRef<IMethodNode> getMethodDocs() {
return _methodDocs;
}
/**
 * Returns the underlying java.lang.reflect.Method.
 * NOTE(review): unconditional cast — throws ClassCastException for methods not
 * backed by reflection; getCallHandler() guards this case, this method does not.
 */
@Override
public Method getRawMethod() {
return ((MethodJavaClassMethod)_md.getMethod()).getJavaMethod();
}
/** Returns the backing method's modifier bits. */
@Override
public int getModifiers() {
return _md.getMethod().getModifiers();
}
}
| apache-2.0 |
AdaptiveMe/adaptive-arp-api-lib-java | src/main/java/me/adaptive/arp/api/ContactPersonalInfo.java | 4402 | /**
--| ADAPTIVE RUNTIME PLATFORM |----------------------------------------------------------------------------------------
(C) Copyright 2013-2015 Carlos Lozano Diez t/a Adaptive.me <http://adaptive.me>.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 . Unless required by appli-
-cable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
Original author:
* Carlos Lozano Diez
<http://github.com/carloslozano>
<http://twitter.com/adaptivecoder>
<mailto:carlos@adaptive.me>
Contributors:
* Ferran Vila Conesa
<http://github.com/fnva>
<http://twitter.com/ferran_vila>
<mailto:ferran.vila.conesa@gmail.com>
* See source code files for contributors.
Release:
* @version v2.2.15
-------------------------------------------| aut inveniam viam aut faciam |--------------------------------------------
*/
package me.adaptive.arp.api;
import java.io.Serializable;
/**
 * Structure representing the personal info data elements of a contact:
 * name, middle name, last name, and title.
 *
 * @author Francisco Javier Martin Bueno
 * @since v2.0
 * @version 1.0
 */
public class ContactPersonalInfo extends APIBean implements Serializable {

    /**
     * Java serialization support.
     *
     * @since 2.2.13
     */
    private static final long serialVersionUID = 100391943L;

    /** The title of the contact. */
    private ContactPersonalInfoTitle title;

    /** The last name of the contact. */
    private String lastName;

    /** The middle name of the contact, if it proceeds. */
    private String middleName;

    /** The name of the contact. */
    private String name;

    /**
     * Default constructor.
     *
     * @since v2.0
     */
    public ContactPersonalInfo() {
    }

    /**
     * Constructor used by the implementation.
     *
     * @param name       name of the contact
     * @param middleName middle name of the contact
     * @param lastName   last name of the contact
     * @param title      title of the contact
     * @since v2.0
     */
    public ContactPersonalInfo(String name, String middleName, String lastName, ContactPersonalInfoTitle title) {
        super();
        this.name = name;
        this.middleName = middleName;
        this.lastName = lastName;
        this.title = title;
    }

    /**
     * Returns the title of the contact.
     *
     * @return title
     * @since v2.0
     */
    public ContactPersonalInfoTitle getTitle() {
        return title;
    }

    /**
     * Sets the title of the contact.
     *
     * @param title title of the contact
     * @since v2.0
     */
    public void setTitle(ContactPersonalInfoTitle title) {
        this.title = title;
    }

    /**
     * Returns the last name of the contact.
     *
     * @return lastName
     * @since v2.0
     */
    public String getLastName() {
        return lastName;
    }

    /**
     * Sets the last name of the contact.
     *
     * @param lastName last name of the contact
     * @since v2.0
     */
    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    /**
     * Returns the middle name of the contact.
     *
     * @return middleName
     * @since v2.0
     */
    public String getMiddleName() {
        return middleName;
    }

    /**
     * Sets the middle name of the contact.
     *
     * @param middleName middle name of the contact
     * @since v2.0
     */
    public void setMiddleName(String middleName) {
        this.middleName = middleName;
    }

    /**
     * Returns the name of the contact.
     *
     * @return name
     * @since v2.0
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the name of the contact.
     *
     * @param name name of the contact
     * @since v2.0
     */
    public void setName(String name) {
        this.name = name;
    }
}
/**
------------------------------------| Engineered with ♥ in Barcelona, Catalonia |--------------------------------------
*/
| apache-2.0 |
YiNPNG/test | TextTwo/app/src/main/java/com/example/dell/texttwo/ui/FragmentBase.java | 1735 | package com.example.dell.texttwo.ui;
import com.example.dell.texttwo.R;
import android.content.BroadcastReceiver;
import android.content.IntentFilter;
import android.database.ContentObserver;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ListView;
/**
 * Base fragment providing the shared list/loading views and convenience
 * delegation to the owning MainActivity for broadcast receivers and content
 * observers. All delegations are no-ops while mParent is null (i.e. before
 * onCreate has bound the activity).
 */
public class FragmentBase extends Fragment {

    protected ListView mListView;
    protected View mLoadingLayout;
    protected MainActivity mParent;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mParent = (MainActivity) getActivity();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View root = inflater.inflate(R.layout.fragment_file, container, false);
        mListView = (ListView) root.findViewById(android.R.id.list);
        mLoadingLayout = root.findViewById(R.id.loading);
        return root;
    }

    /** Hook for subclasses; returns true when the back press was consumed. */
    public boolean onBackPressed() {
        return false;
    }

    public void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
        if (mParent == null) {
            return;
        }
        mParent.registerReceiver(receiver, filter);
    }

    public void unregisterReceiver(BroadcastReceiver receiver) {
        if (mParent == null) {
            return;
        }
        mParent.unregisterReceiver(receiver);
    }

    public void registerContentObserver(Uri uri, boolean notifyForDescendents, ContentObserver observer) {
        if (mParent == null) {
            return;
        }
        mParent.getContentResolver().registerContentObserver(uri, notifyForDescendents, observer);
    }

    public void unregisterContentObserver(ContentObserver observer) {
        if (mParent == null) {
            return;
        }
        mParent.getContentResolver().unregisterContentObserver(observer);
    }
}
| apache-2.0 |
darciopacifico/omr | modules/JazzAV/core/src/main/java/br/com/dlp/jazzav/produto/MarcaVO.java | 644 | package br.com.dlp.jazzav.produto;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import br.com.dlp.jazzav.AbstractLogEntityVO;
/**
 * JPA entity for a product brand ("marca"), with an identity-generated
 * primary key and a single name attribute.
 *
 * @author darcio
 */
@Entity
public class MarcaVO extends AbstractLogEntityVO<Long> {
private static final long serialVersionUID = 6715916803586893459L;
// Brand name.
private String nome;
/** Identity-generated primary key (field inherited from the superclass). */
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Override
public Long getPK() {
return this.pk;
}
public String getNome() {
return nome;
}
public void setNome(String nome) {
this.nome = nome;
}
}
| apache-2.0 |
dreamseekerkun/EnjoyLife | app/src/main/java/com/dream/plmm/bean/HealthyInfoDetailEntity.java | 4203 | package com.dream.plmm.bean;
/**
 * Model for one healthy-info article detail as returned by the tngou API
 * (example url field: http://www.tngou.net/lore/show/10).
 *
 * Plain bean: each field mirrors a JSON field of the same name in the API
 * payload (exact field semantics per the tngou API).
 *
 * Created by likun on 16/8/19.
 */
public class HealthyInfoDetailEntity {

    private int count;          // mirrors JSON "count"
    private String description; // mirrors JSON "description"
    private int fcount;         // mirrors JSON "fcount"
    private int id;             // mirrors JSON "id"
    private String img;         // mirrors JSON "img" (image path)
    private String keywords;    // mirrors JSON "keywords"
    private int loreclass;      // mirrors JSON "loreclass"
    private String message;     // mirrors JSON "message" (HTML body)
    private int rcount;         // mirrors JSON "rcount"
    private boolean status;     // mirrors JSON "status"
    private long time;          // mirrors JSON "time" (epoch millis)
    private String title;       // mirrors JSON "title"
    private String url;         // mirrors JSON "url"

    public int getCount() {
        return this.count;
    }

    public void setCount(int count) {
        this.count = count;
    }

    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public int getFcount() {
        return this.fcount;
    }

    public void setFcount(int fcount) {
        this.fcount = fcount;
    }

    public int getId() {
        return this.id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getImg() {
        return this.img;
    }

    public void setImg(String img) {
        this.img = img;
    }

    public String getKeywords() {
        return this.keywords;
    }

    public void setKeywords(String keywords) {
        this.keywords = keywords;
    }

    public int getLoreclass() {
        return this.loreclass;
    }

    public void setLoreclass(int loreclass) {
        this.loreclass = loreclass;
    }

    public String getMessage() {
        return this.message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public int getRcount() {
        return this.rcount;
    }

    public void setRcount(int rcount) {
        this.rcount = rcount;
    }

    public boolean isStatus() {
        return this.status;
    }

    public void setStatus(boolean status) {
        this.status = status;
    }

    public long getTime() {
        return this.time;
    }

    public void setTime(long time) {
        this.time = time;
    }

    public String getTitle() {
        return this.title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getUrl() {
        return this.url;
    }

    public void setUrl(String url) {
        this.url = url;
    }
}
| apache-2.0 |
gxa/atlas | base/src/main/java/uk/ac/ebi/atlas/profiles/MinMaxProfileRanking.java | 1377 | package uk.ac.ebi.atlas.profiles;
import com.google.common.collect.MinMaxPriorityQueue;
import uk.ac.ebi.atlas.commons.streams.ObjectInputStream;
import uk.ac.ebi.atlas.model.GeneProfilesList;
import uk.ac.ebi.atlas.model.Profile;
import java.util.Comparator;
import java.util.function.Supplier;
public class MinMaxProfileRanking<T extends Profile, L extends GeneProfilesList<T>> implements SelectProfiles<T, L> {
private final Comparator<T> comparator;
private final Supplier<L> newList;
public MinMaxProfileRanking(Comparator<T> comparator, Supplier<L> newList) {
this.comparator = comparator;
this.newList = newList;
}
@Override
public L select(ObjectInputStream<T> profiles, int maxSize) {
MinMaxPriorityQueue<T> rankingQueue =
maxSize > 0 ?
MinMaxPriorityQueue.orderedBy(comparator).maximumSize(maxSize).create() :
MinMaxPriorityQueue.orderedBy(comparator).create();
int count = 0;
for (T profile : new IterableObjectInputStream<>(profiles)) {
rankingQueue.add(profile);
count++;
}
L list = newList.get();
T profile;
while ((profile = rankingQueue.poll()) != null) {
list.add(profile);
}
list.setTotalResultCount(count);
return list;
}
}
| apache-2.0 |
OmniKryptec/OmniKryptec-Engine | src/main/java/de/omnikryptec/old/util/OSUtil.java | 6117 | /*
* Copyright 2017 - 2019 Roman Borris (pcfreak9000), Paul Hagedorn (Panzer1119)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.omnikryptec.old.util;
import de.codemakers.io.file.AdvancedFile;
import de.omnikryptec.old.main.OmniKryptecEngine;
import de.omnikryptec.old.util.logger.LogLevel;
import de.omnikryptec.old.util.logger.Logger;
import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.jar.JarFile;
/**
 * Operating-system detection and OS-specific file-system helpers: per-user
 * app data folders and extraction of resource folders from the running jar
 * (or from an exploded classpath when running in an IDE).
 *
 * @author Panzer1119
 */
public class OSUtil {

    private static final String OS_NAME = System.getProperty("os.name").toLowerCase();
    private static final AdvancedFile USER_HOME = AdvancedFile.folderOfPath(System.getProperty("user.home"));
    private static final String ENGINE_FOLDER_NAME = "." + OmniKryptecEngine.class.getSimpleName() + "_3-1-5";
    private static final String PATHSEPARATOR = "/";

    /** Detected OS; initialized before STANDARD_APPDATA_FOLDER, which depends on it. */
    public static final OS OPERATING_SYSTEM = detectOS();
    /** Engine app data folder for the detected OS (null when the OS is unknown). */
    public static final AdvancedFile STANDARD_APPDATA_FOLDER = getStandardAppDataEngineFolder();

    /** Supported operating systems and their native-library folder names. */
    public static enum OS {
        WINDOWS("windows"), MAC("macosx"), UNIX("linux"), SOLARIS("solaris"), ERROR(null);

        private final String name;

        OS(String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }

        /** Appends this OS's folder name to the given natives path, normalizing the slashes. */
        public String toPathForResource(String nativesPath) {
            return (nativesPath.startsWith(PATHSEPARATOR) ? "" : PATHSEPARATOR) + nativesPath
                    + (nativesPath.endsWith(PATHSEPARATOR) ? "" : PATHSEPARATOR) + name;
        }

        @Override
        public String toString() {
            return name;
        }
    }

    public static final OS getOS() {
        return OPERATING_SYSTEM;
    }

    /** Classifies the running OS from the "os.name" system property. */
    private static final OS detectOS() {
        if (OS_NAME.contains("win")) {
            return OS.WINDOWS;
        } else if (OS_NAME.contains("mac")) {
            return OS.MAC;
        } else if (OS_NAME.contains("nix") || OS_NAME.contains("nux") || OS_NAME.contains("aix")) {
            return OS.UNIX;
        } else if (OS_NAME.contains("sunos")) {
            return OS.SOLARIS;
        } else {
            return OS.ERROR;
        }
    }

    /**
     * Creates the standard engine app data folder.
     *
     * @return true when the folder exists as a directory afterwards
     */
    public static final boolean createStandardFolders() {
        try {
            return (STANDARD_APPDATA_FOLDER.createAdvancedFile() && STANDARD_APPDATA_FOLDER.isDirectory());
        } catch (Exception ex) {
            Logger.logErr("Error while creating standard folders: " + ex, ex);
            return false;
        }
    }

    public static final AdvancedFile getStandardAppDataEngineFolder() {
        return getAppDataFolder(ENGINE_FOLDER_NAME);
    }

    /**
     * Returns the per-user app data folder for the given folder name, or null
     * when the OS could not be detected. The folder is not created here.
     */
    public static final AdvancedFile getAppDataFolder(String folderName) {
        AdvancedFile file = null;
        switch (OPERATING_SYSTEM) {
            case WINDOWS:
                file = new AdvancedFile(false, USER_HOME, "AppData", "Roaming", folderName);
                break;
            case MAC:
                file = new AdvancedFile(false, USER_HOME, "Library", "Application Support", folderName); // TODO Needs confirmation!
                break;
            case UNIX:
            case SOLARIS:
                // Both resolve to a dot-folder directly under the user home.
                file = new AdvancedFile(false, USER_HOME, folderName);
                break;
            case ERROR:
            default:
                break;
        }
        if (file != null) {
            file.setShouldBeFile(false);
        }
        return file;
    }

    /**
     * Extracts all files under the given classpath folder into the target
     * folder. Works both when running from a jar and from an exploded
     * classpath (e.g. inside an IDE). Existing files are left untouched.
     *
     * @return true when every file was extracted or already present
     */
    public static final boolean extractFolderFromJar(AdvancedFile folder, String path) {
        try {
            boolean allGood = true;
            final AdvancedFile jarFile = getJarFile();
            if (jarFile.isFile()) {
                if (path.startsWith("/")) {
                    path = path.substring("/".length());
                }
                final String path_ = path;
                // try-with-resources: the original leaked the JarFile (and the
                // per-entry InputStreams) when an exception escaped before close().
                try (JarFile jar = new JarFile(jarFile.toFile())) {
                    allGood = jar.stream()
                            .filter((jarEntry) -> !jarEntry.isDirectory() && jarEntry.getName().startsWith(path_))
                            .allMatch((jarEntry) -> {
                                try {
                                    final File file_ = getFileOfPath(folder, jarEntry.getName()).toFile().getAbsoluteFile();
                                    if (!file_.exists()) {
                                        try (InputStream inputStream = jar.getInputStream(jarEntry)) {
                                            Files.copy(inputStream, file_.toPath());
                                        }
                                    }
                                    return true;
                                } catch (Exception ex) {
                                    Logger.logErr("Error while extracting file from jar: " + ex, ex);
                                    return false;
                                }
                            });
                }
            } else {
                // Exploded classpath: copy the resource folder's files directly.
                final URL url = OSUtil.class.getResource(path);
                if (url != null) {
                    final AdvancedFile apps = AdvancedFile.folderOfPath(url.toURI().getPath());
                    for (AdvancedFile app : apps.listAdvancedFiles()) {
                        try {
                            Files.copy(app.toFile().toPath(),
                                    new AdvancedFile(false, folder, app.getName()).toFile().toPath(),
                                    StandardCopyOption.COPY_ATTRIBUTES);
                        } catch (java.nio.file.FileAlreadyExistsException faex) {
                            // Already extracted earlier — intentionally ignored.
                        } catch (Exception ex) {
                            allGood = false;
                            Logger.log("Error while extracting file from folder from jar: " + ex, LogLevel.WARNING);
                        }
                    }
                } else {
                    allGood = false;
                }
            }
            return allGood;
        } catch (Exception ex) {
            Logger.logErr("Error while extracting folder from jar: " + ex, ex);
            return false;
        }
    }

    /** Resolves the last path segment of {@code path} as a child of {@code folder}. */
    public static final AdvancedFile getFileOfPath(AdvancedFile folder, String path) {
        String name = path;
        if (path.contains(PATHSEPARATOR)) {
            name = name.substring(name.lastIndexOf(PATHSEPARATOR) + PATHSEPARATOR.length());
        }
        return new AdvancedFile(false, folder, name);
    }

    /** Returns this class's code-source location (a jar file, or a classes directory in an IDE). */
    public static final AdvancedFile getJarFile() {
        return new AdvancedFile(false, OSUtil.class.getProtectionDomain().getCodeSource().getLocation().getPath());
    }

    public static final boolean isJarFile() {
        return getJarFile().isFile();
    }

    public static final boolean isIDE() {
        return !isJarFile();
    }
}
| apache-2.0 |
freme-project/Broker | src/main/java/eu/freme/broker/eservices/Pipelines.java | 19529 | /**
* Copyright (C) 2015 Agro-Know, Deutsches Forschungszentrum für Künstliche Intelligenz, iMinds,
* Institut für Angewandte Informatik e. V. an der Universität Leipzig,
* Istituto Superiore Mario Boella, Tilde, Vistatec, WRIPL (http://freme-project.eu)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.freme.broker.eservices;
import com.google.gson.JsonSyntaxException;
import com.mashape.unirest.http.exceptions.UnirestException;
import eu.freme.broker.exception.*;
import eu.freme.common.conversion.rdf.RDFConstants;
import eu.freme.common.exception.OwnedResourceNotFoundException;
import eu.freme.common.persistence.dao.PipelineDAO;
import eu.freme.common.persistence.dao.UserDAO;
import eu.freme.common.persistence.model.OwnedResource;
import eu.freme.common.persistence.model.Pipeline;
import eu.freme.common.persistence.model.User;
import eu.freme.eservices.pipelines.core.PipelineResponse;
import eu.freme.eservices.pipelines.core.PipelineService;
import eu.freme.eservices.pipelines.core.ServiceException;
import eu.freme.eservices.pipelines.core.WrappedPipelineResponse;
import eu.freme.eservices.pipelines.requests.RequestBuilder;
import eu.freme.eservices.pipelines.requests.RequestFactory;
import eu.freme.eservices.pipelines.requests.SerializedRequest;
import eu.freme.eservices.pipelines.serialization.Serializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.annotation.Secured;
import org.springframework.security.authentication.InsufficientAuthenticationException;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author Gerald Haesendonck
*/
@RestController
@SuppressWarnings("unused")
@Profile("broker")
public class Pipelines extends BaseRestController {
@Autowired
PipelineService pipelineAPI;
@Autowired
PipelineDAO pipelineDAO;
@Autowired
UserDAO userDAO;
/**
* <p>Calls the pipelining service.</p>
* <p>Some predefined Requests can be formed using the class {@link RequestFactory}. It also converts request objects
* from and to JSON.</p>
* <p><To create custom requests, use the {@link RequestBuilder}.</p>
* <p>Examples can be found in the unit tests in {@link eu/freme/broker/integration_tests/pipelines}.</p>
* @param requests The requests to send to the service.
* @param stats If "true": wrap the response of the last request and add timing statistics.
* @return The response of the last request.
* @throws BadRequestException The contents of the request is not valid.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(value = "pipelining/chain",
method = RequestMethod.POST,
consumes = "application/json",
produces = {"text/turtle", "application/json", "application/ld+json", "application/n-triples", "application/rdf+xml", "text/n3", "text/html"}
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> pipeline(@RequestBody String requests, @RequestParam (value = "stats", defaultValue = "false", required = false) String stats) {
try {
boolean wrapResult = Boolean.parseBoolean(stats);
List<SerializedRequest> serializedRequests = Serializer.fromJson(requests);
WrappedPipelineResponse pipelineResult = pipelineAPI.chain(serializedRequests);
MultiValueMap<String, String> headers = new HttpHeaders();
if (wrapResult) {
headers.add(HttpHeaders.CONTENT_TYPE, RDFConstants.RDFSerialization.JSON.contentType());
return new ResponseEntity<>(Serializer.toJson(pipelineResult), headers, HttpStatus.OK);
} else {
headers.add(HttpHeaders.CONTENT_TYPE, pipelineResult.getContent().getContentType());
PipelineResponse lastResponse = pipelineResult.getContent();
return new ResponseEntity<>(lastResponse.getBody(), headers, HttpStatus.OK);
}
} catch (ServiceException serviceError) {
// TODO: see if this can be replaced by excsption(s) defined in the broker.
logger.error(serviceError.getMessage(), serviceError);
MultiValueMap<String, String> headers = new HttpHeaders();
headers.add(HttpHeaders.CONTENT_TYPE, serviceError.getResponse().getContentType());
return new ResponseEntity<>(serviceError.getMessage(), headers, serviceError.getStatus());
} catch (JsonSyntaxException jsonException) {
logger.error(jsonException.getMessage(), jsonException);
String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
} catch (UnirestException unirestException) {
logger.error(unirestException.getMessage(), unirestException);
throw new BadRequestException(unirestException.getMessage());
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Calls the pipelining service using an existing template.
* @param body The contents to send to the pipeline. This can be a NIF or plain text document.
* @param id The id of the pipeline template to use.
* @param stats If "true": wrap the response of the last request and add timing statistics.
* @return The response of the latest request defined in the template.
* @throws AccessDeniedException The pipeline template is not visible by the current user.
* @throws BadRequestException The contents of the request is not valid.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
* @throws TemplateNotFoundException The pipeline template does not exist.
*/
@RequestMapping(value = "pipelining/chain/{id}",
method = RequestMethod.POST,
consumes = {"text/turtle", "application/json", "application/ld+json", "application/n-triples", "application/rdf+xml", "text/n3", "text/plain"},
produces = {"text/turtle", "application/json", "application/ld+json", "application/n-triples", "application/rdf+xml", "text/n3"}
)
public ResponseEntity<String> pipeline(@RequestBody String body, @PathVariable long id, @RequestParam (value = "stats", defaultValue = "false", required = false) String stats) {
try {
Pipeline pipeline = pipelineDAO.findOneById(id);
List<SerializedRequest> serializedRequests = Serializer.fromJson(pipeline.getSerializedRequests());
serializedRequests.get(0).setBody(body);
return pipeline(Serializer.toJson(serializedRequests), stats);
} catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new AccessDeniedException(ex.getMessage());
} catch (JsonSyntaxException jsonException) {
logger.error(jsonException.getMessage(), jsonException);
String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
} catch (OwnedResourceNotFoundException ex) {
logger.error(ex.getMessage(), ex);
throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
}
}
/**
* Creates and stores a pipeline template.
* @param pipelineInfo A JSON string containing the fields "label", "description", "serializedRequests", which
* define the pipeline template.
* @param visibility The visibility of the template. Can be {@literal PUBLIC} or {@literal PRIVATE}. PUBLIC means visible to anyone,
* PRIVATE means only visible to the currently authenticated user.
* @param persist {@literal true}: store the template until deleted by someone, {@literal false} to guarantee
* it to be stored for one week.
* @return A JSON string containing the full pipeline info, i.e. the fields "id", "label", "description",
* "persist", "visibility", "owner", "serializedRequests".
* @throws AccessDeniedException The pipeline template is not visible by the current user.
* @throws BadRequestException The contents of the request is not valid.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(value = "pipelining/templates",
method = RequestMethod.POST,
consumes = "application/json",
produces = "application/json"
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> create(
@RequestBody String pipelineInfo,
@RequestParam(value = "visibility", required = false) String visibility,
@RequestParam (value = "persist", defaultValue = "false", required = false) String persist
) {
try {
// just to perform a first validation of the pipeline...
eu.freme.eservices.pipelines.serialization.Pipeline pipelineInfoObj = Serializer.templateFromJson(pipelineInfo);
//List<SerializedRequest> serializedRequests = RequestFactory.fromJson(requests);
boolean toPersist = Boolean.parseBoolean(persist);
Pipeline pipeline = new Pipeline(
OwnedResource.Visibility.getByString(visibility),
pipelineInfoObj.getLabel(),
pipelineInfoObj.getDescription(),
Serializer.toJson(pipelineInfoObj.getSerializedRequests()),
toPersist);
pipeline = pipelineDAO.save(pipeline);
String response = Serializer.toJson(pipeline);
return createOKJSONResponse(response);
} catch (JsonSyntaxException jsonException) {
logger.error(jsonException.getMessage(), jsonException);
String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
} catch (eu.freme.common.exception.BadRequestException e) {
logger.error(e.getMessage(), e);
throw new BadRequestException(e.getMessage());
} catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new AccessDeniedException(ex.getMessage());
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Updates an existing pipeline template.
* @param id The id of the pipeline template to update.
* @param ownerName The name of the new owner.
* @param visibility The visibility of the template. Can be {@literal PUBLIC} or {@literal PRIVATE}. PUBLIC means visible to anyone,
* PRIVATE means only visible to the currently authenticated user.
* @param persist {@literal true}: store the template until deleted by someone, {@literal false} to guarantee
* it to be stored for one week.
* @param pipelineInfo A JSON string containing updated pipeline template info. The fields "label", "description", "serializedRequests"
* define the pipeline template.
* @return A JSON string containing the updated full pipeline info, i.e. the fields "id", "label", "description",
* "persist", "visibility", "owner", "serializedRequests".
* @throws ForbiddenException The pipeline template is not visible by the current user.
* @throws BadRequestException The contents of the request is not valid.
* @throws TemplateNotFoundException The pipeline template does not exist.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(
value = "pipelining/templates/{id}",
method = RequestMethod.PUT,
consumes = "application/json",
produces = "application/json"
)
public ResponseEntity<String> update(
@PathVariable(value = "id") long id,
@RequestParam(value = "owner", required=false) String ownerName,
@RequestParam(value = "visibility", required = false) String visibility,
@RequestParam(value = "persist", required = false) String persist,
@RequestBody(required = false) String pipelineInfo
) {
try {
Pipeline pipeline = pipelineDAO.findOneById(id);
if (pipelineInfo != null && !pipelineInfo.isEmpty()) {
eu.freme.eservices.pipelines.serialization.Pipeline pipelineInfoObj = Serializer.templateFromJson(pipelineInfo);
String newLabel = pipelineInfoObj.getLabel();
if (newLabel != null && !newLabel.equals(pipeline.getLabel())) {
pipeline.setLabel(newLabel);
}
String newDescription = pipelineInfoObj.getDescription();
if (newDescription != null && !newDescription.equals(pipeline.getDescription())) {
pipeline.setDescription(newDescription);
}
List<SerializedRequest> oldRequests = Serializer.fromJson(pipeline.getSerializedRequests());
List<SerializedRequest> newRequests = pipelineInfoObj.getSerializedRequests();
if (newRequests != null && !newRequests.equals(oldRequests)) {
pipeline.setSerializedRequests(Serializer.toJson(newRequests));
}
}
if (visibility != null && !visibility.equals(pipeline.getVisibility().name())) {
pipeline.setVisibility(OwnedResource.Visibility.getByString(visibility));
}
if (persist != null) {
boolean toPersist = Boolean.parseBoolean(persist);
if (toPersist != pipeline.isPersistent()) {
pipeline.setPersist(toPersist);
}
}
if (ownerName != null && !ownerName.equals(pipeline.getOwner().getName())) {
User newOwner = userDAO.getRepository().findOneByName(ownerName);
if (newOwner == null) {
throw new BadRequestException("Can not change owner of the dataset. User \"" + ownerName + "\" does not exist.");
}
pipeline.setOwner(newOwner);
}
pipeline = pipelineDAO.save(pipeline);
String response = Serializer.toJson(pipeline);
return createOKJSONResponse(response);
} catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new ForbiddenException(ex.getMessage());
} catch (OwnedResourceNotFoundException ex) {
logger.error(ex.getMessage(), ex);
throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
} catch (JsonSyntaxException jsonException) {
logger.error(jsonException.getMessage(), jsonException);
String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
} catch (eu.freme.common.exception.BadRequestException e) {
logger.error(e.getMessage(), e);
throw new BadRequestException(e.getMessage());
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Reads (gets) the pipeline template with the given id.
* @param id The id of the pipeline template to get.
* @return The pipeline templatewith the given id as a JSON string.
* @throws AccessDeniedException The pipeline template is not visible by the current user.
* @throws TemplateNotFoundException The pipeline template does not exist.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(
value = "pipelining/templates/{id}",
method = RequestMethod.GET,
produces = "application/json"
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> read(@PathVariable(value = "id") long id) {
try {
Pipeline pipeline = pipelineDAO.findOneById(id);
String serializedPipeline = Serializer.toJson(pipeline);
return createOKJSONResponse(serializedPipeline);
} catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new AccessDeniedException(ex.getMessage());
} catch (OwnedResourceNotFoundException ex) {
logger.error(ex.getMessage(), ex);
throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Reads (gets) all visible pipelines.
* @return all visible pipelines as a JSON string.
*/
@RequestMapping(
value = "pipelining/templates",
method = RequestMethod.GET,
produces = "application/json"
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> read() {
try {
List<Pipeline> readablePipelines = pipelineDAO.findAllReadAccessible();
String serializedPipelines = Serializer.templatesToJson(readablePipelines);
return createOKJSONResponse(serializedPipelines);
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
/**
* Deletes the pipeline template with the given id.
* @param id The id of the template to delete.
* @return The message "The pipeline was sucessfully removed."
* @throws ForbiddenException The pipeline template cannot be deleted by the current user.
* @throws TemplateNotFoundException The pipeline template does not exist.
* @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
*/
@RequestMapping(
value = "pipelining/templates/{id}",
method = RequestMethod.DELETE
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> delete(@PathVariable("id") long id) {
try {
pipelineDAO.delete(pipelineDAO.findOneById(id));
return new ResponseEntity<>("The pipeline was sucessfully removed.", HttpStatus.OK);
} catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
logger.error(ex.getMessage(), ex);
throw new ForbiddenException(ex.getMessage());
} catch (OwnedResourceNotFoundException ex) {
logger.error(ex.getMessage(), ex);
throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
} catch (Throwable t) {
logger.error(t.getMessage(), t);
// throw an Internal Server exception if anything goes really wrong...
throw new InternalServerErrorException(t.getMessage());
}
}
private ResponseEntity<String> createOKJSONResponse(final String contents) {
MultiValueMap<String, String> headers = new HttpHeaders();
headers.add(HttpHeaders.CONTENT_TYPE, RDFConstants.RDFSerialization.JSON.contentType());
return new ResponseEntity<>(contents, headers, HttpStatus.OK);
}
} | apache-2.0 |
maheshika/carbon-business-process | components/bpmn/org.wso2.carbon.bpmn/src/main/java/org/wso2/carbon/bpmn/core/internal/BPMNServiceComponent.java | 3411 | /**
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.bpmn.core.internal;
import org.activiti.engine.ProcessEngines;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.service.component.ComponentContext;
import org.wso2.carbon.bpmn.core.ActivitiEngineBuilder;
import org.wso2.carbon.bpmn.core.BPMNServerHolder;
import org.wso2.carbon.bpmn.core.db.DataSourceHandler;
import org.wso2.carbon.bpmn.core.deployment.TenantManager;
import org.wso2.carbon.bpmn.core.exception.BPMNMetaDataTableCreationException;
import org.wso2.carbon.bpmn.core.exception.DatabaseConfigurationException;
import org.wso2.carbon.registry.core.service.RegistryService;
/**
* @scr.component name="org.wso2.carbon.bpmn.core.internal.BPMNServiceComponent" immediate="true"
* @scr.reference name="registry.service" interface="org.wso2.carbon.registry.core.service.RegistryService"
* cardinality="1..1" policy="dynamic" bind="setRegistryService" unbind="unsetRegistryService"
*/
public class BPMNServiceComponent {

    private static Log log = LogFactory.getLog(BPMNServiceComponent.class);

    /**
     * Activates the BPMN core component: builds the Activiti engine, registers
     * it (and a tenant manager) with the server holder, and initializes the
     * BPMN metadata (checksum) table via the data source handler.
     */
    protected void activate(ComponentContext ctxt) {
        log.info("Initializing the BPMN core component...");
        try {
            BPMNServerHolder holder = BPMNServerHolder.getInstance();
            ActivitiEngineBuilder activitiEngineBuilder = new ActivitiEngineBuilder();
            holder.setEngine(activitiEngineBuilder.buildEngine());
            holder.setTenantManager(new TenantManager());

            DataSourceHandler dataSourceHandler = new DataSourceHandler();
            dataSourceHandler.initDataSource(activitiEngineBuilder.getDataSourceJndiName());
            dataSourceHandler.closeDataSource();
        } catch (BPMNMetaDataTableCreationException | DatabaseConfigurationException e) {
            // Merged two identical catch blocks into a single multi-catch.
            log.error("Could not create BPMN checksum table", e);
        } catch (Throwable e) {
            log.error("Failed to initialize the BPMN core component.", e);
        }
    }

    /** Deactivates the component and destroys all Activiti process engines. */
    protected void deactivate(ComponentContext ctxt) {
        log.info("Stopping the BPMN core component...");
        ProcessEngines.destroy();
    }

    /** Called by the OSGi runtime when the RegistryService becomes available. */
    protected void setRegistryService(RegistryService registrySvc) {
        if (log.isDebugEnabled()) {
            log.debug("RegistryService bound to the BPMN component");
        }
        BPMNServerHolder.getInstance().setRegistryService(registrySvc);
    }

    /** Called by the OSGi runtime when the RegistryService goes away. */
    public void unsetRegistryService(RegistryService registryService) {
        if (log.isDebugEnabled()) {
            log.debug("RegistryService unbound from the BPMN component");
        }
        BPMNServerHolder.getInstance().unsetRegistryService(registryService);
    }
}
| apache-2.0 |
xsingHu/xs-android-architecture | study-view/xs-MPAndroidChartDemo/demo/src/main/java/com/xsing/demo/DemoBase.java | 1322 | package com.xsing.demo;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.FragmentActivity;
/**
* Baseclass of all Activities of the Demo Application.
*
* @author Philipp Jahoda
*/
public abstract class DemoBase extends FragmentActivity {

    // Abbreviated month labels used as demo axis values.
    // NOTE(review): "Okt" looks like a German/Austrian abbreviation —
    // confirm whether "Oct" is intended for an English-language demo.
    protected String[] mMonths = new String[] {
            "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dec"
    };

    // Placeholder party names used as demo data set labels.
    protected String[] mParties = new String[] {
            "Party A", "Party B", "Party C", "Party D", "Party E", "Party F", "Party G", "Party H",
            "Party I", "Party J", "Party K", "Party L", "Party M", "Party N", "Party O", "Party P",
            "Party Q", "Party R", "Party S", "Party T", "Party U", "Party V", "Party W", "Party X",
            "Party Y", "Party Z"
    };

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    // Returns a pseudo-random float in the half-open range
    // [startsfrom, startsfrom + range).
    protected float getRandom(float range, float startsfrom) {
        return (float) (Math.random() * range) + startsfrom;
    }

    @Override
    public void onBackPressed() {
        super.onBackPressed();
        // Custom back-navigation animation, currently disabled:
        // overridePendingTransition(R.anim.move_left_in_activity, R.anim.move_right_out_activity);
    }
} | apache-2.0 |
speedycontrol/googleapis | output/com/google/cloud/ml/api/v1beta1/ModelOrBuilder.java | 2745 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/ml/v1beta1/model_service.proto
package com.google.cloud.ml.api.v1beta1;
/**
 * Read-only accessor view of a {@code google.cloud.ml.v1beta1.Model} protobuf
 * message. Generated by the protocol buffer compiler — regenerate from the
 * .proto file rather than editing by hand.
 */
public interface ModelOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.cloud.ml.v1beta1.Model)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Required. The name specified for the model when it was created.
   * The model name must be unique within the project it is created in.
   * </pre>
   *
   * <code>optional string name = 1;</code>
   */
  java.lang.String getName();
  /**
   * <pre>
   * Required. The name specified for the model when it was created.
   * The model name must be unique within the project it is created in.
   * </pre>
   *
   * <code>optional string name = 1;</code>
   */
  com.google.protobuf.ByteString
      getNameBytes();

  /**
   * <pre>
   * Optional. The description specified for the model when it was created.
   * </pre>
   *
   * <code>optional string description = 2;</code>
   */
  java.lang.String getDescription();
  /**
   * <pre>
   * Optional. The description specified for the model when it was created.
   * </pre>
   *
   * <code>optional string description = 2;</code>
   */
  com.google.protobuf.ByteString
      getDescriptionBytes();

  /**
   * <pre>
   * Output only. The default version of the model. This version will be used to
   * handle prediction requests that do not specify a version.
   * You can change the default version by calling
   * [projects.methods.versions.setDefault](/ml/reference/rest/v1beta1/projects.models.versions/setDefault).
   * </pre>
   *
   * <code>optional .google.cloud.ml.v1beta1.Version default_version = 3;</code>
   */
  boolean hasDefaultVersion();
  /**
   * <pre>
   * Output only. The default version of the model. This version will be used to
   * handle prediction requests that do not specify a version.
   * You can change the default version by calling
   * [projects.methods.versions.setDefault](/ml/reference/rest/v1beta1/projects.models.versions/setDefault).
   * </pre>
   *
   * <code>optional .google.cloud.ml.v1beta1.Version default_version = 3;</code>
   */
  com.google.cloud.ml.api.v1beta1.Version getDefaultVersion();
  /**
   * <pre>
   * Output only. The default version of the model. This version will be used to
   * handle prediction requests that do not specify a version.
   * You can change the default version by calling
   * [projects.methods.versions.setDefault](/ml/reference/rest/v1beta1/projects.models.versions/setDefault).
   * </pre>
   *
   * <code>optional .google.cloud.ml.v1beta1.Version default_version = 3;</code>
   */
  com.google.cloud.ml.api.v1beta1.VersionOrBuilder getDefaultVersionOrBuilder();
}
| apache-2.0 |
twilking/wildfly-swarm | integration-tests/src/test/java/org/wildfly/swarm/integration/staticcontent/war/StaticContentWarTest.java | 2518 | /**
* Copyright 2015 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.integration.staticcontent.war;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.drone.api.annotation.Drone;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openqa.selenium.WebDriver;
import org.wildfly.swarm.arquillian.adapter.ArtifactDependencies;
import org.wildfly.swarm.integration.base.TestConstants;
import org.wildfly.swarm.integration.staticcontent.StaticContentCommonTests;
import org.wildfly.swarm.undertow.WARArchive;
import java.util.Arrays;
import java.util.List;
import static org.fest.assertions.Assertions.assertThat;
/**
* @author Bob McWhirter
*/
@RunWith(Arquillian.class)
public class StaticContentWarTest implements StaticContentCommonTests {

    @Drone
    WebDriver browser;

    /** Builds the WAR under test with default static content serving enabled. */
    @Deployment
    public static Archive createDeployment() throws Exception {
        WARArchive deployment = ShrinkWrap.create(WARArchive.class);
        deployment.staticContent();
        return deployment;
    }

    /** Maven coordinates the deployment depends on at runtime. */
    @ArtifactDependencies
    public static List<String> appDependencies() {
        return Arrays.asList(
                "org.wildfly.swarm:wildfly-swarm-undertow"
        );
    }

    @RunAsClient
    @Test
    public void testStaticContent() throws Exception {
        assertBasicStaticContentWorks("");
    }

    @Override
    public void assertContains(String path, String content) throws Exception {
        browser.navigate().to(TestConstants.DEFAULT_URL + path);
        assertThat(browser.getPageSource()).contains(content);
    }

    @Override
    public void assertNotFound(String path) throws Exception {
        // BUG FIX: the original wrapped the boolean inside assertThat(...)
        // without any assertion call ("assertThat(x.contains(...));"), so the
        // check was never actually evaluated and could never fail.
        // NOTE(review): unlike assertContains, this does not navigate to
        // 'path' first — confirm whether the caller navigates beforehand.
        assertThat(browser.getPageSource()).contains("Not Found");
    }
}
| apache-2.0 |
angcyo/RLibrary | imagepicker/src/main/java/com/lzy/imagepicker/adapter/ImageViewHolder.java | 689 | package com.lzy.imagepicker.adapter;
import android.support.annotation.IdRes;
import android.support.v7.widget.RecyclerView;
import android.view.View;
/**
* Copyright (C) 2016,深圳市红鸟网络科技股份有限公司 All rights reserved.
* 项目名称:
* 类的描述:
* 创建人员:Robi
* 创建时间:2017/02/21 15:07
* 修改人员:Robi
* 修改时间:2017/02/21 15:07
* 修改备注:
* Version: 1.0.0
*/
public class ImageViewHolder extends RecyclerView.ViewHolder {

    public ImageViewHolder(View itemView) {
        super(itemView);
    }

    /**
     * Finds a child view of the item view by id and casts it to the expected type.
     *
     * @param resId the id of the child view to look up
     * @param <T>   the expected view type; the caller is responsible for the
     *              view at {@code resId} actually being of this type
     * @return the child view cast to {@code T}, or {@code null} if not found
     */
    public <T extends View> T v(@IdRes int resId) {
        // The cast is inherently unchecked; scope the suppression to this
        // single declaration instead of leaving the raw unchecked warning.
        @SuppressWarnings("unchecked")
        T view = (T) itemView.findViewById(resId);
        return view;
    }
}
| apache-2.0 |
anton-johansson/ip-to-geolocation-service | src/main/java/com/antonjohansson/geolocation/framework/domain/SourceData.java | 859 | /**
* Copyright (c) Anton Johansson <antoon.johansson@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.antonjohansson.geolocation.framework.domain;
/**
* Defines source data.
*/
/**
 * Defines source data: a single item identified by an IP address.
 */
public interface SourceData
{
    /**
     * Gets the IP address of this data.
     *
     * @return the IP address string of this data item
     */
    String getAddress();
}
| apache-2.0 |
googleads/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202102/ProspectiveLineItem.java | 13510 | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* ProspectiveLineItem.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202102;
/**
* Represents a prospective line item to be forecasted.
*/
public class ProspectiveLineItem implements java.io.Serializable {
/* The target of the forecast. If {@link LineItem#id} is null
* or no line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a line item already exists with
* {@link LineItem#id}, the forecast is
* computed for the subject, predicting what would happen
* if the existing line item's settings
* were modified to match the subject. */
private com.google.api.ads.admanager.axis.v202102.LineItem lineItem;
/* The target of the forecast if this prospective line item is
* a proposal line item.
*
* <p>If {@link ProposalLineItem#id} is null or no proposal
* line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a proposal line item already exists
* with {@link ProposalLineItem#id},
* the forecast is computed for the subject, predicting
* what would happen if the existing proposal
* line item's settings were modified to match the subject.
*
* <p>A proposal line item can optionally correspond
* to an order {@link LineItem}, in which case,
* by forecasting a proposal line item, the corresponding
* line item is implicitly ignored in the
* forecasting.
*
* <p>Either {@link #lineItem} or {@link #proposalLineItem}
* should be specified but not both. */
private com.google.api.ads.admanager.axis.v202102.ProposalLineItem proposalLineItem;
/* When set, the line item is assumed to be from this advertiser,
* and unified blocking rules will
* apply accordingly. If absent, line items without an
* existing order won't be subject to unified
* blocking rules. */
private java.lang.Long advertiserId;
    /** Default (no-argument) constructor, required by the Axis (de)serializer. */
    public ProspectiveLineItem() {
    }

    /** Constructs an instance with all fields populated. */
    public ProspectiveLineItem(
           com.google.api.ads.admanager.axis.v202102.LineItem lineItem,
           com.google.api.ads.admanager.axis.v202102.ProposalLineItem proposalLineItem,
           java.lang.Long advertiserId) {
           this.lineItem = lineItem;
           this.proposalLineItem = proposalLineItem;
           this.advertiserId = advertiserId;
    }
@Override
public String toString() {
return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
.omitNullValues()
.add("advertiserId", getAdvertiserId())
.add("lineItem", getLineItem())
.add("proposalLineItem", getProposalLineItem())
.toString();
}
/**
* Gets the lineItem value for this ProspectiveLineItem.
*
* @return lineItem * The target of the forecast. If {@link LineItem#id} is null
* or no line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a line item already exists with
* {@link LineItem#id}, the forecast is
* computed for the subject, predicting what would happen
* if the existing line item's settings
* were modified to match the subject.
*/
public com.google.api.ads.admanager.axis.v202102.LineItem getLineItem() {
return lineItem;
}
/**
* Sets the lineItem value for this ProspectiveLineItem.
*
* @param lineItem * The target of the forecast. If {@link LineItem#id} is null
* or no line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a line item already exists with
* {@link LineItem#id}, the forecast is
* computed for the subject, predicting what would happen
* if the existing line item's settings
* were modified to match the subject.
*/
public void setLineItem(com.google.api.ads.admanager.axis.v202102.LineItem lineItem) {
this.lineItem = lineItem;
}
/**
* Gets the proposalLineItem value for this ProspectiveLineItem.
*
* @return proposalLineItem * The target of the forecast if this prospective line item is
* a proposal line item.
*
* <p>If {@link ProposalLineItem#id} is null or no proposal
* line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a proposal line item already exists
* with {@link ProposalLineItem#id},
* the forecast is computed for the subject, predicting
* what would happen if the existing proposal
* line item's settings were modified to match the subject.
*
* <p>A proposal line item can optionally correspond
* to an order {@link LineItem}, in which case,
* by forecasting a proposal line item, the corresponding
* line item is implicitly ignored in the
* forecasting.
*
* <p>Either {@link #lineItem} or {@link #proposalLineItem}
* should be specified but not both.
*/
public com.google.api.ads.admanager.axis.v202102.ProposalLineItem getProposalLineItem() {
return proposalLineItem;
}
/**
* Sets the proposalLineItem value for this ProspectiveLineItem.
*
* @param proposalLineItem * The target of the forecast if this prospective line item is
* a proposal line item.
*
* <p>If {@link ProposalLineItem#id} is null or no proposal
* line item exists with that ID,
* then a forecast is computed for the subject, predicting
* what would happen if it were added
* to the network. If a proposal line item already exists
* with {@link ProposalLineItem#id},
* the forecast is computed for the subject, predicting
* what would happen if the existing proposal
* line item's settings were modified to match the subject.
*
* <p>A proposal line item can optionally correspond
* to an order {@link LineItem}, in which case,
* by forecasting a proposal line item, the corresponding
* line item is implicitly ignored in the
* forecasting.
*
* <p>Either {@link #lineItem} or {@link #proposalLineItem}
* should be specified but not both.
*/
public void setProposalLineItem(com.google.api.ads.admanager.axis.v202102.ProposalLineItem proposalLineItem) {
this.proposalLineItem = proposalLineItem;
}
/**
* Gets the advertiserId value for this ProspectiveLineItem.
*
* @return advertiserId * When set, the line item is assumed to be from this advertiser,
* and unified blocking rules will
* apply accordingly. If absent, line items without an
* existing order won't be subject to unified
* blocking rules.
*/
public java.lang.Long getAdvertiserId() {
return advertiserId;
}
/**
* Sets the advertiserId value for this ProspectiveLineItem.
*
* @param advertiserId * When set, the line item is assumed to be from this advertiser,
* and unified blocking rules will
* apply accordingly. If absent, line items without an
* existing order won't be subject to unified
* blocking rules.
*/
public void setAdvertiserId(java.lang.Long advertiserId) {
this.advertiserId = advertiserId;
}
    // Cycle guard for the Axis-generated equals(): remembers the object
    // currently being compared so cyclic object graphs terminate.
    private java.lang.Object __equalsCalc = null;
    /**
     * Axis-generated structural equality: true when lineItem, proposalLineItem
     * and advertiserId are pairwise equal (or pairwise null).
     * Generated code, kept byte-for-byte unchanged.
     */
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof ProspectiveLineItem)) return false;
        ProspectiveLineItem other = (ProspectiveLineItem) obj;
        // Unreachable: a null obj already failed the instanceof test above.
        if (obj == null) return false;
        if (this == obj) return true;
        // Re-entrant call on the same object (cycle): fall back to identity.
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.lineItem==null && other.getLineItem()==null) ||
             (this.lineItem!=null &&
              this.lineItem.equals(other.getLineItem()))) &&
            ((this.proposalLineItem==null && other.getProposalLineItem()==null) ||
             (this.proposalLineItem!=null &&
              this.proposalLineItem.equals(other.getProposalLineItem()))) &&
            ((this.advertiserId==null && other.getAdvertiserId()==null) ||
             (this.advertiserId!=null &&
              this.advertiserId.equals(other.getAdvertiserId())));
        __equalsCalc = null;
        return _equals;
    }
    // Recursion guard for hashCode(): while true, nested calls return 0 so
    // cyclic object graphs do not recurse forever.
    private boolean __hashCodeCalc = false;
    /**
     * Axis-generated hash code: 1 plus the sum of the hash codes of the
     * non-null fields. Consistent with equals().
     */
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getLineItem() != null) {
            _hashCode += getLineItem().hashCode();
        }
        if (getProposalLineItem() != null) {
            _hashCode += getProposalLineItem().hashCode();
        }
        if (getAdvertiserId() != null) {
            _hashCode += getAdvertiserId().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }
    // Type metadata
    // Axis bean metadata: maps this class and its three optional fields onto
    // the Ad Manager v202102 XML schema. Built once at class-load time.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ProspectiveLineItem.class, true);
    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "ProspectiveLineItem"));
        // Each ElementDesc below declares one optional (minOccurs 0),
        // non-nillable child element of the ProspectiveLineItem XML type.
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("lineItem");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "lineItem"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "LineItem"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("proposalLineItem");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "proposalLineItem"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "ProposalLineItem"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("advertiserId");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "advertiserId"));
        // advertiserId is a plain XML Schema long, not a publisher-namespace type.
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }
    /**
     * Return type metadata object
     *
     * @return the shared Axis type descriptor built in the static initializer
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }
    /**
     * Get Custom Serializer
     * NOTE(review): factory signature kept exactly as generated — presumably
     * looked up reflectively by the Axis runtime; mechType is unused here.
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }
    /**
     * Get Custom Deserializer
     * NOTE(review): factory signature kept exactly as generated — presumably
     * looked up reflectively by the Axis runtime; mechType is unused here.
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
| apache-2.0 |
scify/JedAIToolkit | src/main/java/org/scify/jedai/textmodels/BagModel.java | 6720 | /*
* Copyright [2016-2020] [George Papadakis (gpapadis@yahoo.gr)]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scify.jedai.textmodels;
import org.scify.jedai.utilities.enumerations.RepresentationModel;
import org.scify.jedai.utilities.enumerations.SimilarityMetric;
import gnu.trove.iterator.TObjectIntIterator;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import java.util.HashSet;
import java.util.Set;
/**
*
* @author G.A.P. II
*/
public abstract class BagModel extends AbstractModel {

    // Total number of term occurrences in this model, counted with multiplicity.
    protected float noOfTotalTerms;
    // Term -> raw occurrence count (term frequency) for every distinct term.
    protected final TObjectIntMap<String> itemsFrequency;

    /**
     * Creates an empty bag-of-words model; terms are accumulated later.
     *
     * @param dId the id of the entity described by this model
     * @param n the n-gram size used for the terms
     * @param md the representation model this instance implements
     * @param sMetric the similarity metric applied by {@link #getSimilarity}
     * @param iName the name of the modelled instance/attribute
     */
    public BagModel(int dId, int n, RepresentationModel md, SimilarityMetric sMetric, String iName) {
        super(dId, n, md, sMetric, iName);
        itemsFrequency = new TObjectIntHashMap<>();
    }

    @Override
    public void finalizeModel() {
        // Intentionally a no-op: a plain bag model needs no post-processing.
    }

    /**
     * Frequency-weighted Jaccard similarity: the numerator is the sum of the
     * per-term minimum frequencies, the denominator the total term count of
     * both models minus that overlap.
     * NOTE(review): yields NaN when both models are empty (0/0) — confirm
     * callers never compare two empty models.
     */
    protected float getEnhancedJaccardSimilarity(BagModel oModel) {
        TObjectIntMap<String> itemVector1 = itemsFrequency;
        TObjectIntMap<String> itemVector2 = oModel.getItemsFrequency();
        // Iterate over the smaller vector; lookups for absent terms return 0.
        if (itemVector2.size() < itemVector1.size()) {
            itemVector1 = oModel.getItemsFrequency();
            itemVector2 = itemsFrequency;
        }
        float numerator = 0.0f;
        for (TObjectIntIterator<String> iterator = itemVector1.iterator(); iterator.hasNext();) {
            iterator.advance();
            numerator += Math.min(iterator.value(), itemVector2.get(iterator.key()));
        }
        float denominator = noOfTotalTerms + oModel.getNoOfTotalTerms() - numerator;
        return numerator / denominator;
    }

    /**
     * Shannon entropy (in bits) of the term-frequency distribution.
     *
     * @param normalized if true, divides by log2(noOfTotalTerms).
     *        NOTE(review): normalisation uses the total term count rather than
     *        the number of distinct terms — confirm this is intended.
     * @return the (optionally normalised) entropy
     */
    @Override
    public float getEntropy(boolean normalized) {
        float entropy = 0.0f;
        for (TObjectIntIterator<String> iterator = itemsFrequency.iterator(); iterator.hasNext();) {
            iterator.advance();
            float p_i = (iterator.value() / noOfTotalTerms);
            // log10(x) / log10(2) == log2(x)
            entropy -= (p_i * (Math.log10(p_i) / Math.log10(2.0d)));
        }
        if (normalized) {
            float maxEntropy = (float) Math.log10(noOfTotalTerms) / (float) Math.log10(2.0f);
            return entropy / maxEntropy;
        }
        return entropy;
    }

    public TObjectIntMap<String> getItemsFrequency() {
        return itemsFrequency;
    }

    /**
     * Plain set-based Jaccard similarity over the distinct-term sets:
     * |A intersect B| / |A union B|. Term frequencies are ignored.
     */
    protected float getJaccardSimilarity(BagModel oModel) {
        final Set<String> commonKeys = new HashSet<>(itemsFrequency.keySet());
        commonKeys.retainAll(oModel.getItemsFrequency().keySet());
        float numerator = commonKeys.size();
        // Inclusion-exclusion: |A| + |B| - |A intersect B| = |A union B|.
        float denominator = itemsFrequency.size() + oModel.getItemsFrequency().size() - numerator;
        return numerator / denominator;
    }

    protected float getNoOfTotalTerms() {
        return noOfTotalTerms;
    }

    @Override
    public Set<String> getSignatures() {
        // The distinct terms double as this model's signature set.
        return itemsFrequency.keySet();
    }

    /**
     * Dispatches to the concrete similarity implementation selected by the
     * metric supplied at construction time.
     *
     * @throws IllegalStateException for metrics inapplicable to bag models
     */
    @Override
    public float getSimilarity(ITextModel oModel) {
        switch (simMetric) {
            case COSINE_SIMILARITY:
                return getTfCosineSimilarity((BagModel) oModel);
            case ENHANCED_JACCARD_SIMILARITY:
                return getEnhancedJaccardSimilarity((BagModel) oModel);
            case GENERALIZED_JACCARD_SIMILARITY:
                return getTfGeneralizedJaccardSimilarity((BagModel) oModel);
            case JACCARD_SIMILARITY:
                return getJaccardSimilarity((BagModel) oModel);
            default:
                throw new IllegalStateException(
                        "The given similarity metric is incompatible with the bag representation model.");
        }
    }

    /**
     * Cosine similarity of the two TF vectors, with every frequency divided by
     * its model's total term count (the division constants are symmetric, so
     * the swap below does not affect the result).
     */
    protected float getTfCosineSimilarity(BagModel oModel) {
        float totalTerms2 = oModel.getNoOfTotalTerms();
        TObjectIntMap<String> itemVector1 = itemsFrequency;
        TObjectIntMap<String> itemVector2 = oModel.getItemsFrequency();
        // Iterate over the smaller vector for fewer hash lookups.
        if (itemVector2.size() < itemVector1.size()) {
            itemVector1 = oModel.getItemsFrequency();
            itemVector2 = itemsFrequency;
        }
        float numerator = 0.0f;
        for (TObjectIntIterator<String> iterator = itemVector1.iterator(); iterator.hasNext();) {
            iterator.advance();
            numerator += iterator.value() * itemVector2.get(iterator.key()) / noOfTotalTerms / totalTerms2;
        }
        float denominator = getVectorMagnitude() * oModel.getVectorMagnitude();
        return numerator / denominator;
    }

    /**
     * Generalized Jaccard similarity: sum of per-term minima over sum of
     * per-term maxima, each frequency normalised by its own model's total.
     */
    protected float getTfGeneralizedJaccardSimilarity(BagModel oModel) {
        float totalTerms1 = noOfTotalTerms;
        float totalTerms2 = oModel.getNoOfTotalTerms();
        TObjectIntMap<String> itemVector1 = itemsFrequency;
        TObjectIntMap<String> itemVector2 = oModel.getItemsFrequency();
        // Swap to iterate over the smaller vector; the totals are swapped
        // alongside so each frequency stays paired with its own model's total.
        if (itemVector2.size() < itemVector1.size()) {
            itemVector1 = oModel.getItemsFrequency();
            itemVector2 = itemsFrequency;
            totalTerms1 = oModel.getNoOfTotalTerms();
            totalTerms2 = noOfTotalTerms;
        }
        float numerator = 0.0f;
        for (TObjectIntIterator<String> iterator = itemVector1.iterator(); iterator.hasNext(); ) {
            iterator.advance();
            numerator += Math.min(iterator.value() / totalTerms1, itemVector2.get(iterator.key()) / totalTerms2);
        }
        final Set<String> allKeys = new HashSet<>(itemVector1.keySet());
        allKeys.addAll(itemVector2.keySet());
        float denominator = 0.0f;
        for (String key : allKeys) {
            denominator += Math.max(itemVector1.get(key) / totalTerms1, itemVector2.get(key) / totalTerms2);
        }
        return numerator / denominator;
    }

    /**
     * Euclidean norm of the TF vector after normalising every frequency by the
     * total term count; denominator of the cosine similarity.
     */
    protected float getVectorMagnitude() {
        float magnitude = 0.0f;
        for (TObjectIntIterator<String> iterator = itemsFrequency.iterator(); iterator.hasNext();) {
            iterator.advance();
            magnitude += Math.pow(iterator.value() / noOfTotalTerms, 2.0);
        }
        return (float) Math.sqrt(magnitude);
    }
}
| apache-2.0 |
zimmermatt/flink | flink-tests/src/test/java/org/apache/flink/test/manual/StreamingScalabilityAndLatency.java | 4805 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.manual;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;
import static org.junit.Assert.fail;
/**
* Manual test to evaluate impact of checkpointing on latency.
*/
public class StreamingScalabilityAndLatency {

    public static void main(String[] args) throws Exception {
        // maxMemory() >>> 20 converts bytes to MiB: refuse to start with
        // less than roughly 5 GiB of heap, as the message below states.
        if ((Runtime.getRuntime().maxMemory() >>> 20) < 5000) {
            throw new RuntimeException("This test program needs to run with at least 5GB of heap space.");
        }
        // One local TaskManager with many slots so a high-parallelism job
        // runs entirely inside this JVM.
        final int taskManagers = 1;
        final int slotsPerTaskManager = 80;
        final int parallelism = taskManagers * slotsPerTaskManager;
        LocalFlinkMiniCluster cluster = null;
        try {
            Configuration config = new Configuration();
            config.setInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, taskManagers);
            config.setLong(TaskManagerOptions.MANAGED_MEMORY_SIZE, 80L);
            config.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, slotsPerTaskManager);
            // Large buffer pool; single network server/client thread each.
            config.setInteger(TaskManagerOptions.NETWORK_NUM_BUFFERS, 20000);
            config.setInteger("taskmanager.net.server.numThreads", 1);
            config.setInteger("taskmanager.net.client.numThreads", 1);
            cluster = new LocalFlinkMiniCluster(config, false);
            cluster.start();
            runPartitioningProgram(cluster.getLeaderRPCPort(), parallelism);
        }
        catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
        finally {
            // Always shut the mini cluster down, even when the job failed.
            if (cluster != null) {
                cluster.stop();
            }
        }
    }

    /**
     * Builds and executes the measured pipeline:
     * timestamping source -> identity map -> keyBy(0) -> latency-printing sink.
     */
    private static void runPartitioningProgram(int jobManagerPort, int parallelism) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createRemoteEnvironment("localhost", jobManagerPort);
        env.setParallelism(parallelism);
        env.getConfig().enableObjectReuse();
        // Small buffer timeout so records are flushed quickly (latency focus).
        env.setBufferTimeout(5L);
        // Checkpoint every second to observe its impact on latency.
        env.enableCheckpointing(1000, CheckpointingMode.AT_LEAST_ONCE);
        env
            .addSource(new TimeStampingSource())
            .map(new IdMapper<Tuple2<Long, Long>>())
            .keyBy(0)
            .addSink(new TimestampingSink());
        env.execute("Partitioning Program");
    }

    /**
     * Source emitting (counter, timestamp) tuples. Only every 100th record
     * carries a real wall-clock timestamp; the rest carry 0, so latency is
     * sampled rather than measured on every record.
     */
    private static class TimeStampingSource implements ParallelSourceFunction<Tuple2<Long, Long>> {

        private static final long serialVersionUID = -151782334777482511L;

        // Flipped by cancel() from another thread, hence volatile.
        private volatile boolean running = true;

        @Override
        public void run(SourceContext<Tuple2<Long, Long>> ctx) throws Exception {
            long num = 100;
            // Random start value spreads the keys across partitions.
            long counter = (long) (Math.random() * 4096);
            while (running) {
                if (num < 100) {
                    num++;
                    ctx.collect(new Tuple2<Long, Long>(counter++, 0L));
                }
                else {
                    num = 0;
                    ctx.collect(new Tuple2<Long, Long>(counter++, System.currentTimeMillis()));
                }
                // Throttles emission to about one record per millisecond.
                Thread.sleep(1);
            }
        }

        @Override
        public void cancel() {
            running = false;
        }
    }

    /**
     * Sink tracking the maximum source-to-sink latency over every window of
     * 5000 received records, printing and resetting it per window.
     */
    private static class TimestampingSink implements SinkFunction<Tuple2<Long, Long>> {

        private static final long serialVersionUID = 1876986644706201196L;

        private long maxLatency;
        private long count;

        @Override
        public void invoke(Tuple2<Long, Long> value) {
            long ts = value.f1;
            // ts == 0 marks records without a timestamp sample; skip those.
            if (ts != 0L) {
                long diff = System.currentTimeMillis() - ts;
                maxLatency = Math.max(diff, maxLatency);
            }
            count++;
            if (count == 5000) {
                System.out.println("Max latency: " + maxLatency);
                count = 0;
                maxLatency = 0;
            }
        }
    }

    /** Identity mapper: forwards every record unchanged. */
    private static class IdMapper<T> implements MapFunction<T, T> {

        private static final long serialVersionUID = -6543809409233225099L;

        @Override
        public T map(T value) {
            return value;
        }
    }
}
| apache-2.0 |
mpi2/PhenotypeData | web/src/main/java/uk/ac/ebi/phenotype/chart/PercentileComputation.java | 2685 | /*******************************************************************************
* Copyright 2015 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*******************************************************************************/
package uk.ac.ebi.phenotype.chart;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
*
* @author tudose
*
* Implenetation of Method 2 from http://en.wikipedia.org/wiki/Quartile
* 1. Use the median to divide the ordered data set into two halves. If the median is a datum (as opposed to being the mean of the middle two data), include the median in both halves.
* 2. The lower quartile value is the median of the lower half of the data. The upper quartile value is the median of the upper half of the data.
*/
public class PercentileComputation{
private List<Float> upperValues;
private List<Float> lowerValues;
private List<Float> values;
public PercentileComputation(List<Float> val){
ArrayList <Float> sortedValues = (ArrayList<Float>)val;
Collections.sort(sortedValues);
upperValues = new ArrayList<>();
lowerValues = new ArrayList<>();
values = val;
// Use the median to divide the ordered data set into two halves.
// If the median is a datum (as opposed to being the mean of the middle two data), include the median in both halves.
int n = sortedValues.size();
if (n % 2 == 1){
lowerValues = sortedValues.subList(0, (n+1)/2);
upperValues = sortedValues.subList((n-1)/2, n);
}
else{
lowerValues = sortedValues.subList(0, n/2);
upperValues = sortedValues.subList(n/2, n);
}
}
//The lower quartile value is the median of the lower half of the data. The upper quartile value is the median of the upper half of the data.
public float getUpperQuartile() {
return getMedian(upperValues);
}
public float getLowerQuartile() {
return getMedian(lowerValues);
}
public float getMedian(){
return getMedian(values);
}
private Float getMedian(List<Float> list){
int n = list.size();
if (n % 2 == 1){
return list.get((n - 1)/2);
}
else{
return (list.get(n/2 - 1) + list.get(n/2)) / 2;
}
}
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-kinesisanalyticsv2/src/main/java/com/amazonaws/services/kinesisanalyticsv2/model/transform/CheckpointConfigurationUpdateMarshaller.java | 3340 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisanalyticsv2.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.kinesisanalyticsv2.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* CheckpointConfigurationUpdateMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class CheckpointConfigurationUpdateMarshaller {

    // One binding per member of CheckpointConfigurationUpdate; each records
    // the wire location (payload) and the field name used when marshalling.
    private static final MarshallingInfo<String> CONFIGURATIONTYPEUPDATE_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ConfigurationTypeUpdate").build();
    private static final MarshallingInfo<Boolean> CHECKPOINTINGENABLEDUPDATE_BINDING = MarshallingInfo.builder(MarshallingType.BOOLEAN)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("CheckpointingEnabledUpdate").build();
    private static final MarshallingInfo<Long> CHECKPOINTINTERVALUPDATE_BINDING = MarshallingInfo.builder(MarshallingType.LONG)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("CheckpointIntervalUpdate").build();
    private static final MarshallingInfo<Long> MINPAUSEBETWEENCHECKPOINTSUPDATE_BINDING = MarshallingInfo.builder(MarshallingType.LONG)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("MinPauseBetweenCheckpointsUpdate").build();

    // The marshaller holds no per-call state, so a single instance is shared.
    private static final CheckpointConfigurationUpdateMarshaller instance = new CheckpointConfigurationUpdateMarshaller();

    /** @return the shared marshaller instance */
    public static CheckpointConfigurationUpdateMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @param checkpointConfigurationUpdate the model object to marshall; must not be null
     * @param protocolMarshaller destination receiving the marshalled fields
     * @throws SdkClientException if the argument is null or marshalling fails
     *         (the original exception is preserved as the cause)
     */
    public void marshall(CheckpointConfigurationUpdate checkpointConfigurationUpdate, ProtocolMarshaller protocolMarshaller) {
        if (checkpointConfigurationUpdate == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(checkpointConfigurationUpdate.getConfigurationTypeUpdate(), CONFIGURATIONTYPEUPDATE_BINDING);
            protocolMarshaller.marshall(checkpointConfigurationUpdate.getCheckpointingEnabledUpdate(), CHECKPOINTINGENABLEDUPDATE_BINDING);
            protocolMarshaller.marshall(checkpointConfigurationUpdate.getCheckpointIntervalUpdate(), CHECKPOINTINTERVALUPDATE_BINDING);
            protocolMarshaller.marshall(checkpointConfigurationUpdate.getMinPauseBetweenCheckpointsUpdate(), MINPAUSEBETWEENCHECKPOINTSUPDATE_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| apache-2.0 |
songzhw/SixUiViews | SixUiViews/lib/src/main/java/ca/six/views/util/ColorsUtil.java | 473 | package ca.six.views.util;
import android.graphics.Color;
public class ColorsUtil {
public static boolean isLight(int color) {
return Math.sqrt(
Color.red(color) * Color.red(color) * 0.241 +
Color.green(color) * Color.green(color) * 0.691 +
Color.blue(color) * Color.blue(color) * 0.068) > 130;
}
public static int getBaseColor(int color) {
if (isLight(color)) {
return Color.BLACK;
}
return Color.WHITE;
}
} | apache-2.0 |
tamalsen/vibur-dbcp | src/main/java/org/vibur/dbcp/cache/ReturnVal.java | 1964 | /**
* Copyright 2013 Simeon Malchev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vibur.dbcp.cache;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A thin wrapper class which allows us to augment the returned {@code value} of a method invocation with some
* additional "state" information. The instances of this class are used as a cached {@code value} for method
* invocations in a {@link java.util.concurrent.ConcurrentMap} cache implementation, and their "state" is describing
* whether the object is currently available, in use or evicted.
*
* @see StatementInvocationCacheProvider
* @see ConnMethodDef
*
* @author Simeon Malchev
* @param <V> the type of the value object held in this ReturnVal
*/
public class ReturnVal<V> {
/**
* The 3 different states in which a {@code ReturnVal} object can be while being used as a cached value:
*/
public static final int AVAILABLE = 0;
public static final int IN_USE = 1;
public static final int EVICTED = 2;
private final V value;
private final AtomicInteger state;
public ReturnVal(V value, AtomicInteger state) {
if (value == null)
throw new NullPointerException();
this.value = value;
this.state = state;
}
public V value() {
return value;
}
public AtomicInteger state() {
return state;
}
}
| apache-2.0 |
ispras/NetBlox-plug-ins | graphMiners/GCE/src/minerGCE/GCEDescriptionParser.java | 2295 | package minerGCE;
import org.xml.sax.SAXException;
import ru.ispras.modis.NetBlox.parser.basicParsersAndUtils.Utils;
import ru.ispras.modis.NetBlox.parser.basicParsersAndUtils.XMLIntegerRangeStringProcessor;
import ru.ispras.modis.NetBlox.parser.xmlParser.CommonXMLParser;
import ru.ispras.modis.NetBlox.parser.xmlParser.XMLStringValueProcessor;
import ru.ispras.modis.NetBlox.scenario.RangeOfValues;
public class GCEDescriptionParser extends CommonXMLParser {

    /**
     * Handles the supplementaryAlgosIds element: splits its text content on the
     * standard delimiter and registers each id with the miner description.
     */
    class SupplementaryAlgosIdsProcessor extends XMLStringValueProcessor {
        @Override
        public void closeElement() {
            super.closeElement();
            // getText() is inherited from XMLStringValueProcessor — presumably
            // the accumulated character data of the element (TODO confirm).
            String stringOfIds = getText();
            String[] ids = stringOfIds.split(Utils.DELIMITER);
            for (String stringId : ids) {
                // Validates each id against the scenario vocabulary before use.
                Utils.checkWhetherIsWordInScenario(stringId, TAG_SUPPLEMENTARY_ALGOS_IDS, "algorithm");
                minerDescription.addSupplementaryAlgorithmId(stringId);
            }
        }
    }

    /** Handles the launchNumbers element: stores the parsed integer range, if any. */
    class LaunchesProcessor extends XMLIntegerRangeStringProcessor {
        @Override
        public void closeElement() {
            super.closeElement();
            RangeOfValues<Integer> launchNumbers = getValues();
            // The element is optional: only a non-empty range is recorded.
            if (launchNumbers != null && !launchNumbers.isEmpty()) {
                minerDescription.setLaunchNumbers(launchNumbers);
            }
        }
    }

    private static final String TAG_SUPPLEMENTARY_ALGOS_IDS = "supplementaryAlgosIds";
    private static final String TAG_LAUNCH_NUMBERS = "launchNumbers";
    private static final String TAG_MINIMAL_CLIQUE_SIZE = "minimalCliqueSize";

    // Kept as a field so endDocument() can read the element's text afterwards.
    private final XMLStringValueProcessor minimalCliqueSizeParser;
    // Rebuilt at every startDocument(); holds the result of the latest parse.
    private DescriptionGCD_GCE minerDescription;

    public GCEDescriptionParser() {
        super();
        // Register one child processor per recognised XML tag.
        add(TAG_SUPPLEMENTARY_ALGOS_IDS, new SupplementaryAlgosIdsProcessor());
        add(TAG_LAUNCH_NUMBERS, new LaunchesProcessor());
        add(TAG_MINIMAL_CLIQUE_SIZE, minimalCliqueSizeParser = new XMLStringValueProcessor());
    }

    @Override
    public void startDocument() throws SAXException {
        super.startDocument();
        // Fresh description per document so consecutive parses don't share state.
        minerDescription = new DescriptionGCD_GCE();
    }

    @Override
    public void endDocument() throws SAXException {
        super.endDocument();
        // minimalCliqueSize is optional; set it only when the element had text.
        String text = minimalCliqueSizeParser.getText();
        if (text != null && !text.isEmpty()) {
            // NOTE(review): a non-numeric value propagates NumberFormatException.
            minerDescription.setMinimalCliqueSize(Integer.parseInt(text));
        }
    }

    /** @return the description assembled by the most recent parse (null before any parse). */
    public DescriptionGCD_GCE getParsedDescription() {
        return minerDescription;
    }
}
| apache-2.0 |
RobAustin/byte-buddy | byte-buddy-dep/src/test/java/net/bytebuddy/instrumentation/method/bytecode/bind/MethodDelegationBinderAmbiguityResolverNoOpTest.java | 764 | package net.bytebuddy.instrumentation.method.bytecode.bind;
import net.bytebuddy.instrumentation.method.MethodDescription;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.mock;
public class MethodDelegationBinderAmbiguityResolverNoOpTest {
@Test
public void testResolution() throws Exception {
assertThat(MethodDelegationBinder.AmbiguityResolver.NoOp.INSTANCE.resolve(mock(MethodDescription.class),
mock(MethodDelegationBinder.MethodBinding.class),
mock(MethodDelegationBinder.MethodBinding.class)),
is(MethodDelegationBinder.AmbiguityResolver.Resolution.UNKNOWN));
}
}
| apache-2.0 |
vkostyukov/la4j | src/main/java/org/la4j/decomposition/EigenDecompositor.java | 27846 | /*
* Copyright 2011-2013, by Vladimir Kostyukov and Contributors.
*
* This file is part of la4j project (http://la4j.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributor(s): Maxim Samoylov
*
*/
package org.la4j.decomposition;
import org.la4j.Matrices;
import org.la4j.Matrix;
import org.la4j.matrix.SparseMatrix;
import org.la4j.Vector;
import org.la4j.Vectors;
import org.la4j.vector.DenseVector;
import org.la4j.vector.functor.VectorAccumulator;
/**
* This class represents Eigen decomposition of matrices. More details
* <p>
* <a href="http://mathworld.wolfram.com/EigenDecomposition.html"> here.</a>
* </p>
*/
public class EigenDecompositor extends AbstractDecompositor implements MatrixDecompositor {
    /**
     * Creates an eigen decompositor for the given matrix; decomposition
     * requires the matrix to be square (see {@link #applicableTo(Matrix)}).
     */
    public EigenDecompositor(Matrix matrix) {
        super(matrix);
    }
/**
* Returns the result of Eigen (EVD) decomposition of given matrix
* <p>
* See <a href="http://mathworld.wolfram.com/EigenDecomposition.html">
* http://mathworld.wolfram.com/EigenDecomposition.html</a> for more
* details.
* </p>
*
* @return { V, D }
*/
@Override
public Matrix[] decompose() {
if (matrix.is(Matrices.SYMMETRIC_MATRIX)) {
return decomposeSymmetricMatrix(matrix);
} else if (matrix.rows() == matrix.columns()) {
return decomposeNonSymmetricMatrix(matrix);
} else {
throw new IllegalArgumentException("Can't decompose rectangle matrix");
}
}
    /**
     * Eigen decomposition is defined only for square matrices.
     */
    @Override
    public boolean applicableTo(Matrix matrix) {
        return matrix.rows() == matrix.columns();
    }
/**
* Returns the result of Eigen decomposition for <a
* href="http://mathworld.wolfram.com/SymmetricMatrix.html"> symmetric</a>
* matrix
* <p>
* See <a href="http://mathworld.wolfram.com/EigenDecomposition.html">
* http://mathworld.wolfram.com/EigenDecomposition.html</a> for more
* details.
* </p>
*
* @param matrix
* @return { V, D }
*/
private Matrix[] decomposeSymmetricMatrix(Matrix matrix) {
Matrix d = matrix.copy();
Matrix v = SparseMatrix.identity(matrix.rows());
Vector r = generateR(d);
Matrix u = SparseMatrix.identity(matrix.rows());
VectorAccumulator normAccumulator = Vectors.mkEuclideanNormAccumulator();
double n = Matrices.EPS;
double nn = r.fold(normAccumulator);
int kk = 0;
int ll = 0;
while (Math.abs(n - nn) > Matrices.EPS) {
int k = findMax(r);
int l = findMax(d, k);
regenerateU(u, d, k, l, kk, ll);
kk = k;
ll = l;
v = v.multiply(u);
d = u.transpose().multiply(d.multiply(u));
r.set(k, generateRi(d, k));
r.set(l, generateRi(d, l));
n = nn;
nn = r.fold(normAccumulator);
}
return new Matrix[] { v, d };
}
private int findMax(Vector vector) {
double value = vector.get(0);
int result = 0;
for (int i = 1; i < vector.length(); i++) {
double v = vector.get(i);
if (Math.abs(value) < Math.abs(v)) {
result = i;
value = v;
}
}
return result;
}
private int findMax(Matrix matrix, int i) {
double value = i > 0 ? matrix.get(i, 0) : matrix.get(i, 1);
int result = i > 0 ? 0 : 1;
for (int j = 0; j < matrix.columns(); j++) {
if (i != j) {
double v = matrix.get(i, j);
if (Math.abs(value) < Math.abs(v)) {
result = j;
value = v;
}
}
}
return result;
}
private Vector generateR(Matrix matrix) {
Vector result = DenseVector.zero(matrix.rows());
for (int i = 0; i < matrix.rows(); i++) {
result.set(i, generateRi(matrix, i));
}
return result;
}
private double generateRi(Matrix matrix, int i) {
double acc = 0;
for (int j = 0; j < matrix.columns(); j++) {
if (j != i) {
double value = matrix.get(i, j);
acc += value * value;
}
}
return acc;
}
private void regenerateU(Matrix u, Matrix matrix, int k, int l, int kk, int ll) {
u.set(kk, kk, 1.0);
u.set(ll, ll, 1.0);
u.set(kk, ll, 0.0);
u.set(ll, kk, 0.0);
double alpha = 0.0;
double beta = 0.0;
if (Math.abs(matrix.get(k, k) - matrix.get(l, l)) < Matrices.EPS) {
alpha = beta = Math.sqrt(0.5);
} else {
double mu = 2 * matrix.get(k, l) / (matrix.get(k, k) - matrix.get(l, l));
mu = 1.0 / Math.sqrt(1.0 + mu * mu);
alpha = Math.sqrt(0.5 * (1.0 + mu));
beta = Math.signum(mu) * Math.sqrt(0.5 * (1.0 - mu));
}
u.set(k, k, alpha);
u.set(l, l, alpha);
u.set(k, l, -beta);
u.set(l, k, beta);
}
/**
* Returns the result of Eigen decomposition for non-<a
* href="http://mathworld.wolfram.com/SymmetricMatrix.html">symmetric</a>
* matrix
* <p>
* See <a href="http://mathworld.wolfram.com/EigenDecomposition.html">
* http://mathworld.wolfram.com/EigenDecomposition.html</a> for more
* details.
* </p>
*
* @param matrix
* @return { P, D }
*/
private Matrix[] decomposeNonSymmetricMatrix(Matrix matrix) {
Matrix A = matrix.copy();
int n = matrix.columns();
Matrix v = SparseMatrix.identity(n);
Vector d = DenseVector.zero(n);
Vector e = DenseVector.zero(n);
Matrix h = A.copy();
Vector ort = DenseVector.zero(n);
// Reduce to Hessenberg form.
orthes(h, v, ort);
// Reduce Hessenberg to real Schur form.
hqr2(h, v, d, e);
Matrix dd = matrix.blankOfShape(n, n);
for (int i = 0; i < n; i++) {
dd.set(i, i, d.get(i));
if (e.get(i) > 0) {
dd.set(i, i + 1, e.get(i));
} else if (e.get(i) < 0) {
dd.set(i, i - 1, e.get(i));
}
}
return new Matrix[] { v, dd };
}
// Nonsymmetric reduction to Hessenberg form.
private void orthes(Matrix h, Matrix v, Vector ort) {
// This is derived from the Algol procedures orthes and ortran,
// by Martin and Wilkinson, Handbook for Auto. Comp.,
// Vol.ii-Linear Algebra, and the corresponding
// Fortran subroutines in EISPACK.
int n = ort.length();
int low = 0;
int high = n - 1;
for (int m = low + 1; m <= high - 1; m++) {
// Scale column.
double scale = 0.0;
for (int i = m; i <= high; i++) {
scale = scale + Math.abs(h.get(i, m - 1));
}
if (scale != 0.0) {
// Compute Householder transformation.
double hh = 0.0;
for (int i = high; i >= m; i--) {
ort.set(i, h.get(i, m - 1) / scale);
hh += ort.get(i) * ort.get(i);
}
double g = Math.sqrt(hh);
if (ort.get(m) > Matrices.EPS) {
g = -g;
}
hh = hh - ort.get(m) * g;
ort.updateAt(m, Vectors.asMinusFunction(g));
// Apply Householder similarity transformation
// H = (I-u*u'/h)*H*(I-u*u')/h)
for (int j = m; j < n; j++) {
double f = 0.0;
for (int i = high; i >= m; i--) {
f += ort.get(i) * h.get(i, j);
}
f = f / hh;
for (int i = m; i <= high; i++) {
h.updateAt(i, j, Matrices.asMinusFunction(f * ort.get(i)));
}
}
for (int i = 0; i <= high; i++) {
double f = 0.0;
for (int j = high; j >= m; j--) {
f += ort.get(j) * h.get(i, j);
}
f = f / hh;
for (int j = m; j <= high; j++) {
h.updateAt(i, j, Matrices.asMinusFunction(f * ort.get(j)));
}
}
ort.set(m, scale * ort.get(m));
h.set(m, m - 1, scale * g);
}
}
// Accumulate transformations (Algol's ortran).
for (int m = high - 1; m >= low + 1; m--) {
if (Math.abs(h.get(m, m - 1)) > Matrices.EPS) {
for (int i = m + 1; i <= high; i++) {
ort.set(i, h.get(i, m - 1));
}
for (int j = m; j <= high; j++) {
double g = 0.0;
for (int i = m; i <= high; i++) {
g += ort.get(i) * v.get(i, j);
}
// Double division avoids possible underflow
g = (g / ort.get(m)) / h.get(m, m - 1);
for (int i = m; i <= high; i++) {
v.updateAt(i, j, Matrices.asPlusFunction(g * ort.get(i)));
}
}
}
}
}
// Nonsymmetric reduction from Hessenberg to real Schur form.
private void hqr2(Matrix H, Matrix V, Vector d, Vector e) {
// This is derived from the Algol procedure hqr2,
// by Martin and Wilkinson, Handbook for Auto. Comp.,
// Vol.ii-Linear Algebra, and the corresponding
// Fortran subroutine in EISPACK.
// Initialize
int nn = e.length();
int n = nn - 1;
int low = 0;
int high = nn - 1;
double eps = Math.pow(2.0, -52.0);
double exshift = 0.0;
double p = 0;
double q = 0;
double r = 0;
double s = 0;
double z = 0;
double t;
double w;
double x;
double y;
// Store roots isolated by balanc and compute matrix norm
double norm = 0.0;
for (int i = 0; i < nn; i++) {
if (i < low | i > high) {
d.set(i, H.get(i, i));
e.set(i, 0.0);
}
for (int j = Math.max(i - 1, 0); j < nn; j++) {
norm = norm + Math.abs(H.get(i, j));
}
}
// Outer loop over eigenvalue index
int iter = 0;
while (n >= low) {
// Look for single small sub-diagonal element
int l = n;
while (l > low) {
s = Math.abs(H.get(l - 1, l - 1))
+ Math.abs(H.get(l, l));
if (s == 0.0) {
s = norm;
}
if (Math.abs(H.get(l, l - 1)) < eps * s) {
break;
}
l--;
}
// Check for convergence
// One root found
if (l == n) {
H.updateAt(n, n, Matrices.asPlusFunction(exshift));
d.set(n, H.get(n, n));
e.set(n, 0.0);
n--;
iter = 0;
// Two roots found
} else if (l == n - 1) {
w = H.get(n, n - 1) * H.get(n - 1, n);
p = (H.get(n - 1, n - 1) - H.get(n, n)) / 2.0;
q = p * p + w;
z = Math.sqrt(Math.abs(q));
H.updateAt(n, n, Matrices.asPlusFunction(exshift));
H.updateAt(n - 1, n - 1, Matrices.asPlusFunction(exshift));
x = H.get(n, n);
// Real pair
if (q >= 0) {
if (p >= 0) {
z = p + z;
} else {
z = p - z;
}
d.set(n - 1, x + z);
d.set(n, d.get(n - 1));
if (z != 0.0) {
d.set(n, x - w / z);
}
e.set(n - 1, 0.0);
e.set(n, 0.0);
x = H.get(n, n - 1);
s = Math.abs(x) + Math.abs(z);
p = x / s;
q = z / s;
r = Math.sqrt(p * p + q * q);
p = p / r;
q = q / r;
// Row modification
for (int j = n - 1; j < nn; j++) {
z = H.get(n - 1, j);
H.set(n - 1, j, q * z + p * H.get(n, j));
H.set(n, j, q * H.get(n, j) - p * z);
}
// Column modification
for (int i = 0; i <= n; i++) {
z = H.get(i, n - 1);
H.set(i, n - 1, q * z + p * H.get(i, n));
H.set(i, n, q * H.get(i, n) - p * z);
}
// Accumulate transformations
for (int i = low; i <= high; i++) {
z = V.get(i, n - 1);
V.set(i, n - 1, q * z + p * V.get(i, n));
V.set(i, n, q * V.get(i, n) - p * z);
}
// Complex pair
} else {
d.set(n - 1, x + p);
d.set(n, x + p);
e.set(n - 1, z);
e.set(n, -z);
}
n = n - 2;
iter = 0;
// No convergence yet
} else {
// Form shift
x = H.get(n, n);
y = 0.0;
w = 0.0;
if (l < n) {
y = H.get(n - 1, n - 1);
w = H.get(n, n - 1) * H.get(n - 1, n);
}
// Wilkinson's original ad hoc shift
if (iter == 10) {
exshift += x;
for (int i = low; i <= n; i++) {
H.updateAt(i, i, Matrices.asMinusFunction(x));
}
s = Math.abs(H.get(n, n - 1))
+ Math.abs(H.get(n - 1, n - 2));
x = y = 0.75 * s; // WTF ???
w = -0.4375 * s * s; // Are you kidding me???
}
// MATLAB's new ad hoc shift
if (iter == 30) {
s = (y - x) / 2.0;
s = s * s + w;
if (s > 0) {
s = Math.sqrt(s);
if (y < x) {
s = -s;
}
s = x - w / ((y - x) / 2.0 + s);
for (int i = low; i <= n; i++) {
H.updateAt(i, i, Matrices.asMinusFunction(s));
}
exshift += s;
x = y = w = 0.964;
}
}
iter = iter + 1; // (Could check iteration count here.)
// Look for two consecutive small sub-diagonal elements
int m = n - 2;
while (m >= l) {
z = H.get(m, m);
r = x - z;
s = y - z;
p = (r * s - w) / H.get(m + 1, m)
+ H.get(m, m + 1);
q = H.get(m + 1, m + 1) - z - r - s;
r = H.get(m + 2, m + 1);
s = Math.abs(p) + Math.abs(q) + Math.abs(r);
p = p / s;
q = q / s;
r = r / s;
if (m == l) {
break;
}
if (Math.abs(H.get(m, m - 1)) * (Math.abs(q) + Math.abs(r)) < eps
* (Math.abs(p) * (Math.abs(H.get(m - 1, m - 1))
+ Math.abs(z) + Math.abs(H.get(m + 1, m + 1))))) {
break;
}
m--;
}
for (int i = m + 2; i <= n; i++) {
H.set(i, i - 2, 0.0);
if (i > m + 2) {
H.set(i, i - 3, 0.0);
}
}
// Double QR step involving rows l:n and columns m:n
for (int k = m; k <= n - 1; k++) {
boolean notlast = (k != n - 1);
if (k != m) {
p = H.get(k, k - 1);
q = H.get(k + 1, k - 1);
r = (notlast ? H.get(k + 2, k - 1) : 0.0);
x = Math.abs(p) + Math.abs(q) + Math.abs(r);
if (x == 0.0) {
continue;
}
p = p / x;
q = q / x;
r = r / x;
}
s = Math.sqrt(p * p + q * q + r * r);
if (p < 0) {
s = -s;
}
if (s != 0) {
if (k != m) {
H.set(k, k - 1, -s * x);
} else if (l != m) {
H.updateAt(k, k - 1, Matrices.INV_FUNCTION);
}
p = p + s;
x = p / s;
y = q / s;
z = r / s;
q = q / p;
r = r / p;
// Row modification
for (int j = k; j < nn; j++) {
p = H.get(k, j) + q * H.get(k + 1, j);
if (notlast) {
p = p + r * H.get(k + 2, j);
H.updateAt(k + 2, j,
Matrices.asMinusFunction(p * z));
}
H.updateAt(k, j, Matrices.asMinusFunction(p * x));
H.updateAt(k + 1, j, Matrices.asMinusFunction(p * y));
}
// Column modification
for (int i = 0; i <= Math.min(n, k + 3); i++) {
p = x * H.get(i, k) + y
* H.get(i, k + 1);
if (notlast) {
p = p + z * H.get(i, k + 2);
H.updateAt(i, k + 2,
Matrices.asMinusFunction(p * r));
}
H.updateAt(i, k, Matrices.asMinusFunction(p));
H.updateAt(i, k + 1, Matrices.asMinusFunction(p * q));
}
// Accumulate transformations
for (int i = low; i <= high; i++) {
p = x * V.get(i, k) + y
* V.get(i, k + 1);
if (notlast) {
p = p + z * V.get(i, k + 2);
V.updateAt(i, k + 2,
Matrices.asMinusFunction(p * r));
}
V.updateAt(i, k, Matrices.asMinusFunction(p));
V.updateAt(i, k + 1, Matrices.asMinusFunction(p * q));
}
} // (s != 0)
} // k loop
} // check convergence
} // while (n >= low)
// Backsubstitute to find vectors of upper triangular form
if (norm == 0.0) {
return;
}
for (n = nn - 1; n >= 0; n--) {
p = d.get(n);
q = e.get(n);
// Real vector
if (q == 0) {
int l = n;
H.set(n, n, 1.0);
for (int i = n - 1; i >= 0; i--) {
w = H.get(i, i) - p;
r = 0.0;
for (int j = l; j <= n; j++) {
r = r + H.get(i, j) * H.get(j, n);
}
if (e.get(i) < 0.0) {
z = w;
s = r;
} else {
l = i;
if (e.get(i) == 0.0) {
if (w != 0.0) {
H.set(i, n, -r / w);
} else {
H.set(i, n, -r / (eps * norm));
}
// Solve real equations
} else {
x = H.get(i, i + 1);
y = H.get(i + 1, i);
q = (d.get(i) - p) * (d.get(i) - p)
+ e.get(i) * e.get(i);
t = (x * s - z * r) / q;
H.set(i, n, t);
if (Math.abs(x) > Math.abs(z)) {
H.set(i + 1, n, (-r - w * t) / x);
} else {
H.set(i + 1, n, (-s - y * t) / z);
}
}
// Overflow control
t = Math.abs(H.get(i, n));
if ((eps * t) * t > 1) {
for (int j = i; j <= n; j++) {
H.updateAt(j, n, Matrices.asDivFunction(t));
}
}
}
}
// Complex vector
} else if (q < 0) {
int l = n - 1;
// Last vector component imaginary so matrix is triangular
if (Math.abs(H.get(n, n - 1))
> Math.abs(H.get(n - 1, n))) {
H.set(n - 1, n - 1, q / H.get(n, n - 1));
H.set(n - 1, n, -(H.get(n, n) - p)
/ H.get(n, n - 1));
} else {
double[] cdiv = cdiv(0.0, -H.get(n - 1, n),
H.get(n - 1, n - 1) - p, q);
H.set(n - 1, n - 1, cdiv[0]);
H.set(n - 1, n, cdiv[1]);
}
H.set(n, n - 1, 0.0);
H.set(n, n, 1.0);
for (int i = n - 2; i >= 0; i--) {
double ra;
double sa;
double vr;
double vi;
ra = 0.0;
sa = 0.0;
for (int j = l; j <= n; j++) {
ra = ra + H.get(i, j) * H.get(j, n - 1);
sa = sa + H.get(i, j) * H.get(j, n);
}
w = H.get(i, i) - p;
if (e.get(i) < 0.0) {
z = w;
r = ra;
s = sa;
} else {
l = i;
if (e.get(i) == 0) {
double[] cdiv = cdiv(-ra, -sa, w, q);
H.set(i, n - 1, cdiv[0]);
H.set(i, n, cdiv[1]);
} else {
// Solve complex equations
x = H.get(i, i + 1);
y = H.get(i + 1, i);
vr = (d.get(i) - p) * (d.get(i) - p)
+ e.get(i) * e.get(i) - q * q;
vi = (d.get(i) - p) * 2.0 * q;
if (vr == 0.0 & vi == 0.0) {
vr = eps
* norm
* (Math.abs(w) + Math.abs(q)
+ Math.abs(x) + Math.abs(y) + Math
.abs(z));
}
double[] cdiv = cdiv(x * r - z * ra + q * sa,
x * s - z * sa - q * ra, vr, vi);
H.set(i, n - 1, cdiv[0]);
H.set(i, n, cdiv[1]);
if (Math.abs(x) > (Math.abs(z) + Math.abs(q))) {
H.set(i + 1, n - 1, (-ra - w
* H.get(i, n - 1) + q
* H.get(i, n)) / x);
H.set(i + 1, n, (-sa - w
* H.get(i, n) - q
* H.get(i, n - 1)) / x);
} else {
cdiv = cdiv(-r - y
* H.get(i, n - 1), -s - y
* H.get(i, n), z, q);
H.set(i + 1, n - 1, cdiv[0]);
H.set(i + 1, n, cdiv[1]);
}
}
// Overflow control
t = Math.max(Math.abs(H.get(i, n - 1)),
Math.abs(H.get(i, n)));
if ((eps * t) * t > 1) {
for (int j = i; j <= n; j++) {
H.updateAt(j, n - 1, Matrices.asDivFunction(t));
H.updateAt(j, n, Matrices.asDivFunction(t));
}
}
}
}
}
}
// Vectors of isolated roots
for (int i = 0; i < nn; i++) {
if (i < low | i > high) {
for (int j = i; j < nn; j++) {
V.set(i, j, H.get(i, j));
}
}
}
// Back transformation to get eigenvectors of original matrix
for (int j = nn - 1; j >= low; j--) {
for (int i = low; i <= high; i++) {
z = 0.0;
for (int k = low; k <= Math.min(j, high); k++) {
z = z + V.get(i, k) * H.get(k, j);
}
V.set(i, j, z);
}
}
}
private double[] cdiv(double xr, double xi, double yr, double yi) {
double cdivr;
double cdivi;
double r;
double d;
if (Math.abs(yr) > Math.abs(yi)) {
r = yi / yr;
d = yr + r * yi;
cdivr = (xr + r * xi) / d;
cdivi = (xi - r * xr) / d;
} else {
r = yr / yi;
d = yi + r * yr;
cdivr = (r * xr + xi) / d;
cdivi = (r * xi - xr) / d;
}
return new double[] { cdivr, cdivi };
}
}
| apache-2.0 |
jdgwartney/vsphere-ws | java/JAXWS/samples/com/vmware/vim25/VirtualSCSIController.java | 3215 |
package com.vmware.vim25;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for VirtualSCSIController complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="VirtualSCSIController">
* <complexContent>
* <extension base="{urn:vim25}VirtualController">
* <sequence>
* <element name="hotAddRemove" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
* <element name="sharedBus" type="{urn:vim25}VirtualSCSISharing"/>
* <element name="scsiCtlrUnitNumber" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "VirtualSCSIController", propOrder = {
    "hotAddRemove",
    "sharedBus",
    "scsiCtlrUnitNumber"
})
@XmlSeeAlso({
    VirtualLsiLogicController.class,
    ParaVirtualSCSIController.class,
    VirtualBusLogicController.class,
    VirtualLsiLogicSASController.class
})
public class VirtualSCSIController
    extends VirtualController
{

    protected Boolean hotAddRemove;
    @XmlElement(required = true)
    protected VirtualSCSISharing sharedBus;
    protected Integer scsiCtlrUnitNumber;

    /**
     * Returns the current value of the optional {@code hotAddRemove} property.
     *
     * @return the stored {@link Boolean }, or {@code null} when the property
     *         has not been set
     */
    public Boolean isHotAddRemove() {
        return this.hotAddRemove;
    }

    /**
     * Replaces the value of the optional {@code hotAddRemove} property.
     *
     * @param value
     *     the new {@link Boolean } value; may be {@code null} to unset
     */
    public void setHotAddRemove(Boolean value) {
        this.hotAddRemove = value;
    }

    /**
     * Returns the current value of the required {@code sharedBus} property.
     *
     * @return the stored {@link VirtualSCSISharing } value
     */
    public VirtualSCSISharing getSharedBus() {
        return this.sharedBus;
    }

    /**
     * Replaces the value of the required {@code sharedBus} property.
     *
     * @param value
     *     the new {@link VirtualSCSISharing } value
     */
    public void setSharedBus(VirtualSCSISharing value) {
        this.sharedBus = value;
    }

    /**
     * Returns the current value of the optional {@code scsiCtlrUnitNumber}
     * property.
     *
     * @return the stored {@link Integer }, or {@code null} when the property
     *         has not been set
     */
    public Integer getScsiCtlrUnitNumber() {
        return this.scsiCtlrUnitNumber;
    }

    /**
     * Replaces the value of the optional {@code scsiCtlrUnitNumber} property.
     *
     * @param value
     *     the new {@link Integer } value; may be {@code null} to unset
     */
    public void setScsiCtlrUnitNumber(Integer value) {
        this.scsiCtlrUnitNumber = value;
    }

}
| apache-2.0 |
Malanius/Terasology | engine/src/main/java/org/terasology/utilities/procedural/SimplexNoise.java | 19875 | /*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.utilities.procedural;
import org.terasology.math.TeraMath;
import org.terasology.utilities.random.FastRandom;
/**
* A speed-improved simplex noise algorithm for Simplex noise in 2D, 3D and 4D.
* <br><br>
* Based on example code by Stefan Gustavson (stegu@itn.liu.se).
* Optimisations by Peter Eastman (peastman@drizzle.stanford.edu).
* Better rank ordering method by Stefan Gustavson in 2012.
* <br><br>
* This could be speeded up even further, but it's useful as it is.
* <br><br>
* Version 2012-03-09
* <br><br>
* This code was placed in the public domain by its original author,
* Stefan Gustavson. You may use it as you see fit, but
* attribution is appreciated.
* <br><br>
* See http://staffwww.itn.liu.se/~stegu/
* <br><br>
* msteiger: Introduced seed value
*/
public class SimplexNoise extends AbstractNoise implements Noise2D, Noise3D {

    /**
     * Multiply this with the gridDim provided and noise(x,x) will give tileable 1D noise which will tile
     * when x crosses a multiple of (this * gridDim)
     * <p>
     * The value equals 1 / sqrt(3), which maps the diagonal (x,x) onto a lattice
     * direction of the skewed simplex grid.
     * </p>
     */
    public static final float TILEABLE1DMAGICNUMBER = 0.5773502691896258f;

    // Gradient directions for 3D noise: the midpoints of the 12 cube edges.
    private static Grad[] grad3 = {
            new Grad(1, 1, 0), new Grad(-1, 1, 0), new Grad(1, -1, 0), new Grad(-1, -1, 0),
            new Grad(1, 0, 1), new Grad(-1, 0, 1), new Grad(1, 0, -1), new Grad(-1, 0, -1),
            new Grad(0, 1, 1), new Grad(0, -1, 1), new Grad(0, 1, -1), new Grad(0, -1, -1)};

    // Gradient directions for 4D noise: 32 vectors with one zero component.
    private static Grad[] grad4 = {
            new Grad(0, 1, 1, 1), new Grad(0, 1, 1, -1), new Grad(0, 1, -1, 1), new Grad(0, 1, -1, -1),
            new Grad(0, -1, 1, 1), new Grad(0, -1, 1, -1), new Grad(0, -1, -1, 1), new Grad(0, -1, -1, -1),
            new Grad(1, 0, 1, 1), new Grad(1, 0, 1, -1), new Grad(1, 0, -1, 1), new Grad(1, 0, -1, -1),
            new Grad(-1, 0, 1, 1), new Grad(-1, 0, 1, -1), new Grad(-1, 0, -1, 1), new Grad(-1, 0, -1, -1),
            new Grad(1, 1, 0, 1), new Grad(1, 1, 0, -1), new Grad(1, -1, 0, 1), new Grad(1, -1, 0, -1),
            new Grad(-1, 1, 0, 1), new Grad(-1, 1, 0, -1), new Grad(-1, -1, 0, 1), new Grad(-1, -1, 0, -1),
            new Grad(1, 1, 1, 0), new Grad(1, 1, -1, 0), new Grad(1, -1, 1, 0), new Grad(1, -1, -1, 0),
            new Grad(-1, 1, 1, 0), new Grad(-1, 1, -1, 0), new Grad(-1, -1, 1, 0), new Grad(-1, -1, -1, 0)};

    // Skewing and unskewing factors for 2, 3, and 4 dimensions
    private static final float F2 = 0.5f * (float) (Math.sqrt(3.0f) - 1.0f);
    private static final float G2 = (3.0f - (float) Math.sqrt(3.0f)) / 6.0f;
    private static final float F3 = 1.0f / 3.0f;
    private static final float G3 = 1.0f / 6.0f;
    private static final float F4 = ((float) Math.sqrt(5.0f) - 1.0f) / 4.0f;
    private static final float G4 = (5.0f - (float) Math.sqrt(5.0f)) / 20.0f;

    // perm / permMod12 are doubled so index arithmetic never needs wrapping.
    private final short[] perm;
    private final short[] permMod12;
    private final int permCount;

    /**
     * Initialize permutations with a given seed and grid dimension.
     *
     * @param seed a seed value used for permutation shuffling
     */
    public SimplexNoise(long seed) {
        this(seed, 256);
    }

    /**
     * Initialize permutations with a given seed and grid dimension.
     * Supports 1D tileable noise
     * @see SimplexNoise#TILEABLE1DMAGICNUMBER
     *
     * @param seed a seed value used for permutation shuffling
     * @param gridDim gridDim x gridDim will be the number of squares in the square grid formed after skewing the simplices belonging to once "tile"
     */
    public SimplexNoise(long seed, int gridDim) {
        FastRandom rand = new FastRandom(seed);
        permCount = gridDim;
        perm = new short[permCount * 2];
        permMod12 = new short[permCount * 2];
        short[] p = new short[permCount];

        // Initialize with all values [0..(permCount-1)]
        for (short i = 0; i < permCount; i++) {
            p[i] = i;
        }

        // Shuffle the array
        for (int i = 0; i < permCount; i++) {
            int j = rand.nextInt(permCount);

            short swap = p[i];
            p[i] = p[j];
            p[j] = swap;
        }

        // Duplicate the shuffled table; permMod12 caches perm[i] % 12 so 3D
        // gradient lookups avoid a modulo at noise time.
        for (int i = 0; i < permCount * 2; i++) {
            perm[i] = p[i % permCount];
            permMod12[i] = (short) (perm[i] % 12);
        }
    }

    // Dot product of a gradient with a 2D offset vector.
    private static float dot(Grad g, float x, float y) {
        return g.x * x + g.y * y;
    }

    // Dot product of a gradient with a 3D offset vector.
    private static float dot(Grad g, float x, float y, float z) {
        return g.x * x + g.y * y + g.z * z;
    }

    // Dot product of a gradient with a 4D offset vector.
    private static float dot(Grad g, float x, float y, float z, float w) {
        return g.x * x + g.y * y + g.z * z + g.w * w;
    }

    /**
     * 2D simplex noise
     *
     * @param xin the x input coordinate
     * @param yin the y input coordinate
     * @return a noise value in the interval [-1,1]
     */
    @Override
    public float noise(float xin, float yin) {
        float n0;
        float n1;
        float n2; // Noise contributions from the three corners

        // Skew the input space to determine which simplex cell we're in
        float s = (xin + yin) * F2; // Hairy factor for 2D
        int i = TeraMath.floorToInt(xin + s);
        int j = TeraMath.floorToInt(yin + s);
        float t = (i + j) * G2;
        float xo0 = i - t; // Unskew the cell origin back to (x,y) space
        float yo0 = j - t;
        float x0 = xin - xo0; // The x,y distances from the cell origin
        float y0 = yin - yo0;

        // For the 2D case, the simplex shape is an equilateral triangle.
        // Determine which simplex we are in.
        int i1; // Offsets for second (middle) corner of simplex in (i,j) coords
        int j1;
        if (x0 > y0) { // lower triangle, XY order: (0,0)->(1,0)->(1,1)
            i1 = 1;
            j1 = 0;
        } else { // upper triangle, YX order: (0,0)->(0,1)->(1,1)
            i1 = 0;
            j1 = 1;
        }

        // A step of (1,0) in (i,j) means a step of (1-c,-c) in (x,y), and
        // a step of (0,1) in (i,j) means a step of (-c,1-c) in (x,y), where
        // c = (3-sqrt(3))/6
        float x1 = x0 - i1 + G2; // Offsets for middle corner in (x,y) unskewed coords
        float y1 = y0 - j1 + G2;
        float x2 = x0 - 1.0f + 2.0f * G2; // Offsets for last corner in (x,y) unskewed coords
        float y2 = y0 - 1.0f + 2.0f * G2;

        // Work out the hashed gradient indices of the three simplex corners
        int ii = Math.floorMod(i, permCount);
        int jj = Math.floorMod(j, permCount);
        int gi0 = permMod12[ii + perm[jj]];
        int gi1 = permMod12[ii + i1 + perm[jj + j1]];
        int gi2 = permMod12[ii + 1 + perm[jj + 1]];

        // Calculate the contribution from the three corners: each corner
        // contributes (radial falloff)^4 * (gradient . offset).
        float t0 = 0.5f - x0 * x0 - y0 * y0;
        if (t0 < 0) {
            n0 = 0.0f;
        } else {
            t0 *= t0;
            n0 = t0 * t0 * dot(grad3[gi0], x0, y0); // (x,y) of grad3 used for 2D gradient
        }

        float t1 = 0.5f - x1 * x1 - y1 * y1;
        if (t1 < 0) {
            n1 = 0.0f;
        } else {
            t1 *= t1;
            n1 = t1 * t1 * dot(grad3[gi1], x1, y1);
        }

        float t2 = 0.5f - x2 * x2 - y2 * y2;
        if (t2 < 0) {
            n2 = 0.0f;
        } else {
            t2 *= t2;
            n2 = t2 * t2 * dot(grad3[gi2], x2, y2);
        }

        // Add contributions from each corner to get the final noise value.
        // The result is scaled to return values in the interval [-1,1].
        return 70.0f * (n0 + n1 + n2);
    }

    /**
     * 3D simplex noise
     *
     * @param xin the x input coordinate
     * @param yin the y input coordinate
     * @param zin the z input coordinate
     * @return a noise value in the interval [-1,1]
     */
    @Override
    public float noise(float xin, float yin, float zin) {
        float n0;
        float n1;
        float n2;
        float n3; // Noise contributions from the four corners

        // Skew the input space to determine which simplex cell we're in
        float s = (xin + yin + zin) * F3; // Very nice and simple skew factor for 3D
        int i = TeraMath.floorToInt(xin + s);
        int j = TeraMath.floorToInt(yin + s);
        int k = TeraMath.floorToInt(zin + s);
        float t = (i + j + k) * G3;
        float xo0 = i - t; // Unskew the cell origin back to (x,y,z) space
        float yo0 = j - t;
        float zo0 = k - t;
        float x0 = xin - xo0; // The x,y,z distances from the cell origin
        float y0 = yin - yo0;
        float z0 = zin - zo0;

        // For the 3D case, the simplex shape is a slightly irregular tetrahedron.
        // Determine which simplex we are in.
        int i1;
        int j1;
        int k1; // Offsets for second corner of simplex in (i,j,k) coords
        int i2;
        int j2;
        int k2; // Offsets for third corner of simplex in (i,j,k) coords
        if (x0 >= y0) {
            if (y0 >= z0) { // X Y Z order
                i1 = 1;
                j1 = 0;
                k1 = 0;
                i2 = 1;
                j2 = 1;
                k2 = 0;
            } else if (x0 >= z0) { // X Z Y order
                i1 = 1;
                j1 = 0;
                k1 = 0;
                i2 = 1;
                j2 = 0;
                k2 = 1;
            } else { // Z X Y order
                i1 = 0;
                j1 = 0;
                k1 = 1;
                i2 = 1;
                j2 = 0;
                k2 = 1;
            }
        } else { // x0<y0
            if (y0 < z0) { // Z Y X order
                i1 = 0;
                j1 = 0;
                k1 = 1;
                i2 = 0;
                j2 = 1;
                k2 = 1;
            } else if (x0 < z0) { // Y Z X order
                i1 = 0;
                j1 = 1;
                k1 = 0;
                i2 = 0;
                j2 = 1;
                k2 = 1;
            } else { // Y X Z order
                i1 = 0;
                j1 = 1;
                k1 = 0;
                i2 = 1;
                j2 = 1;
                k2 = 0;
            }
        }

        // A step of (1,0,0) in (i,j,k) means a step of (1-c,-c,-c) in (x,y,z),
        // a step of (0,1,0) in (i,j,k) means a step of (-c,1-c,-c) in (x,y,z), and
        // a step of (0,0,1) in (i,j,k) means a step of (-c,-c,1-c) in (x,y,z), where
        // c = 1/6.
        float x1 = x0 - i1 + G3; // Offsets for second corner in (x,y,z) coords
        float y1 = y0 - j1 + G3;
        float z1 = z0 - k1 + G3;
        float x2 = x0 - i2 + 2.0f * G3; // Offsets for third corner in (x,y,z) coords
        float y2 = y0 - j2 + 2.0f * G3;
        float z2 = z0 - k2 + 2.0f * G3;
        float x3 = x0 - 1.0f + 3.0f * G3; // Offsets for last corner in (x,y,z) coords
        float y3 = y0 - 1.0f + 3.0f * G3;
        float z3 = z0 - 1.0f + 3.0f * G3;

        // Work out the hashed gradient indices of the four simplex corners
        int ii = Math.floorMod(i, permCount);
        int jj = Math.floorMod(j, permCount);
        int kk = Math.floorMod(k, permCount);
        int gi0 = permMod12[ii + perm[jj + perm[kk]]];
        int gi1 = permMod12[ii + i1 + perm[jj + j1 + perm[kk + k1]]];
        int gi2 = permMod12[ii + i2 + perm[jj + j2 + perm[kk + k2]]];
        int gi3 = permMod12[ii + 1 + perm[jj + 1 + perm[kk + 1]]];

        // Calculate the contribution from the four corners
        float t0 = 0.6f - x0 * x0 - y0 * y0 - z0 * z0;
        if (t0 < 0) {
            n0 = 0.0f;
        } else {
            t0 *= t0;
            n0 = t0 * t0 * dot(grad3[gi0], x0, y0, z0);
        }

        float t1 = 0.6f - x1 * x1 - y1 * y1 - z1 * z1;
        if (t1 < 0) {
            n1 = 0.0f;
        } else {
            t1 *= t1;
            n1 = t1 * t1 * dot(grad3[gi1], x1, y1, z1);
        }

        float t2 = 0.6f - x2 * x2 - y2 * y2 - z2 * z2;
        if (t2 < 0) {
            n2 = 0.0f;
        } else {
            t2 *= t2;
            n2 = t2 * t2 * dot(grad3[gi2], x2, y2, z2);
        }

        float t3 = 0.6f - x3 * x3 - y3 * y3 - z3 * z3;
        if (t3 < 0) {
            n3 = 0.0f;
        } else {
            t3 *= t3;
            n3 = t3 * t3 * dot(grad3[gi3], x3, y3, z3);
        }

        // Add contributions from each corner to get the final noise value.
        // The result is scaled to stay just inside [-1,1]
        return 32.0f * (n0 + n1 + n2 + n3);
    }

    /**
     * 4D simplex noise, better simplex rank ordering method 2012-03-09
     *
     * @param xin the x input coordinate
     * @param yin the y input coordinate
     * @param zin the z input coordinate
     * @param win the w input coordinate
     * @return a noise value in the interval [-1,1]
     */
    public float noise(float xin, float yin, float zin, float win) {
        float n0;
        float n1;
        float n2;
        float n3;
        float n4; // Noise contributions from the five corners

        // Skew the (x,y,z,w) space to determine which cell of 24 simplices we're in
        float s = (xin + yin + zin + win) * F4; // Factor for 4D skewing
        int i = TeraMath.floorToInt(xin + s);
        int j = TeraMath.floorToInt(yin + s);
        int k = TeraMath.floorToInt(zin + s);
        int l = TeraMath.floorToInt(win + s);
        float t = (i + j + k + l) * G4; // Factor for 4D unskewing
        float xo0 = i - t; // Unskew the cell origin back to (x,y,z,w) space
        float yo0 = j - t;
        float zo0 = k - t;
        float wo0 = l - t;
        float x0 = xin - xo0; // The x,y,z,w distances from the cell origin
        float y0 = yin - yo0;
        float z0 = zin - zo0;
        float w0 = win - wo0;

        // For the 4D case, the simplex is a 4D shape I won't even try to describe.
        // To find out which of the 24 possible simplices we're in, we need to
        // determine the magnitude ordering of x0, y0, z0 and w0.
        // Six pair-wise comparisons are performed between each possible pair
        // of the four coordinates, and the results are used to rank the numbers.
        int rankx = 0;
        int ranky = 0;
        int rankz = 0;
        int rankw = 0;
        if (x0 > y0) {
            rankx++;
        } else {
            ranky++;
        }
        if (x0 > z0) {
            rankx++;
        } else {
            rankz++;
        }
        if (x0 > w0) {
            rankx++;
        } else {
            rankw++;
        }
        if (y0 > z0) {
            ranky++;
        } else {
            rankz++;
        }
        if (y0 > w0) {
            ranky++;
        } else {
            rankw++;
        }
        if (z0 > w0) {
            rankz++;
        } else {
            rankw++;
        }
        int i1;
        int j1;
        int k1;
        int l1; // The integer offsets for the second simplex corner
        int i2;
        int j2;
        int k2;
        int l2; // The integer offsets for the third simplex corner
        int i3;
        int j3;
        int k3;
        int l3; // The integer offsets for the fourth simplex corner

        // simplex[c] is a 4-vector with the numbers 0, 1, 2 and 3 in some order.
        // Many values of c will never occur, since e.g. x>y>z>w makes x<z, y<w and x<w
        // impossible. Only the 24 indices which have non-zero entries make any sense.
        // We use a thresholding to set the coordinates in turn from the largest magnitude.
        // Rank 3 denotes the largest coordinate.
        i1 = rankx >= 3 ? 1 : 0;
        j1 = ranky >= 3 ? 1 : 0;
        k1 = rankz >= 3 ? 1 : 0;
        l1 = rankw >= 3 ? 1 : 0;
        // Rank 2 denotes the second largest coordinate.
        i2 = rankx >= 2 ? 1 : 0;
        j2 = ranky >= 2 ? 1 : 0;
        k2 = rankz >= 2 ? 1 : 0;
        l2 = rankw >= 2 ? 1 : 0;
        // Rank 1 denotes the second smallest coordinate.
        i3 = rankx >= 1 ? 1 : 0;
        j3 = ranky >= 1 ? 1 : 0;
        k3 = rankz >= 1 ? 1 : 0;
        l3 = rankw >= 1 ? 1 : 0;
        // The fifth corner has all coordinate offsets = 1, so no need to compute that.
        float x1 = x0 - i1 + G4; // Offsets for second corner in (x,y,z,w) coords
        float y1 = y0 - j1 + G4;
        float z1 = z0 - k1 + G4;
        float w1 = w0 - l1 + G4;
        float x2 = x0 - i2 + 2.0f * G4; // Offsets for third corner in (x,y,z,w) coords
        float y2 = y0 - j2 + 2.0f * G4;
        float z2 = z0 - k2 + 2.0f * G4;
        float w2 = w0 - l2 + 2.0f * G4;
        float x3 = x0 - i3 + 3.0f * G4; // Offsets for fourth corner in (x,y,z,w) coords
        float y3 = y0 - j3 + 3.0f * G4;
        float z3 = z0 - k3 + 3.0f * G4;
        float w3 = w0 - l3 + 3.0f * G4;
        float x4 = x0 - 1.0f + 4.0f * G4; // Offsets for last corner in (x,y,z,w) coords
        float y4 = y0 - 1.0f + 4.0f * G4;
        float z4 = z0 - 1.0f + 4.0f * G4;
        float w4 = w0 - 1.0f + 4.0f * G4;

        // Work out the hashed gradient indices of the five simplex corners
        int ii = Math.floorMod(i, permCount);
        int jj = Math.floorMod(j, permCount);
        int kk = Math.floorMod(k, permCount);
        int ll = Math.floorMod(l, permCount);
        int gi0 = perm[ii + perm[jj + perm[kk + perm[ll]]]] % 32;
        int gi1 = perm[ii + i1 + perm[jj + j1 + perm[kk + k1 + perm[ll + l1]]]] % 32;
        int gi2 = perm[ii + i2 + perm[jj + j2 + perm[kk + k2 + perm[ll + l2]]]] % 32;
        int gi3 = perm[ii + i3 + perm[jj + j3 + perm[kk + k3 + perm[ll + l3]]]] % 32;
        int gi4 = perm[ii + 1 + perm[jj + 1 + perm[kk + 1 + perm[ll + 1]]]] % 32;

        // Calculate the contribution from the five corners
        float t0 = 0.6f - x0 * x0 - y0 * y0 - z0 * z0 - w0 * w0;
        if (t0 < 0) {
            n0 = 0.0f;
        } else {
            t0 *= t0;
            n0 = t0 * t0 * dot(grad4[gi0], x0, y0, z0, w0);
        }

        float t1 = 0.6f - x1 * x1 - y1 * y1 - z1 * z1 - w1 * w1;
        if (t1 < 0) {
            n1 = 0.0f;
        } else {
            t1 *= t1;
            n1 = t1 * t1 * dot(grad4[gi1], x1, y1, z1, w1);
        }

        float t2 = 0.6f - x2 * x2 - y2 * y2 - z2 * z2 - w2 * w2;
        if (t2 < 0) {
            n2 = 0.f;
        } else {
            t2 *= t2;
            n2 = t2 * t2 * dot(grad4[gi2], x2, y2, z2, w2);
        }

        float t3 = 0.6f - x3 * x3 - y3 * y3 - z3 * z3 - w3 * w3;
        if (t3 < 0) {
            n3 = 0.0f;
        } else {
            t3 *= t3;
            n3 = t3 * t3 * dot(grad4[gi3], x3, y3, z3, w3);
        }

        float t4 = 0.6f - x4 * x4 - y4 * y4 - z4 * z4 - w4 * w4;
        if (t4 < 0) {
            n4 = 0.0f;
        } else {
            t4 *= t4;
            n4 = t4 * t4 * dot(grad4[gi4], x4, y4, z4, w4);
        }

        // Sum up and scale the result to cover the range [-1,1]
        return 27.0f * (n0 + n1 + n2 + n3 + n4);
    }

    // Inner class to speed up gradient computations
    // (array access is a lot slower than member access)
    private static class Grad {
        float x;
        float y;
        float z;
        float w;

        // 3D gradient; w defaults to 0.
        Grad(float x, float y, float z) {
            this.x = x;
            this.y = y;
            this.z = z;
        }

        // 4D gradient.
        Grad(float x, float y, float z, float w) {
            this.x = x;
            this.y = y;
            this.z = z;
            this.w = w;
        }
    }
}
| apache-2.0 |
qiuyesuifeng/mybatis-3 | src/test/java/org/apache/ibatis/submitted/cglib_lazy_error/CglibNPETest.java | 6189 | /**
* Copyright 2009-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.submitted.cglib_lazy_error;
import java.io.Reader;
import java.sql.Connection;
import java.sql.DriverManager;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.jdbc.ScriptRunner;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.junit.Assert;
import org.testng.annotations.Test;
import org.testng.annotations.BeforeClass;
/**
 * Regression tests for lazy loading through CGLIB proxies: navigating lazy
 * parent/ancestor associations of a {@code Person} must not raise an NPE,
 * and selects interleaved with inserts must keep returning consistent rows.
 *
 * <p>Every test closes its {@link SqlSession} in a finally block so a failed
 * assertion cannot leak the session.
 */
public class CglibNPETest {

  private static SqlSessionFactory sqlSessionFactory;

  /**
   * Creates the in-memory HSQLDB schema and builds the shared
   * {@link SqlSessionFactory}. Readers are closed in finally blocks so they
   * are released even when the script runner or the factory builder throws.
   */
  @BeforeClass
  public static void initDatabase() throws Exception {
    Connection conn = null;
    try {
      Class.forName("org.hsqldb.jdbcDriver");
      conn = DriverManager.getConnection("jdbc:hsqldb:mem:cglib_lazy_eager_no_error", "sa",
          "");
      Reader schemaReader = Resources.getResourceAsReader("org/apache/ibatis/submitted/cglib_lazy_error/CreateDB.sql");
      try {
        ScriptRunner runner = new ScriptRunner(conn);
        runner.setLogWriter(null);
        runner.setErrorLogWriter(null);
        runner.runScript(schemaReader);
        conn.commit();
      } finally {
        schemaReader.close();
      }
      Reader configReader = Resources.getResourceAsReader("org/apache/ibatis/submitted/cglib_lazy_error/ibatisConfig.xml");
      try {
        sqlSessionFactory = new SqlSessionFactoryBuilder().build(configReader);
      } finally {
        configReader.close();
      }
    } finally {
      if (conn != null) {
        conn.close();
      }
    }
  }

  /** A person without a parent row must yield a null (not proxied) parent. */
  @Test(groups={"tidb"})
  public void testNoParent() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
      PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
      Person person = personMapper.selectById(1);
      Assert.assertNotNull("Persons must not be null", person);
      Person parent = person.getParent();
      Assert.assertNull("Parent must be null", parent);
    } finally {
      sqlSession.close();
    }
  }

  /** The root person is its own ancestor. */
  @Test(groups={"tidb"})
  public void testAncestorSelf() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
      PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
      Person person = personMapper.selectById(1);
      Assert.assertNotNull("Persons must not be null", person);
      Person ancestor = person.getAncestor();
      Assert.assertEquals("Ancestor must be John Smith sr.", person, ancestor);
    } finally {
      sqlSession.close();
    }
  }

  /** Two lazy hops (parent of parent) must resolve correctly. */
  @Test(groups={"tidb"})
  public void testGrandParent() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
      PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
      Person expectedParent = personMapper.selectById(2);
      Person expectedGrandParent = personMapper.selectById(1);
      Person person = personMapper.selectById(3);
      Assert.assertNotNull("Persons must not be null", person);
      Assert.assertEquals("Parent must be John Smith", expectedParent, person.getParent());
      Assert.assertEquals("Parent must be John Smith sr.", expectedGrandParent, person.getParent().getParent());
    } finally {
      sqlSession.close();
    }
  }

  /** The recursive ancestor association resolves without touching parents first. */
  @Test(groups={"tidb"})
  public void testAncestor() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
      PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
      Person expectedAncestor = personMapper.selectById(1);
      Person person = personMapper.selectById(3);
      Assert.assertNotNull("Persons must not be null", person);
      Assert.assertEquals("Ancestor must be John Smith sr.", expectedAncestor, person.getAncestor());
    } finally {
      sqlSession.close();
    }
  }

  /** Triggering the parent chain first must not break the ancestor lookup. */
  @Test(groups={"tidb"})
  public void testAncestorAfterQueryingParents() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
      PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
      Person expectedAncestor = personMapper.selectById(1);
      Person person = personMapper.selectById(3);
      // Load ancestor indirectly.
      Assert.assertNotNull("Persons must not be null", person);
      Assert.assertNotNull("Parent must not be null", person.getParent());
      Assert.assertNotNull("Grandparent must not be null", person.getParent().getParent());
      Assert.assertEquals("Ancestor must be John Smith sr.", expectedAncestor, person.getAncestor());
    } finally {
      sqlSession.close();
    }
  }

  /** An insert between selects must not disturb the already-selected rows. */
  @Test(groups={"tidb"})
  public void testInsertBetweenTwoSelects() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
      PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
      Person selected1 = personMapper.selectById(1);
      Person selected2 = personMapper.selectById(2);
      Person selected3 = personMapper.selectById(3);
      selected1.setId(4L);
      int rows = personMapper.insertPerson(selected1);
      Assert.assertEquals(1, rows);
      selected1 = personMapper.selectById(1);
      selected2 = personMapper.selectById(2);
      selected3 = personMapper.selectById(3);
      Person selected4 = personMapper.selectById(4);
      Assert.assertEquals(1, selected1.getId().longValue());
      Assert.assertEquals(2, selected2.getId().longValue());
      Assert.assertEquals(3, selected3.getId().longValue());
      Assert.assertEquals(4, selected4.getId().longValue());
    } finally {
      sqlSession.close();
    }
  }

  /** A String id that is pasted into SQL must still select the right row. */
  @Test(groups={"tidb"})
  public void testSelectWithStringSQLInjection() {
    SqlSession sqlSession = sqlSessionFactory.openSession();
    try {
      PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
      Person selected1 = personMapper.selectByStringId("1");
      Assert.assertEquals(1, selected1.getId().longValue());
    } finally {
      sqlSession.close();
    }
  }
}
| apache-2.0 |
hazendaz/assertj-core | src/test/java/org/assertj/core/error/ShouldBeBeforeYear_create_Test.java | 2438 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2021 the original author or authors.
*/
package org.assertj.core.error;
import org.assertj.core.description.TextDescription;
import org.assertj.core.internal.ComparatorBasedComparisonStrategy;
import org.assertj.core.presentation.StandardRepresentation;
import org.junit.jupiter.api.Test;
import static java.lang.String.format;
import static org.assertj.core.api.BDDAssertions.then;
import static org.assertj.core.error.ShouldBeBeforeYear.shouldBeBeforeYear;
import static org.assertj.core.presentation.StandardRepresentation.STANDARD_REPRESENTATION;
import static org.assertj.core.test.NeverEqualComparator.NEVER_EQUALS;
import static org.assertj.core.util.DateUtil.parse;
/**
 * Verifies the error message built by {@code ShouldBeBeforeYear#shouldBeBeforeYear},
 * both with the natural ordering and with a custom comparison strategy.
 */
class ShouldBeBeforeYear_create_Test {

  /** Message produced with the default (natural ordering) comparison. */
  @Test
  void should_create_error_message() {
    // GIVEN
    ErrorMessageFactory underTest = shouldBeBeforeYear(parse("2011-01-01"), 2011);
    // WHEN
    String errorMessage = underTest.create(new TextDescription("Test"), new StandardRepresentation());
    // THEN
    String expected = format("[Test] %n"
                             + "Expecting year of:%n"
                             + "  2011-01-01T00:00:00.000 (java.util.Date)%n"
                             + "to be strictly before year:%n"
                             + "  2011%n");
    then(errorMessage).isEqualTo(expected);
  }

  /** Message must also mention the custom comparison strategy that was used. */
  @Test
  void should_create_error_message_with_comparison_strategy() {
    // GIVEN
    ErrorMessageFactory underTest =
        shouldBeBeforeYear(parse("2011-01-01"), 2012, new ComparatorBasedComparisonStrategy(NEVER_EQUALS));
    // WHEN
    String errorMessage = underTest.create(new TextDescription("Test"), STANDARD_REPRESENTATION);
    // THEN
    then(errorMessage).isEqualTo("[Test] %n"
                                 + "Expecting year of:%n"
                                 + "  2011-01-01T00:00:00.000 (java.util.Date)%n"
                                 + "to be strictly before year:%n"
                                 + "  2012%n"
                                 + "when comparing values using '%s'",
                                 NEVER_EQUALS.description());
  }
}
| apache-2.0 |
HuangLS/neo4j | community/kernel/src/test/java/org/neo4j/kernel/configuration/TestConfig.java | 6899 | /*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.configuration;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.neo4j.graphdb.config.InvalidSettingException;
import org.neo4j.graphdb.config.Setting;
import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.neo4j.kernel.configuration.Settings.BOOLEAN;
import static org.neo4j.kernel.configuration.Settings.STRING;
import static org.neo4j.kernel.configuration.Settings.setting;
import static org.neo4j.helpers.collection.MapUtil.stringMap;
/**
 * Unit tests for {@code Config}: default values, legacy-key migration,
 * validation of invalid values, change-listener notification, and
 * registering/augmenting settings after construction.
 */
public class TestConfig
{
    /**
     * Settings holder whose {@code @Migrator} field rewrites the legacy key
     * "old" to the "newer" setting when a Config is built from raw params.
     */
    public static class MyMigratingSettings
    {
        // Instance-initializer block of the anonymous migrator registers a
        // single migration: copy the value of "old" under the "newer" key.
        @Migrator
        public static ConfigurationMigrator migrator = new BaseConfigurationMigrator()
        {
            {
                add( new SpecificPropertyMigration( "old", "Old has been replaced by newer!" )
                {
                    @Override
                    public void setValueWithOldSetting( String value, Map<String, String> rawConfiguration )
                    {
                        rawConfiguration.put( newer.name(), value );
                    }
                } );
            }
        };
        public static Setting<String> newer = setting( "newer", STRING, "" );
    }
    /** Settings holder with defaults, used to test fallback and validation. */
    public static class MySettingsWithDefaults
    {
        public static Setting<String> hello = setting( "hello", STRING, "Hello, World!" );
        public static Setting<Boolean> boolSetting = setting( "bool_setting", BOOLEAN, Settings.TRUE );
    }
    /** Records the last batch of configuration changes it was notified about. */
    private class ChangeCaptureListener implements ConfigurationChangeListener
    {
        // null until the first notification arrives; reset on every callback.
        private Set<ConfigurationChange> lastChangeSet;
        @Override
        public void notifyConfigurationChanges( Iterable<ConfigurationChange> change )
        {
            lastChangeSet = new HashSet<>();
            for ( ConfigurationChange ch : change )
            {
                lastChangeSet.add( ch );
            }
        }
    }
    // Varargs helper; only reads the array, so the generic varargs use is safe.
    private <T> Set<T> setOf( T... objs )
    {
        Set<T> set = new HashSet<>();
        Collections.addAll( set, objs );
        return set;
    }
    @Test
    public void shouldApplyDefaults()
    {
        Config config = new Config( new HashMap<String, String>(), MySettingsWithDefaults.class );
        assertThat( config.get( MySettingsWithDefaults.hello ), is( "Hello, World!" ) );
    }
    @Test
    public void shouldApplyMigrations()
    {
        // When
        Config config = new Config( stringMap("old", "hello!"), MyMigratingSettings.class );
        // Then
        assertThat( config.get( MyMigratingSettings.newer ), is( "hello!" ) );
    }
    // "asd" is not a valid boolean, so applyChanges must reject it.
    @Test( expected = InvalidSettingException.class )
    public void shouldNotAllowSettingInvalidValues()
    {
        Config config = new Config( new HashMap<String, String>(), MySettingsWithDefaults.class );
        Map<String, String> params = config.getParams();
        params.put( MySettingsWithDefaults.boolSetting.name(), "asd" );
        config.applyChanges( params );
        fail( "Expected validation to fail." );
    }
    // Same invalid value, but supplied at construction time.
    @Test( expected = InvalidSettingException.class )
    public void shouldNotAllowInvalidValuesInConstructor()
    {
        new Config( stringMap( MySettingsWithDefaults.boolSetting.name(), "asd" ), MySettingsWithDefaults.class );
        fail( "Expected validation to fail." );
    }
    @Test
    public void shouldNotifyChangeListenersWhenNewSettingsAreApplied()
    {
        // Given
        Config config = new Config( stringMap("setting", "old"), MyMigratingSettings.class );
        ChangeCaptureListener listener = new ChangeCaptureListener();
        config.addConfigurationChangeListener( listener );
        // When
        config.applyChanges( stringMap( "setting", "new" ) );
        // Then
        assertThat( listener.lastChangeSet,
                is( setOf( new ConfigurationChange( "setting", "old", "new" ) ) ) );
    }
    @Test
    public void shouldNotNotifyChangeListenerWhenNothingChanged()
    {
        // Given
        Config config = new Config( stringMap("setting", "old"), MyMigratingSettings.class );
        ChangeCaptureListener listener = new ChangeCaptureListener();
        config.addConfigurationChangeListener( listener );
        // When
        config.applyChanges( stringMap( "setting", "old" ) ); // nothing really changed here
        // Then
        assertThat( listener.lastChangeSet, nullValue() );
    }
    @Test
    public void settingNewPropertyMustNotAlterExistingSettings()
    {
        // Given
        Config config = new Config( stringMap( "a", "1" ) );
        // When
        config.setProperty( "b", "2" );
        // Then
        assertThat( config.getParams(), is( stringMap( "a", "1", "b", "2" ) ) );
    }
    @Test
    public void shouldBeAbleToRegisterSettingsClassesAfterInstantiation() throws Exception
    {
        // Given
        Config config = new Config( stringMap( "old", "hello!" ) );
        // When
        config.registerSettingsClasses( asList( MySettingsWithDefaults.class, MyMigratingSettings.class ) );
        // Then
        assertThat( config.get( MyMigratingSettings.newer ), equalTo( "hello!" ) );
        assertThat( config.get( MySettingsWithDefaults.hello ), equalTo( "Hello, World!" ) );
    }
    @Test
    public void shouldBeAbleToAgumentConfig() throws Exception
    {
        // Given
        Config config = new Config( stringMap( "newer", "old", "non-overlapping", "huzzah" ) );
        // When
        config.augment( stringMap( "newer", "new", "unrelated", "hello" ) );
        // Then
        assertThat( config.get( setting("newer", STRING, "") ), equalTo( "new" ) );
        assertThat( config.get( setting("non-overlapping", STRING, "") ), equalTo( "huzzah" ) );
        assertThat( config.get( setting("unrelated", STRING, "") ), equalTo( "hello" ) );
    }
}
| apache-2.0 |
glameyzhou/training | distribute/src/main/java/org/glamey/training/designmodel/responsibility_chain/Applicant.java | 315 | package org.glamey.training.designmodel.responsibility_chain;
import lombok.Builder;
import lombok.Getter;
import java.math.BigDecimal;
/**
 * Value object describing an applicant flowing through the
 * chain-of-responsibility demo: who applies, for what, and how much.
 * Lombok generates the getters and a builder.
 *
 * @author zhouyang.zhou. 2017.08.14.16.
 */
@Getter
@Builder
public class Applicant {
    // Applicant's name.
    private String name;
    // What the request is about.
    private String subject;
    // Requested amount; BigDecimal avoids binary floating-point rounding.
    private BigDecimal money;
}
| apache-2.0 |
icemagno/mapview | src/main/java/br/com/cmabreu/action/GetUnitDetailAction.java | 1116 | package br.com.cmabreu.action;
import java.io.IOException;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts2.StrutsStatics;
import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.InterceptorRef;
import org.apache.struts2.convention.annotation.ParentPackage;
import org.apache.struts2.convention.annotation.Result;
import com.opensymphony.xwork2.ActionContext;
@Action(value="getUnitDetail", results= {
        @Result(name="ok", type="httpheader", params={"status", "200"}) },
        interceptorRefs= { @InterceptorRef("seguranca") }
)
@ParentPackage("default")
public class GetUnitDetailAction {

    /**
     * AJAX endpoint: writes a (currently empty) UTF-8 body back to the
     * client and always maps to the "ok" (HTTP 200) result, even when the
     * write fails.
     */
    public String execute(){
        String payload = "";
        try {
            HttpServletResponse response =
                    (HttpServletResponse) ActionContext.getContext().get(StrutsStatics.HTTP_RESPONSE);
            response.setCharacterEncoding("UTF-8");
            response.getWriter().write(payload);
        } catch (IOException ex) {
            // Best effort only: log to stdout and still return the mapped result.
            System.out.println("[GetUnitDetailAction] Erro respondendo AJAX.");
        }
        return "ok";
    }
}
| apache-2.0 |
SingingTree/hapi-fhir | hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IAnyResource.java | 1831 | package org.hl7.fhir.instance.model.api;
import ca.uhn.fhir.model.api.annotation.SearchParamDefinition;
import ca.uhn.fhir.rest.gclient.TokenClientParam;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
 * An IBaseResource that has a FHIR version of DSTU3 or higher.
 * Declares the search-parameter constants common to every resource
 * plus id/language/user-data accessors.
 */
public interface IAnyResource extends IBaseResource {
	/**
	 * Search parameter constant for <b>_language</b>
	 */
	@SearchParamDefinition(name="_language", path="", description="The language of the resource", type="string" )
	String SP_RES_LANGUAGE = "_language";
	/**
	 * Search parameter constant for <b>_id</b>
	 */
	@SearchParamDefinition(name="_id", path="", description="The ID of the resource", type="token" )
	String SP_RES_ID = "_id";
	/**
	 * <b>Fluent Client</b> search parameter constant for <b>_id</b>
	 * <p>
	 * Description: <b>the _id of a resource</b><br>
	 * Type: <b>string</b><br>
	 * Path: <b>Resource._id</b><br>
	 * </p>
	 */
	TokenClientParam RES_ID = new TokenClientParam(IAnyResource.SP_RES_ID);
	/** Returns the resource id as a plain String. */
	String getId();
	/** Returns the resource id as a typed id element. */
	IIdType getIdElement();
	/** Returns the language element of the resource. */
	IPrimitiveType<String> getLanguageElement();
	/** Returns the user-data value stored under the given name, if any. */
	Object getUserData(String name);
	/** Sets the resource id; returns this resource for chaining. */
	IAnyResource setId(String theId);
	/** Stores an arbitrary user-data value under the given name. */
	void setUserData(String name, Object value);
}
| apache-2.0 |
NatashaPanchina/npanchina | chess/src/main/java/ru/job4j/chess/firuges/black/KnightBlack.java | 1234 | package ru.job4j.chess.firuges.black;
import ru.job4j.chess.ImpossibleMoveException;
import ru.job4j.chess.firuges.Figure;
import ru.job4j.chess.firuges.Cell;
/**
 * Black Knight.
 * @author Natasha Panchina (panchinanata25@gmail.com)
 * @version 1
 * @since 18.08.2018
 */
public class KnightBlack extends Figure {
    public KnightBlack(final Cell position) {
        super(position);
    }

    /** Returns the knight's current cell. */
    public Cell position() {
        return this.position;
    }

    /**
     * Returns the path to the destination, which for a knight is just the
     * destination cell itself (it jumps over other figures).
     *
     * @throws ImpossibleMoveException if the move is not a legal knight move
     */
    @Override
    public Cell[] way(Cell source, Cell dest) throws ImpossibleMoveException {
        if (!this.isMove(source, dest)) {
            throw new ImpossibleMoveException();
        }
        Cell[] steps = new Cell[]{dest};
        return steps;
    }

    @Override
    public Figure copy(Cell dest) {
        return new KnightBlack(dest);
    }

    /**
     * A knight moves in an L shape: two squares along one axis and one along
     * the other, i.e. the absolute offsets are exactly {1, 2}. The previous
     * check (|dx - dy| == 1 combined with dx == 2 or dy == 2) also accepted
     * the illegal (2,3) and (3,2) offsets; |dx| * |dy| == 2 holds for the
     * legal (1,2)/(2,1) pairs only.
     */
    @Override
    public boolean isMove(Cell source, Cell dest) {
        int deltax = Math.abs(source.getX() - dest.getX());
        int deltay = Math.abs(source.getY() - dest.getY());
        return deltax * deltay == 2;
    }
}
| apache-2.0 |
lvweiwolf/poi-3.16 | src/scratchpad/src/org/apache/poi/hemf/hemfplus/record/HemfPlusRecordType.java | 5033 | /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hemf.hemfplus.record;
import org.apache.poi.util.Internal;
@Internal
/**
 * EMF+ record types as defined by the [MS-EMFPLUS] RecordType enumeration.
 * Each constant pairs the on-disk record id with the class that parses it;
 * everything except the header is currently unimplemented.
 */
public enum HemfPlusRecordType {
    header(0x4001, HemfPlusHeader.class),
    endOfFile(0x4002, UnimplementedHemfPlusRecord.class),
    comment(0x4003, UnimplementedHemfPlusRecord.class),
    getDC(0x4004, UnimplementedHemfPlusRecord.class),
    multiFormatStart(0x4005, UnimplementedHemfPlusRecord.class),
    multiFormatSection(0x4006, UnimplementedHemfPlusRecord.class),
    multiFormatEnd(0x4007, UnimplementedHemfPlusRecord.class),
    object(0x4008, UnimplementedHemfPlusRecord.class),
    clear(0x4009, UnimplementedHemfPlusRecord.class),
    fillRects(0x400A, UnimplementedHemfPlusRecord.class),
    drawRects(0x400B, UnimplementedHemfPlusRecord.class),
    fillPolygon(0x400C, UnimplementedHemfPlusRecord.class),
    drawLines(0x400D, UnimplementedHemfPlusRecord.class),
    fillEllipse(0x400E, UnimplementedHemfPlusRecord.class),
    drawEllipse(0x400F, UnimplementedHemfPlusRecord.class),
    fillPie(0x4010, UnimplementedHemfPlusRecord.class),
    drawPie(0x4011, UnimplementedHemfPlusRecord.class),
    drawArc(0x4012, UnimplementedHemfPlusRecord.class),
    fillRegion(0x4013, UnimplementedHemfPlusRecord.class),
    fillPath(0x4014, UnimplementedHemfPlusRecord.class),
    drawPath(0x4015, UnimplementedHemfPlusRecord.class),
    fillClosedCurve(0x4016, UnimplementedHemfPlusRecord.class),
    drawClosedCurve(0x4017, UnimplementedHemfPlusRecord.class),
    drawCurve(0x4018, UnimplementedHemfPlusRecord.class),
    drawBeziers(0x4019, UnimplementedHemfPlusRecord.class),
    drawImage(0x401A, UnimplementedHemfPlusRecord.class),
    drawImagePoints(0x401B, UnimplementedHemfPlusRecord.class),
    drawString(0x401C, UnimplementedHemfPlusRecord.class),
    setRenderingOrigin(0x401D, UnimplementedHemfPlusRecord.class),
    setAntiAliasMode(0x401E, UnimplementedHemfPlusRecord.class),
    setTextRenderingHint(0x401F, UnimplementedHemfPlusRecord.class),
    setTextContrast(0x4020, UnimplementedHemfPlusRecord.class),
    setInterpolationMode(0x4021, UnimplementedHemfPlusRecord.class),
    setPixelOffsetMode(0x4022, UnimplementedHemfPlusRecord.class),
    setComositingMode(0x4023, UnimplementedHemfPlusRecord.class),
    setCompositingQuality(0x4024, UnimplementedHemfPlusRecord.class),
    save(0x4025, UnimplementedHemfPlusRecord.class),
    restore(0x4026, UnimplementedHemfPlusRecord.class),
    beginContainer(0x4027, UnimplementedHemfPlusRecord.class),
    // Fixed: was 0x428, a typo — [MS-EMFPLUS] defines
    // EmfPlusBeginContainerNoParams as 0x4028, so getById(0x4028) used to
    // return null for valid files.
    beginContainerNoParams(0x4028, UnimplementedHemfPlusRecord.class),
    endContainer(0x4029, UnimplementedHemfPlusRecord.class),
    setWorldTransform(0x402A, UnimplementedHemfPlusRecord.class),
    resetWorldTransform(0x402B, UnimplementedHemfPlusRecord.class),
    multiplyWorldTransform(0x402C, UnimplementedHemfPlusRecord.class),
    translateWorldTransform(0x402D, UnimplementedHemfPlusRecord.class),
    scaleWorldTransform(0x402E, UnimplementedHemfPlusRecord.class),
    rotateWorldTransform(0x402F, UnimplementedHemfPlusRecord.class),
    setPageTransform(0x4030, UnimplementedHemfPlusRecord.class),
    resetClip(0x4031, UnimplementedHemfPlusRecord.class),
    setClipRect(0x4032, UnimplementedHemfPlusRecord.class),
    setClipRegion(0x4033, UnimplementedHemfPlusRecord.class),
    setClipPath(0x4034, UnimplementedHemfPlusRecord.class),
    offsetClip(0x4035, UnimplementedHemfPlusRecord.class),
    drawDriverstring(0x4036, UnimplementedHemfPlusRecord.class),
    strokeFillPath(0x4037, UnimplementedHemfPlusRecord.class),
    serializableObject(0x4038, UnimplementedHemfPlusRecord.class),
    setTSGraphics(0x4039, UnimplementedHemfPlusRecord.class),
    setTSClip(0x403A, UnimplementedHemfPlusRecord.class);

    /** On-disk record id from the [MS-EMFPLUS] RecordType enumeration. */
    public final long id;
    /** Record implementation class used to parse this record type. */
    public final Class<? extends HemfPlusRecord> clazz;

    HemfPlusRecordType(long id, Class<? extends HemfPlusRecord> clazz) {
        this.id = id;
        this.clazz = clazz;
    }

    /**
     * Looks up the record type by its on-disk id.
     *
     * @return the matching type, or {@code null} when the id is unknown
     */
    public static HemfPlusRecordType getById(long id) {
        for (HemfPlusRecordType wrt : values()) {
            if (wrt.id == id) return wrt;
        }
        return null;
    }
}
| apache-2.0 |
hanFengSan/light | app/src/main/java/com/yakami/light/view/fragment/AboutFragment.java | 958 | package com.yakami.light.view.fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.yakami.light.BuildConfig;
import com.yakami.light.R;
import com.yakami.light.view.fragment.base.BaseFragment;
import butterknife.Bind;
import butterknife.ButterKnife;
/**
* Created by Yakami on 2016/8/5, enjoying it!
*/
public class AboutFragment extends BaseFragment {
@Bind(R.id.tv_about) TextView mAbout;
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_about, container, false);
ButterKnife.bind(this,view);
mAbout.setText("版本: " + BuildConfig.VERSION_NAME + "\n" + mRes.getString(R.string.author));
mAbout.append("\ngithub地址:https://github.com/hanFengSan/light");
return view;
}
}
| apache-2.0 |
kalyanreddyemani/opencga | opencga-catalog/src/test/java/org/opencb/opencga/catalog/CatalogManagerTest.java | 89707 | /*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.catalog;
import com.mongodb.BasicDBObject;
import org.junit.*;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
import static org.opencb.opencga.catalog.db.api.CatalogSampleDBAdaptor.SampleFilterOption.*;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.opencb.commons.test.GenericTest;
import org.opencb.datastore.core.ObjectMap;
import org.opencb.datastore.core.QueryOptions;
import org.opencb.datastore.core.QueryResult;
import org.opencb.datastore.core.config.DataStoreServerAddress;
import org.opencb.datastore.mongodb.MongoDataStore;
import org.opencb.datastore.mongodb.MongoDataStoreManager;
import org.opencb.opencga.catalog.authentication.CatalogAuthenticationManager;
import org.opencb.opencga.catalog.db.api.CatalogFileDBAdaptor;
import org.opencb.opencga.catalog.db.api.CatalogSampleDBAdaptor;
import org.opencb.opencga.catalog.exceptions.CatalogException;
import org.opencb.opencga.catalog.io.CatalogIOManager;
import org.opencb.opencga.catalog.utils.CatalogAnnotationsValidatorTest;
import org.opencb.opencga.catalog.utils.CatalogFileUtils;
import org.opencb.opencga.catalog.models.*;
import org.opencb.opencga.catalog.models.File;
import org.opencb.opencga.catalog.exceptions.CatalogDBException;
import org.opencb.opencga.core.common.StringUtils;
import org.opencb.opencga.core.common.TimeUtils;
import java.io.*;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.stream.Collectors;
public class CatalogManagerTest extends GenericTest {
public final static String PASSWORD = "asdf";
protected CatalogManager catalogManager;
protected String sessionIdUser;
protected String sessionIdUser2;
protected String sessionIdUser3;
@Rule
public ExpectedException thrown = ExpectedException.none();
private File testFolder;
@Before
public void setUp() throws IOException, CatalogException {
InputStream is = CatalogManagerTest.class.getClassLoader().getResourceAsStream("catalog.properties");
Properties properties = new Properties();
properties.load(is);
clearCatalog(properties);
catalogManager = new CatalogManager(properties);
catalogManager.createUser("user", "User Name", "mail@ebi.ac.uk", PASSWORD, "", null);
catalogManager.createUser("user2", "User2 Name", "mail2@ebi.ac.uk", PASSWORD, "", null);
catalogManager.createUser("user3", "User3 Name", "user.2@e.mail", PASSWORD, "ACME", null);
sessionIdUser = catalogManager.login("user", PASSWORD, "127.0.0.1").first().getString("sessionId");
sessionIdUser2 = catalogManager.login("user2", PASSWORD, "127.0.0.1").first().getString("sessionId");
sessionIdUser3 = catalogManager.login("user3", PASSWORD, "127.0.0.1").first().getString("sessionId");
Project project1 = catalogManager.createProject("user", "Project about some genomes", "1000G", "", "ACME", null, sessionIdUser).first();
Project project2 = catalogManager.createProject("user2", "Project Management Project", "pmp", "life art intelligent system", "myorg", null, sessionIdUser2).first();
Project project3 = catalogManager.createProject("user3", "project 1", "p1", "", "", null, sessionIdUser3).first();
int studyId = catalogManager.createStudy(project1.getId(), "Phase 1", "phase1", Study.Type.TRIO, "Done", sessionIdUser).first().getId();
int studyId2 = catalogManager.createStudy(project1.getId(), "Phase 3", "phase3", Study.Type.CASE_CONTROL, "d", sessionIdUser).first().getId();
int studyId3 = catalogManager.createStudy(project2.getId(), "Study 1", "s1", Study.Type.CONTROL_SET, "", sessionIdUser2).first().getId();
catalogManager.createFolder(studyId2, Paths.get("data/test/folder/"), true, null, sessionIdUser);
testFolder = catalogManager.createFolder(studyId, Paths.get("data/test/folder/"), true, null, sessionIdUser).first();
ObjectMap attributes = new ObjectMap();
attributes.put("field", "value");
attributes.put("numValue", 5);
catalogManager.modifyFile(testFolder.getId(), new ObjectMap("attributes", attributes), sessionIdUser);
File fileTest1k = catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE,
testFolder.getPath() + "test_1K.txt.gz",
StringUtils.randomString(1000).getBytes(), "", false, sessionIdUser).first();
attributes = new ObjectMap();
attributes.put("field", "value");
attributes.put("name", "fileTest1k");
attributes.put("numValue", "10");
attributes.put("boolean", false);
catalogManager.modifyFile(fileTest1k.getId(), new ObjectMap("attributes", attributes), sessionIdUser);
File fileTest05k = catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE,
testFolder.getPath() + "test_0.5K.txt",
StringUtils.randomString(500).getBytes(), "", false, sessionIdUser).first();
attributes = new ObjectMap();
attributes.put("field", "valuable");
attributes.put("name", "fileTest05k");
attributes.put("numValue", 5);
attributes.put("boolean", true);
catalogManager.modifyFile(fileTest05k.getId(), new ObjectMap("attributes", attributes), sessionIdUser);
File test01k = catalogManager.createFile(studyId, File.Format.IMAGE, File.Bioformat.NONE,
testFolder.getPath() + "test_0.1K.png",
StringUtils.randomString(100).getBytes(), "", false, sessionIdUser).first();
attributes = new ObjectMap();
attributes.put("field", "other");
attributes.put("name", "test01k");
attributes.put("numValue", 50);
attributes.put("nested", new ObjectMap("num1", 45).append("num2", 33).append("text", "HelloWorld"));
catalogManager.modifyFile(test01k.getId(), new ObjectMap("attributes", attributes), sessionIdUser);
Set<Variable> variables = new HashSet<>();
variables.addAll(Arrays.asList(
new Variable("NAME", "", Variable.VariableType.TEXT, "", true, false, Collections.<String>emptyList(), 0, "", "", null, Collections.<String, Object>emptyMap()),
new Variable("AGE", "", Variable.VariableType.NUMERIC, null, true, false, Collections.singletonList("0:130"), 1, "", "", null, Collections.<String, Object>emptyMap()),
new Variable("HEIGHT", "", Variable.VariableType.NUMERIC, "1.5", false, false, Collections.singletonList("0:"), 2, "", "", null, Collections.<String, Object>emptyMap()),
new Variable("ALIVE", "", Variable.VariableType.BOOLEAN, "", true, false, Collections.<String>emptyList(), 3, "", "", null, Collections.<String, Object>emptyMap()),
new Variable("PHEN", "", Variable.VariableType.CATEGORICAL, "", true, false, Arrays.asList("CASE", "CONTROL"), 4, "", "", null, Collections.<String, Object>emptyMap())
));
VariableSet vs = catalogManager.createVariableSet(studyId, "vs", true, "", null, variables, sessionIdUser).first();
int s_1 = catalogManager.createSample(studyId, "s_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
int s_2 = catalogManager.createSample(studyId, "s_2", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
int s_3 = catalogManager.createSample(studyId, "s_3", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
int s_4 = catalogManager.createSample(studyId, "s_4", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
int s_5 = catalogManager.createSample(studyId, "s_5", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
int s_6 = catalogManager.createSample(studyId, "s_6", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
int s_7 = catalogManager.createSample(studyId, "s_7", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
int s_8 = catalogManager.createSample(studyId, "s_8", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
int s_9 = catalogManager.createSample(studyId, "s_9", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
catalogManager.annotateSample(s_1, "annot1", vs.getId(), new ObjectMap("NAME", "s_1").append("AGE", 6).append("ALIVE", true).append("PHEN", "CONTROL"), null, true, sessionIdUser);
catalogManager.annotateSample(s_2, "annot1", vs.getId(), new ObjectMap("NAME", "s_2").append("AGE", 10).append("ALIVE", false).append("PHEN", "CASE"), null, true, sessionIdUser);
catalogManager.annotateSample(s_3, "annot1", vs.getId(), new ObjectMap("NAME", "s_3").append("AGE", 15).append("ALIVE", true).append("PHEN", "CONTROL"), null, true, sessionIdUser);
catalogManager.annotateSample(s_4, "annot1", vs.getId(), new ObjectMap("NAME", "s_4").append("AGE", 22).append("ALIVE", false).append("PHEN", "CONTROL"), null, true, sessionIdUser);
catalogManager.annotateSample(s_5, "annot1", vs.getId(), new ObjectMap("NAME", "s_5").append("AGE", 29).append("ALIVE", true).append("PHEN", "CASE"), null, true, sessionIdUser);
catalogManager.annotateSample(s_6, "annot2", vs.getId(), new ObjectMap("NAME", "s_6").append("AGE", 38).append("ALIVE", true).append("PHEN", "CONTROL"), null, true, sessionIdUser);
catalogManager.annotateSample(s_7, "annot2", vs.getId(), new ObjectMap("NAME", "s_7").append("AGE", 46).append("ALIVE", false).append("PHEN", "CASE"), null, true, sessionIdUser);
catalogManager.annotateSample(s_8, "annot2", vs.getId(), new ObjectMap("NAME", "s_8").append("AGE", 72).append("ALIVE", true).append("PHEN", "CONTROL"), null, true, sessionIdUser);
catalogManager.modifyFile(test01k.getId(), new ObjectMap("sampleIds", Arrays.asList(s_1, s_2, s_3, s_4, s_5)), sessionIdUser);
}
@After
public void tearDown() throws Exception {
    // Close every session opened during setUp. A null session means the
    // corresponding login never happened (e.g. setUp failed early).
    // NOTE(review): if an early logout throws, the remaining logouts are
    // skipped and those sessions leak — consider a try/finally per logout.
    if(sessionIdUser != null) {
        catalogManager.logout("user", sessionIdUser);
    }
    if(sessionIdUser2 != null) {
        catalogManager.logout("user2", sessionIdUser2);
    }
    if(sessionIdUser3 != null) {
        catalogManager.logout("user3", sessionIdUser3);
    }
    // catalogManager.close();
}
/**
 * Exposes the {@link CatalogManager} under test so other test classes can
 * reuse the fixtures set up here.
 */
public CatalogManager getTestCatalogManager() {
    return catalogManager;
}
@Test
public void testAdminUserExists() throws Exception {
    // Logging in as "admin" with the literal string "admin" as password must
    // fail — presumably the stored password is ciphered (see testAdminUserExists2).
    thrown.expect(CatalogException.class);
    catalogManager.login("admin", "admin", null);
}
@Test
public void testAdminUserExists2() throws Exception {
    // NOTE(review): thrown.expect makes the login call below the expected
    // failure point, so the getUser call and the role assertion after it can
    // never execute — they are dead code. Confirm whether the intent was to
    // assert the ADMIN role (then drop the expect) or just the failed login
    // (then drop the unreachable assertions).
    thrown.expect(CatalogException.class);
    QueryResult<ObjectMap> login = catalogManager.login("admin", CatalogAuthenticationManager.cipherPassword("admin"), null);
    User admin = catalogManager.getUser("admin", null, login.first().getString("sessionId")).first();
    assertEquals(User.Role.ADMIN, admin.getRole());
}
@Test
public void testCreateExistingUser() throws Exception {
    // The user id "user" is already taken, so creating it again must fail.
    thrown.expect(CatalogException.class);
    catalogManager.createUser("user", "User Name", "mail@ebi.ac.uk", PASSWORD, "", null);
}
@Test
public void testLoginAsAnonymous() throws Exception {
    // Smoke test: anonymous login should not throw. The result is only
    // printed — nothing is asserted.
    System.out.println(catalogManager.loginAsAnonymous("127.0.0.1"));
}
@Test
public void testLogin() throws Exception {
    // A valid login yields a session object.
    QueryResult<ObjectMap> loginResult = catalogManager.login("user", PASSWORD, "127.0.0.1");
    System.out.println(loginResult.first().toJson());

    // A wrong password must be rejected.
    thrown.expect(CatalogException.class);
    catalogManager.login("user", "fakePassword", "127.0.0.1");
}
@Test
public void testLogoutAnonymous() throws Exception {
    // An anonymous session can be closed again with the sessionId it was given.
    QueryResult<ObjectMap> anonymousLogin = catalogManager.loginAsAnonymous("127.0.0.1");
    String sessionId = anonymousLogin.first().getString("sessionId");
    catalogManager.logoutAnonymous(sessionId);
}
@Test
public void testGetUserInfo() throws CatalogException {
    // A plain fetch returns the full user document.
    QueryResult<User> fullResult = catalogManager.getUser("user", null, sessionIdUser);
    System.out.println("user = " + fullResult);

    // Passing the current lastActivity acts as an if-modified-since filter:
    // nothing has changed, so the result set is empty.
    QueryResult<User> unchangedResult = catalogManager.getUser("user", fullResult.first().getLastActivity(), sessionIdUser);
    System.out.println("userVoid = " + unchangedResult);
    assertTrue(unchangedResult.getResult().isEmpty());

    // Another user's session must not be able to read this user's info.
    try {
        catalogManager.getUser("user", null, sessionIdUser2);
        fail();
    } catch (CatalogException e) {
        System.out.println(e);
    }
}
@Test
public void testModifyUser() throws CatalogException, InterruptedException {
    // Update name + attributes, then email and password, and verify the
    // changes are persisted and that lastActivity was bumped.
    ObjectMap params = new ObjectMap();
    String newName = "Changed Name " + StringUtils.randomString(10);
    String newPassword = StringUtils.randomString(10);
    String newEmail = "new@email.ac.uk";

    params.put("name", newName);
    ObjectMap attributes = new ObjectMap("myBoolean", true);
    attributes.put("value", 6);
    attributes.put("object", new BasicDBObject("id", 1234));
    params.put("attributes", attributes);

    User userPre = catalogManager.getUser("user", null, sessionIdUser).first();
    System.out.println("userPre = " + userPre);
    Thread.sleep(10); // ensure lastActivity (millisecond resolution) can differ

    catalogManager.modifyUser("user", params, sessionIdUser);
    catalogManager.changeEmail("user", newEmail, sessionIdUser);
    catalogManager.changePassword("user", PASSWORD, newPassword, sessionIdUser);

    // Fetching with the previous lastActivity must now return the changed user.
    List<User> userList = catalogManager.getUser("user", userPre.getLastActivity(), new QueryOptions("exclude", Arrays.asList("sessions")), sessionIdUser).getResult();
    if(userList.isEmpty()){
        fail("Error. LastActivity should have changed");
    }
    User userPost = userList.get(0);
    System.out.println("userPost = " + userPost);
    // FIX: assertFalse instead of assertTrue(!...); expected value passed first
    // so JUnit failure messages read correctly.
    assertFalse(userPre.getLastActivity().equals(userPost.getLastActivity()));
    assertEquals(newName, userPost.getName());
    assertEquals(newEmail, userPost.getEmail());
    assertEquals(CatalogAuthenticationManager.cipherPassword(newPassword), userPost.getPassword());
    for (Map.Entry<String, Object> entry : attributes.entrySet()) {
        assertEquals(entry.getValue(), userPost.getAttributes().get(entry.getKey()));
    }

    // Restore the original password so later tests can still log in.
    catalogManager.changePassword("user", newPassword, PASSWORD, sessionIdUser);

    // Changing the password through modifyUser is forbidden.
    try {
        params = new ObjectMap();
        params.put("password", "1234321");
        catalogManager.modifyUser("user", params, sessionIdUser);
        fail("Expected exception");
    } catch (CatalogDBException e){
        System.out.println(e);
    }

    // Modifying a user with another user's session is forbidden.
    try {
        catalogManager.modifyUser("user", params, sessionIdUser2);
        fail("Expected exception");
    } catch (CatalogException e){
        System.out.println(e);
    }
}
/**
* Project methods
* ***************************
*/
@Test
public void testCreateAnonymousProject() throws IOException, CatalogException {
    // An anonymous session is allowed to own a project for its lifetime.
    QueryResult<ObjectMap> anonymousLogin = catalogManager.loginAsAnonymous("127.0.0.1");
    String sessionId = anonymousLogin.first().getString("sessionId");
    String anonymousUserId = catalogManager.getUserIdBySessionId(sessionId);
    catalogManager.createProject(anonymousUserId, "Project", "project", "", "", null, sessionId);
    catalogManager.logoutAnonymous(sessionId);
}
@Test
public void testGetAllProjects() throws Exception {
    // With his own session, "user" sees exactly one project...
    QueryResult<Project> ownView = catalogManager.getAllProjects("user", null, sessionIdUser);
    assertEquals(1, ownView.getNumResults());

    // ...while user2's session has no visibility on "user"'s projects.
    QueryResult<Project> foreignView = catalogManager.getAllProjects("user", null, sessionIdUser2);
    assertEquals(0, foreignView.getNumResults());
}
@Test
public void testCreateProject() throws Exception {
    // A project alias must be unique per user: creating a second project with
    // the same alias is expected to fail.
    String projectAlias = "projectAlias_ASDFASDF";
    catalogManager.createProject("user", "Project", projectAlias, "", "", null, sessionIdUser);
    thrown.expect(CatalogDBException.class);
    catalogManager.createProject("user", "Project", projectAlias, "", "", null, sessionIdUser);
}
@Test
public void testModifyProject() throws CatalogException {
    // Rename the user's first project and attach custom attributes, then
    // verify the persisted result. Finally check permission enforcement.
    String updatedName = "ProjectName " + StringUtils.randomString(10);
    int projectId = catalogManager.getUser("user", null, sessionIdUser).first().getProjects().get(0).getId();

    ObjectMap customAttributes = new ObjectMap("myBoolean", true);
    customAttributes.put("value", 6);
    customAttributes.put("object", new BasicDBObject("id", 1234));

    ObjectMap updateParams = new ObjectMap();
    updateParams.put("name", updatedName);
    updateParams.put("attributes", customAttributes);
    catalogManager.modifyProject(projectId, updateParams, sessionIdUser);

    QueryResult<Project> projectResult = catalogManager.getProject(projectId, null, sessionIdUser);
    Project updatedProject = projectResult.first();
    System.out.println(projectResult);
    assertEquals(updatedName, updatedProject.getName());
    for (Map.Entry<String, Object> entry : customAttributes.entrySet()) {
        assertEquals(updatedProject.getAttributes().get(entry.getKey()), entry.getValue());
    }

    // The alias can be changed by the owner...
    updateParams = new ObjectMap();
    updateParams.put("alias", "newProjectAlias");
    catalogManager.modifyProject(projectId, updateParams, sessionIdUser);

    // ...but not through another user's session.
    try {
        catalogManager.modifyProject(projectId, updateParams, sessionIdUser2);
        fail("Expected 'Permission denied' exception");
    } catch (CatalogDBException e){
        System.out.println(e);
    }
}
/**
* Study methods
* ***************************
*/
@Test
public void testModifyStudy() throws Exception {
    // Update the study's name, description and attributes, then verify that
    // all three were persisted.
    int projectId = catalogManager.getAllProjects("user", null, sessionIdUser).first().getId();
    int studyId = catalogManager.getAllStudies(projectId, null, sessionIdUser).first().getId();

    String updatedName = "Phase 1 "+ StringUtils.randomString(20);
    String updatedDescription = StringUtils.randomString(500);
    BasicDBObject customAttributes = new BasicDBObject("key", "value");

    ObjectMap updateParams = new ObjectMap();
    updateParams.put("name", updatedName);
    updateParams.put("description", updatedDescription);
    updateParams.put("attributes", customAttributes);
    catalogManager.modifyStudy(studyId, updateParams, sessionIdUser);

    QueryResult<Study> studyResult = catalogManager.getStudy(studyId, sessionIdUser);
    System.out.println(studyResult);
    Study updatedStudy = studyResult.first();
    assertEquals(updatedStudy.getName(), updatedName);
    assertEquals(updatedStudy.getDescription(), updatedDescription);
    for (Map.Entry<String, Object> entry : customAttributes.entrySet()) {
        assertEquals(updatedStudy.getAttributes().get(entry.getKey()), entry.getValue());
    }
}
/**
* File methods
* ***************************
*/
@Test
public void testDeleteDataFromStudy() throws Exception {
    // TODO: empty placeholder — this test has no body and asserts nothing yet.
}
@Test
public void testCreateFolder() throws Exception {
    int projectId = catalogManager.getAllProjects("user2", null, sessionIdUser2).first().getId();
    int studyId = catalogManager.getAllStudies(projectId, null, sessionIdUser2).first().getId();

    // Initially the study only contains its three default folders.
    Set<String> folderPaths = new HashSet<>();
    for (File f : catalogManager.getAllFiles(studyId, new QueryOptions("type", File.Type.FOLDER), sessionIdUser2).getResult()) {
        folderPaths.add(f.getPath());
    }
    assertEquals(3, folderPaths.size());
    assertTrue(folderPaths.contains(""));          // root
    assertTrue(folderPaths.contains("data/"));     // data
    assertTrue(folderPaths.contains("analysis/")); // analysis

    // Creating a nested folder (boolean flag true — presumably create-parents)
    // also materializes the intermediate "data/new/" folder.
    Path newFolder = Paths.get("data", "new", "folder");
    File folder = catalogManager.createFolder(studyId, newFolder, true, null, sessionIdUser2).first();

    folderPaths = new HashSet<>();
    for (File f : catalogManager.getAllFiles(studyId, new QueryOptions("type", File.Type.FOLDER), sessionIdUser2).getResult()) {
        folderPaths.add(f.getPath());
    }
    assertEquals(5, folderPaths.size());
    assertTrue(folderPaths.contains("data/new/"));
    assertTrue(folderPaths.contains("data/new/folder/"));
}
@Test
public void testCreateAndUpload() throws Exception {
    // Exercises the create-then-upload flow with both "delete source after
    // upload" settings, byte[]-backed creation, and URI (move) creation.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    int studyId2 = catalogManager.getStudyId("user@1000G:phase3");
    CatalogFileUtils catalogFileUtils = new CatalogFileUtils(catalogManager);

    java.io.File fileTest;
    String fileName = "item." + TimeUtils.getTimeMillis() + ".vcf";

    // Upload with the delete-source flag set: the local file must disappear.
    QueryResult<File> fileResult = catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.VARIANT, "data/" + fileName, "description", true, -1, sessionIdUser);
    fileTest = createDebugFile();
    catalogFileUtils.upload(fileTest.toURI(), fileResult.first(), null, sessionIdUser, false, false, true, true);
    assertFalse("File deleted", fileTest.exists()); // FIX: assertFalse over assertTrue(!...)

    // Same upload without the delete flag: the local file must survive.
    fileName = "item." + TimeUtils.getTimeMillis() + ".vcf";
    fileResult = catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.VARIANT, "data/" + fileName, "description", true, -1, sessionIdUser);
    fileTest = createDebugFile();
    catalogFileUtils.upload(fileTest.toURI(), fileResult.first(), null, sessionIdUser, false, false, false, true);
    assertTrue("File should not be deleted", fileTest.exists()); // FIX: message grammar ("File don't deleted")
    assertTrue(fileTest.delete());

    // Creating directly from a byte[] leaves the file READY with the right size.
    fileName = "item." + TimeUtils.getTimeMillis() + ".txt";
    fileResult = catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "data/" + fileName,
            StringUtils.randomString(200).getBytes(), "description", true, sessionIdUser);
    // FIX: assertEquals instead of assertTrue("", a == b) — failures now report
    // the actual values instead of an empty message.
    assertEquals(File.Status.READY, fileResult.first().getStatus());
    assertEquals(200, fileResult.first().getDiskUsage());

    // Upload into a nested folder of the second study, deleting the source.
    fileName = "item." + TimeUtils.getTimeMillis() + ".vcf";
    fileTest = createDebugFile();
    QueryResult<File> fileQueryResult = catalogManager.createFile(
            studyId2, File.Format.PLAIN, File.Bioformat.VARIANT, "data/deletable/folder/" + fileName, "description", true, -1, sessionIdUser);
    catalogFileUtils.upload(fileTest.toURI(), fileQueryResult.first(), null, sessionIdUser, false, false, true, true);
    assertFalse("File deleted by the upload", fileTest.delete()); // delete() fails: file already gone

    fileName = "item." + TimeUtils.getTimeMillis() + ".vcf";
    fileTest = createDebugFile();
    fileQueryResult = catalogManager.createFile(
            studyId2, File.Format.PLAIN, File.Bioformat.VARIANT, "data/deletable/" + fileName, "description", true, -1, sessionIdUser);
    catalogFileUtils.upload(fileTest.toURI(), fileQueryResult.first(), null, sessionIdUser, false, false, false, true);
    assertTrue(fileTest.delete());

    // Files can also live at the study root.
    fileName = "item." + TimeUtils.getTimeMillis() + ".vcf";
    fileTest = createDebugFile();
    fileQueryResult = catalogManager.createFile(
            studyId2, File.Format.PLAIN, File.Bioformat.VARIANT, "" + fileName, "file at root", true, -1, sessionIdUser);
    catalogFileUtils.upload(fileTest.toURI(), fileQueryResult.first(), null, sessionIdUser, false, false, false, true);
    assertTrue(fileTest.delete());

    // Creating from a URI moves the source file and records its size.
    fileName = "item." + TimeUtils.getTimeMillis() + ".vcf";
    fileTest = createDebugFile();
    long size = Files.size(fileTest.toPath());
    fileQueryResult = catalogManager.createFile(studyId2, File.Format.PLAIN, File.Bioformat.VARIANT, "" + fileName,
            fileTest.toURI(), "file at root", true, sessionIdUser);
    assertFalse("File should be moved", fileTest.exists()); // FIX: assertFalse over assertTrue(!...)
    assertEquals(size, fileQueryResult.first().getDiskUsage());
}
@Test
public void testDownloadAndHeadFile() throws CatalogException, IOException, InterruptedException {
    // Upload a generated VCF, then check that (a) a full download matches the
    // source bytes and (b) an offset/limit download returns the right lines.
    int projectId = catalogManager.getAllProjects("user", null, sessionIdUser).first().getId();
    int studyId = catalogManager.getAllStudies(projectId, null, sessionIdUser).first().getId();
    CatalogFileUtils catalogFileUtils = new CatalogFileUtils(catalogManager);

    String fileName = "item." + TimeUtils.getTimeMillis() + ".vcf";
    java.io.File fileTest;
    InputStream is = new FileInputStream(fileTest = createDebugFile());
    File file = catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.VARIANT, "data/" + fileName, "description", true, -1, sessionIdUser).first();
    catalogFileUtils.upload(is, file, sessionIdUser, false, false, true);
    is.close();

    // Full download: the first 100 bytes must match the original file.
    byte[] bytes = new byte[100];
    byte[] bytesOrig = new byte[100];
    DataInputStream fis = new DataInputStream(new FileInputStream(fileTest));
    DataInputStream dis = catalogManager.downloadFile(file.getId(), sessionIdUser);
    // FIX: plain read() may return fewer bytes than requested and its return
    // value was ignored; readFully guarantees all 100 bytes (or throws EOF).
    fis.readFully(bytesOrig, 0, 100);
    dis.readFully(bytes, 0, 100);
    fis.close();
    dis.close();
    assertArrayEquals(bytesOrig, bytes);

    // Partial download: skip `offset` lines, then compare line by line.
    int offset = 5;
    int limit = 30;
    dis = catalogManager.downloadFile(file.getId(), offset, limit, sessionIdUser);
    fis = new DataInputStream(new FileInputStream(fileTest));
    for (int i = 0; i < offset; i++) {
        fis.readLine(); // DataInputStream.readLine is deprecated, but kept to mirror downloadFile's stream
    }
    String line;
    int lines = 0;
    while ((line = dis.readLine()) != null) {
        lines++;
        System.out.println(line);
        assertEquals(fis.readLine(), line);
    }
    // NOTE(review): this expects `limit` to behave as an end line (limit-offset
    // lines returned) rather than a line count — confirm downloadFile's contract.
    assertEquals(limit-offset, lines);
    fis.close();
    dis.close();
    fileTest.delete();
}
@Test
public void testDownloadFile() throws CatalogException, IOException, InterruptedException {
    // A file created from a byte[] must download byte-identically.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    String fileName = "item." + TimeUtils.getTimeMillis() + ".vcf";
    int fileSize = 200;
    byte[] bytesOrig = StringUtils.randomString(fileSize).getBytes();
    File file = catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "data/" + fileName,
            bytesOrig, "description", true, sessionIdUser).first();

    DataInputStream dis = catalogManager.downloadFile(file.getId(), sessionIdUser);
    byte[] bytes = new byte[fileSize];
    // FIX: read() may return fewer than fileSize bytes and its return value was
    // ignored; readFully guarantees the whole buffer is filled (or throws EOF).
    dis.readFully(bytes, 0, fileSize);
    dis.close(); // FIX: the stream was never closed
    // FIX: assertArrayEquals reports the differing index on failure and matches
    // the style used in testDownloadAndHeadFile.
    assertArrayEquals(bytesOrig, bytes);
}
@Test
public void renameFileTest() throws CatalogException, IOException {
    // Renaming a folder must rewrite the paths of everything underneath it.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "data/file.txt",
            StringUtils.randomString(200).getBytes(), "description", true, sessionIdUser);
    catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "data/nested/folder/file2.txt",
            StringUtils.randomString(200).getBytes(), "description", true, sessionIdUser);

    catalogManager.renameFile(catalogManager.getFileId("user@1000G:phase1:data/nested/"), "nested2", sessionIdUser);
    Set<String> allPaths = new HashSet<>();
    for (File f : catalogManager.getAllFiles(studyId, null, sessionIdUser).getResult()) {
        allPaths.add(f.getPath());
    }
    assertTrue(allPaths.contains("data/nested2/"));
    assertFalse(allPaths.contains("data/nested/"));
    assertTrue(allPaths.contains("data/nested2/folder/"));
    assertTrue(allPaths.contains("data/nested2/folder/file2.txt"));
    assertTrue(allPaths.contains("data/file.txt"));

    // Renaming the top-level folder cascades the same way.
    catalogManager.renameFile(catalogManager.getFileId("user@1000G:phase1:data/"), "Data", sessionIdUser);
    allPaths = new HashSet<>();
    for (File f : catalogManager.getAllFiles(studyId, null, sessionIdUser).getResult()) {
        allPaths.add(f.getPath());
    }
    assertTrue(allPaths.contains("Data/"));
    assertTrue(allPaths.contains("Data/file.txt"));
    assertTrue(allPaths.contains("Data/nested2/"));
    assertTrue(allPaths.contains("Data/nested2/folder/"));
    assertTrue(allPaths.contains("Data/nested2/folder/file2.txt"));
}
@Test
public void searchFileTest() throws CatalogException, IOException {
    // Exercises the file-search filters against the fixture files created in
    // setUp; all expected counts are tied to those fixtures.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    QueryOptions options;
    QueryResult<File> result;

    // name filter: "~" denotes a regex match; one name starts with "data".
    options = new QueryOptions(CatalogFileDBAdaptor.FileFilterOption.name.toString(), "~^data");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    assertEquals(1, result.getNumResults());

    //Folder "jobs" does not exist
    options = new QueryOptions(CatalogFileDBAdaptor.FileFilterOption.directory.toString(), "jobs");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    assertEquals(0, result.getNumResults());

    //Get all files in data
    options = new QueryOptions(CatalogFileDBAdaptor.FileFilterOption.directory.toString(), "data/");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    assertEquals(1, result.getNumResults());

    //Get all files in data recursively
    options = new QueryOptions(CatalogFileDBAdaptor.FileFilterOption.directory.toString(), "data/.*");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    assertEquals(5, result.getNumResults());

    // type filter: plain files vs folders.
    options = new QueryOptions(CatalogFileDBAdaptor.FileFilterOption.type.toString(), "FILE");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    result.getResult().forEach(f -> assertEquals(File.Type.FILE, f.getType()));
    int numFiles = result.getNumResults();
    assertEquals(3, numFiles);

    options = new QueryOptions(CatalogFileDBAdaptor.FileFilterOption.type.toString(), "FOLDER");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    result.getResult().forEach(f -> assertEquals(File.Type.FOLDER, f.getType()));
    int numFolders = result.getNumResults();
    assertEquals(5, numFolders);

    // The empty path is the study root, whose name is ".".
    options = new QueryOptions(CatalogFileDBAdaptor.FileFilterOption.path.toString(), "");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    assertEquals(1, result.getNumResults());
    assertEquals(".", result.first().getName());

    // Comma-separated type values match either type.
    options = new QueryOptions(CatalogFileDBAdaptor.FileFilterOption.type.toString(), "FILE,FOLDER");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    assertEquals(8, result.getNumResults());
    assertEquals(numFiles + numFolders, result.getNumResults());

    // Numeric comparisons on diskUsage.
    options = new QueryOptions("type", "FILE");
    options.put("diskUsage", ">400");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    assertEquals(2, result.getNumResults());

    options = new QueryOptions("type", "FILE");
    options.put("diskUsage", "<400");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    assertEquals(1, result.getNumResults());

    // Search by the ids of the samples linked to a file in setUp.
    List<Integer> sampleIds = catalogManager.getAllSamples(studyId, new QueryOptions("name", "s_1,s_3,s_4"), sessionIdUser).getResult().stream().map(Sample::getId).collect(Collectors.toList());
    result = catalogManager.searchFile(studyId, new QueryOptions("sampleIds", sampleIds), sessionIdUser);
    assertEquals(1, result.getNumResults());

    options = new QueryOptions("type", "FILE");
    options.put("format", "PLAIN");
    result = catalogManager.searchFile(studyId, options, sessionIdUser);
    assertEquals(2, result.getNumResults());

    // attributes.* compares values as strings; nattributes.* compares
    // numerically; battributes.* (see testSearchFileBoolean) as booleans.
    CatalogFileDBAdaptor.FileFilterOption attributes = CatalogFileDBAdaptor.FileFilterOption.attributes;
    CatalogFileDBAdaptor.FileFilterOption nattributes = CatalogFileDBAdaptor.FileFilterOption.nattributes;
    result = catalogManager.searchFile(studyId, new QueryOptions(attributes + ".nested.text" , "~H"), sessionIdUser);
    assertEquals(1, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".nested.num1" , ">0"), sessionIdUser);
    assertEquals(1, result.getNumResults());
    // A numeric operator on the string-typed attributes filter matches nothing.
    result = catalogManager.searchFile(studyId, new QueryOptions(attributes + ".nested.num1" , ">0"), sessionIdUser);
    assertEquals(0, result.getNumResults());

    // "~val" (regex) matches all files whose field contains "val".
    result = catalogManager.searchFile(studyId, new QueryOptions(attributes + ".field" , "~val"), sessionIdUser);
    assertEquals(3, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions("attributes.field" , "~val"), sessionIdUser);
    assertEquals(3, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(attributes + ".field", "=~val"), sessionIdUser);
    assertEquals(3, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(attributes + ".field", "~val"), sessionIdUser);
    assertEquals(3, result.getNumResults());
    // Bare values are exact string matches.
    result = catalogManager.searchFile(studyId, new QueryOptions(attributes + ".field", "value"), sessionIdUser);
    assertEquals(2, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(attributes + ".field", "other"), sessionIdUser);
    assertEquals(1, result.getNumResults());

    // Numeric operators on nattributes.numValue.
    result = catalogManager.searchFile(studyId, new QueryOptions("nattributes.numValue", ">=5"), sessionIdUser);
    assertEquals(3, result.getNumResults());
    // NOTE(review): the expected count here suggests ">4,<6" conditions are
    // OR-ed rather than AND-ed — confirm the adaptor's comma semantics.
    result = catalogManager.searchFile(studyId, new QueryOptions("nattributes.numValue", ">4,<6"), sessionIdUser);
    assertEquals(3, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue", "==5"), sessionIdUser);
    assertEquals(2, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue", "==5.0"), sessionIdUser);
    assertEquals(2, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue", "5.0"), sessionIdUser);
    assertEquals(2, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue", ">5"), sessionIdUser);
    assertEquals(1, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue", ">4"), sessionIdUser);
    assertEquals(3, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue", "<6"), sessionIdUser);
    assertEquals(2, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue", "<=5"), sessionIdUser);
    assertEquals(2, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue" , "<5"), sessionIdUser);
    assertEquals(0, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue" , "<2"), sessionIdUser);
    assertEquals(0, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue" , "==23"), sessionIdUser);
    assertEquals(0, result.getNumResults());
    // One fixture stores numValue as the string "10": the string filter finds
    // it, the numeric "=" filter does not.
    result = catalogManager.searchFile(studyId, new QueryOptions(attributes + ".numValue" , "=~10"), sessionIdUser);
    assertEquals(1, result.getNumResults());
    result = catalogManager.searchFile(studyId, new QueryOptions(nattributes + ".numValue" , "=10"), sessionIdUser);
    assertEquals(0, result.getNumResults());

    // Multiple filters in one QueryOptions are combined (AND).
    QueryOptions query = new QueryOptions();
    query.add(attributes + ".name", "fileTest1k");
    query.add(attributes + ".field", "value");
    result = catalogManager.searchFile(studyId, query, sessionIdUser);
    assertEquals(1, result.getNumResults());

    query = new QueryOptions();
    query.add(attributes + ".name", "fileTest1k");
    query.add(attributes + ".field", "value");
    query.add(attributes + ".numValue", Arrays.asList(8, 9, 10)); //Searching as String. numValue = "10"
    result = catalogManager.searchFile(studyId, query, sessionIdUser);
    assertEquals(1, result.getNumResults());
}
@Test
public void testSearchFileBoolean() throws CatalogException {
    // battributes.* filters compare attribute values as booleans.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    CatalogFileDBAdaptor.FileFilterOption battributes = CatalogFileDBAdaptor.FileFilterOption.battributes;
    QueryOptions query;
    QueryResult<File> found;

    // Exactly one fixture file has attributes.boolean == true...
    query = new QueryOptions(battributes + ".boolean", "true");
    found = catalogManager.searchFile(studyId, query, sessionIdUser);
    assertEquals(1, found.getNumResults());

    // ...and exactly one has attributes.boolean == false.
    query = new QueryOptions(battributes + ".boolean", "false");
    found = catalogManager.searchFile(studyId, query, sessionIdUser);
    assertEquals(1, found.getNumResults());

    // "!=" also matches files where the attribute is absent (null).
    query = new QueryOptions(battributes + ".boolean", "!=false");
    query.put("type", "FILE");
    found = catalogManager.searchFile(studyId, query, sessionIdUser);
    assertEquals(2, found.getNumResults());

    query = new QueryOptions(battributes + ".boolean", "!=true");
    query.put("type", "FILE");
    found = catalogManager.searchFile(studyId, query, sessionIdUser);
    assertEquals(2, found.getNumResults());
}
@Test
public void testSearchFileFail1() throws CatalogException {
    // A numeric-attribute filter whose operand is not a number must be rejected.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    thrown.expect(CatalogDBException.class);
    catalogManager.searchFile(studyId, new QueryOptions(CatalogFileDBAdaptor.FileFilterOption.nattributes.toString() + ".numValue", "==NotANumber"), sessionIdUser);
}
@Test
public void testSearchFileFail2() throws CatalogException {
    // An unknown filter key must be rejected rather than silently ignored.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    thrown.expect(CatalogDBException.class);
    catalogManager.searchFile(studyId, new QueryOptions("badFilter", "badFilter"), sessionIdUser);
}
@Test
public void testSearchFileFail3() throws CatalogException {
    // The regex operator "~" is not valid on the numeric "id" field.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    thrown.expect(CatalogDBException.class);
    catalogManager.searchFile(studyId, new QueryOptions("id", "~5"), sessionIdUser); //Bad operator
}
@Test
public void testGetFileParent() throws CatalogException, IOException {
    // Print the direct parent of a nested folder, of "data/", and of the study
    // root itself. Nothing is asserted; this is an inspection/smoke test.
    String[] filePaths = {
            "user@1000G:phase1:data/test/folder/",
            "user@1000G:phase1:data/",
            "user@1000G:phase1:",
    };
    for (String filePath : filePaths) {
        int fileId = catalogManager.getFileId(filePath);
        System.out.println(catalogManager.getFile(fileId, null, sessionIdUser));
        QueryResult<File> fileParent = catalogManager.getFileParent(fileId, null, sessionIdUser);
        System.out.println(fileParent);
    }
}
@Test
public void testGetFileParents1() throws CatalogException {
    // The parents of a folder are every ancestor plus itself, ordered from the
    // study root down to the folder.
    int fileId = catalogManager.getFileId("user@1000G:phase1:data/test/folder/");
    QueryResult<File> fileParents = catalogManager.getFileParents(fileId, null, sessionIdUser);

    String[] expectedPaths = {"", "data/", "data/test/", "data/test/folder/"};
    assertEquals(expectedPaths.length, fileParents.getNumResults());
    for (int i = 0; i < expectedPaths.length; i++) {
        assertEquals(expectedPaths[i], fileParents.getResult().get(i).getPath());
    }
}
@Test
public void testGetFileParents2() throws CatalogException {
    // For a plain file, the parents are its ancestor folders (root first); the
    // file itself is not included.
    int fileId = catalogManager.getFileId("user@1000G:phase1:data/test/folder/test_1K.txt.gz");
    QueryResult<File> fileParents = catalogManager.getFileParents(fileId, null, sessionIdUser);

    String[] expectedPaths = {"", "data/", "data/test/", "data/test/folder/"};
    assertEquals(expectedPaths.length, fileParents.getNumResults());
    for (int i = 0; i < expectedPaths.length; i++) {
        assertEquals(expectedPaths[i], fileParents.getResult().get(i).getPath());
    }
}
@Test
public void testGetFileParents3() throws CatalogException {
    // With an "include" projection, only the requested fields (path, id) of
    // each parent should be populated.
    int fileId = catalogManager.getFileId("user@1000G:phase1:data/test/");
    QueryResult<File> fileParents = catalogManager.getFileParents(fileId,
            new QueryOptions("include", "projects.studies.files.path,projects.studies.files.id"),
            sessionIdUser);

    String[] expectedPaths = {"", "data/", "data/test/"};
    assertEquals(expectedPaths.length, fileParents.getNumResults());
    for (int i = 0; i < expectedPaths.length; i++) {
        assertEquals(expectedPaths[i], fileParents.getResult().get(i).getPath());
    }
    for (File parent : fileParents.getResult()) {
        assertNull(parent.getName());       // excluded by the projection
        assertNotNull(parent.getPath());    // included
        assertTrue(parent.getId() != 0);    // included
    }
}
@Test
public void testDeleteFile () throws CatalogException, IOException {
    // Deleting a file does not remove it: it is renamed with a ".deleted"
    // prefix and its status becomes TRASHED. Verified for both studies.
    int projectId = catalogManager.getAllProjects("user", null, sessionIdUser).first().getId();
    int studyId = catalogManager.getAllStudies(projectId, null, sessionIdUser).first().getId();

    List<File> result = catalogManager.getAllFiles(studyId, new QueryOptions("type", "FILE"), sessionIdUser).getResult();
    for (File file : result) {
        catalogManager.deleteFile(file.getId(), sessionIdUser);
    }
    // (removed an unused CatalogFileUtils local that was never referenced)
    catalogManager.getAllFiles(studyId, new QueryOptions("type", "FILE"), sessionIdUser).getResult().forEach(f -> {
        assertEquals(File.Status.TRASHED, f.getStatus());
        assertTrue(f.getName().startsWith(".deleted"));
    });

    int studyId2 = catalogManager.getAllStudies(projectId, null, sessionIdUser).getResult().get(1).getId();
    result = catalogManager.getAllFiles(studyId2, new QueryOptions("type", "FILE"), sessionIdUser).getResult();
    for (File file : result) {
        catalogManager.deleteFile(file.getId(), sessionIdUser);
    }
    // FIX: the original re-queried studyId here (copy-paste), re-checking the
    // first study instead of the files just deleted from the second study.
    catalogManager.getAllFiles(studyId2, new QueryOptions("type", "FILE"), sessionIdUser).getResult().forEach(f -> {
        assertEquals(File.Status.TRASHED, f.getStatus());
        assertTrue(f.getName().startsWith(".deleted"));
    });
}
@Test
public void testDeleteLeafFolder () throws CatalogException, IOException {
    // A leaf folder (no sub-folders) can be deleted; the verification is
    // delegated to the shared deleteFolderAndCheck helper.
    int deletable = catalogManager.getFileId("user@1000G/phase3/data/test/folder/");
    deleteFolderAndCheck(deletable);
}
@Test
public void testDeleteMiddleFolder () throws CatalogException, IOException {
    // A folder that still has sub-folders and files underneath can also be
    // deleted; deleteFolderAndCheck verifies the outcome.
    int deletable = catalogManager.getFileId("user@1000G/phase3/data/");
    deleteFolderAndCheck(deletable);
}
@Test
public void testDeleteRootFolder () throws CatalogException, IOException {
    // Deleting the study root folder is not allowed and must raise CatalogException.
    final int rootId = catalogManager.getFileId("user@1000G/phase3/");
    thrown.expect(CatalogException.class);
    deleteFolderAndCheck(rootId);
}
@Test
public void deleteFolderTest() throws CatalogException, IOException {
    // Build a folder tree with several regular files, then add one file in STAGE status.
    // Deleting the folder must fail (because of the staged file) and leave everything untouched.
    List<File> folderFiles = new LinkedList<>();
    int studyId = catalogManager.getStudyId("user@1000G/phase3");
    File folder = catalogManager.createFolder(studyId, Paths.get("folder"), false, null, sessionIdUser).first();
    folderFiles.add(catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "folder/my.txt", StringUtils.randomString(200).getBytes(), "", true, sessionIdUser).first());
    folderFiles.add(catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "folder/my2.txt", StringUtils.randomString(200).getBytes(), "", true, sessionIdUser).first());
    folderFiles.add(catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "folder/my3.txt", StringUtils.randomString(200).getBytes(), "", true, sessionIdUser).first());
    folderFiles.add(catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "folder/subfolder/my4.txt", StringUtils.randomString(200).getBytes(), "", true, sessionIdUser).first());
    folderFiles.add(catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "folder/subfolder/my5.txt", StringUtils.randomString(200).getBytes(), "", true, sessionIdUser).first());
    folderFiles.add(catalogManager.createFile(studyId, File.Format.PLAIN, File.Bioformat.NONE, "folder/subfolder/subsubfolder/my6.txt", StringUtils.randomString(200).getBytes(), "", true, sessionIdUser).first());
    CatalogIOManager ioManager = catalogManager.getCatalogIOManagerFactory().get(catalogManager.getFileUri(folder));
    // Sanity check: every created file exists on disk before the delete attempt.
    for (File file : folderFiles) {
        assertTrue(ioManager.exists(catalogManager.getFileUri(file)));
    }
    // The staged file is what makes the folder deletion fail below; the returned File is
    // intentionally unused — only the side effect of creating it matters.
    File stagedFile = catalogManager.createFile(studyId, File.Type.FILE, File.Format.PLAIN, File.Bioformat.NONE, "folder/subfolder/subsubfolder/my_staged.txt",
            null, null, null, File.Status.STAGE, 0, -1, null, -1, null, null, true, null, sessionIdUser).first();
    // Expectation is registered BEFORE the call; the finally block runs its assertions while
    // the CatalogException from deleteFolder is still propagating.
    thrown.expect(CatalogException.class);
    try {
        catalogManager.deleteFolder(folder.getId(), sessionIdUser);
    } finally {
        // On failure nothing may have been renamed or removed from disk.
        assertEquals("Folder name should not be modified", folder.getPath(), catalogManager.getFile(folder.getId(), sessionIdUser).first().getPath());
        assertTrue(ioManager.exists(catalogManager.getFileUri(catalogManager.getFile(folder.getId(), sessionIdUser).first())));
        for (File file : folderFiles) {
            assertEquals("File name should not be modified", file.getPath(), catalogManager.getFile(file.getId(), sessionIdUser).first().getPath());
            URI fileUri = catalogManager.getFileUri(catalogManager.getFile(file.getId(), sessionIdUser).first());
            assertTrue("File uri: " + fileUri + " should exist", ioManager.exists(fileUri));
        }
    }
}
/**
 * Deletes the given folder and asserts that the folder itself and every file
 * under its path end up in {@code TRASHED} status.
 *
 * @param deletable id of the folder to delete
 */
private void deleteFolderAndCheck(int deletable) throws CatalogException, IOException {
    catalogManager.deleteFolder(deletable, sessionIdUser);
    File file = catalogManager.getFile(deletable, sessionIdUser).first();
    assertEquals(File.Status.TRASHED, file.getStatus());
    // Search recursively for everything whose directory starts with the deleted folder's path.
    // (A previous getAllFilesInFolder call here was dead code — its result was immediately
    // overwritten by this search.)
    List<File> allFilesInFolder = catalogManager.searchFile(
            catalogManager.getStudyIdByFileId(deletable),
            new QueryOptions("directory", file.getPath() + ".*"),
            null, sessionIdUser).getResult();
    for (File subFile : allFilesInFolder) {
        assertEquals(File.Status.TRASHED, subFile.getStatus());
    }
}
/* TYPE_FILE UTILS */
/** Creates a debug file with a random unique name under /tmp. */
public static java.io.File createDebugFile() throws IOException {
    return createDebugFile("/tmp/fileTest " + StringUtils.randomString(5));
}
/** Creates a debug file at the given path with the default 200 random lines. */
public static java.io.File createDebugFile(String fileTestName) throws IOException {
    return createDebugFile(fileTestName, 200);
}
/**
 * Writes a debug file containing a header line, 100 comma-separated integers,
 * and {@code lines} lines of 500 random characters each.
 *
 * @param fileTestName path of the file to create
 * @param lines        number of random-content lines to append
 * @return the created file
 * @throws IOException if the file cannot be written
 */
public static java.io.File createDebugFile(String fileTestName, int lines) throws IOException {
    // try-with-resources: the original leaked the stream when a write threw.
    try (DataOutputStream os = new DataOutputStream(new FileOutputStream(fileTestName))) {
        os.writeBytes("Debug file name: " + fileTestName + "\n");
        for (int i = 0; i < 100; i++) {
            os.writeBytes(i + ", ");
        }
        for (int i = 0; i < lines; i++) {
            os.writeBytes(StringUtils.randomString(500));
            os.write('\n');
        }
    }
    return Paths.get(fileTestName).toFile();
}
/**
* Job methods
* ***************************
*/
@Test
public void testCreateJob() throws CatalogException, IOException {
    int projectId = catalogManager.getAllProjects("user", null, sessionIdUser).first().getId();
    int studyId = catalogManager.getAllStudies(projectId, null, sessionIdUser).first().getId();
    File outDir = catalogManager.createFolder(studyId, Paths.get("jobs", "myJob"), true, null, sessionIdUser).first();
    URI tmpJobOutDir = catalogManager.createJobOutDir(studyId, StringUtils.randomString(5), sessionIdUser);
    // One job per status, created in the same order as before.
    String[] jobNames = {"myJob", "myReadyJob", "myQueuedJob", "myErrorJob"};
    Job.Status[] jobStatuses = {Job.Status.PREPARED, Job.Status.READY, Job.Status.QUEUED, Job.Status.ERROR};
    for (int i = 0; i < jobNames.length; i++) {
        catalogManager.createJob(
                studyId, jobNames[i], "samtool", "description", "echo \"Hello World!\"", tmpJobOutDir, outDir.getId(),
                Collections.emptyList(), null, new HashMap<>(), null, jobStatuses[i], 0, 0, null, sessionIdUser);
    }
    String sessionId = catalogManager.login("admin", "admin", "localhost").first().get("sessionId").toString();
    // Two of the four statuses count as unfinished.
    QueryResult<Job> unfinishedJobs = catalogManager.getUnfinishedJobs(sessionId);
    assertEquals(2, unfinishedJobs.getNumResults());
    QueryResult<Job> allJobs = catalogManager.getAllJobs(studyId, sessionId);
    assertEquals(4, allJobs.getNumResults());
}
@Test
public void testCreateFailJob() throws CatalogException {
    int firstProjectId = catalogManager.getAllProjects("user", null, sessionIdUser).first().getId();
    int firstStudyId = catalogManager.getAllStudies(firstProjectId, null, sessionIdUser).first().getId();
    URI jobOutUri = catalogManager.createJobOutDir(firstStudyId, StringUtils.randomString(5), sessionIdUser);
    // Passing a project id where an output directory (file) id is expected must fail.
    thrown.expect(CatalogException.class);
    catalogManager.createJob(
            firstStudyId, "myErrorJob", "samtool", "description", "echo \"Hello World!\"", jobOutUri, firstProjectId, //Bad outputId
            Collections.emptyList(), null, new HashMap<>(), null, Job.Status.ERROR, 0, 0, null, sessionIdUser);
}
@Test
public void testGetAllJobs() throws CatalogException {
    int projectId = catalogManager.getAllProjects("user", null, sessionIdUser).first().getId();
    int studyId = catalogManager.getAllStudies(projectId, null, sessionIdUser).first().getId();
    File jobsDir = catalogManager.createFolder(studyId, Paths.get("jobs", "myJob"), true, null, sessionIdUser).first();
    URI jobOutUri = catalogManager.createJobOutDir(studyId, StringUtils.randomString(5), sessionIdUser);
    catalogManager.createJob(
            studyId, "myErrorJob", "samtool", "description", "echo \"Hello World!\"", jobOutUri, jobsDir.getId(),
            Collections.emptyList(), null, new HashMap<>(), null, Job.Status.ERROR, 0, 0, null, sessionIdUser);
    // Exactly the one job just created must be listed for this study.
    QueryResult<Job> jobs = catalogManager.getAllJobs(studyId, sessionIdUser);
    assertEquals(1, jobs.getNumTotalResults());
    assertEquals(1, jobs.getNumResults());
}
/**
* VariableSet methods
* ***************************
*/
@Test
public void testCreateVariableSet () throws CatalogException {
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    Study study = catalogManager.getStudy(studyId, sessionIdUser).first();
    int variableSetNum = study.getVariableSets().size();
    // Five variables covering TEXT, NUMERIC (with ranges), BOOLEAN and CATEGORICAL types.
    Set<Variable> variables = new HashSet<>(Arrays.asList(
            new Variable("NAME", "", Variable.VariableType.TEXT, "", true, false, Collections.<String>emptyList(), 0, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("AGE", "", Variable.VariableType.NUMERIC, null, true, false, Collections.singletonList("0:99"), 1, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("HEIGHT", "", Variable.VariableType.NUMERIC, "1.5", false, false, Collections.singletonList("0:"), 2, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("ALIVE", "", Variable.VariableType.BOOLEAN, "", true, false, Collections.<String>emptyList(), 3, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("PHEN", "", Variable.VariableType.CATEGORICAL, "", true, false, Arrays.asList("CASE", "CONTROL"), 4, "", "", null, Collections.<String, Object>emptyMap())
    ));
    QueryResult<VariableSet> queryResult = catalogManager.createVariableSet(study.getId(), "vs1", true, "", null, variables, sessionIdUser);
    assertEquals(1, queryResult.getResult().size());
    // The study must now expose one more variable set than before.
    Study reloaded = catalogManager.getStudy(study.getId(), sessionIdUser).first();
    assertEquals(variableSetNum + 1, reloaded.getVariableSets().size());
}
@Test
public void testCreateRepeatedVariableSet () throws CatalogException {
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    Study study = catalogManager.getStudy(studyId, sessionIdUser).first();
    // "NAME" appears twice (TEXT and BOOLEAN) — duplicated ids must be rejected.
    List<Variable> duplicatedVariables = Arrays.asList(
            new Variable("NAME", "", Variable.VariableType.TEXT, "", true, false, Collections.<String>emptyList(), 0, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("NAME", "", Variable.VariableType.BOOLEAN, "", true, false, Collections.<String>emptyList(), 3, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("AGE", "", Variable.VariableType.NUMERIC, null, true, false, Collections.singletonList("0:99"), 1, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("HEIGHT", "", Variable.VariableType.NUMERIC, "1.5", false, false, Collections.singletonList("0:"), 2, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("PHEN", "", Variable.VariableType.CATEGORICAL, "", true, false, Arrays.asList("CASE", "CONTROL"), 4, "", "", null, Collections.<String, Object>emptyMap())
    );
    thrown.expect(CatalogException.class);
    catalogManager.createVariableSet(study.getId(), "vs1", true, "", null, duplicatedVariables, sessionIdUser);
}
@Test
public void testDeleteVariableSet() throws CatalogException {
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    List<Variable> variables = Arrays.asList(
            new Variable("NAME", "", Variable.VariableType.TEXT, "", true, false, Collections.<String>emptyList(), 0, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("AGE", "", Variable.VariableType.NUMERIC, null, true, false, Collections.singletonList("0:99"), 1, "", "", null, Collections.<String, Object>emptyMap())
    );
    VariableSet vs1 = catalogManager.createVariableSet(studyId, "vs1", true, "", null, variables, sessionIdUser).first();
    // Delete returns the removed set; its id must match the one just created.
    VariableSet deletedVs = catalogManager.deleteVariableSet(vs1.getId(), null, sessionIdUser).first();
    assertEquals(vs1.getId(), deletedVs.getId());
    // Fetching the deleted set must fail.
    thrown.expect(CatalogDBException.class); //VariableSet does not exist
    catalogManager.getVariableSet(vs1.getId(), null, sessionIdUser);
}
@Test
public void testGetAllVariableSet() throws CatalogException {
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    List<Variable> variables = Arrays.asList(
            new Variable("NAME", "", Variable.VariableType.TEXT, "", true, false, Collections.<String>emptyList(), 0, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("AGE", "", Variable.VariableType.NUMERIC, null, true, false, Collections.singletonList("0:99"), 1, "", "", null, Collections.<String, Object>emptyMap())
    );
    VariableSet vs1 = catalogManager.createVariableSet(studyId, "vs1", true, "Cancer", null, variables, sessionIdUser).first();
    VariableSet vs2 = catalogManager.createVariableSet(studyId, "vs2", true, "Virgo", null, variables, sessionIdUser).first();
    VariableSet vs3 = catalogManager.createVariableSet(studyId, "vs3", true, "Piscis", null, variables, sessionIdUser).first();
    VariableSet vs4 = catalogManager.createVariableSet(studyId, "vs4", true, "Aries", null, variables, sessionIdUser).first();
    // Filter by a single name.
    assertEquals(1, catalogManager.getAllVariableSet(studyId, new QueryOptions(CatalogSampleDBAdaptor.VariableSetFilterOption.name.toString(), "vs1"), sessionIdUser).getNumResults());
    // Filter by a comma-separated list of names.
    assertEquals(2, catalogManager.getAllVariableSet(studyId, new QueryOptions(CatalogSampleDBAdaptor.VariableSetFilterOption.name.toString(), "vs1,vs2"), sessionIdUser).getNumResults());
    // Name matching is case-sensitive.
    assertEquals(0, catalogManager.getAllVariableSet(studyId, new QueryOptions(CatalogSampleDBAdaptor.VariableSetFilterOption.name.toString(), "VS1"), sessionIdUser).getNumResults());
    // Filter by a comma-separated list of ids.
    assertEquals(2, catalogManager.getAllVariableSet(studyId, new QueryOptions(CatalogSampleDBAdaptor.VariableSetFilterOption.id.toString(), vs1.getId() + "," + vs3.getId()), sessionIdUser).getNumResults());
}
@Test
public void testDeleteVariableSetInUse() throws CatalogException {
    // A variable set referenced by an annotation must not be deletable.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    int sampleId1 = catalogManager.createSample(studyId, "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    List<Variable> variables = Arrays.asList(
            new Variable("NAME", "", Variable.VariableType.TEXT, "", true, false, Collections.<String>emptyList(), 0, "", "", null, Collections.<String, Object>emptyMap()),
            new Variable("AGE", "", Variable.VariableType.NUMERIC, null, false, false, Collections.singletonList("0:99"), 1, "", "", null, Collections.<String, Object>emptyMap())
    );
    VariableSet vs1 = catalogManager.createVariableSet(studyId, "vs1", true, "", null, variables, sessionIdUser).first();
    // Annotating the sample puts vs1 "in use".
    catalogManager.annotateSample(sampleId1, "annotationId", vs1.getId(), Collections.singletonMap("NAME", "LINUS"), null, sessionIdUser);
    try {
        catalogManager.deleteVariableSet(vs1.getId(), null, sessionIdUser).first();
    } finally {
        // The finally block runs while the exception from deleteVariableSet propagates:
        // verify the set still exists, then register the expectation so the in-flight
        // CatalogDBException satisfies the ExpectedException rule.
        // NOTE(review): registering thrown.expect in finally is fragile — if deleteVariableSet
        // ever stops throwing, the rule fails because no exception follows; confirm intent.
        VariableSet variableSet = catalogManager.getVariableSet(vs1.getId(), null, sessionIdUser).first();
        assertEquals(vs1.getId(), variableSet.getId());
        thrown.expect(CatalogDBException.class); //Expect the exception from the try
    }
}
/**
* Sample methods
* ***************************
*/
@Test
public void testCreateSample () throws CatalogException {
    // Smoke test: creating a sample in the first study of the first project must not throw.
    int projectId = catalogManager.getAllProjects("user", null, sessionIdUser).first().getId();
    int studyId = catalogManager.getAllStudies(projectId, null, sessionIdUser).first().getId();
    QueryResult<Sample> queryResult = catalogManager.createSample(studyId, "HG007", "IMDb", "", null, null, sessionIdUser);
    System.out.println("sampleQueryResult = " + queryResult);
}
@Test
public void testAnnotateMulti () throws CatalogException {
    // A non-unique variable set allows several annotation sets on the same sample.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    Study study = catalogManager.getStudy(studyId, sessionIdUser).first();
    int sampleId = catalogManager.createSample(study.getId(), "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    Set<Variable> variables = new HashSet<>();
    variables.add(new Variable("NAME", "", Variable.VariableType.TEXT, "", true, false, Collections.<String>emptyList(), 0, "", "", null, Collections.<String, Object>emptyMap()));
    VariableSet vs1 = catalogManager.createVariableSet(study.getId(), "vs1", false, "", null, variables, sessionIdUser).first();
    HashMap<String, Object> annotations = new HashMap<>();
    annotations.put("NAME", "Luke");
    QueryResult<AnnotationSet> annotationSetQueryResult = catalogManager.annotateSample(sampleId, "annotation1", vs1.getId(), annotations, null, sessionIdUser);
    assertEquals(1, annotationSetQueryResult.getNumResults());
    annotations = new HashMap<>();
    annotations.put("NAME", "Lucas");
    // BUG FIX: the second annotateSample result was previously discarded and the assertion
    // re-checked the stale first QueryResult; capture and check the new result instead.
    annotationSetQueryResult = catalogManager.annotateSample(sampleId, "annotation2", vs1.getId(), annotations, null, sessionIdUser);
    assertEquals(1, annotationSetQueryResult.getNumResults());
    // Both annotation sets must now be attached to the sample.
    assertEquals(2, catalogManager.getSample(sampleId, null, sessionIdUser).first().getAnnotationSets().size());
}
@Test
public void testAnnotateUnique () throws CatalogException {
    // A unique variable set allows at most one annotation set per sample.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    Study study = catalogManager.getStudy(studyId, sessionIdUser).first();
    int sampleId = catalogManager.createSample(study.getId(), "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    Set<Variable> variables = new HashSet<>();
    variables.add(new Variable("NAME", "", Variable.VariableType.TEXT, "", true, false, Collections.<String>emptyList(), 0, "", "", null, Collections.<String, Object>emptyMap()));
    VariableSet vs1 = catalogManager.createVariableSet(study.getId(), "vs1", true, "", null, variables, sessionIdUser).first();
    HashMap<String, Object> annotations = new HashMap<>();
    annotations.put("NAME", "Luke");
    // First annotation succeeds.
    assertEquals(1, catalogManager.annotateSample(sampleId, "annotation1", vs1.getId(), annotations, null, sessionIdUser).getNumResults());
    // Second annotation against the same unique set must be rejected.
    annotations.put("NAME", "Lucas");
    thrown.expect(CatalogException.class);
    catalogManager.annotateSample(sampleId, "annotation2", vs1.getId(), annotations, null, sessionIdUser);
}
@Test
public void testAnnotateIncorrectType () throws CatalogException {
    // NUMERIC variables accept integer and decimal strings but reject non-numeric text.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    Study study = catalogManager.getStudy(studyId, sessionIdUser).first();
    int sampleId = catalogManager.createSample(study.getId(), "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    Set<Variable> variables = new HashSet<>();
    variables.add(new Variable("NUM", "", Variable.VariableType.NUMERIC, "", true, false, null, 0, "", "", null, Collections.<String, Object>emptyMap()));
    VariableSet vs1 = catalogManager.createVariableSet(study.getId(), "vs1", false, "", null, variables, sessionIdUser).first();
    HashMap<String, Object> annotations = new HashMap<>();
    annotations.put("NUM", "5");
    assertEquals(1, catalogManager.annotateSample(sampleId, "annotation1", vs1.getId(), annotations, null, sessionIdUser).getNumResults());
    annotations.put("NUM", "6.8");
    assertEquals(1, catalogManager.annotateSample(sampleId, "annotation2", vs1.getId(), annotations, null, sessionIdUser).getNumResults());
    // A value that cannot be parsed as a number must raise.
    annotations.put("NUM", "five point five");
    thrown.expect(CatalogException.class);
    catalogManager.annotateSample(sampleId, "annotation3", vs1.getId(), annotations, null, sessionIdUser);
}
@Test
public void testAnnotateRange () throws CatalogException {
    // Values are accepted only when they fall inside one of the declared ranges.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    Study study = catalogManager.getStudy(studyId, sessionIdUser).first();
    int sampleId = catalogManager.createSample(study.getId(), "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    Set<Variable> variables = new HashSet<>();
    variables.add(new Variable("RANGE_NUM", "", Variable.VariableType.NUMERIC, "", true, false, Arrays.asList("1:14", "16:22", "50:"), 0, "", "", null, Collections.<String, Object>emptyMap()));
    VariableSet vs1 = catalogManager.createVariableSet(study.getId(), "vs1", false, "", null, variables, sessionIdUser).first();
    HashMap<String, Object> annotations = new HashMap<>();
    annotations.put("RANGE_NUM", "1");      // lower bound of 1:14
    assertEquals(1, catalogManager.annotateSample(sampleId, "annotation1", vs1.getId(), annotations, null, sessionIdUser).getNumResults());
    annotations.put("RANGE_NUM", "14");     // upper bound of 1:14
    assertEquals(1, catalogManager.annotateSample(sampleId, "annotation2", vs1.getId(), annotations, null, sessionIdUser).getNumResults());
    annotations.put("RANGE_NUM", "20");     // inside 16:22
    assertEquals(1, catalogManager.annotateSample(sampleId, "annotation3", vs1.getId(), annotations, null, sessionIdUser).getNumResults());
    annotations.put("RANGE_NUM", "100000"); // inside open-ended 50:
    assertEquals(1, catalogManager.annotateSample(sampleId, "annotation4", vs1.getId(), annotations, null, sessionIdUser).getNumResults());
    // 14.1 falls in the gap between 1:14 and 16:22 — must be rejected.
    annotations.put("RANGE_NUM", "14.1");
    thrown.expect(CatalogException.class);
    catalogManager.annotateSample(sampleId, "annotation5", vs1.getId(), annotations, null, sessionIdUser);
}
@Test
public void testAnnotateCategorical () throws CatalogException {
    // CATEGORICAL variables accept only their declared values.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    Study study = catalogManager.getStudy(studyId, sessionIdUser).first();
    int sampleId = catalogManager.createSample(study.getId(), "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    Set<Variable> variables = new HashSet<>();
    variables.add(new Variable("COOL_NAME", "", Variable.VariableType.CATEGORICAL, "", true, false, Arrays.asList("LUKE", "LEIA", "VADER", "YODA"), 0, "", "", null, Collections.<String, Object>emptyMap()));
    VariableSet vs1 = catalogManager.createVariableSet(study.getId(), "vs1", false, "", null, variables, sessionIdUser).first();
    HashMap<String, Object> annotations = new HashMap<>();
    // Every declared category value must be accepted.
    String[] validNames = {"LUKE", "LEIA", "VADER", "YODA"};
    for (int i = 0; i < validNames.length; i++) {
        annotations.put("COOL_NAME", validNames[i]);
        QueryResult<AnnotationSet> annotationSetQueryResult =
                catalogManager.annotateSample(sampleId, "annotation" + (i + 1), vs1.getId(), annotations, null, sessionIdUser);
        assertEquals(1, annotationSetQueryResult.getNumResults());
    }
    // A value outside the declared categories must be rejected.
    annotations.put("COOL_NAME", "SPOCK");
    thrown.expect(CatalogException.class);
    catalogManager.annotateSample(sampleId, "annotation5", vs1.getId(), annotations, null, sessionIdUser);
}
@Test
public void testAnnotateNested() throws CatalogException {
    // Annotate two samples with nested-object annotations, then exercise the query syntax
    // on nested fields (dot paths, comma = OR within a term, semicolon = AND across terms).
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    Study study = catalogManager.getStudy(studyId, sessionIdUser).first();
    int sampleId1 = catalogManager.createSample(study.getId(), "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int sampleId2 = catalogManager.createSample(study.getId(), "SAMPLE_2", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    // Samples 3-5 are created but only samples 1 and 2 are annotated below.
    int sampleId3 = catalogManager.createSample(study.getId(), "SAMPLE_3", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int sampleId4 = catalogManager.createSample(study.getId(), "SAMPLE_4", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int sampleId5 = catalogManager.createSample(study.getId(), "SAMPLE_5", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    VariableSet vs1 = catalogManager.createVariableSet(study.getId(), "vs1", false, "", null, Collections.singleton(CatalogAnnotationsValidatorTest.nestedObject), sessionIdUser).first();
    QueryResult<AnnotationSet> annotationSetQueryResult;
    HashMap<String, Object> annotations = new HashMap<>();
    // SAMPLE_1: stringList [li, lu]; object.string "my value"; numberList [2, 3, 4].
    annotations.put("nestedObject", new QueryOptions("stringList", Arrays.asList("li", "lu")).append("object", new ObjectMap("string", "my value").append("numberList", Arrays.asList(2, 3, 4))));
    annotationSetQueryResult = catalogManager.annotateSample(sampleId1, "annotation1", vs1.getId(), annotations, null, sessionIdUser);
    assertEquals(1, annotationSetQueryResult.getNumResults());
    // SAMPLE_2: stringList [lo, lu]; object.string "stringValue"; numberList [3, 4, 5].
    annotations.put("nestedObject", new QueryOptions("stringList", Arrays.asList("lo", "lu")).append("object", new ObjectMap("string", "stringValue").append("numberList", Arrays.asList(3,4,5))));
    annotationSetQueryResult = catalogManager.annotateSample(sampleId2, "annotation1", vs1.getId(), annotations, null, sessionIdUser);
    assertEquals(1, annotationSetQueryResult.getNumResults());
    //            annotations.put("nestedObject", new QueryOptions("stringList", Arrays.asList("li", "lo", "lu")).append("object", new ObjectMap("string", "my value").append("numberList", Arrays.asList(2, 3, 4))));
    //            annotationSetQueryResult = catalogManager.annotateSample(sampleId3, "annotation1", vs1.getId(), annotations, null, sessionIdUser);
    //            assertEquals(1, annotationSetQueryResult.getNumResults());
    //
    //            annotations.put("nestedObject", new QueryOptions("stringList", Arrays.asList("li", "lo", "lu")).append("object", new ObjectMap("string", "my value").append("numberList", Arrays.asList(2, 3, 4))));
    //            annotationSetQueryResult = catalogManager.annotateSample(sampleId4, "annotation1", vs1.getId(), annotations, null, sessionIdUser);
    //            assertEquals(1, annotationSetQueryResult.getNumResults());
    List<Sample> samples;
    QueryOptions queryOptions = new QueryOptions(variableSetId.toString(), vs1.getId());
    // "li" matches only SAMPLE_1's stringList.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:li");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(1, samples.size());
    // "lo" matches only SAMPLE_2.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:lo");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(1, samples.size());
    // "LL" matches neither sample.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:LL");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(0, samples.size());
    // Comma list is an OR: lo or li matches both samples.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:lo,li,LL");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(2, samples.size());
    // Nested object field query: only SAMPLE_1 has object.string == "my value".
    queryOptions.put(annotation.toString(), "nestedObject.object.string:my value");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(1, samples.size());
    // Semicolon combines terms with AND.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:lo,lu,LL;nestedObject.object.string:my value");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(1, samples.size());
    // numberList 7 appears in neither sample.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:lo,lu,LL;nestedObject.object.numberList:7");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(0, samples.size());
    // numberList 3 appears in both.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:lo,lu,LL;nestedObject.object.numberList:3");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(2, samples.size());
    // numberList 5 only in SAMPLE_2.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:lo,lu,LL;nestedObject.object.numberList:5");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(1, samples.size());
    // 2 (SAMPLE_1) or 5 (SAMPLE_2) matches both.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:lo,lu,LL;nestedObject.object.numberList:2,5");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(2, samples.size());
    // 0 matches neither.
    queryOptions.put(annotation.toString(), "nestedObject.stringList:lo,lu,LL;nestedObject.object.numberList:0");
    samples = catalogManager.getAllSamples(studyId, queryOptions, sessionIdUser).getResult();
    assertEquals(0, samples.size());
}
@Test
public void testQuerySamples() throws CatalogException {
    // Exercise sample queries by variable set, annotation-set id and annotation values.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    Study study = catalogManager.getStudy(studyId, sessionIdUser).first();
    VariableSet variableSet = study.getVariableSets().get(0);
    List<Sample> samples;
    QueryOptions annotation = new QueryOptions();
    // No filter: all 9 samples.
    samples = catalogManager.getAllSamples(studyId, annotation, sessionIdUser).getResult();
    assertEquals(9, samples.size());
    // Samples annotated with the study's first variable set.
    annotation = new QueryOptions(variableSetId.toString(), variableSet.getId());
    samples = catalogManager.getAllSamples(studyId, annotation, sessionIdUser).getResult();
    assertEquals(8, samples.size());
    // Filter by annotation-set id.
    annotation = new QueryOptions(annotationSetId.toString(), "annot2");
    samples = catalogManager.getAllSamples(studyId, annotation, sessionIdUser).getResult();
    assertEquals(3, samples.size());
    annotation = new QueryOptions(annotationSetId.toString(), "noExist");
    samples = catalogManager.getAllSamples(studyId, annotation, sessionIdUser).getResult();
    assertEquals(0, samples.size());
    // Filter by annotation value list (OR semantics).
    annotation = new QueryOptions("annotation", "NAME:s_1,s_2,s_3");
    samples = catalogManager.getAllSamples(studyId, annotation, sessionIdUser).getResult();
    assertEquals(3, samples.size());
    // Numeric comparison filter scoped to a variable set.
    // (An exact duplicate of this query/assert block was removed — dead copy-paste repetition.)
    annotation = new QueryOptions("annotation", "AGE:>30");
    annotation.add(variableSetId.toString(), variableSet.getId());
    samples = catalogManager.getAllSamples(studyId, annotation, sessionIdUser).getResult();
    assertEquals(3, samples.size());
    // Combine two annotation filters (AND semantics).
    annotation = new QueryOptions("annotation", "AGE:>30");
    annotation.add(variableSetId.toString(), variableSet.getId());
    annotation.addToListOption("annotation", "ALIVE:true");
    samples = catalogManager.getAllSamples(studyId, annotation, sessionIdUser).getResult();
    assertEquals(2, samples.size());
}
@Test
public void testModifySample() throws CatalogException {
    // Linking a sample to an existing individual must persist the individualId.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    int sampleId = catalogManager.createSample(studyId, "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int individualId = catalogManager.createIndividual(studyId, "Individual1", "", 0, 0, Individual.Gender.MALE, new QueryOptions(), sessionIdUser).first().getId();
    Sample modified = catalogManager.modifySample(sampleId, new QueryOptions("individualId", individualId), sessionIdUser).first();
    assertEquals(individualId, modified.getIndividualId());
}
@Test
public void testModifySampleBadIndividual() throws CatalogException {
    // Pointing a sample at a non-existent individual id must fail at the DB layer.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    int sampleId = catalogManager.createSample(studyId, "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    thrown.expect(CatalogDBException.class);
    catalogManager.modifySample(sampleId, new QueryOptions("individualId", 4), sessionIdUser);
}
@Test
public void testDeleteSample() throws CatalogException {
    // Deleting a sample returns it once; fetching it afterwards must fail.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    int sampleId = catalogManager.createSample(studyId, "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    QueryResult<Sample> deleted = catalogManager.deleteSample(sampleId, new QueryOptions(), sessionIdUser);
    assertEquals(sampleId, deleted.first().getId());
    thrown.expect(CatalogDBException.class);
    catalogManager.getSample(sampleId, new QueryOptions(), sessionIdUser);
}
/**
* Cohort methods
* ***************************
*/
@Test
public void testCreateCohort() throws CatalogException {
    // A cohort must keep its name and contain exactly the samples it was created with.
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    int sampleId1 = catalogManager.createSample(studyId, "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int sampleId2 = catalogManager.createSample(studyId, "SAMPLE_2", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int sampleId3 = catalogManager.createSample(studyId, "SAMPLE_3", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    List<Integer> sampleIds = Arrays.asList(sampleId1, sampleId2, sampleId3);
    Cohort myCohort = catalogManager.createCohort(studyId, "MyCohort", Cohort.Type.FAMILY, "", sampleIds, null, sessionIdUser).first();
    assertEquals("MyCohort", myCohort.getName());
    assertEquals(3, myCohort.getSamples().size());
    for (Integer sampleId : sampleIds) {
        assertTrue(myCohort.getSamples().contains(sampleId));
    }
}
@Test
// Exercises every filter supported by getAllCohorts: membership (samples),
// name (exact match and "~"-prefixed regex), type (enum and string forms), and id list.
public void testGetAllCohorts() throws CatalogException {
    int studyId = catalogManager.getStudyId("user@1000G:phase1");
    // Fixture: five samples and four cohorts with overlapping membership.
    int sampleId1 = catalogManager.createSample(studyId, "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int sampleId2 = catalogManager.createSample(studyId, "SAMPLE_2", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int sampleId3 = catalogManager.createSample(studyId, "SAMPLE_3", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int sampleId4 = catalogManager.createSample(studyId, "SAMPLE_4", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int sampleId5 = catalogManager.createSample(studyId, "SAMPLE_5", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    // Cohort1: {1,2,3}  Cohort2: {1,2,3,4}  Cohort3: {3,4}  Cohort4: {5,3}
    Cohort myCohort1 = catalogManager.createCohort(studyId, "MyCohort1", Cohort.Type.FAMILY, "", Arrays.asList(sampleId1, sampleId2, sampleId3), null, sessionIdUser).first();
    Cohort myCohort2 = catalogManager.createCohort(studyId, "MyCohort2", Cohort.Type.FAMILY, "", Arrays.asList(sampleId1, sampleId2, sampleId3, sampleId4), null, sessionIdUser).first();
    Cohort myCohort3 = catalogManager.createCohort(studyId, "MyCohort3", Cohort.Type.CASE_CONTROL, "", Arrays.asList(sampleId3, sampleId4), null, sessionIdUser).first();
    Cohort myCohort4 = catalogManager.createCohort(studyId, "MyCohort4", Cohort.Type.TRIO, "", Arrays.asList(sampleId5, sampleId3), null, sessionIdUser).first();
    int numResults;
    // Filter by a single member sample: cohorts 1 and 2 contain sample 1.
    numResults = catalogManager.getAllCohorts(studyId, new QueryOptions(CatalogSampleDBAdaptor.CohortFilterOption.samples.toString(), sampleId1), sessionIdUser).getNumResults();
    assertEquals(2, numResults);
    // Comma-separated sample list — presumably OR semantics (1 -> cohorts 1,2; 5 -> cohort 4); TODO confirm.
    numResults = catalogManager.getAllCohorts(studyId, new QueryOptions(CatalogSampleDBAdaptor.CohortFilterOption.samples.toString(), sampleId1 + "," + sampleId5), sessionIdUser).getNumResults();
    assertEquals(3, numResults);
    // NOTE(review): the case below is commented out — apparently a disabled/known-broken
    // multi-sample query; verify the expected semantics before re-enabling.
    // numResults = catalogManager.getAllCohorts(studyId, new QueryOptions(CatalogSampleDBAdaptor.CohortFilterOption.samples.toString(), sampleId3 + "," + sampleId4), sessionIdUser).getNumResults();
    // assertEquals(2, numResults);
    // Filter by exact name.
    numResults = catalogManager.getAllCohorts(studyId, new QueryOptions(CatalogSampleDBAdaptor.CohortFilterOption.name.toString(), "MyCohort2"), sessionIdUser).getNumResults();
    assertEquals(1, numResults);
    // "~" prefix switches the name filter to regex matching: all four cohorts match.
    numResults = catalogManager.getAllCohorts(studyId, new QueryOptions(CatalogSampleDBAdaptor.CohortFilterOption.name.toString(), "~MyCohort."), sessionIdUser).getNumResults();
    assertEquals(4, numResults);
    // Filter by type, passed either as the enum constant or as its string name.
    numResults = catalogManager.getAllCohorts(studyId, new QueryOptions(CatalogSampleDBAdaptor.CohortFilterOption.type.toString(), Cohort.Type.FAMILY), sessionIdUser).getNumResults();
    assertEquals(2, numResults);
    numResults = catalogManager.getAllCohorts(studyId, new QueryOptions(CatalogSampleDBAdaptor.CohortFilterOption.type.toString(), "CASE_CONTROL"), sessionIdUser).getNumResults();
    assertEquals(1, numResults);
    // Filter by an explicit comma-separated id list.
    numResults = catalogManager.getAllCohorts(studyId, new QueryOptions(CatalogSampleDBAdaptor.CohortFilterOption.id.toString(), myCohort1.getId() + "," + myCohort2.getId() + "," + myCohort3.getId()), sessionIdUser).getNumResults();
    assertEquals(3, numResults);
}
@Test
public void testCreateCohortFail() throws CatalogException {
    // Creating a cohort that references unknown sample ids must fail.
    int study = catalogManager.getStudyId("user@1000G:phase1");
    thrown.expect(CatalogException.class);
    catalogManager.createCohort(study, "MyCohort", Cohort.Type.FAMILY, "", Arrays.asList(23, 4, 5), null, sessionIdUser);
}
@Test
public void testUpdateCohort() throws CatalogException {
    // Rename a cohort and replace its sample set in a single modifyCohort call.
    int study = catalogManager.getStudyId("user@1000G:phase1");
    int s1 = catalogManager.createSample(study, "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int s2 = catalogManager.createSample(study, "SAMPLE_2", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int s3 = catalogManager.createSample(study, "SAMPLE_3", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int s4 = catalogManager.createSample(study, "SAMPLE_4", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int s5 = catalogManager.createSample(study, "SAMPLE_5", "", "", null, new QueryOptions(), sessionIdUser).first().getId();

    Cohort created = catalogManager.createCohort(study, "MyCohort", Cohort.Type.FAMILY, "", Arrays.asList(s1, s2, s3), null, sessionIdUser).first();
    assertEquals("MyCohort", created.getName());
    assertEquals(3, created.getSamples().size());
    assertTrue(created.getSamples().contains(s1));
    assertTrue(created.getSamples().contains(s2));
    assertTrue(created.getSamples().contains(s3));

    // Swap sample 2 out, samples 4 and 5 in, and rename the cohort.
    ObjectMap updates = new ObjectMap("samples", Arrays.asList(s1, s3, s4, s5)).append("name", "myModifiedCohort");
    Cohort updated = catalogManager.modifyCohort(created.getId(), updates, sessionIdUser).first();

    assertEquals("myModifiedCohort", updated.getName());
    assertEquals(4, updated.getSamples().size());
    assertTrue(updated.getSamples().contains(s1));
    assertTrue(updated.getSamples().contains(s3));
    assertTrue(updated.getSamples().contains(s4));
    assertTrue(updated.getSamples().contains(s5));
}
@Test
public void testDeleteCohort() throws CatalogException {
    // deleteCohort returns the deleted entity; fetching it again afterwards must throw.
    int study = catalogManager.getStudyId("user@1000G:phase1");
    int s1 = catalogManager.createSample(study, "SAMPLE_1", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int s2 = catalogManager.createSample(study, "SAMPLE_2", "", "", null, new QueryOptions(), sessionIdUser).first().getId();
    int s3 = catalogManager.createSample(study, "SAMPLE_3", "", "", null, new QueryOptions(), sessionIdUser).first().getId();

    Cohort created = catalogManager.createCohort(study, "MyCohort", Cohort.Type.FAMILY, "", Arrays.asList(s1, s2, s3), null, sessionIdUser).first();
    assertEquals("MyCohort", created.getName());
    assertEquals(3, created.getSamples().size());
    assertTrue(created.getSamples().contains(s1));
    assertTrue(created.getSamples().contains(s2));
    assertTrue(created.getSamples().contains(s3));

    Cohort deleted = catalogManager.deleteCohort(created.getId(), null, sessionIdUser).first();
    assertEquals(created.getId(), deleted.getId());

    thrown.expect(CatalogException.class);
    catalogManager.getCohort(created.getId(), null, sessionIdUser);
}
/* */
/* Test util methods */
/* */
/**
 * Wipes all catalog state between tests: drops the Mongo database and deletes the
 * on-disk root (and jobs) directories configured in {@code properties}.
 */
public static void clearCatalog(Properties properties) throws IOException {
    // Collect the configured DB hosts; a bare hostname defaults to Mongo's standard port 27017.
    List<DataStoreServerAddress> servers = new LinkedList<>();
    for (String hostPort : properties.getProperty(CatalogManager.CATALOG_DB_HOSTS, "localhost").split(",")) {
        if (hostPort.contains(":")) {
            String[] parts = hostPort.split(":");
            servers.add(new DataStoreServerAddress(parts[0], Integer.valueOf(parts[1])));
        } else {
            servers.add(new DataStoreServerAddress(hostPort, 27017));
        }
    }

    // Drop the catalog database entirely, then release the connection.
    String databaseName = properties.getProperty(CatalogManager.CATALOG_DB_DATABASE);
    MongoDataStoreManager mongoManager = new MongoDataStoreManager(servers);
    MongoDataStore db = mongoManager.get(databaseName);
    db.getDb().dropDatabase();
    mongoManager.close(databaseName);

    // Remove the catalog's on-disk root; the jobs directory only when it is configured and exists.
    Path rootdir = Paths.get(URI.create(properties.getProperty(CatalogManager.CATALOG_MAIN_ROOTDIR)));
    deleteFolderTree(rootdir.toFile());
    if (properties.containsKey(CatalogManager.CATALOG_JOBS_ROOTDIR)) {
        Path jobsDir = Paths.get(URI.create(properties.getProperty(CatalogManager.CATALOG_JOBS_ROOTDIR)));
        if (jobsDir.toFile().exists()) {
            deleteFolderTree(jobsDir.toFile());
        }
    }
}
/**
 * Recursively deletes a directory tree, depth-first: children before their parent.
 * Best-effort — individual {@link java.io.File#delete()} failures are ignored.
 */
public static void deleteFolderTree(java.io.File folder) {
    java.io.File[] children = folder.listFiles();
    // listFiles() returns null when folder is not a directory or cannot be read.
    if (children != null) {
        for (java.io.File child : children) {
            if (child.isDirectory()) {
                deleteFolderTree(child);
            } else {
                child.delete(); // return value intentionally ignored (best effort)
            }
        }
    }
    folder.delete();
}
} | apache-2.0 |
adaptris/interlok | interlok-core/src/main/java/com/adaptris/util/text/mime/MimeUtils.java | 855 | package com.adaptris.util.text.mime;
import static com.adaptris.util.text.mime.MimeConstants.HEADER_CONTENT_ENCODING;
import static org.apache.commons.lang3.StringUtils.isBlank;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import javax.mail.MessagingException;
import javax.mail.internet.InternetHeaders;
import javax.mail.internet.MimeUtility;
public class MimeUtils {

    /**
     * Encodes {@code data} with the given MIME content-transfer encoding and, when an
     * encoding is supplied, records it in the {@code Content-Transfer-Encoding} header
     * of {@code hdrs}.
     *
     * @param data the raw bytes to encode
     * @param encoding the MIME transfer encoding (e.g. base64); blank/null means pass-through
     * @param hdrs the header set to stamp with the chosen encoding
     * @return the encoded bytes
     * @throws MessagingException if the encoding is not supported by JavaMail
     * @throws IOException on write failure
     */
    public static byte[] encodeData(byte[] data, String encoding, InternetHeaders hdrs)
        throws MessagingException, IOException {
        if (!isBlank(encoding)) {
            hdrs.setHeader(HEADER_CONTENT_ENCODING, encoding);
        }
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        // MimeUtility.encode wraps the sink with an encoder stream; closing it flushes
        // any pending encoded bytes into the buffer.
        OutputStream encoder = MimeUtility.encode(buffer, encoding);
        try {
            encoder.write(data);
        } finally {
            encoder.close();
        }
        return buffer.toByteArray();
    }
}
| apache-2.0 |
cnagel/wcm-io-handler | media/src/test/java/io/wcm/handler/mediasource/dam/impl/DamAutoCroppingTest.java | 2026 | /*
* #%L
* wcm.io
* %%
* Copyright (C) 2019 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.handler.mediasource.dam.impl;
import static com.day.cq.dam.api.DamConstants.PREFIX_ASSET_WEB;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import com.day.cq.dam.api.Asset;
import com.day.cq.dam.api.Rendition;
import io.wcm.handler.media.testcontext.AppAemContext;
import io.wcm.testing.mock.aem.junit5.AemContext;
import io.wcm.testing.mock.aem.junit5.AemContextExtension;
@ExtendWith(AemContextExtension.class)
class DamAutoCroppingTest {

    private final AemContext context = AppAemContext.newAemContext();

    @Test
    @SuppressWarnings("null")
    void testGetWebRenditionForCropping() {
        // When the asset has a web rendition, the cropping logic must pick exactly that one.
        Asset assetWithWebRendition = context.create().asset("/content/dam/asset1.jpg", 160, 90, "image/jpeg");
        Rendition expected = context.create().assetRendition(assetWithWebRendition, PREFIX_ASSET_WEB + ".80.45.jpg", 80, 45, "image/jpeg");

        RenditionMetadata actual = DamAutoCropping.getWebRenditionForCropping(assetWithWebRendition);

        assertEquals(expected.getPath(), actual.getRendition().getPath());
    }

    @Test
    void testGetWebRenditionNotExisting() {
        // With no renditions at all the lookup must return null rather than throw.
        Asset assetWithoutRenditions = context.create().asset("/content/dam/asset2.jpg", 160, 90, "image/jpeg");
        assertNull(DamAutoCropping.getWebRenditionForCropping(assetWithoutRenditions));
    }
}
| apache-2.0 |
reinhard/daisy-integration | daisy-cocoon/src/main/java/com/indoqa/daisy/cocoon/controller/DocumentPartByPathController.java | 557 | package com.indoqa.daisy.cocoon.controller;
import org.apache.cocoon.rest.controller.annotation.SitemapParameter;
import org.apache.cocoon.rest.controller.response.RestResponse;
import org.apache.cocoon.rest.controller.response.URLResponse;
public class DocumentPartByPathController extends AbstractDocumentController {

    // Name of the requested document part, injected from the matched sitemap expression.
    @SitemapParameter
    private String part;

    /**
     * Redirects to the canonical id-based URL of the requested document part.
     */
    @Override
    public RestResponse sendSuccessResponse(String id) throws Exception {
        String target = "/default/doc/id/" + id + "/part/" + this.part + ".html";
        return new URLResponse(target);
    }
}
| apache-2.0 |
libris/librisxl | gui-whelktool/src/main/java/whelk/gui/SelectScriptPanel.java | 2204 | package whelk.gui;
import whelk.PortableScript;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
/**
 * Wizard card that lets the user pick a serialized {@link PortableScript} file
 * from disk. The "next" button stays disabled until a valid script is loaded.
 */
public class SelectScriptPanel extends WizardCard implements ActionListener
{
    Wizard wizard;
    JLabel description = new JLabel();

    public SelectScriptPanel(Wizard wizard)
    {
        super(wizard);
        this.wizard = wizard;

        Box column = Box.createVerticalBox();

        JButton openFileButton = new JButton("Öppna script-fil");
        openFileButton.addActionListener(this);
        openFileButton.setActionCommand("load");
        column.add(openFileButton);

        column.add(Box.createVerticalStrut(10));
        column.add(new JLabel("Valt script:"));
        column.add(description);

        this.add(column);
    }

    @Override
    void onShow(Object parameterFromPreviousCard)
    {
        // The next step is always the run card, but it stays disabled until a script is chosen.
        setNextCard(Wizard.RUN);
        disableNext();
    }

    @Override
    public void actionPerformed(ActionEvent actionEvent)
    {
        if (actionEvent.getActionCommand().equals("load"))
        {
            JFileChooser fileChooser = new JFileChooser();
            fileChooser.setPreferredSize(new Dimension(1024, 768));
            if (fileChooser.showOpenDialog(wizard) != JFileChooser.APPROVE_OPTION)
            {
                return; // user cancelled the dialog
            }
            File selectedFile = fileChooser.getSelectedFile();
            try (ObjectInputStream scriptStream = new ObjectInputStream(new FileInputStream(selectedFile)))
            {
                Object deserialized = scriptStream.readObject();
                // Only a PortableScript enables progression; any other payload is silently ignored.
                if (deserialized instanceof PortableScript)
                {
                    PortableScript loadedScript = (PortableScript) deserialized;
                    description.setText(loadedScript.comment);
                    setParameterForNextCard(deserialized);
                    enableNext();
                }
            }
            catch (IOException | ClassNotFoundException ioe)
            {
                Wizard.exitFatal(ioe.getMessage());
            }
        }
    }
}
| apache-2.0 |
researchgate/metrics-statsd | metrics3-statsd/src/main/java/com/readytalk/metrics/StatsDReporter.java | 11572 | /**
* Copyright (C) 2013 metrics-statsd contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.readytalk.metrics;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Metered;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import javax.annotation.concurrent.NotThreadSafe;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Locale;
import java.util.Map;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
/**
* A reporter which publishes metric values to a StatsD server.
*
* @see <a href="https://github.com/etsy/statsd">StatsD</a>
*/
@NotThreadSafe
public class StatsDReporter extends ScheduledReporter {

  private static final Logger LOG = LoggerFactory.getLogger(StatsDReporter.class);

  private final StatsD statsD;
  // Optional metric-name decorations; either may be null (see prefix(...) and suffix(...)).
  private final String prefix;
  private final String suffix;

  private StatsDReporter(final MetricRegistry registry,
                         final StatsD statsD,
                         final String prefix,
                         final String suffix,
                         final TimeUnit rateUnit,
                         final TimeUnit durationUnit,
                         final MetricFilter filter) {
    super(registry, "statsd-reporter", filter, rateUnit, durationUnit);
    this.statsD = statsD;
    this.prefix = prefix;
    this.suffix = suffix;
  }

  /**
   * Returns a new {@link Builder} for {@link StatsDReporter}.
   *
   * @param registry the registry to report
   * @return a {@link Builder} instance for a {@link StatsDReporter}
   */
  public static Builder forRegistry(final MetricRegistry registry) {
    return new Builder(registry);
  }

  /**
   * A builder for {@link StatsDReporter} instances. Defaults to not using a prefix or
   * suffix, converting rates to events/second, converting durations to milliseconds,
   * and not filtering metrics.
   */
  @NotThreadSafe
  public static final class Builder {
    private final MetricRegistry registry;
    private String prefix;
    private String suffix;
    private TimeUnit rateUnit;
    private TimeUnit durationUnit;
    private MetricFilter filter;

    private Builder(final MetricRegistry registry) {
      this.registry = registry;
      this.prefix = null;
      // Consistency fix: suffix is now initialized explicitly, like prefix.
      this.suffix = null;
      this.rateUnit = TimeUnit.SECONDS;
      this.durationUnit = TimeUnit.MILLISECONDS;
      this.filter = MetricFilter.ALL;
    }

    /**
     * Prefix all metric names with the given string.
     *
     * @param _prefix the prefix for all metric names
     * @return {@code this}
     */
    public Builder prefixedWith(@Nullable final String _prefix) {
      this.prefix = _prefix;
      return this;
    }

    /**
     * Suffix metric names with the given string. The suffix is substituted into any
     * metric name containing a {@code %s} placeholder; names without a placeholder
     * are left unchanged (see {@code StatsDReporter#suffix(String)}).
     *
     * @param _suffix the suffix for all metric names
     * @return {@code this}
     */
    public Builder suffixedWith(@Nullable final String _suffix) {
      this.suffix = _suffix;
      return this;
    }

    /**
     * Convert rates to the given time unit.
     *
     * @param _rateUnit a unit of time
     * @return {@code this}
     */
    public Builder convertRatesTo(final TimeUnit _rateUnit) {
      this.rateUnit = _rateUnit;
      return this;
    }

    /**
     * Convert durations to the given time unit.
     *
     * @param _durationUnit a unit of time
     * @return {@code this}
     */
    public Builder convertDurationsTo(final TimeUnit _durationUnit) {
      this.durationUnit = _durationUnit;
      return this;
    }

    /**
     * Only report metrics which match the given filter.
     *
     * @param _filter a {@link MetricFilter}
     * @return {@code this}
     */
    public Builder filter(final MetricFilter _filter) {
      this.filter = _filter;
      return this;
    }

    /**
     * Builds a {@link StatsDReporter} with the given properties, sending metrics to StatsD at the given host and port.
     *
     * @param host the hostname of the StatsD server.
     * @param port the port of the StatsD server. This is typically 8125.
     * @return a {@link StatsDReporter}
     */
    public StatsDReporter build(final String host, final int port) {
      return build(new StatsD(host, port));
    }

    /**
     * Builds a {@link StatsDReporter} with the given properties, sending metrics using the
     * given {@link StatsD} client.
     *
     * @param statsD a {@link StatsD} client
     * @return a {@link StatsDReporter}
     */
    public StatsDReporter build(final StatsD statsD) {
      return new StatsDReporter(registry, statsD, prefix, suffix, rateUnit, durationUnit, filter);
    }
  }

  /**
   * Connects to StatsD once per reporting interval, pushes all metric categories,
   * and always attempts to disconnect afterwards. Failures are logged, not rethrown,
   * so a transient StatsD outage does not kill the reporting schedule.
   */
  @Override
  @SuppressWarnings("rawtypes") //Metrics 3.0 interface specifies the raw Gauge type
  public void report(final SortedMap<String, Gauge> gauges,
                     final SortedMap<String, Counter> counters,
                     final SortedMap<String, Histogram> histograms,
                     final SortedMap<String, Meter> meters,
                     final SortedMap<String, Timer> timers) {
    try {
      statsD.connect();
      for (Map.Entry<String, Gauge> entry : gauges.entrySet()) {
        reportGauge(entry.getKey(), entry.getValue());
      }
      for (Map.Entry<String, Counter> entry : counters.entrySet()) {
        reportCounter(entry.getKey(), entry.getValue());
      }
      for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
        reportHistogram(entry.getKey(), entry.getValue());
      }
      for (Map.Entry<String, Meter> entry : meters.entrySet()) {
        reportMetered(entry.getKey(), entry.getValue());
      }
      for (Map.Entry<String, Timer> entry : timers.entrySet()) {
        reportTimer(entry.getKey(), entry.getValue());
      }
    } catch (IOException e) {
      // Bug fix: the format string had no placeholder, so the statsD argument was silently dropped.
      LOG.warn("Unable to report to StatsD {}", statsD, e);
    } finally {
      try {
        statsD.close();
      } catch (IOException e) {
        // Bug fix: placeholder added so the statsD argument is actually logged.
        LOG.debug("Error disconnecting from StatsD {}", statsD, e);
      }
    }
  }

  // Sends the timer's duration snapshot (converted via convertDuration) plus its rate metrics.
  private void reportTimer(final String name, final Timer timer) {
    final Snapshot snapshot = timer.getSnapshot();
    String suffixedName = suffix(name);
    statsD.send(prefix(suffixedName, "max"), formatNumber(convertDuration(snapshot.getMax())));
    statsD.send(prefix(suffixedName, "mean"), formatNumber(convertDuration(snapshot.getMean())));
    statsD.send(prefix(suffixedName, "min"), formatNumber(convertDuration(snapshot.getMin())));
    statsD.send(prefix(suffixedName, "stddev"), formatNumber(convertDuration(snapshot.getStdDev())));
    statsD.send(prefix(suffixedName, "p50"), formatNumber(convertDuration(snapshot.getMedian())));
    statsD.send(prefix(suffixedName, "p75"), formatNumber(convertDuration(snapshot.get75thPercentile())));
    statsD.send(prefix(suffixedName, "p95"), formatNumber(convertDuration(snapshot.get95thPercentile())));
    statsD.send(prefix(suffixedName, "p98"), formatNumber(convertDuration(snapshot.get98thPercentile())));
    statsD.send(prefix(suffixedName, "p99"), formatNumber(convertDuration(snapshot.get99thPercentile())));
    statsD.send(prefix(suffixedName, "p999"), formatNumber(convertDuration(snapshot.get999thPercentile())));
    reportMetered(name, timer);
  }

  // Sends the count and the rate metrics shared by meters and timers (rates converted via convertRate).
  private void reportMetered(final String name, final Metered meter) {
    String suffixedName = suffix(name);
    statsD.send(prefix(suffixedName, "samples"), formatNumber(meter.getCount()));
    statsD.send(prefix(suffixedName, "m1_rate"), formatNumber(convertRate(meter.getOneMinuteRate())));
    statsD.send(prefix(suffixedName, "m5_rate"), formatNumber(convertRate(meter.getFiveMinuteRate())));
    statsD.send(prefix(suffixedName, "m15_rate"), formatNumber(convertRate(meter.getFifteenMinuteRate())));
    statsD.send(prefix(suffixedName, "mean_rate"), formatNumber(convertRate(meter.getMeanRate())));
  }

  // Sends the histogram's count and its distribution snapshot (unconverted raw values).
  private void reportHistogram(final String name, final Histogram histogram) {
    final Snapshot snapshot = histogram.getSnapshot();
    String suffixedName = suffix(name);
    statsD.send(prefix(suffixedName, "samples"), formatNumber(histogram.getCount()));
    statsD.send(prefix(suffixedName, "max"), formatNumber(snapshot.getMax()));
    statsD.send(prefix(suffixedName, "mean"), formatNumber(snapshot.getMean()));
    statsD.send(prefix(suffixedName, "min"), formatNumber(snapshot.getMin()));
    statsD.send(prefix(suffixedName, "stddev"), formatNumber(snapshot.getStdDev()));
    statsD.send(prefix(suffixedName, "p50"), formatNumber(snapshot.getMedian()));
    statsD.send(prefix(suffixedName, "p75"), formatNumber(snapshot.get75thPercentile()));
    statsD.send(prefix(suffixedName, "p95"), formatNumber(snapshot.get95thPercentile()));
    statsD.send(prefix(suffixedName, "p98"), formatNumber(snapshot.get98thPercentile()));
    statsD.send(prefix(suffixedName, "p99"), formatNumber(snapshot.get99thPercentile()));
    statsD.send(prefix(suffixedName, "p999"), formatNumber(snapshot.get999thPercentile()));
  }

  private void reportCounter(final String name, final Counter counter) {
    statsD.send(prefix(suffix(name)), formatNumber(counter.getCount()));
  }

  // Gauges of unsupported value types produce null from format(...) and are skipped.
  @SuppressWarnings("rawtypes") //Metrics 3.0 passes us the raw Gauge type
  private void reportGauge(final String name, final Gauge gauge) {
    final String value = format(gauge.getValue());
    if (value != null) {
      statsD.send(prefix(suffix(name)), value);
    }
  }

  /**
   * Formats a gauge value of any supported numeric type; returns null for
   * non-numeric values so the caller can skip them.
   */
  @Nullable
  private String format(final Object o) {
    if (o instanceof Float) {
      return formatNumber(((Float) o).doubleValue());
    } else if (o instanceof Double) {
      return formatNumber((Double) o);
    } else if (o instanceof Byte) {
      return formatNumber(((Byte) o).longValue());
    } else if (o instanceof Short) {
      return formatNumber(((Short) o).longValue());
    } else if (o instanceof Integer) {
      return formatNumber(((Integer) o).longValue());
    } else if (o instanceof Long) {
      return formatNumber((Long) o);
    } else if (o instanceof BigInteger) {
      return formatNumber((BigInteger) o);
    } else if (o instanceof BigDecimal) {
      return formatNumber(((BigDecimal) o).doubleValue());
    }
    return null;
  }

  // Joins the configured prefix (if any) with the given name components, dot-separated.
  private String prefix(final String... components) {
    return MetricRegistry.name(prefix, components);
  }

  // Substitutes the configured suffix into a "%s" placeholder in the metric name, when both exist.
  private String suffix(String name) {
    if (suffix == null || suffix.isEmpty() || !name.contains("%s")) {
      return name;
    }
    return String.format(name, suffix);
  }

  private String formatNumber(final BigInteger n) {
    return String.valueOf(n);
  }

  private String formatNumber(final long n) {
    return Long.toString(n);
  }

  // Locale pinned to US so the decimal separator is always '.' regardless of the JVM default.
  private String formatNumber(final double v) {
    return String.format(Locale.US, "%2.2f", v);
  }
}
| apache-2.0 |
abhishekshivanna/samza | samza-core/src/main/java/org/apache/samza/coordinator/metadatastore/CoordinatorStreamStore.java | 11941 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.coordinator.metadatastore;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.samza.Partition;
import org.apache.samza.SamzaException;
import org.apache.samza.config.Config;
import org.apache.samza.coordinator.stream.CoordinatorStreamKeySerde;
import org.apache.samza.coordinator.stream.messages.CoordinatorStreamMessage;
import org.apache.samza.metadatastore.MetadataStore;
import org.apache.samza.metrics.MetricsRegistry;
import org.apache.samza.serializers.JsonSerde;
import org.apache.samza.serializers.Serde;
import org.apache.samza.system.IncomingMessageEnvelope;
import org.apache.samza.system.OutgoingMessageEnvelope;
import org.apache.samza.system.SystemAdmin;
import org.apache.samza.system.SystemConsumer;
import org.apache.samza.system.SystemFactory;
import org.apache.samza.system.SystemProducer;
import org.apache.samza.system.SystemStream;
import org.apache.samza.system.SystemStreamMetadata;
import org.apache.samza.system.SystemStreamMetadata.SystemStreamPartitionMetadata;
import org.apache.samza.system.SystemStreamPartition;
import org.apache.samza.system.SystemStreamPartitionIterator;
import org.apache.samza.util.CoordinatorStreamUtil;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An implementation of the {@link MetadataStore} interface where the metadata of the samza job is stored in coordinator stream.
*
* This class is thread safe.
*
* It is recommended to use {@link NamespaceAwareCoordinatorStreamStore}. This will enable the single CoordinatorStreamStore connection
* to be shared by the multiple {@link NamespaceAwareCoordinatorStreamStore} instances.
*/
public class CoordinatorStreamStore implements MetadataStore {
private static final Logger LOG = LoggerFactory.getLogger(CoordinatorStreamStore.class);
private static final String SOURCE = "SamzaContainer";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private final Config config;
private final SystemStream coordinatorSystemStream;
private final SystemStreamPartition coordinatorSystemStreamPartition;
private final SystemProducer systemProducer;
private final SystemConsumer systemConsumer;
private final SystemAdmin systemAdmin;
// Namespaced key to the message byte array.
private final Map<String, byte[]> messagesReadFromCoordinatorStream = new ConcurrentHashMap<>();
private final Object bootstrapLock = new Object();
private final AtomicBoolean isInitialized = new AtomicBoolean(false);
private SystemStreamPartitionIterator iterator;
/**
 * Creates a coordinator-stream-backed metadata store. Builds the producer, consumer
 * and admin for the coordinator system from the config; none of them are started
 * here — see {@link #init()}.
 *
 * @param config the job configuration used to locate the coordinator stream.
 * @param metricsRegistry registry passed to the system producer and consumer.
 */
public CoordinatorStreamStore(Config config, MetricsRegistry metricsRegistry) {
    this.config = config;
    this.coordinatorSystemStream = CoordinatorStreamUtil.getCoordinatorSystemStream(config);
    // The store always reads/writes partition 0 of the coordinator stream.
    this.coordinatorSystemStreamPartition = new SystemStreamPartition(coordinatorSystemStream, new Partition(0));
    SystemFactory systemFactory = CoordinatorStreamUtil.getCoordinatorSystemFactory(config);
    this.systemProducer = systemFactory.getProducer(this.coordinatorSystemStream.getSystem(), config, metricsRegistry);
    this.systemConsumer = systemFactory.getConsumer(this.coordinatorSystemStream.getSystem(), config, metricsRegistry);
    this.systemAdmin = systemFactory.getAdmin(this.coordinatorSystemStream.getSystem(), config);
}
/**
 * Test-only constructor that accepts pre-built system components instead of
 * creating them from the config via the coordinator system factory.
 */
@VisibleForTesting
protected CoordinatorStreamStore(Config config, SystemProducer systemProducer, SystemConsumer systemConsumer, SystemAdmin systemAdmin) {
    this.config = config;
    this.systemConsumer = systemConsumer;
    this.systemProducer = systemProducer;
    this.systemAdmin = systemAdmin;
    this.coordinatorSystemStream = CoordinatorStreamUtil.getCoordinatorSystemStream(config);
    // Same single-partition convention as the public constructor: partition 0.
    this.coordinatorSystemStreamPartition = new SystemStreamPartition(coordinatorSystemStream, new Partition(0));
}
/**
 * Starts the consumer/producer and bootstraps the in-memory cache from the
 * coordinator stream. Idempotent: the CAS guard makes repeated or concurrent
 * init() calls bootstrap only once.
 */
@Override
public void init() {
    if (isInitialized.compareAndSet(false, true)) {
        LOG.info("Starting the coordinator stream system consumer with config: {}.", config);
        registerConsumer();
        systemConsumer.start();
        systemProducer.register(SOURCE);
        systemProducer.start();
        iterator = new SystemStreamPartitionIterator(systemConsumer, coordinatorSystemStreamPartition);
        readMessagesFromCoordinatorStream();
    } else {
        // Bug fix: the partition argument was passed without a "{}" placeholder and silently dropped.
        LOG.info("Store for system stream partition: {} had already been initialized. Skipping.", coordinatorSystemStreamPartition);
    }
}
/**
 * Returns the latest value for {@code namespacedKey}, or null when absent.
 * Drains any newly published coordinator stream messages into the local cache
 * first, so reads reflect the latest stream contents.
 */
@Override
public byte[] get(String namespacedKey) {
    readMessagesFromCoordinatorStream();
    return messagesReadFromCoordinatorStream.get(namespacedKey);
}
/**
 * Publishes a (key, value) pair to partition 0 of the coordinator stream. The
 * namespaced key is split into its namespace and key parts, and serialized with
 * the namespace-aware coordinator stream key serde.
 */
@Override
public void put(String namespacedKey, byte[] value) {
    CoordinatorMessageKey messageKey = deserializeCoordinatorMessageKeyFromJson(namespacedKey);
    CoordinatorStreamKeySerde keySerde = new CoordinatorStreamKeySerde(messageKey.getNamespace());
    byte[] serializedKey = keySerde.toBytes(messageKey.getKey());

    OutgoingMessageEnvelope envelope =
        new OutgoingMessageEnvelope(coordinatorSystemStream, 0, serializedKey, value);
    systemProducer.send(SOURCE, envelope);
}
/**
 * Deletes a key by writing a tombstone.
 */
@Override
public void delete(String namespacedKey) {
    // Since kafka doesn't support individual message deletion, store value as null for a namespacedKey to delete.
    // readMessagesFromCoordinatorStream() treats a null payload as a tombstone and removes the key.
    put(namespacedKey, null);
}
/**
 * Returns an unmodifiable view over the live cache after draining any new
 * coordinator stream messages. The view may change as later reads pull in
 * more messages.
 */
@Override
public Map<String, byte[]> all() {
    readMessagesFromCoordinatorStream();
    return Collections.unmodifiableMap(messagesReadFromCoordinatorStream);
}
/**
 * Drains all currently available coordinator stream messages into the in-memory
 * map. Guarded by bootstrapLock so only one thread advances the shared iterator
 * at a time.
 */
private void readMessagesFromCoordinatorStream() {
    synchronized (bootstrapLock) {
        while (iterator.hasNext()) {
            IncomingMessageEnvelope envelope = iterator.next();
            byte[] keyAsBytes = (byte[]) envelope.getKey();
            // Keys are JSON arrays; CoordinatorStreamMessage extracts type and key
            // from the array (exact element layout defined by that class).
            Serde<List<?>> serde = new JsonSerde<>();
            Object[] keyArray = serde.fromBytes(keyAsBytes).toArray();
            CoordinatorStreamMessage coordinatorStreamMessage = new CoordinatorStreamMessage(keyArray, new HashMap<>());
            String namespacedKey = serializeCoordinatorMessageKeyToJson(coordinatorStreamMessage.getType(), coordinatorStreamMessage.getKey());
            if (envelope.getMessage() != null) {
                messagesReadFromCoordinatorStream.put(namespacedKey, (byte[]) envelope.getMessage());
            } else {
                // Null payload is a tombstone (see delete()): drop the key from the local view.
                messagesReadFromCoordinatorStream.remove(namespacedKey);
            }
        }
    }
}
/**
 * Stops the admin, producer and consumer. Failures are logged and swallowed so
 * shutdown can proceed.
 */
@Override
public void close() {
    try {
        // Bug fix: config was passed without a "{}" placeholder and silently dropped.
        LOG.info("Stopping the coordinator stream system consumer with config: {}.", config);
        systemAdmin.stop();
        systemProducer.stop();
        systemConsumer.stop();
    } catch (Exception e) {
        LOG.error("Exception occurred when closing the metadata store:", e);
    }
}
/**
 * Flushes any buffered writes through the underlying system producer.
 * Unlike {@link #close()}, a failure here is considered fatal: it is logged
 * and then rethrown wrapped in a {@code SamzaException}.
 */
@Override
public void flush() {
  try {
    systemProducer.flush(SOURCE);
  } catch (Exception e) {
    String errorMessage = "Exception occurred when flushing the metadata store:";
    LOG.error(errorMessage, e);
    throw new SamzaException(errorMessage, e);
  }
}
/**
 * <p>
 * Fetches the metadata of the topic partition of coordinator stream. Registers the oldest offset
 * for the topic partition of coordinator stream with the coordinator system consumer.
 * </p>
 *
 * @throws NullPointerException if metadata for the coordinator stream or for its
 *         partition cannot be found (enforced by the Preconditions checks below)
 */
private void registerConsumer() {
LOG.debug("Attempting to register system stream partition: {}", coordinatorSystemStreamPartition);
String streamName = coordinatorSystemStreamPartition.getStream();
// Look up metadata for just the coordinator stream.
Map<String, SystemStreamMetadata> systemStreamMetadataMap = systemAdmin.getSystemStreamMetadata(Sets.newHashSet(streamName));
SystemStreamMetadata systemStreamMetadata = systemStreamMetadataMap.get(streamName);
Preconditions.checkNotNull(systemStreamMetadata, String.format("System stream metadata does not exist for stream: %s.", streamName));
SystemStreamPartitionMetadata systemStreamPartitionMetadata = systemStreamMetadata.getSystemStreamPartitionMetadata().get(coordinatorSystemStreamPartition.getPartition());
Preconditions.checkNotNull(systemStreamPartitionMetadata, String.format("System stream partition metadata does not exist for: %s.", coordinatorSystemStreamPartition));
// Start from the oldest available offset so all previously written metadata
// is replayed into the local cache on bootstrap.
String startingOffset = systemStreamPartitionMetadata.getOldestOffset();
LOG.info("Registering system stream partition: {} with offset: {}.", coordinatorSystemStreamPartition, startingOffset);
systemConsumer.register(coordinatorSystemStreamPartition, startingOffset);
}
/**
 * Serializes a {@link CoordinatorMessageKey} built from the given type and key
 * into its json string representation.
 *
 * @param type the type of the coordinator message.
 * @param key the key associated with the type
 * @return the CoordinatorMessageKey serialized to a json string.
 * @throws SamzaException if json serialization fails
 */
public static String serializeCoordinatorMessageKeyToJson(String type, String key) {
  // Constructing the composite key cannot throw IOException, so it stays
  // outside the try block; only the json write is guarded.
  CoordinatorMessageKey coordinatorMessageKey = new CoordinatorMessageKey(key, type);
  try {
    return OBJECT_MAPPER.writeValueAsString(coordinatorMessageKey);
  } catch (IOException e) {
    throw new SamzaException(String.format("Exception occurred when serializing metadata for type: %s, key: %s", type, key), e);
  }
}
/**
 * Deserializes a json string produced by
 * {@link #serializeCoordinatorMessageKeyToJson(String, String)} back into a
 * {@link CoordinatorMessageKey}.
 *
 * @param coordinatorMsgKeyAsJson the serialized CoordinatorMessageKey in json format.
 * @return the deserialized CoordinatorMessageKey.
 * @throws SamzaException if the json cannot be parsed
 */
public static CoordinatorMessageKey deserializeCoordinatorMessageKeyFromJson(String coordinatorMsgKeyAsJson) {
  try {
    return OBJECT_MAPPER.readValue(coordinatorMsgKeyAsJson, CoordinatorMessageKey.class);
  } catch (IOException e) {
    String errorMessage = String.format("Exception occurred when deserializing the coordinatorMsgKey: %s", coordinatorMsgKeyAsJson);
    throw new SamzaException(errorMessage, e);
  }
}
/**
 * <p>
 * Represents the key of a message in the coordinator stream.
 *
 * Coordinator message key is composite. It has both the type of the message
 * and the key associated with the type in it.
 * </p>
 *
 * Immutable; instances round-trip through json via
 * {@code serializeCoordinatorMessageKeyToJson} / {@code deserializeCoordinatorMessageKeyFromJson}.
 */
public static class CoordinatorMessageKey {
// Represents the key associated with the type
private final String key;
// Represents the type of the message.
private final String namespace;
// @JsonProperty binds the constructor parameters by name for Jackson
// deserialization; do not rename the json property values.
CoordinatorMessageKey(@JsonProperty("key") String key,
@JsonProperty("namespace") String namespace) {
this.key = key;
this.namespace = namespace;
}
/** Returns the key associated with the namespace/type. */
public String getKey() {
return this.key;
}
/** Returns the namespace, i.e. the type of the coordinator message. */
public String getNamespace() {
return this.namespace;
}
}
}
| apache-2.0 |
lirui-apache/hive | standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TruncateTableRequest.java | 33559 | /**
* Autogenerated by Thrift Compiler (0.14.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.14.1)")
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class TruncateTableRequest implements org.apache.thrift.TBase<TruncateTableRequest, TruncateTableRequest._Fields>, java.io.Serializable, Cloneable, Comparable<TruncateTableRequest> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TruncateTableRequest");
private static final org.apache.thrift.protocol.TField DB_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("dbName", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField TABLE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tableName", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final org.apache.thrift.protocol.TField PART_NAMES_FIELD_DESC = new org.apache.thrift.protocol.TField("partNames", org.apache.thrift.protocol.TType.LIST, (short)3);
private static final org.apache.thrift.protocol.TField WRITE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("writeId", org.apache.thrift.protocol.TType.I64, (short)4);
private static final org.apache.thrift.protocol.TField VALID_WRITE_ID_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("validWriteIdList", org.apache.thrift.protocol.TType.STRING, (short)5);
private static final org.apache.thrift.protocol.TField ENVIRONMENT_CONTEXT_FIELD_DESC = new org.apache.thrift.protocol.TField("environmentContext", org.apache.thrift.protocol.TType.STRUCT, (short)6);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new TruncateTableRequestStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new TruncateTableRequestTupleSchemeFactory();
private @org.apache.thrift.annotation.Nullable java.lang.String dbName; // required
private @org.apache.thrift.annotation.Nullable java.lang.String tableName; // required
private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> partNames; // optional
private long writeId; // optional
private @org.apache.thrift.annotation.Nullable java.lang.String validWriteIdList; // optional
private @org.apache.thrift.annotation.Nullable EnvironmentContext environmentContext; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
DB_NAME((short)1, "dbName"),
TABLE_NAME((short)2, "tableName"),
PART_NAMES((short)3, "partNames"),
WRITE_ID((short)4, "writeId"),
VALID_WRITE_ID_LIST((short)5, "validWriteIdList"),
ENVIRONMENT_CONTEXT((short)6, "environmentContext");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // DB_NAME
return DB_NAME;
case 2: // TABLE_NAME
return TABLE_NAME;
case 3: // PART_NAMES
return PART_NAMES;
case 4: // WRITE_ID
return WRITE_ID;
case 5: // VALID_WRITE_ID_LIST
return VALID_WRITE_ID_LIST;
case 6: // ENVIRONMENT_CONTEXT
return ENVIRONMENT_CONTEXT;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __WRITEID_ISSET_ID = 0;
private byte __isset_bitfield = 0;
private static final _Fields optionals[] = {_Fields.PART_NAMES,_Fields.WRITE_ID,_Fields.VALID_WRITE_ID_LIST,_Fields.ENVIRONMENT_CONTEXT};
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("dbName", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("tableName", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.PART_NAMES, new org.apache.thrift.meta_data.FieldMetaData("partNames", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
tmpMap.put(_Fields.WRITE_ID, new org.apache.thrift.meta_data.FieldMetaData("writeId", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.VALID_WRITE_ID_LIST, new org.apache.thrift.meta_data.FieldMetaData("validWriteIdList", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.ENVIRONMENT_CONTEXT, new org.apache.thrift.meta_data.FieldMetaData("environmentContext", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT , "EnvironmentContext")));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TruncateTableRequest.class, metaDataMap);
}
public TruncateTableRequest() {
this.writeId = -1L;
}
public TruncateTableRequest(
java.lang.String dbName,
java.lang.String tableName)
{
this();
this.dbName = dbName;
this.tableName = tableName;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public TruncateTableRequest(TruncateTableRequest other) {
__isset_bitfield = other.__isset_bitfield;
if (other.isSetDbName()) {
this.dbName = other.dbName;
}
if (other.isSetTableName()) {
this.tableName = other.tableName;
}
if (other.isSetPartNames()) {
java.util.List<java.lang.String> __this__partNames = new java.util.ArrayList<java.lang.String>(other.partNames);
this.partNames = __this__partNames;
}
this.writeId = other.writeId;
if (other.isSetValidWriteIdList()) {
this.validWriteIdList = other.validWriteIdList;
}
if (other.isSetEnvironmentContext()) {
this.environmentContext = new EnvironmentContext(other.environmentContext);
}
}
public TruncateTableRequest deepCopy() {
return new TruncateTableRequest(this);
}
@Override
public void clear() {
this.dbName = null;
this.tableName = null;
this.partNames = null;
this.writeId = -1L;
this.validWriteIdList = null;
this.environmentContext = null;
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getDbName() {
return this.dbName;
}
public void setDbName(@org.apache.thrift.annotation.Nullable java.lang.String dbName) {
this.dbName = dbName;
}
public void unsetDbName() {
this.dbName = null;
}
/** Returns true if field dbName is set (has been assigned a value) and false otherwise */
public boolean isSetDbName() {
return this.dbName != null;
}
public void setDbNameIsSet(boolean value) {
if (!value) {
this.dbName = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getTableName() {
return this.tableName;
}
public void setTableName(@org.apache.thrift.annotation.Nullable java.lang.String tableName) {
this.tableName = tableName;
}
public void unsetTableName() {
this.tableName = null;
}
/** Returns true if field tableName is set (has been assigned a value) and false otherwise */
public boolean isSetTableName() {
return this.tableName != null;
}
public void setTableNameIsSet(boolean value) {
if (!value) {
this.tableName = null;
}
}
public int getPartNamesSize() {
return (this.partNames == null) ? 0 : this.partNames.size();
}
@org.apache.thrift.annotation.Nullable
public java.util.Iterator<java.lang.String> getPartNamesIterator() {
return (this.partNames == null) ? null : this.partNames.iterator();
}
public void addToPartNames(java.lang.String elem) {
if (this.partNames == null) {
this.partNames = new java.util.ArrayList<java.lang.String>();
}
this.partNames.add(elem);
}
@org.apache.thrift.annotation.Nullable
public java.util.List<java.lang.String> getPartNames() {
return this.partNames;
}
public void setPartNames(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> partNames) {
this.partNames = partNames;
}
public void unsetPartNames() {
this.partNames = null;
}
/** Returns true if field partNames is set (has been assigned a value) and false otherwise */
public boolean isSetPartNames() {
return this.partNames != null;
}
public void setPartNamesIsSet(boolean value) {
if (!value) {
this.partNames = null;
}
}
public long getWriteId() {
return this.writeId;
}
public void setWriteId(long writeId) {
this.writeId = writeId;
setWriteIdIsSet(true);
}
public void unsetWriteId() {
__isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __WRITEID_ISSET_ID);
}
/** Returns true if field writeId is set (has been assigned a value) and false otherwise */
public boolean isSetWriteId() {
return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __WRITEID_ISSET_ID);
}
public void setWriteIdIsSet(boolean value) {
__isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __WRITEID_ISSET_ID, value);
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getValidWriteIdList() {
return this.validWriteIdList;
}
public void setValidWriteIdList(@org.apache.thrift.annotation.Nullable java.lang.String validWriteIdList) {
this.validWriteIdList = validWriteIdList;
}
public void unsetValidWriteIdList() {
this.validWriteIdList = null;
}
/** Returns true if field validWriteIdList is set (has been assigned a value) and false otherwise */
public boolean isSetValidWriteIdList() {
return this.validWriteIdList != null;
}
public void setValidWriteIdListIsSet(boolean value) {
if (!value) {
this.validWriteIdList = null;
}
}
@org.apache.thrift.annotation.Nullable
public EnvironmentContext getEnvironmentContext() {
return this.environmentContext;
}
public void setEnvironmentContext(@org.apache.thrift.annotation.Nullable EnvironmentContext environmentContext) {
this.environmentContext = environmentContext;
}
public void unsetEnvironmentContext() {
this.environmentContext = null;
}
/** Returns true if field environmentContext is set (has been assigned a value) and false otherwise */
public boolean isSetEnvironmentContext() {
return this.environmentContext != null;
}
public void setEnvironmentContextIsSet(boolean value) {
if (!value) {
this.environmentContext = null;
}
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
case DB_NAME:
if (value == null) {
unsetDbName();
} else {
setDbName((java.lang.String)value);
}
break;
case TABLE_NAME:
if (value == null) {
unsetTableName();
} else {
setTableName((java.lang.String)value);
}
break;
case PART_NAMES:
if (value == null) {
unsetPartNames();
} else {
setPartNames((java.util.List<java.lang.String>)value);
}
break;
case WRITE_ID:
if (value == null) {
unsetWriteId();
} else {
setWriteId((java.lang.Long)value);
}
break;
case VALID_WRITE_ID_LIST:
if (value == null) {
unsetValidWriteIdList();
} else {
setValidWriteIdList((java.lang.String)value);
}
break;
case ENVIRONMENT_CONTEXT:
if (value == null) {
unsetEnvironmentContext();
} else {
setEnvironmentContext((EnvironmentContext)value);
}
break;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case DB_NAME:
return getDbName();
case TABLE_NAME:
return getTableName();
case PART_NAMES:
return getPartNames();
case WRITE_ID:
return getWriteId();
case VALID_WRITE_ID_LIST:
return getValidWriteIdList();
case ENVIRONMENT_CONTEXT:
return getEnvironmentContext();
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case DB_NAME:
return isSetDbName();
case TABLE_NAME:
return isSetTableName();
case PART_NAMES:
return isSetPartNames();
case WRITE_ID:
return isSetWriteId();
case VALID_WRITE_ID_LIST:
return isSetValidWriteIdList();
case ENVIRONMENT_CONTEXT:
return isSetEnvironmentContext();
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that instanceof TruncateTableRequest)
return this.equals((TruncateTableRequest)that);
return false;
}
public boolean equals(TruncateTableRequest that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_dbName = true && this.isSetDbName();
boolean that_present_dbName = true && that.isSetDbName();
if (this_present_dbName || that_present_dbName) {
if (!(this_present_dbName && that_present_dbName))
return false;
if (!this.dbName.equals(that.dbName))
return false;
}
boolean this_present_tableName = true && this.isSetTableName();
boolean that_present_tableName = true && that.isSetTableName();
if (this_present_tableName || that_present_tableName) {
if (!(this_present_tableName && that_present_tableName))
return false;
if (!this.tableName.equals(that.tableName))
return false;
}
boolean this_present_partNames = true && this.isSetPartNames();
boolean that_present_partNames = true && that.isSetPartNames();
if (this_present_partNames || that_present_partNames) {
if (!(this_present_partNames && that_present_partNames))
return false;
if (!this.partNames.equals(that.partNames))
return false;
}
boolean this_present_writeId = true && this.isSetWriteId();
boolean that_present_writeId = true && that.isSetWriteId();
if (this_present_writeId || that_present_writeId) {
if (!(this_present_writeId && that_present_writeId))
return false;
if (this.writeId != that.writeId)
return false;
}
boolean this_present_validWriteIdList = true && this.isSetValidWriteIdList();
boolean that_present_validWriteIdList = true && that.isSetValidWriteIdList();
if (this_present_validWriteIdList || that_present_validWriteIdList) {
if (!(this_present_validWriteIdList && that_present_validWriteIdList))
return false;
if (!this.validWriteIdList.equals(that.validWriteIdList))
return false;
}
boolean this_present_environmentContext = true && this.isSetEnvironmentContext();
boolean that_present_environmentContext = true && that.isSetEnvironmentContext();
if (this_present_environmentContext || that_present_environmentContext) {
if (!(this_present_environmentContext && that_present_environmentContext))
return false;
if (!this.environmentContext.equals(that.environmentContext))
return false;
}
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + ((isSetDbName()) ? 131071 : 524287);
if (isSetDbName())
hashCode = hashCode * 8191 + dbName.hashCode();
hashCode = hashCode * 8191 + ((isSetTableName()) ? 131071 : 524287);
if (isSetTableName())
hashCode = hashCode * 8191 + tableName.hashCode();
hashCode = hashCode * 8191 + ((isSetPartNames()) ? 131071 : 524287);
if (isSetPartNames())
hashCode = hashCode * 8191 + partNames.hashCode();
hashCode = hashCode * 8191 + ((isSetWriteId()) ? 131071 : 524287);
if (isSetWriteId())
hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(writeId);
hashCode = hashCode * 8191 + ((isSetValidWriteIdList()) ? 131071 : 524287);
if (isSetValidWriteIdList())
hashCode = hashCode * 8191 + validWriteIdList.hashCode();
hashCode = hashCode * 8191 + ((isSetEnvironmentContext()) ? 131071 : 524287);
if (isSetEnvironmentContext())
hashCode = hashCode * 8191 + environmentContext.hashCode();
return hashCode;
}
@Override
public int compareTo(TruncateTableRequest other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.compare(isSetDbName(), other.isSetDbName());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetDbName()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dbName, other.dbName);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetTableName(), other.isSetTableName());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetTableName()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tableName, other.tableName);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetPartNames(), other.isSetPartNames());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetPartNames()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.partNames, other.partNames);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetWriteId(), other.isSetWriteId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetWriteId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.writeId, other.writeId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetValidWriteIdList(), other.isSetValidWriteIdList());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetValidWriteIdList()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.validWriteIdList, other.validWriteIdList);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetEnvironmentContext(), other.isSetEnvironmentContext());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEnvironmentContext()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.environmentContext, other.environmentContext);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("TruncateTableRequest(");
boolean first = true;
sb.append("dbName:");
if (this.dbName == null) {
sb.append("null");
} else {
sb.append(this.dbName);
}
first = false;
if (!first) sb.append(", ");
sb.append("tableName:");
if (this.tableName == null) {
sb.append("null");
} else {
sb.append(this.tableName);
}
first = false;
if (isSetPartNames()) {
if (!first) sb.append(", ");
sb.append("partNames:");
if (this.partNames == null) {
sb.append("null");
} else {
sb.append(this.partNames);
}
first = false;
}
if (isSetWriteId()) {
if (!first) sb.append(", ");
sb.append("writeId:");
sb.append(this.writeId);
first = false;
}
if (isSetValidWriteIdList()) {
if (!first) sb.append(", ");
sb.append("validWriteIdList:");
if (this.validWriteIdList == null) {
sb.append("null");
} else {
sb.append(this.validWriteIdList);
}
first = false;
}
if (isSetEnvironmentContext()) {
if (!first) sb.append(", ");
sb.append("environmentContext:");
if (this.environmentContext == null) {
sb.append("null");
} else {
sb.append(this.environmentContext);
}
first = false;
}
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
if (!isSetDbName()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'dbName' is unset! Struct:" + toString());
}
if (!isSetTableName()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'tableName' is unset! Struct:" + toString());
}
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class TruncateTableRequestStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public TruncateTableRequestStandardScheme getScheme() {
return new TruncateTableRequestStandardScheme();
}
}
private static class TruncateTableRequestStandardScheme extends org.apache.thrift.scheme.StandardScheme<TruncateTableRequest> {
public void read(org.apache.thrift.protocol.TProtocol iprot, TruncateTableRequest struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // DB_NAME
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.dbName = iprot.readString();
struct.setDbNameIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // TABLE_NAME
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.tableName = iprot.readString();
struct.setTableNameIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // PART_NAMES
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list126 = iprot.readListBegin();
struct.partNames = new java.util.ArrayList<java.lang.String>(_list126.size);
@org.apache.thrift.annotation.Nullable java.lang.String _elem127;
for (int _i128 = 0; _i128 < _list126.size; ++_i128)
{
_elem127 = iprot.readString();
struct.partNames.add(_elem127);
}
iprot.readListEnd();
}
struct.setPartNamesIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 4: // WRITE_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.writeId = iprot.readI64();
struct.setWriteIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 5: // VALID_WRITE_ID_LIST
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.validWriteIdList = iprot.readString();
struct.setValidWriteIdListIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 6: // ENVIRONMENT_CONTEXT
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.environmentContext = new EnvironmentContext();
struct.environmentContext.read(iprot);
struct.setEnvironmentContextIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, TruncateTableRequest struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.dbName != null) {
oprot.writeFieldBegin(DB_NAME_FIELD_DESC);
oprot.writeString(struct.dbName);
oprot.writeFieldEnd();
}
if (struct.tableName != null) {
oprot.writeFieldBegin(TABLE_NAME_FIELD_DESC);
oprot.writeString(struct.tableName);
oprot.writeFieldEnd();
}
if (struct.partNames != null) {
if (struct.isSetPartNames()) {
oprot.writeFieldBegin(PART_NAMES_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.partNames.size()));
for (java.lang.String _iter129 : struct.partNames)
{
oprot.writeString(_iter129);
}
oprot.writeListEnd();
}
oprot.writeFieldEnd();
}
}
if (struct.isSetWriteId()) {
oprot.writeFieldBegin(WRITE_ID_FIELD_DESC);
oprot.writeI64(struct.writeId);
oprot.writeFieldEnd();
}
if (struct.validWriteIdList != null) {
if (struct.isSetValidWriteIdList()) {
oprot.writeFieldBegin(VALID_WRITE_ID_LIST_FIELD_DESC);
oprot.writeString(struct.validWriteIdList);
oprot.writeFieldEnd();
}
}
if (struct.environmentContext != null) {
if (struct.isSetEnvironmentContext()) {
oprot.writeFieldBegin(ENVIRONMENT_CONTEXT_FIELD_DESC);
struct.environmentContext.write(oprot);
oprot.writeFieldEnd();
}
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class TruncateTableRequestTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public TruncateTableRequestTupleScheme getScheme() {
return new TruncateTableRequestTupleScheme();
}
}
// Thrift-generated compact (tuple-protocol) serializer for TruncateTableRequest.
// Wire layout: the two required fields (dbName, tableName) are written first,
// then a 4-bit presence bitmap, then each optional field that is set, in the
// fixed order partNames, writeId, validWriteIdList, environmentContext.
// The order and bit positions are the wire contract — do not reorder.
private static class TruncateTableRequestTupleScheme extends org.apache.thrift.scheme.TupleScheme<TruncateTableRequest> {
  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, TruncateTableRequest struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    // Required fields are written unconditionally, without a presence bit.
    oprot.writeString(struct.dbName);
    oprot.writeString(struct.tableName);
    // Record which optional fields are present; bit index = field order below.
    java.util.BitSet optionals = new java.util.BitSet();
    if (struct.isSetPartNames()) {
      optionals.set(0);
    }
    if (struct.isSetWriteId()) {
      optionals.set(1);
    }
    if (struct.isSetValidWriteIdList()) {
      optionals.set(2);
    }
    if (struct.isSetEnvironmentContext()) {
      optionals.set(3);
    }
    oprot.writeBitSet(optionals, 4);
    if (struct.isSetPartNames()) {
      {
        // Lists are encoded as a size prefix followed by the elements.
        oprot.writeI32(struct.partNames.size());
        for (java.lang.String _iter130 : struct.partNames)
        {
          oprot.writeString(_iter130);
        }
      }
    }
    if (struct.isSetWriteId()) {
      oprot.writeI64(struct.writeId);
    }
    if (struct.isSetValidWriteIdList()) {
      oprot.writeString(struct.validWriteIdList);
    }
    if (struct.isSetEnvironmentContext()) {
      // Nested struct serializes itself.
      struct.environmentContext.write(oprot);
    }
  }
  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, TruncateTableRequest struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    // Mirror of write(): required fields first, then the presence bitmap.
    struct.dbName = iprot.readString();
    struct.setDbNameIsSet(true);
    struct.tableName = iprot.readString();
    struct.setTableNameIsSet(true);
    java.util.BitSet incoming = iprot.readBitSet(4);
    if (incoming.get(0)) {
      {
        org.apache.thrift.protocol.TList _list131 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING);
        struct.partNames = new java.util.ArrayList<java.lang.String>(_list131.size);
        @org.apache.thrift.annotation.Nullable java.lang.String _elem132;
        for (int _i133 = 0; _i133 < _list131.size; ++_i133)
        {
          _elem132 = iprot.readString();
          struct.partNames.add(_elem132);
        }
      }
      struct.setPartNamesIsSet(true);
    }
    if (incoming.get(1)) {
      struct.writeId = iprot.readI64();
      struct.setWriteIdIsSet(true);
    }
    if (incoming.get(2)) {
      struct.validWriteIdList = iprot.readString();
      struct.setValidWriteIdListIsSet(true);
    }
    if (incoming.get(3)) {
      struct.environmentContext = new EnvironmentContext();
      struct.environmentContext.read(iprot);
      struct.setEnvironmentContextIsSet(true);
    }
  }
}
// Picks the standard- or tuple-protocol scheme for the given protocol.
// STANDARD_SCHEME_FACTORY and TUPLE_SCHEME_FACTORY are static fields of the
// enclosing generated class (declared outside this view).
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
  return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-alexaforbusiness/src/main/java/com/amazonaws/services/alexaforbusiness/model/transform/GetNetworkProfileRequestMarshaller.java | 2087 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.alexaforbusiness.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.alexaforbusiness.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* GetNetworkProfileRequestMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class GetNetworkProfileRequestMarshaller {

    /** Marshalling binding for the NetworkProfileArn member (written into the payload). */
    private static final MarshallingInfo<String> NETWORKPROFILEARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("NetworkProfileArn").build();

    /** Shared, stateless singleton instance. */
    private static final GetNetworkProfileRequestMarshaller instance = new GetNetworkProfileRequestMarshaller();

    /**
     * Returns the shared marshaller instance.
     */
    public static GetNetworkProfileRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @param getNetworkProfileRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol-level marshaller receiving the bound values
     */
    public void marshall(GetNetworkProfileRequest getNetworkProfileRequest, ProtocolMarshaller protocolMarshaller) {
        if (getNetworkProfileRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getNetworkProfileRequest.getNetworkProfileArn(), NETWORKPROFILEARN_BINDING);
        } catch (Exception cause) {
            // Wrap any marshalling failure in the SDK's client exception type, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + cause.getMessage(), cause);
        }
    }
}
| apache-2.0 |
oehme/analysing-gradle-performance | my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p282/Production5650.java | 1891 | package org.gradle.test.performance.mediummonolithicjavaproject.p282;
/**
 * Plain data holder exposing ten independent string properties
 * (property0 .. property9) through conventional getter/setter pairs.
 */
public class Production5650 {

    // Backing fields, grouped together for readability.
    private String property0;
    private String property1;
    private String property2;
    private String property3;
    private String property4;
    private String property5;
    private String property6;
    private String property7;
    private String property8;
    private String property9;

    public String getProperty0() {
        return this.property0;
    }

    public void setProperty0(String value) {
        this.property0 = value;
    }

    public String getProperty1() {
        return this.property1;
    }

    public void setProperty1(String value) {
        this.property1 = value;
    }

    public String getProperty2() {
        return this.property2;
    }

    public void setProperty2(String value) {
        this.property2 = value;
    }

    public String getProperty3() {
        return this.property3;
    }

    public void setProperty3(String value) {
        this.property3 = value;
    }

    public String getProperty4() {
        return this.property4;
    }

    public void setProperty4(String value) {
        this.property4 = value;
    }

    public String getProperty5() {
        return this.property5;
    }

    public void setProperty5(String value) {
        this.property5 = value;
    }

    public String getProperty6() {
        return this.property6;
    }

    public void setProperty6(String value) {
        this.property6 = value;
    }

    public String getProperty7() {
        return this.property7;
    }

    public void setProperty7(String value) {
        this.property7 = value;
    }

    public String getProperty8() {
        return this.property8;
    }

    public void setProperty8(String value) {
        this.property8 = value;
    }

    public String getProperty9() {
        return this.property9;
    }

    public void setProperty9(String value) {
        this.property9 = value;
    }
}
gaojice/diskviewer | src/main/java/com/gaojice/diskviewer/processor/FileProcessor.java | 1628 | package com.gaojice.diskviewer.processor;
import java.io.File;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import com.gaojice.diskviewer.dao.DiskFileDao;
import com.gaojice.diskviewer.entity.DiskFile;
/**
 * Recursive directory-scan task: persists one DiskFile row for its target
 * file or directory and, for directories, fans out one new task per child
 * entry onto the shared executor.
 */
public class FileProcessor implements Runnable {

    /** DAO used to persist one DiskFile row per visited entry. */
    private DiskFileDao diskFileDao;

    /** The file or directory this task processes. */
    private File root;

    /** Persisted parent entry the new row is attached to. */
    private DiskFile p;

    // Fixed: previously declared with its fully-qualified name even though the
    // type is imported at the top of the file.
    /** Executor on which one child task per directory entry is scheduled. */
    private ThreadPoolTaskExecutor taskExecutor;

    public FileProcessor(DiskFileDao diskFileDao, File root, DiskFile p, ThreadPoolTaskExecutor taskExecutor) {
        super();
        this.diskFileDao = diskFileDao;
        this.root = root;
        this.p = p;
        this.taskExecutor = taskExecutor;
    }

    public void setTaskExecutor(ThreadPoolTaskExecutor taskExecutor) {
        this.taskExecutor = taskExecutor;
    }

    public void setDiskFileDao(DiskFileDao diskFileDao) {
        this.diskFileDao = diskFileDao;
    }

    public void setRoot(File root) {
        this.root = root;
    }

    public void setP(DiskFile p) {
        this.p = p;
    }

    /**
     * Persists the row for {@code root}. Directories are stored under their
     * absolute path with type "D" and size 0, then each child is scheduled as
     * a separate task; plain files keep the simple name, type "F", and their
     * byte length.
     */
    public void run() {
        DiskFile diskFile = new DiskFile();
        diskFile.setParent(p);
        diskFile.setName(root.getName());
        if (root.isDirectory()) {
            // Directories override the simple name with the absolute path.
            diskFile.setName(root.getAbsolutePath());
            diskFile.setType("D");
            diskFile.setSize(0L);
            diskFileDao.insert(diskFile);
            // listFiles() returns null on I/O error or permission denial.
            File[] children = root.listFiles();
            if (children != null) {
                for (File child : children) {
                    FileProcessor fileProcessor = new FileProcessor(diskFileDao, child, diskFile, taskExecutor);
                    taskExecutor.execute(fileProcessor);
                }
            }
        } else {
            diskFile.setType("F");
            diskFile.setSize(root.length());
            diskFileDao.insert(diskFile);
        }
    }
}
| apache-2.0 |
tkao1000/pinot | pinot-common/src/main/java/com/linkedin/pinot/common/config/TableCustomConfig.java | 2332 | /**
* Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.common.config;
import java.lang.reflect.Field;
import java.util.Map;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Holds the free-form custom key/value section of a Pinot table config.
 * Unknown JSON properties are ignored during deserialization.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class TableCustomConfig {
  // Fixed: the logger was previously created for
  // SegmentsValidationAndRetentionConfig.class (copy-paste bug), which
  // mis-attributed this class's log output.
  private static final Logger LOGGER = LoggerFactory.getLogger(TableCustomConfig.class);

  /** Custom-config key toggling message-based refresh behavior. */
  public static final String MESSAGE_BASED_REFRESH_KEY = "messageBasedRefresh";

  private Map<String, String> customConfigs;

  public Map<String, String> getCustomConfigs() {
    return customConfigs;
  }

  public void setCustomConfigs(Map<String, String> customConfigs) {
    this.customConfigs = customConfigs;
  }

  /**
   * Renders every field declared in this class (via reflection) as
   * "name: value" lines. Superclass fields are intentionally excluded.
   */
  @Override
  public String toString() {
    final StringBuilder result = new StringBuilder();
    final String newLine = System.getProperty("line.separator");
    result.append(this.getClass().getName());
    result.append(" Object {");
    result.append(newLine);
    // Determine fields declared in this class only (no fields of superclass).
    final Field[] fields = this.getClass().getDeclaredFields();
    // Print field names paired with their values.
    for (final Field field : fields) {
      result.append("  ");
      try {
        result.append(field.getName());
        result.append(": ");
        // Requires access to the (possibly private) field.
        result.append(field.get(this));
      } catch (final IllegalAccessException ex) {
        // Parameterized logging: no manual isWarnEnabled() guard or string
        // concatenation needed; the rendered message is unchanged.
        LOGGER.warn("Caught exception while processing field {}", field, ex);
      }
      result.append(newLine);
    }
    result.append("}");
    return result.toString();
  }
}
| apache-2.0 |
motorina0/flowable-engine | modules/flowable-ui-idm/flowable-ui-idm-conf/src/main/java/org/flowable/app/servlet/ApiDispatcherServletConfiguration.java | 2846 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.app.servlet;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
import org.springframework.web.servlet.i18n.SessionLocaleResolver;
import org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping;
import java.util.List;
/**
 * Spring MVC configuration for the API dispatcher servlet: scans the REST
 * API/exception packages, disables suffix-pattern matching, and wires the
 * shared ObjectMapper into the Jackson message converter.
 */
@Configuration
@ComponentScan(value = {"org.flowable.app.rest.api", "org.flowable.app.rest.exception"})
@EnableAsync
public class ApiDispatcherServletConfiguration extends WebMvcConfigurationSupport {

    @Autowired
    protected ObjectMapper objectMapper;

    @Autowired
    protected Environment environment;

    @Bean
    public SessionLocaleResolver localeResolver() {
        return new SessionLocaleResolver();
    }

    @Bean
    public RequestMappingHandlerMapping requestMappingHandlerMapping() {
        // Strict URL matching: no ".json"-style suffix matching and no
        // semicolon-content stripping (matrix variables pass through).
        RequestMappingHandlerMapping mapping = new RequestMappingHandlerMapping();
        mapping.setUseSuffixPatternMatch(false);
        mapping.setRemoveSemicolonContent(false);
        mapping.setInterceptors(getInterceptors());
        return mapping;
    }

    @Override
    public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
        addDefaultHttpMessageConverters(converters);
        // Replace the default ObjectMapper of the first Jackson converter with
        // the application-wide one; only the first match is adjusted.
        for (HttpMessageConverter<?> converter : converters) {
            if (!(converter instanceof MappingJackson2HttpMessageConverter)) {
                continue;
            }
            ((MappingJackson2HttpMessageConverter) converter).setObjectMapper(objectMapper);
            break;
        }
    }
}
| apache-2.0 |
haku/Onosendai | src/main/java/com/vaguehope/onosendai/payload/AddCommentPayload.java | 1811 | package com.vaguehope.onosendai.payload;
import android.content.Context;
import android.content.Intent;
import com.vaguehope.onosendai.config.Account;
import com.vaguehope.onosendai.model.Meta;
import com.vaguehope.onosendai.model.MetaType;
import com.vaguehope.onosendai.model.Tweet;
import com.vaguehope.onosendai.ui.PostActivity;
import com.vaguehope.onosendai.util.EqualHelper;
/**
 * Payload representing the "Add Comment" action on a tweet: builds an Intent
 * that opens PostActivity preconfigured to reply to the owner tweet from the
 * given account.
 */
public class AddCommentPayload extends Payload {

	private final Account account;

	public AddCommentPayload (final Account account, final Tweet ownerTweet) {
		super(ownerTweet, null, PayloadType.COMMENT);
		this.account = account;
	}

	@Override
	public String getTitle () {
		return "Add Comment"; //ES
	}

	@Override
	public boolean intentable () {
		return true;
	}

	@Override
	public Intent toIntent (final Context context) {
		// Launch PostActivity with the account and the tweet being replied to.
		final Intent postIntent = new Intent(context, PostActivity.class);
		postIntent.putExtra(PostActivity.ARG_ACCOUNT_ID, this.account.getId());
		postIntent.putExtra(PostActivity.ARG_IN_REPLY_TO_UID, getOwnerTweet().getUid());
		postIntent.putExtra(PostActivity.ARG_IN_REPLY_TO_SID, getOwnerTweet().getSid());
		// Pass along an alternate reply-to id when the tweet carries one.
		final Meta replyToMeta = getOwnerTweet().getFirstMetaOfType(MetaType.REPLYTO);
		if (replyToMeta != null) {
			postIntent.putExtra(PostActivity.ARG_ALT_REPLY_TO_SID, replyToMeta.getData());
		}
		return postIntent;
	}

	@Override
	public int hashCode () {
		// Hash on the account id only; this stays consistent with equals()
		// because equal payloads necessarily share an equal account.
		if (this.account == null || this.account.getId() == null) {
			return 0;
		}
		return this.account.getId().hashCode();
	}

	@Override
	public boolean equals (final Object o) {
		if (this == o) return true;
		// instanceof is false for null, so no separate null check is needed.
		if (!(o instanceof AddCommentPayload)) return false;
		final AddCommentPayload other = (AddCommentPayload) o;
		return EqualHelper.equal(getOwnerTweet(), other.getOwnerTweet())
				&& EqualHelper.equal(this.account, other.account);
	}
}
| apache-2.0 |
PathVisio/pathvisio | modules/org.pathvisio.core/src/org/pathvisio/core/view/DefaultLinkAnchorDelegate.java | 2844 | /*******************************************************************************
* PathVisio, a tool for data visualization and analysis using biological pathways
* Copyright 2006-2019 BiGCaT Bioinformatics
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package org.pathvisio.core.view;
/**
 * Utility class for creating and destroying LinkAnchors around a rectangular element.
 */
public class DefaultLinkAnchorDelegate extends AbstractLinkAnchorDelegate
{
	private final Graphics parent;
	private final VPathway canvas;

	DefaultLinkAnchorDelegate(Graphics parent)
	{
		this.parent = parent;
		this.canvas = parent.getDrawing();
	}

	// Cached per-side anchor counts; -1 means "not built yet / invalidated".
	private int numLinkanchorsH = -1;
	private int numLinkanchorsV = -1;

	// Sides shorter than this (model units) get a single anchor instead of several.
	private static final int MIN_SIZE_LA = 25;

	// Anchors per (large enough) side when no "NumberOfAnchors" property is set.
	private static final int DEFAULT_NUM_ANCHORS = 3;

	/**
	 * (Re)builds the link anchors around the element if the required count
	 * changed since the last build. Groups configured to disallow links get
	 * no anchors at all.
	 */
	public void showLinkAnchors()
	{
		if (parent instanceof Group &&
			parent.gdata.getGroupStyle().isDisallowLinks())
		{
			return;
		}
		// Number of link anchors depends on the size of the object:
		// if the width/height is large enough there are numAnchors anchors
		// per side, otherwise only one.
		String anchorsCnt = parent.gdata.getDynamicProperty("NumberOfAnchors");
		int numAnchors = DEFAULT_NUM_ANCHORS;
		if (anchorsCnt != null) {
			try {
				numAnchors = Integer.parseInt(anchorsCnt);
			} catch (NumberFormatException e) {
				// Fixed: a malformed dynamic property used to propagate an
				// unchecked NumberFormatException out of rendering; fall back
				// to the default anchor count instead.
			}
		}
		int numH = parent.gdata.getMWidth() < MIN_SIZE_LA ? 1 : numAnchors;
		int numV = parent.gdata.getMHeight() < MIN_SIZE_LA ? 1 : numAnchors;
		if(numH != numLinkanchorsH || numV != numLinkanchorsV)
		{
			linkAnchors.clear();
			// Spread anchors evenly along each side in relative [-1, 1] coordinates.
			double deltaH = 2.0/(numH + 1);
			for(int i = 1; i <= numH; i++) {
				linkAnchors.add(new LinkAnchor(canvas, parent, parent.gdata, -1 + i * deltaH, -1));
				linkAnchors.add(new LinkAnchor(canvas, parent, parent.gdata, -1 + i * deltaH, 1));
			}
			double deltaV = 2.0/(numV + 1);
			for(int i = 1; i <= numV; i++) {
				linkAnchors.add(new LinkAnchor(canvas, parent, parent.gdata, -1, -1 + i * deltaV));
				linkAnchors.add(new LinkAnchor(canvas, parent, parent.gdata, 1, -1 + i * deltaV));
			}
			numLinkanchorsH = numH;
			numLinkanchorsV = numV;
		}
	}

	/** Removes all anchors and invalidates the cached counts so the next show rebuilds. */
	public void hideLinkAnchors()
	{
		super.hideLinkAnchors();
		numLinkanchorsV = -1;
		numLinkanchorsH = -1;
	}
}
| apache-2.0 |
blackcathacker/kc.preclean | coeus-it/src/test/java/org/kuali/kra/questionnaire/question/QuestionAuthorizationServiceTest.java | 2180 | /*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.questionnaire.question;
import org.junit.Test;
import org.kuali.coeus.common.questionnaire.impl.question.QuestionAuthorizationService;
import org.kuali.coeus.sys.framework.service.KcServiceLocator;
import org.kuali.kra.infrastructure.PermissionConstants;
import org.kuali.kra.test.infrastructure.KcIntegrationTestBase;
import org.kuali.rice.krad.UserSession;
import org.kuali.rice.krad.util.GlobalVariables;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests verifying that questionnaire view/modify permissions are
 * granted or denied per user principal.
 */
public class QuestionAuthorizationServiceTest extends KcIntegrationTestBase {

    /** Switches the active user session and asks the service for the given permission. */
    private boolean hasPermission(String principalName, String permissionName) {
        GlobalVariables.setUserSession(new UserSession(principalName));
        return KcServiceLocator.getService(QuestionAuthorizationService.class).hasPermission(permissionName);
    }

    @Test
    public void permissionModifyQuestionTest() {
        assertTrue(hasPermission("quickstart", PermissionConstants.MODIFY_QUESTION));
        assertFalse(hasPermission("jtester", PermissionConstants.MODIFY_QUESTION));
    }

    @Test
    public void permissionViewQuestionTest() {
        assertTrue(hasPermission("jtester", PermissionConstants.VIEW_QUESTION));
        assertFalse(hasPermission("rrabbit", PermissionConstants.VIEW_QUESTION));
    }
}
| apache-2.0 |
cocainism/Sham | FinalWeb/src/main/java/finalWeb/controller/MainController.java | 140 | package finalWeb.controller;
import org.springframework.stereotype.Controller;
// Placeholder Spring MVC controller: registered as a @Controller bean but
// currently declares no request-handling methods.
@Controller
public class MainController {
}
| apache-2.0 |
obidea/semantika | src/main/java/com/obidea/semantika/database/datatype/SqlTypeToXmlType.java | 2978 | /*
* Copyright (c) 2013-2015 Josef Hardi <josef.hardi@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.obidea.semantika.database.datatype;
import java.sql.Types;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import com.obidea.semantika.datatype.DataType;
/**
 * A mapping specification from SQL datatypes to XML Schema datatypes.
 * Source: {@link http://www.w3.org/2001/sw/rdb2rdf/wiki/Mapping_SQL_datatypes_to_XML_Schema_datatypes}
 */
public final class SqlTypeToXmlType
{
   // Immutable JDBC-type -> XML-Schema-type lookup table, built once at class load.
   // (Previously a mutable HashMap typed as the concrete class.)
   private static final Map<Integer, String> mTypeMapping;

   static {
      Map<Integer, String> typeMapping = new HashMap<Integer, String>();
      // Binary types are intentionally unsupported for now:
      // Types.BINARY / Types.JAVA_OBJECT would map to xsd:hexBinary.
      typeMapping.put(Types.NUMERIC, DataType.DECIMAL);
      typeMapping.put(Types.DECIMAL, DataType.DECIMAL);
      typeMapping.put(Types.BIGINT, DataType.LONG);
      typeMapping.put(Types.INTEGER, DataType.INTEGER);
      typeMapping.put(Types.SMALLINT, DataType.SHORT);
      typeMapping.put(Types.TINYINT, DataType.BYTE);
      typeMapping.put(Types.REAL, DataType.FLOAT);
      typeMapping.put(Types.FLOAT, DataType.FLOAT);
      typeMapping.put(Types.DOUBLE, DataType.DOUBLE);
      typeMapping.put(Types.CHAR, DataType.STRING);
      typeMapping.put(Types.VARCHAR, DataType.STRING);
      typeMapping.put(Types.NCHAR, DataType.STRING);
      typeMapping.put(Types.NVARCHAR, DataType.STRING);
      typeMapping.put(Types.LONGVARCHAR, DataType.STRING);
      typeMapping.put(Types.LONGNVARCHAR, DataType.STRING);
      typeMapping.put(Types.DATE, DataType.DATE);
      typeMapping.put(Types.TIME, DataType.TIME);
      typeMapping.put(Types.TIMESTAMP, DataType.DATE_TIME);
      typeMapping.put(Types.BOOLEAN, DataType.BOOLEAN);
      typeMapping.put(Types.BIT, DataType.BOOLEAN);
      typeMapping.put(Types.OTHER, DataType.STRING);
      mTypeMapping = Collections.unmodifiableMap(typeMapping);
   }

   private SqlTypeToXmlType()
   {
      // Utility class; not meant to be instantiated.
   }

   /**
    * Return the corresponding XML type given the SQL type.
    *
    * @param sqlType
    *           The JDBC SQL type (see {@link java.sql.Types}).
    * @return a URI string representing the XML type.
    * @throws UnsupportedSqlDataTypeException
    *            if the data type has no corresponding XML type.
    */
   public static String get(int sqlType)
   {
      String toReturn = mTypeMapping.get(sqlType);
      if (toReturn == null) {
         throw new UnsupportedSqlDataTypeException(sqlType);
      }
      return toReturn;
   }
}
| apache-2.0 |
jbb-project/jbb | domain-services/jbb-system/src/main/java/org/jbb/system/impl/database/provider/H2InMemoryManager.java | 1603 | /*
* Copyright (C) 2017 the original author or authors.
*
* This file is part of jBB Application Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*/
package org.jbb.system.impl.database.provider;
import org.jbb.lib.db.DbProperties;
import org.jbb.lib.db.provider.H2InMemoryProvider;
import org.jbb.system.api.database.DatabaseProvider;
import org.jbb.system.api.database.DatabaseSettings;
import org.jbb.system.api.database.h2.H2InMemorySettings;
import org.springframework.stereotype.Component;
import lombok.RequiredArgsConstructor;
/**
 * Database-provider manager for the H2 in-memory backend: reads and writes
 * the in-memory database name through the application's DbProperties.
 */
@Component
@RequiredArgsConstructor
public class H2InMemoryManager implements DatabaseProviderManager<H2InMemorySettings> {

    /** Property value identifying the H2 in-memory provider. */
    public static final String PROVIDER_PROPERTY_VALUE = H2InMemoryProvider.PROVIDER_VALUE;

    private final DbProperties dbProperties;

    @Override
    public DatabaseProvider getProviderName() {
        return DatabaseProvider.H2_IN_MEMORY;
    }

    /** Builds a settings snapshot from the currently stored database properties. */
    @Override
    public H2InMemorySettings getCurrentProviderSettings() {
        String databaseName = dbProperties.h2InMemoryDbName();
        return H2InMemorySettings.builder()
                .databaseName(databaseName)
                .build();
    }

    /** Persists the in-memory database name carried by the supplied settings. */
    @Override
    public void setProviderSettings(DatabaseSettings newDatabaseSettings) {
        dbProperties.setProperty(DbProperties.H2_IN_MEMORY_DB_NAME_KEY,
                newDatabaseSettings.getH2InMemorySettings().getDatabaseName());
    }
}
| apache-2.0 |
ZeitnotSWE/sHike | Codice/Web/src/main/java/com/wearit/shike/web/model/dao/weather/WeatherDao.java | 1519 | package com.wearit.shike.web.model.dao.weather;
import java.util.List;
import com.wearit.shike.web.model.weather.TrackWeather;
import com.wearit.shike.web.model.weather.Weather;
/**
 * DAO for reading and writing weather records and track/weather associations.
 */
public interface WeatherDao {
	/**
	 * Lists every record stored in the Weather table.
	 *
	 * @return all stored weather entries
	 */
	public List<Weather> getAllWeather();
	/**
	 * Adds weather information to the Weather table.
	 *
	 * @param tw
	 *           weather association to add to the database
	 */
	public void addTrackWeather(TrackWeather tw);
	/**
	 * Retrieves the weather information of a track.
	 *
	 * @param idt
	 *           id of the virtual track whose weather is requested
	 * @return the weather entries associated with the requested track
	 */
	public List<Weather> getTrackWeather(int idt);
	/**
	 * Retrieves a single weather entry of a track.
	 *
	 * @param _idt
	 *           id of the virtual track whose weather is requested
	 * @param date
	 *           date of the weather forecast
	 * @return the single weather entry associated with the requested track, or
	 *         null when the track has no weather entries
	 */
	public Weather getSingleTrackWeather(int _idt, long date);
	/**
	 * Removes a record from the Weather table.
	 *
	 * @param id
	 *           id of the weather entry to delete
	 * @param order
	 *           forecastOrder of the weather entry to delete
	 */
	public void deleteWeather(int id, long order);
}
Chasego/kafka | streams/src/test/java/org/apache/kafka/streams/integration/StoreQueryIntegrationTest.java | 23416 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.integration;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.QueryableStoreType;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.test.IntegrationTest;
import org.apache.kafka.test.TestCondition;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.getStore;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.startApplicationAndWaitUntilRunning;
import static org.apache.kafka.streams.state.QueryableStoreTypes.keyValueStore;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
@Category({IntegrationTest.class})
public class StoreQueryIntegrationTest {
private static final Logger LOG = LoggerFactory.getLogger(StoreQueryIntegrationTest.class);
private static final int NUM_BROKERS = 1;
// NOTE(review): not referenced within this view; presumably used elsewhere in
// the class for assigning application-server ports — confirm before removing.
private static int port = 0;
private static final String INPUT_TOPIC_NAME = "input-topic";
private static final String TABLE_NAME = "source-table";
// Embedded Kafka cluster; started in before() and stopped in after().
public final EmbeddedKafkaCluster cluster = new EmbeddedKafkaCluster(NUM_BROKERS);
@Rule
public TestName testName = new TestName();
// Streams instances registered here are closed in after().
private final List<KafkaStreams> streamsToCleanup = new ArrayList<>();
private final MockTime mockTime = cluster.time;
// Starts the embedded cluster and creates the input topic with 2 partitions
// and replication factor 1.
@Before
public void before() throws InterruptedException, IOException {
    cluster.start();
    cluster.createTopic(INPUT_TOPIC_NAME, 2, 1);
}
// Closes every KafkaStreams instance created by the test, then stops the
// embedded cluster.
@After
public void after() {
    for (final KafkaStreams kafkaStreams : streamsToCleanup) {
        kafkaStreams.close();
    }
    cluster.stop();
}
// Verifies the default store-query behavior: with two Streams instances, a
// key is only readable from the instance hosting the ACTIVE task for that
// key's partition; the standby/other instance returns null.
@Test
public void shouldQueryOnlyActivePartitionStoresByDefault() throws Exception {
    final int batch1NumMessages = 100;
    final int key = 1;
    // Released once per record that reaches the end of the topology; used to
    // detect that the whole batch was processed.
    final Semaphore semaphore = new Semaphore(0);

    final StreamsBuilder builder = new StreamsBuilder();
    builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
            Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
                    .withCachingDisabled())
            .toStream()
            .peek((k, v) -> semaphore.release());

    final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
    final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
    final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);

    startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));

    produceValueRange(key, 0, batch1NumMessages);

    // Assert that all messages in the first batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    // Retry loop (until(...) is defined elsewhere in this class): rebalances
    // may transiently leave a thread in PARTITIONS_ASSIGNED, in which case the
    // store lookup throws and the check is retried.
    until(() -> {
        final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);

        final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
        final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(TABLE_NAME, kafkaStreams1, queryableStoreType);
        final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(TABLE_NAME, kafkaStreams2, queryableStoreType);

        // NOTE(review): instance identity is inferred from the active host's
        // port parity — presumably instance 1 is configured with an odd port;
        // confirm against streamsConfiguration().
        final boolean kafkaStreams1IsActive = (keyQueryMetadata.activeHost().port() % 2) == 1;

        try {
            // Only the active instance serves the key; the other returns null.
            if (kafkaStreams1IsActive) {
                assertThat(store1.get(key), is(notNullValue()));
                assertThat(store2.get(key), is(nullValue()));
            } else {
                assertThat(store1.get(key), is(nullValue()));
                assertThat(store2.get(key), is(notNullValue()));
            }
            return true;
        } catch (final InvalidStateStoreException exception) {
            assertThat(
                exception.getMessage(),
                containsString("Cannot get state store source-table because the stream thread is PARTITIONS_ASSIGNED, not RUNNING")
            );
            LOG.info("Streams wasn't running. Will try again.");
            return false;
        }
    });
}
/**
 * Verifies that, by default (no stale reads), only the active host of a partition serves
 * queries: the active instance returns the key for its own partition, while asking an
 * instance for a partition it does not actively host raises {@code InvalidStateStoreException}.
 */
@Test
public void shouldQuerySpecificActivePartitionStores() throws Exception {
    final int batch1NumMessages = 100;
    final int key = 1;
    final Semaphore semaphore = new Semaphore(0);
    final StreamsBuilder builder = new StreamsBuilder();
    // Materialize the input topic into a key-value store; every processed record releases a
    // permit so the test can wait until the whole batch has been consumed.
    builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
            Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
                    .withCachingDisabled())
            .toStream()
            .peek((k, v) -> semaphore.release());
    final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
    final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
    final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
    startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
    produceValueRange(key, 0, batch1NumMessages);
    // Assert that all messages in the first batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    until(() -> {
        final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);
        // the partition the key belongs to (the partitioner above pins it to partition 0)
        final int keyPartition = keyQueryMetadata.partition();
        // a partition the key does NOT belong to
        final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
        final boolean kafkaStreams1IsActive = (keyQueryMetadata.activeHost().port() % 2) == 1;
        // Resolve once which instance is active for the key's partition instead of duplicating
        // the same assertions in two mirror-image branches.
        final KafkaStreams activeStreams = kafkaStreams1IsActive ? kafkaStreams1 : kafkaStreams2;
        final KafkaStreams otherStreams = kafkaStreams1IsActive ? kafkaStreams2 : kafkaStreams1;
        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> storeQueryParam =
                StoreQueryParameters.<ReadOnlyKeyValueStore<Integer, Integer>>fromNameAndType(TABLE_NAME, keyValueStore())
                        .withPartition(keyPartition);
        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> storeQueryParam2 =
                StoreQueryParameters.<ReadOnlyKeyValueStore<Integer, Integer>>fromNameAndType(TABLE_NAME, keyValueStore())
                        .withPartition(keyDontBelongPartition);
        try {
            // Fetching the store now happens inside the try so that a not-yet-RUNNING rebalance
            // is caught below and retried, instead of failing the test immediately.
            final ReadOnlyKeyValueStore<Integer, Integer> activeStore = getStore(activeStreams, storeQueryParam);
            assertThat(activeStore, is(notNullValue()));
            // The active store for the key's partition serves the key.
            assertThat(activeStore.get(key), is(notNullValue()));
            // The other instance is active for the OTHER partition: it serves that partition,
            // but the key is not stored in it.
            assertThat(getStore(otherStreams, storeQueryParam2).get(key), is(nullValue()));
            // Asking the active instance for a partition it does not actively host must fail.
            final InvalidStateStoreException exception =
                    assertThrows(InvalidStateStoreException.class, () -> getStore(activeStreams, storeQueryParam2).get(key));
            // Partition number and store name were previously hard-coded ("partition 1",
            // "source-table"); interpolating keeps the assertion correct if either changes.
            assertThat(
                    exception.getMessage(),
                    containsString("The specified partition " + keyDontBelongPartition + " for store " + TABLE_NAME + " does not exist.")
            );
            return true;
        } catch (final InvalidStateStoreException exception) {
            assertThat(
                    exception.getMessage(),
                    containsString("Cannot get state store " + TABLE_NAME + " because the stream thread is PARTITIONS_ASSIGNED, not RUNNING")
            );
            LOG.info("Streams wasn't running. Will try again.");
            return false;
        }
    });
}
/**
 * Verifies that, with stale reads enabled, both the active replica and the standby replica
 * of the materialized table can serve a lookup for the key.
 */
@Test
public void shouldQueryAllStalePartitionStores() throws Exception {
    final int numMessages = 100;
    final int key = 1;
    final Semaphore processedRecords = new Semaphore(0);

    // Build a topology that materializes the input topic and signals every processed record.
    final StreamsBuilder topology = new StreamsBuilder();
    topology.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
            Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
                    .withCachingDisabled())
            .toStream()
            .peek((k, v) -> processedRecords.release());

    final KafkaStreams instanceOne = createKafkaStreams(topology, streamsConfiguration());
    final KafkaStreams instanceTwo = createKafkaStreams(topology, streamsConfiguration());
    startApplicationAndWaitUntilRunning(Arrays.asList(instanceOne, instanceTwo), Duration.ofSeconds(60));

    produceValueRange(key, 0, numMessages);
    // Every record of the batch must have been processed before querying.
    assertThat(processedRecords.tryAcquire(numMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));

    final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> storeType = keyValueStore();
    // Stale stores enabled (third argument): active as well as standby must find the key.
    TestUtils.waitForCondition(
            () -> getStore(TABLE_NAME, instanceOne, true, storeType).get(key) != null,
            "store1 cannot find results for key");
    TestUtils.waitForCondition(
            () -> getStore(TABLE_NAME, instanceTwo, true, storeType).get(key) != null,
            "store2 cannot find results for key");
}
/**
 * Verifies that, with stale reads enabled and a specific partition requested, both the
 * active and the standby instance serve the key for its own partition, while querying the
 * wrong partition returns nothing on either instance.
 */
@Test
public void shouldQuerySpecificStalePartitionStores() throws Exception {
    final int batch1NumMessages = 100;
    final int key = 1;
    final Semaphore semaphore = new Semaphore(0);
    final StreamsBuilder builder = new StreamsBuilder();
    // Materialize the input topic; every processed record releases a permit so the test can
    // wait until the whole batch has been consumed.
    builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
            Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
                    .withCachingDisabled())
            .toStream()
            .peek((k, v) -> semaphore.release());
    final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
    final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
    final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
    startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
    produceValueRange(key, 0, batch1NumMessages);
    // Assert that all messages in the first batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);
    // the partition the key belongs to (the partitioner above pins it to partition 0)
    final int keyPartition = keyQueryMetadata.partition();
    // a partition the key does NOT belong to
    final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
    final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
    // Assert that both active and standby are able to query for the key on its own partition
    final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param = StoreQueryParameters
            .fromNameAndType(TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyPartition);
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
        return store1.get(key) != null;
    }, "store1 cannot find results for key");
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
        return store2.get(key) != null;
    }, "store2 cannot find results for key");
    final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam = StoreQueryParameters
            .fromNameAndType(TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyDontBelongPartition);
    final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
    final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);
    // Assert that the key is absent from the wrong partition's store on both instances
    assertThat(store3.get(key), is(nullValue()));
    assertThat(store4.get(key), is(nullValue()));
}
/**
 * Same scenario as {@code shouldQuerySpecificStalePartitionStores}, but with two stream
 * threads per instance, to verify that stale per-partition queries also work when state is
 * spread across multiple threads within one instance.
 */
@Test
public void shouldQuerySpecificStalePartitionStoresMultiStreamThreads() throws Exception {
    final int batch1NumMessages = 100;
    final int key = 1;
    final Semaphore semaphore = new Semaphore(0);
    final int numStreamThreads = 2;
    final StreamsBuilder builder = new StreamsBuilder();
    // Materialize the input topic; every processed record releases a permit so the test can
    // wait until the whole batch has been consumed.
    builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
            Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
                    .withCachingDisabled())
            .toStream()
            .peek((k, v) -> semaphore.release());
    // Each instance gets its own config object because streamsConfiguration() assigns a
    // distinct application-server port per call.
    final Properties streamsConfiguration1 = streamsConfiguration();
    streamsConfiguration1.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final Properties streamsConfiguration2 = streamsConfiguration();
    streamsConfiguration2.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration1);
    final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration2);
    final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
    startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
    // Sanity check: both instances actually run more than one stream thread.
    assertTrue(kafkaStreams1.localThreadsMetadata().size() > 1);
    assertTrue(kafkaStreams2.localThreadsMetadata().size() > 1);
    produceValueRange(key, 0, batch1NumMessages);
    // Assert that all messages in the first batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, new IntegerSerializer());
    // the partition the key belongs to
    final int keyPartition = keyQueryMetadata.partition();
    // a partition the key does NOT belong to
    final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
    final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
    // Assert that both active and standby are able to query for the key on its own partition
    final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param = StoreQueryParameters
            .fromNameAndType(TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyPartition);
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
        return store1.get(key) != null;
    }, "store1 cannot find results for key");
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
        return store2.get(key) != null;
    }, "store2 cannot find results for key");
    final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam = StoreQueryParameters
            .fromNameAndType(TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyDontBelongPartition);
    final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
    final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);
    // Assert that the key is absent from the wrong partition's store on both instances
    assertThat(store3.get(key), is(nullValue()));
    assertThat(store4.get(key), is(nullValue()));
}
/**
 * Repeatedly evaluates {@code condition}, polling every 500 ms, until it holds or
 * {@code IntegrationTestUtils.DEFAULT_TIMEOUT} elapses.
 *
 * @throws AssertionError if the condition is still not met when the deadline expires
 * @throws RuntimeException wrapping any checked exception thrown by the condition
 */
private static void until(final TestCondition condition) {
    final long deadline = System.currentTimeMillis() + IntegrationTestUtils.DEFAULT_TIMEOUT;
    boolean success = false;
    while (!success && System.currentTimeMillis() < deadline) {
        try {
            success = condition.conditionMet();
            if (!success) {
                // back off before retrying (previously the sleep also ran after success)
                Thread.sleep(500L);
            }
        } catch (final RuntimeException e) {
            throw e;
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }
    // Previously a timeout was silently ignored, letting the calling test pass vacuously.
    if (!success) {
        throw new AssertionError(
                "Condition was not met within " + IntegrationTestUtils.DEFAULT_TIMEOUT + " ms");
    }
}
/**
 * Builds a {@code KafkaStreams} instance from the given topology and config, registering it
 * for cleanup so the test harness closes it after the test.
 */
private KafkaStreams createKafkaStreams(final StreamsBuilder builder, final Properties config) {
    final KafkaStreams instance = new KafkaStreams(builder.build(config), config);
    streamsToCleanup.add(instance);
    return instance;
}
/**
 * Synchronously produces records to the input topic, all with the same {@code key} and with
 * values running over {@code [start, endExclusive)}.
 */
private void produceValueRange(final int key, final int start, final int endExclusive) {
    final Properties producerProps = new Properties();
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);

    final List<KeyValue<Integer, Integer>> records = IntStream.range(start, endExclusive)
            .mapToObj(value -> KeyValue.pair(key, value))
            .collect(Collectors.toList());
    IntegrationTestUtils.produceKeyValuesSynchronously(INPUT_TOPIC_NAME, records, producerProps, mockTime);
}
/**
 * Builds a fresh Streams configuration for one test instance. Each call advertises a
 * distinct application-server endpoint (see the {@code ++port} note below), which the tests
 * rely on to tell the two instances apart via {@code activeHost().port()}.
 */
private Properties streamsConfiguration() {
    final String safeTestName = safeUniqueTestName(getClass(), testName);
    final Properties config = new Properties();
    config.put(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, StreamsConfig.OPTIMIZE);
    config.put(StreamsConfig.APPLICATION_ID_CONFIG, "app-" + safeTestName);
    // Pre-increments the shared 'port' field so every instance gets a unique endpoint.
    config.put(StreamsConfig.APPLICATION_SERVER_CONFIG, "localhost:" + (++port));
    config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
    config.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
    config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
    config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
    // One standby replica, so standby instances can answer stale-store queries in these tests.
    config.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
    config.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 100);
    // Short heartbeat/session intervals keep rebalances fast within the test timeouts.
    config.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 200);
    config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 1000);
    config.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
    return config;
}
}
| apache-2.0 |
multi-os-engine/moe-core | moe.apple/moe.platform.ios/src/main/java/apple/safariservices/SFContentBlockerManager.java | 6153 | /*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.safariservices;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
// Generated MOE (Multi-OS Engine) binding for the Objective-C class SFContentBlockerManager
// of the SafariServices framework. All methods are native declarations bound via @Selector;
// do not edit by hand — changes will be lost on regeneration.
@Generated
@Library("SafariServices")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class SFContentBlockerManager extends NSObject {
    static {
        // Register this binding class with the NatJ runtime when the class is first loaded.
        NatJ.register();
    }

    // Wraps an existing native instance identified by the given peer pointer.
    @Generated
    protected SFContentBlockerManager(Pointer peer) {
        super(peer);
    }

    // NOTE: most of the members below mirror standard NSObject class-level selectors that the
    // binding generator exposes on every bound class; only the two content-blocker methods
    // further down are specific to SFContentBlockerManager.
    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native SFContentBlockerManager alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native SFContentBlockerManager allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    // Asynchronously queries the state of the content blocker extension with the given
    // identifier; the result is delivered to the completion handler as (state, error).
    @Generated
    @Selector("getStateOfContentBlockerWithIdentifier:completionHandler:")
    public static native void getStateOfContentBlockerWithIdentifierCompletionHandler(String identifier,
            @ObjCBlock(name = "call_getStateOfContentBlockerWithIdentifierCompletionHandler") Block_getStateOfContentBlockerWithIdentifierCompletionHandler completionHandler);

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Owned
    @Selector("new")
    public static native SFContentBlockerManager new_objc();

    // Asynchronously reloads the content blocker with the given identifier; the completion
    // handler receives a non-null NSError on failure.
    @Generated
    @Selector("reloadContentBlockerWithIdentifier:completionHandler:")
    public static native void reloadContentBlockerWithIdentifierCompletionHandler(String identifier,
            @ObjCBlock(name = "call_reloadContentBlockerWithIdentifierCompletionHandler") Block_reloadContentBlockerWithIdentifierCompletionHandler completionHandler);

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    @Generated
    @Selector("init")
    public native SFContentBlockerManager init();

    // Java-side shape of the Objective-C completion block for getStateOfContentBlocker...
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_getStateOfContentBlockerWithIdentifierCompletionHandler {
        @Generated
        void call_getStateOfContentBlockerWithIdentifierCompletionHandler(SFContentBlockerState state, NSError error);
    }

    // Java-side shape of the Objective-C completion block for reloadContentBlocker...
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_reloadContentBlockerWithIdentifierCompletionHandler {
        @Generated
        void call_reloadContentBlockerWithIdentifierCompletionHandler(NSError error);
    }
}
| apache-2.0 |
googleads/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202108/ThirdPartyMeasurementSettings.java | 13628 | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.v202108;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* Contains third party auto-pixeling settings for cross-sell Partners.
*
*
* <p>Java class for ThirdPartyMeasurementSettings complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ThirdPartyMeasurementSettings">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="viewabilityPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyViewabilityIntegrationPartner" minOccurs="0"/>
* <element name="viewabilityClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="viewabilityReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherViewabilityPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyViewabilityIntegrationPartner" minOccurs="0"/>
* <element name="publisherViewabilityClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherViewabilityReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="brandLiftPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyBrandLiftIntegrationPartner" minOccurs="0"/>
* <element name="brandLiftClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="brandLiftReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="reachPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyReachIntegrationPartner" minOccurs="0"/>
* <element name="reachClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="reachReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherReachPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyReachIntegrationPartner" minOccurs="0"/>
* <element name="publisherReachClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherReachReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ThirdPartyMeasurementSettings", propOrder = {
    "viewabilityPartner",
    "viewabilityClientId",
    "viewabilityReportingId",
    "publisherViewabilityPartner",
    "publisherViewabilityClientId",
    "publisherViewabilityReportingId",
    "brandLiftPartner",
    "brandLiftClientId",
    "brandLiftReportingId",
    "reachPartner",
    "reachClientId",
    "reachReportingId",
    "publisherReachPartner",
    "publisherReachClientId",
    "publisherReachReportingId"
})
public class ThirdPartyMeasurementSettings {

    // Generated JAXB binding — do not edit by hand. Every property is optional in the schema
    // (minOccurs = "0"), so any of these fields may be null when unset.
    @XmlSchemaType(name = "string")
    protected ThirdPartyViewabilityIntegrationPartner viewabilityPartner;
    protected String viewabilityClientId;
    protected String viewabilityReportingId;
    @XmlSchemaType(name = "string")
    protected ThirdPartyViewabilityIntegrationPartner publisherViewabilityPartner;
    protected String publisherViewabilityClientId;
    protected String publisherViewabilityReportingId;
    @XmlSchemaType(name = "string")
    protected ThirdPartyBrandLiftIntegrationPartner brandLiftPartner;
    protected String brandLiftClientId;
    protected String brandLiftReportingId;
    @XmlSchemaType(name = "string")
    protected ThirdPartyReachIntegrationPartner reachPartner;
    protected String reachClientId;
    protected String reachReportingId;
    @XmlSchemaType(name = "string")
    protected ThirdPartyReachIntegrationPartner publisherReachPartner;
    protected String publisherReachClientId;
    protected String publisherReachReportingId;

    /**
     * Gets the value of the viewabilityPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyViewabilityIntegrationPartner }
     *
     */
    public ThirdPartyViewabilityIntegrationPartner getViewabilityPartner() {
        return viewabilityPartner;
    }

    /**
     * Sets the value of the viewabilityPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyViewabilityIntegrationPartner }
     *
     */
    public void setViewabilityPartner(ThirdPartyViewabilityIntegrationPartner value) {
        this.viewabilityPartner = value;
    }

    /**
     * Gets the value of the viewabilityClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getViewabilityClientId() {
        return viewabilityClientId;
    }

    /**
     * Sets the value of the viewabilityClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setViewabilityClientId(String value) {
        this.viewabilityClientId = value;
    }

    /**
     * Gets the value of the viewabilityReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getViewabilityReportingId() {
        return viewabilityReportingId;
    }

    /**
     * Sets the value of the viewabilityReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setViewabilityReportingId(String value) {
        this.viewabilityReportingId = value;
    }

    /**
     * Gets the value of the publisherViewabilityPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyViewabilityIntegrationPartner }
     *
     */
    public ThirdPartyViewabilityIntegrationPartner getPublisherViewabilityPartner() {
        return publisherViewabilityPartner;
    }

    /**
     * Sets the value of the publisherViewabilityPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyViewabilityIntegrationPartner }
     *
     */
    public void setPublisherViewabilityPartner(ThirdPartyViewabilityIntegrationPartner value) {
        this.publisherViewabilityPartner = value;
    }

    /**
     * Gets the value of the publisherViewabilityClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPublisherViewabilityClientId() {
        return publisherViewabilityClientId;
    }

    /**
     * Sets the value of the publisherViewabilityClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPublisherViewabilityClientId(String value) {
        this.publisherViewabilityClientId = value;
    }

    /**
     * Gets the value of the publisherViewabilityReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPublisherViewabilityReportingId() {
        return publisherViewabilityReportingId;
    }

    /**
     * Sets the value of the publisherViewabilityReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPublisherViewabilityReportingId(String value) {
        this.publisherViewabilityReportingId = value;
    }

    /**
     * Gets the value of the brandLiftPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyBrandLiftIntegrationPartner }
     *
     */
    public ThirdPartyBrandLiftIntegrationPartner getBrandLiftPartner() {
        return brandLiftPartner;
    }

    /**
     * Sets the value of the brandLiftPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyBrandLiftIntegrationPartner }
     *
     */
    public void setBrandLiftPartner(ThirdPartyBrandLiftIntegrationPartner value) {
        this.brandLiftPartner = value;
    }

    /**
     * Gets the value of the brandLiftClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getBrandLiftClientId() {
        return brandLiftClientId;
    }

    /**
     * Sets the value of the brandLiftClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setBrandLiftClientId(String value) {
        this.brandLiftClientId = value;
    }

    /**
     * Gets the value of the brandLiftReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getBrandLiftReportingId() {
        return brandLiftReportingId;
    }

    /**
     * Sets the value of the brandLiftReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setBrandLiftReportingId(String value) {
        this.brandLiftReportingId = value;
    }

    /**
     * Gets the value of the reachPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyReachIntegrationPartner }
     *
     */
    public ThirdPartyReachIntegrationPartner getReachPartner() {
        return reachPartner;
    }

    /**
     * Sets the value of the reachPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyReachIntegrationPartner }
     *
     */
    public void setReachPartner(ThirdPartyReachIntegrationPartner value) {
        this.reachPartner = value;
    }

    /**
     * Gets the value of the reachClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getReachClientId() {
        return reachClientId;
    }

    /**
     * Sets the value of the reachClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setReachClientId(String value) {
        this.reachClientId = value;
    }

    /**
     * Gets the value of the reachReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getReachReportingId() {
        return reachReportingId;
    }

    /**
     * Sets the value of the reachReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setReachReportingId(String value) {
        this.reachReportingId = value;
    }

    /**
     * Gets the value of the publisherReachPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyReachIntegrationPartner }
     *
     */
    public ThirdPartyReachIntegrationPartner getPublisherReachPartner() {
        return publisherReachPartner;
    }

    /**
     * Sets the value of the publisherReachPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyReachIntegrationPartner }
     *
     */
    public void setPublisherReachPartner(ThirdPartyReachIntegrationPartner value) {
        this.publisherReachPartner = value;
    }

    /**
     * Gets the value of the publisherReachClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPublisherReachClientId() {
        return publisherReachClientId;
    }

    /**
     * Sets the value of the publisherReachClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPublisherReachClientId(String value) {
        this.publisherReachClientId = value;
    }

    /**
     * Gets the value of the publisherReachReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPublisherReachReportingId() {
        return publisherReachReportingId;
    }

    /**
     * Sets the value of the publisherReachReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPublisherReachReportingId(String value) {
        this.publisherReachReportingId = value;
    }

}
| apache-2.0 |
LAW-Unimi/BUbiNG | src/it/unimi/di/law/bubing/sieve/IdentitySieve.java | 2029 | package it.unimi.di.law.bubing.sieve;
/*
* Copyright (C) 2010-2017 Paolo Boldi, Massimo Santini, and Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import it.unimi.dsi.sux4j.mph.AbstractHashFunction;
import java.io.IOException;
//RELEASE-STATUS: DIST
/** A sieve that simply (and immediately) copies {@linkplain #enqueue(Object, Object) enqueued keys} to the {@linkplain #setNewFlowRecevier(it.unimi.di.law.bubing.sieve.AbstractSieve.NewFlowReceiver) new flow receiver}.
*
* <p>Note that instances of this class call {@link AbstractSieve.NewFlowReceiver#prepareToAppend()} in the constructor only, and
* {@link AbstractSieve.NewFlowReceiver#noMoreAppend()} in the method {@link #close()} only.
*/
public final class IdentitySieve<K, V> extends AbstractSieve<K, V> {
	public IdentitySieve(final NewFlowReceiver<K> newFlowReceiver, final ByteSerializerDeserializer<K> keySerDeser, final ByteSerializerDeserializer<V> valueSerDeser, final AbstractHashFunction<K> hashingStrategy, final UpdateStrategy<K, V> updateStrategy) throws IOException {
		super(keySerDeser, valueSerDeser, hashingStrategy, updateStrategy);
		setNewFlowRecevier(newFlowReceiver);
		// The receiver is opened for appends once, here, for the whole lifetime of the
		// sieve; it is closed only by close().
		newFlowReceiver.prepareToAppend();
	}
	@Override
	public boolean enqueue(K key, V value) throws IOException {
		// Keys are forwarded immediately and unconditionally; the value is ignored by
		// design (this sieve performs no deduplication or update logic).
		// NOTE(review): the first argument of append() is hard-coded to 0 here —
		// presumably a hash the receiver does not need for an identity sieve; confirm
		// against AbstractSieve.NewFlowReceiver.
		newFlowReceiver.append(0, key);
		// NOTE(review): always reports false; confirm what the boolean return of
		// enqueue() signifies in the AbstractSieve contract.
		return false;
	}
	@Override
	public void close() throws IOException {
		// Signals the receiver that no further keys will ever be appended.
		newFlowReceiver.noMoreAppend();
	}
	@Override
	public void flush() throws IOException, InterruptedException {
		// Intentionally a no-op: keys are handed to the receiver as soon as they are
		// enqueued, so there is nothing buffered to flush.
	}
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-elasticbeanstalk/src/main/java/com/amazonaws/services/elasticbeanstalk/model/DisassociateEnvironmentOperationsRoleRequest.java | 4243 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticbeanstalk.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * <p>
 * Request to disassociate the operations role from an environment.
 * </p>
 *
 * @see <a
 *      href="http://docs.aws.amazon.com/goto/WebAPI/elasticbeanstalk-2010-12-01/DisassociateEnvironmentOperationsRole"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DisassociateEnvironmentOperationsRoleRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the environment from which to disassociate the operations role. */
    private String environmentName;

    /**
     * Sets the name of the environment from which to disassociate the operations role.
     *
     * @param environmentName
     *        the environment name
     */
    public void setEnvironmentName(String environmentName) {
        this.environmentName = environmentName;
    }

    /**
     * Returns the name of the environment from which to disassociate the operations role.
     *
     * @return the environment name, or {@code null} if not set
     */
    public String getEnvironmentName() {
        return this.environmentName;
    }

    /**
     * Fluent variant of {@link #setEnvironmentName(String)}.
     *
     * @param environmentName
     *        the environment name
     * @return this request, so that method calls can be chained together
     */
    public DisassociateEnvironmentOperationsRoleRequest withEnvironmentName(String environmentName) {
        setEnvironmentName(environmentName);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        String name = getEnvironmentName();
        // Emits "{EnvironmentName: <name>}" when the name is set, "{}" otherwise.
        return "{" + (name != null ? "EnvironmentName: " + name : "") + "}";
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // A failed instanceof also covers the null case.
        if (!(obj instanceof DisassociateEnvironmentOperationsRoleRequest)) {
            return false;
        }
        DisassociateEnvironmentOperationsRoleRequest other = (DisassociateEnvironmentOperationsRoleRequest) obj;
        String mine = getEnvironmentName();
        String theirs = other.getEnvironmentName();
        return mine == null ? theirs == null : mine.equals(theirs);
    }

    @Override
    public int hashCode() {
        String name = getEnvironmentName();
        // Same value as the conventional 31 * 1 + (name == null ? 0 : name.hashCode()).
        return 31 + (name == null ? 0 : name.hashCode());
    }

    @Override
    public DisassociateEnvironmentOperationsRoleRequest clone() {
        return (DisassociateEnvironmentOperationsRoleRequest) super.clone();
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-ssm/src/main/java/com/amazonaws/services/simplesystemsmanagement/model/transform/DocumentDescriptionJsonUnmarshaller.java | 10561 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simplesystemsmanagement.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.simplesystemsmanagement.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * DocumentDescription JSON Unmarshaller
 *
 * <p>Walks the JSON token stream of a serialized DocumentDescription and copies every
 * recognized top-level field into a freshly created {@link DocumentDescription}.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DocumentDescriptionJsonUnmarshaller implements Unmarshaller<DocumentDescription, JsonUnmarshallerContext> {
/**
 * Unmarshalls a DocumentDescription from the current position of the given context.
 *
 * @param context the streaming JSON context positioned at (or just before) the object
 * @return the populated DocumentDescription, or {@code null} if the JSON value is null
 * @throws Exception if the underlying parser fails
 */
public DocumentDescription unmarshall(JsonUnmarshallerContext context) throws Exception {
DocumentDescription documentDescription = new DocumentDescription();
// Depth bookkeeping: the fields of this object live exactly one level below the
// depth at which unmarshalling started.
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
// A JSON null for the whole object yields a null result, not an empty instance.
if (token == VALUE_NULL) {
return null;
}
// Iterate tokens until the object we started in is closed (or input is exhausted).
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
// One testExpression per top-level field; each match consumes the field's value.
if (context.testExpression("Sha1", targetDepth)) {
context.nextToken();
documentDescription.setSha1(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("Hash", targetDepth)) {
context.nextToken();
documentDescription.setHash(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("HashType", targetDepth)) {
context.nextToken();
documentDescription.setHashType(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("Name", targetDepth)) {
context.nextToken();
documentDescription.setName(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("DisplayName", targetDepth)) {
context.nextToken();
documentDescription.setDisplayName(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("VersionName", targetDepth)) {
context.nextToken();
documentDescription.setVersionName(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("Owner", targetDepth)) {
context.nextToken();
documentDescription.setOwner(context.getUnmarshaller(String.class).unmarshall(context));
}
// Timestamps are decoded as Unix-epoch values.
if (context.testExpression("CreatedDate", targetDepth)) {
context.nextToken();
documentDescription.setCreatedDate(DateJsonUnmarshallerFactory.getInstance("unixTimestamp").unmarshall(context));
}
if (context.testExpression("Status", targetDepth)) {
context.nextToken();
documentDescription.setStatus(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("StatusInformation", targetDepth)) {
context.nextToken();
documentDescription.setStatusInformation(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("DocumentVersion", targetDepth)) {
context.nextToken();
documentDescription.setDocumentVersion(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("Description", targetDepth)) {
context.nextToken();
documentDescription.setDescription(context.getUnmarshaller(String.class).unmarshall(context));
}
// List-valued fields delegate each element to the matching element unmarshaller.
if (context.testExpression("Parameters", targetDepth)) {
context.nextToken();
documentDescription.setParameters(new ListUnmarshaller<DocumentParameter>(DocumentParameterJsonUnmarshaller.getInstance())
.unmarshall(context));
}
if (context.testExpression("PlatformTypes", targetDepth)) {
context.nextToken();
documentDescription.setPlatformTypes(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))
.unmarshall(context));
}
if (context.testExpression("DocumentType", targetDepth)) {
context.nextToken();
documentDescription.setDocumentType(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("SchemaVersion", targetDepth)) {
context.nextToken();
documentDescription.setSchemaVersion(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("LatestVersion", targetDepth)) {
context.nextToken();
documentDescription.setLatestVersion(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("DefaultVersion", targetDepth)) {
context.nextToken();
documentDescription.setDefaultVersion(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("DocumentFormat", targetDepth)) {
context.nextToken();
documentDescription.setDocumentFormat(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("TargetType", targetDepth)) {
context.nextToken();
documentDescription.setTargetType(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("Tags", targetDepth)) {
context.nextToken();
documentDescription.setTags(new ListUnmarshaller<Tag>(TagJsonUnmarshaller.getInstance())
.unmarshall(context));
}
if (context.testExpression("AttachmentsInformation", targetDepth)) {
context.nextToken();
documentDescription.setAttachmentsInformation(new ListUnmarshaller<AttachmentInformation>(AttachmentInformationJsonUnmarshaller
.getInstance())
.unmarshall(context));
}
if (context.testExpression("Requires", targetDepth)) {
context.nextToken();
documentDescription.setRequires(new ListUnmarshaller<DocumentRequires>(DocumentRequiresJsonUnmarshaller.getInstance())
.unmarshall(context));
}
if (context.testExpression("Author", targetDepth)) {
context.nextToken();
documentDescription.setAuthor(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("ReviewInformation", targetDepth)) {
context.nextToken();
documentDescription.setReviewInformation(new ListUnmarshaller<ReviewInformation>(ReviewInformationJsonUnmarshaller.getInstance())
.unmarshall(context));
}
if (context.testExpression("ApprovedVersion", targetDepth)) {
context.nextToken();
documentDescription.setApprovedVersion(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("PendingReviewVersion", targetDepth)) {
context.nextToken();
documentDescription.setPendingReviewVersion(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("ReviewStatus", targetDepth)) {
context.nextToken();
documentDescription.setReviewStatus(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("Category", targetDepth)) {
context.nextToken();
documentDescription.setCategory(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))
.unmarshall(context));
}
if (context.testExpression("CategoryEnum", targetDepth)) {
context.nextToken();
documentDescription.setCategoryEnum(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))
.unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
// Stop once we close the object we started in; nested closes at deeper levels
// (or belonging to other parents) are skipped over.
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return documentDescription;
}
// Lazily created singleton. Initialization is not synchronized; concurrent first
// calls may each create an instance, which is harmless for this stateless class.
private static DocumentDescriptionJsonUnmarshaller instance;
/** Returns the shared (stateless) unmarshaller instance. */
public static DocumentDescriptionJsonUnmarshaller getInstance() {
if (instance == null)
instance = new DocumentDescriptionJsonUnmarshaller();
return instance;
}
}
| apache-2.0 |
mikevoxcap/spring4-sample | src/main/java/com/pluralsight/orderfulfillment/customer/Customer.java | 2192 | package com.pluralsight.orderfulfillment.customer;
/**
 * Domain object for a Customer
 *
 * @author Michael Hoffman, Pluralsight
 *
 */
public class Customer {

   private long id;
   private String firstName;
   private String lastName;
   private String email;

   /** Creates an empty customer; all fields start unset. */
   public Customer() {
   }

   /**
    * Creates a fully populated customer.
    *
    * @param id the customer identifier
    * @param firstName the customer's first name
    * @param lastName the customer's last name
    * @param email the customer's email address
    */
   public Customer(long id, String firstName, String lastName, String email) {
      this.id = id;
      this.firstName = firstName;
      this.lastName = lastName;
      this.email = email;
   }

   /** @return the customer identifier */
   public long getId() {
      return id;
   }

   /** @param id the customer identifier to set */
   public void setId(long id) {
      this.id = id;
   }

   /** @return the customer's first name */
   public String getFirstName() {
      return firstName;
   }

   /** @param firstName the first name to set */
   public void setFirstName(String firstName) {
      this.firstName = firstName;
   }

   /** @return the customer's last name */
   public String getLastName() {
      return lastName;
   }

   /** @param lastName the last name to set */
   public void setLastName(String lastName) {
      this.lastName = lastName;
   }

   /** @return the customer's email address */
   public String getEmail() {
      return email;
   }

   /** @param email the email address to set */
   public void setEmail(String email) {
      this.email = email;
   }

   /**
    * Renders the customer as {@code Customer [id=..., firstName=..., lastName=..., email=...]},
    * omitting null fields. (A trailing ", " remains when the last populated field is
    * not the email — this matches the historical format.)
    *
    * @see java.lang.Object#toString()
    */
   @Override
   public String toString() {
      StringBuilder text = new StringBuilder("Customer [id=").append(id).append(", ");
      if (firstName != null) {
         text.append("firstName=").append(firstName).append(", ");
      }
      if (lastName != null) {
         text.append("lastName=").append(lastName).append(", ");
      }
      if (email != null) {
         text.append("email=").append(email);
      }
      return text.append("]").toString();
   }
}
| apache-2.0 |
wkadzz/YbEg | app/src/main/java/com/example/wk/ybeg/fragment/GengDuo.java | 626 | package com.example.wk.ybeg.fragment;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.example.wk.ybeg.R;
/**
 * Fragment backing the "more" tab; inflates its layout and exposes the root view.
 *
 * Created by WK on 2016/11/25.
 */
public class GengDuo extends BaseFragment {

    /** Log tag derived from the class name. */
    public static final String TAG = GengDuo.class.getSimpleName();

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        // Inflate without attaching; the fragment framework attaches the returned view.
        view = inflater.inflate(R.layout.gengduo, container, false);
        return view;
    }
}
| apache-2.0 |
Lihuanghe/CMPPGate | src/main/java/org/marre/mime/encoder/MimeEncoder.java | 3133 | /* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is "SMS Library for the Java platform".
*
* The Initial Developer of the Original Code is Markus Eriksson.
* Portions created by the Initial Developer are Copyright (C) 2002
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.marre.mime.encoder;
import java.io.IOException;
import java.io.OutputStream;
import org.marre.mime.MimeBodyPart;
/**
 * Interface for all mime encoders.
 *
 * <p>An implementation writes the three parts of a {@link MimeBodyPart} — content-type,
 * headers and body — to an output stream, each via its own method so callers control
 * the framing between them.
 *
 * @author Markus Eriksson
 * @version $Id$
 */
public interface MimeEncoder
{
/**
 * Writes the content-type of the message to the given stream.
 *
 * @param os
 * The stream to write to
 * @param msg
 * The message to get the content-type from
 * @throws IOException
 * Thrown if we fail to write the content-type to the stream
 */
void writeContentType(OutputStream os, MimeBodyPart msg) throws IOException;
/**
 * Writes the headers of the message to the given stream.
 *
 * @param os
 * The stream to write to
 * @param msg
 * The message to get the headers from
 * @throws IOException
 * Thrown if we fail to write the headers to the stream
 */
void writeHeaders(OutputStream os, MimeBodyPart msg) throws IOException;
/**
 * Writes the body of the message to the given stream.
 *
 * @param os
 * The stream to write to
 * @param msg
 * The message to get the data from
 * @throws IOException
 * Thrown if we fail to write the body to the stream
 */
void writeBody(OutputStream os, MimeBodyPart msg) throws IOException;
}
| apache-2.0 |
TranscendComputing/TopStackMetricSearch | src/com/transcend/monitor/transform/GetMetricStatisticsRequestUnmarshaller.java | 3650 | package com.transcend.monitor.transform;
import java.util.Date;
import java.util.Map;
import java.util.TimeZone;
import org.slf4j.Logger;
import com.msi.tough.core.Appctx;
import com.msi.tough.core.DateHelper;
import com.msi.tough.monitor.common.MonitorConstants;
import com.msi.tough.query.ErrorResponse;
import com.msi.tough.query.QueryFaults;
import com.transcend.monitor.message.GetMetricStatisticsMessage.GetMetricStatisticsRequest;
import com.transcend.monitor.message.MetricAlarmMessage.Statistic;
import com.transcend.monitor.message.MetricAlarmMessage.Unit;
/**
 * Unmarshals the raw query-parameter map of a GetMetricStatistics call into a
 * {@link GetMetricStatisticsRequest}, validating statistics, the time window,
 * the period and the implied number of datapoints.
 */
public class GetMetricStatisticsRequestUnmarshaller extends BaseMonitorUnmarshaller<GetMetricStatisticsRequest>
{
// Upper bound on the number of datapoints a single request may span.
public static final int MAX_DATAPOINTS = 100;
private final static Logger logger = Appctx
.getLogger(GetMetricStatisticsRequestUnmarshaller.class.getName());
// Lazily created singleton. Initialization is not synchronized, so concurrent
// first calls may briefly create more than one (stateless) instance.
private static GetMetricStatisticsRequestUnmarshaller instance;
/** Returns the shared unmarshaller instance. */
public static GetMetricStatisticsRequestUnmarshaller getInstance()
{
if (instance == null)
{
instance = new GetMetricStatisticsRequestUnmarshaller();
}
return instance;
}
/**
 * Builds and validates the request.
 *
 * @param in raw query parameters (name to value array)
 * @return the partially built request, finished by the superclass
 * @throws ErrorResponse / QueryFaults subtypes on missing or invalid parameters
 */
@Override
public GetMetricStatisticsRequest unmarshall(Map<String, String[]> in)
{
final GetMetricStatisticsRequest.Builder req =
GetMetricStatisticsRequest.newBuilder();
req.setPeriod(MarshallingUtils.unmarshallInteger(in,
MonitorConstants.NODE_PERIOD,
logger));
req.setStartTime(MarshallingUtils.unmarshallString(in,
MonitorConstants.NODE_STARTTIME,
logger));
req.setEndTime(MarshallingUtils.unmarshallString(in,
MonitorConstants.NODE_ENDTIME,
logger));
req.setMetricName(MarshallingUtils.unmarshallString(in,
MonitorConstants.NODE_METRICNAME,
logger));
// Unit is optional; absence maps to Unit.None.
String unit = MarshallingUtils.unmarshallString(in,
MonitorConstants.NODE_UNIT, null,
logger);
req.setUnit(unit == null? Unit.None : Unit.valueOf(unit));
// "Statistics.member.N" parameters are 1-based; stop at the first missing index.
int i = 0;
while (true)
{
i++;
final String n[] = in.get("Statistics.member." + i);
if (n == null)
{
break;
}
try {
req.addStatistic(Statistic.valueOf(n[0]));
} catch (Exception e) {
// Unknown statistic name; the original cause is intentionally not propagated.
throw QueryFaults.InvalidParameterValue();
}
}
// At least one statistic is mandatory.
if (req.getStatisticCount() == 0) {
throw ErrorResponse.missingParameter();
}
req.addAllDimension(unmarshallDimensions(in));
// Start/end are ISO-8601 strings interpreted in GMT; start must precede end.
Date start = DateHelper.getCalendarFromISO8601String(req.getStartTime(),
TimeZone.getTimeZone("GMT")).getTime();
Date end = DateHelper.getCalendarFromISO8601String(req.getEndTime(),
TimeZone.getTimeZone("GMT")).getTime();
if (!start.before(end)) {
throw QueryFaults.InvalidParameterValue();
}
// Period must be a positive multiple of 60.
if (req.getPeriod() < 60 || req.getPeriod() % 60 != 0) {
throw QueryFaults.InvalidParameterValue();
}
long timeDelta = end.getTime() -
start.getTime();
// NOTE(review): timeDelta is in milliseconds. If getPeriod() is in seconds (as the
// multiple-of-60 check suggests), datapoints would be timeDelta / 1000 / period;
// the extra "/ 60" here makes numPoints 60x smaller than that. Confirm the intended
// unit of getPeriod() before relying on this limit.
long numPoints = timeDelta / req.getPeriod() / 1000 / 60;
if (numPoints > MAX_DATAPOINTS) {
throw QueryFaults.InvalidParameterCombination("You have requested" +
" up to "+numPoints+" datapoints, which exceeds the " +
"limit of "+MAX_DATAPOINTS+".");
}
return super.unmarshall(req.buildPartial(), in);
}
}
| apache-2.0 |
eribeiro/kafka | clients/src/main/java/org/apache/kafka/clients/ClientUtils.java | 4919 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.kafka.clients;
import java.io.Closeable;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.kafka.common.network.ChannelBuilders;
import org.apache.kafka.common.network.LoginType;
import org.apache.kafka.common.network.Mode;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.Protocol;
import org.apache.kafka.common.protocol.SecurityProtocol;
import org.apache.kafka.common.network.ChannelBuilder;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.requests.ApiVersionsResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.kafka.common.utils.Utils.getHost;
import static org.apache.kafka.common.utils.Utils.getPort;
/** Static helpers shared by Kafka client implementations. */
public class ClientUtils {
    private static final Logger log = LoggerFactory.getLogger(ClientUtils.class);
    /**
     * Parses "host:port" bootstrap URLs into socket addresses.
     *
     * <p>Unresolvable hostnames are logged and skipped rather than failing the whole
     * list; a {@link ConfigException} is thrown for malformed entries or if nothing
     * resolvable remains.
     *
     * @param urls candidate bootstrap server strings (null/empty entries are ignored)
     * @return the resolved addresses, in input order
     * @throws ConfigException on a malformed url/port, or if no address resolves
     */
    public static List<InetSocketAddress> parseAndValidateAddresses(List<String> urls) {
        List<InetSocketAddress> addresses = new ArrayList<InetSocketAddress>();
        for (String url : urls) {
            if (url != null && !url.isEmpty()) {
                try {
                    String host = getHost(url);
                    Integer port = getPort(url);
                    if (host == null || port == null)
                        throw new ConfigException("Invalid url in " + CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG + ": " + url);
                    InetSocketAddress address = new InetSocketAddress(host, port);
                    // InetSocketAddress resolves eagerly; an unresolved address means DNS failed.
                    if (address.isUnresolved()) {
                        log.warn("Removing server {} from {} as DNS resolution failed for {}", url, CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, host);
                    } else {
                        addresses.add(address);
                    }
                } catch (IllegalArgumentException e) {
                    throw new ConfigException("Invalid port in " + CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG + ": " + url);
                }
            }
        }
        if (addresses.isEmpty())
            throw new ConfigException("No resolvable bootstrap urls given in " + CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG);
        return addresses;
    }
    /**
     * Closes {@code c}, logging any failure and recording only the first one seen
     * into {@code firstException} (later failures are logged but not recorded).
     *
     * @param c the resource to close; ignored when null
     * @param name label used in the error log message
     * @param firstException holder that keeps the first close failure across calls
     */
    public static void closeQuietly(Closeable c, String name, AtomicReference<Throwable> firstException) {
        if (c != null) {
            try {
                c.close();
            } catch (Throwable t) {
                firstException.compareAndSet(null, t);
                log.error("Failed to close " + name, t);
            }
        }
    }
    /**
     * @param configs client/server configs
     * @return configured ChannelBuilder based on the configs.
     * @throws ConfigException if the configured security protocol is not usable here
     */
    public static ChannelBuilder createChannelBuilder(Map<String, ?> configs) {
        SecurityProtocol securityProtocol = SecurityProtocol.forName((String) configs.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG));
        // Testing-only protocols are rejected for real clients.
        if (!SecurityProtocol.nonTestingValues().contains(securityProtocol))
            throw new ConfigException("Invalid SecurityProtocol " + securityProtocol);
        String clientSaslMechanism = (String) configs.get(SaslConfigs.SASL_MECHANISM);
        return ChannelBuilders.create(securityProtocol, Mode.CLIENT, LoginType.CLIENT, configs, clientSaslMechanism, true);
    }
    /**
     * Builds the list of API versions this client expects, one entry per key,
     * pinned to the current protocol version for both min and max.
     *
     * @param apiKeys the APIs to advertise expectations for
     * @return expected versions, in iteration order of {@code apiKeys}
     */
    public static Collection<ApiVersionsResponse.ApiVersion> buildExpectedApiVersions(Collection<ApiKeys> apiKeys) {
        List<ApiVersionsResponse.ApiVersion> expectedApiVersions = new ArrayList<>();
        for (ApiKeys apiKey : apiKeys)
            expectedApiVersions.add(
                // once backwards client compatibility is added, expected min API version for an API should be set to it's min version
                new ApiVersionsResponse.ApiVersion(
                    apiKey.id, Protocol.CURR_VERSION[apiKey.id], Protocol.CURR_VERSION[apiKey.id]));
        return expectedApiVersions;
    }
}
| apache-2.0 |
bestdpf/2dbarcode | android/src/com/google/zxing/client/android/result/supplement/SupplementalInfoRetriever.java | 6505 | /*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.result.supplement;
import android.content.Context;
import android.os.AsyncTask;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.method.LinkMovementMethod;
import android.text.style.URLSpan;
import android.util.Log;
import android.widget.TextView;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import com.google.zxing.client.result.ISBNParsedResult;
import com.google.zxing.client.result.ParsedResult;
import com.google.zxing.client.result.ProductParsedResult;
import com.google.zxing.client.result.URIParsedResult;
import com.google.zxing.client.android.common.executor.AsyncTaskExecInterface;
import com.google.zxing.client.android.common.executor.AsyncTaskExecManager;
import com.google.zxing.client.android.history.HistoryManager;
/**
 * Base class for background tasks that fetch supplemental information (product data,
 * page titles, book info, ...) for a scanned barcode result and append it to a
 * {@link TextView}, recording each piece in scan history.
 *
 * <p>The TextView and HistoryManager are held via {@link WeakReference} so a running
 * task does not keep the enclosing UI alive.
 */
public abstract class SupplementalInfoRetriever extends AsyncTask<Object,Object,Object> {
  private static final String TAG = "SupplementalInfo";
  /**
   * Starts the retrievers appropriate for the given parsed result type:
   * URIs get URI info plus a page-title fetch; products and ISBNs get product
   * lookups plus an Amazon lookup keyed by the identifier type (12 digits = UPC,
   * 13 digits = EAN).
   */
  public static void maybeInvokeRetrieval(TextView textView,
                                          ParsedResult result,
                                          HistoryManager historyManager,
                                          Context context) {
    AsyncTaskExecInterface taskExec = new AsyncTaskExecManager().build();
    if (result instanceof URIParsedResult) {
      taskExec.execute(new URIResultInfoRetriever(textView, (URIParsedResult) result, historyManager, context));
      taskExec.execute(new TitleRetriever(textView, (URIParsedResult) result, historyManager));
    } else if (result instanceof ProductParsedResult) {
      String productID = ((ProductParsedResult) result).getProductID();
      taskExec.execute(new ProductResultInfoRetriever(textView, productID, historyManager, context));
      switch (productID.length()) {
        case 12:
          taskExec.execute(new AmazonInfoRetriever(textView, "UPC", productID, historyManager, context));
          break;
        case 13:
          taskExec.execute(new AmazonInfoRetriever(textView, "EAN", productID, historyManager, context));
          break;
      }
    } else if (result instanceof ISBNParsedResult) {
      String isbn = ((ISBNParsedResult) result).getISBN();
      taskExec.execute(new ProductResultInfoRetriever(textView, isbn, historyManager, context));
      taskExec.execute(new BookResultInfoRetriever(textView, isbn, historyManager, context));
      taskExec.execute(new AmazonInfoRetriever(textView, "ISBN", isbn, historyManager, context));
    }
  }
  // Weak refs: the task must not leak the view/history manager if the UI goes away.
  private final WeakReference<TextView> textViewRef;
  private final WeakReference<HistoryManager> historyManagerRef;
  // Results accumulated on the background thread, applied in onPostExecute.
  private final List<Spannable> newContents;
  private final List<String[]> newHistories;
  SupplementalInfoRetriever(TextView textView, HistoryManager historyManager) {
    textViewRef = new WeakReference<TextView>(textView);
    historyManagerRef = new WeakReference<HistoryManager>(historyManager);
    newContents = new ArrayList<Spannable>();
    newHistories = new ArrayList<String[]>();
  }
  /** Runs {@link #retrieveSupplementalInfo()} off the UI thread; network failures are logged and swallowed. */
  @Override
  public final Object doInBackground(Object... args) {
    try {
      retrieveSupplementalInfo();
    } catch (IOException e) {
      Log.w(TAG, e);
    }
    return null;
  }
  /** On the UI thread: appends the collected spannables to the view (if still alive) and records history. */
  @Override
  protected final void onPostExecute(Object arg) {
    TextView textView = textViewRef.get();
    if (textView != null) {
      for (Spannable content : newContents) {
        textView.append(content);
      }
      // Make URLSpans clickable.
      textView.setMovementMethod(LinkMovementMethod.getInstance());
    }
    HistoryManager historyManager = historyManagerRef.get();
    if (historyManager != null) {
      for (String[] text : newHistories) {
        historyManager.addHistoryItemDetails(text[0], text[1]);
      }
    }
  }
  /** Subclasses fetch their supplemental info here and report it via {@link #append}. */
  abstract void retrieveSupplementalInfo() throws IOException;
  /**
   * Queues one piece of retrieved text for display and history.
   *
   * <p>The displayed text is "source first [second] [third]..."; if {@code linkURL}
   * is non-null, everything after the source prefix becomes a clickable link.
   *
   * @param itemID history key (e.g. the scanned identifier)
   * @param source optional label prefix, may be null
   * @param newTexts the text fragments; the first is shown bare, the rest bracketed
   * @param linkURL optional URL to attach to the text, may be null
   */
  final void append(String itemID, String source, String[] newTexts, String linkURL) {
    StringBuilder newTextCombined = new StringBuilder();
    if (source != null) {
      newTextCombined.append(source).append(' ');
    }
    int linkStart = newTextCombined.length();
    boolean first = true;
    for (String newText : newTexts) {
      if (first) {
        newTextCombined.append(newText);
        first = false;
      } else {
        newTextCombined.append(" [");
        newTextCombined.append(newText);
        newTextCombined.append(']');
      }
    }
    int linkEnd = newTextCombined.length();
    String newText = newTextCombined.toString();
    Spannable content = new SpannableString(newText + "\n\n");
    if (linkURL != null) {
      // Strangely, some Android browsers don't seem to register to handle HTTP:// or HTTPS://.
      // Lower-case these as it should always be OK to lower-case these schemes.
      if (linkURL.startsWith("HTTP://")) {
        linkURL = "http" + linkURL.substring(4);
      } else if (linkURL.startsWith("HTTPS://")) {
        linkURL = "https" + linkURL.substring(5);
      }
      content.setSpan(new URLSpan(linkURL), linkStart, linkEnd, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
    }
    newContents.add(content);
    newHistories.add(new String[] {itemID, newText});
  }
  /** Adds {@code text} to {@code texts} if it is non-null and non-empty. */
  static void maybeAddText(String text, Collection<String> texts) {
    if (text != null && text.length() > 0) {
      texts.add(text);
    }
  }
  /** Joins a non-empty series with ", " and adds the result to {@code texts}. */
  static void maybeAddTextSeries(Collection<String> textSeries, Collection<String> texts) {
    if (textSeries != null && !textSeries.isEmpty()) {
      boolean first = true;
      StringBuilder authorsText = new StringBuilder();
      for (String author : textSeries) {
        if (first) {
          first = false;
        } else {
          authorsText.append(", ");
        }
        authorsText.append(author);
      }
      texts.add(authorsText.toString());
    }
  }
}
| apache-2.0 |
mgargadennec/blossom | blossom-autoconfigure/src/main/java/com/blossomproject/autoconfigure/core/ActuatorAutoConfiguration.java | 2930 | package com.blossomproject.autoconfigure.core;
import com.blossomproject.core.common.actuator.ElasticsearchTraceRepository;
import com.blossomproject.core.common.actuator.ElasticsearchTraceRepositoryImpl;
import com.blossomproject.core.common.actuator.TraceStatisticsMvcEndpoint;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import com.google.common.io.Resources;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.client.Client;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.actuate.autoconfigure.trace.http.HttpTraceAutoConfiguration;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.io.Resource;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
/**
 * Auto-configuration that stores actuator HTTP traces in Elasticsearch and exposes
 * a statistics endpoint over them. Exclusion lists (URIs, request/response headers)
 * are bound from the "blossom.actuator.traces" configuration prefix.
 *
 * Created by Maël Gargadennnec on 11/05/2017.
 */
@Configuration
@AutoConfigureAfter(ElasticsearchAutoConfiguration.class)
@AutoConfigureBefore(HttpTraceAutoConfiguration.class)
@PropertySource("classpath:/actuator.properties")
public class ActuatorAutoConfiguration {
/** Holder for the configurable trace exclusion lists. */
@Configuration("BlossomActuatorAutoConfigurationTraceProperties")
@ConfigurationProperties("blossom.actuator.traces")
@PropertySource("classpath:/actuator.properties")
public static class TraceProperties {
// Mutable sets populated by Spring's configuration-properties binding.
private final Set<String> excludedUris = new HashSet<>();
private final Set<String> excludedRequestHeaders = new HashSet<>();
private final Set<String> excludedResponseHeaders = new HashSet<>();
/** URIs whose requests should not be traced. */
public Set<String> getExcludedUris() {
return excludedUris;
}
/** Request headers to strip from stored traces. */
public Set<String> getExcludedRequestHeaders() {
return excludedRequestHeaders;
}
/** Response headers to strip from stored traces. */
public Set<String> getExcludedResponseHeaders() {
return excludedResponseHeaders;
}
}
/**
 * Elasticsearch-backed trace repository writing to the "traces" index; the index
 * settings are loaded from elasticsearch/traces.json on the classpath.
 */
@Bean
public ElasticsearchTraceRepository traceRepository(
Client client, BulkProcessor bulkProcessor,
@Value("classpath:/elasticsearch/traces.json") Resource resource,
ObjectMapper objectMapper,
TraceProperties traceProperties)
throws IOException {
String settings = Resources.toString(resource.getURL(), Charsets.UTF_8);
return new ElasticsearchTraceRepositoryImpl(
client, bulkProcessor, "traces", traceProperties.getExcludedUris(),
traceProperties.getExcludedRequestHeaders(), traceProperties.getExcludedResponseHeaders(), settings, objectMapper);
}
/** MVC endpoint exposing statistics computed from the stored traces. */
@Bean
public TraceStatisticsMvcEndpoint traceStatisticsMvcEndpoint(
ElasticsearchTraceRepository traceRepository) {
return new TraceStatisticsMvcEndpoint(traceRepository);
}
}
| apache-2.0 |
IHTSDO/snow-owl | dependencies/org.eclipse.emf.cdo.common/src/org/eclipse/emf/cdo/spi/common/id/AbstractCDOIDByteArray.java | 2099 | /*
* Copyright (c) 2004 - 2012 Eike Stepper (Berlin, Germany) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eike Stepper - initial API and implementation
*/
package org.eclipse.emf.cdo.spi.common.id;
import java.io.IOException;
import java.util.Arrays;
import org.eclipse.emf.cdo.common.id.CDOIDUtil;
import org.eclipse.net4j.util.CheckUtil;
import org.eclipse.net4j.util.io.ExtendedDataInput;
import org.eclipse.net4j.util.io.ExtendedDataOutput;
/**
* @author Eike Stepper
* @since 4.1
* @noextend This interface is not intended to be extended by clients.
*/
public abstract class AbstractCDOIDByteArray extends AbstractCDOID
{
  public static final String NULL_VALUE = null;

  private static final long serialVersionUID = 1L;

  // The 16-byte (UUID-sized) payload. May be null after the no-arg constructor
  // until one of the read(...) methods populates it.
  private byte[] value;

  // No-arg constructor kept for frameworks/deserialization; the value is
  // expected to be supplied later via read(String) or read(ExtendedDataInput).
  public AbstractCDOIDByteArray()
  {
  }

  public AbstractCDOIDByteArray(byte[] value)
  {
    // Only exactly-16-byte arrays are legal (UUID size).
    CheckUtil.checkArg(value != null && value.length == 16, "Illegal UUID value");
    this.value = value;
  }

  // Returns the internal array without copying; callers must not mutate it.
  public byte[] getByteArrayValue()
  {
    return value;
  }

  // URI-fragment encoding; inverse of read(String) below.
  public String toURIFragment()
  {
    return CDOIDUtil.encodeUUID(value);
  }

  @Override
  public void read(String fragmentPart)
  {
    value = CDOIDUtil.decodeUUID(fragmentPart);
  }

  // Wire format: a single length-prefixed byte array, mirrored by write() below.
  @Override
  public void read(ExtendedDataInput in) throws IOException
  {
    value = in.readByteArray();
  }

  @Override
  public void write(ExtendedDataOutput out) throws IOException
  {
    out.writeByteArray(value);
  }

  // Equality requires the same concrete class (getClass comparison, not
  // instanceof), so IDs of different subtypes never compare equal even with
  // identical bytes.
  @Override
  public boolean equals(Object obj)
  {
    if (obj == this)
    {
      return true;
    }
    if (obj != null && obj.getClass() == getClass())
    {
      AbstractCDOIDByteArray that = (AbstractCDOIDByteArray)obj;
      return Arrays.equals(value, that.value);
    }
    return false;
  }

  // Mixes in the concrete class hash so subtypes with equal bytes hash apart,
  // consistent with the getClass()-based equals above.
  @Override
  public int hashCode()
  {
    return getClass().hashCode() ^ Arrays.hashCode(value);
  }
}
| apache-2.0 |
errorplayer/LaLa-Weather | app/src/main/java/com/errorplayer/lala_weather/db/City.java | 845 | package com.errorplayer.lala_weather.db;
import org.litepal.crud.DataSupport;
/**
* Created by linze on 2017/7/7.
*/
/**
 * Model object for a city row, persisted through LitePal's {@code DataSupport}.
 * Holds the database id, display name, remote city code and the id of the
 * province the city belongs to.
 */
public class City extends DataSupport {

    private int id;
    private String cityName;
    private int cityCode;
    private int provinceId;

    /** Database primary key. */
    public int getId() {
        return id;
    }

    public void setId(int value) {
        id = value;
    }

    /** Human-readable city name. */
    public String getCityName() {
        return cityName;
    }

    public void setCityName(String value) {
        cityName = value;
    }

    /** Numeric code identifying the city. */
    public int getCityCode() {
        return cityCode;
    }

    public void setCityCode(int value) {
        cityCode = value;
    }

    /** Id of the owning province. */
    public int getProvinceId() {
        return provinceId;
    }

    public void setProvinceId(int value) {
        provinceId = value;
    }
}
| apache-2.0 |
DevopsJK/SuitAgent | src/main/java/com/falcon/suitagent/web/Response.java | 2763 | /*
* www.yiji.com Inc.
* Copyright (c) 2016 All Rights Reserved
*/
package com.falcon.suitagent.web;
/*
* 修订记录:
* guqiu@yiji.com 2016-07-26 13:54 创建
*/
import com.falcon.suitagent.config.AgentConfiguration;
import com.falcon.suitagent.plugins.metrics.MetricsCommon;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
/**
* @author guqiu@yiji.com
*/
/**
 * Minimal HTTP/1.1 response writer for the agent's embedded web endpoint.
 * Dispatches the parsed request path to the mock-service management commands
 * (/mock/...), the version endpoint (/version), or a 404 page.
 */
@Slf4j
public class Response {

    private static final int BUFFER_SIZE = 1024;

    Request request;
    OutputStream output;

    public Response(OutputStream output) {
        this.output = output;
    }

    public void setRequest(Request request) {
        this.request = request;
    }

    /** Writes a fixed 404 response (body is 23 ASCII bytes, matching the header). */
    public void send_404() throws IOException {
        String errorMessage = "HTTP/1.1 404 File Not Found\r\n" +
                "Content-Type: text/html\r\n" +
                "Content-Length: 23\r\n" +
                "\r\n" +
                "<h1>File Not Found</h1>";
        output.write(errorMessage.getBytes());
    }

    /**
     * Writes an HTTP 200 response carrying {@code html} as the body.
     *
     * @param html body text, encoded as UTF-8
     * @throws IOException if the socket stream fails
     */
    public void send(String html) throws IOException {
        // Bug fix: Content-Length must be the body size in BYTES. The original
        // used html.length() (a char count), which under-reports for any
        // non-ASCII body and truncates the response at the client.
        byte[] body = html.getBytes(StandardCharsets.UTF_8);
        String header = "HTTP/1.1 200 \r\n" +
                "Content-Type: text/html\r\n" +
                "Content-Length: " + body.length + "\r\n" +
                "\r\n";
        output.write(header.getBytes(StandardCharsets.UTF_8));
        output.write(body);
    }

    /**
     * Routes the request:
     *   /mock/list                                  -> current mock-service list
     *   /mock/{add|remove}/{serviceType}/{name}     -> mutate mock services
     *   /version                                    -> agent version string
     *   anything else                               -> 404
     * Exactly one response is written per request.
     */
    public void doRequest() throws IOException {
        List<String> urlPath = request.getUrlPath();
        if (urlPath.size() >= 1 && "mock".equals(urlPath.get(0))) {
            if (urlPath.size() < 2) {
                send("error! must have option");
                return;
            }
            String msg = "";
            String option = urlPath.get(1);
            if ("list".equals(option)) {
                msg = MetricsCommon.getMockServicesList();
            } else if (urlPath.size() != 4) {
                // Bug fix: the original sent this error page and then fell
                // through to send(msg) below, writing a second (empty, malformed)
                // response on the same socket. Return after the error instead.
                send("<h3>error! url path must be match : /mock/{option}/{serviceType}/{serviceName}</h3>");
                return;
            } else {
                String type = urlPath.get(2);
                String server = urlPath.get(3);
                if ("add".equals(option)) {
                    MetricsCommon.addMockService(type, server);
                    msg = String.format("<h2>add mock server %s:%s success</h2>", type, server);
                } else if ("remove".equals(option)) {
                    MetricsCommon.removeMockService(type, server);
                    msg = String.format("<h2>remove mock server %s:%s success</h2>", type, server);
                }
                // Unknown option with 4 path segments: an empty 200 body, as before.
            }
            send(msg);
        } else if (urlPath.size() >= 1 && "version".equals(urlPath.get(0))) {
            send("Version " + AgentConfiguration.VERSION);
        } else {
            send_404();
        }
    }
}
| apache-2.0 |
konik32/openrest | openrest-filters/src/main/java/pl/openrest/filters/query/PredicateContextBuilder.java | 587 | package pl.openrest.filters.query;
import java.io.Serializable;
import org.springframework.data.mapping.PersistentProperty;
import pl.openrest.predicate.parser.FilterPart;
import pl.openrest.predicate.parser.PredicateParts;
/**
 * Fluent builder assembling a {@link PredicateContext} from a parsed filter
 * tree, an entity id, pre-parsed predicate parts and static filters. All
 * with* methods return the builder itself for chaining; see implementations
 * for the exact combination semantics.
 */
public interface PredicateContextBuilder {

    /** Incorporates the parsed filter tree into the context under construction. */
    PredicateContextBuilder withFilterTree(FilterPart tree);

    /** Incorporates an id constraint for the given persistent id property. */
    PredicateContextBuilder withId(PersistentProperty<?> idProperty, Serializable id);

    /** Incorporates pre-parsed predicate parts. */
    PredicateContextBuilder withPredicateParts(PredicateParts predicateParts);

    /** Requests inclusion of the statically-declared filters. */
    PredicateContextBuilder withStaticFilters();

    /** Produces the assembled context. */
    PredicateContext<?> build();
}
| apache-2.0 |
inferred/freevisitor | src/it/j8/main/java/org/inferred/freevisitor/tests/j8/TypeA.java | 88 | package org.inferred.freevisitor.tests.j8;
public class TypeA extends VisitableType {}
| apache-2.0 |
frydsh/GoogleBody | src/com/google/android/apps/body/tdl/TdlMath.java | 2574 | // Copyright 2011 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.android.apps.body.tdl;
import android.opengl.Matrix;
/**
* Helper class for math.
*/
public class TdlMath {

    /**
     * Fills {@code m} (length 16, column-major OpenGL layout) with a
     * perspective projection matrix.
     *
     * @param m      output matrix, overwritten entirely
     * @param angle  vertical field of view in radians
     * @param aspect viewport width / height
     * @param near   near clip distance (positive)
     * @param far    far clip distance (positive, &gt; near)
     */
    public static void perspective(
            float[] m, float angle, float aspect, float near, float far) {
        // tan(0.5*(pi - angle)) == cot(angle/2), the focal-length factor.
        float f = (float) Math.tan(0.5 * (Math.PI - angle));
        float range = near - far;
        m[0] = f / aspect;
        m[1] = 0;
        m[2] = 0;
        m[3] = 0;
        m[4] = 0;
        m[5] = f;
        m[6] = 0;
        m[7] = 0;
        m[8] = 0;
        m[9] = 0;
        m[10] = (far + near) / range;
        m[11] = -1;
        m[12] = 0;
        m[13] = 0;
        m[14] = 2.0f * far * near / range;
        m[15] = 0;
    }

    /**
     * Fills {@code m} with a picking matrix that maps the {@code width} x
     * {@code height} region centered on (x, y) onto the whole viewport
     * ({x, y, w, h} in {@code viewport}).
     */
    public static void pickMatrix(
            float[] m, float x, float y, float width, float height, float[] viewport) {
        m[0] = viewport[2] / width;
        m[1] = 0;
        m[2] = 0;
        m[3] = 0;
        m[4] = 0;
        m[5] = viewport[3] / height;
        m[6] = 0;
        m[7] = 0;
        m[8] = 0;
        m[9] = 0;
        m[10] = 1;
        m[11] = 0;
        m[12] = (viewport[2] + (viewport[0] - x) * 2) / width;
        m[13] = (viewport[3] + (viewport[1] - y) * 2) / height;
        m[14] = 0;
        m[15] = 1;
    }

    /** Component-wise difference a - b of two 3-vectors (new array). */
    public static float[] subVector(float[] a, float[] b) {
        float[] result = { a[0] - b[0], a[1] - b[1], a[2] - b[2] };
        return result;
    }

    /** Dot product of two 3-vectors. */
    public static float dot(float[] a, float[] b) {
        return a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
    }

    /** Returns a unit-length copy of the 3-vector {@code a}. */
    public static float[] normalize(float[] a) {
        // Improvement: the original called android.opengl.Matrix.length(), which
        // is just (float) Math.sqrt(x*x + y*y + z*z). Computing it directly keeps
        // this pure-math helper free of the Android runtime and JVM-testable,
        // with bit-identical results.
        float f = 1 / (float) Math.sqrt(a[0] * a[0] + a[1] * a[1] + a[2] * a[2]);
        float[] result = { a[0] * f, a[1] * f, a[2] * f };
        return result;
    }

    /** Cross product a x b of two 3-vectors (new array). */
    public static float[] cross(float[] a, float[] b) {
        float[] result = {
            a[1] * b[2] - a[2] * b[1],
            a[2] * b[0] - a[0] * b[2],
            a[0] * b[1] - a[1] * b[0]
        };
        return result;
    }
}
| apache-2.0 |
SmartDeveloperHub/sdh-curator-connector | src/test/java/org/smartdeveloperhub/curator/connector/io/DeliveryChannelParserTest.java | 2998 | /**
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
* This file is part of the Smart Developer Hub Project:
* http://www.smartdeveloperhub.org/
*
* Center for Open Middleware
* http://www.centeropenmiddleware.com/
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
* Copyright (C) 2015-2016 Center for Open Middleware.
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
* Artifact : org.smartdeveloperhub.curator:sdh-curator-connector:0.2.0
* Bundle : sdh-curator-connector-0.2.0.jar
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
*/
package org.smartdeveloperhub.curator.connector.io;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.fail;
import org.junit.Test;
import org.smartdeveloperhub.curator.protocol.DeliveryChannel;
import org.smartdeveloperhub.curator.protocol.vocabulary.STOA;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Resource;
/**
 * Tests for {@code DeliveryChannelParser.fromModel(Model, Resource)} driven by
 * Turtle fixtures under data/deliveryChannel/.
 */
public class DeliveryChannelParserTest {

    // A complete delivery-channel resource must parse to a non-null result.
    @Test
    public void testFromModel$happyPath() {
        new ParserTester("data/deliveryChannel/full.ttl",STOA.DELIVERY_CHANNEL_TYPE) {
            @Override
            protected void exercise(final Model model, final Resource target) {
                final DeliveryChannel result=DeliveryChannelParser.fromModel(model, target);
                assertThat(result,notNullValue());
                // NOTE(review): leftover debug output; consider removing or routing
                // through a logger.
                System.out.println(result);
            }
        }.verify();
    }

    // A definition missing amqp:routingKey must fail with the exact
    // unresolved-variable message and no underlying cause.
    @Test
    public void testFromModel$fail$noRoutingKey() {
        new ParserTester("data/deliveryChannel/missing_routing_key.ttl",STOA.DELIVERY_CHANNEL_TYPE) {
            @Override
            protected void exercise(final Model model, final Resource target) {
                try {
                    DeliveryChannelParser.fromModel(model, target);
                    fail("Should not return a delivery channel without routing key");
                } catch (final Exception e) {
                    assertThat(e.getMessage(),equalTo("Variable routingKey (literal) not bound when resolving property amqp:routingKey of resource "+target+""));
                    assertThat(e.getCause(),nullValue());
                }
            }
        }.verify();
    }
}
| apache-2.0 |
OpenTOSCA/container | org.opentosca.planbuilder/org.opentosca.planbuilder.type.plugin.connectsto/src/main/java/org/opentosca/planbuilder/type/plugin/connectsto/core/ConfigureRelationsPlugin.java | 2350 | package org.opentosca.planbuilder.type.plugin.connectsto.core;
import java.util.List;
import java.util.Objects;
import org.eclipse.winery.model.tosca.TInterface;
import org.eclipse.winery.model.tosca.TNodeTemplate;
import org.eclipse.winery.model.tosca.TRelationshipTemplate;
import org.eclipse.winery.model.tosca.TRelationshipType;
import org.opentosca.container.core.model.ModelUtils;
import org.opentosca.container.core.model.csar.Csar;
import org.opentosca.planbuilder.core.plugins.context.PlanContext;
import org.opentosca.planbuilder.core.plugins.typebased.IPlanBuilderTypePlugin;
/**
 * Base class for plan-builder plugins that handle TOSCA relationship templates
 * whose relationship type declares the standard "configure" lifecycle
 * interface. Node templates are never handled; termination handling is not yet
 * defined.
 */
public abstract class ConfigureRelationsPlugin<T extends PlanContext> implements IPlanBuilderTypePlugin<T> {

    /** Fully-qualified name of the TOSCA relationship "configure" interface. */
    public static final String INTERFACE_NAME =
        "http://docs.oasis-open.org/tosca/ns/2011/12/interfaces/relationship/configure";

    public static final String OPERATION_POST_CONFIGURE_SOURCE = "postConfigureSource";

    public static final String OPERATION_POST_CONFIGURE_TARGET = "postConfigureTarget";

    @Override
    public boolean canHandleCreate(Csar csar, final TNodeTemplate nodeTemplate) {
        // This plugin only operates on relationship templates, never on nodes.
        return false;
    }

    /**
     * Returns true when the relationship's type declares an interface named
     * {@link #INTERFACE_NAME} (case-insensitive).
     */
    @Override
    public boolean canHandleCreate(Csar csar, final TRelationshipTemplate relationshipTemplate) {
        // Cleanup: the original resolved the relationship type twice (once into a
        // local, once inside an Objects.nonNull guard) and null-checked the
        // interface list twice. A single lookup and a single null check are
        // behaviorally equivalent.
        final TRelationshipType relationshipType = ModelUtils.findRelationshipType(relationshipTemplate, csar);
        final List<TInterface> interfaces = relationshipType.getInterfaces();
        if (interfaces == null) {
            return false;
        }
        for (final TInterface iface : interfaces) {
            if (iface.getName().equalsIgnoreCase(INTERFACE_NAME)) {
                return true;
            }
        }
        return false;
    }

    /** Plugin id: the concrete subclass's canonical class name. */
    @Override
    public String getID() {
        return getClass().getCanonicalName();
    }

    @Override
    public boolean canHandleTerminate(Csar csar, TNodeTemplate nodeTemplate) {
        // TODO we have to define the semantics of a disconnect first
        return false;
    }

    @Override
    public boolean canHandleTerminate(Csar csar, TRelationshipTemplate relationshipTemplate) {
        // TODO we have to define the semantics of a disconnect first
        return false;
    }
}
| apache-2.0 |
reinhard/indoqa-wadldoc-maven-plugin | src/test/java/com/indoqa/maven/wadldoc/transformation/Wadl2HtmlPipelineTest.java | 2549 | /*
* Licensed to Indoqa Software Design und Beratung GmbH (Indoqa)
* under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information
* regarding copyright ownership. Indoqa licenses this file
* to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.indoqa.maven.wadldoc.transformation;
import java.io.ByteArrayOutputStream;
import java.net.URL;
import junit.framework.Assert;
import org.apache.commons.io.IOUtils;
import org.custommonkey.xmlunit.Diff;
import org.junit.Test;
/**
 * Tests for {@code Wadl2HtmlPipeline}: constructor validation plus two
 * golden-file comparisons (XMLUnit Diff) of the generated HTML.
 */
public class Wadl2HtmlPipelineTest {

    // A null source URL must be rejected at construction time.
    @Test(expected = IllegalArgumentException.class)
    public void constructPipelineWithURL() {
        new Wadl2HtmlPipeline((URL) null, null);
    }

    // No stylesheet: output must be byte-for-byte identical() to the golden file.
    @Test
    public void simplePipeline() throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        new Wadl2HtmlPipeline(this.getClass().getResource("wadl.xml"), null, true).execute(baos);
        // NOTE(review): baos was just constructed, so this assertion is vacuous.
        Assert.assertNotNull(baos);
        Diff diff = createDiff("test1-result.html", baos);
        Assert.assertTrue("Pieces of XML are not identical. " + diff, diff.identical());
    }

    // With a stylesheet: only similar() is required (attribute/order tolerance).
    @Test
    public void stylesheet() throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        new Wadl2HtmlPipeline(this.getClass().getResource("wadl.xml"), "stylesheet.css", true).execute(baos);
        Assert.assertNotNull(baos);
        Diff diff = createDiff("test2-result.html", baos);
        Assert.assertTrue("Pieces of XML are not identical. " + diff, diff.similar());
    }

    // Convenience overload: resolves the golden file relative to this class.
    private static Diff createDiff(String fileName, ByteArrayOutputStream actual) throws Exception {
        return createDiff(Wadl2HtmlPipelineTest.class.getResource(fileName), actual);
    }

    // Builds an XMLUnit Diff between the golden file contents and the captured output.
    private static Diff createDiff(URL expected, ByteArrayOutputStream actual) throws Exception {
        String string1 = IOUtils.toString(expected.openStream());
        String string2 = actual.toString();
        return new Diff(string1, string2);
    }
}
| apache-2.0 |
Doun2017/StudyJavaCode | Chapter21Concurrency/app/src/main/java/com/example/doun/chapter21concurrency/ReaderWriterMapPack/MapData.java | 2044 | //: net/mindview/util/MapData.java
// A Map filled with data using a generator object.
package com.example.doun.chapter21concurrency.ReaderWriterMapPack;
//package net.mindview.util;
import java.util.*;
// A Map filled with data using a generator object.
public class MapData<K, V> extends LinkedHashMap<K, V> {

    /** Fills the map with {@code quantity} entries drawn from a key/value pair generator. */
    public MapData(Generator<Pair<K, V>> gen, int quantity) {
        for (int remaining = quantity; remaining > 0; remaining--) {
            Pair<K, V> pair = gen.next();
            put(pair.key, pair.value);
        }
    }

    /** Fills the map with {@code quantity} entries; keys and values come from separate generators. */
    public MapData(Generator<K> genK, Generator<V> genV,
                   int quantity) {
        for (int remaining = quantity; remaining > 0; remaining--) {
            K key = genK.next();
            put(key, genV.next());
        }
    }

    /** Fills the map with {@code quantity} generated keys, each mapped to the same value. */
    public MapData(Generator<K> genK, V value, int quantity) {
        for (int remaining = quantity; remaining > 0; remaining--) {
            put(genK.next(), value);
        }
    }

    /** One entry per key from the iterable; values drawn from a generator. */
    public MapData(Iterable<K> genK, Generator<V> genV) {
        for (K key : genK) {
            put(key, genV.next());
        }
    }

    /** One entry per key from the iterable, each mapped to the same value. */
    public MapData(Iterable<K> genK, V value) {
        for (K key : genK) {
            put(key, value);
        }
    }

    // Static factories mirroring each constructor so call sites get type inference.

    public static <K, V> MapData<K, V>
    map(Generator<Pair<K, V>> gen, int quantity) {
        return new MapData<K, V>(gen, quantity);
    }

    public static <K, V> MapData<K, V>
    map(Generator<K> genK, Generator<V> genV, int quantity) {
        return new MapData<K, V>(genK, genV, quantity);
    }

    public static <K, V> MapData<K, V>
    map(Generator<K> genK, V value, int quantity) {
        return new MapData<K, V>(genK, value, quantity);
    }

    public static <K, V> MapData<K, V>
    map(Iterable<K> genK, Generator<V> genV) {
        return new MapData<K, V>(genK, genV);
    }

    public static <K, V> MapData<K, V>
    map(Iterable<K> genK, V value) {
        return new MapData<K, V>(genK, value);
    }
} ///:~
| apache-2.0 |
thescouser89/pnc | indy-repository-manager/src/test/java/org/jboss/pnc/indyrepositorymanager/AllSessionUrlsForBuildAreAlikeTest.java | 2572 | /**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2020 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.indyrepositorymanager;
import org.jboss.pnc.indyrepositorymanager.fixture.TestBuildExecution;
import org.jboss.pnc.enums.RepositoryType;
import org.jboss.pnc.spi.repositorymanager.BuildExecution;
import org.jboss.pnc.spi.repositorymanager.model.RepositoryConnectionInfo;
import org.jboss.pnc.spi.repositorymanager.model.RepositorySession;
import org.jboss.pnc.test.category.ContainerTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.util.Collections;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
@Category(ContainerTest.class)
public class AllSessionUrlsForBuildAreAlikeTest extends AbstractRepositoryManagerDriverTest {

    // For a simple (non-chained) build, the dependency and toolchain URLs exposed
    // by the session's connection info must be identical. This is an invariant of
    // this driver; other repository drivers may legitimately differ.
    @Test
    public void formatRepositoryURLForSimpleInfo_AllURLsMatch() throws Exception {
        // create a dummy non-chained build execution and a repo session based on it
        BuildExecution execution = new TestBuildExecution();
        RepositorySession repositoryConfiguration = driver.createBuildRepository(
                execution,
                accessToken,
                accessToken,
                RepositoryType.MAVEN,
                Collections.emptyMap(),
                false);
        assertThat(repositoryConfiguration, notNullValue());
        RepositoryConnectionInfo connectionInfo = repositoryConfiguration.getConnectionInfo();
        assertThat(connectionInfo, notNullValue());
        // check that all URLs in the connection info are the same (this might be different in another repo driver)
        String expectedUrl = connectionInfo.getDependencyUrl();
        assertThat(connectionInfo.getToolchainUrl(), equalTo(expectedUrl));
        // assertThat(connectionInfo.getDeployPath(), equalTo(expectedUrl));
    }
}
| apache-2.0 |
tsygipova/java_first | addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/tests/TestBase.java | 542 | package ru.stqa.pft.addressbook.tests;
import org.openqa.selenium.remote.BrowserType;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeSuite;
import ru.stqa.pft.addressbook.appmanager.ApplicationManager;
/**
* Created by Дарья on 03-Sep-16.
*/
public class TestBase {

    // Shared fixture for all suite tests; a single Chrome-driven ApplicationManager.
    protected static final ApplicationManager app = new ApplicationManager(BrowserType.CHROME);

    // Initializes the shared fixture once, before any test in the suite runs.
    @BeforeSuite
    public void setUp() throws Exception {
        app.init();
    }

    // Tears the fixture down after the whole suite has finished.
    @AfterSuite
    public void tearDown() {
        app.stop();
    }
}
lucasnoetzold/Viagens | src/java/com/rotoplastyc/util/NavDef.java | 641 | package com.rotoplastyc.util;
/**
 * Immutable navigation entry: a content pointer, an icon name and a display
 * label. Provides two shared error entries (ERRO / ERRO2).
 */
public class NavDef {

    public final static NavDef ERRO = new NavDef("err", "mood_bad", "ERRO");
    public final static NavDef ERRO2 = new NavDef("err", "mood", "ERRO");

    // Three named final fields replace the original's positional String[] slots.
    private final String contentPointer;
    private final String icon;
    private final String displayName;

    public NavDef(String contentPointer, String icon, String displayName) {
        this.contentPointer = contentPointer;
        this.icon = icon;
        this.displayName = displayName;
    }

    public String getContentPointer() {
        return contentPointer;
    }

    public String getIcon() {
        return icon;
    }

    public String getDisplayName() {
        return displayName;
    }
}
| apache-2.0 |
ilgrosso/oldSyncopeIdM | client/src/main/java/org/syncope/types/CipherAlgorithm.java | 1137 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.syncope.types;
/**
 * Supported cipher/digest algorithms, each carrying the exact name expected by
 * the JCA APIs (e.g. "SHA-1" for {@link #SHA1}).
 */
public enum CipherAlgorithm {

    MD5("MD5"),
    SHA1("SHA-1"),
    SHA256("SHA-256"),
    AES("AES");

    // JCA algorithm name for this constant.
    private final String name;

    CipherAlgorithm(final String name) {
        this.name = name;
    }

    /** Returns the JCA algorithm name. */
    public final String getAlgorithm() {
        return name;
    }
}
| apache-2.0 |
pepperonas/AndroidDemos | jsonlayout/src/test/java/io/celox/androiddemos/jsonlayout/ExampleUnitTest.java | 1014 | /*
* Copyright (c) 2017 Martin Pfeffer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.celox.androiddemos.jsonlayout;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {

    // Placeholder arithmetic check from the Android Studio project template.
    @Test
    public void addition_isCorrect() throws Exception {
        assertEquals(4, 2 + 2);
    }
}
freeVM/freeVM | enhanced/archive/classlib/java6/modules/awt/src/main/java/common/java/awt/font/ShapeGraphicAttribute.java | 3798 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Ilya S. Okomin
* @version $Revision$
*/
package java.awt.font;
import java.awt.BasicStroke;
import java.awt.Graphics2D;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import org.apache.harmony.misc.HashCode;
public final class ShapeGraphicAttribute extends GraphicAttribute {
// shape to render
private Shape fShape;
// flag, if the shape should be stroked (true) or filled (false)
private boolean fStroke;
// bounds of the shape
private Rectangle2D fBounds;
// X coordinate of the origin point
private float fOriginX;
// Y coordinate of the origin point
private float fOriginY;
// width of the shape
private float fShapeWidth;
// height of the shape
private float fShapeHeight;
public static final boolean STROKE = true;
public static final boolean FILL = false;
public ShapeGraphicAttribute(Shape shape, int alignment, boolean stroke) {
super(alignment);
this.fShape = shape;
this.fStroke = stroke;
this.fBounds = fShape.getBounds2D();
this.fOriginX = (float)fBounds.getMinX();
this.fOriginY = (float)fBounds.getMinY();
this.fShapeWidth = (float)fBounds.getWidth();
this.fShapeHeight = (float)fBounds.getHeight();
}
@Override
public int hashCode() {
HashCode hash = new HashCode();
hash.append(fShape.hashCode());
hash.append(getAlignment());
return hash.hashCode();
}
public boolean equals(ShapeGraphicAttribute sga) {
if (sga == null) {
return false;
}
if (sga == this) {
return true;
}
return ( fStroke == sga.fStroke &&
getAlignment() == sga.getAlignment() &&
fShape.equals(sga.fShape));
}
@Override
public boolean equals(Object obj) {
try {
return equals((ShapeGraphicAttribute) obj);
}
catch(ClassCastException e) {
return false;
}
}
@Override
public void draw(Graphics2D g2, float x, float y) {
AffineTransform at = AffineTransform.getTranslateInstance(x, y);
if (fStroke == STROKE){
Stroke oldStroke = g2.getStroke();
g2.setStroke(new BasicStroke());
g2.draw(at.createTransformedShape(fShape));
g2.setStroke(oldStroke);
} else {
g2.fill(at.createTransformedShape(fShape));
}
}
@Override
public float getAdvance() {
return Math.max(0, fShapeWidth + fOriginX);
}
@Override
public float getAscent() {
return Math.max(0, -fOriginY);
}
@Override
public Rectangle2D getBounds() {
return (Rectangle2D)fBounds.clone();
}
@Override
public float getDescent() {
return Math.max(0, fShapeHeight + fOriginY);
}
}
| apache-2.0 |
bobo159357456/bobo | android_project/ClassDemo02/app/src/main/java/com/jikexueyuan/classdemo/ClassDemo03.java | 365 | package com.jikexueyuan.classdemo;
/**
* Created by zmzp on 14-12-4.
*/
/** Minimal demo class whose single method prints a fixed greeting. */
class Student {

    // Exact greeting text; kept byte-identical to the original output.
    private static final String GREETING = "Hello Jikexueyuan";

    /** Prints the greeting to standard out. */
    public void tell() {
        System.out.println(GREETING);
    }
}
/** Demo entry point: calls a method on an anonymous (never stored) object. */
public class ClassDemo03 {

    public static void main(String[] args) {
        // Anonymous object: constructed, used once, and immediately discarded.
        new Student().tell();
    }
}
| apache-2.0 |
scottrichards/PolarionLive2015Android | ParseStarterProject/src/main/java/com/polarion/starter/utility/URLService.java | 511 | package com.polarion.starter.utility;
/**
* Created by scottrichards on 9/5/15.
*/
/**
 * Builds absolute service URLs by prefixing a fixed base URL.
 */
public class URLService extends Object {

    private static final String baseURL = "http://54.183.27.217";

    /**
     * Returns the absolute URL formed by appending {@code url} to the base URL,
     * inserting a single '/' separator when the fragment does not already start
     * with one.
     *
     * @param url relative path, with or without a leading '/'; may be empty or null
     * @return the absolute URL (base URL plus trailing '/' for empty/null input)
     */
    static public String buildUrl(String url) {
        // Robustness fix: the original called url.charAt(0) unconditionally,
        // which threw StringIndexOutOfBoundsException for an empty fragment
        // (and NullPointerException for null).
        if (url == null || url.isEmpty()) {
            return baseURL + '/';
        }
        if (url.charAt(0) != '/') {
            return baseURL + '/' + url;
        }
        return baseURL + url;
    }
}
| apache-2.0 |
CSCSI/Triana | triana-gui/src/main/java/org/trianacode/gui/help/UrlHistory.java | 4922 | /*
* The University of Wales, Cardiff Triana Project Software License (Based
* on the Apache Software License Version 1.1)
*
* Copyright (c) 2007 University of Wales, Cardiff. All rights reserved.
*
* Redistribution and use of the software in source and binary forms, with
* or without modification, are permitted provided that the following
* conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The end-user documentation included with the redistribution, if any,
* must include the following acknowledgment: "This product includes
* software developed by the University of Wales, Cardiff for the Triana
* Project (http://www.trianacode.org)." Alternately, this
* acknowledgment may appear in the software itself, if and wherever
* such third-party acknowledgments normally appear.
*
* 4. The names "Triana" and "University of Wales, Cardiff" must not be
* used to endorse or promote products derived from this software
* without prior written permission. For written permission, please
* contact triana@trianacode.org.
*
* 5. Products derived from this software may not be called "Triana," nor
* may Triana appear in their name, without prior written permission of
* the University of Wales, Cardiff.
*
* 6. This software may not be sold, used or incorporated into any product
* for sale to third parties.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
* NO EVENT SHALL UNIVERSITY OF WALES, CARDIFF OR ITS CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*
* ------------------------------------------------------------------------
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Triana Project. For more information on the
* Triana Project, please see. http://www.trianacode.org.
*
* This license is based on the BSD license as adopted by the Apache
* Foundation and is governed by the laws of England and Wales.
*
*/
package org.trianacode.gui.help;
import java.net.URL;
import java.util.Enumeration;
import java.util.Vector;
/**
* @version $Revision: 4048 $
*/
public class UrlHistory extends Vector {

    // Index of the entry currently shown; -1 while the history is empty.
    private int urlIndex;

    // Number of live entries; slots at or beyond this index are stale.
    private int urlCount;

    // A vector which contains the UrlEventListeners
    protected Vector urlEventVector;

    public UrlHistory() {
        super();
        urlEventVector = new Vector();
        urlIndex = -1;
        urlCount = 0;
    }

    // Appends a URL after the current position and makes it current. Any forward
    // history past the previous position is implicitly discarded, because
    // urlCount is reset to urlIndex + 1. Listeners are notified afterwards.
    public void addUrl(URL url) {
        urlIndex++;
        urlCount = urlIndex + 1;
        // Grow the backing Vector in chunks of 10 to avoid resizing on every add.
        if (urlCount > elementCount) {
            setSize(urlCount + 10);
        }
        setElementAt(url, urlIndex);
        processUrlEvent(new UrlEvent(this));
    }

    // Steps back one entry and returns it, or returns null (without notifying
    // listeners) when already at the oldest entry.
    public URL getPreviousUrl() {
        if (urlIndex <= 0) {
            return null;
        }
        urlIndex--;
        processUrlEvent(new UrlEvent(this));
        return (URL) elementAt(urlIndex);
    }

    // Steps forward one entry and returns it, or returns null (without notifying
    // listeners) when already at the newest live entry.
    public URL getNextUrl() {
        if (urlIndex >= (urlCount - 1)) {
            return null;
        }
        urlIndex++;
        processUrlEvent(new UrlEvent(this));
        return (URL) elementAt(urlIndex);
    }

    // Current position in the history (-1 when empty).
    public int getIndex() {
        return urlIndex;
    }

    // Number of live entries (ignores the spare capacity added by setSize).
    public int countUrls() {
        return urlCount;
    }

    public void addUrlEventListener(UrlEventListener urlListener) {
        urlEventVector.addElement(urlListener);
    }

    public void removeUrlEventListener(UrlEventListener urlListener) {
        urlEventVector.removeElement(urlListener);
    }

    // Notifies every registered listener that the current index changed.
    private void processUrlEvent(UrlEvent event) {
        UrlEventListener listener;
        Enumeration e = urlEventVector.elements();
        while (e.hasMoreElements()) {
            listener = (UrlEventListener) e.nextElement();
            listener.indexChanged(event);
        }
    }
}
| apache-2.0 |
fmrsabino/library | src/bftsmart/demo/listvalue/BFTMapList.java | 2180 | /**
Copyright (c) 2007-2013 Alysson Bessani, Eduardo Alchieri, Paulo Sousa, and the authors indicated in the @author tags
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package bftsmart.demo.listvalue;
import java.util.HashMap;
import java.util.Map;
import java.io.Serializable;
import java.util.List;
/**
*
* @author sweta
*/
public class BFTMapList implements Serializable {
private static final long serialVersionUID = -8898539992606449057L;
private Map<String, List<String>> tableList = null;
public BFTMapList() {
tableList=new HashMap<String, List<String>>();
}
public Map<String, List<String>> getLists() {
return tableList;
}
public List<String> addList(String key, List<String> list) {
return tableList.put(key, list);
}
public boolean addData(String tableName, String value) {
List <String> list = tableList.get(tableName);
return list.add(value);
}
public List<String> getName(String tableName) {
return tableList.get(tableName);
}
public String getEntry(String tableName, int index) {
System.out.println("Table name: "+tableName);
System.out.println("Entry index: "+ index);
List<String> info= tableList.get(tableName);
System.out.println("Table: "+info);
return info.get(index);
}
public int getSizeofList() {
return tableList.size();
}
public int getSize(String tableName) {
List<String> table = tableList.get(tableName);
return table.size();
}
public List<String> removeList(String tableName) {
return tableList.remove(tableName);
}
public String removeEntry(String tableName,int index) {
List<String> info= tableList.get(tableName);
return info.remove(index);
}
}
| apache-2.0 |
vovagrechka/fucking-everything | phizdets/phizdets-idea/src/vgrechka/phizdetsidea/phizdets/inspections/unresolvedReference/package-info.java | 871 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* Inspection that reports unresolved and unused references.
* You can inject logic to mark some unused imports as used. See extension points in this package.
* @author Ilya.Kazakevich
*/
package vgrechka.phizdetsidea.phizdets.inspections.unresolvedReference; | apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-events/src/main/java/com/amazonaws/services/cloudwatchevents/model/InputTransformer.java | 7772 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudwatchevents.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Contains the parameters needed for you to provide custom input to a target based on one or more pieces of data
* extracted from the event.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/events-2015-10-07/InputTransformer" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InputTransformer implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON path.
* </p>
*/
private java.util.Map<String, String> inputPathsMap;
/**
* <p>
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the data
* sent to the target.
* </p>
*/
private String inputTemplate;
/**
* <p>
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON path.
* </p>
*
* @return Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON
* path.
*/
public java.util.Map<String, String> getInputPathsMap() {
return inputPathsMap;
}
/**
* <p>
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON path.
* </p>
*
* @param inputPathsMap
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON
* path.
*/
public void setInputPathsMap(java.util.Map<String, String> inputPathsMap) {
this.inputPathsMap = inputPathsMap;
}
/**
* <p>
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON path.
* </p>
*
* @param inputPathsMap
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON
* path.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public InputTransformer withInputPathsMap(java.util.Map<String, String> inputPathsMap) {
setInputPathsMap(inputPathsMap);
return this;
}
public InputTransformer addInputPathsMapEntry(String key, String value) {
if (null == this.inputPathsMap) {
this.inputPathsMap = new java.util.HashMap<String, String>();
}
if (this.inputPathsMap.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.inputPathsMap.put(key, value);
return this;
}
/**
* Removes all the entries added into InputPathsMap.
*
* @return Returns a reference to this object so that method calls can be chained together.
*/
public InputTransformer clearInputPathsMapEntries() {
this.inputPathsMap = null;
return this;
}
/**
* <p>
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the data
* sent to the target.
* </p>
*
* @param inputTemplate
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the
* data sent to the target.
*/
public void setInputTemplate(String inputTemplate) {
this.inputTemplate = inputTemplate;
}
/**
* <p>
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the data
* sent to the target.
* </p>
*
* @return Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the
* data sent to the target.
*/
public String getInputTemplate() {
return this.inputTemplate;
}
/**
* <p>
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the data
* sent to the target.
* </p>
*
* @param inputTemplate
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the
* data sent to the target.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public InputTransformer withInputTemplate(String inputTemplate) {
setInputTemplate(inputTemplate);
return this;
}
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getInputPathsMap() != null)
sb.append("InputPathsMap: ").append(getInputPathsMap()).append(",");
if (getInputTemplate() != null)
sb.append("InputTemplate: ").append(getInputTemplate());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof InputTransformer == false)
return false;
InputTransformer other = (InputTransformer) obj;
if (other.getInputPathsMap() == null ^ this.getInputPathsMap() == null)
return false;
if (other.getInputPathsMap() != null && other.getInputPathsMap().equals(this.getInputPathsMap()) == false)
return false;
if (other.getInputTemplate() == null ^ this.getInputTemplate() == null)
return false;
if (other.getInputTemplate() != null && other.getInputTemplate().equals(this.getInputTemplate()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getInputPathsMap() == null) ? 0 : getInputPathsMap().hashCode());
hashCode = prime * hashCode + ((getInputTemplate() == null) ? 0 : getInputTemplate().hashCode());
return hashCode;
}
@Override
public InputTransformer clone() {
try {
return (InputTransformer) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.cloudwatchevents.model.transform.InputTransformerMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| apache-2.0 |
springfox/springfox | swagger-contract-tests/src/main/java/springfox/test/contract/swagger/models/SameFancyPet.java | 976 | /*
*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package springfox.test.contract.swagger.models;
public class SameFancyPet extends SamePet {
private SameCategory extendedCategory;
public SameCategory getExtendedCategory() {
return extendedCategory;
}
public void setExtendedCategory(SameCategory extendedCategories) {
this.extendedCategory = extendedCategories;
}
} | apache-2.0 |
motorina0/flowable-engine | modules/flowable-engine/src/main/java/org/flowable/engine/impl/persistence/cache/EntityCacheImpl.java | 3610 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.persistence.cache;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.flowable.engine.common.impl.persistence.entity.Entity;
/**
* @author Joram Barrez
*/
public class EntityCacheImpl implements EntityCache {
protected Map<Class<?>, Map<String, CachedEntity>> cachedObjects = new HashMap<Class<?>, Map<String,CachedEntity>>();
@Override
public CachedEntity put(Entity entity, boolean storeState) {
Map<String, CachedEntity> classCache = cachedObjects.get(entity.getClass());
if (classCache == null) {
classCache = new HashMap<String, CachedEntity>();
cachedObjects.put(entity.getClass(), classCache);
}
CachedEntity cachedObject = new CachedEntity(entity, storeState);
classCache.put(entity.getId(), cachedObject);
return cachedObject;
}
@Override
@SuppressWarnings("unchecked")
public <T> T findInCache(Class<T> entityClass, String id) {
CachedEntity cachedObject = null;
Map<String, CachedEntity> classCache = cachedObjects.get(entityClass);
if (classCache == null) {
classCache = findClassCacheByCheckingSubclasses(entityClass);
}
if (classCache != null) {
cachedObject = classCache.get(id);
}
if (cachedObject != null) {
return (T) cachedObject.getEntity();
}
return null;
}
protected Map<String, CachedEntity> findClassCacheByCheckingSubclasses(Class<?> entityClass) {
for (Class<?> clazz : cachedObjects.keySet()) {
if (entityClass.isAssignableFrom(clazz)) {
return cachedObjects.get(clazz);
}
}
return null;
}
@Override
public void cacheRemove(Class<?> entityClass, String entityId) {
Map<String, CachedEntity> classCache = cachedObjects.get(entityClass);
if (classCache == null) {
return;
}
classCache.remove(entityId);
}
@Override
public <T> Collection<CachedEntity> findInCacheAsCachedObjects(Class<T> entityClass) {
Map<String, CachedEntity> classCache = cachedObjects.get(entityClass);
if (classCache != null) {
return classCache.values();
}
return null;
}
@Override
@SuppressWarnings("unchecked")
public <T> List<T> findInCache(Class<T> entityClass) {
Map<String, CachedEntity> classCache = cachedObjects.get(entityClass);
if (classCache == null) {
classCache = findClassCacheByCheckingSubclasses(entityClass);
}
if (classCache != null) {
List<T> entities = new ArrayList<T>(classCache.size());
for (CachedEntity cachedObject : classCache.values()) {
entities.add((T) cachedObject.getEntity());
}
return entities;
}
return Collections.emptyList();
}
public Map<Class<?>, Map<String, CachedEntity>> getAllCachedEntities() {
return cachedObjects;
}
@Override
public void close() {
}
@Override
public void flush() {
}
}
| apache-2.0 |