hexsha stringlengths 40 40 | size int64 3 1.05M | ext stringclasses 1
value | lang stringclasses 1
value | max_stars_repo_path stringlengths 5 1.02k | max_stars_repo_name stringlengths 4 126 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses list | max_stars_count float64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 5 1.02k | max_issues_repo_name stringlengths 4 114 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses list | max_issues_count float64 1 92.2k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 5 1.02k | max_forks_repo_name stringlengths 4 136 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses list | max_forks_count float64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | avg_line_length float64 2.55 99.9 | max_line_length int64 3 1k | alphanum_fraction float64 0.25 1 | index int64 0 1M | content stringlengths 3 1.05M |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3e1e1f6457120f89f699a0f9b26c7a1d1dbf63c8 | 3,543 | java | Java | src/main/java/com/thinkgem/jeesite/modules/gsp/entity/t02receipt/T02Receipt.java | haodeSun/GSP | e2839c87f577c5ffef92b49043236c2d7ef00cf5 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/thinkgem/jeesite/modules/gsp/entity/t02receipt/T02Receipt.java | haodeSun/GSP | e2839c87f577c5ffef92b49043236c2d7ef00cf5 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/thinkgem/jeesite/modules/gsp/entity/t02receipt/T02Receipt.java | haodeSun/GSP | e2839c87f577c5ffef92b49043236c2d7ef00cf5 | [
"Apache-2.0"
] | null | null | null | 22.566879 | 107 | 0.712391 | 12,752 | /*
Copyright © 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.thinkgem.jeesite.modules.gsp.entity.t02receipt;
import org.hibernate.validator.constraints.Length;
import com.thinkgem.jeesite.common.persistence.DataEntity;
/**
* 收货信息Entity
* @author gyq
* @version 2016-10-26
*/
public class T02Receipt extends DataEntity<T02Receipt> {

	private static final long serialVersionUID = 1L;
	// Workflow (e.g. Activiti) process instance ID tying this receipt to its approval flow.
	private String procInsId;		// process instance ID
	private String receNo;		// goods-receipt number
	private String wayBillNo;		// waybill number
	private String suppCode;		// supplier number/code
	private String receName;		// name
	private String purcOrderNo;		// purchase order number
	// NOTE(review): stored as String rather than Date — presumably formatted by the caller; confirm format.
	private String arriDate;		// arrival date
	private String tranAgree;		// whether transport conditions match the agreement
	private String havePeerList;		// whether an accompanying-goods list was provided
	private String deliMan;		// delivery person
	private String reciMan;		// receiving person
	private String peerListAgree;		// whether the accompanying-goods list matches
	private String toExam;		// review/approval flag

	public T02Receipt() {
		super();
	}

	public T02Receipt(String id){
		super(id);
	}

	// All fields are validated to at most 100 characters via @Length on the getters,
	// matching the underlying column sizes.
	@Length(min=0, max=100, message="流程实例ID长度必须介于 0 和 100 之间")
	public String getProcInsId() {
		return procInsId;
	}

	public void setProcInsId(String procInsId) {
		this.procInsId = procInsId;
	}
	
	@Length(min=0, max=100, message="收货单号长度必须介于 0 和 100 之间")
	public String getReceNo() {
		return receNo;
	}

	public void setReceNo(String receNo) {
		this.receNo = receNo;
	}
	
	@Length(min=0, max=100, message="运单号长度必须介于 0 和 100 之间")
	public String getWayBillNo() {
		return wayBillNo;
	}

	public void setWayBillNo(String wayBillNo) {
		this.wayBillNo = wayBillNo;
	}
	
	@Length(min=0, max=100, message="供货者编号/代码长度必须介于 0 和 100 之间")
	public String getSuppCode() {
		return suppCode;
	}

	public void setSuppCode(String suppCode) {
		this.suppCode = suppCode;
	}
	
	@Length(min=0, max=100, message="名称长度必须介于 0 和 100 之间")
	public String getReceName() {
		return receName;
	}

	public void setReceName(String receName) {
		this.receName = receName;
	}
	
	@Length(min=0, max=100, message="采购单号长度必须介于 0 和 100 之间")
	public String getPurcOrderNo() {
		return purcOrderNo;
	}

	public void setPurcOrderNo(String purcOrderNo) {
		this.purcOrderNo = purcOrderNo;
	}
	
	@Length(min=0, max=100, message="到货日期长度必须介于 0 和 100 之间")
	public String getArriDate() {
		return arriDate;
	}

	public void setArriDate(String arriDate) {
		this.arriDate = arriDate;
	}
	
	@Length(min=0, max=100, message="运输条件是否一致长度必须介于 0 和 100 之间")
	public String getTranAgree() {
		return tranAgree;
	}

	public void setTranAgree(String tranAgree) {
		this.tranAgree = tranAgree;
	}
	
	@Length(min=0, max=100, message="有无随货同行单长度必须介于 0 和 100 之间")
	public String getHavePeerList() {
		return havePeerList;
	}

	public void setHavePeerList(String havePeerList) {
		this.havePeerList = havePeerList;
	}
	
	@Length(min=0, max=100, message="送货人长度必须介于 0 和 100 之间")
	public String getDeliMan() {
		return deliMan;
	}

	public void setDeliMan(String deliMan) {
		this.deliMan = deliMan;
	}
	
	@Length(min=0, max=100, message="接收人长度必须介于 0 和 100 之间")
	public String getReciMan() {
		return reciMan;
	}

	public void setReciMan(String reciMan) {
		this.reciMan = reciMan;
	}
	
	@Length(min=0, max=100, message="随货同行单是否一致长度必须介于 0 和 100 之间")
	public String getPeerListAgree() {
		return peerListAgree;
	}

	public void setPeerListAgree(String peerListAgree) {
		this.peerListAgree = peerListAgree;
	}
	
	@Length(min=0, max=100, message="审核长度必须介于 0 和 100 之间")
	public String getToExam() {
		return toExam;
	}

	public void setToExam(String toExam) {
		this.toExam = toExam;
	}
}
3e1e1f9a13a255d97e334aa9ada71114b7bbf043 | 1,482 | java | Java | FunctionProgram/src/com/bridgeit/Algorithprogram/VendingMachine.java | kaleusha/AlgorithmPrograms | 6b09a1ed86347681b956480369c19b1c8184761e | [
"MIT"
] | null | null | null | FunctionProgram/src/com/bridgeit/Algorithprogram/VendingMachine.java | kaleusha/AlgorithmPrograms | 6b09a1ed86347681b956480369c19b1c8184761e | [
"MIT"
] | null | null | null | FunctionProgram/src/com/bridgeit/Algorithprogram/VendingMachine.java | kaleusha/AlgorithmPrograms | 6b09a1ed86347681b956480369c19b1c8184761e | [
"MIT"
] | null | null | null | 19.76 | 52 | 0.560729 | 12,753 | package com.bridgeit.Algorithprogram;
import java.util.Scanner;
public class VendingMachine {

	/**
	 * Supported denominations, largest first. Values of 10 and above are
	 * notes; 5, 2 and 1 are coins.
	 */
	public static final int[] DENOMINATIONS = {1000, 500, 100, 50, 10, 5, 2, 1};

	public static void main(String[] args) {
		Scanner scanner = new Scanner(System.in);
		System.out.println("Enter the amount=");
		int amount = scanner.nextInt();
		scanner.close();

		int[] counts = countNotes(amount);
		for (int i = 0; i < DENOMINATIONS.length; i++) {
			if (counts[i] > 0) {
				int denom = DENOMINATIONS[i];
				// Preserve the original wording: large values are "notes",
				// 5/2/1 are "Rs coins".
				String unit = denom >= 10 ? "notes" : "Rs coins";
				System.out.println("No of " + denom + " " + unit + "= " + counts[i]);
			}
		}
	}

	/**
	 * Computes the greedy note/coin breakdown of {@code amount}.
	 *
	 * <p>Fixes a bug in the original version where the 500-note branch was an
	 * {@code else if} of the 1000-note branch, so an amount such as 1500 was
	 * broken down as 1&times;1000 + 5&times;100 instead of 1&times;1000 +
	 * 1&times;500.
	 *
	 * @param amount the amount to break down; must be non-negative
	 * @return an array of counts aligned index-for-index with {@link #DENOMINATIONS}
	 * @throws IllegalArgumentException if {@code amount} is negative
	 */
	public static int[] countNotes(int amount) {
		if (amount < 0) {
			throw new IllegalArgumentException("amount must be non-negative: " + amount);
		}
		int[] counts = new int[DENOMINATIONS.length];
		for (int i = 0; i < DENOMINATIONS.length; i++) {
			counts[i] = amount / DENOMINATIONS[i];
			amount %= DENOMINATIONS[i];
		}
		return counts;
	}
}
|
3e1e207acae3829d1887cae59275899f604ff31b | 4,175 | java | Java | panc/src/main/java/org/quattor/pan/dml/functions/Digest.java | aka7/pan | 4c728b9d172c548cd2a93a10ebe399efc14bb8f9 | [
"Apache-2.0"
] | 6 | 2017-07-25T01:53:30.000Z | 2021-12-24T11:44:38.000Z | panc/src/main/java/org/quattor/pan/dml/functions/Digest.java | aka7/pan | 4c728b9d172c548cd2a93a10ebe399efc14bb8f9 | [
"Apache-2.0"
] | 152 | 2015-01-02T22:46:27.000Z | 2022-02-18T13:36:39.000Z | panc/src/main/java/org/quattor/pan/dml/functions/Digest.java | aka7/pan | 4c728b9d172c548cd2a93a10ebe399efc14bb8f9 | [
"Apache-2.0"
] | 13 | 2015-03-03T15:34:25.000Z | 2019-11-08T18:06:33.000Z | 32.364341 | 101 | 0.747784 | 12,754 | /*
Copyright (c) 2006 Charles A. Loomis, Jr, Cedric Duprilot, and
Centre National de la Recherche Scientifique (CNRS).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
$HeadURL: https://svn.lal.in2p3.fr/LCG/QWG/panc/trunk/src/org/quattor/pan/dml/functions/First.java $
$Id: First.java 3617 2008-08-21 10:02:57Z loomis $
*/
package org.quattor.pan.dml.functions;
import static org.quattor.pan.utils.MessageUtils.MSG_FIRST_STRING_ARG_REQ;
import static org.quattor.pan.utils.MessageUtils.MSG_INVALID_DIGEST_ALGORITHM;
import static org.quattor.pan.utils.MessageUtils.MSG_SECOND_STRING_ARG_REQ;
import static org.quattor.pan.utils.MessageUtils.MSG_TWO_ARGS_REQ;
import java.math.BigInteger;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.quattor.pan.dml.Operation;
import org.quattor.pan.dml.data.Element;
import org.quattor.pan.dml.data.StringProperty;
import org.quattor.pan.exceptions.EvaluationException;
import org.quattor.pan.exceptions.SyntaxException;
import org.quattor.pan.ttemplate.Context;
import org.quattor.pan.ttemplate.SourceRange;
/**
* Creates a digest of a string using the specified algorithm.
*
* @author loomis
*
*/
final public class Digest extends BuiltInFunction {

	private Digest(SourceRange sourceRange, Operation... operations)
			throws SyntaxException {
		super("digest", sourceRange, operations);
	}

	/**
	 * Validates the argument list at compile time (two arguments; where they
	 * are constants, both must be strings and the first a valid digest
	 * algorithm name) and creates the function instance.
	 *
	 * @throws SyntaxException if the static checks fail
	 */
	public static Operation getInstance(SourceRange sourceRange,
			Operation... operations) throws SyntaxException {

		// Check that exactly two arguments have been provided.
		if (operations.length != 2) {
			throw SyntaxException.create(sourceRange, MSG_TWO_ARGS_REQ,
					"digest");
		}

		// If the algorithm is a static value, then check that it is a string
		// and is a valid algorithm name.
		if (operations[0] instanceof Element) {
			if (operations[0] instanceof StringProperty) {
				String algorithm = ((StringProperty) operations[0]).getValue();
				try {
					MessageDigest.getInstance(algorithm);
				} catch (NoSuchAlgorithmException e) {
					throw SyntaxException.create(sourceRange,
							MSG_INVALID_DIGEST_ALGORITHM, algorithm);
				}
			} else {
				throw SyntaxException.create(sourceRange,
						MSG_FIRST_STRING_ARG_REQ, "digest");
			}
		}

		// Check that the message is a string if it is a constant value.
		// Fix: this is a compile-time check, so throw SyntaxException (as the
		// first-argument check above does), not EvaluationException.
		if (operations[1] instanceof Element
				&& !(operations[1] instanceof StringProperty)) {
			throw SyntaxException.create(sourceRange,
					MSG_SECOND_STRING_ARG_REQ, "digest");
		}

		return new Digest(sourceRange, operations);
	}

	@Override
	public Element execute(Context context) {

		// Retrieve the values of the arguments.
		Element[] args = calculateArgs(context);
		assert (args.length == 2);

		// Check that both of the arguments are strings.
		String algorithm = "";
		String message = "";
		if (args[0] instanceof StringProperty) {
			algorithm = ((StringProperty) args[0]).getValue();
		} else {
			throw EvaluationException.create(sourceRange,
					MSG_FIRST_STRING_ARG_REQ, name);
		}
		if (args[1] instanceof StringProperty) {
			message = ((StringProperty) args[1]).getValue();
		} else {
			throw EvaluationException.create(sourceRange,
					MSG_SECOND_STRING_ARG_REQ, name);
		}

		String digest = "";
		try {
			MessageDigest m = MessageDigest.getInstance(algorithm);
			// Fix: digest every encoded byte. The original passed
			// message.length() (a char count) as the byte length, which
			// truncated input containing multi-byte UTF-8 characters.
			m.update(message.getBytes(Charset.forName("UTF-8")));
			// NOTE: BigInteger.toString(16) drops leading zero nibbles; kept
			// as-is for backward compatibility with previously computed values.
			digest = new BigInteger(1, m.digest()).toString(16);
		} catch (NoSuchAlgorithmException e) {
			throw EvaluationException.create(sourceRange,
					MSG_INVALID_DIGEST_ALGORITHM, algorithm);
		}

		return StringProperty.getInstance(digest);
	}

}
|
3e1e22b1f3a4b0b940574e3ac36d2a1925956485 | 1,833 | java | Java | src/main/java/com/flower/game/socket/SocketRegister.java | arsenalfaning/landlord-socket | eab0e20deed5cee8a9927b90f7311cd7f4ef48d7 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/flower/game/socket/SocketRegister.java | arsenalfaning/landlord-socket | eab0e20deed5cee8a9927b90f7311cd7f4ef48d7 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/flower/game/socket/SocketRegister.java | arsenalfaning/landlord-socket | eab0e20deed5cee8a9927b90f7311cd7f4ef48d7 | [
"Apache-2.0"
] | null | null | null | 25.109589 | 89 | 0.6012 | 12,755 | package com.flower.game.socket;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.socket.WebSocketSession;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.ConcurrentSkipListMap;
@Component
@Component
public class SocketRegister {

    /**
     * Registered senders keyed by gamer id. Declared {@code final} (fix: the
     * original field was mutable) and concurrent because registration,
     * removal and broadcast may happen on different threads.
     * ConcurrentSkipListMap additionally iterates in key order.
     */
    private final Map<String, SocketSender> socketSenderMap = new ConcurrentSkipListMap<>();

    /**
     * Registers a newly connected gamer.
     *
     * @param gamerId      the gamer's id, used as the lookup key
     * @param socketSender the sender bound to the gamer's socket session
     */
    public void register(String gamerId, SocketSender socketSender) {
        socketSenderMap.put(gamerId, socketSender);
    }

    /**
     * Removes a gamer that has gone offline.
     *
     * @param gamerId the gamer's id
     */
    public void remove(String gamerId) {
        socketSenderMap.remove(gamerId);
    }

    /**
     * Broadcasts a message. An empty {@code gamerIds} collection means
     * "broadcast to every registered gamer"; otherwise only the listed gamers
     * (that are currently registered) receive the message.
     *
     * @param gamerIds target gamer ids; empty for all
     * @param text     the message payload
     */
    public void broadcast(Collection<String> gamerIds, String text) {
        if (gamerIds.isEmpty()) {
            socketSenderMap.values().forEach(sender -> sender.send(text));
        } else {
            for (String gamerId : gamerIds) {
                SocketSender sender = socketSenderMap.get(gamerId);
                if (sender != null) {
                    sender.send(text);
                }
            }
        }
    }

    /**
     * Sends a message to a single gamer; silently does nothing if the gamer
     * is not registered.
     *
     * @param gamerId the gamer's id
     * @param text    the message payload
     */
    public void messageTo(String gamerId, String text) {
        SocketSender sender = socketSenderMap.get(gamerId);
        if (sender != null) {
            sender.send(text);
        }
    }

    /**
     * Looks up the WebSocket session of a gamer.
     *
     * @param gamerId the gamer's id
     * @return the session, or {@code null} if the gamer is not registered
     */
    public WebSocketSession getSession(String gamerId) {
        SocketSender sender = socketSenderMap.get(gamerId);
        return sender == null ? null : sender.getWebSocketSession();
    }
}
|
3e1e23bbbe9c2b656f5205c060605450b885c3f0 | 1,177 | java | Java | src/main/java/com/github/xdcrafts/swarm/async/IChannel.java | xdcrafts/swarm | d853c962cc10d66fe1c8892c8b7929c0f42b6b86 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/github/xdcrafts/swarm/async/IChannel.java | xdcrafts/swarm | d853c962cc10d66fe1c8892c8b7929c0f42b6b86 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/github/xdcrafts/swarm/async/IChannel.java | xdcrafts/swarm | d853c962cc10d66fe1c8892c8b7929c0f42b6b86 | [
"Apache-2.0"
] | null | null | null | 24.020408 | 87 | 0.633815 | 12,756 | package com.github.xdcrafts.swarm.async;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;
/**
* Async channel.
* @param <T> value type
* @param <I> input value type
*/
public interface IChannel<T, I> {

    /**
     * Closes the channel. Behavior of takes/puts issued after closing is
     * implementation-defined; presumably they complete exceptionally or
     * are rejected — confirm against the implementing class.
     */
    void close();

    /**
     * Reports whether this channel has already been closed.
     * @return true if {@link #close()} has taken effect
     */
    boolean isClosed();

    /**
     * Asynchronously takes a value from the channel.
     * @return a future completed with the taken value of type T
     */
    CompletableFuture<T> take();

    /**
     * Asynchronously puts a value into the channel. The value is provided
     * lazily via a supplier.
     * @param value supplier of value of type I
     * @return completable future with optional; if the optional is
     * empty then the put was not performed.
     */
    CompletableFuture<Optional<Supplier<T>>> put(Supplier<I> value);

    /**
     * Asynchronously puts a not-yet-available value into the channel: once
     * the given future completes, its result is put via {@link #put(Supplier)}.
     * @param future puts asynchronous value to channel
     * @return completable future with optional; if the optional is
     * empty then the put was not performed.
     */
    default CompletableFuture<Optional<Supplier<T>>> put(CompletableFuture<I> future) {
        return future.thenCompose(value -> put(() -> value));
    }

}
|
3e1e23bbdb743b98f0709c9057e887943b857544 | 3,533 | java | Java | app/src/main/java/com/moringaschool/nyaranga_social_app_version2/controller/NetworkUtils.java | JAPHETHNYARANGA/Nyaranga-social-app-version2 | 915a70ef212bfb49205221795dad5b1bc43dc87b | [
"MIT"
] | null | null | null | app/src/main/java/com/moringaschool/nyaranga_social_app_version2/controller/NetworkUtils.java | JAPHETHNYARANGA/Nyaranga-social-app-version2 | 915a70ef212bfb49205221795dad5b1bc43dc87b | [
"MIT"
] | null | null | null | app/src/main/java/com/moringaschool/nyaranga_social_app_version2/controller/NetworkUtils.java | JAPHETHNYARANGA/Nyaranga-social-app-version2 | 915a70ef212bfb49205221795dad5b1bc43dc87b | [
"MIT"
] | null | null | null | 32.412844 | 95 | 0.619587 | 12,757 | package com.moringaschool.nyaranga_social_app_version2.controller;
import android.net.Uri;
import android.util.Log;
import com.moringaschool.nyaranga_social_app_version2.controller.models.Repository;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
public class NetworkUtils {

    private final static String GITHUB_BASE_URL = "https://api.github.com/search/repositories";
    private final static String PARAM_QUERY = "q";
    private final static String PARAM_SORT = "sort";
    private final static String SORT_BY = "stars";

    /** Maximum number of repositories extracted from a search response. */
    private static final int MAX_RESULTS = 50;

    /**
     * Builds the GitHub search URL for the given query, sorted by stars.
     *
     * @param gitHubSearchQuery the raw search terms
     * @return the full URL, or null if the built URI is malformed
     */
    private static URL buildUrl(String gitHubSearchQuery) {
        // Create the full link with query parameters.
        Uri builtUri = Uri.parse(GITHUB_BASE_URL).buildUpon()
                .appendQueryParameter(PARAM_QUERY, gitHubSearchQuery)
                .appendQueryParameter(PARAM_SORT, SORT_BY)
                .build();

        // Convert URI to URL.
        URL url = null;
        try {
            url = new URL(builtUri.toString());
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        return url;
    }

    /**
     * Performs a blocking HTTP GET and returns the whole response body.
     *
     * @param url the URL to fetch
     * @return the response body, or null if the response was empty
     * @throws IOException on connection failure
     */
    private static String getResponseFromHttpUrl(URL url) throws IOException {
        HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection();
        try {
            InputStream in = urlConnection.getInputStream();
            // "\\A" makes the scanner consume the entire stream as one token.
            Scanner scanner = new Scanner(in);
            scanner.useDelimiter("\\A");
            return scanner.hasNext() ? scanner.next() : null;
        } finally {
            urlConnection.disconnect();
        }
    }

    /**
     * Parses the GitHub search JSON into a list of repositories (at most
     * {@link #MAX_RESULTS}). Returns an empty list when the response is null
     * or unparseable.
     */
    private static List<Repository> jsonFormatter(String jsonResponse) {
        List<Repository> repositoryList = new ArrayList<>();
        // Fix: guard against a null body; new JSONObject(null) would throw.
        if (jsonResponse == null) {
            return repositoryList;
        }
        try {
            JSONObject json = new JSONObject(jsonResponse);
            JSONArray items = json.getJSONArray("items");

            int dataLen = Math.min(items.length(), MAX_RESULTS);
            for (int i = 0; i < dataLen; i++) {
                JSONObject currentRepo = items.getJSONObject(i);
                String repoName = currentRepo.getString("name");
                String repoOwner = currentRepo.getJSONObject("owner").getString("login");
                // Fix: "language" is JSON null for some repositories;
                // getString would throw and — because the catch wraps the
                // whole loop — discard every result. optString tolerates it.
                String repoLang = currentRepo.optString("language", "");
                String repoStars = currentRepo.getString("stargazers_count");

                repositoryList.add(new Repository(repoName, repoOwner, repoLang, repoStars));
            }
        } catch (JSONException ex) {
            // Include the cause instead of silently losing it.
            Log.v("Network", "Can't Read Json: " + ex.getMessage());
        }
        return repositoryList;
    }

    /**
     * Searches GitHub for repositories matching {@code query}.
     *
     * @param query the search terms
     * @return up to {@link #MAX_RESULTS} repositories, possibly empty
     * @throws IOException on network failure
     */
    public static List<Repository> getDataFromApi(String query) throws IOException {
        URL apiURL = buildUrl(query);
        String jsonResponse = getResponseFromHttpUrl(apiURL);
        return jsonFormatter(jsonResponse);
    }
}
|
3e1e25566645ed8b296367f6745088dd96351fb4 | 965 | java | Java | src/shogun-core-main/src/main/java/de/terrestris/shoguncore/dao/MapDao.java | hwbllmnn/shogun-core | 74f7dc8b6f1a7177ee41c5157319201e6d8e13dc | [
"Apache-2.0"
] | 17 | 2019-02-19T06:43:03.000Z | 2021-06-05T12:17:05.000Z | src/shogun-core-main/src/main/java/de/terrestris/shoguncore/dao/MapDao.java | hwbllmnn/shogun-core | 74f7dc8b6f1a7177ee41c5157319201e6d8e13dc | [
"Apache-2.0"
] | 265 | 2015-01-16T11:41:32.000Z | 2019-01-28T10:08:49.000Z | src/shogun-core-main/src/main/java/de/terrestris/shoguncore/dao/MapDao.java | hwbllmnn/shogun-core | 74f7dc8b6f1a7177ee41c5157319201e6d8e13dc | [
"Apache-2.0"
] | 22 | 2015-01-16T09:40:55.000Z | 2018-08-08T21:32:07.000Z | 22.97619 | 92 | 0.66943 | 12,758 | package de.terrestris.shoguncore.dao;
import de.terrestris.shoguncore.model.layer.Layer;
import de.terrestris.shoguncore.model.module.Map;
import org.hibernate.HibernateException;
import org.springframework.stereotype.Repository;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@Repository("mapDao")
public class MapDao<E extends Map> extends
		ModuleDao<E> {

	/**
	 * Default constructor for this DAO; binds the entity class to
	 * {@link Map}.
	 */
	@SuppressWarnings("unchecked")
	public MapDao() {
		super((Class<E>) Map.class);
	}

	/**
	 * Constructor for subclasses that manage a more specific entity type.
	 *
	 * @param clazz the concrete entity class handled by the subclass
	 */
	protected MapDao(Class<E> clazz) {
		super(clazz);
	}

	/**
	 * Finds all maps whose layer collection contains the given layer.
	 *
	 * @param layer the layer to search for
	 * @return the matching maps as a set (duplicates collapsed)
	 * @throws HibernateException on persistence errors
	 */
	public Set<E> findMapsWithLayer(Layer layer) throws HibernateException {
		return new HashSet<>(findAllWithCollectionContaining("mapLayers", layer));
	}

}
|
3e1e257ab498adb2ecb932e9ceb9384736c2a46f | 909 | java | Java | collection/cp/Algorithm_Collection-master/pascalTriangle.java | daemonslayer/Notebook | a9880be9bd86955afd6b8f7352822bc18673eda3 | [
"Apache-2.0"
] | 1 | 2019-03-24T13:12:01.000Z | 2019-03-24T13:12:01.000Z | collection/cp/Algorithm_Collection-master/pascalTriangle.java | bruler/Notebook | a9880be9bd86955afd6b8f7352822bc18673eda3 | [
"Apache-2.0"
] | null | null | null | collection/cp/Algorithm_Collection-master/pascalTriangle.java | bruler/Notebook | a9880be9bd86955afd6b8f7352822bc18673eda3 | [
"Apache-2.0"
] | null | null | null | 20.2 | 61 | 0.647965 | 12,759 | import java.util.ArrayList;
import java.util.List;
public class pascalTriangle {

	public static void main(String[] args) {
		int row = 7;
		List<List<Integer>> triangle = generate(row);
		// Fix: print values separated by spaces — the original printf("%d")
		// fused all digits of a row together (e.g. "121" for [1, 2, 1]).
		for (List<Integer> currentRow : triangle) {
			StringBuilder line = new StringBuilder();
			for (int value : currentRow) {
				if (line.length() > 0) {
					line.append(' ');
				}
				line.append(value);
			}
			System.out.println(line);
		}
	}

	/**
	 * Generates the first {@code numRows} rows of Pascal's triangle.
	 *
	 * @param numRows number of rows to generate; must be non-negative
	 * @return a list of rows, where row i (0-based) has i + 1 entries
	 * @throws IllegalArgumentException if {@code numRows} is negative
	 */
	public static List<List<Integer>> generate(int numRows) {
		if (numRows < 0) {
			throw new IllegalArgumentException("numRows must be non-negative: " + numRows);
		}
		List<List<Integer>> triangle = new ArrayList<>(numRows);
		for (int i = 0; i < numRows; i++) {
			List<Integer> row = new ArrayList<>(i + 1);
			row.add(1);
			// Each interior entry is the sum of the two entries above it.
			for (int j = 1; j < i; j++) {
				List<Integer> previous = triangle.get(i - 1);
				row.add(previous.get(j - 1) + previous.get(j));
			}
			if (i > 0) {
				row.add(1);
			}
			triangle.add(row);
		}
		return triangle;
	}
}
|
3e1e25be044fa4e34c0f23010222c6d7b37aac60 | 223 | java | Java | src/com/floreantpos/model/dao/PizzaModifierPriceDAO.java | jalalzia1/infinity | 264d16cef247f820967ef753e61feb13065b644c | [
"Apache-2.0"
] | null | null | null | src/com/floreantpos/model/dao/PizzaModifierPriceDAO.java | jalalzia1/infinity | 264d16cef247f820967ef753e61feb13065b644c | [
"Apache-2.0"
] | null | null | null | src/com/floreantpos/model/dao/PizzaModifierPriceDAO.java | jalalzia1/infinity | 264d16cef247f820967ef753e61feb13065b644c | [
"Apache-2.0"
] | null | null | null | 17.153846 | 70 | 0.748879 | 12,760 | package com.floreantpos.model.dao;
// Concrete DAO for pizza modifier prices; all persistence logic lives in the
// generated base class. Kept as a separate class so hand-written queries can
// be added without touching generated code.
public class PizzaModifierPriceDAO extends BasePizzaModifierPriceDAO {

	/**
	 * Default constructor. Can be used in place of getInstance()
	 */
	public PizzaModifierPriceDAO () {}

}
3e1e26134db91058fd144e93a3f867ed9c4ca512 | 1,597 | java | Java | topographica-web-shared/src/main/java/nl/rutgerkok/topographica/marker/HtmlString.java | rutgerkok/Topographica | 4a7d8e27d9918da718f1403f60aed96da1b70ec5 | [
"MIT"
] | 11 | 2019-03-17T20:56:28.000Z | 2021-09-09T21:40:51.000Z | topographica-web-shared/src/main/java/nl/rutgerkok/topographica/marker/HtmlString.java | rutgerkok/Topographica | 4a7d8e27d9918da718f1403f60aed96da1b70ec5 | [
"MIT"
] | null | null | null | topographica-web-shared/src/main/java/nl/rutgerkok/topographica/marker/HtmlString.java | rutgerkok/Topographica | 4a7d8e27d9918da718f1403f60aed96da1b70ec5 | [
"MIT"
] | 1 | 2020-04-11T22:54:29.000Z | 2020-04-11T22:54:29.000Z | 22.814286 | 78 | 0.550407 | 12,761 | package nl.rutgerkok.topographica.marker;
import java.util.Objects;
import nl.rutgerkok.topographica.webserver.Escape;
/**
 * Immutable wrapper marking a string as ready-to-render HTML. Value-based:
 * two instances are equal iff their raw HTML is equal.
 */
public final class HtmlString {

    /** The raw HTML markup; never null. */
    private final String raw;

    private HtmlString(String raw) {
        this.raw = Objects.requireNonNull(raw, "raw");
    }

    /**
     * Creates a HTML string. Newlines (\n) are converted to line break tags.
     *
     * @param plainText
     *            Plain text.
     * @return The HTML string.
     */
    public static HtmlString fromPlainText(String plainText) {
        return fromRawHtml(Escape.forHtml(plainText).replace("\n", "<br>"));
    }

    /**
     * Wraps exiting HTML into a {@link HtmlString} object. Note: the validity
     * of the HTML is not checked!
     *
     * @param raw
     *            The raw HTML, like "{@code Hi <i>everyone</i>!}".
     * @return A HTML object.
     */
    public static HtmlString fromRawHtml(String raw) {
        return new HtmlString(raw);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return raw.equals(((HtmlString) obj).raw);
    }

    @Override
    public int hashCode() {
        return raw.hashCode();
    }

    /**
     * Gets the raw HTML.
     */
    @Override
    public String toString() {
        return raw;
    }
}
|
3e1e264890a494789f8cd9480ada3f575e270dac | 873 | java | Java | app/src/main/java/com/ksekey/timeman/models/Task.java | ksekey/TimeMan | 6af1817d3bce1de87f84e0d876f6a8fef96dfc7d | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/ksekey/timeman/models/Task.java | ksekey/TimeMan | 6af1817d3bce1de87f84e0d876f6a8fef96dfc7d | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/ksekey/timeman/models/Task.java | ksekey/TimeMan | 6af1817d3bce1de87f84e0d876f6a8fef96dfc7d | [
"Apache-2.0"
] | null | null | null | 17.816327 | 66 | 0.586483 | 12,762 | package com.ksekey.timeman.models;
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.table.DatabaseTable;
/**
* Created by kk on 26/12/2017.
*/
@DatabaseTable(tableName = "task")
public class Task {

    // Primary key of the task row.
    @DatabaseField(id = true)
    private String id;

    // Display name; also used by toString() (e.g. in list adapters).
    @DatabaseField
    private String name;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public String toString() {
        return name;
    }

    /**
     * Equality is based solely on the primary key {@code id}. Fix: the
     * original dereferenced {@code id} directly and threw a
     * NullPointerException for unsaved tasks with a null id; this version is
     * null-safe.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Task task = (Task) o;
        return id == null ? task.id == null : id.equals(task.id);
    }

    /**
     * Fix: the original overrode equals() without hashCode(), breaking the
     * Object contract and making Task unusable as a HashSet/HashMap key.
     * Hashes the same field equals() compares.
     */
    @Override
    public int hashCode() {
        return id == null ? 0 : id.hashCode();
    }
}
|
3e1e26bf9266c80f9a2890ac82036086a542331f | 1,500 | java | Java | feilong-namespace/src/test/java/com/feilong/namespace/DingtalkBotTagTest.java | venusdrogon/feilong | 6e5809373fb0c53c0cd26e40985ab40cd06e01f7 | [
"Apache-2.0"
] | null | null | null | feilong-namespace/src/test/java/com/feilong/namespace/DingtalkBotTagTest.java | venusdrogon/feilong | 6e5809373fb0c53c0cd26e40985ab40cd06e01f7 | [
"Apache-2.0"
] | 90 | 2020-04-15T05:49:27.000Z | 2020-04-22T11:03:49.000Z | feilong-namespace/src/test/java/com/feilong/namespace/DingtalkBotTagTest.java | venusdrogon/feilong | 6e5809373fb0c53c0cd26e40985ab40cd06e01f7 | [
"Apache-2.0"
] | null | null | null | 34.883721 | 104 | 0.575333 | 12,763 | package com.feilong.namespace;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests;
import com.feilong.net.bot.dingtalk.DingTalkBot;
// Integration test for the <dingtalk-bot> namespace tag: loads the bean from
// dingtalkbot.xml and exercises it. NOTE(review): these tests appear to send
// real messages through a live DingTalk webhook (and contain a hardcoded
// phone number) — confirm they are excluded from CI.
@ContextConfiguration(locations = { "classpath*:dingtalkbot.xml" })
public class DingtalkBotTagTest extends AbstractJUnit4SpringContextTests{

    // Bot bean declared by the custom namespace handler under test.
    @Autowired
    @Qualifier("dingtalkBot")
    private DingTalkBot dingTalkBot;

    //---------------------------------------------------------------

    // Plain-text message should be accepted by the bot.
    @Test
    public void test(){
        assertTrue(dingTalkBot.sendMessage("lalalal"));
    }

    // Markdown message with an @-mention by phone number.
    @Test
    public void test1(){
        String content = "## 今晚去喝酒吗😁 \n" + //
                        // "@15001841318 \n" + //
                        // "@金鑫 \n" + //
                        " \n" + //
                        "> 曾经有一段真挚的爱情 \n" + //

                        "1. 美女 \n" + //
                        "2. 帅哥 \n" + //

                        "- **喝酒** \n" + //
                        "- [百度](http://baidu.com) \n" + //
                        "- *唱歌* @金鑫 \n";
        boolean result = dingTalkBot.sendMessage("测试测试", content, "15001841318");
        assertEquals(true, result);
    }
}
3e1e275dcb940385c42ccfdde2a21d583b67eabf | 4,947 | java | Java | src/app/voltdb/voltdb_src/src/hsqldb19b3/org/hsqldb_voltpatches/scriptio/ScriptWriterBinary.java | OpenMPDK/SMDK | 8f19d32d999731242cb1ab116a4cb445d9993b15 | [
"BSD-3-Clause"
] | 44 | 2022-03-16T08:32:31.000Z | 2022-03-31T16:02:35.000Z | src/app/voltdb/voltdb_src/src/hsqldb19b3/org/hsqldb_voltpatches/scriptio/ScriptWriterBinary.java | H2O0Lee/SMDK | eff49bc17a55a83ea968112feb2e2f2ea18c4ff5 | [
"BSD-3-Clause"
] | 1 | 2022-03-29T02:30:28.000Z | 2022-03-30T03:40:46.000Z | src/app/voltdb/voltdb_src/src/hsqldb19b3/org/hsqldb_voltpatches/scriptio/ScriptWriterBinary.java | H2O0Lee/SMDK | eff49bc17a55a83ea968112feb2e2f2ea18c4ff5 | [
"BSD-3-Clause"
] | 18 | 2022-03-19T04:41:04.000Z | 2022-03-31T03:32:12.000Z | 36.91791 | 83 | 0.681221 | 12,764 | /* Copyright (c) 2001-2009, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb_voltpatches.scriptio;
import java.io.IOException;
import org.hsqldb_voltpatches.Database;
import org.hsqldb_voltpatches.NumberSequence;
import org.hsqldb_voltpatches.Session;
import org.hsqldb_voltpatches.Table;
import org.hsqldb_voltpatches.result.Result;
import org.hsqldb_voltpatches.rowio.RowOutputBinary;
import org.hsqldb_voltpatches.rowio.RowOutputInterface;
import org.hsqldb_voltpatches.lib.DataOutputStream;
/**
* @author Fred Toussi (fredt@users dot sourceforge.net)
* @since 1.8.0
* @version 1.7.2
*/
// Writes database script data in HSQLDB's binary format. The wire format is
// position-sensitive (size prefixes are back-patched), so the exact order of
// rowOut calls below matters; do not reorder.
class ScriptWriterBinary extends ScriptWriterBase {

    // Reusable binary row buffer; reset() before each record.
    RowOutputInterface rowOut;

    ScriptWriterBinary(Database db, String file, boolean includeCached,
                       boolean newFile) {
        super(db, file, includeCached, newFile, false);
    }

    protected void initBuffers() {
        rowOut = new RowOutputBinary();
    }

    // Serializes a single-column Result directly to the file stream.
    protected void writeSingleColumnResult(Result r) throws IOException {

        DataOutputStream dataOutput = new DataOutputStream(fileStreamOut);

        rowOut.reset();
        r.write(dataOutput, rowOut);
        dataOutput.flush();
    }

    // int : row size (0 if no more rows) ,
    // RowInput/OutputBinary : row (column values)
    protected void writeRow(Session session, Table t,
                            Object[] data) throws IOException {

        rowOut.reset();
        rowOut.writeRow(data, t.getColumnTypes());
        fileStreamOut.write(rowOut.getOutputStream().getBuffer(), 0,
                            rowOut.size());

        tableRowCount++;
    }

    // int : headersize (0 if no more tables), String : tablename, int : operation,
    protected void writeTableInit(Table t) throws IOException {

        tableRowCount = 0;

        rowOut.reset();
        // Write a size placeholder first, then back-patch it (writeIntData at
        // offset 0) once the header's actual length is known.
        rowOut.writeSize(0);
        rowOut.writeString(t.getName().name);
        rowOut.writeInt(INSERT_WITH_SCHEMA);
        rowOut.writeString(t.getSchemaName().name);
        rowOut.writeIntData(rowOut.size(), 0);
        fileStreamOut.write(rowOut.getOutputStream().getBuffer(), 0,
                            rowOut.size());
    }

    // Terminates a table's data with the row count written for that table.
    protected void writeTableTerm(Table t) throws IOException {

        rowOut.reset();
        rowOut.writeSize(0);
        rowOut.writeInt(this.tableRowCount);
        fileStreamOut.write(rowOut.getOutputStream().getBuffer(), 0,
                            rowOut.size());
    }

    // A zero size marks the end of all table data.
    protected void writeDataTerm() throws IOException {

        rowOut.reset();
        rowOut.writeSize(0);
        fileStreamOut.write(rowOut.getOutputStream().getBuffer(), 0,
                            rowOut.size());
    }

    // The binary format carries no SQL statements or session bookkeeping, so
    // the statement-oriented hooks below are deliberate no-ops.
    public void writeLogStatement(Session session,
                                  String s) throws IOException {}

    protected void addSessionId(Session session) throws IOException {}

    public void writeDeleteStatement(Session session, Table table,
                                     Object[] ddata) throws IOException {}

    public void writeSequenceStatement(Session session,
                                       NumberSequence seq)
                                       throws IOException {}

    public void writeInsertStatement(Session session, Table table,
                                     Object[] data) throws IOException {}

    public void writeCommitStatement(Session session) throws IOException {}
}
|
3e1e27b06e260bcd1fc1bbfee925176e5e09e60d | 6,087 | java | Java | PacMan/src/main/java/com/coolioasjulio/arcademachine/pacman/PacMan.java | coolioasjulio/Arcade-Machine | 7d1069fd50744e1b71cf6728348a0366051bc0a4 | [
"MIT"
] | null | null | null | PacMan/src/main/java/com/coolioasjulio/arcademachine/pacman/PacMan.java | coolioasjulio/Arcade-Machine | 7d1069fd50744e1b71cf6728348a0366051bc0a4 | [
"MIT"
] | null | null | null | PacMan/src/main/java/com/coolioasjulio/arcademachine/pacman/PacMan.java | coolioasjulio/Arcade-Machine | 7d1069fd50744e1b71cf6728348a0366051bc0a4 | [
"MIT"
] | null | null | null | 37.574074 | 110 | 0.590439 | 12,765 | package com.coolioasjulio.arcademachine.pacman;
import com.coolioasjulio.arcademachine.launcher.gameutils.InputManager;
import com.coolioasjulio.arcademachine.pacman.engine.BoxCollider;
import com.coolioasjulio.arcademachine.pacman.engine.Coord;
import com.coolioasjulio.arcademachine.pacman.engine.Drawer;
import com.coolioasjulio.arcademachine.pacman.engine.GameObject;
import com.coolioasjulio.arcademachine.pacman.engine.Sprite;
import java.awt.event.KeyEvent;
import java.util.Arrays;
import java.util.function.Supplier;
import java.awt.Color;
public class PacMan extends GameObject {
    // current travel direction; pac man starts out moving east
    private Direction direction = Direction.EAST;
    // side length of pac man, which is also the tile size used for tile math
    private int size;
    // movement speed, in position units per second (scaled by dt each update)
    private int speed;
    // tile occupied immediately before the current one (null until the first tile change)
    private Coord prevTile;

    /**
     * Creates pac man at the given position with a square collider and a
     * single dynamically-rotated sprite.
     */
    public PacMan(int x, int y, int size, Color color, Color bgColor) {
        super(x, y, new BoxCollider(0, 0, size, size), new PacManSprite(size, color, bgColor));
        // Instead of using multiple sprites, we'll just use one and dynamically rotate it
        ((PacManSprite) getSprites()[0]).setDirectionSupplier(this::getDirection);
        this.size = size;
    }

    /** @return the movement speed */
    public int getSpeed() {
        return speed;
    }

    /** @param speed the new movement speed */
    public void setSpeed(int speed) {
        this.speed = speed;
    }

    /** @return the current travel direction */
    public Direction getDirection() {
        return direction;
    }

    /** @param direction the new travel direction */
    public void setDirection(Direction direction) {
        this.direction = direction;
    }

    /**
     * Update pacman by a timestep. The direction it moves in is determined by the player input.
     * This handles moving the player and wrapping around the edges.
     *
     * @param levelMap The map of the active level.
     * @param dt The elapsed time in seconds since the last update.
     */
    public void update(LevelMap levelMap, double dt) {
        Coord currTile = getTile();
        // Get the new direction to move in
        updateDirection();
        // Move in the appropriate direction
        Utils.moveGameObject(this, levelMap, direction, Utils.round(speed * dt), size);
        // If required, wrap around the map
        Utils.warpEdges(this, levelMap, size);
        // Remember the previous tile whenever a tile boundary was crossed
        if (!currTile.equals(getTile())) {
            prevTile = currTile;
        }
    }

    /** @return the tile occupied before the current one (null before the first tile change) */
    public Coord getPrevTile() {
        return prevTile;
    }

    /** @return the map tile pac man currently occupies */
    public Coord getTile() {
        return new Coord(getTileX(size), getTileY(size));
    }

    /**
     * Reads player input and applies a direction change. Reversals and
     * same-direction presses take effect immediately; perpendicular turns
     * only take effect near the center of a tile (so pac man cannot clip
     * into walls) and only if the tile in the new direction is open.
     */
    private void updateDirection() {
        Direction newDir;
        // If a key was pressed in the last tick, we're changing directions
        if (InputManager.keyPressed(KeyEvent.VK_UP)) newDir = Direction.NORTH;
        else if (InputManager.keyPressed(KeyEvent.VK_RIGHT)) newDir = Direction.EAST;
        else if (InputManager.keyPressed(KeyEvent.VK_DOWN)) newDir = Direction.SOUTH;
        else if (InputManager.keyPressed(KeyEvent.VK_LEFT)) newDir = Direction.WEST;
        else return;
        int threshold = 3 * size / 8; // the snap threshold is +-3/8 of the tile size
        if (newDir != direction && newDir != Utils.opposite(direction)) {
            // If we're close enough to the center of the block, snap to the middle
            // This is required so we don't collide with any walls.
            if (Math.abs(getTileX(size) * size - getX()) <= threshold &&
                Math.abs(getTileY(size) * size - getY()) <= threshold) {
                int dx = Utils.getDeltaX(newDir);
                int dy = Utils.getDeltaY(newDir);
                // If the tile in the new direction is open, snap to the middle
                if (PacManGame.getInstance().getLevelMap().isOpen(getTileX(size) + dx, getTileY(size) + dy)) {
                    Utils.snapGameObject(this, size, threshold);
                    direction = newDir;
                }
            }
        } else {
            direction = newDir;
        }
    }

    /**
     * This is the sprite of the pac man, which handles the rendering
     */
    private static class PacManSprite extends Sprite {
        private int size;              // sprite side length, matches the tile size
        private final Color color;     // body color
        private final Color bgColor;   // background color, used to "cut out" the mouth
        // queried at draw time so the mouth always faces the travel direction
        private Supplier<Direction> directionSupplier;

        public PacManSprite(int size, Color color, Color bgColor) {
            super(0, 0, 10); // Render the animation at 10fps. (5 cycles per second)
            this.size = size;
            this.color = color;
            this.bgColor = bgColor;
        }

        private void setDirectionSupplier(Supplier<Direction> directionSupplier) {
            this.directionSupplier = directionSupplier;
        }

        /** Draws the body circle, then the mouth wedge rotated to the travel direction. */
        public void drawActiveImage(Drawer d) {
            // Draw the circle
            d.setColor(color);
            d.fillOval(getX(), getY(), size, size);
            // Then draw the mouth. Depending on the direction of travel, the mouth should be rotated
            d.setColor(bgColor);
            // These are the x and y coordinates if we're moving north
            int[] x = new int[]{size / 4, size / 2, 3 * size / 4};
            int[] y = new int[]{0, 3 * size / 5, 0};
            int[] temp = y;
            // Depending on which direction we're actually moving, rotate the coordinates
            switch (directionSupplier.get()) {
                case NORTH:
                    break;
                case WEST:
                    // swap the axes
                    y = x;
                    x = temp;
                    break;
                case EAST:
                    // swap the axes and mirror the new x coordinates
                    y = x;
                    x = Arrays.stream(temp).map(i -> size - i).toArray();
                    break;
                case SOUTH:
                    // mirror the y coordinates
                    y = Arrays.stream(y).map(i -> size - i).toArray();
                    break;
            }
            // translate from sprite-local to screen coordinates
            x = Arrays.stream(x).map(i -> i + getX()).toArray();
            y = Arrays.stream(y).map(i -> i + getY()).toArray();
            // Render the mouth
            // Since we're being animated, we have two images (mouth open, mouth closed)
            // Only draw the mouth if the mouth is open (index is 1)
            if (getActiveImageIndex() != 0) d.fillPolygon(x, y);
        }

        /** @return 2 — the animation alternates between mouth closed (0) and open (1) */
        public int numImages() {
            return 2;
        }
    }
}
|
3e1e28b2adf398a73b0dec4b6f80c732871c8a88 | 1,526 | java | Java | src/main/java/GeneradorMiniexamenes/controllers/Export.java | Irvel/generador-miniexamenes | 359c399192b6905f70cb3694a26da59ff03e6b88 | [
"MIT"
] | null | null | null | src/main/java/GeneradorMiniexamenes/controllers/Export.java | Irvel/generador-miniexamenes | 359c399192b6905f70cb3694a26da59ff03e6b88 | [
"MIT"
] | null | null | null | src/main/java/GeneradorMiniexamenes/controllers/Export.java | Irvel/generador-miniexamenes | 359c399192b6905f70cb3694a26da59ff03e6b88 | [
"MIT"
] | null | null | null | 30.52 | 90 | 0.683486 | 12,766 | package GeneradorMiniexamenes.controllers;
import GeneradorMiniexamenes.model.Subject;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import javafx.event.ActionEvent;
import javafx.scene.Node;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import java.io.File;
import java.io.IOException;
/**
* Saves a given Subject to a .json file
*/
public class Export {

    /**
     * Prompts the user for a destination file and serializes the subject to
     * it as indented JSON. Nothing is written if the dialog is cancelled.
     *
     * @param subject      the subject to serialize
     * @param currentStage owner stage for the save dialog
     * @throws IOException if writing the JSON file fails
     */
    public void saveExternalJson(Subject subject, Stage currentStage) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        // pretty-print the generated JSON
        mapper.configure(SerializationFeature.INDENT_OUTPUT, true);

        FileChooser chooser = new FileChooser();
        // restrict the dialog to .json files and suggest a sensible name
        chooser.getExtensionFilters().add(
                new FileChooser.ExtensionFilter("JSON files (*.json)", "*.json"));
        chooser.setInitialFileName(subject.getSubjectName() + ".json");

        File target = chooser.showSaveDialog(currentStage);
        if (target == null) {
            return; // user cancelled the dialog
        }
        mapper.writeValue(target, subject);
    }

    /**
     * Button handler: resolves the owning stage from the event source and
     * delegates to {@link #saveExternalJson(Subject, Stage)}.
     */
    public void onClick(ActionEvent actionEvent, Subject subject) {
        Node trigger = (Node) actionEvent.getSource();
        Stage stage = (Stage) trigger.getScene().getWindow();
        try {
            saveExternalJson(subject, stage);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
|
3e1e2949e401918adabd724122a9a3228fb66d3c | 1,297 | java | Java | src/com/jspxcms/core/repository/AttachmentDao.java | xrogzu/jspxcms6 | 7b7df6771bfb7a5576f289176c6a3b69caa4df9c | [
"Apache-2.0"
] | 1 | 2019-07-05T06:28:09.000Z | 2019-07-05T06:28:09.000Z | src/com/jspxcms/core/repository/AttachmentDao.java | xrogzu/jspxcms6 | 7b7df6771bfb7a5576f289176c6a3b69caa4df9c | [
"Apache-2.0"
] | null | null | null | src/com/jspxcms/core/repository/AttachmentDao.java | xrogzu/jspxcms6 | 7b7df6771bfb7a5576f289176c6a3b69caa4df9c | [
"Apache-2.0"
] | 4 | 2017-08-07T07:29:12.000Z | 2021-09-26T15:33:28.000Z | 30.880952 | 82 | 0.760987 | 12,767 | package com.jspxcms.core.repository;
import java.util.Collection;
import java.util.List;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.Repository;
import com.jspxcms.common.orm.Limitable;
import com.jspxcms.core.domain.Attachment;
public interface AttachmentDao extends Repository<Attachment, Integer>,
        AttachmentDaoPlus {

    /** Returns a page of attachments matching the given specification. */
    public Page<Attachment> findAll(Specification<Attachment> spec,
            Pageable pageable);

    /** Returns attachments matching the specification, bounded by the limit. */
    public List<Attachment> findAll(Specification<Attachment> spec,
            Limitable limitable);

    /** Finds an attachment by its id (null if none — TODO confirm Spring Data version semantics). */
    public Attachment findOne(Integer id);

    /** Saves (inserts or updates) the attachment and returns the persisted entity. */
    public Attachment save(Attachment bean);

    /** Deletes the given attachment. */
    public void delete(Attachment bean);

    // -------------------- derived and bulk queries --------------------

    /** Finds attachments whose name matches exactly. */
    public List<Attachment> findByName(String name);

    /** Bulk-deletes all attachments belonging to any of the given sites; returns the row count. */
    @Modifying
    @Query("delete from Attachment bean where bean.site.id in (?1)")
    public int deleteBySiteId(Collection<Integer> siteIds);

    /** Reassigns attachments owned by the given users to user id 0 (anonymous); returns the row count. */
    @Modifying
    @Query("update Attachment bean set bean.user.id = 0 where bean.user.id in (?1)")
    public int toAnonymous(Collection<Integer> userIds);
}
|
3e1e2a12bf4058c3385d3a81134691be572046ef | 1,702 | java | Java | chapter20/rx-java2/src/test/java/camelinaction/CamelNumbersTest.java | nicolaskruger/camel-in-action | 568b3bb176adc988988809f385499595f8256ca0 | [
"Apache-2.0"
] | 571 | 2015-07-06T17:08:00.000Z | 2022-03-29T11:55:31.000Z | chapter20/rx-java2/src/test/java/camelinaction/CamelNumbersTest.java | nicolaskruger/camel-in-action | 568b3bb176adc988988809f385499595f8256ca0 | [
"Apache-2.0"
] | 148 | 2015-09-09T15:32:48.000Z | 2022-03-27T08:09:49.000Z | chapter20/rx-java2/src/test/java/camelinaction/CamelNumbersTest.java | nicolaskruger/camel-in-action | 568b3bb176adc988988809f385499595f8256ca0 | [
"Apache-2.0"
] | 409 | 2015-08-13T01:21:55.000Z | 2022-03-22T05:09:20.000Z | 36.212766 | 83 | 0.652761 | 12,768 | package camelinaction;
import io.reactivex.Flowable;
import org.apache.camel.RoutesBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.reactive.streams.api.CamelReactiveStreams;
import org.apache.camel.component.reactive.streams.api.CamelReactiveStreamsService;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import org.reactivestreams.Publisher;
public class CamelNumbersTest extends CamelTestSupport {

    /**
     * Demonstrates bridging a Camel reactive stream into RxJava2: subscribes
     * a Flowable to the "numbers" stream, keeps only values greater than 5
     * and logs them, then lets the timer route run for 10 seconds. There are
     * no assertions — this test is a runnable demonstration.
     */
    @Test
    public void testNumbers() throws Exception {
        CamelReactiveStreamsService rsCamel = CamelReactiveStreams.get(context);

        // create a publisher that receives from the numbers stream
        Publisher<Integer> numbers = rsCamel.fromStream("numbers", Integer.class);

        // use stream engine to subscribe from the publisher
        // where we filter out the big numbers which is logged
        Flowable.fromPublisher(numbers)
            .filter(n -> n > 5)
            .doOnNext(c -> log.info("Streaming big number {}", c))
            .subscribe();

        // let it run for 10 seconds
        Thread.sleep(10000);
    }

    @Override
    protected RoutesBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // use timer to create a continued stream of numbers
                from("timer:number")
                    .transform(simple("${random(0,10)}"))
                    .log("Generated random number ${body}")
                    // send the numbers to the stream named numbers
                    .to("reactive-streams:numbers");
            }
        };
    }
}
|
3e1e2a5de7916e202359b675b5514ce091339a1d | 283 | java | Java | api/repositorio-principal/src/main/java/br/gov/df/emater/repositorio_principal/dao/sistema/ConfiguracaoDAO.java | ffrazao/evbem | f1980777551f49c2c89daa6cc3b66b49ce5d8d72 | [
"Apache-2.0"
] | null | null | null | api/repositorio-principal/src/main/java/br/gov/df/emater/repositorio_principal/dao/sistema/ConfiguracaoDAO.java | ffrazao/evbem | f1980777551f49c2c89daa6cc3b66b49ce5d8d72 | [
"Apache-2.0"
] | 24 | 2020-03-04T23:20:27.000Z | 2022-03-02T05:59:54.000Z | api/repositorio-principal/src/main/java/br/gov/df/emater/repositorio_principal/dao/sistema/ConfiguracaoDAO.java | ffrazao/evbem | f1980777551f49c2c89daa6cc3b66b49ce5d8d72 | [
"Apache-2.0"
] | 1 | 2019-10-30T00:19:33.000Z | 2019-10-30T00:19:33.000Z | 35.375 | 79 | 0.85159 | 12,769 | package br.gov.df.emater.repositorio_principal.dao.sistema;
import org.springframework.data.jpa.repository.JpaRepository;
import br.gov.df.emater.repositorio_principal.entidade.sistema.Configuracao;
/** Spring Data JPA repository for {@link Configuracao} entities, keyed by Integer id. */
public interface ConfiguracaoDAO extends JpaRepository<Configuracao, Integer> {
}
3e1e2a6dc287525f3b2635393ea0d36d08f19987 | 3,103 | java | Java | src/main/java/org/webbitserver/netty/NettyWebSocketConnection.java | enterstudio/webbit | f628a7a3ffdd8c288514784f5b0426faaee2a2e3 | [
"BSD-3-Clause"
] | 318 | 2015-01-05T10:41:44.000Z | 2022-03-25T17:06:45.000Z | src/main/java/org/webbitserver/netty/NettyWebSocketConnection.java | msdgwzhy6/webbit | f628a7a3ffdd8c288514784f5b0426faaee2a2e3 | [
"BSD-3-Clause"
] | 14 | 2015-01-07T19:33:59.000Z | 2020-07-18T12:14:12.000Z | src/main/java/org/webbitserver/netty/NettyWebSocketConnection.java | msdgwzhy6/webbit | f628a7a3ffdd8c288514784f5b0426faaee2a2e3 | [
"BSD-3-Clause"
] | 136 | 2015-01-07T17:46:27.000Z | 2022-03-25T17:06:53.000Z | 33.010638 | 166 | 0.69739 | 12,770 | package org.webbitserver.netty;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.handler.codec.http.websocket.DefaultWebSocketFrame;
import org.jboss.netty.util.CharsetUtil;
import org.webbitserver.WebSocketConnection;
import java.util.concurrent.Executor;
public class NettyWebSocketConnection extends AbstractHttpConnection implements WebSocketConnection {

    // masking key applied to outbound hybi frames
    // NOTE(review): presumably may be null for unmasked (server-to-client)
    // frames — confirm against EncodingHybiFrame
    private final byte[] outboundMaskingKey;
    // protocol version string reported by version()
    private String version;
    // true once setHybiWebSocketVersion() has been called; when false, text
    // frames are sent via DefaultWebSocketFrame (the older framing)
    private boolean hybi;

    public NettyWebSocketConnection(Executor executor, NettyHttpRequest nettyHttpRequest, ChannelHandlerContext ctx, byte[] outboundMaskingKey) {
        super(ctx, nettyHttpRequest, executor);
        this.outboundMaskingKey = outboundMaskingKey;
    }

    /** Sends a text message, choosing the frame encoding based on the negotiated protocol. */
    @Override
    public NettyWebSocketConnection send(String message) {
        if (hybi) {
            writeMessage(new EncodingHybiFrame(Opcodes.OPCODE_TEXT, true, 0, outboundMaskingKey, ChannelBuffers.copiedBuffer(message, CharsetUtil.UTF_8)));
        } else {
            writeMessage(new DefaultWebSocketFrame(message));
        }
        return this;
    }

    /** Sends the whole byte array as a binary message. */
    @Override
    public NettyWebSocketConnection send(byte[] message) {
        return send(message, 0, message.length);
    }

    /**
     * Sends a slice of the byte array as a binary message.
     * NOTE(review): unlike send(String), this always uses a hybi frame even
     * when {@code hybi} is false — confirm binary frames are only used on
     * hybi connections.
     */
    @Override
    public NettyWebSocketConnection send(byte[] message, int offset, int length) {
        writeMessage(new EncodingHybiFrame(Opcodes.OPCODE_BINARY, true, 0, outboundMaskingKey, ChannelBuffers.copiedBuffer(message, offset, length)));
        return this;
    }

    /** Sends a ping control frame carrying the given payload. */
    @Override
    public NettyWebSocketConnection ping(byte[] message) {
        writeMessage(new EncodingHybiFrame(Opcodes.OPCODE_PING, true, 0, outboundMaskingKey, ChannelBuffers.copiedBuffer(message)));
        return this;
    }

    /** Sends a pong control frame carrying the given payload. */
    @Override
    public NettyWebSocketConnection pong(byte[] message) {
        writeMessage(new EncodingHybiFrame(Opcodes.OPCODE_PONG, true, 0, outboundMaskingKey, ChannelBuffers.copiedBuffer(message)));
        return this;
    }

    /**
     * Closes the connection. On hybi connections an empty close frame is
     * written first and the channel is closed once that write completes;
     * otherwise the channel is closed immediately.
     */
    @Override
    public NettyWebSocketConnection close() {
        if (hybi) {
            writeMessage(new EncodingHybiFrame(Opcodes.OPCODE_CLOSE, true, 0, outboundMaskingKey, ChannelBuffers.buffer(0))).addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture channelFuture) throws Exception {
                    closeChannel();
                }
            });
        } else {
            closeChannel();
        }
        return this;
    }

    /** Associates an arbitrary key/value pair with this connection. */
    @Override
    public NettyWebSocketConnection data(String key, Object value) {
        putData(key, value);
        return this;
    }

    /** @return the websocket protocol version string, as set during the handshake */
    @Override
    public String version() {
        return version;
    }

    void setVersion(String version) {
        this.version = version;
    }

    /** Marks the connection as hybi and records its version string. */
    public void setHybiWebSocketVersion(int webSocketVersion) {
        setVersion("Sec-WebSocket-Version-" + webSocketVersion);
        hybi = true;
    }
}
|
3e1e2a90b023301d1e20f6c7f885455c486c4afe | 1,214 | java | Java | src/main/java/com/microsoft/store/partnercenter/products/ISkuCollectionByTargetSegment.java | epinter/Partner-Center-Java | 4a05288ec99477f45b34f21cb8a7f0044c8caa10 | [
"MIT"
] | null | null | null | src/main/java/com/microsoft/store/partnercenter/products/ISkuCollectionByTargetSegment.java | epinter/Partner-Center-Java | 4a05288ec99477f45b34f21cb8a7f0044c8caa10 | [
"MIT"
] | null | null | null | src/main/java/com/microsoft/store/partnercenter/products/ISkuCollectionByTargetSegment.java | epinter/Partner-Center-Java | 4a05288ec99477f45b34f21cb8a7f0044c8caa10 | [
"MIT"
] | null | null | null | 43.357143 | 102 | 0.693575 | 12,771 | // -----------------------------------------------------------------------
// <copyright file="ISkuCollectionByTargetSegment.java" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// -----------------------------------------------------------------------
package com.microsoft.store.partnercenter.products;
import com.microsoft.store.partnercenter.IPartnerComponent;
import com.microsoft.store.partnercenter.genericoperations.IEntireEntityCollectionRetrievalOperations;
import com.microsoft.store.partnercenter.models.ResourceCollection;
import com.microsoft.store.partnercenter.models.products.Sku;
import com.microsoft.store.partnercenter.models.utils.TripletTuple;
/**
* Holds operations that can be performed on skus from a specific target segment.
*/
public interface ISkuCollectionByTargetSegment
    // component context is a (String, String, String) triplet — presumably
    // product id, country and target segment; confirm ordering against callers
    extends IPartnerComponent<TripletTuple<String, String, String>>,
    IEntireEntityCollectionRetrievalOperations<Sku, ResourceCollection<Sku>>
{
    /**
     * Retrieves all the skus for the provided product and target segment.
     *
     * @return The SKUs for the provided product and target segment.
     */
    ResourceCollection<Sku> get();
}
3e1e2a92041adaf79f294993b324c7886e68a516 | 1,831 | java | Java | android-networking/src/main/java/com/androidnetworking/internal/DownloadProgressHandler.java | inboofficial/Fast-Android-Networking | c65ffc9f61eed3320860b7a3fca90d872e3474e8 | [
"Apache-2.0"
] | 5,549 | 2016-07-16T20:46:20.000Z | 2022-03-28T06:51:43.000Z | android-networking/src/main/java/com/androidnetworking/internal/DownloadProgressHandler.java | inboofficial/Fast-Android-Networking | c65ffc9f61eed3320860b7a3fca90d872e3474e8 | [
"Apache-2.0"
] | 546 | 2016-07-16T19:16:40.000Z | 2022-03-29T21:54:20.000Z | android-networking/src/main/java/com/androidnetworking/internal/DownloadProgressHandler.java | inboofficial/Fast-Android-Networking | c65ffc9f61eed3320860b7a3fca90d872e3474e8 | [
"Apache-2.0"
] | 1,039 | 2016-07-18T01:01:38.000Z | 2022-03-23T11:03:47.000Z | 33.290909 | 101 | 0.688149 | 12,772 | /*
* Copyright (C) 2016 Amit Shekhar
* Copyright (C) 2011 Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.androidnetworking.internal;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import com.androidnetworking.common.ANConstants;
import com.androidnetworking.interfaces.DownloadProgressListener;
import com.androidnetworking.model.Progress;
/**
* Created by amitshekhar on 24/05/16.
*/
public class DownloadProgressHandler extends Handler {

    // callback invoked on the main thread for every progress update
    private final DownloadProgressListener listener;

    /**
     * Creates a handler bound to the main (UI) looper that forwards
     * download progress messages to the given listener.
     */
    public DownloadProgressHandler(DownloadProgressListener downloadProgressListener) {
        super(Looper.getMainLooper());
        this.listener = downloadProgressListener;
    }

    /** Forwards UPDATE messages to the listener; everything else goes to the superclass. */
    @Override
    public void handleMessage(Message msg) {
        if (msg.what == ANConstants.UPDATE) {
            if (listener != null) {
                final Progress p = (Progress) msg.obj;
                listener.onProgress(p.currentBytes, p.totalBytes);
            }
        } else {
            super.handleMessage(msg);
        }
    }
}
|
3e1e2a97631d3b82923d756fcb94c3700c390e74 | 999 | java | Java | Ghidra/Features/Base/src/main/java/ghidra/app/util/bin/format/pe/rich/MSProductType.java | bdcht/ghidra | 9e732318148cd11edeb4862afd23d56418551812 | [
"Apache-2.0"
] | 17 | 2022-01-15T03:52:37.000Z | 2022-03-30T18:12:17.000Z | Ghidra/Features/Base/src/main/java/ghidra/app/util/bin/format/pe/rich/MSProductType.java | BStudent/ghidra | 0cdc722921cef61b7ca1b7236bdc21079fd4c03e | [
"Apache-2.0"
] | 9 | 2022-01-15T03:58:02.000Z | 2022-02-21T10:22:49.000Z | Ghidra/Features/Base/src/main/java/ghidra/app/util/bin/format/pe/rich/MSProductType.java | BStudent/ghidra | 0cdc722921cef61b7ca1b7236bdc21079fd4c03e | [
"Apache-2.0"
] | 3 | 2019-12-02T13:36:50.000Z | 2019-12-04T05:40:12.000Z | 24.975 | 75 | 0.708709 | 12,773 | /* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.util.bin.format.pe.rich;
public enum MSProductType {

    CXX_Compiler("C++ Compiler"),
    C_Compiler("C Compiler"),
    Assembler("Assembler"),
    // several linker-related product kinds all display as "Linker"
    Import("Linker"),
    Export("Linker"),
    ImportExport("Linker"),
    Linker("Linker"),
    CVTRes("CVTRes"),
    Unknown("Unknown");

    /** Human-readable description returned by {@link #toString()}. */
    private final String desc;

    private MSProductType(String d) {
        this.desc = d;
    }

    /** @return the human-readable description of this product type */
    @Override
    public String toString() {
        return desc;
    }
}
|
3e1e2cdf193f25b33773dca57eb079dff7274be7 | 1,041 | java | Java | Java/mine/v1ch08/pair1/PairTest1.java | jinbooooom/PROGRAMMING-Checking-for-Missing | 2a96f0d4197ba8447239c0bf2520085adf6f8e7b | [
"MIT"
] | 46 | 2020-08-01T06:55:55.000Z | 2022-01-26T14:11:33.000Z | Java/mine/v1ch08/pair1/PairTest1.java | jinbooooom/PROGRAMMING-Checking-for-Missing | 2a96f0d4197ba8447239c0bf2520085adf6f8e7b | [
"MIT"
] | null | null | null | Java/mine/v1ch08/pair1/PairTest1.java | jinbooooom/PROGRAMMING-Checking-for-Missing | 2a96f0d4197ba8447239c0bf2520085adf6f8e7b | [
"MIT"
] | 14 | 2020-08-01T07:22:44.000Z | 2022-01-26T14:11:02.000Z | 25.390244 | 79 | 0.557157 | 12,774 | //package pair1;
import pair1.*;
/**
* @version 1.01 2012-01-26
* @author Cay Horstmann
*/
public class PairTest1
{
public static void main(String[] args)
{
String[] words = { "Mary", "had", "a", "little", "lamb" };
Pair<String> mm = ArrayAlg.minmax(words);
System.out.println("min = " + mm.getFirst());
System.out.println("max = " + mm.getSecond());
}
}
class ArrayAlg
{
/**
* Gets the minimum and maximum of an array of strings.
* @param a an array of strings
* @return a pair with the min and max values, or null if a is null or empty
*/
public static Pair<String> minmax(String[] a)
{
if (a == null || a.length == 0) return null;
String min = a[0];
String max = a[0];
for (int i = 1; i < a.length; i++)
{
if (min.compareTo(a[i]) > 0) min = a[i];
if (max.compareTo(a[i]) < 0) max = a[i];
}
//return new Pair<String>(min, max);
// 菱形括号里的类型可省略,编译器有足够的信息判断出你想要的方法【i331】
return new Pair<>(min, max);
}
}
|
3e1e2d6570d681a540f0e608b61b437cbab9f955 | 4,367 | java | Java | andmore-swt/org.eclipse.andmore.sdkuilib/src/com/android/sdkuilib/repository/SdkUpdaterWindow.java | androidworx/andworx | f8de7f8caa20e632fdd1eda2e47f32ec391392c4 | [
"Apache-2.0"
] | 2 | 2018-11-17T08:48:47.000Z | 2019-03-01T14:51:27.000Z | andmore-swt/org.eclipse.andmore.sdkuilib/src/com/android/sdkuilib/repository/SdkUpdaterWindow.java | androidworx/andworx | f8de7f8caa20e632fdd1eda2e47f32ec391392c4 | [
"Apache-2.0"
] | 1 | 2018-11-28T02:36:03.000Z | 2018-11-28T02:36:03.000Z | andmore-swt/org.eclipse.andmore.sdkuilib/src/com/android/sdkuilib/repository/SdkUpdaterWindow.java | androidworx/andworx | f8de7f8caa20e632fdd1eda2e47f32ec391392c4 | [
"Apache-2.0"
] | null | null | null | 32.110294 | 97 | 0.640714 | 12,775 | /*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.sdkuilib.repository;
import com.android.repository.api.RemotePackage;
import com.android.sdkuilib.internal.repository.content.PackageType;
import com.android.sdkuilib.internal.repository.ui.SdkUpdaterWindowImpl2;
import java.util.List;
import org.eclipse.andmore.sdktool.SdkCallAgent;
import org.eclipse.swt.widgets.Shell;
/**
* Opens an SDK Manager Window.
*
* This is the public entry point for using the window.
*/
public class SdkUpdaterWindow {

    /** The actual window implementation to which this class delegates. */
    private final SdkUpdaterWindowImpl2 mWindow;
    // mediates between the application and the UI layer; its operations are
    // completed in open()'s finally block
    private final SdkCallAgent mSdkCallAgent;

    /**
     * Enum giving some indication of what is invoking this window.
     * The behavior and UI will change slightly depending on the context.
     * <p/>
     * Note: if you add Android support to your specific IDE, you might want
     * to specialize this context enum.
     */
    public enum SdkInvocationContext {
        /**
         * The SDK Manager is invoked from the stand-alone 'android' tool.
         * In this mode, we present an about box, a settings page.
         * For SdkMan2, we also have a menu bar and link to the AVD manager.
         */
        STANDALONE,

        /**
         * The SDK Manager is invoked from the standalone AVD Manager.
         * This is similar to the standalone mode except that in this case we
         * don't display a menu item linking to the AVD Manager.
         */
        AVD_MANAGER,

        /**
         * The SDK Manager is invoked from an IDE.
         * In this mode, we do not modify the menu bar. There is no about box
         * and no settings (e.g. HTTP proxy settings are inherited from Eclipse.)
         */
        IDE,

        /**
         * The SDK Manager is invoked from the AVD Selector.
         * For SdkMan1, this means the AVD page will be displayed first.
         * For SdkMan2, we won't be using this.
         */
        AVD_SELECTOR
    }

    /**
     * Creates a new window. Caller must call open(), which will block.
     *
     * @param parentShell Parent shell.
     * @param sdkCallAgent Mediates between application and UI layer
     * @param context The {@link SdkInvocationContext} to change the behavior depending on who's
     *  opening the SDK Manager.
     */
    public SdkUpdaterWindow(
            Shell parentShell,
            SdkCallAgent sdkCallAgent,
            SdkInvocationContext context) {
        this.mSdkCallAgent = sdkCallAgent;
        mWindow = new SdkUpdaterWindowImpl2(parentShell, sdkCallAgent.getSdkContext(), context);
    }

    /**
     * Adds a new listener to be notified when a change is made to the content of the SDK.
     * This should be called before {@link #open()}.
     */
    public void addListener(ISdkChangeListener listener) {
        mWindow.addListener(listener);
    }

    /**
     * Removes a new listener to be notified anymore when a change is made to the content of
     * the SDK.
     */
    public void removeListener(ISdkChangeListener listener) {
        mWindow.removeListener(listener);
    }

    /** Restricts the window to packages of the given type (delegates to the implementation). */
    public void addPackageFilter(PackageType packageType) {
        mWindow.addPackageFilter(packageType);
    }

    /** Restricts the window to packages carrying the given tag (delegates to the implementation). */
    public void setTagFilter(String tag) {
        mWindow.setTagFilter(tag);
    }

    /**
     * Opens the window.
     */
    public void open() {
        try {
            mWindow.open();
        }
        finally {
            // always release SDK resources, even if the window throws
            mSdkCallAgent.completeSdkOperations();
        }
    }

    /** @return the packages that were installed while the window was open */
    public List<RemotePackage> getPackagesInstalled() {
        return mWindow.getPackagesInstalled();
    }
}
|
3e1e2eba6b51db9ecc61ed1fb553e0d81d23b1ed | 375 | java | Java | springboot-admin-server/src/test/java/com/ctg/test/springbootadminserver/SpringbootAdminServerApplicationTests.java | yxye/springBoot-demos | c19c4f72d0c373ad6f1051d2074bfb6d1cd45963 | [
"Apache-2.0"
] | 328 | 2018-11-22T02:00:37.000Z | 2022-03-30T07:33:51.000Z | springboot-admin-server/src/test/java/com/ctg/test/springbootadminserver/SpringbootAdminServerApplicationTests.java | zzzzxxz/springBoot-demos | 142b9bf8434d444ae7ced7a7ae6a68877994aa5a | [
"Apache-2.0"
] | 3 | 2019-07-18T07:27:36.000Z | 2022-02-09T22:09:57.000Z | springboot-admin-server/src/test/java/com/ctg/test/springbootadminserver/SpringbootAdminServerApplicationTests.java | zzzzxxz/springBoot-demos | 142b9bf8434d444ae7ced7a7ae6a68877994aa5a | [
"Apache-2.0"
] | 371 | 2019-01-18T06:59:37.000Z | 2022-03-30T07:34:01.000Z | 22.058824 | 60 | 0.802667 | 12,776 | package com.ctg.test.springbootadminserver;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest
public class SpringbootAdminServerApplicationTests {

    /** Smoke test: passes if the Spring application context starts without errors. */
    @Test
    public void contextLoads() {
    }

}
|
3e1e302f3c8b1cc8eb271d80044321bc1e18aff3 | 3,734 | java | Java | src/main/java/de/netbeacon/jstorage/server/socket/api/processing/action/CacheAction_DeleteCache.java | Horstexplorer/JStorage | 608c5ef4fb25a0404022e6552b3da8f799733eb4 | [
"Apache-2.0"
] | null | null | null | src/main/java/de/netbeacon/jstorage/server/socket/api/processing/action/CacheAction_DeleteCache.java | Horstexplorer/JStorage | 608c5ef4fb25a0404022e6552b3da8f799733eb4 | [
"Apache-2.0"
] | null | null | null | src/main/java/de/netbeacon/jstorage/server/socket/api/processing/action/CacheAction_DeleteCache.java | Horstexplorer/JStorage | 608c5ef4fb25a0404022e6552b3da8f799733eb4 | [
"Apache-2.0"
] | null | null | null | 36.607843 | 216 | 0.736743 | 12,777 | /*
* Copyright 2020 Horstexplorer @ https://www.netbeacon.de
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.netbeacon.jstorage.server.socket.api.processing.action;
import de.netbeacon.jstorage.server.internal.cachemanager.CacheManager;
import de.netbeacon.jstorage.server.internal.usermanager.UserManager;
import de.netbeacon.jstorage.server.internal.usermanager.object.DependentPermission;
import de.netbeacon.jstorage.server.internal.usermanager.object.GlobalPermission;
import de.netbeacon.jstorage.server.internal.usermanager.object.User;
import de.netbeacon.jstorage.server.socket.api.processing.APIProcessorResult;
import de.netbeacon.jstorage.server.tools.exceptions.CryptException;
import de.netbeacon.jstorage.server.tools.exceptions.DataStorageException;
import de.netbeacon.jstorage.server.tools.exceptions.GenericObjectException;
import org.json.JSONObject;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
/**
* Cache Action - Delete Cache
* <p>
* --- Does --- <br>
* Tries to delete the specified cache <br>
* Exceptions catched by superordinate processing handler <br>
* --- Returns --- <br>
* cache <br>
* --- Requirements --- <br>
* path: cache/mng <br>
* action: delete <br>
* http_method: delete <br>
* login-mode: token <br>
* payload: no <br>
* permissions: GlobalPermission.Admin, GlobalPermission.CacheAdmin, DependentPermission.CacheAdmin_Creator <br>
* required_arguments: identifier(String, cacheIdentifier) <br>
* optional_arguments: <br>
*
* @author horstexplorer <br>
*/
public class CacheAction_DeleteCache implements ProcessingAction {

    /** Result object the response for this request is written to. */
    private APIProcessorResult result;
    /** Request arguments; must contain the "identifier" of the target cache. */
    private HashMap<String, String> args;
    /** Authenticated user issuing the request. */
    private User user;

    @Override
    public ProcessingAction createNewInstance() {
        return new CacheAction_DeleteCache();
    }

    @Override
    public boolean supportedHTTPMethod(String method) {
        // cache deletion is only reachable through HTTP DELETE
        return "delete".equalsIgnoreCase(method);
    }

    @Override
    public boolean userHasPermission() {
        // allowed: global admins, global cache admins, or the creator of this specific cache
        return user.hasGlobalPermission(GlobalPermission.Admin)
                || user.hasGlobalPermission(GlobalPermission.CacheAdmin)
                || user.hasDependentPermission(args.get("identifier"), DependentPermission.CacheAdmin_Creator);
    }

    @Override
    public String getAction() {
        return "delete";
    }

    @Override
    public List<String> requiredArguments() {
        return Collections.singletonList("identifier");
    }

    @Override
    public void setup(User user, APIProcessorResult result, HashMap<String, String> args){
        this.user = user;
        this.result = result;
        this.args = args;
    }

    /**
     * Deletes the cache named by the "identifier" argument, removes the now
     * meaningless dependent permissions for that cache from every user, and
     * echoes the (lower-cased) identifier back in the response.
     */
    @Override
    public void process() throws DataStorageException, GenericObjectException, CryptException, NullPointerException {
        CacheManager.getInstance().deleteCache(args.get("identifier"));
        UserManager.getInstance().getDataPool().forEach((k, v) -> v.removeDependentPermissions(args.get("identifier")));
        JSONObject customResponseData = new JSONObject()
                // Locale.ROOT keeps the lower-casing stable regardless of the JVM's
                // default locale (e.g. the Turkish dotless-i would otherwise corrupt
                // identifiers containing 'I')
                .put("identifier", args.get("identifier").toLowerCase(Locale.ROOT));
        // set result
        result.addResult(this.getDefaultResponse(customResponseData));
    }
}
|
3e1e30dc5523b9fe46eabfe7b7d4e42933b8ed74 | 4,201 | java | Java | initializer/data-generators/pdgf-fork/src/main/java/pdgf/generator/NameGenerator.java | dritter-hd/eipbench | c49c109e58226b06c96c54f69bab6e061a738225 | [
"Apache-2.0"
] | null | null | null | initializer/data-generators/pdgf-fork/src/main/java/pdgf/generator/NameGenerator.java | dritter-hd/eipbench | c49c109e58226b06c96c54f69bab6e061a738225 | [
"Apache-2.0"
] | 4 | 2020-03-04T22:18:44.000Z | 2021-12-09T20:51:18.000Z | initializer/data-generators/pdgf-fork/src/main/java/pdgf/generator/NameGenerator.java | dritter-hd/eipbench | c49c109e58226b06c96c54f69bab6e061a738225 | [
"Apache-2.0"
] | null | null | null | 34.154472 | 101 | 0.709831 | 12,778 | /*******************************************************************************
* Copyright (c) 2011, Chair of Distributed Information Systems, University of Passau.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the University of Passau nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
******************************************************************************/
package pdgf.generator;
import pdgf.core.FieldValueDTO;
import pdgf.core.dataGenerator.GenerationContext;
import pdgf.core.dbSchema.Field;
import pdgf.core.exceptions.ConfigurationException;
import pdgf.core.exceptions.XmlException;
import pdgf.plugin.AbstractPDGFRandom;
import pdgf.plugin.Generator;
import pdgf.util.File.LineAccessFile;
public class NameGenerator extends Generator {

	/** Field backed by a {@link GenderGenerator} if names should follow a generated gender; null otherwise. */
	private Field genderField = null;
	/** Line-indexed file of male first names (first configured file node). */
	private LineAccessFile male;
	/** Line-indexed file of female first names (second configured file node). */
	private LineAccessFile female;

	public NameGenerator() throws XmlException {
		super(
				"First file must provide a list of Male names, second file must provide a list of female names");
	}

	/**
	 * Resolves the optional gender reference and binds the two name files.
	 *
	 * @throws XmlException if the referenced field is not backed by a {@link GenderGenerator}
	 */
	@Override
	public synchronized void initialize(int workers)
			throws ConfigurationException, XmlException {
		super.initialize(workers);

		// check if the generated name should depend on a referenced GenderGenerator
		if (this.getParent().getReference(0) != null) {
			genderField = this.getParent().getReference(0).getRefField();
			if (!(genderField.getChild() instanceof GenderGenerator)) {
				throw new XmlException(getNodeInfo()
						+ "Generator of reference: "
						+ genderField.getNodeInfo()
						+ " must be an instance of GenderGenerator");
			}
		}
		male = getFile(0);
		female = getFile(1);
	}

	@Override
	protected void nextValue(AbstractPDGFRandom rng,
			GenerationContext generationContext, FieldValueDTO currentFieldValue) {
		final boolean isMale;
		if (genderField == null) {
			// no gender reference configured: pick the gender uniformly at random
			isMale = rng.nextBoolean();
		} else {
			isMale = ((Character) getCachedValue(genderField, generationContext)
					.getCachedValue() == 'M');
		}

		final LineAccessFile names = isMale ? male : female;
		currentFieldValue.setValue(names.getLine(getRandomNo(rng,
				generationContext, names.getLineCount())));
	}

	/**
	 * Draws a pseudo-random line index in {@code [0, lineCount)}, using the
	 * configured distribution when one is present.
	 */
	private long getRandomNo(AbstractPDGFRandom rng, GenerationContext gc,
			long lineCount) {
		long number = (getDistribution() == null)
				? rng.nextLong()
				: getDistribution().nextLongValue(rng, gc);
		// Math.floorMod always yields a non-negative result. The previous
		// "negate then %" approach broke for Long.MIN_VALUE, whose negation is
		// itself, and would have produced a negative line index.
		return Math.floorMod(number, lineCount);
	}

	@Override
	protected void configParsers() throws XmlException {
		super.configParsers();
		addFileNodeParser();
	}
}
|
3e1e30e8720bdc77fa06d579024474fff320f70d | 21,960 | java | Java | core/src/main/java/com/riiablo/excel/Excel.java | anastasiard/riiablo | d7ba13fdffe43f54087c2342d07d38251e0aec9b | [
"Apache-2.0"
] | 543 | 2019-03-30T23:50:39.000Z | 2022-03-29T09:45:29.000Z | core/src/main/java/com/riiablo/excel/Excel.java | dcorazolla/riiablo | 995244a94091cd24ba8524832834639b7e1d9275 | [
"Apache-2.0"
] | 127 | 2019-03-29T11:49:49.000Z | 2022-03-01T19:02:28.000Z | core/src/main/java/com/riiablo/excel/Excel.java | dcorazolla/riiablo | 995244a94091cd24ba8524832834639b7e1d9275 | [
"Apache-2.0"
] | 73 | 2019-04-01T16:35:11.000Z | 2022-03-29T10:16:47.000Z | 34.3125 | 105 | 0.588206 | 12,779 | package com.riiablo.excel;
import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Iterator;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.lang3.tuple.Triple;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.IntMap;
import com.badlogic.gdx.utils.ObjectIntMap;
import com.riiablo.logger.LogManager;
import com.riiablo.logger.Logger;
import com.riiablo.logger.MDC;
import com.riiablo.util.ClassUtils;
/**
* Root class of an excel table.
*/
public abstract class Excel<
    E extends Excel.Entry,
    S extends Serializer<E>
    >
    implements Iterable<E>
{
  private static final Logger log = LogManager.getLogger(Excel.class);

  /** Forces excels to either have a {@link PrimaryKey} set or be {@link Indexed} */
  private static final boolean FORCE_PRIMARY_KEY = !true;

  // Shared immutable sentinel so excels without a string primary key never
  // allocate a per-instance map; replaced lazily in putIndex().
  private static final ObjectIntMap EMPTY_OBJECT_INT_MAP = new ObjectIntMap();
  @SuppressWarnings("unchecked") // doesn't store anything
  static <T> ObjectIntMap<T> emptyMap() {
    return (ObjectIntMap<T>) EMPTY_OBJECT_INT_MAP;
  }

  /**
   * Root class of an excel entry.
   */
  public static abstract class Entry {
    /**
     * Tags a specified field as a column within the excel table.
     */
    @Target(ElementType.FIELD)
    @Retention(RetentionPolicy.RUNTIME)
    public @interface Column {
      /**
       * Start index of {@link #format()} (inclusive)
       */
      int startIndex() default 0;

      /**
       * End index of {@link #format()} (exclusive)
       */
      int endIndex() default 0;

      /**
       * String format of column name, {@code ""} to use field name
       * <p>
       * <p>Examples:
       * <ul>
       * <li>{@code "class"}
       * <li>{@code "Transform Color"}
       * <li>{@code "Level%s"}
       * <li>{@code "Skill %d"}
       */
      String format() default "";

      /**
       * Index values of format in the case of non-numerical indexes.
       * <p>
       * <p>Examples:
       * <ul>
       * <li>{@code {"", "(N)", "(H)"}}
       * <li>{@code {"r", "g", "b"}}
       * <li>{@code {"Min", "Max", "MagicMin", "MagicMax", "MagicLvl"}}
       */
      String[] values() default {};

      /**
       * Manually sets the column index. This property overrides all other
       * properties.
       */
      int columnIndex() default -1;
    }
  }

  /** Loads {@code excel} from the tab-separated {@code txt} handle (no binary cache). */
  public static <E extends Entry, S extends Serializer<E>, T extends Excel<E, S>>
  T load(T excel, FileHandle txt) throws IOException {
    return load(excel, txt, null);
  }

  // NOTE(review): stub — `throw null` raises a NullPointerException at runtime;
  // txt/bin loading with a binary cache is not implemented yet.
  public static <E extends Entry, S extends Serializer<E>, T extends Excel<E, S>>
  T load(T excel, FileHandle txt, FileHandle bin) throws IOException {
    throw null;
  }

  /**
   * Opens {@code handle}, parses it as a txt excel into {@code excel}, and
   * rethrows any failure after logging it with the excel path in the MDC.
   */
  static <E extends Entry, S extends Serializer<E>, T extends Excel<E, S>>
  T loadTxt(T excel, FileHandle handle) throws IOException {
    InputStream in = handle.read();
    try {
      MDC.put("excel", handle.path());
      TxtParser parser = TxtParser.parse(in);
      return loadTxt(excel, parser);
    } catch (Throwable t) {
      log.fatal("Unable to load {} as {}: {}",
          handle,
          excel.getClass().getCanonicalName(),
          ExceptionUtils.getRootCauseMessage(t),
          t);
      return ExceptionUtils.rethrow(t);
    } finally {
      MDC.remove("excel");
      IOUtils.closeQuietly(in);
    }
  }

  /**
   * Core txt loader: resolves the primary key field of the entry class,
   * maps annotated fields to parser column indexes, then parses every row
   * into a new entry and registers it in the excel's indexes.
   */
  static <E extends Entry, S extends Serializer<E>, T extends Excel<E, S>>
  T loadTxt(T excel, TxtParser parser)
      throws IOException, ParseException, IllegalAccessException
  {
    final Class<E> entryClass = excel.entryClass();
    final boolean indexed = ClassUtils.hasAnnotation(entryClass, Indexed.class);
    final String[] TMP = new String[1];

    // Scan the entry class for @Column fields, resolving the primary key and
    // collecting (field, columnIndexes, columnNames) triples along the way.
    Field primaryKey = null, firstKey = null;
    Array<Triple<Field, int[], String[]>> columns = new Array<>(true, parser.numColumns(), Triple.class);
    for (Field field : entryClass.getFields()) {
      Entry.Column column = field.getAnnotation(Entry.Column.class);
      if (column == null) {
        log.warn("{} is not tagged with {}", field, Entry.Column.class.getCanonicalName());
        continue;
      }

      PrimaryKey key = field.getAnnotation(PrimaryKey.class);
      if (key != null) {
        if (!ArrayUtils.contains(PrimaryKey.SUPPORTED_TYPES, field.getType())) {
          throw new ParseException(field, "%s must be one of %s",
              field, Arrays.toString(PrimaryKey.SUPPORTED_TYPES));
        }

        if (indexed) {
          // Indexed excels have their primary key assigned automatically based on row index
          log.warn("{} has {} set to the primary key, but class is tagged with {}",
              entryClass, field, Indexed.class.getCanonicalName());
        } else if (primaryKey != null) {
          // Allow declared field tagged as a primary key to override inherited ones
          boolean primaryDeclared = ClassUtils.isDeclaredField(entryClass, primaryKey);
          boolean fieldDeclared = ClassUtils.isDeclaredField(entryClass, field);
          if (primaryDeclared != fieldDeclared) {
            if (fieldDeclared) {
              log.debug("primary key {} changed to {}", primaryKey, field);
              primaryKey = field;
            }
          } else {
            log.warn("multiple primary keys set within {}: {} and {}",
                entryClass, primaryKey.getName(), field.getName());
          }
        } else {
          primaryKey = field;
        }
      }

      if (firstKey == null) firstKey = field;
      populateColumnIndexes(columns, parser, column, field, TMP);
    }

    if (primaryKey == null && !indexed) {
      if (FORCE_PRIMARY_KEY) {
        throw new ParseException(entryClass, "%s does not have a %s set!",
            entryClass, PrimaryKey.class.getCanonicalName());
      } else {
        log.warn("{} does not have a {} set! Defaulting to first key: {}",
            entryClass, PrimaryKey.class.getCanonicalName(), firstKey);
        primaryKey = firstKey;
      }
    }

    // Locate the column index of the primary key
    // TODO: this operation can be cleaned up, but this is only an identity test
    int[] primaryKeyColumnIds = null;
    final Triple<Field, int[], String[]>[] columnTriples = columns.items;
    for (int i = 0, s = columnTriples.length; i < s; i++) {
      if (columnTriples[i].getLeft() == primaryKey) {
        primaryKeyColumnIds = columnTriples[i].getMiddle();
        break;
      }
    }

    // A primary key may map to several columns; use the first resolved one.
    int nonzeroIndex = -1;
    if (!indexed) {
      for (int i = 0, s = primaryKeyColumnIds.length; i < s; i++) {
        if (primaryKeyColumnIds[i] >= 0) {
          nonzeroIndex = i;
          break;
        }
      }

      if (nonzeroIndex == -1) {
        throw new ParseException(primaryKey,
            "primary key %s does not have any columns associated with it",
            primaryKey);
      }
    }

    final int primaryKeyColumnId = indexed ? -1 : primaryKeyColumnIds[nonzeroIndex];
    final Class primaryKeyType = indexed ? null : primaryKey.getType();
    for (int i = excel.offset(); parser.cacheLine() != -1; i++) {
      E entry = excel.newEntry();
      String name = indexed ? null : parser.parseString(primaryKeyColumnId, "");
      try {
        MDC.put("entry", indexed || StringUtils.isBlank(name) ? "" + i : name);
        parseColumns(excel, entry, name, columns, parser);
      } finally {
        MDC.remove("entry");
      }

      // NOTE(review): i is incremented both here and in the for-update above,
      // so row indexes advance by 2 per parsed line (offset, offset+2, ...).
      // Looks unintentional — confirm against the binary serializer / callers.
      putIndex(primaryKey, primaryKeyType, i++, indexed, excel, entry);
    }

    return excel;
  }

  /**
   * Logs (but does not rethrow) a failure to parse one token into one field,
   * with enough context (field, row key, column name, token, target type) to
   * identify the offending cell.
   */
  static void
  catchParseException(
      Throwable t,
      Field field,
      Class type,
      String key,
      String columnName,
      CharSequence token
  ) {
    ParseException parseException = new ParseException(t, field,
        "error parsing field %s row: '%s' column: '%s': '%s' as %s",
        field, key, columnName, token.toString(),
        type.isArray() ? type.getComponentType().getCanonicalName() : type.getCanonicalName());
    log.warn(parseException.getMessage(), parseException);
  }

  /**
   * Parses the current row into {@code entry}: for each mapped field, parses
   * the associated column token(s) with the parser method matching the field
   * type. Per-cell failures are logged via {@link #catchParseException} and the
   * field keeps its default value; only unsupported field types or column
   * format errors abort the row with a {@link ParseException}.
   */
  static <E extends Entry, S extends Serializer<E>, T extends Excel<E, S>>
  void parseColumns(
      T excel,
      E entry,
      String key,
      Array<Triple<Field, int[], String[]>> columns,
      TxtParser parser
  )
      throws IllegalAccessException, ParseException
  {
    for (Triple<Field, int[], String[]> column : columns) {
      final Field field = column.getLeft();
      final int[] columnIds = column.getMiddle();
      final int numColumns = columnIds.length;
      final String[] columnNames = column.getRight();
      final Class type = field.getType();
      try {
        // One branch per supported field type: scalar variant reads a single
        // column, array variant reads one column per element.
        if (type == String.class) {
          try {
            field.set(entry, parser.parseString(columnIds[0], ""));
          } catch (Throwable t) {
            catchParseException(t, field, type, key, columnNames[0], parser.token(columnIds[0]));
          }
        } else if (type == String[].class) {
          final String[] value = new String[numColumns];
          for (int i = 0; i < numColumns; i++) {
            try {
              value[i] = parser.parseString(columnIds[i], "");
            } catch (Throwable t) {
              catchParseException(t, field, type, key, columnNames[i], parser.token(columnIds[i]));
            }
          }
          field.set(entry, value);
        }

        else if (type == byte.class) {
          try {
            field.setByte(entry, parser.parseByte(columnIds[0], (byte) 0));
          } catch (Throwable t) {
            catchParseException(t, field, type, key, columnNames[0], parser.token(columnIds[0]));
          }
        } else if (type == byte[].class) {
          final byte[] value = new byte[numColumns];
          for (int i = 0; i < numColumns; i++) {
            try {
              value[i] = parser.parseByte(columnIds[i], (byte) 0);
            } catch (Throwable t) {
              catchParseException(t, field, type, key, columnNames[i], parser.token(columnIds[i]));
            }
          }
          field.set(entry, value);
        }

        else if (type == short.class) {
          try {
            field.setShort(entry, parser.parseShort(columnIds[0], (short) 0));
          } catch (Throwable t) {
            catchParseException(t, field, type, key, columnNames[0], parser.token(columnIds[0]));
          }
        } else if (type == short[].class) {
          final short[] value = new short[numColumns];
          for (int i = 0; i < numColumns; i++) {
            try {
              value[i] = parser.parseShort(columnIds[i], (short) 0);
            } catch (Throwable t) {
              catchParseException(t, field, type, key, columnNames[i], parser.token(columnIds[i]));
            }
          }
          field.set(entry, value);
        }

        else if (type == int.class) {
          try {
            field.setInt(entry, parser.parseInt(columnIds[0], 0));
          } catch (Throwable t) {
            catchParseException(t, field, type, key, columnNames[0], parser.token(columnIds[0]));
          }
        } else if (type == int[].class) {
          final int[] value = new int[numColumns];
          for (int i = 0; i < numColumns; i++) {
            try {
              value[i] = parser.parseInt(columnIds[i], 0);
            } catch (Throwable t) {
              catchParseException(t, field, type, key, columnNames[i], parser.token(columnIds[i]));
            }
          }
          field.set(entry, value);
        }

        else if (type == long.class) {
          try {
            field.setLong(entry, parser.parseLong(columnIds[0], 0L));
          } catch (Throwable t) {
            catchParseException(t, field, type, key, columnNames[0], parser.token(columnIds[0]));
          }
        } else if (type == long[].class) {
          final long[] value = new long[numColumns];
          for (int i = 0; i < numColumns; i++) {
            try {
              value[i] = parser.parseLong(columnIds[i], 0L);
            } catch (Throwable t) {
              catchParseException(t, field, type, key, columnNames[i], parser.token(columnIds[i]));
            }
          }
          field.set(entry, value);
        }

        else if (type == boolean.class) {
          try {
            field.setBoolean(entry, parser.parseBoolean(columnIds[0], false));
          } catch (Throwable t) {
            catchParseException(t, field, type, key, columnNames[0], parser.token(columnIds[0]));
          }
        } else if (type == boolean[].class) {
          final boolean[] value = new boolean[numColumns];
          for (int i = 0; i < numColumns; i++) {
            try {
              value[i] = parser.parseBoolean(columnIds[i], false);
            } catch (Throwable t) {
              catchParseException(t, field, type, key, columnNames[i], parser.token(columnIds[i]));
            }
          }
          field.set(entry, value);
        }

        else if (type == float.class) {
          try {
            field.setFloat(entry, parser.parseFloat(columnIds[0], 0f));
          } catch (Throwable t) {
            catchParseException(t, field, type, key, columnNames[0], parser.token(columnIds[0]));
          }
        } else if (type == float[].class) {
          final float[] value = new float[numColumns];
          for (int i = 0; i < numColumns; i++) {
            try {
              value[i] = parser.parseFloat(columnIds[i], 0f);
            } catch (Throwable t) {
              catchParseException(t, field, type, key, columnNames[i], parser.token(columnIds[i]));
            }
          }
          field.set(entry, value);
        }

        else if (type == double.class) {
          try {
            field.setDouble(entry, parser.parseDouble(columnIds[0], 0d));
          } catch (Throwable t) {
            catchParseException(t, field, type, key, columnNames[0], parser.token(columnIds[0]));
          }
        } else if (type == double[].class) {
          final double[] value = new double[numColumns];
          for (int i = 0; i < numColumns; i++) {
            try {
              value[i] = parser.parseDouble(columnIds[i], 0d);
            } catch (Throwable t) {
              catchParseException(t, field, type, key, columnNames[i], parser.token(columnIds[i]));
            }
          }
          field.set(entry, value);
        }

        else {
          throw new ParseException(field, "Cannot parse fields of type %s",
              org.apache.commons.lang3.ClassUtils.getCanonicalName(type));
        }
      } catch (ColumnFormat t) {
        // Wrap low-level column format failures with field/row/column context.
        ParseException parseException = new ParseException(field,
            "error parsing field %s row: '%s' column: '%s': '%s' as %s",
            field, key, columnNames[t.columnIndex()], t.columnText(),
            type.isArray() ? type.getComponentType().getCanonicalName() : type.getCanonicalName());
        parseException.initCause(t);
        throw parseException;
      }
    }
  }

  /**
   * Parses the specified field using it's column definition annotation to
   * generate a list of column names and indexes associated with them. These
   * indexes are then stored as a mapping from field to associated column
   * indexes which can be used to retrieve data from the backing excel.
   */
  static void populateColumnIndexes(
      final Array<Triple<Field, int[], String[]>> columns,
      final TxtParser parser,
      final Entry.Column column,
      final Field field,
      final String[] TMP
  ) throws ParseException {
    final String format = column.format();
    final String[] values = column.values();
    final int startIndex = column.startIndex();
    final int endIndex = column.endIndex();
    final int columnIndex = column.columnIndex();
    if (columnIndex >= 0) {
      // explicit columnIndex overrides every other annotation property
      final int[] columnIndexes = new int[] { columnIndex };
      final String[] columnNames = new String[] { null };
      columns.add(Triple.of(field, columnIndexes, columnNames));
      log.trace("pushing column <{}>->{}", field, columnIndexes);
    } else if (format.isEmpty()) {
      final String fieldName = field.getName();
      if (values.length > 0) {
        // values[] used as literal column names
        checkArrayColumns(field, values.length);
        String[] columnNames = new String[values.length];
        for (int i = 0; i < values.length; i++) {
          columnNames[i] = values[i];
        }
        putColumns(columns, parser, field, columnNames);
      } else if (startIndex == 0 && endIndex == 0) {
        // field name used as literal column name
        TMP[0] = fieldName;
        putColumns(columns, parser, field, TMP);
      } else {
        // field name + indexes used as column names
        checkArrayColumns(field, endIndex - startIndex);
        String[] columnNames = new String[endIndex - startIndex];
        for (int i = startIndex, j = 0; i < endIndex; i++, j++) {
          columnNames[j] = fieldName + i;
        }
        putColumns(columns, parser, field, columnNames);
      }
    } else {
      if (startIndex == 0 && endIndex == 0) {
        // format used as literal column name
        TMP[0] = format;
        putColumns(columns, parser, field, TMP);
      } else {
        checkArrayColumns(field, endIndex - startIndex);
        String[] columnNames = new String[endIndex - startIndex];
        if (values.length == 0) {
          // format used in conjunction with indexes as column names
          // format must contain %d within it, replaced with indexes
          for (int i = startIndex, j = 0; i < endIndex; i++, j++) {
            columnNames[j] = String.format(format, i);
          }
        } else {
          // format used in conjunction with values as column names
          // format must contain as many values as indexes
          for (int i = 0, s = values.length; i < s; i++) {
            columnNames[i] = String.format(format, values[i]);
          }
        }

        putColumns(columns, parser, field, columnNames);
      }
    }

    if (log.debugEnabled()) {
      StringBuilder builder = new StringBuilder(256);
      builder.append('{');
      for (Triple<Field, int[], String[]> pair : columns) {
        builder
            .append(pair.getLeft().getName())
            .append('=')
            .append(Arrays.toString(pair.getMiddle()))
            .append(", ");
      }
      if (columns.size > 0) builder.setLength(builder.length() - 2);
      builder.append('}');
      log.debug("columns: {}", builder.toString());
    }
  }

  // Guards against a non-array field being mapped to multiple columns.
  static void checkArrayColumns(Field field, int length) throws ParseException {
    if (!field.getType().isArray() && length > 1) {
      throw new ParseException(field, ""
          + "field %s corresponds to multiple columns. "
          + "is it supposed to be an array type?", field);
    }
  }

  /**
   * Resolves {@code columnNames} to parser column ids and appends the
   * (field, ids, names) triple; unresolved names map to -1 and are logged.
   *
   * @return the index of the appended triple within {@code columns}
   */
  static int putColumns(
      Array<Triple<Field, int[], String[]>> columns,
      TxtParser parser,
      Field field,
      String[] columnNames
  ) {
    final int index = columns.size;
    final int[] columnIndexes = parser.columnId(columnNames);
    columns.add(Triple.of(field, columnIndexes, columnNames));
    log.trace("pushing columns {}->{}", columnNames, columnIndexes);
    if (log.warnEnabled()) {
      for (int i = 0, s = columnIndexes.length; i < s; i++) {
        if (columnIndexes[i] == -1) {
          log.warn("Unable to parse column named '{}'", columnNames[i]);
        }
      }
    }

    return index;
  }

  // NOTE(review): stub — `throw null` raises a NullPointerException at runtime;
  // binary excel loading is not implemented yet.
  static <E extends Entry, S extends Serializer<E>, T extends Excel<E, S>>
  T loadBin(T excel, FileHandle handle) {
    throw null;
  }

  /**
   * Registers {@code entry} in the excel's indexes: by row index for indexed
   * excels, by the int primary key value, or by row index plus a
   * string-to-index mapping for String primary keys.
   */
  static <E extends Entry, T extends Excel<E, ?>>
  void putIndex(
      Field primaryKey,
      Class primaryKeyType,
      int i,
      boolean indexed,
      T excel,
      E entry
  ) throws IllegalAccessException {
    if (indexed) {
      excel._put(i, entry);
    } else if (primaryKeyType == int.class) {
      int id = primaryKey.getInt(entry);
      excel._put(id, entry);
    } else if (primaryKeyType == String.class) {
      String id = (String) primaryKey.get(entry);
      excel._put(i, entry);
      // lazily replace the shared empty sentinel; first mapping for an id wins
      if (excel.stringToIndex == EMPTY_OBJECT_INT_MAP) excel.stringToIndex = new ObjectIntMap<>();
      if (!excel.stringToIndex.containsKey(id)) excel.stringToIndex.put(id, i);
    }
  }

  // Runtime type of E, used for reflective column discovery.
  protected final Class<E> entryClass;
  // Maps String primary keys to row indexes; the shared empty sentinel until needed.
  protected ObjectIntMap<String> stringToIndex;
  // Primary storage: row/primary-key id -> entry.
  protected IntMap<E> entries;
  // Entries in insertion order.
  protected Array<Entry> ordered;

  protected Excel(Class<E> entryClass) {
    this(entryClass, 53);
  }

  protected Excel(Class<E> entryClass, int initialCapacity) {
    this(entryClass, initialCapacity, 0.8f);
  }

  protected Excel(Class<E> entryClass, int initialCapacity, float loadFactor) {
    this.entryClass = entryClass;
    this.stringToIndex = emptyMap();
    this.entries = new IntMap<>(initialCapacity, loadFactor);
    this.ordered = new Array<>(true, (int) (initialCapacity * loadFactor), Entry.class);
  }

  public Class<? extends Excel> excelClass() {
    return getClass();
  }

  public Class<E> entryClass() {
    return entryClass;
  }

  // Internal put: stores the entry and notifies the subclass hook.
  final void _put(int id, E value) {
    entries.put(id, value);
    put(id, value);
  }

  /** Subclass hook invoked after an entry is stored; default no-op. */
  protected void put(int id, E value) {}

  /** First row index assigned when loading; subclasses may shift it. */
  protected int offset() {
    return 0;
  }

  /** Post-load initialization hook; default no-op. */
  protected void init() {}

  /** Looks up an entry by its String primary key (null-equivalent if absent). */
  public E get(String id) {
    return get(index(id));
  }

  public E get(int id) {
    return entries.get(id);
  }

  /** Returns the row index mapped to {@code id}, or -1 if unmapped. */
  public int index(String id) {
    return stringToIndex.get(id, -1);
  }

  public int size() {
    return entries.size;
  }

  public abstract E newEntry();

  public abstract S newSerializer();

  @Override
  public Iterator<E> iterator() {
    return entries.values().iterator();
  }
}
|
3e1e32e4544feafa51678c03dff0cf609bd0d5a6 | 3,589 | java | Java | src/client/MapleStat.java | marcosppastor/MSV146 | 95100059cb1542735b104f47f6e86d42f0ec4c7c | [
"MIT"
] | 5 | 2021-07-12T03:02:41.000Z | 2021-11-23T18:36:25.000Z | src/client/MapleStat.java | marcosppastor/MSV83 | 95100059cb1542735b104f47f6e86d42f0ec4c7c | [
"MIT"
] | null | null | null | src/client/MapleStat.java | marcosppastor/MSV83 | 95100059cb1542735b104f47f6e86d42f0ec4c7c | [
"MIT"
] | null | null | null | 28.015625 | 73 | 0.562744 | 12,780 | /*
This file is part of the OdinMS Maple Story Server
Copyright (C) 2008 Patrick Huy <envkt@example.com>
Matthias Butz <envkt@example.com>
Jan Christian Meyer <hzdkv@example.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation version 3 as published by
the Free Software Foundation. You may not use, modify or distribute
this program under any other version of the GNU Affero General Public
License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package client;
public enum MapleStat {

    SKIN(0x1),
    FACE(0x2),
    HAIR(0x4),
    LEVEL(0x10),
    JOB(0x20),
    STR(0x40),
    DEX(0x80),
    INT(0x100),
    LUK(0x200),
    HP(0x400),
    MAXHP(0x800),
    MP(0x1000),
    MAXMP(0x2000),
    AVAILABLEAP(0x4000),
    AVAILABLESP(0x8000),
    EXP(0x10000),
    FAME(0x20000),
    MESO(0x40000),
    PET(0x180008),
    GACHAEXP(0x200000),
    CHARISMA(0x800000), // ambition int
    INSIGHT(0x1000000),
    WILL(0x2000000), // int
    CRAFT(0x4000000), // dilligence, int
    SENSE(0x8000000), // empathy, int
    CHARM(0x10000000);

    /** Bitmask identifying this stat in client stat-update packets. */
    private final int value;

    private MapleStat(int value) {
        this.value = value;
    }

    public int getValue() {
        return value;
    }

    /**
     * Resolves a stat from its packet bitmask.
     *
     * @return the first stat whose mask equals {@code value}, or null if none matches
     */
    public static MapleStat getByValue(int value) {
        final MapleStat[] stats = values();
        for (int idx = 0; idx < stats.length; idx++) {
            if (stats[idx].value == value) {
                return stats[idx];
            }
        }
        return null;
    }

    /**
     * Resolves the four primary stats from their 5-byte packet encoding.
     *
     * @return STR/DEX/INT/LUK for the recognized encodings, otherwise null
     */
    public static MapleStat getBy5ByteEncoding(int encoded) {
        switch (encoded) {
            case 64:
                return STR;
            case 128:
                return DEX;
            case 256:
                return INT;
            case 512:
                return LUK;
            default:
                return null;
        }
    }

    /**
     * Resolves a stat from its script/config name. Only the stats listed below
     * are recognized; every other name (including GACHAEXP, CHARISMA, INSIGHT,
     * WILL, CRAFT, SENSE and CHARM) yields null, matching the legacy lookup.
     */
    public static MapleStat getByString(String type) {
        switch (type) {
            case "SKIN":
                return SKIN;
            case "FACE":
                return FACE;
            case "HAIR":
                return HAIR;
            case "LEVEL":
                return LEVEL;
            case "JOB":
                return JOB;
            case "STR":
                return STR;
            case "DEX":
                return DEX;
            case "INT":
                return INT;
            case "LUK":
                return LUK;
            case "HP":
                return HP;
            case "MAXHP":
                return MAXHP;
            case "MP":
                return MP;
            case "MAXMP":
                return MAXMP;
            case "AVAILABLEAP":
                return AVAILABLEAP;
            case "AVAILABLESP":
                return AVAILABLESP;
            case "EXP":
                return EXP;
            case "FAME":
                return FAME;
            case "MESO":
                return MESO;
            case "PET":
                return PET;
            default:
                return null;
        }
    }
}
|
3e1e34d999e2748ca9d84e18f520c6d219085551 | 9,966 | java | Java | storm-core/src/jvm/org/apache/storm/daemon/supervisor/timer/RunProfilerActions.java | one3chens/storm | e9785d8f1944f6c4bd9ad24dbf7b296830c7e685 | [
"Apache-2.0"
] | 1 | 2016-05-05T02:06:14.000Z | 2016-05-05T02:06:14.000Z | storm-core/src/jvm/org/apache/storm/daemon/supervisor/timer/RunProfilerActions.java | one3chens/storm | e9785d8f1944f6c4bd9ad24dbf7b296830c7e685 | [
"Apache-2.0"
] | 3 | 2021-06-23T21:23:19.000Z | 2021-12-14T22:01:48.000Z | storm-core/src/jvm/org/apache/storm/daemon/supervisor/timer/RunProfilerActions.java | one3chens/storm | e9785d8f1944f6c4bd9ad24dbf7b296830c7e685 | [
"Apache-2.0"
] | 1 | 2017-04-12T17:06:41.000Z | 2017-04-12T17:06:41.000Z | 47.009434 | 143 | 0.622617 | 12,781 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.daemon.supervisor.timer;
import com.google.common.collect.Lists;
import org.apache.storm.Config;
import org.apache.storm.cluster.IStormClusterState;
import org.apache.storm.daemon.supervisor.SupervisorData;
import org.apache.storm.daemon.supervisor.SupervisorUtils;
import org.apache.storm.generated.ProfileAction;
import org.apache.storm.generated.ProfileRequest;
import org.apache.storm.utils.ConfigUtils;
import org.apache.storm.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.*;
public class RunProfilerActions implements Runnable {
    // Fixed: logger field should be final.
    private static final Logger LOG = LoggerFactory.getLogger(RunProfilerActions.class);

    private Map conf;
    private IStormClusterState stormClusterState;
    private String hostName;
    private String profileCmd;
    private SupervisorData supervisorData;

    /**
     * Exit callback for a launched profiler process. When the request was a
     * "stop" (its deadline passed), the satisfied profile request is removed
     * from cluster state so it is not re-run.
     */
    private class ActionExitCallback implements Utils.ExitCodeCallable {
        private String stormId;
        private ProfileRequest profileRequest;
        private String logPrefix;
        private boolean stop;

        public ActionExitCallback(String stormId, ProfileRequest profileRequest, String logPrefix, boolean stop) {
            this.stormId = stormId;
            this.profileRequest = profileRequest;
            this.logPrefix = logPrefix;
            this.stop = stop;
        }

        @Override
        public Object call() throws Exception {
            return null;
        }

        @Override
        public Object call(int exitCode) {
            LOG.info("{} profile-action exited for {}", logPrefix, exitCode);
            try {
                if (stop)
                    stormClusterState.deleteTopologyProfileRequests(stormId, profileRequest);
            } catch (Exception e) {
                // Fixed: pass the exception to the logger so the stack trace is not lost.
                LOG.warn("failed delete profileRequest: " + profileRequest, e);
            }
            return null;
        }
    }

    public RunProfilerActions(SupervisorData supervisorData) {
        this.conf = supervisorData.getConf();
        this.stormClusterState = supervisorData.getStormClusterState();
        this.hostName = supervisorData.getHostName();
        String stormHome = System.getProperty("storm.home");
        this.profileCmd = stormHome + Utils.FILE_PATH_SEPARATOR + "bin" + Utils.FILE_PATH_SEPARATOR + conf.get(Config.WORKER_PROFILER_COMMAND);
        this.supervisorData = supervisorData;
    }

    /**
     * Scans the profiler actions requested for topologies assigned to this
     * supervisor and, for each request targeting this host, launches the
     * matching profiler command against the worker's JVM process.
     */
    @Override
    public void run() {
        Map<String, List<ProfileRequest>> stormIdToActions = supervisorData.getStormIdToProfilerActions().get();
        try {
            for (Map.Entry<String, List<ProfileRequest>> entry : stormIdToActions.entrySet()) {
                String stormId = entry.getKey();
                List<ProfileRequest> requests = entry.getValue();
                if (requests != null) {
                    for (ProfileRequest profileRequest : requests) {
                        // Only act on requests addressed to this supervisor's host.
                        if (profileRequest.get_nodeInfo().get_node().equals(hostName)) {
                            // Past the request's timestamp the action becomes a "stop".
                            boolean stop = System.currentTimeMillis() > profileRequest.get_time_stamp();
                            Long port = profileRequest.get_nodeInfo().get_port().iterator().next();
                            String targetDir = ConfigUtils.workerArtifactsRoot(conf, stormId, port.intValue());
                            Map stormConf = ConfigUtils.readSupervisorStormConf(conf, stormId);
                            String user = null;
                            if (stormConf.get(Config.TOPOLOGY_SUBMITTER_USER) != null) {
                                user = (String) (stormConf.get(Config.TOPOLOGY_SUBMITTER_USER));
                            }
                            Map<String, String> env = null;
                            if (stormConf.get(Config.TOPOLOGY_ENVIRONMENT) != null) {
                                env = (Map<String, String>) stormConf.get(Config.TOPOLOGY_ENVIRONMENT);
                            } else {
                                env = new HashMap<String, String>();
                            }
                            String pidPath = ConfigUtils.workerArtifactsPidPath(conf, stormId, port.intValue());
                            String workerPid = null;
                            try (FileReader reader = new FileReader(pidPath); BufferedReader br = new BufferedReader(reader)) {
                                workerPid = br.readLine();
                            }
                            // Fixed: readLine() returns null for an empty pid file and the
                            // original readLine().trim() threw an NPE; skip and retry later.
                            if (workerPid == null) {
                                LOG.warn("Empty worker pid file {}, skipping profiler action for {}:{}", pidPath, stormId, port);
                                continue;
                            }
                            workerPid = workerPid.trim();
                            ProfileAction profileAction = profileRequest.get_action();
                            String logPrefix = "ProfilerAction process " + stormId + ":" + port + " PROFILER_ACTION: " + profileAction + " ";
                            // Until PROFILER_STOP action is invalid, keep launching profiler start in case worker restarted
                            // The profiler plugin script validates if JVM is recording before starting another recording.
                            List<String> command = mkCommand(profileAction, stop, workerPid, targetDir);
                            try {
                                ActionExitCallback actionExitCallback = new ActionExitCallback(stormId, profileRequest, logPrefix, stop);
                                launchProfilerActionForWorker(user, targetDir, command, env, actionExitCallback, logPrefix);
                            } catch (IOException e) {
                                // Fixed: include the exception so the failure cause is visible.
                                LOG.error("Error in processing ProfilerAction '{}' for {}:{}, will retry later", profileAction, stormId, port, e);
                            } catch (RuntimeException e) {
                                LOG.error("Error in processing ProfilerAction '{}' for {}:{}, will retry later", profileAction, stormId, port, e);
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            // Fixed: log the exception itself, not only a generic message.
            LOG.error("Error running profiler actions, will retry again later", e);
        }
    }

    /**
     * Launches the profiler command either directly or, when
     * SUPERVISOR_RUN_WORKER_AS_USER is set, via the worker launcher as the
     * topology submitter user.
     *
     * @param user             topology submitter user (may be null)
     * @param targetDir        worker artifacts directory used as working dir and dump target
     * @param commands         profiler command line
     * @param environment      environment variables for the process
     * @param exitCodeCallable callback invoked with the process exit code
     * @param logPrefix        prefix for log lines of the launched process
     */
    private void launchProfilerActionForWorker(String user, String targetDir, List<String> commands, Map<String, String> environment,
            final Utils.ExitCodeCallable exitCodeCallable, String logPrefix) throws IOException {
        File targetFile = new File(targetDir);
        if (Utils.getBoolean(conf.get(Config.SUPERVISOR_RUN_WORKER_AS_USER), false)) {
            LOG.info("Running as user:{} command:{}", user, commands);
            String containerFile = Utils.containerFilePath(targetDir);
            if (Utils.checkFileExists(containerFile)) {
                SupervisorUtils.rmrAsUser(conf, containerFile, containerFile);
            }
            String scriptFile = Utils.scriptFilePath(targetDir);
            if (Utils.checkFileExists(scriptFile)) {
                SupervisorUtils.rmrAsUser(conf, scriptFile, scriptFile);
            }
            String script = Utils.writeScript(targetDir, commands, environment);
            List<String> args = new ArrayList<>();
            args.add("profiler");
            args.add(targetDir);
            args.add(script);
            SupervisorUtils.processLauncher(conf, user, null, args, environment, logPrefix, exitCodeCallable, targetFile);
        } else {
            Utils.launchProcess(commands, environment, logPrefix, exitCodeCallable, targetFile);
        }
    }

    /**
     * Maps a profile action (plus whether its deadline passed) to the
     * concrete profiler command line. Returns an empty list for unknown
     * combinations.
     */
    private List<String> mkCommand(ProfileAction action, boolean stop, String workerPid, String targetDir) {
        if (action == ProfileAction.JMAP_DUMP) {
            return jmapDumpCmd(workerPid, targetDir);
        } else if (action == ProfileAction.JSTACK_DUMP) {
            return jstackDumpCmd(workerPid, targetDir);
        } else if (action == ProfileAction.JPROFILE_DUMP) {
            return jprofileDump(workerPid, targetDir);
        } else if (action == ProfileAction.JVM_RESTART) {
            return jprofileJvmRestart(workerPid);
        } else if (!stop && action == ProfileAction.JPROFILE_STOP) {
            // JPROFILE_STOP before its deadline means "keep profiling running".
            return jprofileStart(workerPid);
        } else if (stop && action == ProfileAction.JPROFILE_STOP) {
            return jprofileStop(workerPid, targetDir);
        }
        return Lists.newArrayList();
    }

    // Heap dump of the worker JVM into targetDir.
    private List<String> jmapDumpCmd(String pid, String targetDir) {
        return Lists.newArrayList(profileCmd, pid, "jmap", targetDir);
    }

    // Thread dump of the worker JVM into targetDir.
    private List<String> jstackDumpCmd(String pid, String targetDir) {
        return Lists.newArrayList(profileCmd, pid, "jstack", targetDir);
    }

    // Start a profiling recording on the worker JVM.
    private List<String> jprofileStart(String pid) {
        return Lists.newArrayList(profileCmd, pid, "start");
    }

    // Stop the recording and dump results into targetDir.
    private List<String> jprofileStop(String pid, String targetDir) {
        return Lists.newArrayList(profileCmd, pid, "stop", targetDir);
    }

    // Dump the in-progress recording into targetDir.
    private List<String> jprofileDump(String pid, String targetDir) {
        return Lists.newArrayList(profileCmd, pid, "dump", targetDir);
    }

    // Kill the worker JVM so the supervisor restarts it.
    private List<String> jprofileJvmRestart(String pid) {
        return Lists.newArrayList(profileCmd, pid, "kill");
    }
}
|
3e1e358206b0dda6822042b1bb11d70231ede86c | 1,717 | java | Java | model/src/main/java/org/jpwh/model/fetching/profile/Item.java | ytachi0026/road.to.hibernate.master | 67615f1855284d262cb2a314149bef2fa7d858e0 | [
"MIT"
] | null | null | null | model/src/main/java/org/jpwh/model/fetching/profile/Item.java | ytachi0026/road.to.hibernate.master | 67615f1855284d262cb2a314149bef2fa7d858e0 | [
"MIT"
] | null | null | null | model/src/main/java/org/jpwh/model/fetching/profile/Item.java | ytachi0026/road.to.hibernate.master | 67615f1855284d262cb2a314149bef2fa7d858e0 | [
"MIT"
] | null | null | null | 20.2 | 66 | 0.651718 | 12,782 | package org.jpwh.model.fetching.profile;
import org.jpwh.model.Constants;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.validation.constraints.NotNull;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
@Entity
@Entity
public class Item {

    // Names of fetch profiles declared for this entity; activating them
    // switches the associations below to eager join fetching.
    public static final String PROFILE_JOIN_SELLER = "JoinSeller";
    public static final String PROFILE_JOIN_BIDS = "JoinBids";

    @Id
    @GeneratedValue(generator = Constants.ID_GENERATOR)
    protected Long id;

    @NotNull
    protected String name;

    // End of the auction; java.util.Date is mutable, so getter/setter expose
    // internal state directly — NOTE(review): consider defensive copies if
    // callers may mutate the returned Date.
    @NotNull
    protected Date auctionEnd;

    // Lazy by default so loading an Item does not load its seller unless the
    // JoinSeller profile (or an explicit fetch) is active.
    @NotNull
    @ManyToOne(fetch = FetchType.LAZY)
    protected User seller;

    // Inverse side of Bid#item; bids are owned by the Bid entity.
    @OneToMany(mappedBy = "item")
    protected Set<Bid> bids = new HashSet<>();

    // No-arg constructor required by JPA.
    public Item() {
    }

    public Item(String name, Date auctionEnd, User seller) {
        this.name = name;
        this.auctionEnd = auctionEnd;
        this.seller = seller;
    }

    public Long getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Date getAuctionEnd() {
        return auctionEnd;
    }

    public void setAuctionEnd(Date auctionEnd) {
        this.auctionEnd = auctionEnd;
    }

    public User getSeller() {
        return seller;
    }

    public void setSeller(User seller) {
        this.seller = seller;
    }

    public Set<Bid> getBids() {
        return bids;
    }

    public void setBids(Set<Bid> bids) {
        this.bids = bids;
    }

    // ...
}
|
3e1e35adc8b7b76030c74e6973f4f71f2eb5861f | 462 | java | Java | src/main/java/cn/linz/base/demo/TestController.java | taoguoliang/base | 94055cf7c9ca22c895c47b3c28cc02468f2a63a9 | [
"Apache-2.0"
] | null | null | null | src/main/java/cn/linz/base/demo/TestController.java | taoguoliang/base | 94055cf7c9ca22c895c47b3c28cc02468f2a63a9 | [
"Apache-2.0"
] | null | null | null | src/main/java/cn/linz/base/demo/TestController.java | taoguoliang/base | 94055cf7c9ca22c895c47b3c28cc02468f2a63a9 | [
"Apache-2.0"
] | null | null | null | 22 | 84 | 0.755411 | 12,783 | package cn.linz.base.demo;
import cn.linz.base.controller.BaseController;
import io.swagger.annotations.Api;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST controller that exposes the generic CRUD endpoints inherited from
 * {@code BaseController} for the {@code Test} demo entity under the
 * {@code /api} path prefix. The five type arguments bind the entity type,
 * its {@code String} id, and the query/create/update DTO types (all
 * {@code Test} here).
 *
 * @author taogl
 * @date 2021/12/12 01:03
 * @version 1.0.0
 */
@RestController
@RequestMapping("api")
@Api(tags = "测试")
public class TestController extends BaseController<Test, String, Test, Test, Test> {
}
|
3e1e35f1a369ba0b82cb519af98d648ad2ba4cce | 1,049 | java | Java | src/partsom/clustering/KMeansException.java | pauloewerton/partSOM4Grid | ff7e90aa7c68130e1fc8ea2b258636ba6efe829b | [
"Artistic-2.0"
] | null | null | null | src/partsom/clustering/KMeansException.java | pauloewerton/partSOM4Grid | ff7e90aa7c68130e1fc8ea2b258636ba6efe829b | [
"Artistic-2.0"
] | null | null | null | src/partsom/clustering/KMeansException.java | pauloewerton/partSOM4Grid | ff7e90aa7c68130e1fc8ea2b258636ba6efe829b | [
"Artistic-2.0"
] | null | null | null | 25.585366 | 85 | 0.708294 | 12,784 | /*
* 2010, http://github.com/pauloewerton/partSOM4Grid
* This file is part of partSOM4Grid
*
* partSOM4Grid is free software: you can redistribute it and/or modify it under the
* terms of the Artistic License 2.0 as published by the OSI.
*
* This program is distributed in hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the Artistic License 2.0
* for more details.
*
* You should have received a copy of the Artistic License 2.0
* along with this program. See <www.opensource.org/licenses/artistic-license-2.0>.
*
*/
package partsom.clustering;
/**
 * Checked exception signalling a failure inside the k-means clustering step.
 */
public class KMeansException extends Exception {

    private static final long serialVersionUID = 1L;

    /** Creates an exception with neither detail message nor cause. */
    public KMeansException() {
    }

    /** Creates an exception carrying only a detail message. */
    public KMeansException(String message) {
        super(message);
    }

    /** Creates an exception carrying only an underlying cause. */
    public KMeansException(Throwable cause) {
        super(cause);
    }

    /** Creates an exception with both a detail message and a cause. */
    public KMeansException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
3e1e364e3972efad1f118507ba37faaade3943fc | 504 | java | Java | src/main/java/me/qyh/blog/file/vo/FileContent.java | LikoLi/blog | 2e46b38a220afc55564809c144ea075c53ba0d75 | [
"Apache-2.0"
] | null | null | null | src/main/java/me/qyh/blog/file/vo/FileContent.java | LikoLi/blog | 2e46b38a220afc55564809c144ea075c53ba0d75 | [
"Apache-2.0"
] | null | null | null | src/main/java/me/qyh/blog/file/vo/FileContent.java | LikoLi/blog | 2e46b38a220afc55564809c144ea075c53ba0d75 | [
"Apache-2.0"
] | null | null | null | 12.923077 | 41 | 0.662698 | 12,785 | package me.qyh.blog.file.vo;
/**
* @since 6.3
* @author wwwqyhme
*
*/
public class FileContent {
private String path;
private String ext;
private String content;
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public String getExt() {
return ext;
}
public void setExt(String ext) {
this.ext = ext;
}
public String getContent() {
return content;
}
public void setContent(String content) {
this.content = content;
}
}
|
3e1e368681d64caad05d96dc4afef9567873b359 | 1,379 | java | Java | modules/composite-diagram/src/main/java/org/apache/tuscany/sca/diagram/artifacts/Link.java | apache/tuscany-sca-2.x | 89f2d366d4b0869a4e42ff265ccf4503dda4dc8b | [
"Apache-2.0"
] | 18 | 2015-01-17T17:09:47.000Z | 2021-11-10T16:04:56.000Z | modules/composite-diagram/src/main/java/org/apache/tuscany/sca/diagram/artifacts/Link.java | apache/tuscany-sca-2.x | 89f2d366d4b0869a4e42ff265ccf4503dda4dc8b | [
"Apache-2.0"
] | null | null | null | modules/composite-diagram/src/main/java/org/apache/tuscany/sca/diagram/artifacts/Link.java | apache/tuscany-sca-2.x | 89f2d366d4b0869a4e42ff265ccf4503dda4dc8b | [
"Apache-2.0"
] | 18 | 2015-08-26T15:18:06.000Z | 2021-11-10T16:04:45.000Z | 32.833333 | 98 | 0.711385 | 12,786 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tuscany.sca.diagram.artifacts;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* An "a" element to link two SVG diagrams: not a SCA artifact
*
*/
public class Link {
public Element addElement(Document document, String svgNs, String fileName, String compName) {
Element link = document.createElementNS(svgNs, "a");
link.setAttributeNS(null, "xlink:href", fileName);
link.setAttributeNS(null, "xlink:show", "new");
link.setTextContent(" [" + compName + "] ");
return link;
}
}
|
3e1e36b317ba41f165b90922b86cde9eb90836ed | 761 | java | Java | src/test/java/org/simpleframework/xml/reflect/ReflectionTest.java | AMCON-GmbH/simplexml | 3b49fc611ab37090c4cfa643fb2ae77dfc06f2de | [
"Apache-2.0"
] | 95 | 2015-10-01T08:25:33.000Z | 2022-03-10T22:18:05.000Z | src/test/java/org/simpleframework/xml/reflect/ReflectionTest.java | AMCON-GmbH/simplexml | 3b49fc611ab37090c4cfa643fb2ae77dfc06f2de | [
"Apache-2.0"
] | 40 | 2015-10-08T04:45:48.000Z | 2022-01-24T22:13:06.000Z | src/test/java/org/simpleframework/xml/reflect/ReflectionTest.java | AMCON-GmbH/simplexml | 3b49fc611ab37090c4cfa643fb2ae77dfc06f2de | [
"Apache-2.0"
] | 33 | 2015-10-13T06:49:36.000Z | 2022-02-14T20:25:39.000Z | 29.269231 | 126 | 0.708279 | 12,787 | package org.simpleframework.xml.reflect;
import java.lang.reflect.Method;
import junit.framework.TestCase;
/**
 * Verifies that {@code Reflection} can recover the declared parameter names
 * of a method via reflective lookup.
 */
public class ReflectionTest extends TestCase {

    static final String[] EMPTY_NAMES = new String[0];

    // Target method whose parameter names are resolved reflectively; the
    // name and signature must match the getDeclaredMethod lookup below.
    private static String someMethod(int anInt, String someString, Class thisIsAType) {
        return null;
    }

    public void testParameterNames() throws Exception {
        Class[] signature = {int.class, String.class, Class.class};
        Method method = ReflectionTest.class.getDeclaredMethod("someMethod", signature);
        Reflection reflection = new Reflection();
        String[] resolved = reflection.lookupParameterNames(method, true);
        assertEquals("anInt", resolved[0]);
        assertEquals("someString", resolved[1]);
        assertEquals("thisIsAType", resolved[2]);
    }
} |
3e1e376c6668200a2830ef813e9a4f21737cb7f8 | 2,928 | java | Java | src/Components/Specifications/Presentation/IPresentationComp.java | ssanyu/pamoja | 7e99e4f3464937900c3146de09c2e4bdfe37b6cf | [
"Apache-2.0"
] | 1 | 2021-01-07T07:08:43.000Z | 2021-01-07T07:08:43.000Z | src/Components/Specifications/Presentation/IPresentationComp.java | ssanyu/PAMOJA | 7e99e4f3464937900c3146de09c2e4bdfe37b6cf | [
"Apache-2.0"
] | null | null | null | src/Components/Specifications/Presentation/IPresentationComp.java | ssanyu/PAMOJA | 7e99e4f3464937900c3146de09c2e4bdfe37b6cf | [
"Apache-2.0"
] | null | null | null | 34.857143 | 208 | 0.724044 | 12,788 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package Components.Specifications.Presentation;
import Components.Abstract.Patterns.CPatternsComp;
import Components.IPAMOJAComponent;
import Nodes.CNode;
import java.awt.*;
/**
 * Service interface through which the Presentation component collaborates
 * with other PAMOJA components: symbol-style customization (colors/fonts for
 * highlighting symbols), pattern layout retrieval, and access to the
 * Patterns subcomponent.
 *
 * @author Jackline Ssanyu (efpyi@example.com)
 */
public interface IPresentationComp extends IPAMOJAComponent {

    /**
     * Returns the string representation of this symbol-style customizer.
     *
     * @return the string value of the symbol-style customizer
     */
    String getSymbolStyleCustomizerText();

    /**
     * Sets the symbol-style customizer text, regenerates its internal
     * structure, and notifies observers of
     * {@code SymbolStyleCustomizerText} property changes.
     *
     * @pre well-formed color and font attributes; see
     *      {@code fromText(String aSymbolStyleCustomizer)}
     * @param aSymbolStyleCustomizerText the string value of the symbol-style customizer
     */
    void setSymbolStyleCustomizerText(String aSymbolStyleCustomizerText);

    /**
     * Returns the internal structure of this symbol-style customizer.
     *
     * @return the internal-structure value of the symbol-style customizer
     */
    CSymbolStyleCustomizerStructure getSymbolStyleCustomizerStructure();

    /**
     * Sets the symbol-style customizer's internal structure, regenerates its
     * string representation, and notifies observers of
     * {@code SymbolStyleCustomizerStructure} property changes.
     *
     * @param aSymbolStyleCustomizerStructure the new internal structure
     */
    void setSymbolStyleCustomizerStructure(CSymbolStyleCustomizerStructure aSymbolStyleCustomizerStructure);

    /**
     * Returns the color used to highlight the specified symbol name.
     *
     * @param aSymbolName the name of the symbol to highlight
     * @return the highlight color for the symbol
     */
    public Color symbolNameToColor(String aSymbolName);

    /**
     * Returns the font used to highlight the specified symbol name.
     *
     * @param aSymbolName the name of the symbol to highlight
     * @return the highlight font for the symbol
     */
    public Font symbolNameToFont(String aSymbolName);

    /**
     * Returns an instance of the Patterns subcomponent.
     *
     * @return the CPatternsComp object
     */
    CPatternsComp getPatternsComp();

    /**
     * Returns a string representation of the pattern layouts.
     *
     * @return a string representation of the pattern layouts
     */
    String getPatternLayouts();

    /**
     * Returns a string representation of the pattern layouts used for menu items.
     *
     * @return a string representation of the menu pattern layouts
     */
    String getMenuPatternLayouts();

    /**
     * Returns the patterns node object.
     *
     * @return the CNode object for patterns
     */
    CNode getNode();
}
|
3e1e37c5b18d627134fdcbf625b73fcb71a5d7ca | 638 | java | Java | gulimall-core/src/main/java/com/atguigu/gulimall/commons/to/SkuSaleInfoTo.java | zou-cj/gulimall | 059b87624befff8b0474b8cac7bc1c584c7d0b6b | [
"Apache-2.0"
] | null | null | null | gulimall-core/src/main/java/com/atguigu/gulimall/commons/to/SkuSaleInfoTo.java | zou-cj/gulimall | 059b87624befff8b0474b8cac7bc1c584c7d0b6b | [
"Apache-2.0"
] | 4 | 2021-04-22T16:53:33.000Z | 2021-09-20T20:50:58.000Z | gulimall-core/src/main/java/com/atguigu/gulimall/commons/to/SkuSaleInfoTo.java | hzq379103164/gulimall | 08e0f0df83d753ded26768eb6ae252b911fcba47 | [
"Apache-2.0"
] | null | null | null | 19.333333 | 40 | 0.669279 | 12,789 | package com.atguigu.gulimall.commons.to;
import lombok.Data;
import java.math.BigDecimal;
@Data
public class SkuSaleInfoTo {
    // Id of the SKU these sale settings belong to.
    private Long skuId;
    // Growth points granted when buying this SKU.
    private BigDecimal growBounds;
    // Purchase points granted when buying this SKU.
    private BigDecimal buyBounds;
    // NOTE(review): meaning of this flag array is not evident from this class —
    // presumably it toggles which bonus rules apply; confirm against the caller.
    private Integer[] work;
    // The fields above hold the bonus-points configuration.
    // 0000
    private Integer fullCount;
    private BigDecimal discount;
    private Integer ladderAddOther;
    // The fields above hold the ladder (quantity-tiered) pricing configuration.
    /**
     * "fullPrice": 0,   // threshold amount of the full-reduction promotion
     * "reducePrice": 0, // amount deducted once the threshold is reached
     * "fullAddOther": 0 // whether the full-reduction can stack with other discounts
     */
    private BigDecimal fullPrice;
    private BigDecimal reducePrice;
    private Integer fullAddOther;
}
3e1e385a88179ff40b1b5ea504caf1fd1cfee5b9 | 57,888 | java | Java | app/src/main/java/io/pslab/fragment/LALogicLinesFragment.java | jddeep/pslab-android | 7b9a5bddaea4f819f99dbe0bd896fd0fb9094fc6 | [
"Apache-2.0"
] | 1 | 2019-07-13T09:01:49.000Z | 2019-07-13T09:01:49.000Z | app/src/main/java/io/pslab/fragment/LALogicLinesFragment.java | jddeep/pslab-android | 7b9a5bddaea4f819f99dbe0bd896fd0fb9094fc6 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/io/pslab/fragment/LALogicLinesFragment.java | jddeep/pslab-android | 7b9a5bddaea4f819f99dbe0bd896fd0fb9094fc6 | [
"Apache-2.0"
] | null | null | null | 45.437991 | 149 | 0.5199 | 12,790 | package io.pslab.fragment;
import android.app.Activity;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.BottomSheetBehavior;
import android.support.v4.app.Fragment;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.GestureDetector;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.github.mikephil.charting.charts.LineChart;
import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.components.XAxis;
import com.github.mikephil.charting.components.YAxis;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.LineData;
import com.github.mikephil.charting.data.LineDataSet;
import com.github.mikephil.charting.highlight.Highlight;
import com.github.mikephil.charting.interfaces.datasets.ILineDataSet;
import com.github.mikephil.charting.listener.OnChartValueSelectedListener;
import io.pslab.DataFormatter;
import io.pslab.R;
import io.pslab.activity.LogicalAnalyzerActivity;
import io.pslab.communication.ScienceLab;
import io.pslab.communication.digitalChannel.DigitalChannel;
import io.pslab.others.LogicAnalyzerAxisFormatter;
import io.pslab.others.MathUtils;
import io.pslab.others.ScienceLabCommon;
import io.pslab.others.SwipeGestureDetector;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import butterknife.ButterKnife;
import in.goodiebag.carouselpicker.CarouselPicker;
import static android.content.Context.MODE_PRIVATE;
/**
* Created by viveksb007 on 9/6/17.
*/
public class LALogicLinesFragment extends Fragment {
private static final int EVERY_EDGE = 1;
private static final int DISABLED = 0;
private static final int EVERY_FOURTH_RISING_EDGE = 4;
private static final int EVERY_RISING_EDGE = 3;
private static final int EVERY_FALLING_EDGE = 2;
private final Object lock = new Object();
List<Entry> tempInput;
DigitalChannel digitalChannel;
ArrayList<DigitalChannel> digitalChannelArray;
List<ILineDataSet> dataSets;
// Graph Plot
private CarouselPicker carouselPicker;
private LinearLayout llChannel1, llChannel2, llChannel3, llChannel4;
private Spinner channelSelectSpinner1, channelSelectSpinner2, channelSelectSpinner3, channelSelectSpinner4;
private Spinner edgeSelectSpinner1, edgeSelectSpinner2, edgeSelectSpinner3, edgeSelectSpinner4;
private Button analyze_button;
private ProgressBar progressBar;
private CaptureOne captureOne;
private CaptureTwo captureTwo;
private CaptureThree captureThree;
private CaptureFour captureFour;
private int currentChannel = 0;
private int[] colors = new int[]{Color.MAGENTA, Color.GREEN, Color.CYAN, Color.YELLOW};
private OnChartValueSelectedListener listener;
private Activity activity;
private int channelMode;
private ScienceLab scienceLab;
private LineChart logicLinesChart;
private ArrayList<String> channelNames = new ArrayList<>();
private ArrayList<String> edgesNames = new ArrayList<>();
private TextView tvTimeUnit, xCoordinateText;
private ImageView ledImageView;
private Runnable logicAnalysis;
public static LALogicLinesFragment newInstance(Activity activity) {
LALogicLinesFragment laLogicLinesFragment = new LALogicLinesFragment();
laLogicLinesFragment.activity = activity;
return laLogicLinesFragment;
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ButterKnife.bind(getActivity());
scienceLab = ScienceLabCommon.scienceLab;
logicAnalysis = new Runnable() {
@Override
public void run() {
while (true) {
if (scienceLab.isConnected()) {
if (!String.valueOf(ledImageView.getTag()).equals("green")) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
ledImageView.setImageResource(R.drawable.green_led);
ledImageView.setTag("green");
}
});
}
} else {
if (!String.valueOf(ledImageView.getTag()).equals("red")) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
ledImageView.setImageResource(R.drawable.red_led);
ledImageView.setTag("red");
}
});
}
}
}
}
};
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View v = inflater.inflate(R.layout.logic_analyzer_logic_lines, container, false);
// LED Indicator
ledImageView = v.findViewById(R.id.imageView_led_la);
// Heading
tvTimeUnit = v.findViewById(R.id.la_tv_time_unit);
tvTimeUnit.setText(getString(R.string.time_unit_la));
// Carousel View
carouselPicker = v.findViewById(R.id.carouselPicker);
llChannel1 = v.findViewById(R.id.ll_chart_channel_1);
llChannel1.setVisibility(View.VISIBLE);
llChannel2 = v.findViewById(R.id.ll_chart_channel_2);
llChannel2.setVisibility(View.GONE);
llChannel3 = v.findViewById(R.id.ll_chart_channel_3);
llChannel3.setVisibility(View.GONE);
llChannel4 = v.findViewById(R.id.ll_chart_channel_4);
llChannel4.setVisibility(View.GONE);
channelSelectSpinner1 = v.findViewById(R.id.channel_select_spinner_1);
channelSelectSpinner2 = v.findViewById(R.id.channel_select_spinner_2);
channelSelectSpinner3 = v.findViewById(R.id.channel_select_spinner_3);
channelSelectSpinner4 = v.findViewById(R.id.channel_select_spinner_4);
edgeSelectSpinner1 = v.findViewById(R.id.edge_select_spinner_1);
edgeSelectSpinner2 = v.findViewById(R.id.edge_select_spinner_2);
edgeSelectSpinner3 = v.findViewById(R.id.edge_select_spinner_3);
edgeSelectSpinner4 = v.findViewById(R.id.edge_select_spinner_4);
analyze_button = v.findViewById(R.id.analyze_button);
channelMode = 1;
// Axis Indicator
xCoordinateText = v.findViewById(R.id.x_coordinate_text);
xCoordinateText.setText("Time: 0.0 mS");
progressBar = v.findViewById(R.id.la_progressBar);
progressBar.setVisibility(View.GONE);
((LogicalAnalyzerActivity) getActivity()).setStatus(false);
// Declaring digital data set
digitalChannelArray = new ArrayList<>();
dataSets = new ArrayList<>();
// Creating base layout for chart
logicLinesChart = v.findViewById(R.id.chart_la);
logicLinesChart.setBorderWidth(2);
Legend legend = logicLinesChart.getLegend();
legend.setTextColor(Color.WHITE);
XAxis xAxis = logicLinesChart.getXAxis();
xAxis.setPosition(XAxis.XAxisPosition.TOP);
xAxis.setTextColor(Color.WHITE);
setCarouselPicker();
setAdapters();
return v;
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
if (scienceLab.isConnected()) {
new Thread(logicAnalysis).start();
}
carouselPicker.setOnPageChangeListener(new ViewPager.OnPageChangeListener() {
@Override
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
}
@Override
public void onPageSelected(int position) {
}
@Override
public void onPageScrollStateChanged(int state) {
if (state == 0) {
switch (carouselPicker.getCurrentItem()) {
case 0:
channelMode = 1;
setAdapters();
llChannel1.setVisibility(View.VISIBLE);
llChannel2.setVisibility(View.GONE);
llChannel3.setVisibility(View.GONE);
llChannel4.setVisibility(View.GONE);
channelSelectSpinner1.setEnabled(true);
break;
case 1:
channelMode = 2;
setAdapterForTwoChannelMode();
llChannel1.setVisibility(View.VISIBLE);
llChannel2.setVisibility(View.VISIBLE);
llChannel3.setVisibility(View.GONE);
llChannel4.setVisibility(View.GONE);
channelSelectSpinner1.setEnabled(true);
channelSelectSpinner2.setEnabled(true);
break;
case 2:
channelMode = 3;
setAdapters();
llChannel1.setVisibility(View.VISIBLE);
llChannel2.setVisibility(View.VISIBLE);
llChannel3.setVisibility(View.VISIBLE);
llChannel4.setVisibility(View.GONE);
channelSelectSpinner1.setSelection(0);
channelSelectSpinner2.setSelection(1);
channelSelectSpinner3.setSelection(2);
channelSelectSpinner1.setEnabled(false);
channelSelectSpinner2.setEnabled(false);
channelSelectSpinner3.setEnabled(false);
break;
case 3:
channelMode = 4;
setAdapters();
llChannel1.setVisibility(View.VISIBLE);
llChannel2.setVisibility(View.VISIBLE);
llChannel3.setVisibility(View.VISIBLE);
llChannel4.setVisibility(View.VISIBLE);
channelSelectSpinner1.setSelection(0);
channelSelectSpinner2.setSelection(1);
channelSelectSpinner3.setSelection(2);
channelSelectSpinner4.setSelection(3);
channelSelectSpinner1.setEnabled(false);
channelSelectSpinner2.setEnabled(false);
channelSelectSpinner3.setEnabled(false);
channelSelectSpinner4.setEnabled(false);
break;
default:
channelMode = 1;
setAdapters();
llChannel1.setVisibility(View.VISIBLE);
llChannel2.setVisibility(View.GONE);
llChannel3.setVisibility(View.GONE);
llChannel4.setVisibility(View.GONE);
channelSelectSpinner1.setEnabled(true);
break;
}
}
}
});
analyze_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (channelMode > 0) {
if (scienceLab.isConnected()) {
analyze_button.setClickable(false);
// Change all variables to default value
currentChannel = 0;
dataSets.clear();
digitalChannelArray.clear();
channelNames.clear();
edgesNames.clear();
logicLinesChart.clear();
logicLinesChart.invalidate();
switch (channelMode) {
case 1:
channelNames.add(channelSelectSpinner1.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner1.getSelectedItem().toString());
break;
case 2:
channelNames.add(channelSelectSpinner1.getSelectedItem().toString());
channelNames.add(channelSelectSpinner2.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner1.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner2.getSelectedItem().toString());
break;
case 3:
channelNames.add(channelSelectSpinner1.getSelectedItem().toString());
channelNames.add(channelSelectSpinner2.getSelectedItem().toString());
channelNames.add(channelSelectSpinner3.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner1.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner2.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner3.getSelectedItem().toString());
break;
case 4:
channelNames.add(channelSelectSpinner1.getSelectedItem().toString());
channelNames.add(channelSelectSpinner2.getSelectedItem().toString());
channelNames.add(channelSelectSpinner3.getSelectedItem().toString());
channelNames.add(channelSelectSpinner4.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner1.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner2.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner3.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner4.getSelectedItem().toString());
break;
default:
channelNames.add(channelSelectSpinner1.getSelectedItem().toString());
edgesNames.add(edgeSelectSpinner1.getSelectedItem().toString());
break;
}
Thread monitor;
switch (channelMode) {
case 1:
progressBar.setVisibility(View.VISIBLE);
((LogicalAnalyzerActivity) getActivity()).setStatus(true);
monitor = new Thread(new Runnable() {
@Override
public void run() {
captureOne = new CaptureOne();
captureOne.execute(channelNames.get(0), edgesNames.get(0));
synchronized (lock) {
try {
lock.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
});
monitor.start();
break;
case 2:
progressBar.setVisibility(View.VISIBLE);
((LogicalAnalyzerActivity) getActivity()).setStatus(true);
monitor = new Thread(new Runnable() {
@Override
public void run() {
captureTwo = new CaptureTwo();
ArrayList<String> channels = new ArrayList<>();
channels.add(channelNames.get(0));
channels.add(channelNames.get(1));
ArrayList<String> edges = new ArrayList<>();
edges.add(edgesNames.get(0));
edges.add(edgesNames.get(1));
captureTwo.execute(channels, edges);
synchronized (lock) {
try {
lock.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
});
monitor.start();
break;
case 3:
progressBar.setVisibility(View.VISIBLE);
((LogicalAnalyzerActivity) getActivity()).setStatus(true);
monitor = new Thread(new Runnable() {
@Override
public void run() {
captureThree = new CaptureThree();
ArrayList<String> channels = new ArrayList<>();
channels.add(channelNames.get(0));
channels.add(channelNames.get(1));
channels.add(channelNames.get(2));
ArrayList<String> edges = new ArrayList<>();
edges.add(edgesNames.get(0));
edges.add(edgesNames.get(1));
edges.add(edgesNames.get(2));
captureThree.execute(channels, edges);
synchronized (lock) {
try {
lock.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
});
monitor.start();
break;
case 4:
progressBar.setVisibility(View.VISIBLE);
((LogicalAnalyzerActivity) getActivity()).setStatus(true);
monitor = new Thread(new Runnable() {
@Override
public void run() {
captureFour = new CaptureFour();
ArrayList<String> channels = new ArrayList<>();
channels.add(channelNames.get(0));
channels.add(channelNames.get(1));
channels.add(channelNames.get(2));
channels.add(channelNames.get(3));
ArrayList<String> edges = new ArrayList<>();
edges.add(edgesNames.get(0));
edges.add(edgesNames.get(1));
edges.add(edgesNames.get(2));
edges.add(edgesNames.get(3));
captureFour.execute(channels, edges);
synchronized (lock) {
try {
lock.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
});
monitor.start();
break;
default:
Toast.makeText(getContext(), getResources().getString(R.string.needs_implementation), Toast.LENGTH_SHORT).show();
break;
}
// Setting cursor to display time at highlighted points
listener = new OnChartValueSelectedListener() {
@Override
public void onValueSelected(Entry e, Highlight h) {
double result = Math.round(e.getX() * 100.0) / 100.0;
xCoordinateText.setText("Time: " + DataFormatter.formatDouble(result, DataFormatter.LOW_PRECISION_FORMAT) + " mS");
}
@Override
public void onNothingSelected() {
}
};
logicLinesChart.setOnChartValueSelectedListener(listener);
} else
Toast.makeText(getContext(), getResources().getString(R.string.device_not_found), Toast.LENGTH_SHORT).show();
}
}
});
}
/**
* Plots every edge of a digital pulse for one channel at a time
*
* @param xData Data points fetched for X-axis
* @param yData Data points fetched for Y-axis
*/
private void singleChannelEveryEdge(double[] xData, double[] yData) {
tempInput = new ArrayList<>();
int[] temp = new int[xData.length];
int[] yAxis = new int[yData.length];
for (int i = 0; i < xData.length; i++) {
temp[i] = (int) xData[i];
yAxis[i] = (int) yData[i];
}
ArrayList<Integer> xaxis = new ArrayList<>();
ArrayList<Integer> yaxis = new ArrayList<>();
xaxis.add(temp[0]);
yaxis.add(yAxis[0]);
for (int i = 1; i < xData.length; i++) {
if (temp[i] != temp[i - 1]) {
xaxis.add(temp[i]);
yaxis.add(yAxis[i]);
}
}
// Add data to axis in actual graph
if (yaxis.size() > 1) {
if (yaxis.get(1).equals(yaxis.get(0)))
tempInput.add(new Entry(xaxis.get(0), yaxis.get(0) + 2 * currentChannel));
else {
tempInput.add(new Entry(xaxis.get(0), yaxis.get(0) + 2 * currentChannel));
tempInput.add(new Entry(xaxis.get(0), yaxis.get(1) + 2 * currentChannel));
}
for (int i = 1; i < xaxis.size() - 1; i++) {
if (yaxis.get(i).equals(yaxis.get(i + 1)))
tempInput.add(new Entry(xaxis.get(i), yaxis.get(i) + 2 * currentChannel));
else {
tempInput.add(new Entry(xaxis.get(i), yaxis.get(i) + 2 * currentChannel));
tempInput.add(new Entry(xaxis.get(i), yaxis.get(i + 1) + 2 * currentChannel));
}
tempInput.add(new Entry(xaxis.get(xaxis.size() - 1), yaxis.get(xaxis.size() - 1)));
}
} else {
tempInput.add(new Entry(xaxis.get(0), yaxis.get(0)));
}
setLineDataSet();
}
/**
* Plots every fourth rising edge of a digital pulse for one channel at a time
*
* @param xData Data points fetched for X-axis
*/
private void singleChannelFourthRisingEdge(double[] xData) {
tempInput = new ArrayList<>();
int xaxis = (int) xData[0];
tempInput.add(new Entry(xaxis, 0 + 2 * currentChannel));
tempInput.add(new Entry(xaxis, 1 + 2 * currentChannel));
tempInput.add(new Entry(xaxis, 0 + 2 * currentChannel));
int check = xaxis;
int count = 0;
if (xData.length > 1) {
for (int i = 1; i < xData.length; i++) {
xaxis = (int) xData[i];
if (xaxis != check) {
if (count == 3) {
tempInput.add(new Entry(xaxis, 0 + 2 * currentChannel));
tempInput.add(new Entry(xaxis, 1 + 2 * currentChannel));
tempInput.add(new Entry(xaxis, 0 + 2 * currentChannel));
count = 0;
} else
count++;
check = xaxis;
}
}
}
setLineDataSet();
}
/**
* Plots every rising edges of a digital pulse for one channel at a time
*
* @param xData Data points fetched for X-axis
* @param yData Data points fetched for Y-axis
*/
private void singleChannelRisingEdges(double[] xData, double[] yData) {
tempInput = new ArrayList<>();
for (int i = 1; i < xData.length; i += 6) {
tempInput.add(new Entry((int) xData[i], (int) yData[i] + 2 * currentChannel));
tempInput.add(new Entry((int) xData[i + 1], (int) yData[i + 1] + 2 * currentChannel));
tempInput.add(new Entry((int) xData[i + 2], (int) yData[i + 2] + 2 * currentChannel));
}
setLineDataSet();
}
/**
* Plots every falling edges of a digital pulse for one channel at a time
*
* @param xData Data points fetched for X-axis
* @param yData Data points fetched for Y-axis
*/
private void singleChannelFallingEdges(double[] xData, double[] yData) {
tempInput = new ArrayList<>();
for (int i = 4; i < xData.length; i += 6) {
tempInput.add(new Entry((int) xData[i], (int) yData[i] + 2 * currentChannel));
tempInput.add(new Entry((int) xData[i + 1], (int) yData[i + 1] + 2 * currentChannel));
tempInput.add(new Entry((int) xData[i + 2], (int) yData[i + 2] + 2 * currentChannel));
}
setLineDataSet();
}
/**
* Plots every data point fetched for a digital pulse (default case)
*
* @param xData Data points fetched for X-axis
* @param yData Data points fetched for Y-axis
*/
private void singleChannelOtherEdges(double[] xData, double[] yData) {
tempInput = new ArrayList<>();
for (int i = 0; i < xData.length; i++) {
int xaxis = (int) xData[i];
int yaxis = (int) yData[i];
tempInput.add(new Entry(xaxis, yaxis + 2 * currentChannel));
}
setLineDataSet();
}
/**
* Plot the entries available in tuple (X-axis, Y-axis) on the graph
*/
private void setLineDataSet() {
LineDataSet lineDataSet = new LineDataSet(tempInput, channelNames.get(currentChannel));
lineDataSet.setColor(colors[currentChannel]);
lineDataSet.setCircleRadius(1);
lineDataSet.setLineWidth(2);
lineDataSet.setCircleColor(Color.GREEN);
lineDataSet.setDrawValues(false);
lineDataSet.setDrawCircles(false);
lineDataSet.setHighLightColor(getResources().getColor(R.color.golden));
dataSets.add(lineDataSet);
}
/**
* Sets adapters to spinners for all modes except for TwoChannel Mode
*/
private void setAdapters() {
String[] channels = getResources().getStringArray(R.array.channel_choices);
String[] edges = getResources().getStringArray(R.array.edge_choices);
ArrayAdapter<String> channel_adapter = new ArrayAdapter<>(getContext(), R.layout.modified_spinner_dropdown_list, channels);
ArrayAdapter<String> edges_adapter = new ArrayAdapter<>(getContext(), R.layout.modified_spinner_dropdown_list, edges);
channelSelectSpinner1.setAdapter(channel_adapter);
channelSelectSpinner2.setAdapter(channel_adapter);
channelSelectSpinner3.setAdapter(channel_adapter);
channelSelectSpinner4.setAdapter(channel_adapter);
edgeSelectSpinner1.setAdapter(edges_adapter);
edgeSelectSpinner2.setAdapter(edges_adapter);
edgeSelectSpinner3.setAdapter(edges_adapter);
edgeSelectSpinner4.setAdapter(edges_adapter);
}
/**
* Sets adapters to spinners for TwoChannel Mode
*/
private void setAdapterForTwoChannelMode() {
final String[] channels = getResources().getStringArray(R.array.channel_choices);
final String[] edges = getResources().getStringArray(R.array.edge_choices);
final List<String> channel_one_list = new ArrayList<>( Arrays.asList(channels));
final List<String> channel_two_list = new ArrayList<>( Arrays.asList(channels));
final ArrayAdapter<String> channel_one_adapter = new ArrayAdapter<>(getContext(), R.layout.modified_spinner_dropdown_list, channel_one_list);
final ArrayAdapter<String> channel_two_adapter = new ArrayAdapter<>(getContext(), R.layout.modified_spinner_dropdown_list, channel_two_list);
ArrayAdapter<String> edges_adapter = new ArrayAdapter<>(getContext(), R.layout.modified_spinner_dropdown_list, edges);
channelSelectSpinner1.setAdapter(channel_one_adapter);
channelSelectSpinner2.setAdapter(channel_two_adapter);
edgeSelectSpinner1.setAdapter(edges_adapter);
edgeSelectSpinner2.setAdapter(edges_adapter);
channelSelectSpinner1.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
String selection = channelSelectSpinner1.getItemAtPosition(position).toString();
channel_two_list.clear();
for(int i = 0; i < channels.length; i++) {
if(!channels[i].equals(selection)) {
channel_two_list.add(channels[i]);
}
}
channel_two_adapter.notifyDataSetChanged();
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// No use
}
});
channelSelectSpinner2.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
String selection = channelSelectSpinner2.getItemAtPosition(position).toString();
channel_one_list.clear();
for(int i = 0; i < channels.length; i++) {
if(!channels[i].equals(selection)) {
channel_one_list.add(channels[i]);
}
}
channel_one_adapter.notifyDataSetChanged();
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// No use
}
});
}
/**
* Sets the text in Carousel Picker
*/
private void setCarouselPicker() {
// Calculation made for setting the text size in Carousel Picker for different screens
DisplayMetrics dm = new DisplayMetrics();
getActivity().getWindowManager().getDefaultDisplay().getMetrics(dm);
int width = dm.widthPixels;
int height = dm.heightPixels;
double wi = (double) width / (double) dm.xdpi;
double hi = (double) height / (double) dm.ydpi;
double x = Math.pow(wi, 2);
double y = Math.pow(hi, 2);
double screenInches = Math.sqrt(x + y) + 0.01;
int textsize;
if (screenInches < 5)
textsize = 11;
else
textsize = 9;
// Items for Carousel Picker
List<CarouselPicker.PickerItem> channelModes = new ArrayList<>();
channelModes.add(new CarouselPicker.TextItem("1", textsize));
channelModes.add(new CarouselPicker.TextItem("2", textsize));
channelModes.add(new CarouselPicker.TextItem("3", textsize));
channelModes.add(new CarouselPicker.TextItem("4", textsize));
CarouselPicker.CarouselViewAdapter channelAdapter = new CarouselPicker.CarouselViewAdapter(getContext(), channelModes, 0);
carouselPicker.setAdapter(channelAdapter);
carouselPicker.setCurrentItem(0);
}
@Override
public void onResume() {
super.onResume();
if (((AppCompatActivity) getActivity()).getSupportActionBar() != null)
((AppCompatActivity) getActivity()).getSupportActionBar().hide();
}
@Override
public void onStop() {
if (((AppCompatActivity) getActivity()).getSupportActionBar() != null)
((AppCompatActivity) getActivity()).getSupportActionBar().show();
super.onStop();
}
/**
* Used to delay a thread by some given time in milliseconds
* @param delay Time to delay in milliseconds
*/
public void delayThread(long delay) {
try {
Thread.sleep(delay);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private class CaptureOne extends AsyncTask<String, String, Void> {
private String edgeOption = "";
private boolean holder;
@Override
protected Void doInBackground(String... params) {
try {
int channelNumber = scienceLab.calculateDigitalChannel(params[0]);
digitalChannel = scienceLab.getDigitalChannel(channelNumber);
edgeOption = params[1];
switch (edgeOption) {
case "EVERY EDGE":
digitalChannel.mode = EVERY_EDGE;
break;
case "EVERY FALLING EDGE":
digitalChannel.mode = EVERY_FALLING_EDGE;
break;
case "EVERY RISING EDGE":
digitalChannel.mode = EVERY_RISING_EDGE;
break;
case "EVERY FOURTH RISING EDGE":
digitalChannel.mode = EVERY_FOURTH_RISING_EDGE;
break;
case "DISABLED":
digitalChannel.mode = DISABLED;
break;
default:
digitalChannel.mode = EVERY_EDGE;
}
scienceLab.startOneChannelLA(params[0], digitalChannel.mode, params[0], 3);
delayThread(1000);
LinkedHashMap<String, Integer> data = scienceLab.getLAInitialStates();
delayThread(500);
holder = scienceLab.fetchLAChannel(channelNumber, data);
} catch (NullPointerException e) {
cancel(true);
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
if (holder) {
double[] xaxis = digitalChannel.getXAxis();
double[] yaxis = digitalChannel.getYAxis();
StringBuilder stringBuilder1 = new StringBuilder();
StringBuilder stringBuilder2 = new StringBuilder();
for (int i = 0; i < xaxis.length; i++) {
stringBuilder1.append(DataFormatter.formatDouble(xaxis[i], DataFormatter.LOW_PRECISION_FORMAT));
stringBuilder2.append(DataFormatter.formatDouble(yaxis[i], DataFormatter.LOW_PRECISION_FORMAT));
stringBuilder1.append(" ");
stringBuilder2.append(" ");
}
Log.v("x Axis", stringBuilder1.toString());
Log.v("y Axis", stringBuilder2.toString());
// Plot the fetched data
switch (edgeOption) {
case "EVERY EDGE":
singleChannelEveryEdge(xaxis, yaxis);
break;
case "EVERY FOURTH RISING EDGE":
singleChannelFourthRisingEdge(xaxis);
break;
case "EVERY RISING EDGE":
singleChannelRisingEdges(xaxis, yaxis);
break;
case "EVERY FALLING EDGE":
singleChannelFallingEdges(xaxis, yaxis);
break;
default:
singleChannelOtherEdges(xaxis, yaxis);
break;
}
progressBar.setVisibility(View.GONE);
((LogicalAnalyzerActivity) getActivity()).setStatus(false);
logicLinesChart.setData(new LineData(dataSets));
logicLinesChart.notifyDataSetChanged();
logicLinesChart.invalidate();
YAxis left = logicLinesChart.getAxisLeft();
left.setValueFormatter(new LogicAnalyzerAxisFormatter(channelNames));
left.setTextColor(Color.WHITE);
left.setGranularity(1f);
left.setTextSize(12f);
logicLinesChart.getAxisRight().setDrawLabels(false);
logicLinesChart.getDescription().setEnabled(false);
logicLinesChart.setScaleYEnabled(false);
synchronized (lock) {
lock.notify();
}
} else {
progressBar.setVisibility(View.GONE);
((LogicalAnalyzerActivity) getActivity()).setStatus(false);
Toast.makeText(getContext(), getResources().getString(R.string.no_data_generated), Toast.LENGTH_SHORT).show();
analyze_button.setClickable(true);
}
analyze_button.setClickable(true);
}
}
    /**
     * Background task that captures logic analyzer data for two channels and
     * plots both traces on {@code logicLinesChart} when both fetches succeed.
     * execute() params: arrayLists[0] = channel names, arrayLists[1] = edge options.
     */
    private class CaptureTwo extends AsyncTask<ArrayList<String>, ArrayList<String>, Void> {
        // Edge option label per channel; reused to pick the plotting routine.
        private String[] edgeOption = new String[channelMode];
        // Per-channel fetch success flags set in doInBackground.
        private boolean holder1, holder2;
        @SafeVarargs
        @Override
        protected final Void doInBackground(ArrayList<String>... arrayLists) {
            try {
                int channelNumber1 = scienceLab.calculateDigitalChannel(arrayLists[0].get(0));
                int channelNumber2 = scienceLab.calculateDigitalChannel(arrayLists[0].get(1));
                digitalChannelArray.add(scienceLab.getDigitalChannel(channelNumber1));
                digitalChannelArray.add(scienceLab.getDigitalChannel(channelNumber2));
                edgeOption[0] = arrayLists[1].get(0);
                edgeOption[1] = arrayLists[1].get(1);
                // Translate each UI edge label into its capture-mode constant.
                ArrayList<Integer> modes = new ArrayList<>();
                for (int i = 0; i < channelMode; i++) {
                    switch (edgeOption[i]) {
                        case "EVERY EDGE":
                            digitalChannelArray.get(i).mode = EVERY_EDGE;
                            modes.add(EVERY_EDGE);
                            break;
                        case "EVERY FALLING EDGE":
                            digitalChannelArray.get(i).mode = EVERY_FALLING_EDGE;
                            modes.add(EVERY_FALLING_EDGE);
                            break;
                        case "EVERY RISING EDGE":
                            digitalChannelArray.get(i).mode = EVERY_RISING_EDGE;
                            modes.add(EVERY_RISING_EDGE);
                            break;
                        case "EVERY FOURTH RISING EDGE":
                            digitalChannelArray.get(i).mode = EVERY_FOURTH_RISING_EDGE;
                            modes.add(EVERY_FOURTH_RISING_EDGE);
                            break;
                        case "DISABLED":
                            digitalChannelArray.get(i).mode = DISABLED;
                            modes.add(DISABLED);
                            break;
                        default:
                            digitalChannelArray.get(i).mode = EVERY_EDGE;
                            modes.add(EVERY_EDGE);
                    }
                }
                // NOTE(review): 67 looks like a trigger/config argument — confirm
                // against the ScienceLab.startTwoChannelLA API.
                scienceLab.startTwoChannelLA(arrayLists[0], modes, 67, null, null, null);
                // Give the hardware time to capture before reading state back.
                delayThread(1000);
                LinkedHashMap<String, Integer> data = scienceLab.getLAInitialStates();
                delayThread(500);
                holder1 = scienceLab.fetchLAChannel(channelNumber1, data);
                delayThread(500);
                holder2 = scienceLab.fetchLAChannel(channelNumber2, data);
            } catch (NullPointerException e) {
                // Device went away mid-capture; abort the task.
                cancel(true);
            }
            return null;
        }
        @Override
        protected void onPostExecute(Void aVoid) {
            super.onPostExecute(aVoid);
            if (holder1 && holder2) {
                ArrayList<double[]> xaxis = new ArrayList<>();
                xaxis.add(digitalChannelArray.get(0).getXAxis());
                xaxis.add(digitalChannelArray.get(1).getXAxis());
                ArrayList<double[]> yaxis = new ArrayList<>();
                yaxis.add(digitalChannelArray.get(0).getYAxis());
                yaxis.add(digitalChannelArray.get(1).getYAxis());
                // Plot the fetched data
                for (int i = 0; i < channelMode; i++) {
                    switch (edgeOption[i]) {
                        case "EVERY EDGE":
                            singleChannelEveryEdge(xaxis.get(i), yaxis.get(i));
                            break;
                        case "EVERY FOURTH RISING EDGE":
                            singleChannelFourthRisingEdge(xaxis.get(i));
                            break;
                        case "EVERY RISING EDGE":
                            singleChannelRisingEdges(xaxis.get(i), yaxis.get(i));
                            break;
                        case "EVERY FALLING EDGE":
                            singleChannelFallingEdges(xaxis.get(i), yaxis.get(i));
                            break;
                        default:
                            singleChannelOtherEdges(xaxis.get(i), yaxis.get(i));
                            break;
                    }
                    // Each channel's trace is stacked above the previous one.
                    currentChannel++;
                }
                progressBar.setVisibility(View.GONE);
                ((LogicalAnalyzerActivity) getActivity()).setStatus(false);
                logicLinesChart.setData(new LineData(dataSets));
                logicLinesChart.invalidate();
                YAxis left = logicLinesChart.getAxisLeft();
                left.setValueFormatter(new LogicAnalyzerAxisFormatter(channelNames));
                left.setTextColor(Color.WHITE);
                left.setGranularity(1f);
                left.setTextSize(12f);
                logicLinesChart.getAxisRight().setDrawLabels(false);
                logicLinesChart.getDescription().setEnabled(false);
                logicLinesChart.setScaleYEnabled(false);
                // Wake the thread parked in the analyze click handler.
                synchronized (lock) {
                    lock.notify();
                }
            } else {
                progressBar.setVisibility(View.GONE);
                ((LogicalAnalyzerActivity) getActivity()).setStatus(false);
                Toast.makeText(getContext(), getResources().getString(R.string.no_data_generated), Toast.LENGTH_SHORT).show();
            }
            analyze_button.setClickable(true);
        }
    }
    /**
     * Background task that captures logic analyzer data for three channels and
     * plots all traces on {@code logicLinesChart} when every fetch succeeds.
     * execute() params: arrayLists[0] = channel names, arrayLists[1] = edge options.
     */
    private class CaptureThree extends AsyncTask<ArrayList<String>, ArrayList<String>, Void> {
        // Edge option label per channel; reused to pick the plotting routine.
        private String[] edgeOption = new String[channelMode];
        // Per-channel fetch success flags set in doInBackground.
        private boolean holder1, holder2, holder3;
        @SafeVarargs
        @Override
        protected final Void doInBackground(ArrayList<String>... arrayLists) {
            try {
                int channelNumber1 = scienceLab.calculateDigitalChannel(arrayLists[0].get(0));
                int channelNumber2 = scienceLab.calculateDigitalChannel(arrayLists[0].get(1));
                int channelNumber3 = scienceLab.calculateDigitalChannel(arrayLists[0].get(2));
                digitalChannelArray.add(scienceLab.getDigitalChannel(channelNumber1));
                digitalChannelArray.add(scienceLab.getDigitalChannel(channelNumber2));
                digitalChannelArray.add(scienceLab.getDigitalChannel(channelNumber3));
                edgeOption[0] = arrayLists[1].get(0);
                edgeOption[1] = arrayLists[1].get(1);
                edgeOption[2] = arrayLists[1].get(2);
                // Translate each UI edge label into its capture-mode constant.
                ArrayList<Integer> modes = new ArrayList<>();
                for (int i = 0; i < channelMode; i++) {
                    switch (edgeOption[i]) {
                        case "EVERY EDGE":
                            digitalChannelArray.get(i).mode = EVERY_EDGE;
                            modes.add(EVERY_EDGE);
                            break;
                        case "EVERY FALLING EDGE":
                            digitalChannelArray.get(i).mode = EVERY_FALLING_EDGE;
                            modes.add(EVERY_FALLING_EDGE);
                            break;
                        case "EVERY RISING EDGE":
                            digitalChannelArray.get(i).mode = EVERY_RISING_EDGE;
                            modes.add(EVERY_RISING_EDGE);
                            break;
                        case "EVERY FOURTH RISING EDGE":
                            digitalChannelArray.get(i).mode = EVERY_FOURTH_RISING_EDGE;
                            modes.add(EVERY_FOURTH_RISING_EDGE);
                            break;
                        case "DISABLED":
                            digitalChannelArray.get(i).mode = DISABLED;
                            modes.add(DISABLED);
                            break;
                        default:
                            digitalChannelArray.get(i).mode = EVERY_EDGE;
                            modes.add(EVERY_EDGE);
                    }
                }
                scienceLab.startThreeChannelLA(modes, null, null);
                // Give the hardware time to capture before reading state back.
                delayThread(1000);
                LinkedHashMap<String, Integer> data = scienceLab.getLAInitialStates();
                delayThread(500);
                holder1 = scienceLab.fetchLAChannel(channelNumber1, data);
                delayThread(500);
                holder2 = scienceLab.fetchLAChannel(channelNumber2, data);
                delayThread(500);
                holder3 = scienceLab.fetchLAChannel(channelNumber3, data);
            } catch (NullPointerException e) {
                // Device went away mid-capture; abort the task.
                cancel(true);
            }
            return null;
        }
        @Override
        protected void onPostExecute(Void aVoid) {
            super.onPostExecute(aVoid);
            if (holder1 && holder2 && holder3) {
                ArrayList<double[]> xaxis = new ArrayList<>();
                xaxis.add(digitalChannelArray.get(0).getXAxis());
                xaxis.add(digitalChannelArray.get(1).getXAxis());
                xaxis.add(digitalChannelArray.get(2).getXAxis());
                ArrayList<double[]> yaxis = new ArrayList<>();
                yaxis.add(digitalChannelArray.get(0).getYAxis());
                yaxis.add(digitalChannelArray.get(1).getYAxis());
                yaxis.add(digitalChannelArray.get(2).getYAxis());
                // Plot the fetched data
                for (int i = 0; i < channelMode; i++) {
                    switch (edgeOption[i]) {
                        case "EVERY EDGE":
                            singleChannelEveryEdge(xaxis.get(i), yaxis.get(i));
                            break;
                        case "EVERY FOURTH RISING EDGE":
                            singleChannelFourthRisingEdge(xaxis.get(i));
                            break;
                        case "EVERY RISING EDGE":
                            singleChannelRisingEdges(xaxis.get(i), yaxis.get(i));
                            break;
                        case "EVERY FALLING EDGE":
                            singleChannelFallingEdges(xaxis.get(i), yaxis.get(i));
                            break;
                        default:
                            singleChannelOtherEdges(xaxis.get(i), yaxis.get(i));
                            break;
                    }
                    // Each channel's trace is stacked above the previous one.
                    currentChannel++;
                }
                progressBar.setVisibility(View.GONE);
                ((LogicalAnalyzerActivity) getActivity()).setStatus(false);
                logicLinesChart.setData(new LineData(dataSets));
                logicLinesChart.invalidate();
                YAxis left = logicLinesChart.getAxisLeft();
                left.setValueFormatter(new LogicAnalyzerAxisFormatter(channelNames));
                left.setTextColor(Color.WHITE);
                left.setGranularity(1f);
                left.setTextSize(12f);
                logicLinesChart.getAxisRight().setDrawLabels(false);
                logicLinesChart.getDescription().setEnabled(false);
                logicLinesChart.setScaleYEnabled(false);
                // Wake the thread parked in the analyze click handler.
                synchronized (lock) {
                    lock.notify();
                }
            } else {
                progressBar.setVisibility(View.GONE);
                ((LogicalAnalyzerActivity) getActivity()).setStatus(false);
                Toast.makeText(getContext(), getResources().getString(R.string.no_data_generated), Toast.LENGTH_SHORT).show();
            }
            analyze_button.setClickable(true);
        }
    }
private class CaptureFour extends AsyncTask<ArrayList<String>, ArrayList<String>, Void> {
private String[] edgeOption = new String[channelMode];
private boolean holder1, holder2, holder3, holder4;
@Override
protected Void doInBackground(ArrayList<String>... arrayLists) {
try {
int channelNumber1 = scienceLab.calculateDigitalChannel(arrayLists[0].get(0));
int channelNumber2 = scienceLab.calculateDigitalChannel(arrayLists[0].get(1));
int channelNumber3 = scienceLab.calculateDigitalChannel(arrayLists[0].get(2));
int channelNumber4 = scienceLab.calculateDigitalChannel(arrayLists[0].get(3));
digitalChannelArray.add(scienceLab.getDigitalChannel(channelNumber1));
digitalChannelArray.add(scienceLab.getDigitalChannel(channelNumber2));
digitalChannelArray.add(scienceLab.getDigitalChannel(channelNumber3));
digitalChannelArray.add(scienceLab.getDigitalChannel(channelNumber4));
edgeOption[0] = arrayLists[1].get(0);
edgeOption[1] = arrayLists[1].get(1);
edgeOption[2] = arrayLists[1].get(2);
edgeOption[3] = arrayLists[1].get(3);
ArrayList<Integer> modes = new ArrayList<>();
for (int i = 0; i < channelMode; i++) {
switch (edgeOption[i]) {
case "EVERY EDGE":
digitalChannelArray.get(i).mode = EVERY_EDGE;
modes.add(EVERY_EDGE);
break;
case "EVERY FALLING EDGE":
digitalChannelArray.get(i).mode = EVERY_FALLING_EDGE;
modes.add(EVERY_FALLING_EDGE);
break;
case "EVERY RISING EDGE":
digitalChannelArray.get(i).mode = EVERY_RISING_EDGE;
modes.add(EVERY_RISING_EDGE);
break;
case "EVERY FOURTH RISING EDGE":
digitalChannelArray.get(i).mode = EVERY_FOURTH_RISING_EDGE;
modes.add(EVERY_FOURTH_RISING_EDGE);
break;
case "DISABLED":
digitalChannelArray.get(i).mode = DISABLED;
modes.add(DISABLED);
break;
default:
digitalChannelArray.get(i).mode = EVERY_EDGE;
modes.add(EVERY_EDGE);
}
}
ArrayList<Boolean> triggerChannel = new ArrayList<>();
triggerChannel.add(true);
triggerChannel.add(true);
triggerChannel.add(true);
scienceLab.startFourChannelLA(null, null, modes, null, triggerChannel);
delayThread(1000);
LinkedHashMap<String, Integer> data = scienceLab.getLAInitialStates();
delayThread(500);
holder1 = scienceLab.fetchLAChannel(channelNumber1, data);
delayThread(500);
holder2 = scienceLab.fetchLAChannel(channelNumber2, data);
delayThread(500);
holder3 = scienceLab.fetchLAChannel(channelNumber3, data);
delayThread(500);
holder4 = scienceLab.fetchLAChannel(channelNumber4, data);
} catch (NullPointerException e) {
cancel(true);
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
if (holder1 && holder2 && holder3 && holder4) {
ArrayList<double[]> xaxis = new ArrayList<>();
xaxis.add(digitalChannelArray.get(0).getXAxis());
xaxis.add(digitalChannelArray.get(1).getXAxis());
xaxis.add(digitalChannelArray.get(2).getXAxis());
xaxis.add(digitalChannelArray.get(3).getXAxis());
ArrayList<double[]> yaxis = new ArrayList<>();
yaxis.add(digitalChannelArray.get(0).getYAxis());
yaxis.add(digitalChannelArray.get(1).getYAxis());
yaxis.add(digitalChannelArray.get(2).getYAxis());
yaxis.add(digitalChannelArray.get(3).getYAxis());
// Plot the fetched data
for (int i = 0; i < channelMode; i++) {
switch (edgeOption[i]) {
case "EVERY EDGE":
singleChannelEveryEdge(xaxis.get(i), yaxis.get(i));
break;
case "EVERY FOURTH RISING EDGE":
singleChannelFourthRisingEdge(xaxis.get(i));
break;
case "EVERY RISING EDGE":
singleChannelRisingEdges(xaxis.get(i), yaxis.get(i));
break;
case "EVERY FALLING EDGE":
singleChannelFallingEdges(xaxis.get(i), yaxis.get(i));
break;
default:
singleChannelOtherEdges(xaxis.get(i), yaxis.get(i));
break;
}
currentChannel++;
}
progressBar.setVisibility(View.GONE);
((LogicalAnalyzerActivity) getActivity()).setStatus(false);
logicLinesChart.setData(new LineData(dataSets));
logicLinesChart.invalidate();
YAxis left = logicLinesChart.getAxisLeft();
left.setValueFormatter(new LogicAnalyzerAxisFormatter(channelNames));
left.setTextColor(Color.WHITE);
left.setGranularity(1f);
left.setTextSize(12f);
logicLinesChart.getAxisRight().setDrawLabels(false);
logicLinesChart.getDescription().setEnabled(false);
logicLinesChart.setScaleYEnabled(false);
synchronized (lock) {
lock.notify();
}
} else {
progressBar.setVisibility(View.GONE);
((LogicalAnalyzerActivity) getActivity()).setStatus(false);
Toast.makeText(getContext(), getResources().getString(R.string.no_data_generated), Toast.LENGTH_SHORT).show();
}
analyze_button.setClickable(true);
}
}
}
|
3e1e388df0731f4f2291ef12a8d5286392eab8a5 | 8,233 | java | Java | app/src/main/java/com/scatl/uestcbbs/util/CommonUtil.java | liuwenhaha/UestcBBS-MVP | 96f6546f2bf167751f58daffd9fe01084d80b3cf | [
"Apache-2.0"
] | 1 | 2020-02-17T11:06:00.000Z | 2020-02-17T11:06:00.000Z | app/src/main/java/com/scatl/uestcbbs/util/CommonUtil.java | liuwenhaha/UestcBBS-MVP | 96f6546f2bf167751f58daffd9fe01084d80b3cf | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/scatl/uestcbbs/util/CommonUtil.java | liuwenhaha/UestcBBS-MVP | 96f6546f2bf167751f58daffd9fe01084d80b3cf | [
"Apache-2.0"
] | null | null | null | 32.932 | 145 | 0.622009 | 12,791 | package com.scatl.uestcbbs.util;
import android.annotation.SuppressLint;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.Color;
import android.net.Uri;
import android.os.Build;
import android.text.TextUtils;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.TypedValue;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import android.widget.ImageView;
import android.widget.Toast;
import androidx.core.content.FileProvider;
import androidx.fragment.app.FragmentActivity;
import androidx.vectordrawable.graphics.drawable.VectorDrawableCompat;
import com.scatl.uestcbbs.callback.OnPermission;
import com.tbruyelle.rxpermissions2.Permission;
import com.tbruyelle.rxpermissions2.RxPermissions;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import io.reactivex.functions.Consumer;
/**
* author: sca_tl
* description:
* date: 2019/11/24 13:46
*/
public class CommonUtil {
public static int getAttrColor(Context context, int resId) {
TypedValue typedValue = new TypedValue();
context.getTheme().resolveAttribute(resId, typedValue, true);
return context.getColor(typedValue.resourceId);
}
/**
* author: TanLei
* description: 打开浏览器
*/
public static void openBrowser(Context context, String url) {
if (TextUtils.isEmpty(url)) {
ToastUtil.showToast(context, "链接为空");
} else {
try{
Intent intent= new Intent();
intent.setAction("android.intent.action.VIEW");
Uri content_url = Uri.parse(url);
intent.setData(content_url);
context.startActivity(intent);
} catch (ActivityNotFoundException e) {
e.getMessage();
}
}
}
/**
* author: sca_tl
* description: 安装软件
*/
public static void installApk(Context context, File apkFile) {
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.N){
Uri apkUri = FileProvider.getUriForFile(context, "com.scatl.uestcbbs.fileprovider", apkFile);
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
intent.setDataAndType(apkUri, "application/vnd.android.package-archive");
context.startActivity(intent);
} else {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.setDataAndType(Uri.fromFile(apkFile), "application/vnd.android.package-archive");
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
}
/**
* author: sca_tl
* description: 屏幕dp宽度
*/
public static int screenDpWidth(Context context) {
Resources resources = context.getResources();
DisplayMetrics dm = resources.getDisplayMetrics();
return px2dip(context, dm.widthPixels);
}
    /**
     * author: sca_tl
     * description: screen height in dp (the original comment wrongly said width)
     */
    public static int screenDpHeight(Context context) {
        Resources resources = context.getResources();
        DisplayMetrics dm = resources.getDisplayMetrics();
        return px2dip(context, dm.heightPixels);
    }
public static int dip2px(Context context, float dpValue) {
final float scale = context.getResources().getDisplayMetrics().density;
return (int) (dpValue * scale + 0.5f);
}
public static int px2dip(Context context, float pxValue) {
final float scale = context.getResources().getDisplayMetrics().density;
return (int) (pxValue / scale + 0.5f);
}
public static void hideSoftKeyboard(Context context, View view) {
InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
if (view != null && imm != null){
imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
}
}
public static void showSoftKeyboard(final Context context, final View view, int delayMs) {
if (view != null) {
view.postDelayed(() -> {
view.requestFocus();
InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
if (imm != null) { imm.showSoftInput(view, InputMethodManager.SHOW_IMPLICIT); }
}, delayMs);
}
}
/**
* author: sca_tl
* description: 获取某个int颜色更深(浅)的颜色
* @param offSet 偏移量,负代表颜色越深,反之颜色越浅
*/
public static int getOffsetColor(int offSet, int intColor) {
int blue = (intColor & 0xff) + offSet;
int green = ((intColor & 0x00ff00) >> 8) + offSet;
int red = ((intColor & 0xff0000) >> 16) + offSet;
return (blue <= 0 || green <= 0 || red <= 0) ? 0: Color.rgb(red, green, blue);
}
/**
* author: sca_tl
* description: 改变svg图片颜色
*/
public static void setVectorColor(Context context, ImageView imageView, int drawable, int color) {
VectorDrawableCompat vectorDrawableCompat = VectorDrawableCompat.create(context.getResources(), drawable, context.getTheme());
vectorDrawableCompat.setTint(context.getColor(color));
imageView.setImageDrawable(vectorDrawableCompat);
}
public static int getTranslucentColor(float f, int rgb) {
int blue = rgb & 0xff;
int green = (rgb & 0x00ff00) >> 8;
int red = (rgb & 0xff0000) >> 16;
Log.e("fffffffff", blue + "==" + green + "==" + red);
int alpha = rgb >>> 24;
alpha = Math.round(alpha * f);
return Color.argb(alpha, red, green, blue);
}
/**
* author: sca_tl
* description: obj转为list,主要是避免编辑器的警告
*/
public static <T> List<T> objCastList(Object obj, Class<T> clazz) {
List<T> result = new ArrayList<>();
if(obj instanceof List<?>) {
for (Object o : (List<?>) obj) {
result.add(clazz.cast(o));
}
return result;
}
return null;
}
    /**
     * Returns this app's own versionCode, or 0 when the package manager cannot
     * resolve the app's package (which should not happen for the running app;
     * the exception is logged and swallowed deliberately).
     *
     * NOTE(review): PackageInfo.versionCode is deprecated since API 28 in favour
     * of getLongVersionCode(); fine while minSdk stays below 28 — confirm.
     */
    public static int getVersionCode(Context context) {
        try {
            return context.getPackageManager().getPackageInfo(context.getPackageName(), 0).versionCode;
        } catch (PackageManager.NameNotFoundException e) {
            e.printStackTrace();
        }
        // Fallback when the lookup failed.
        return 0;
    }
public static String getVersionName(Context context) {
try {
return context.getPackageManager().getPackageInfo(context.getPackageName(), 0).versionName;
} catch (PackageManager.NameNotFoundException e) {
e.printStackTrace();
}
return "";
}
    /**
     * Requests the given runtime permissions one at a time via RxPermissions
     * and reports each result through the {@link OnPermission} callback:
     * granted, refused (rationale should be shown), or refused with
     * "Don't ask again" checked.
     */
    @SuppressLint("CheckResult")
    public static void requestPermission(final FragmentActivity fragmentActivity, final OnPermission onPermission, final String... permissions) {
        new RxPermissions(fragmentActivity)
                .requestEach(permissions)
                .subscribe(permission -> {
                    if (permission.granted) {
                        onPermission.onGranted();
                    } else if (permission.shouldShowRequestPermissionRationale) {
                        // Denied, but the user may still be asked again.
                        onPermission.onRefused();
                    } else {
                        // The user checked "Don't ask again".
                        onPermission.onRefusedWithNoMoreRequest();
                    }
                });
    }
/**
* author: sca_tl
* description: toString后的List<String>还原成原来的list
*/
public static List<String> toList(String toStringList){
List<String> list = new ArrayList<>();
if (!toStringList.contains(",")) {
return list;
} else {
String[] b = toStringList.substring(1, toStringList.length() - 1).split(",");
for (String temp : b){
list.add(temp.replace(" ", ""));
}
return list;
}
}
}
|
3e1e39d675c4f706d71135c0a5d051f7b7bfaae9 | 1,374 | java | Java | sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/DoubleExemplarData.java | jack-berg/opentelemetry-java | 02ef53b1095c29d4969576ec0a4ddc1ba425874c | [
"Apache-2.0"
] | 1,189 | 2019-05-01T19:16:45.000Z | 2022-03-31T09:42:31.000Z | sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/DoubleExemplarData.java | jack-berg/opentelemetry-java | 02ef53b1095c29d4969576ec0a4ddc1ba425874c | [
"Apache-2.0"
] | 3,239 | 2019-05-01T20:08:06.000Z | 2022-03-31T23:12:43.000Z | sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/data/DoubleExemplarData.java | kittylyst/opentelemetry-java | f171884ed5f7c5135aca768208c396813382c69a | [
"Apache-2.0"
] | 548 | 2019-05-01T19:16:41.000Z | 2022-03-31T05:47:18.000Z | 28.625 | 99 | 0.729258 | 12,792 | /*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.data;
import com.google.auto.value.AutoValue;
import io.opentelemetry.api.common.Attributes;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
/** An {@link ExemplarData} with {@code double} measurements. */
@Immutable
@AutoValue
public abstract class DoubleExemplarData implements ExemplarData {
  /**
   * Construct a new exemplar.
   *
   * @param filteredAttributes The set of {@link Attributes} not already associated with the {@link
   *     PointData}.
   * @param recordTimeNanos The time when the sample was recorded in nanoseconds.
   * @param spanId (optional) The associated SpanId.
   * @param traceId (optional) The associated TraceId.
   * @param value The value recorded.
   */
  public static DoubleExemplarData create(
      Attributes filteredAttributes,
      long recordTimeNanos,
      @Nullable String spanId,
      @Nullable String traceId,
      double value) {
    return new AutoValue_DoubleExemplarData(
        filteredAttributes, recordTimeNanos, spanId, traceId, value);
  }

  // Package-private constructor: instances are created only via the generated
  // AutoValue subclass returned by create().
  DoubleExemplarData() {}

  /** Numerical value of the measurement that was recorded. */
  public abstract double getValue();

  @Override
  public final double getValueAsDouble() {
    return getValue();
  }
}
|
3e1e3a37a5020bb7d5db71df0c8bb00734599796 | 17,055 | java | Java | src/test/java/com/box/sdk/BoxAPIConnectionTest.java | AngGrey/boxjavasdk | 0630f6dc378e4cebca788ba5cfcf2faf17b4f54a | [
"Apache-2.0"
] | null | null | null | src/test/java/com/box/sdk/BoxAPIConnectionTest.java | AngGrey/boxjavasdk | 0630f6dc378e4cebca788ba5cfcf2faf17b4f54a | [
"Apache-2.0"
] | null | null | null | src/test/java/com/box/sdk/BoxAPIConnectionTest.java | AngGrey/boxjavasdk | 0630f6dc378e4cebca788ba5cfcf2faf17b4f54a | [
"Apache-2.0"
] | null | null | null | 38.334831 | 119 | 0.674541 | 12,793 | package com.box.sdk;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.eclipsesource.json.JsonObject;
import com.github.tomakehurst.wiremock.client.WireMock;
import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.client.WireMock.post;
import static com.github.tomakehurst.wiremock.client.WireMock.stubFor;
import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
/**
 * Tests for {@link BoxAPIConnection}: token refresh, save/restore,
 * authorization-URL building, token revocation, scoped tokens, and
 * Developer Edition (JWT) app auth.
 */
public class BoxAPIConnectionTest {

    /**
     * Wiremock rule used by the unit tests that stub Box endpoints locally.
     */
    @Rule
    public final WireMockRule wireMockRule = new WireMockRule(53620);

    @Test
    @Category(UnitTest.class)
    public void canRefreshWhenGivenRefreshToken() {
        final String anyClientID = "";
        final String anyClientSecret = "";
        final String anyAccessToken = "";
        final String anyRefreshToken = "";

        BoxAPIConnection api = new BoxAPIConnection(anyClientID, anyClientSecret, anyAccessToken, anyRefreshToken);

        assertThat(api.canRefresh(), is(true));
    }

    @Test
    @Category(UnitTest.class)
    public void needsRefreshWhenTokenHasExpired() {
        final String anyAccessToken = "";

        BoxAPIConnection api = new BoxAPIConnection(anyAccessToken);
        api.setExpires(-1);

        assertThat(api.needsRefresh(), is(true));
    }

    @Test
    @Category(UnitTest.class)
    public void doesNotNeedRefreshWhenTokenHasNotExpired() {
        final String anyAccessToken = "";

        BoxAPIConnection api = new BoxAPIConnection(anyAccessToken);
        api.setExpires(Long.MAX_VALUE);

        assertThat(api.needsRefresh(), is(not(true)));
    }

    @Test
    @Category(UnitTest.class)
    public void needsRefreshWhenExpiresIsZero() {
        // expires == 0 means "expiry unknown", which the connection treats as expired.
        final String anyAccessToken = "";

        BoxAPIConnection api = new BoxAPIConnection(anyAccessToken);
        api.setExpires(0);

        assertThat(api.needsRefresh(), is(true));
    }

    @Test
    @Category(UnitTest.class)
    public void interceptorReceivesSentRequest() throws MalformedURLException {
        BoxAPIConnection api = new BoxAPIConnection("");

        BoxAPIResponse fakeResponse = new BoxAPIResponse();

        RequestInterceptor mockInterceptor = mock(RequestInterceptor.class);
        when(mockInterceptor.onRequest(any(BoxAPIRequest.class))).thenReturn(fakeResponse);
        api.setRequestInterceptor(mockInterceptor);

        BoxAPIRequest request = new BoxAPIRequest(api, new URL("http://anyurl.com"), "GET");
        BoxAPIResponse response = request.send();

        assertThat(response, is(equalTo(fakeResponse)));
    }

    @Test
    @Category(UnitTest.class)
    public void restoreConnectionThatDoesNotNeedRefresh() {
        BoxAPIConnection api = new BoxAPIConnection("fake client ID", "fake client secret", "fake access token",
                "fake refresh token");
        api.setExpires(3600000L);
        api.setLastRefresh(System.currentTimeMillis());
        String state = api.save();

        final BoxAPIConnection restoredAPI = BoxAPIConnection.restore("fake client ID", "fake client secret", state);
        // The interceptor fails the test if the restored connection tries to
        // refresh (hits the token URL) instead of serving the folder request.
        restoredAPI.setRequestInterceptor(new RequestInterceptor() {
            @Override
            public BoxAPIResponse onRequest(BoxAPIRequest request) {
                String tokenURLString = restoredAPI.getTokenURL().toString();
                String requestURLString = request.getUrl().toString();
                if (requestURLString.contains(tokenURLString)) {
                    fail("The connection was refreshed.");
                }

                if (requestURLString.contains("folders")) {
                    return new BoxJSONResponse() {
                        @Override
                        public String getJSON() {
                            JsonObject responseJSON = new JsonObject()
                                    .add("id", "fake ID")
                                    .add("type", "folder");
                            return responseJSON.toString();
                        }
                    };
                }

                fail("Unexpected request.");
                return null;
            }
        });

        assertFalse(restoredAPI.needsRefresh());
    }

    @Test
    @Category(UnitTest.class)
    public void getAuthorizationURLSuccess() throws Exception {
        List<String> scopes = new ArrayList<String>();
        scopes.add("root_readwrite");
        scopes.add("manage_groups");

        URL authURL = BoxAPIConnection.getAuthorizationURL("wncmz88sacf5oyaxf502dybcruqbzzy0",
                new URI("http://localhost:3000"), "test", scopes);

        Assert.assertTrue(authURL.toString().startsWith("https://account.box.com/api/oauth2/authorize"));

        StringTokenizer tokenizer = new StringTokenizer(authURL.getQuery(), "&");
        while (tokenizer.hasMoreTokens()) {
            String token = tokenizer.nextToken();
            if (token.startsWith("client_id")) {
                // Fixed: the expected value must be the full "client_id=<id>"
                // pair using the ID passed to getAuthorizationURL above; the
                // previous assertion compared against a bare, unrelated ID and
                // could never pass. Also: assertEquals takes (expected, actual).
                Assert.assertEquals("client_id=wncmz88sacf5oyaxf502dybcruqbzzy0", token);
            } else if (token.startsWith("response_type")) {
                Assert.assertEquals("response_type=code", token);
            } else if (token.startsWith("redirect_uri")) {
                Assert.assertEquals("redirect_uri=http%3A%2F%2Flocalhost%3A3000", token);
            } else if (token.startsWith("state")) {
                Assert.assertEquals("state=test", token);
            } else if (token.startsWith("scope")) {
                Assert.assertEquals("scope=root_readwrite+manage_groups", token);
            }
        }
    }

    @Test
    @Category(IntegrationTest.class)
    public void requestIsSentNormallyWhenInterceptorReturnsNullResponse() throws MalformedURLException {
        BoxAPIConnection api = new BoxAPIConnection("");

        RequestInterceptor mockInterceptor = mock(RequestInterceptor.class);
        when(mockInterceptor.onRequest(any(BoxAPIRequest.class))).thenReturn(null);
        api.setRequestInterceptor(mockInterceptor);

        BoxAPIRequest request = new BoxAPIRequest(api, new URL("http://box.com"), "GET");
        BoxAPIResponse response = request.send();

        assertThat(response.getResponseCode(), is(200));
    }

    @Test
    @Category(IntegrationTest.class)
    public void refreshSucceeds() {
        final String originalAccessToken = TestConfig.getAccessToken();
        final String originalRefreshToken = TestConfig.getRefreshToken();
        BoxAPIConnection api = new BoxAPIConnection(TestConfig.getClientID(), TestConfig.getClientSecret(),
                originalAccessToken, originalRefreshToken);

        api.refresh();

        String actualAccessToken = api.getAccessToken();
        String actualRefreshToken = api.getRefreshToken();

        assertThat(originalRefreshToken, not(equalTo(actualRefreshToken)));
        assertThat(originalAccessToken, not(equalTo(actualAccessToken)));

        // Persist the rotated tokens so subsequent integration tests can run.
        TestConfig.setAccessToken(actualAccessToken);
        TestConfig.setRefreshToken(actualRefreshToken);
    }

    @Test
    @Category(IntegrationTest.class)
    public void refreshesWhenGetAccessTokenIsCalledAndTokenHasExpired() {
        final String originalAccessToken = TestConfig.getAccessToken();
        final String originalRefreshToken = TestConfig.getRefreshToken();
        BoxAPIConnection api = new BoxAPIConnection(TestConfig.getClientID(), TestConfig.getClientSecret(),
                originalAccessToken, originalRefreshToken);
        api.setExpires(-1);

        String actualAccessToken = api.getAccessToken();
        String actualRefreshToken = api.getRefreshToken();

        assertThat(originalRefreshToken, not(equalTo(actualRefreshToken)));
        assertThat(originalAccessToken, not(equalTo(actualAccessToken)));

        TestConfig.setAccessToken(actualAccessToken);
        TestConfig.setRefreshToken(actualRefreshToken);
    }

    @Test
    @Category(IntegrationTest.class)
    public void doesNotRefreshWhenGetAccessTokenIsCalledAndTokenHasNotExpired() {
        final String originalAccessToken = TestConfig.getAccessToken();
        final String originalRefreshToken = TestConfig.getRefreshToken();
        BoxAPIConnection api = new BoxAPIConnection(TestConfig.getClientID(), TestConfig.getClientSecret(),
                originalAccessToken, originalRefreshToken);
        api.setExpires(Long.MAX_VALUE);

        String actualAccessToken = api.getAccessToken();
        String actualRefreshToken = api.getRefreshToken();

        assertThat(originalRefreshToken, equalTo(actualRefreshToken));
        assertThat(originalAccessToken, equalTo(actualAccessToken));

        TestConfig.setAccessToken(actualAccessToken);
        TestConfig.setRefreshToken(actualRefreshToken);
    }

    @Test
    @Category(IntegrationTest.class)
    public void successfullySavesAndRestoresConnection() {
        final String originalAccessToken = TestConfig.getAccessToken();
        final String originalRefreshToken = TestConfig.getRefreshToken();
        BoxAPIConnection api = new BoxAPIConnection(TestConfig.getClientID(), TestConfig.getClientSecret(),
                originalAccessToken, originalRefreshToken);
        String state = api.save();

        BoxAPIConnection restoredAPI = BoxAPIConnection.restore(TestConfig.getClientID(), TestConfig.getClientSecret(),
                state);
        // Fixed: actually assert on the fetched info instead of leaving it unused.
        BoxFolder.Info rootFolderInfo = BoxFolder.getRootFolder(restoredAPI).getInfo();
        assertThat(rootFolderInfo, is(notNullValue()));

        TestConfig.setAccessToken(restoredAPI.getAccessToken());
        TestConfig.setRefreshToken(restoredAPI.getRefreshToken());
    }

    @Test
    @Category(IntegrationTest.class)
    public void revokeToken() {
        String accessToken = TestConfig.getAccessToken();
        String clientID = TestConfig.getClientID();
        String clientSecret = TestConfig.getClientSecret();

        BoxAPIConnection api = new BoxAPIConnection(clientID, clientSecret, accessToken, "");
        BoxFolder.getRootFolder(api);

        api.revokeToken();

        try {
            BoxFolder.getRootFolder(api);
            // Fixed: without this, the test silently passed when the revoked
            // token kept working (no exception thrown).
            fail("Expected a 401 because the token was revoked.");
        } catch (BoxAPIException ex) {
            assertEquals(401, ex.getResponseCode());
        }
    }

    @Test
    @Category(UnitTest.class)
    public void revokeTokenCallsCorrectEndpoint() {
        String accessToken = "fakeAccessToken";
        String clientID = "fakeID";
        String clientSecret = "fakeSecret";

        BoxAPIConnection api = new BoxAPIConnection(clientID, clientSecret, accessToken, "");
        api.setRevokeURL("http://localhost:53620/oauth2/revoke");

        stubFor(post(urlPathEqualTo("/oauth2/revoke"))
                .withRequestBody(WireMock.equalTo("token=fakeAccessToken&client_id=fakeID&client_secret=fakeSecret"))
                .willReturn(aResponse()
                        .withHeader("Content-Type", "application/json")
                        .withBody("")));

        api.revokeToken();
    }

    @Test
    @Category(IntegrationTestJWT.class)
    public void developerEditionAppAuthWorks() throws IOException {
        // Fixed: the reader was previously never closed.
        BoxConfig boxConfig;
        try (Reader reader = new FileReader("src/test/config/config.json")) {
            boxConfig = BoxConfig.readFrom(reader);
        }
        IAccessTokenCache accessTokenCache = new InMemoryLRUAccessTokenCache(100);

        BoxDeveloperEditionAPIConnection api =
                BoxDeveloperEditionAPIConnection.getAppEnterpriseConnection(boxConfig, accessTokenCache);

        assertThat(api.getAccessToken(), not(equalTo(null)));

        final String name = "app user name";
        final String externalAppUserId = "ychag@example.com";
        CreateUserParams params = new CreateUserParams();
        params.setExternalAppUserId(externalAppUserId);

        BoxUser appUser = null;
        try {
            BoxUser.Info createdUserInfo = BoxUser.createAppUser(api, name, params);
            final String appUserId = createdUserInfo.getID();

            assertThat(createdUserInfo.getID(), not(equalTo(null)));
            assertThat(createdUserInfo.getName(), equalTo(name));

            appUser = new BoxUser(api, appUserId);
            assertEquals(externalAppUserId,
                    appUser.getInfo(BoxUser.ALL_FIELDS).getExternalAppUserId());

            //Testing update works
            final String newName = "app user updated name";
            final String updatedExternalAppUserId = "lyhxr@example.com";

            createdUserInfo.setName(newName);
            createdUserInfo.setExternalAppUserId(updatedExternalAppUserId);
            appUser.updateInfo(createdUserInfo);

            assertThat(createdUserInfo.getName(), equalTo(newName));
            assertEquals(updatedExternalAppUserId,
                    createdUserInfo.getResource().getInfo("external_app_user_id").getExternalAppUserId());

            //Testing getAppUsers works
            Iterable<BoxUser.Info> users = BoxUser.getAppUsersByExternalAppUserID(api,
                    updatedExternalAppUserId, "external_app_user_id");
            for (BoxUser.Info userInfo : users) {
                assertEquals(updatedExternalAppUserId, userInfo.getExternalAppUserId());
            }
        } finally {
            // Fixed: appUser stays null when createAppUser throws; the
            // unconditional delete() then raised an NPE masking the real failure.
            if (appUser != null) {
                appUser.delete(false, true);
            }
        }

        api.refresh();
    }

    @Test
    @Category(IntegrationTestJWT.class)
    public void developerEditionAppUserWorks() throws IOException {
        // Fixed: the reader was previously never closed.
        BoxConfig boxConfig;
        try (Reader reader = new FileReader("src/test/config/config.json")) {
            boxConfig = BoxConfig.readFrom(reader);
        }
        IAccessTokenCache accessTokenCache = new InMemoryLRUAccessTokenCache(100);

        BoxDeveloperEditionAPIConnection appAuthConnection =
                BoxDeveloperEditionAPIConnection.getAppEnterpriseConnection(boxConfig, accessTokenCache);

        final String name = "app user name two";
        BoxUser.Info createdUserInfo = BoxUser.createAppUser(appAuthConnection, name);
        final String appUserId = createdUserInfo.getID();

        BoxDeveloperEditionAPIConnection api = BoxDeveloperEditionAPIConnection.getAppUserConnection(appUserId,
                boxConfig, accessTokenCache);
        BoxUser appUser = new BoxUser(api, appUserId);

        assertThat(api.getAccessToken(), not(equalTo(null)));

        BoxUser.Info info = appUser.getInfo();

        assertThat(info.getID(), equalTo(appUserId));
        assertThat(info.getName(), equalTo(name));

        api.refresh();

        BoxUser appUserFromAdmin = new BoxUser(appAuthConnection, appUserId);
        appUserFromAdmin.delete(false, true);
    }

    @Test
    @Category(UnitTest.class)
    public void getLowerScopedTokenRefreshesTheTokenIfNeededbyCallingGetAccessToken() {
        BoxAPIConnection api = mock(BoxAPIConnection.class);

        List<String> scopes = new ArrayList<String>();
        scopes.add("DummyScope");
        String resource = "";

        when(api.getTokenURL()).thenReturn("https://api.box.com/oauth2/token");
        when(api.getLowerScopedToken(scopes, resource)).thenCallRealMethod();
        try {
            api.getLowerScopedToken(scopes, resource);
        } catch (RuntimeException e) {
            // Ignored on purpose: only the getAccessToken() interaction matters here.
        }
        verify(api).getAccessToken();
    }

    @Test
    @Category(UnitTest.class)
    public void getLowerScopedTokenWithNullResource() {
        BoxAPIConnection api = mock(BoxAPIConnection.class);

        List<String> scopes = new ArrayList<String>();
        scopes.add("DummyScope");
        String resource = null;

        when(api.getTokenURL()).thenReturn("https://api.box.com/oauth2/token");
        when(api.getLowerScopedToken(scopes, resource)).thenCallRealMethod();
        try {
            api.getLowerScopedToken(scopes, resource);
        } catch (RuntimeException e) {
            // Ignored on purpose: only the getAccessToken() interaction matters here.
        }
        verify(api).getAccessToken();
    }

    @Test
    @Category(IntegrationTest.class)
    public void getLowerScopedTokenWorks() {
        final String originalAccessToken = TestConfig.getAccessToken();
        BoxAPIConnection api = new BoxAPIConnection(originalAccessToken);

        String resource = "https://api.box.com/2.0/files/135906984991";
        List<String> scopes = new ArrayList<String>();
        scopes.add("item_preview");
        scopes.add("item_content_upload");

        ScopedToken token = api.getLowerScopedToken(scopes, resource);
        assertThat(token, notNullValue());
        assertThat(token.getAccessToken(), notNullValue());
    }
}
|
3e1e3a806b82ab92ce2d72a4c443cb65d482a161 | 1,813 | java | Java | src/main/java/com/generation/energym/security/BasicSecurityConfig.java | MatheusMMonteiro/Energym | fd79632f0a9e59ccb3dfd9e27a4d69e88dda2fda | [
"MIT"
] | 4 | 2021-09-23T13:46:10.000Z | 2021-11-14T18:33:29.000Z | src/main/java/com/generation/energym/security/BasicSecurityConfig.java | MatheusMMonteiro/Energym | fd79632f0a9e59ccb3dfd9e27a4d69e88dda2fda | [
"MIT"
] | null | null | null | src/main/java/com/generation/energym/security/BasicSecurityConfig.java | MatheusMMonteiro/Energym | fd79632f0a9e59ccb3dfd9e27a4d69e88dda2fda | [
"MIT"
] | 7 | 2021-09-23T14:19:16.000Z | 2021-12-02T22:42:23.000Z | 38.574468 | 107 | 0.825703 | 12,794 | package com.generation.energym.security;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.http.HttpMethod;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
@EnableWebSecurity
public class BasicSecurityConfig extends WebSecurityConfigurerAdapter {
@Autowired
private UserDetailsService userdetailsService;
@Override
protected void configure(AuthenticationManagerBuilder auth) throws Exception {
auth.userDetailsService(userdetailsService);
auth.inMemoryAuthentication().withUser("root").password(passwordEncoder().encode("root"))
.authorities("ROLE_USER");
}
@Bean
public PasswordEncoder passwordEncoder() {
return new BCryptPasswordEncoder();
}
@Override
protected void configure(HttpSecurity http) throws Exception {
http.authorizeRequests()
.antMatchers("/usuarios/logar").permitAll()
.antMatchers("/usuarios/cadastrar").permitAll()
.antMatchers(HttpMethod.OPTIONS).permitAll()
.anyRequest().authenticated()
.and().httpBasic()
.and().sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS)
.and().cors()
.and().csrf().disable();
}
} |
3e1e3bb0da6f3824d8cfe76b676c3f320899779a | 6,249 | java | Java | server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java | korpx-z/OpenSearch | dd9622fec64805141848537c0cf5265e8650ffb9 | [
"Apache-2.0"
] | 4,822 | 2021-04-12T15:03:57.000Z | 2022-03-31T23:37:49.000Z | server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java | korpx-z/OpenSearch | dd9622fec64805141848537c0cf5265e8650ffb9 | [
"Apache-2.0"
] | 2,146 | 2021-04-12T14:10:48.000Z | 2022-03-31T23:58:34.000Z | server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java | dlvenable/OpenSearch | 17e7a4962da9e85f30c16f552dafea00fd921649 | [
"Apache-2.0"
] | 593 | 2021-04-12T16:26:05.000Z | 2022-03-31T11:33:45.000Z | 46.634328 | 139 | 0.706673 | 12,795 | /*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.search.aggregations.metrics;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.NumericUtils;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.NumberFieldMapper;
import org.opensearch.search.aggregations.AggregationBuilder;
import org.opensearch.search.aggregations.AggregatorTestCase;
import org.opensearch.search.aggregations.support.AggregationInspectionHelper;
import org.opensearch.search.aggregations.support.CoreValuesSourceType;
import org.opensearch.search.aggregations.support.ValuesSourceType;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
/**
 * Tests for the HDR-histogram implementation of the percentile_ranks aggregation.
 */
public class HDRPercentileRanksAggregatorTests extends AggregatorTestCase {

    // Default builder used by AggregatorTestCase's shared type-compatibility tests.
    @Override
    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
        return new PercentileRanksAggregationBuilder("hdr_ranks", new double[] { 0.1, 0.5, 12 }).field(fieldName)
            .percentilesConfig(new PercentilesConfig.Hdr());
    }

    // Field types the aggregation accepts; everything else must be rejected.
    @Override
    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
        return Arrays.asList(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN);
    }

    // An empty index yields NaN percents and an aggregation flagged as "no value".
    public void testEmpty() throws IOException {
        PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.5 }).field("field")
            .method(PercentilesMethod.HDR);
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
        try (IndexReader reader = new MultiReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
            Percentile rank = ranks.iterator().next();
            assertEquals(Double.NaN, rank.getPercent(), 0d);
            assertEquals(0.5, rank.getValue(), 0d);
            assertFalse(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks) ranks));
        }
    }

    // Three docs (0.2, 3, 10): rank of 0.1 is 0%, 0.5 falls strictly between
    // 0% and 100%, and 12 (above the max) ranks at 100%.
    public void testSimple() throws IOException {
        try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
            for (double value : new double[] { 3, 0.2, 10 }) {
                Document doc = new Document();
                // Doubles are stored as sortable longs in doc values.
                doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
                w.addDocument(doc);
            }

            PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 })
                .field("field")
                .method(PercentilesMethod.HDR);
            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
            try (IndexReader reader = w.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);

                Iterator<Percentile> rankIterator = ranks.iterator();
                Percentile rank = rankIterator.next();
                assertEquals(0.1, rank.getValue(), 0d);
                assertThat(rank.getPercent(), Matchers.equalTo(0d));
                rank = rankIterator.next();
                assertEquals(0.5, rank.getValue(), 0d);
                assertThat(rank.getPercent(), Matchers.greaterThan(0d));
                assertThat(rank.getPercent(), Matchers.lessThan(100d));
                rank = rankIterator.next();
                assertEquals(12, rank.getValue(), 0d);
                assertThat(rank.getPercent(), Matchers.equalTo(100d));
                assertFalse(rankIterator.hasNext());
                assertTrue(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks) ranks));
            }
        }
    }

    // The builder must reject a null values array at construction time.
    public void testNullValues() throws IOException {
        IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> new PercentileRanksAggregationBuilder("my_agg", null).field("field").method(PercentilesMethod.HDR)
        );
        assertThat(e.getMessage(), Matchers.equalTo("[values] must not be null: [my_agg]"));
    }

    // The builder must reject an empty values array at construction time.
    public void testEmptyValues() throws IOException {
        IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> new PercentileRanksAggregationBuilder("my_agg", new double[0]).field("field").method(PercentilesMethod.HDR)
        );
        assertThat(e.getMessage(), Matchers.equalTo("[values] must not be an empty array: [my_agg]"));
    }
}
|
3e1e3bc5e6c1b7c87bf3e3fd97a01298e285a62f | 430 | java | Java | common/src/main/java/com/zhongjh/common/utils/MimeTypeUtils.java | FrankLove/AlbumCameraRecorder | 50542238d2ea1191d39fa4c2b48e534b163377bf | [
"MIT"
] | null | null | null | common/src/main/java/com/zhongjh/common/utils/MimeTypeUtils.java | FrankLove/AlbumCameraRecorder | 50542238d2ea1191d39fa4c2b48e534b163377bf | [
"MIT"
] | null | null | null | common/src/main/java/com/zhongjh/common/utils/MimeTypeUtils.java | FrankLove/AlbumCameraRecorder | 50542238d2ea1191d39fa4c2b48e534b163377bf | [
"MIT"
] | null | null | null | 15.925926 | 49 | 0.565116 | 12,796 | package com.zhongjh.common.utils;
import android.text.TextUtils;
/**
* 类型工具类
*
* @author zhongjh
* @date 2022/2/8
*/
public class MimeTypeUtils {
/**
* is content://
*
* @param uri uri
* @return 判断uri是否content类型
*/
public static boolean isContent(String uri) {
if (TextUtils.isEmpty(uri)) {
return false;
}
return uri.startsWith("content://");
}
}
|
3e1e3c04e3b3526bb0d9eb7aa20d3f122f566c9c | 4,111 | java | Java | commercetools-models/src/test/java/io/sphere/sdk/inventory/commands/InventoryEntryUpdateCommandIntegrationTest.java | FL-K/commercetools-jvm-sdk | 6c3ba0d2e29e2f9c1a4054d99ea1e3ba8cf323ac | [
"Apache-2.0"
] | null | null | null | commercetools-models/src/test/java/io/sphere/sdk/inventory/commands/InventoryEntryUpdateCommandIntegrationTest.java | FL-K/commercetools-jvm-sdk | 6c3ba0d2e29e2f9c1a4054d99ea1e3ba8cf323ac | [
"Apache-2.0"
] | null | null | null | commercetools-models/src/test/java/io/sphere/sdk/inventory/commands/InventoryEntryUpdateCommandIntegrationTest.java | FL-K/commercetools-jvm-sdk | 6c3ba0d2e29e2f9c1a4054d99ea1e3ba8cf323ac | [
"Apache-2.0"
] | null | null | null | 47.802326 | 124 | 0.70956 | 12,797 | package io.sphere.sdk.inventory.commands;
import io.sphere.sdk.channels.ChannelRole;
import io.sphere.sdk.commands.UpdateAction;
import io.sphere.sdk.inventory.InventoryEntry;
import io.sphere.sdk.inventory.commands.updateactions.*;
import io.sphere.sdk.test.IntegrationTest;
import io.sphere.sdk.test.SphereTestUtils;
import org.junit.Test;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Optional;
import static io.sphere.sdk.channels.ChannelFixtures.withChannelOfRole;
import static io.sphere.sdk.inventory.InventoryEntryFixtures.withUpdateableInventoryEntry;
import static org.assertj.core.api.Assertions.assertThat;
public class InventoryEntryUpdateCommandIntegrationTest extends IntegrationTest {
@Test
public void addQuantity() throws Exception {
withUpdateableInventoryEntry(client(), entry -> {
final long additionalQuantity = 4;
final UpdateAction<InventoryEntry> action = AddQuantity.of(additionalQuantity);
final InventoryEntry updatedEntry = client().executeBlocking(InventoryEntryUpdateCommand.of(entry, action));
assertThat(updatedEntry.getQuantityOnStock()).isEqualTo(entry.getQuantityOnStock() + additionalQuantity);
return updatedEntry;
});
}
@Test
public void changeQuantity() throws Exception {
withUpdateableInventoryEntry(client(), entry -> {
final UpdateAction<InventoryEntry> action = ChangeQuantity.of(5000L);
final InventoryEntry updatedEntry = client().executeBlocking(InventoryEntryUpdateCommand.of(entry, action));
assertThat(updatedEntry.getQuantityOnStock()).isEqualTo(5000);
return updatedEntry;
});
}
@Test
public void removeQuantity() throws Exception {
withUpdateableInventoryEntry(client(), entry -> {
final long removingQuantity = 4;
final UpdateAction<InventoryEntry> action = RemoveQuantity.of(removingQuantity);
final InventoryEntry updatedEntry = client().executeBlocking(InventoryEntryUpdateCommand.of(entry, action));
assertThat(updatedEntry.getQuantityOnStock()).isEqualTo(entry.getQuantityOnStock() - removingQuantity);
return updatedEntry;
});
}
@Test
public void setSupplyChannel() throws Exception {
withChannelOfRole(client(), ChannelRole.INVENTORY_SUPPLY, channel -> {
withUpdateableInventoryEntry(client(), entry -> {
final UpdateAction<InventoryEntry> action = SetSupplyChannel.of(channel.toResourceIdentifier());
final InventoryEntry updatedEntry = client().executeBlocking(InventoryEntryUpdateCommand.of(entry, action));
assertThat(updatedEntry.getSupplyChannel()).isEqualTo(channel.toReference());
return updatedEntry;
});
});
}
@Test
public void setRestockableInDays() throws Exception {
withUpdateableInventoryEntry(client(), entry -> {
final int restockableInDays = Optional.ofNullable(entry.getRestockableInDays()).map(i -> i + 4).orElse(4);
final UpdateAction<InventoryEntry> action = SetRestockableInDays.of(restockableInDays);
final InventoryEntry updatedEntry = client().executeBlocking(InventoryEntryUpdateCommand.of(entry, action));
assertThat(updatedEntry.getRestockableInDays()).isEqualTo(restockableInDays);
return updatedEntry;
});
}
@Test
public void setExpectedDelivery() throws Exception {
withUpdateableInventoryEntry(client(), entry -> {
final ZonedDateTime expectedDelivery = SphereTestUtils.now().plus(7, ChronoUnit.DAYS);
final UpdateAction<InventoryEntry> action = SetExpectedDelivery.of(expectedDelivery);
final InventoryEntry updatedEntry = client().executeBlocking(InventoryEntryUpdateCommand.of(entry, action));
assertThat(updatedEntry.getExpectedDelivery())
.isEqualTo(expectedDelivery);
return updatedEntry;
});
}
} |
3e1e3c5e51dd98c23faf608a2238cfae6ebc37b9 | 2,073 | java | Java | app/src/main/java/com/me/silencedut/nbaplus/model/Statistics.java | SilenceDut/NBAPlus | 2bbb39739d7fff7a9d409186199849f07c8ce8e3 | [
"Apache-2.0"
] | 824 | 2015-12-28T16:56:31.000Z | 2022-03-10T01:51:16.000Z | app/src/main/java/com/me/silencedut/nbaplus/model/Statistics.java | saber13812002/NBAPlus | 2bbb39739d7fff7a9d409186199849f07c8ce8e3 | [
"Apache-2.0"
] | 8 | 2015-12-29T05:55:21.000Z | 2016-11-10T02:23:53.000Z | app/src/main/java/com/me/silencedut/nbaplus/model/Statistics.java | saber13812002/NBAPlus | 2bbb39739d7fff7a9d409186199849f07c8ce8e3 | [
"Apache-2.0"
] | 191 | 2015-12-29T00:45:29.000Z | 2022-01-04T14:06:21.000Z | 20.126214 | 62 | 0.544139 | 12,798 | package com.me.silencedut.nbaplus.model;
import java.util.List;
/**
* Created by SilenceDut on 2015/12/17.
*/
/**
 * Statistics leader boards: the single-day leaders and the season per-game
 * ("average") leaders.
 *
 * <p>All field and accessor names — including the original {@code everageStat}
 * spelling — are kept unchanged because they are presumably bound to the JSON
 * payload via reflection and used by existing callers.
 */
public class Statistics {

    // Leaders for one day, e.g. place=1, statkind=points, statdata=35, team=Hornets.
    private List<StatEntity> dailyStat;

    // Season per-game leaders, e.g. place=1, statkind=points, statdata=32, team=Warriors.
    private List<StatEntity> everageStat;

    public List<StatEntity> getDailyStat() {
        return dailyStat;
    }

    public void setDailyStat(List<StatEntity> value) {
        this.dailyStat = value;
    }

    public List<StatEntity> getEverageStat() {
        return everageStat;
    }

    public void setEverageStat(List<StatEntity> value) {
        this.everageStat = value;
    }

    /** One leader-board row: rank, category, player, value and team. */
    public static class StatEntity {

        private String place;     // rank on the board, e.g. "1"
        private String statkind;  // statistic category, e.g. "points"
        private String playerurl; // URL associated with the player
        private String name;      // player name
        private String statdata;  // statistic value, e.g. "35"
        private String team;      // team name

        public String getPlace() {
            return place;
        }

        public void setPlace(String value) {
            this.place = value;
        }

        public String getStatkind() {
            return statkind;
        }

        public void setStatkind(String value) {
            this.statkind = value;
        }

        public String getPlayerurl() {
            return playerurl;
        }

        public void setPlayerurl(String value) {
            this.playerurl = value;
        }

        public String getName() {
            return name;
        }

        public void setName(String value) {
            this.name = value;
        }

        public String getStatdata() {
            return statdata;
        }

        public void setStatdata(String value) {
            this.statdata = value;
        }

        public String getTeam() {
            return team;
        }

        public void setTeam(String value) {
            this.team = value;
        }
    }
}
|
3e1e3c76e5faf22244f3db3ca5e5c89130134711 | 186,097 | java | Java | aws-java-sdk-forecast/src/main/java/com/amazonaws/services/forecast/AmazonForecastClient.java | vinayakpokharkar/aws-sdk-java | fd409dee8ae23fb8953e0bb4dbde65536a7e0514 | [
"Apache-2.0"
] | 1 | 2022-01-04T04:11:16.000Z | 2022-01-04T04:11:16.000Z | aws-java-sdk-forecast/src/main/java/com/amazonaws/services/forecast/AmazonForecastClient.java | vinayakpokharkar/aws-sdk-java | fd409dee8ae23fb8953e0bb4dbde65536a7e0514 | [
"Apache-2.0"
] | null | null | null | aws-java-sdk-forecast/src/main/java/com/amazonaws/services/forecast/AmazonForecastClient.java | vinayakpokharkar/aws-sdk-java | fd409dee8ae23fb8953e0bb4dbde65536a7e0514 | [
"Apache-2.0"
] | null | null | null | 48.249157 | 160 | 0.690704 | 12,799 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.forecast;
import org.w3c.dom.*;
import java.net.*;
import java.util.*;
import javax.annotation.Generated;
import org.apache.commons.logging.*;
import com.amazonaws.*;
import com.amazonaws.annotation.SdkInternalApi;
import com.amazonaws.auth.*;
import com.amazonaws.handlers.*;
import com.amazonaws.http.*;
import com.amazonaws.internal.*;
import com.amazonaws.internal.auth.*;
import com.amazonaws.metrics.*;
import com.amazonaws.regions.*;
import com.amazonaws.transform.*;
import com.amazonaws.util.*;
import com.amazonaws.protocol.json.*;
import com.amazonaws.util.AWSRequestMetrics.Field;
import com.amazonaws.annotation.ThreadSafe;
import com.amazonaws.client.AwsSyncClientParams;
import com.amazonaws.client.builder.AdvancedConfig;
import com.amazonaws.services.forecast.AmazonForecastClientBuilder;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.forecast.model.*;
import com.amazonaws.services.forecast.model.transform.*;
/**
* Client for accessing Amazon Forecast Service. All service calls made using this client are blocking, and will not
* return until the service call completes.
* <p>
* <p>
* Provides APIs for creating and managing Amazon Forecast resources.
* </p>
*/
@ThreadSafe
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AmazonForecastClient extends AmazonWebServiceClient implements AmazonForecast {
/** Provider for AWS credentials. */
private final AWSCredentialsProvider awsCredentialsProvider;
private static final Log log = LogFactory.getLog(AmazonForecast.class);
/** Default signing name for the service. */
private static final String DEFAULT_SIGNING_NAME = "forecast";
/** Client configuration factory providing ClientConfigurations tailored to this client */
protected static final ClientConfigurationFactory configFactory = new ClientConfigurationFactory();
private final AdvancedConfig advancedConfig;
private static final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory = new com.amazonaws.protocol.json.SdkJsonProtocolFactory(
new JsonClientMetadata()
.withProtocolVersion("1.1")
.withSupportsCbor(false)
.withSupportsIon(false)
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("ResourceInUseException").withExceptionUnmarshaller(
com.amazonaws.services.forecast.model.transform.ResourceInUseExceptionUnmarshaller.getInstance()))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("InvalidNextTokenException").withExceptionUnmarshaller(
com.amazonaws.services.forecast.model.transform.InvalidNextTokenExceptionUnmarshaller.getInstance()))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("LimitExceededException").withExceptionUnmarshaller(
com.amazonaws.services.forecast.model.transform.LimitExceededExceptionUnmarshaller.getInstance()))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("ResourceNotFoundException").withExceptionUnmarshaller(
com.amazonaws.services.forecast.model.transform.ResourceNotFoundExceptionUnmarshaller.getInstance()))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("InvalidInputException").withExceptionUnmarshaller(
com.amazonaws.services.forecast.model.transform.InvalidInputExceptionUnmarshaller.getInstance()))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("ResourceAlreadyExistsException").withExceptionUnmarshaller(
com.amazonaws.services.forecast.model.transform.ResourceAlreadyExistsExceptionUnmarshaller.getInstance()))
.withBaseServiceExceptionClass(com.amazonaws.services.forecast.model.AmazonForecastException.class));
    /**
     * Returns a fluent builder for constructing {@code AmazonForecastClient} instances
     * with custom credentials, region, and client configuration.
     */
    public static AmazonForecastClientBuilder builder() {
        return AmazonForecastClientBuilder.standard();
    }
/**
* Constructs a new client to invoke service methods on Amazon Forecast Service using the specified parameters.
*
* <p>
* All service calls made using this new client object are blocking, and will not return until the service call
* completes.
*
* @param clientParams
* Object providing client parameters.
*/
    AmazonForecastClient(AwsSyncClientParams clientParams) {
        // Delegates to the two-argument constructor with endpoint discovery disabled (false).
        this(clientParams, false);
    }
/**
* Constructs a new client to invoke service methods on Amazon Forecast Service using the specified parameters.
*
* <p>
* All service calls made using this new client object are blocking, and will not return until the service call
* completes.
*
* @param clientParams
* Object providing client parameters.
*/
    AmazonForecastClient(AwsSyncClientParams clientParams, boolean endpointDiscoveryEnabled) {
        super(clientParams);
        // Cache the credentials provider and advanced config supplied by the builder.
        this.awsCredentialsProvider = clientParams.getCredentialsProvider();
        this.advancedConfig = clientParams.getAdvancedConfig();
        // NOTE(review): endpointDiscoveryEnabled is not referenced in this constructor body —
        // presumably retained for code-generator signature uniformity; confirm before relying on it.
        init();
    }
    // One-time client setup: signing name, endpoint, and request-handler chains.
    private void init() {
        setServiceNameIntern(DEFAULT_SIGNING_NAME);
        setEndpointPrefix(ENDPOINT_PREFIX);
        // calling this.setEndPoint(...) will also modify the signer accordingly
        // (default endpoint is the us-east-1 regional endpoint; builders may override it)
        setEndpoint("forecast.us-east-1.amazonaws.com");
        // Load request handlers declared on the classpath, then the SDK-wide global handlers.
        HandlerChainFactory chainFactory = new HandlerChainFactory();
        requestHandler2s.addAll(chainFactory.newRequestHandlerChain("/com/amazonaws/services/forecast/request.handlers"));
        requestHandler2s.addAll(chainFactory.newRequestHandler2Chain("/com/amazonaws/services/forecast/request.handler2s"));
        requestHandler2s.addAll(chainFactory.getGlobalHandlers());
    }
/**
* <p>
* Creates an Amazon Forecast predictor.
* </p>
* <p>
* Amazon Forecast creates predictors with AutoPredictor, which involves applying the optimal combination of
* algorithms to each time series in your datasets. You can use <a>CreateAutoPredictor</a> to create new predictors
* or upgrade/retrain existing predictors.
* </p>
* <p>
* <b>Creating new predictors</b>
* </p>
* <p>
* The following parameters are required when creating a new predictor:
* </p>
* <ul>
* <li>
* <p>
* <code>PredictorName</code> - A unique name for the predictor.
* </p>
* </li>
* <li>
* <p>
* <code>DatasetGroupArn</code> - The ARN of the dataset group used to train the predictor.
* </p>
* </li>
* <li>
* <p>
* <code>ForecastFrequency</code> - The granularity of your forecasts (hourly, daily, weekly, etc).
* </p>
* </li>
* <li>
* <p>
* <code>ForecastHorizon</code> - The number of time steps being forecasted.
* </p>
* </li>
* </ul>
* <p>
* When creating a new predictor, do not specify a value for <code>ReferencePredictorArn</code>.
* </p>
* <p>
* <b>Upgrading and retraining predictors</b>
* </p>
* <p>
* The following parameters are required when retraining or upgrading a predictor:
* </p>
* <ul>
* <li>
* <p>
* <code>PredictorName</code> - A unique name for the predictor.
* </p>
* </li>
* <li>
* <p>
* <code>ReferencePredictorArn</code> - The ARN of the predictor to retrain or upgrade.
* </p>
* </li>
* </ul>
* <p>
* When upgrading or retraining a predictor, only specify values for the <code>ReferencePredictorArn</code> and
* <code>PredictorName</code>.
* </p>
*
* @param createAutoPredictorRequest
* @return Result of the CreateAutoPredictor operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreateAutoPredictor
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreateAutoPredictor" target="_top">AWS
* API Documentation</a>
*/
    @Override
    public CreateAutoPredictorResult createAutoPredictor(CreateAutoPredictorRequest request) {
        // Run the registered pre-execution handlers before marshalling and dispatch.
        request = beforeClientExecution(request);
        return executeCreateAutoPredictor(request);
    }

    // Marshals and executes CreateAutoPredictor, recording request metrics around each phase.
    @SdkInternalApi
    final CreateAutoPredictorResult executeCreateAutoPredictor(CreateAutoPredictorRequest createAutoPredictorRequest) {

        ExecutionContext executionContext = createExecutionContext(createAutoPredictorRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // ClientExecuteTime spans the whole call; it is closed in the outer finally by endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateAutoPredictorRequest> request = null;
        Response<CreateAutoPredictorResult> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateAutoPredictorRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createAutoPredictorRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler-context entries let downstream request handlers identify this call.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateAutoPredictor");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);

            } finally {
                // Close the marshalling timer even if marshalling throws.
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            HttpResponseHandler<AmazonWebServiceResponse<CreateAutoPredictorResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new CreateAutoPredictorResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Ends ClientExecuteTime and reports the collected metrics for this call.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Creates an Amazon Forecast dataset. The information about the dataset that you provide helps Forecast understand
* how to consume the data for model training. This includes the following:
* </p>
* <ul>
* <li>
* <p>
* <i> <code>DataFrequency</code> </i> - How frequently your historical time-series data is collected.
* </p>
* </li>
* <li>
* <p>
* <i> <code>Domain</code> </i> and <i> <code>DatasetType</code> </i> - Each dataset has an associated dataset
* domain and a type within the domain. Amazon Forecast provides a list of predefined domains and types within each
* domain. For each unique dataset domain and type within the domain, Amazon Forecast requires your data to include
* a minimum set of predefined fields.
* </p>
* </li>
* <li>
* <p>
* <i> <code>Schema</code> </i> - A schema specifies the fields in the dataset, including the field name and data
* type.
* </p>
* </li>
* </ul>
* <p>
* After creating a dataset, you import your training data into it and add the dataset to a dataset group. You use
* the dataset group to create a predictor. For more information, see <a>howitworks-datasets-groups</a>.
* </p>
* <p>
* To get a list of all your datasets, use the <a>ListDatasets</a> operation.
* </p>
* <p>
* For example Forecast datasets, see the <a href="https://github.com/aws-samples/amazon-forecast-samples">Amazon
* Forecast Sample GitHub repository</a>.
* </p>
* <note>
* <p>
* The <code>Status</code> of a dataset must be <code>ACTIVE</code> before you can import training data. Use the
* <a>DescribeDataset</a> operation to get the status.
* </p>
* </note>
*
* @param createDatasetRequest
* @return Result of the CreateDataset operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreateDataset
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreateDataset" target="_top">AWS API
* Documentation</a>
*/
    @Override
    public CreateDatasetResult createDataset(CreateDatasetRequest request) {
        // Run the registered pre-execution handlers before marshalling and dispatch.
        request = beforeClientExecution(request);
        return executeCreateDataset(request);
    }

    // Marshals and executes CreateDataset, recording request metrics around each phase.
    @SdkInternalApi
    final CreateDatasetResult executeCreateDataset(CreateDatasetRequest createDatasetRequest) {

        ExecutionContext executionContext = createExecutionContext(createDatasetRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // ClientExecuteTime spans the whole call; it is closed in the outer finally by endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateDatasetRequest> request = null;
        Response<CreateDatasetResult> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateDatasetRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createDatasetRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler-context entries let downstream request handlers identify this call.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateDataset");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);

            } finally {
                // Close the marshalling timer even if marshalling throws.
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            HttpResponseHandler<AmazonWebServiceResponse<CreateDatasetResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new CreateDatasetResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Ends ClientExecuteTime and reports the collected metrics for this call.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Creates a dataset group, which holds a collection of related datasets. You can add datasets to the dataset group
* when you create the dataset group, or later by using the <a>UpdateDatasetGroup</a> operation.
* </p>
* <p>
* After creating a dataset group and adding datasets, you use the dataset group when you create a predictor. For
* more information, see <a>howitworks-datasets-groups</a>.
* </p>
* <p>
* To get a list of all your datasets groups, use the <a>ListDatasetGroups</a> operation.
* </p>
* <note>
* <p>
* The <code>Status</code> of a dataset group must be <code>ACTIVE</code> before you can use the dataset group to
* create a predictor. To get the status, use the <a>DescribeDatasetGroup</a> operation.
* </p>
* </note>
*
* @param createDatasetGroupRequest
* @return Result of the CreateDatasetGroup operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreateDatasetGroup
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreateDatasetGroup" target="_top">AWS
* API Documentation</a>
*/
    @Override
    public CreateDatasetGroupResult createDatasetGroup(CreateDatasetGroupRequest request) {
        // Run the registered pre-execution handlers before marshalling and dispatch.
        request = beforeClientExecution(request);
        return executeCreateDatasetGroup(request);
    }

    // Marshals and executes CreateDatasetGroup, recording request metrics around each phase.
    @SdkInternalApi
    final CreateDatasetGroupResult executeCreateDatasetGroup(CreateDatasetGroupRequest createDatasetGroupRequest) {

        ExecutionContext executionContext = createExecutionContext(createDatasetGroupRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // ClientExecuteTime spans the whole call; it is closed in the outer finally by endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateDatasetGroupRequest> request = null;
        Response<CreateDatasetGroupResult> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateDatasetGroupRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createDatasetGroupRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler-context entries let downstream request handlers identify this call.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateDatasetGroup");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);

            } finally {
                // Close the marshalling timer even if marshalling throws.
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            HttpResponseHandler<AmazonWebServiceResponse<CreateDatasetGroupResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new CreateDatasetGroupResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Ends ClientExecuteTime and reports the collected metrics for this call.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Imports your training data to an Amazon Forecast dataset. You provide the location of your training data in an
* Amazon Simple Storage Service (Amazon S3) bucket and the Amazon Resource Name (ARN) of the dataset that you want
* to import the data to.
* </p>
* <p>
* You must specify a <a>DataSource</a> object that includes an AWS Identity and Access Management (IAM) role that
* Amazon Forecast can assume to access the data, as Amazon Forecast makes a copy of your data and processes it in
* an internal AWS system. For more information, see <a>aws-forecast-iam-roles</a>.
* </p>
* <p>
* The training data must be in CSV format. The delimiter must be a comma (,).
* </p>
* <p>
* You can specify the path to a specific CSV file, the S3 bucket, or to a folder in the S3 bucket. For the latter
* two cases, Amazon Forecast imports all files up to the limit of 10,000 files.
* </p>
* <p>
* Because dataset imports are not aggregated, your most recent dataset import is the one that is used when training
* a predictor or generating a forecast. Make sure that your most recent dataset import contains all of the data you
* want to model off of, and not just the new data collected since the previous import.
* </p>
* <p>
* To get a list of all your dataset import jobs, filtered by specified criteria, use the
* <a>ListDatasetImportJobs</a> operation.
* </p>
*
* @param createDatasetImportJobRequest
* @return Result of the CreateDatasetImportJob operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreateDatasetImportJob
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreateDatasetImportJob"
* target="_top">AWS API Documentation</a>
*/
    @Override
    public CreateDatasetImportJobResult createDatasetImportJob(CreateDatasetImportJobRequest request) {
        // Run the registered pre-execution handlers before marshalling and dispatch.
        request = beforeClientExecution(request);
        return executeCreateDatasetImportJob(request);
    }

    // Marshals and executes CreateDatasetImportJob, recording request metrics around each phase.
    @SdkInternalApi
    final CreateDatasetImportJobResult executeCreateDatasetImportJob(CreateDatasetImportJobRequest createDatasetImportJobRequest) {

        ExecutionContext executionContext = createExecutionContext(createDatasetImportJobRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // ClientExecuteTime spans the whole call; it is closed in the outer finally by endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateDatasetImportJobRequest> request = null;
        Response<CreateDatasetImportJobResult> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateDatasetImportJobRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createDatasetImportJobRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler-context entries let downstream request handlers identify this call.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateDatasetImportJob");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);

            } finally {
                // Close the marshalling timer even if marshalling throws.
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            HttpResponseHandler<AmazonWebServiceResponse<CreateDatasetImportJobResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new CreateDatasetImportJobResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Ends ClientExecuteTime and reports the collected metrics for this call.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <note>
* <p>
* Explainability is only available for Forecasts and Predictors generated from an AutoPredictor
* (<a>CreateAutoPredictor</a>)
* </p>
* </note>
* <p>
* Creates an Amazon Forecast Explainability.
* </p>
* <p>
* Explainability helps you better understand how the attributes in your datasets impact forecast. Amazon Forecast
* uses a metric called Impact scores to quantify the relative impact of each attribute and determine whether they
* increase or decrease forecast values.
* </p>
* <p>
* To enable Forecast Explainability, your predictor must include at least one of the following: related time
* series, item metadata, or additional datasets like Holidays and the Weather Index.
* </p>
* <p>
* CreateExplainability accepts either a Predictor ARN or Forecast ARN. To receive aggregated Impact scores for all
* time series and time points in your datasets, provide a Predictor ARN. To receive Impact scores for specific time
* series and time points, provide a Forecast ARN.
* </p>
* <p>
* <b>CreateExplainability with a Predictor ARN</b>
* </p>
* <note>
* <p>
* You can only have one Explainability resource per predictor. If you already enabled <code>ExplainPredictor</code>
* in <a>CreateAutoPredictor</a>, that predictor already has an Explainability resource.
* </p>
* </note>
* <p>
* The following parameters are required when providing a Predictor ARN:
* </p>
* <ul>
* <li>
* <p>
* <code>ExplainabilityName</code> - A unique name for the Explainability.
* </p>
* </li>
* <li>
* <p>
* <code>ResourceArn</code> - The Arn of the predictor.
* </p>
* </li>
* <li>
* <p>
* <code>TimePointGranularity</code> - Must be set to “ALL”.
* </p>
* </li>
* <li>
* <p>
* <code>TimeSeriesGranularity</code> - Must be set to “ALL”.
* </p>
* </li>
* </ul>
* <p>
* Do not specify a value for the following parameters:
* </p>
* <ul>
* <li>
* <p>
* <code>DataSource</code> - Only valid when TimeSeriesGranularity is “SPECIFIC”.
* </p>
* </li>
* <li>
* <p>
* <code>Schema</code> - Only valid when TimeSeriesGranularity is “SPECIFIC”.
* </p>
* </li>
* <li>
* <p>
* <code>StartDateTime</code> - Only valid when TimePointGranularity is “SPECIFIC”.
* </p>
* </li>
* <li>
* <p>
* <code>EndDateTime</code> - Only valid when TimePointGranularity is “SPECIFIC”.
* </p>
* </li>
* </ul>
* <p>
* <b>CreateExplainability with a Forecast ARN</b>
* </p>
* <note>
* <p>
* You can specify a maximum of 50 time series and 500 time points.
* </p>
* </note>
* <p>
* The following parameters are required when providing a Predictor ARN:
* </p>
* <ul>
* <li>
* <p>
* <code>ExplainabilityName</code> - A unique name for the Explainability.
* </p>
* </li>
* <li>
* <p>
* <code>ResourceArn</code> - The Arn of the forecast.
* </p>
* </li>
* <li>
* <p>
* <code>TimePointGranularity</code> - Either “ALL” or “SPECIFIC”.
* </p>
* </li>
* <li>
* <p>
* <code>TimeSeriesGranularity</code> - Either “ALL” or “SPECIFIC”.
* </p>
* </li>
* </ul>
* <p>
* If you set TimeSeriesGranularity to “SPECIFIC”, you must also provide the following:
* </p>
* <ul>
* <li>
* <p>
* <code>DataSource</code> - The S3 location of the CSV file specifying your time series.
* </p>
* </li>
* <li>
* <p>
* <code>Schema</code> - The Schema defines the attributes and attribute types listed in the Data Source.
* </p>
* </li>
* </ul>
* <p>
* If you set TimePointGranularity to “SPECIFIC”, you must also provide the following:
* </p>
* <ul>
* <li>
* <p>
* <code>StartDateTime</code> - The first timestamp in the range of time points.
* </p>
* </li>
* <li>
* <p>
* <code>EndDateTime</code> - The last timestamp in the range of time points.
* </p>
* </li>
* </ul>
*
* @param createExplainabilityRequest
* @return Result of the CreateExplainability operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreateExplainability
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreateExplainability" target="_top">AWS
* API Documentation</a>
*/
    @Override
    public CreateExplainabilityResult createExplainability(CreateExplainabilityRequest request) {
        // Run the registered pre-execution handlers before marshalling and dispatch.
        request = beforeClientExecution(request);
        return executeCreateExplainability(request);
    }

    // Marshals and executes CreateExplainability, recording request metrics around each phase.
    @SdkInternalApi
    final CreateExplainabilityResult executeCreateExplainability(CreateExplainabilityRequest createExplainabilityRequest) {

        ExecutionContext executionContext = createExecutionContext(createExplainabilityRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // ClientExecuteTime spans the whole call; it is closed in the outer finally by endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateExplainabilityRequest> request = null;
        Response<CreateExplainabilityResult> response = null;

        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateExplainabilityRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createExplainabilityRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler-context entries let downstream request handlers identify this call.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateExplainability");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);

            } finally {
                // Close the marshalling timer even if marshalling throws.
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }

            HttpResponseHandler<AmazonWebServiceResponse<CreateExplainabilityResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new CreateExplainabilityResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Ends ClientExecuteTime and reports the collected metrics for this call.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Exports an Explainability resource created by the <a>CreateExplainability</a> operation. Exported files are
* exported to an Amazon Simple Storage Service (Amazon S3) bucket.
* </p>
* <p>
* You must specify a <a>DataDestination</a> object that includes an Amazon S3 bucket and an AWS Identity and Access
* Management (IAM) role that Amazon Forecast can assume to access the Amazon S3 bucket. For more information, see
* <a>aws-forecast-iam-roles</a>.
* </p>
* <note>
* <p>
* The <code>Status</code> of the export job must be <code>ACTIVE</code> before you can access the export in your
* Amazon S3 bucket. To get the status, use the <a>DescribeExplainabilityExport</a> operation.
* </p>
* </note>
*
* @param createExplainabilityExportRequest
* @return Result of the CreateExplainabilityExport operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreateExplainabilityExport
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreateExplainabilityExport"
* target="_top">AWS API Documentation</a>
*/
@Override
public CreateExplainabilityExportResult createExplainabilityExport(CreateExplainabilityExportRequest request) {
request = beforeClientExecution(request);
return executeCreateExplainabilityExport(request);
}
    @SdkInternalApi
    final CreateExplainabilityExportResult executeCreateExplainabilityExport(CreateExplainabilityExportRequest createExplainabilityExportRequest) {
        // Per-request execution context; its metrics object records timing for this single call.
        ExecutionContext executionContext = createExecutionContext(createExplainabilityExportRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // Overall client-side timer; closed in the outer finally via endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateExplainabilityExportRequest> request = null;
        Response<CreateExplainabilityExportResult> response = null;
        try {
            // Measure just the marshalling of the POJO request into an HTTP request.
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateExplainabilityExportRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(createExplainabilityExportRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler context exposes endpoint/signing/operation metadata to request handlers.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateExplainabilityExport");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // Unmarshals the JSON payload of a successful (non-streaming) response into the result type.
            HttpResponseHandler<AmazonWebServiceResponse<CreateExplainabilityExportResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new CreateExplainabilityExportResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always runs: finishes client-execution bookkeeping whether the call succeeded or threw.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Creates a forecast for each item in the <code>TARGET_TIME_SERIES</code> dataset that was used to train the
* predictor. This is known as inference. To retrieve the forecast for a single item at low latency, use the
* operation. To export the complete forecast into your Amazon Simple Storage Service (Amazon S3) bucket, use the
* <a>CreateForecastExportJob</a> operation.
* </p>
* <p>
* The range of the forecast is determined by the <code>ForecastHorizon</code> value, which you specify in the
* <a>CreatePredictor</a> request. When you query a forecast, you can request a specific date range within the
* forecast.
* </p>
* <p>
* To get a list of all your forecasts, use the <a>ListForecasts</a> operation.
* </p>
* <note>
* <p>
* The forecasts generated by Amazon Forecast are in the same time zone as the dataset that was used to create the
* predictor.
* </p>
* </note>
* <p>
* For more information, see <a>howitworks-forecast</a>.
* </p>
* <note>
* <p>
* The <code>Status</code> of the forecast must be <code>ACTIVE</code> before you can query or export the forecast.
* Use the <a>DescribeForecast</a> operation to get the status.
* </p>
* </note>
*
* @param createForecastRequest
* @return Result of the CreateForecast operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreateForecast
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreateForecast" target="_top">AWS API
* Documentation</a>
*/
@Override
public CreateForecastResult createForecast(CreateForecastRequest request) {
request = beforeClientExecution(request);
return executeCreateForecast(request);
}
    @SdkInternalApi
    final CreateForecastResult executeCreateForecast(CreateForecastRequest createForecastRequest) {
        // Per-request execution context; its metrics object records timing for this single call.
        ExecutionContext executionContext = createExecutionContext(createForecastRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // Overall client-side timer; closed in the outer finally via endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateForecastRequest> request = null;
        Response<CreateForecastResult> response = null;
        try {
            // Measure just the marshalling of the POJO request into an HTTP request.
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateForecastRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createForecastRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler context exposes endpoint/signing/operation metadata to request handlers.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateForecast");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // Unmarshals the JSON payload of a successful (non-streaming) response into the result type.
            HttpResponseHandler<AmazonWebServiceResponse<CreateForecastResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new CreateForecastResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always runs: finishes client-execution bookkeeping whether the call succeeded or threw.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Exports a forecast created by the <a>CreateForecast</a> operation to your Amazon Simple Storage Service (Amazon
* S3) bucket. The forecast file name will match the following conventions:
* </p>
* <p>
* <ForecastExportJobName>_<ExportTimestamp>_<PartNumber>
* </p>
* <p>
* where the <ExportTimestamp> component is in Java SimpleDateFormat (yyyy-MM-ddTHH-mm-ssZ).
* </p>
* <p>
* You must specify a <a>DataDestination</a> object that includes an AWS Identity and Access Management (IAM) role
* that Amazon Forecast can assume to access the Amazon S3 bucket. For more information, see
* <a>aws-forecast-iam-roles</a>.
* </p>
* <p>
* For more information, see <a>howitworks-forecast</a>.
* </p>
* <p>
* To get a list of all your forecast export jobs, use the <a>ListForecastExportJobs</a> operation.
* </p>
* <note>
* <p>
* The <code>Status</code> of the forecast export job must be <code>ACTIVE</code> before you can access the forecast
* in your Amazon S3 bucket. To get the status, use the <a>DescribeForecastExportJob</a> operation.
* </p>
* </note>
*
* @param createForecastExportJobRequest
* @return Result of the CreateForecastExportJob operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreateForecastExportJob
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreateForecastExportJob"
* target="_top">AWS API Documentation</a>
*/
@Override
public CreateForecastExportJobResult createForecastExportJob(CreateForecastExportJobRequest request) {
request = beforeClientExecution(request);
return executeCreateForecastExportJob(request);
}
    @SdkInternalApi
    final CreateForecastExportJobResult executeCreateForecastExportJob(CreateForecastExportJobRequest createForecastExportJobRequest) {
        // Per-request execution context; its metrics object records timing for this single call.
        ExecutionContext executionContext = createExecutionContext(createForecastExportJobRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // Overall client-side timer; closed in the outer finally via endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateForecastExportJobRequest> request = null;
        Response<CreateForecastExportJobResult> response = null;
        try {
            // Measure just the marshalling of the POJO request into an HTTP request.
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateForecastExportJobRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(createForecastExportJobRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler context exposes endpoint/signing/operation metadata to request handlers.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateForecastExportJob");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // Unmarshals the JSON payload of a successful (non-streaming) response into the result type.
            HttpResponseHandler<AmazonWebServiceResponse<CreateForecastExportJobResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new CreateForecastExportJobResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always runs: finishes client-execution bookkeeping whether the call succeeded or threw.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <note>
* <p>
* This operation creates a legacy predictor that does not include all the predictor functionalities provided by
* Amazon Forecast. To create a predictor that is compatible with all aspects of Forecast, use
* <a>CreateAutoPredictor</a>.
* </p>
* </note>
* <p>
* Creates an Amazon Forecast predictor.
* </p>
* <p>
* In the request, provide a dataset group and either specify an algorithm or let Amazon Forecast choose an
* algorithm for you using AutoML. If you specify an algorithm, you also can override algorithm-specific
* hyperparameters.
* </p>
* <p>
* Amazon Forecast uses the algorithm to train a predictor using the latest version of the datasets in the specified
* dataset group. You can then generate a forecast using the <a>CreateForecast</a> operation.
* </p>
* <p>
* To see the evaluation metrics, use the <a>GetAccuracyMetrics</a> operation.
* </p>
* <p>
* You can specify a featurization configuration to fill and aggregate the data fields in the
* <code>TARGET_TIME_SERIES</code> dataset to improve model training. For more information, see
* <a>FeaturizationConfig</a>.
* </p>
* <p>
* For RELATED_TIME_SERIES datasets, <code>CreatePredictor</code> verifies that the <code>DataFrequency</code>
* specified when the dataset was created matches the <code>ForecastFrequency</code>. TARGET_TIME_SERIES datasets
* don't have this restriction. Amazon Forecast also verifies the delimiter and timestamp format. For more
* information, see <a>howitworks-datasets-groups</a>.
* </p>
* <p>
* By default, predictors are trained and evaluated at the 0.1 (P10), 0.5 (P50), and 0.9 (P90) quantiles. You can
* choose custom forecast types to train and evaluate your predictor by setting the <code>ForecastTypes</code>.
* </p>
* <p>
* <b>AutoML</b>
* </p>
* <p>
* If you want Amazon Forecast to evaluate each algorithm and choose the one that minimizes the
* <code>objective function</code>, set <code>PerformAutoML</code> to <code>true</code>. The
* <code>objective function</code> is defined as the mean of the weighted losses over the forecast types. By
* default, these are the p10, p50, and p90 quantile losses. For more information, see <a>EvaluationResult</a>.
* </p>
* <p>
* When AutoML is enabled, the following properties are disallowed:
* </p>
* <ul>
* <li>
* <p>
* <code>AlgorithmArn</code>
* </p>
* </li>
* <li>
* <p>
* <code>HPOConfig</code>
* </p>
* </li>
* <li>
* <p>
* <code>PerformHPO</code>
* </p>
* </li>
* <li>
* <p>
* <code>TrainingParameters</code>
* </p>
* </li>
* </ul>
* <p>
* To get a list of all of your predictors, use the <a>ListPredictors</a> operation.
* </p>
* <note>
* <p>
* Before you can use the predictor to create a forecast, the <code>Status</code> of the predictor must be
* <code>ACTIVE</code>, signifying that training has completed. To get the status, use the <a>DescribePredictor</a>
* operation.
* </p>
* </note>
*
* @param createPredictorRequest
* @return Result of the CreatePredictor operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreatePredictor
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreatePredictor" target="_top">AWS API
* Documentation</a>
*/
@Override
public CreatePredictorResult createPredictor(CreatePredictorRequest request) {
request = beforeClientExecution(request);
return executeCreatePredictor(request);
}
    @SdkInternalApi
    final CreatePredictorResult executeCreatePredictor(CreatePredictorRequest createPredictorRequest) {
        // Per-request execution context; its metrics object records timing for this single call.
        ExecutionContext executionContext = createExecutionContext(createPredictorRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // Overall client-side timer; closed in the outer finally via endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreatePredictorRequest> request = null;
        Response<CreatePredictorResult> response = null;
        try {
            // Measure just the marshalling of the POJO request into an HTTP request.
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreatePredictorRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createPredictorRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler context exposes endpoint/signing/operation metadata to request handlers.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreatePredictor");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // Unmarshals the JSON payload of a successful (non-streaming) response into the result type.
            HttpResponseHandler<AmazonWebServiceResponse<CreatePredictorResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new CreatePredictorResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always runs: finishes client-execution bookkeeping whether the call succeeded or threw.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Exports backtest forecasts and accuracy metrics generated by the <a>CreateAutoPredictor</a> or
* <a>CreatePredictor</a> operations. Two folders containing CSV files are exported to your specified S3 bucket.
* </p>
* <p>
* The export file names will match the following conventions:
* </p>
* <p>
* <code><ExportJobName>_<ExportTimestamp>_<PartNumber>.csv</code>
* </p>
* <p>
* The <ExportTimestamp> component is in Java SimpleDate format (yyyy-MM-ddTHH-mm-ssZ).
* </p>
* <p>
* You must specify a <a>DataDestination</a> object that includes an Amazon S3 bucket and an AWS Identity and Access
* Management (IAM) role that Amazon Forecast can assume to access the Amazon S3 bucket. For more information, see
* <a>aws-forecast-iam-roles</a>.
* </p>
* <note>
* <p>
* The <code>Status</code> of the export job must be <code>ACTIVE</code> before you can access the export in your
* Amazon S3 bucket. To get the status, use the <a>DescribePredictorBacktestExportJob</a> operation.
* </p>
* </note>
*
* @param createPredictorBacktestExportJobRequest
* @return Result of the CreatePredictorBacktestExportJob operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceAlreadyExistsException
* There is already a resource with this name. Try again with a different name.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @sample AmazonForecast.CreatePredictorBacktestExportJob
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/CreatePredictorBacktestExportJob"
* target="_top">AWS API Documentation</a>
*/
@Override
public CreatePredictorBacktestExportJobResult createPredictorBacktestExportJob(CreatePredictorBacktestExportJobRequest request) {
request = beforeClientExecution(request);
return executeCreatePredictorBacktestExportJob(request);
}
    @SdkInternalApi
    final CreatePredictorBacktestExportJobResult executeCreatePredictorBacktestExportJob(
            CreatePredictorBacktestExportJobRequest createPredictorBacktestExportJobRequest) {
        // Per-request execution context; its metrics object records timing for this single call.
        ExecutionContext executionContext = createExecutionContext(createPredictorBacktestExportJobRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // Overall client-side timer; closed in the outer finally via endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreatePredictorBacktestExportJobRequest> request = null;
        Response<CreatePredictorBacktestExportJobResult> response = null;
        try {
            // Measure just the marshalling of the POJO request into an HTTP request.
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreatePredictorBacktestExportJobRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(createPredictorBacktestExportJobRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler context exposes endpoint/signing/operation metadata to request handlers.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreatePredictorBacktestExportJob");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // Unmarshals the JSON payload of a successful (non-streaming) response into the result type.
            HttpResponseHandler<AmazonWebServiceResponse<CreatePredictorBacktestExportJobResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new CreatePredictorBacktestExportJobResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always runs: finishes client-execution bookkeeping whether the call succeeded or threw.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes an Amazon Forecast dataset that was created using the <a>CreateDataset</a> operation. You can only delete
* datasets that have a status of <code>ACTIVE</code> or <code>CREATE_FAILED</code>. To get the status use the
* <a>DescribeDataset</a> operation.
* </p>
* <note>
* <p>
* Forecast does not automatically update any dataset groups that contain the deleted dataset. In order to update
* the dataset group, use the operation, omitting the deleted dataset's ARN.
* </p>
* </note>
*
* @param deleteDatasetRequest
* @return Result of the DeleteDataset operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeleteDataset
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeleteDataset" target="_top">AWS API
* Documentation</a>
*/
@Override
public DeleteDatasetResult deleteDataset(DeleteDatasetRequest request) {
request = beforeClientExecution(request);
return executeDeleteDataset(request);
}
    @SdkInternalApi
    final DeleteDatasetResult executeDeleteDataset(DeleteDatasetRequest deleteDatasetRequest) {
        // Per-request execution context; its metrics object records timing for this single call.
        ExecutionContext executionContext = createExecutionContext(deleteDatasetRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // Overall client-side timer; closed in the outer finally via endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteDatasetRequest> request = null;
        Response<DeleteDatasetResult> response = null;
        try {
            // Measure just the marshalling of the POJO request into an HTTP request.
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteDatasetRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deleteDatasetRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler context exposes endpoint/signing/operation metadata to request handlers.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteDataset");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // Unmarshals the JSON payload of a successful (non-streaming) response into the result type.
            HttpResponseHandler<AmazonWebServiceResponse<DeleteDatasetResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DeleteDatasetResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always runs: finishes client-execution bookkeeping whether the call succeeded or threw.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes a dataset group created using the <a>CreateDatasetGroup</a> operation. You can only delete dataset groups
* that have a status of <code>ACTIVE</code>, <code>CREATE_FAILED</code>, or <code>UPDATE_FAILED</code>. To get the
* status, use the <a>DescribeDatasetGroup</a> operation.
* </p>
* <p>
* This operation deletes only the dataset group, not the datasets in the group.
* </p>
*
* @param deleteDatasetGroupRequest
* @return Result of the DeleteDatasetGroup operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeleteDatasetGroup
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeleteDatasetGroup" target="_top">AWS
* API Documentation</a>
*/
@Override
public DeleteDatasetGroupResult deleteDatasetGroup(DeleteDatasetGroupRequest request) {
request = beforeClientExecution(request);
return executeDeleteDatasetGroup(request);
}
    @SdkInternalApi
    final DeleteDatasetGroupResult executeDeleteDatasetGroup(DeleteDatasetGroupRequest deleteDatasetGroupRequest) {
        // Per-request execution context; its metrics object records timing for this single call.
        ExecutionContext executionContext = createExecutionContext(deleteDatasetGroupRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // Overall client-side timer; closed in the outer finally via endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteDatasetGroupRequest> request = null;
        Response<DeleteDatasetGroupResult> response = null;
        try {
            // Measure just the marshalling of the POJO request into an HTTP request.
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteDatasetGroupRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deleteDatasetGroupRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler context exposes endpoint/signing/operation metadata to request handlers.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteDatasetGroup");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // Unmarshals the JSON payload of a successful (non-streaming) response into the result type.
            HttpResponseHandler<AmazonWebServiceResponse<DeleteDatasetGroupResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DeleteDatasetGroupResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always runs: finishes client-execution bookkeeping whether the call succeeded or threw.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes a dataset import job created using the <a>CreateDatasetImportJob</a> operation. You can delete only
* dataset import jobs that have a status of <code>ACTIVE</code> or <code>CREATE_FAILED</code>. To get the status,
* use the <a>DescribeDatasetImportJob</a> operation.
* </p>
*
* @param deleteDatasetImportJobRequest
* @return Result of the DeleteDatasetImportJob operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeleteDatasetImportJob
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeleteDatasetImportJob"
* target="_top">AWS API Documentation</a>
*/
@Override
public DeleteDatasetImportJobResult deleteDatasetImportJob(DeleteDatasetImportJobRequest request) {
request = beforeClientExecution(request);
return executeDeleteDatasetImportJob(request);
}
    @SdkInternalApi
    final DeleteDatasetImportJobResult executeDeleteDatasetImportJob(DeleteDatasetImportJobRequest deleteDatasetImportJobRequest) {
        // Per-request execution context; its metrics object records timing for this single call.
        ExecutionContext executionContext = createExecutionContext(deleteDatasetImportJobRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        // Overall client-side timer; closed in the outer finally via endClientExecution.
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteDatasetImportJobRequest> request = null;
        Response<DeleteDatasetImportJobResult> response = null;
        try {
            // Measure just the marshalling of the POJO request into an HTTP request.
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteDatasetImportJobRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deleteDatasetImportJobRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                // Handler context exposes endpoint/signing/operation metadata to request handlers.
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteDatasetImportJob");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // Unmarshals the JSON payload of a successful (non-streaming) response into the result type.
            HttpResponseHandler<AmazonWebServiceResponse<DeleteDatasetImportJobResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new DeleteDatasetImportJobResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always runs: finishes client-execution bookkeeping whether the call succeeded or threw.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes an Explainability resource.
* </p>
* <p>
* You can delete only predictor that have a status of <code>ACTIVE</code> or <code>CREATE_FAILED</code>. To get the
* status, use the <a>DescribeExplainability</a> operation.
* </p>
*
* @param deleteExplainabilityRequest
* @return Result of the DeleteExplainability operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeleteExplainability
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeleteExplainability" target="_top">AWS
* API Documentation</a>
*/
@Override
public DeleteExplainabilityResult deleteExplainability(DeleteExplainabilityRequest request) {
request = beforeClientExecution(request);
return executeDeleteExplainability(request);
}
    /**
     * Internal implementation of the {@code DeleteExplainability} operation: marshalls the request, binds request
     * metrics and handler context (endpoint, signing region, service and operation ids), invokes the service, and
     * unmarshalls the JSON response. Metric events are opened and closed in try/finally so timing is recorded even
     * when marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DeleteExplainabilityResult executeDeleteExplainability(DeleteExplainabilityRequest deleteExplainabilityRequest) {
        ExecutionContext executionContext = createExecutionContext(deleteExplainabilityRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteExplainabilityRequest> request = null;
        Response<DeleteExplainabilityResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteExplainabilityRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deleteExplainabilityRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteExplainability");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DeleteExplainabilityResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DeleteExplainabilityResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes an Explainability export.
* </p>
*
* @param deleteExplainabilityExportRequest
* @return Result of the DeleteExplainabilityExport operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeleteExplainabilityExport
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeleteExplainabilityExport"
* target="_top">AWS API Documentation</a>
*/
@Override
public DeleteExplainabilityExportResult deleteExplainabilityExport(DeleteExplainabilityExportRequest request) {
request = beforeClientExecution(request);
return executeDeleteExplainabilityExport(request);
}
    /**
     * Internal implementation of the {@code DeleteExplainabilityExport} operation: marshalls the request, binds
     * request metrics and handler context (endpoint, signing region, service and operation ids), invokes the service,
     * and unmarshalls the JSON response. Metric events are opened and closed in try/finally so timing is recorded
     * even when marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DeleteExplainabilityExportResult executeDeleteExplainabilityExport(DeleteExplainabilityExportRequest deleteExplainabilityExportRequest) {
        ExecutionContext executionContext = createExecutionContext(deleteExplainabilityExportRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteExplainabilityExportRequest> request = null;
        Response<DeleteExplainabilityExportResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteExplainabilityExportRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(deleteExplainabilityExportRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteExplainabilityExport");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DeleteExplainabilityExportResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new DeleteExplainabilityExportResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes a forecast created using the <a>CreateForecast</a> operation. You can delete only forecasts that have a
* status of <code>ACTIVE</code> or <code>CREATE_FAILED</code>. To get the status, use the <a>DescribeForecast</a>
* operation.
* </p>
* <p>
* You can't delete a forecast while it is being exported. After a forecast is deleted, you can no longer query the
* forecast.
* </p>
*
* @param deleteForecastRequest
* @return Result of the DeleteForecast operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeleteForecast
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeleteForecast" target="_top">AWS API
* Documentation</a>
*/
@Override
public DeleteForecastResult deleteForecast(DeleteForecastRequest request) {
request = beforeClientExecution(request);
return executeDeleteForecast(request);
}
    /**
     * Internal implementation of the {@code DeleteForecast} operation: marshalls the request, binds request metrics
     * and handler context (endpoint, signing region, service and operation ids), invokes the service, and unmarshalls
     * the JSON response. Metric events are opened and closed in try/finally so timing is recorded even when
     * marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DeleteForecastResult executeDeleteForecast(DeleteForecastRequest deleteForecastRequest) {
        ExecutionContext executionContext = createExecutionContext(deleteForecastRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteForecastRequest> request = null;
        Response<DeleteForecastResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteForecastRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deleteForecastRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteForecast");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DeleteForecastResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DeleteForecastResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes a forecast export job created using the <a>CreateForecastExportJob</a> operation. You can delete only
* export jobs that have a status of <code>ACTIVE</code> or <code>CREATE_FAILED</code>. To get the status, use the
* <a>DescribeForecastExportJob</a> operation.
* </p>
*
* @param deleteForecastExportJobRequest
* @return Result of the DeleteForecastExportJob operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeleteForecastExportJob
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeleteForecastExportJob"
* target="_top">AWS API Documentation</a>
*/
@Override
public DeleteForecastExportJobResult deleteForecastExportJob(DeleteForecastExportJobRequest request) {
request = beforeClientExecution(request);
return executeDeleteForecastExportJob(request);
}
    /**
     * Internal implementation of the {@code DeleteForecastExportJob} operation: marshalls the request, binds request
     * metrics and handler context (endpoint, signing region, service and operation ids), invokes the service, and
     * unmarshalls the JSON response. Metric events are opened and closed in try/finally so timing is recorded even
     * when marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DeleteForecastExportJobResult executeDeleteForecastExportJob(DeleteForecastExportJobRequest deleteForecastExportJobRequest) {
        ExecutionContext executionContext = createExecutionContext(deleteForecastExportJobRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteForecastExportJobRequest> request = null;
        Response<DeleteForecastExportJobResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteForecastExportJobRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(deleteForecastExportJobRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteForecastExportJob");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DeleteForecastExportJobResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new DeleteForecastExportJobResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes a predictor created using the <a>DescribePredictor</a> or <a>CreatePredictor</a> operations. You can
* delete only predictor that have a status of <code>ACTIVE</code> or <code>CREATE_FAILED</code>. To get the status,
* use the <a>DescribePredictor</a> operation.
* </p>
*
* @param deletePredictorRequest
* @return Result of the DeletePredictor operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeletePredictor
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeletePredictor" target="_top">AWS API
* Documentation</a>
*/
@Override
public DeletePredictorResult deletePredictor(DeletePredictorRequest request) {
request = beforeClientExecution(request);
return executeDeletePredictor(request);
}
    /**
     * Internal implementation of the {@code DeletePredictor} operation: marshalls the request, binds request metrics
     * and handler context (endpoint, signing region, service and operation ids), invokes the service, and unmarshalls
     * the JSON response. Metric events are opened and closed in try/finally so timing is recorded even when
     * marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DeletePredictorResult executeDeletePredictor(DeletePredictorRequest deletePredictorRequest) {
        ExecutionContext executionContext = createExecutionContext(deletePredictorRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeletePredictorRequest> request = null;
        Response<DeletePredictorResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeletePredictorRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deletePredictorRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeletePredictor");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DeletePredictorResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DeletePredictorResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes a predictor backtest export job.
* </p>
*
* @param deletePredictorBacktestExportJobRequest
* @return Result of the DeletePredictorBacktestExportJob operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeletePredictorBacktestExportJob
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeletePredictorBacktestExportJob"
* target="_top">AWS API Documentation</a>
*/
@Override
public DeletePredictorBacktestExportJobResult deletePredictorBacktestExportJob(DeletePredictorBacktestExportJobRequest request) {
request = beforeClientExecution(request);
return executeDeletePredictorBacktestExportJob(request);
}
    /**
     * Internal implementation of the {@code DeletePredictorBacktestExportJob} operation: marshalls the request, binds
     * request metrics and handler context (endpoint, signing region, service and operation ids), invokes the service,
     * and unmarshalls the JSON response. Metric events are opened and closed in try/finally so timing is recorded
     * even when marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DeletePredictorBacktestExportJobResult executeDeletePredictorBacktestExportJob(
            DeletePredictorBacktestExportJobRequest deletePredictorBacktestExportJobRequest) {
        ExecutionContext executionContext = createExecutionContext(deletePredictorBacktestExportJobRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeletePredictorBacktestExportJobRequest> request = null;
        Response<DeletePredictorBacktestExportJobResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeletePredictorBacktestExportJobRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(deletePredictorBacktestExportJobRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeletePredictorBacktestExportJob");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DeletePredictorBacktestExportJobResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new DeletePredictorBacktestExportJobResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Deletes an entire resource tree. This operation will delete the parent resource and its child resources.
* </p>
* <p>
* Child resources are resources that were created from another resource. For example, when a forecast is generated
* from a predictor, the forecast is the child resource and the predictor is the parent resource.
* </p>
* <p>
* Amazon Forecast resources possess the following parent-child resource hierarchies:
* </p>
* <ul>
* <li>
* <p>
* <b>Dataset</b>: dataset import jobs
* </p>
* </li>
* <li>
* <p>
* <b>Dataset Group</b>: predictors, predictor backtest export jobs, forecasts, forecast export jobs
* </p>
* </li>
* <li>
* <p>
* <b>Predictor</b>: predictor backtest export jobs, forecasts, forecast export jobs
* </p>
* </li>
* <li>
* <p>
* <b>Forecast</b>: forecast export jobs
* </p>
* </li>
* </ul>
* <note>
* <p>
* <code>DeleteResourceTree</code> will only delete Amazon Forecast resources, and will not delete datasets or
* exported files stored in Amazon S3.
* </p>
* </note>
*
* @param deleteResourceTreeRequest
* @return Result of the DeleteResourceTree operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.DeleteResourceTree
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DeleteResourceTree" target="_top">AWS
* API Documentation</a>
*/
@Override
public DeleteResourceTreeResult deleteResourceTree(DeleteResourceTreeRequest request) {
request = beforeClientExecution(request);
return executeDeleteResourceTree(request);
}
    /**
     * Internal implementation of the {@code DeleteResourceTree} operation: marshalls the request, binds request
     * metrics and handler context (endpoint, signing region, service and operation ids), invokes the service, and
     * unmarshalls the JSON response. Metric events are opened and closed in try/finally so timing is recorded even
     * when marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DeleteResourceTreeResult executeDeleteResourceTree(DeleteResourceTreeRequest deleteResourceTreeRequest) {
        ExecutionContext executionContext = createExecutionContext(deleteResourceTreeRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteResourceTreeRequest> request = null;
        Response<DeleteResourceTreeResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteResourceTreeRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deleteResourceTreeRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteResourceTree");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DeleteResourceTreeResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DeleteResourceTreeResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Describes a predictor created using the CreateAutoPredictor operation.
* </p>
*
* @param describeAutoPredictorRequest
* @return Result of the DescribeAutoPredictor operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribeAutoPredictor
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribeAutoPredictor" target="_top">AWS
* API Documentation</a>
*/
@Override
public DescribeAutoPredictorResult describeAutoPredictor(DescribeAutoPredictorRequest request) {
request = beforeClientExecution(request);
return executeDescribeAutoPredictor(request);
}
    /**
     * Internal implementation of the {@code DescribeAutoPredictor} operation: marshalls the request, binds request
     * metrics and handler context (endpoint, signing region, service and operation ids), invokes the service, and
     * unmarshalls the JSON response. Metric events are opened and closed in try/finally so timing is recorded even
     * when marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DescribeAutoPredictorResult executeDescribeAutoPredictor(DescribeAutoPredictorRequest describeAutoPredictorRequest) {
        ExecutionContext executionContext = createExecutionContext(describeAutoPredictorRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DescribeAutoPredictorRequest> request = null;
        Response<DescribeAutoPredictorResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DescribeAutoPredictorRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describeAutoPredictorRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribeAutoPredictor");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DescribeAutoPredictorResult>> responseHandler = protocolFactory
                    .createResponseHandler(new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                            new DescribeAutoPredictorResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Describes an Amazon Forecast dataset created using the <a>CreateDataset</a> operation.
* </p>
* <p>
* In addition to listing the parameters specified in the <code>CreateDataset</code> request, this operation
* includes the following dataset properties:
* </p>
* <ul>
* <li>
* <p>
* <code>CreationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>LastModificationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>Status</code>
* </p>
* </li>
* </ul>
*
* @param describeDatasetRequest
* @return Result of the DescribeDataset operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribeDataset
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribeDataset" target="_top">AWS API
* Documentation</a>
*/
@Override
public DescribeDatasetResult describeDataset(DescribeDatasetRequest request) {
request = beforeClientExecution(request);
return executeDescribeDataset(request);
}
    /**
     * Internal implementation of the {@code DescribeDataset} operation: marshalls the request, binds request metrics
     * and handler context (endpoint, signing region, service and operation ids), invokes the service, and unmarshalls
     * the JSON response. Metric events are opened and closed in try/finally so timing is recorded even when
     * marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DescribeDatasetResult executeDescribeDataset(DescribeDatasetRequest describeDatasetRequest) {
        ExecutionContext executionContext = createExecutionContext(describeDatasetRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DescribeDatasetRequest> request = null;
        Response<DescribeDatasetResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DescribeDatasetRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describeDatasetRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribeDataset");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DescribeDatasetResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeDatasetResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Describes a dataset group created using the <a>CreateDatasetGroup</a> operation.
* </p>
* <p>
* In addition to listing the parameters provided in the <code>CreateDatasetGroup</code> request, this operation
* includes the following properties:
* </p>
* <ul>
* <li>
* <p>
* <code>DatasetArns</code> - The datasets belonging to the group.
* </p>
* </li>
* <li>
* <p>
* <code>CreationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>LastModificationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>Status</code>
* </p>
* </li>
* </ul>
*
* @param describeDatasetGroupRequest
* @return Result of the DescribeDatasetGroup operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribeDatasetGroup
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribeDatasetGroup" target="_top">AWS
* API Documentation</a>
*/
@Override
public DescribeDatasetGroupResult describeDatasetGroup(DescribeDatasetGroupRequest request) {
request = beforeClientExecution(request);
return executeDescribeDatasetGroup(request);
}
    /**
     * Internal implementation of the {@code DescribeDatasetGroup} operation: marshalls the request, binds request
     * metrics and handler context (endpoint, signing region, service and operation ids), invokes the service, and
     * unmarshalls the JSON response. Metric events are opened and closed in try/finally so timing is recorded even
     * when marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DescribeDatasetGroupResult executeDescribeDatasetGroup(DescribeDatasetGroupRequest describeDatasetGroupRequest) {
        ExecutionContext executionContext = createExecutionContext(describeDatasetGroupRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DescribeDatasetGroupRequest> request = null;
        Response<DescribeDatasetGroupResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DescribeDatasetGroupRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describeDatasetGroupRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribeDatasetGroup");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DescribeDatasetGroupResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeDatasetGroupResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Describes a dataset import job created using the <a>CreateDatasetImportJob</a> operation.
* </p>
* <p>
* In addition to listing the parameters provided in the <code>CreateDatasetImportJob</code> request, this operation
* includes the following properties:
* </p>
* <ul>
* <li>
* <p>
* <code>CreationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>LastModificationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>DataSize</code>
* </p>
* </li>
* <li>
* <p>
* <code>FieldStatistics</code>
* </p>
* </li>
* <li>
* <p>
* <code>Status</code>
* </p>
* </li>
* <li>
* <p>
* <code>Message</code> - If an error occurred, information about the error.
* </p>
* </li>
* </ul>
*
* @param describeDatasetImportJobRequest
* @return Result of the DescribeDatasetImportJob operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribeDatasetImportJob
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribeDatasetImportJob"
* target="_top">AWS API Documentation</a>
*/
@Override
public DescribeDatasetImportJobResult describeDatasetImportJob(DescribeDatasetImportJobRequest request) {
request = beforeClientExecution(request);
return executeDescribeDatasetImportJob(request);
}
    /**
     * Internal implementation of the {@code DescribeDatasetImportJob} operation: marshalls the request, binds request
     * metrics and handler context (endpoint, signing region, service and operation ids), invokes the service, and
     * unmarshalls the JSON response. Metric events are opened and closed in try/finally so timing is recorded even
     * when marshalling or the remote call fails.
     */
    @SdkInternalApi
    final DescribeDatasetImportJobResult executeDescribeDatasetImportJob(DescribeDatasetImportJobRequest describeDatasetImportJobRequest) {
        ExecutionContext executionContext = createExecutionContext(describeDatasetImportJobRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DescribeDatasetImportJobRequest> request = null;
        Response<DescribeDatasetImportJobResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DescribeDatasetImportJobRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(describeDatasetImportJobRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
                request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribeDatasetImportJob");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<DescribeDatasetImportJobResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new DescribeDatasetImportJobResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the client-execution metric scope, even on failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* <p>
* Describes an Explainability resource created using the <a>CreateExplainability</a> operation.
* </p>
*
* @param describeExplainabilityRequest
* @return Result of the DescribeExplainability operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribeExplainability
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribeExplainability"
* target="_top">AWS API Documentation</a>
*/
@Override
public DescribeExplainabilityResult describeExplainability(DescribeExplainabilityRequest request) {
request = beforeClientExecution(request);
return executeDescribeExplainability(request);
}
@SdkInternalApi
final DescribeExplainabilityResult executeDescribeExplainability(DescribeExplainabilityRequest describeExplainabilityRequest) {
ExecutionContext executionContext = createExecutionContext(describeExplainabilityRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<DescribeExplainabilityRequest> request = null;
Response<DescribeExplainabilityResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new DescribeExplainabilityRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describeExplainabilityRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribeExplainability");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<DescribeExplainabilityResult>> responseHandler = protocolFactory.createResponseHandler(
new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
new DescribeExplainabilityResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Describes an Explainability export created using the <a>CreateExplainabilityExport</a> operation.
* </p>
*
* @param describeExplainabilityExportRequest
* @return Result of the DescribeExplainabilityExport operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribeExplainabilityExport
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribeExplainabilityExport"
* target="_top">AWS API Documentation</a>
*/
@Override
public DescribeExplainabilityExportResult describeExplainabilityExport(DescribeExplainabilityExportRequest request) {
request = beforeClientExecution(request);
return executeDescribeExplainabilityExport(request);
}
@SdkInternalApi
final DescribeExplainabilityExportResult executeDescribeExplainabilityExport(DescribeExplainabilityExportRequest describeExplainabilityExportRequest) {
ExecutionContext executionContext = createExecutionContext(describeExplainabilityExportRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<DescribeExplainabilityExportRequest> request = null;
Response<DescribeExplainabilityExportResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new DescribeExplainabilityExportRequestProtocolMarshaller(protocolFactory).marshall(super
.beforeMarshalling(describeExplainabilityExportRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribeExplainabilityExport");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<DescribeExplainabilityExportResult>> responseHandler = protocolFactory.createResponseHandler(
new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
new DescribeExplainabilityExportResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Describes a forecast created using the <a>CreateForecast</a> operation.
* </p>
* <p>
* In addition to listing the properties provided in the <code>CreateForecast</code> request, this operation lists
* the following properties:
* </p>
* <ul>
* <li>
* <p>
* <code>DatasetGroupArn</code> - The dataset group that provided the training data.
* </p>
* </li>
* <li>
* <p>
* <code>CreationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>LastModificationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>Status</code>
* </p>
* </li>
* <li>
* <p>
* <code>Message</code> - If an error occurred, information about the error.
* </p>
* </li>
* </ul>
*
* @param describeForecastRequest
* @return Result of the DescribeForecast operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribeForecast
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribeForecast" target="_top">AWS API
* Documentation</a>
*/
@Override
public DescribeForecastResult describeForecast(DescribeForecastRequest request) {
request = beforeClientExecution(request);
return executeDescribeForecast(request);
}
@SdkInternalApi
final DescribeForecastResult executeDescribeForecast(DescribeForecastRequest describeForecastRequest) {
ExecutionContext executionContext = createExecutionContext(describeForecastRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<DescribeForecastRequest> request = null;
Response<DescribeForecastResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new DescribeForecastRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describeForecastRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribeForecast");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<DescribeForecastResult>> responseHandler = protocolFactory.createResponseHandler(
new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeForecastResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Describes a forecast export job created using the <a>CreateForecastExportJob</a> operation.
* </p>
* <p>
* In addition to listing the properties provided by the user in the <code>CreateForecastExportJob</code> request,
* this operation lists the following properties:
* </p>
* <ul>
* <li>
* <p>
* <code>CreationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>LastModificationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>Status</code>
* </p>
* </li>
* <li>
* <p>
* <code>Message</code> - If an error occurred, information about the error.
* </p>
* </li>
* </ul>
*
* @param describeForecastExportJobRequest
* @return Result of the DescribeForecastExportJob operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribeForecastExportJob
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribeForecastExportJob"
* target="_top">AWS API Documentation</a>
*/
@Override
public DescribeForecastExportJobResult describeForecastExportJob(DescribeForecastExportJobRequest request) {
request = beforeClientExecution(request);
return executeDescribeForecastExportJob(request);
}
@SdkInternalApi
final DescribeForecastExportJobResult executeDescribeForecastExportJob(DescribeForecastExportJobRequest describeForecastExportJobRequest) {
ExecutionContext executionContext = createExecutionContext(describeForecastExportJobRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<DescribeForecastExportJobRequest> request = null;
Response<DescribeForecastExportJobResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new DescribeForecastExportJobRequestProtocolMarshaller(protocolFactory).marshall(super
.beforeMarshalling(describeForecastExportJobRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribeForecastExportJob");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<DescribeForecastExportJobResult>> responseHandler = protocolFactory.createResponseHandler(
new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
new DescribeForecastExportJobResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <note>
* <p>
* This operation is only valid for legacy predictors created with CreatePredictor. If you are not using a legacy
* predictor, use <a>DescribeAutoPredictor</a>.
* </p>
* </note>
* <p>
* Describes a predictor created using the <a>CreatePredictor</a> operation.
* </p>
* <p>
* In addition to listing the properties provided in the <code>CreatePredictor</code> request, this operation lists
* the following properties:
* </p>
* <ul>
* <li>
* <p>
* <code>DatasetImportJobArns</code> - The dataset import jobs used to import training data.
* </p>
* </li>
* <li>
* <p>
* <code>AutoMLAlgorithmArns</code> - If AutoML is performed, the algorithms that were evaluated.
* </p>
* </li>
* <li>
* <p>
* <code>CreationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>LastModificationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>Status</code>
* </p>
* </li>
* <li>
* <p>
* <code>Message</code> - If an error occurred, information about the error.
* </p>
* </li>
* </ul>
*
* @param describePredictorRequest
* @return Result of the DescribePredictor operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribePredictor
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribePredictor" target="_top">AWS API
* Documentation</a>
*/
@Override
public DescribePredictorResult describePredictor(DescribePredictorRequest request) {
request = beforeClientExecution(request);
return executeDescribePredictor(request);
}
@SdkInternalApi
final DescribePredictorResult executeDescribePredictor(DescribePredictorRequest describePredictorRequest) {
ExecutionContext executionContext = createExecutionContext(describePredictorRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<DescribePredictorRequest> request = null;
Response<DescribePredictorResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new DescribePredictorRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describePredictorRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribePredictor");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<DescribePredictorResult>> responseHandler = protocolFactory.createResponseHandler(
new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribePredictorResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Describes a predictor backtest export job created using the <a>CreatePredictorBacktestExportJob</a> operation.
* </p>
* <p>
* In addition to listing the properties provided by the user in the <code>CreatePredictorBacktestExportJob</code>
* request, this operation lists the following properties:
* </p>
* <ul>
* <li>
* <p>
* <code>CreationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>LastModificationTime</code>
* </p>
* </li>
* <li>
* <p>
* <code>Status</code>
* </p>
* </li>
* <li>
* <p>
* <code>Message</code> (if an error occurred)
* </p>
* </li>
* </ul>
*
* @param describePredictorBacktestExportJobRequest
* @return Result of the DescribePredictorBacktestExportJob operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.DescribePredictorBacktestExportJob
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/DescribePredictorBacktestExportJob"
* target="_top">AWS API Documentation</a>
*/
@Override
public DescribePredictorBacktestExportJobResult describePredictorBacktestExportJob(DescribePredictorBacktestExportJobRequest request) {
request = beforeClientExecution(request);
return executeDescribePredictorBacktestExportJob(request);
}
@SdkInternalApi
final DescribePredictorBacktestExportJobResult executeDescribePredictorBacktestExportJob(
DescribePredictorBacktestExportJobRequest describePredictorBacktestExportJobRequest) {
ExecutionContext executionContext = createExecutionContext(describePredictorBacktestExportJobRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<DescribePredictorBacktestExportJobRequest> request = null;
Response<DescribePredictorBacktestExportJobResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new DescribePredictorBacktestExportJobRequestProtocolMarshaller(protocolFactory).marshall(super
.beforeMarshalling(describePredictorBacktestExportJobRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DescribePredictorBacktestExportJob");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<DescribePredictorBacktestExportJobResult>> responseHandler = protocolFactory.createResponseHandler(
new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
new DescribePredictorBacktestExportJobResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Provides metrics on the accuracy of the models that were trained by the <a>CreatePredictor</a> operation. Use
* metrics to see how well the model performed and to decide whether to use the predictor to generate a forecast.
* For more information, see <a href="https://docs.aws.amazon.com/forecast/latest/dg/metrics.html">Predictor
* Metrics</a>.
* </p>
* <p>
* This operation generates metrics for each backtest window that was evaluated. The number of backtest windows (
* <code>NumberOfBacktestWindows</code>) is specified using the <a>EvaluationParameters</a> object, which is
* optionally included in the <code>CreatePredictor</code> request. If <code>NumberOfBacktestWindows</code> isn't
* specified, the number defaults to one.
* </p>
* <p>
* The parameters of the <code>filling</code> method determine which items contribute to the metrics. If you want
* all items to contribute, specify <code>zero</code>. If you want only those items that have complete data in the
* range being evaluated to contribute, specify <code>nan</code>. For more information, see
* <a>FeaturizationMethod</a>.
* </p>
* <note>
* <p>
* Before you can get accuracy metrics, the <code>Status</code> of the predictor must be <code>ACTIVE</code>,
* signifying that training has completed. To get the status, use the <a>DescribePredictor</a> operation.
* </p>
* </note>
*
* @param getAccuracyMetricsRequest
* @return Result of the GetAccuracyMetrics operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.GetAccuracyMetrics
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/GetAccuracyMetrics" target="_top">AWS
* API Documentation</a>
*/
@Override
public GetAccuracyMetricsResult getAccuracyMetrics(GetAccuracyMetricsRequest request) {
request = beforeClientExecution(request);
return executeGetAccuracyMetrics(request);
}
@SdkInternalApi
final GetAccuracyMetricsResult executeGetAccuracyMetrics(GetAccuracyMetricsRequest getAccuracyMetricsRequest) {
ExecutionContext executionContext = createExecutionContext(getAccuracyMetricsRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<GetAccuracyMetricsRequest> request = null;
Response<GetAccuracyMetricsResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new GetAccuracyMetricsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(getAccuracyMetricsRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "GetAccuracyMetrics");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<GetAccuracyMetricsResult>> responseHandler = protocolFactory.createResponseHandler(
new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new GetAccuracyMetricsResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Returns a list of dataset groups created using the <a>CreateDatasetGroup</a> operation. For each dataset group,
* this operation returns a summary of its properties, including its Amazon Resource Name (ARN). You can retrieve
* the complete set of properties by using the dataset group ARN with the <a>DescribeDatasetGroup</a> operation.
* </p>
*
* @param listDatasetGroupsRequest
* @return Result of the ListDatasetGroups operation returned by the service.
* @throws InvalidNextTokenException
* The token is not valid. Tokens expire after 24 hours.
* @sample AmazonForecast.ListDatasetGroups
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListDatasetGroups" target="_top">AWS API
* Documentation</a>
*/
@Override
public ListDatasetGroupsResult listDatasetGroups(ListDatasetGroupsRequest request) {
request = beforeClientExecution(request);
return executeListDatasetGroups(request);
}
@SdkInternalApi
final ListDatasetGroupsResult executeListDatasetGroups(ListDatasetGroupsRequest listDatasetGroupsRequest) {
ExecutionContext executionContext = createExecutionContext(listDatasetGroupsRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<ListDatasetGroupsRequest> request = null;
Response<ListDatasetGroupsResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new ListDatasetGroupsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listDatasetGroupsRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListDatasetGroups");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<ListDatasetGroupsResult>> responseHandler = protocolFactory.createResponseHandler(
new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListDatasetGroupsResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Returns a list of dataset import jobs created using the <a>CreateDatasetImportJob</a> operation. For each import
* job, this operation returns a summary of its properties, including its Amazon Resource Name (ARN). You can
* retrieve the complete set of properties by using the ARN with the <a>DescribeDatasetImportJob</a> operation. You
* can filter the list by providing an array of <a>Filter</a> objects.
* </p>
*
* @param listDatasetImportJobsRequest
* @return Result of the ListDatasetImportJobs operation returned by the service.
* @throws InvalidNextTokenException
* The token is not valid. Tokens expire after 24 hours.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.ListDatasetImportJobs
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListDatasetImportJobs" target="_top">AWS
* API Documentation</a>
*/
@Override
public ListDatasetImportJobsResult listDatasetImportJobs(ListDatasetImportJobsRequest request) {
request = beforeClientExecution(request);
return executeListDatasetImportJobs(request);
}
@SdkInternalApi
final ListDatasetImportJobsResult executeListDatasetImportJobs(ListDatasetImportJobsRequest listDatasetImportJobsRequest) {
ExecutionContext executionContext = createExecutionContext(listDatasetImportJobsRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<ListDatasetImportJobsRequest> request = null;
Response<ListDatasetImportJobsResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new ListDatasetImportJobsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listDatasetImportJobsRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListDatasetImportJobs");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<ListDatasetImportJobsResult>> responseHandler = protocolFactory
.createResponseHandler(new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
new ListDatasetImportJobsResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Returns a list of datasets created using the <a>CreateDataset</a> operation. For each dataset, a summary of its
* properties, including its Amazon Resource Name (ARN), is returned. To retrieve the complete set of properties,
* use the ARN with the <a>DescribeDataset</a> operation.
* </p>
*
* @param listDatasetsRequest
* @return Result of the ListDatasets operation returned by the service.
* @throws InvalidNextTokenException
* The token is not valid. Tokens expire after 24 hours.
* @sample AmazonForecast.ListDatasets
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListDatasets" target="_top">AWS API
* Documentation</a>
*/
@Override
public ListDatasetsResult listDatasets(ListDatasetsRequest request) {
request = beforeClientExecution(request);
return executeListDatasets(request);
}
@SdkInternalApi
final ListDatasetsResult executeListDatasets(ListDatasetsRequest listDatasetsRequest) {
ExecutionContext executionContext = createExecutionContext(listDatasetsRequest);
AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<ListDatasetsRequest> request = null;
Response<ListDatasetsResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new ListDatasetsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listDatasetsRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListDatasets");
request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
HttpResponseHandler<AmazonWebServiceResponse<ListDatasetsResult>> responseHandler = protocolFactory.createResponseHandler(
new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListDatasetsResultJsonUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Returns a list of Explainability resources created using the <a>CreateExplainability</a> operation. This
* operation returns a summary for each Explainability. You can filter the list using an array of <a>Filter</a>
* objects.
* </p>
* <p>
* To retrieve the complete set of properties for a particular Explainability resource, use the ARN with the
* <a>DescribeExplainability</a> operation.
* </p>
*
* @param listExplainabilitiesRequest
* @return Result of the ListExplainabilities operation returned by the service.
* @throws InvalidNextTokenException
* The token is not valid. Tokens expire after 24 hours.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.ListExplainabilities
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListExplainabilities" target="_top">AWS
* API Documentation</a>
*/
@Override
public ListExplainabilitiesResult listExplainabilities(ListExplainabilitiesRequest request) {
request = beforeClientExecution(request);
return executeListExplainabilities(request);
}
/**
 * Internal implementation of {@code ListExplainabilities}: marshals the request,
 * attaches handler context and per-request metrics, invokes the service, and
 * unmarshals the JSON response. Metric events are closed in {@code finally}
 * blocks so timing is recorded even when marshalling or invocation throws.
 */
@SdkInternalApi
final ListExplainabilitiesResult executeListExplainabilities(ListExplainabilitiesRequest listExplainabilitiesRequest) {
    ExecutionContext executionContext = createExecutionContext(listExplainabilitiesRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<ListExplainabilitiesRequest> request = null;
    Response<ListExplainabilitiesResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new ListExplainabilitiesRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listExplainabilitiesRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListExplainabilities");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<ListExplainabilitiesResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListExplainabilitiesResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Returns a list of Explainability exports created using the <a>CreateExplainabilityExport</a> operation. This
* operation returns a summary for each Explainability export. You can filter the list using an array of
* <a>Filter</a> objects.
* </p>
* <p>
* To retrieve the complete set of properties for a particular Explainability export, use the ARN with the
* <a>DescribeExplainability</a> operation.
* </p>
*
* @param listExplainabilityExportsRequest
* @return Result of the ListExplainabilityExports operation returned by the service.
* @throws InvalidNextTokenException
* The token is not valid. Tokens expire after 24 hours.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.ListExplainabilityExports
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListExplainabilityExports"
* target="_top">AWS API Documentation</a>
*/
@Override
public ListExplainabilityExportsResult listExplainabilityExports(ListExplainabilityExportsRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeListExplainabilityExports(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code ListExplainabilityExports}: marshals the
 * request, attaches handler context and per-request metrics, invokes the
 * service, and unmarshals the JSON response. Metric events are closed in
 * {@code finally} blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final ListExplainabilityExportsResult executeListExplainabilityExports(ListExplainabilityExportsRequest listExplainabilityExportsRequest) {
    ExecutionContext executionContext = createExecutionContext(listExplainabilityExportsRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<ListExplainabilityExportsRequest> request = null;
    Response<ListExplainabilityExportsResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new ListExplainabilityExportsRequestProtocolMarshaller(protocolFactory).marshall(super
                .beforeMarshalling(listExplainabilityExportsRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListExplainabilityExports");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<ListExplainabilityExportsResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
            new ListExplainabilityExportsResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Returns a list of forecast export jobs created using the <a>CreateForecastExportJob</a> operation. For each
* forecast export job, this operation returns a summary of its properties, including its Amazon Resource Name
* (ARN). To retrieve the complete set of properties, use the ARN with the <a>DescribeForecastExportJob</a>
* operation. You can filter the list using an array of <a>Filter</a> objects.
* </p>
*
* @param listForecastExportJobsRequest
* @return Result of the ListForecastExportJobs operation returned by the service.
* @throws InvalidNextTokenException
* The token is not valid. Tokens expire after 24 hours.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.ListForecastExportJobs
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListForecastExportJobs"
* target="_top">AWS API Documentation</a>
*/
@Override
public ListForecastExportJobsResult listForecastExportJobs(ListForecastExportJobsRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeListForecastExportJobs(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code ListForecastExportJobs}: marshals the
 * request, attaches handler context and per-request metrics, invokes the
 * service, and unmarshals the JSON response. Metric events are closed in
 * {@code finally} blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final ListForecastExportJobsResult executeListForecastExportJobs(ListForecastExportJobsRequest listForecastExportJobsRequest) {
    ExecutionContext executionContext = createExecutionContext(listForecastExportJobsRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<ListForecastExportJobsRequest> request = null;
    Response<ListForecastExportJobsResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new ListForecastExportJobsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listForecastExportJobsRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListForecastExportJobs");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<ListForecastExportJobsResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
            new ListForecastExportJobsResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Returns a list of forecasts created using the <a>CreateForecast</a> operation. For each forecast, this operation
* returns a summary of its properties, including its Amazon Resource Name (ARN). To retrieve the complete set of
* properties, specify the ARN with the <a>DescribeForecast</a> operation. You can filter the list using an array of
* <a>Filter</a> objects.
* </p>
*
* @param listForecastsRequest
* @return Result of the ListForecasts operation returned by the service.
* @throws InvalidNextTokenException
* The token is not valid. Tokens expire after 24 hours.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.ListForecasts
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListForecasts" target="_top">AWS API
* Documentation</a>
*/
@Override
public ListForecastsResult listForecasts(ListForecastsRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeListForecasts(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code ListForecasts}: marshals the request,
 * attaches handler context and per-request metrics, invokes the service, and
 * unmarshals the JSON response. Metric events are closed in {@code finally}
 * blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final ListForecastsResult executeListForecasts(ListForecastsRequest listForecastsRequest) {
    ExecutionContext executionContext = createExecutionContext(listForecastsRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<ListForecastsRequest> request = null;
    Response<ListForecastsResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new ListForecastsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listForecastsRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListForecasts");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<ListForecastsResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListForecastsResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Returns a list of predictor backtest export jobs created using the <a>CreatePredictorBacktestExportJob</a>
* operation. This operation returns a summary for each backtest export job. You can filter the list using an array
* of <a>Filter</a> objects.
* </p>
* <p>
* To retrieve the complete set of properties for a particular backtest export job, use the ARN with the
* <a>DescribePredictorBacktestExportJob</a> operation.
* </p>
*
* @param listPredictorBacktestExportJobsRequest
* @return Result of the ListPredictorBacktestExportJobs operation returned by the service.
* @throws InvalidNextTokenException
* The token is not valid. Tokens expire after 24 hours.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.ListPredictorBacktestExportJobs
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListPredictorBacktestExportJobs"
* target="_top">AWS API Documentation</a>
*/
@Override
public ListPredictorBacktestExportJobsResult listPredictorBacktestExportJobs(ListPredictorBacktestExportJobsRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeListPredictorBacktestExportJobs(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code ListPredictorBacktestExportJobs}: marshals
 * the request, attaches handler context and per-request metrics, invokes the
 * service, and unmarshals the JSON response. Metric events are closed in
 * {@code finally} blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final ListPredictorBacktestExportJobsResult executeListPredictorBacktestExportJobs(
        ListPredictorBacktestExportJobsRequest listPredictorBacktestExportJobsRequest) {
    ExecutionContext executionContext = createExecutionContext(listPredictorBacktestExportJobsRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<ListPredictorBacktestExportJobsRequest> request = null;
    Response<ListPredictorBacktestExportJobsResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new ListPredictorBacktestExportJobsRequestProtocolMarshaller(protocolFactory).marshall(super
                .beforeMarshalling(listPredictorBacktestExportJobsRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListPredictorBacktestExportJobs");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<ListPredictorBacktestExportJobsResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
            new ListPredictorBacktestExportJobsResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Returns a list of predictors created using the <a>CreateAutoPredictor</a> or <a>CreatePredictor</a> operations.
* For each predictor, this operation returns a summary of its properties, including its Amazon Resource Name (ARN).
* </p>
* <p>
* You can retrieve the complete set of properties by using the ARN with the <a>DescribeAutoPredictor</a> and
* <a>DescribePredictor</a> operations. You can filter the list using an array of <a>Filter</a> objects.
* </p>
*
* @param listPredictorsRequest
* @return Result of the ListPredictors operation returned by the service.
* @throws InvalidNextTokenException
* The token is not valid. Tokens expire after 24 hours.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.ListPredictors
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListPredictors" target="_top">AWS API
* Documentation</a>
*/
@Override
public ListPredictorsResult listPredictors(ListPredictorsRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeListPredictors(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code ListPredictors}: marshals the request,
 * attaches handler context and per-request metrics, invokes the service, and
 * unmarshals the JSON response. Metric events are closed in {@code finally}
 * blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final ListPredictorsResult executeListPredictors(ListPredictorsRequest listPredictorsRequest) {
    ExecutionContext executionContext = createExecutionContext(listPredictorsRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<ListPredictorsRequest> request = null;
    Response<ListPredictorsResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new ListPredictorsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listPredictorsRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListPredictors");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<ListPredictorsResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListPredictorsResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Lists the tags for an Amazon Forecast resource.
* </p>
*
* @param listTagsForResourceRequest
* @return Result of the ListTagsForResource operation returned by the service.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.ListTagsForResource
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/ListTagsForResource" target="_top">AWS
* API Documentation</a>
*/
@Override
public ListTagsForResourceResult listTagsForResource(ListTagsForResourceRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeListTagsForResource(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code ListTagsForResource}: marshals the request,
 * attaches handler context and per-request metrics, invokes the service, and
 * unmarshals the JSON response. Metric events are closed in {@code finally}
 * blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final ListTagsForResourceResult executeListTagsForResource(ListTagsForResourceRequest listTagsForResourceRequest) {
    ExecutionContext executionContext = createExecutionContext(listTagsForResourceRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<ListTagsForResourceRequest> request = null;
    Response<ListTagsForResourceResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new ListTagsForResourceRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listTagsForResourceRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "ListTagsForResource");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<ListTagsForResourceResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListTagsForResourceResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Stops a resource.
* </p>
* <p>
* The resource undergoes the following states: <code>CREATE_STOPPING</code> and <code>CREATE_STOPPED</code>. You
* cannot resume a resource once it has been stopped.
* </p>
* <p>
* This operation can be applied to the following resources (and their corresponding child resources):
* </p>
* <ul>
* <li>
* <p>
* Dataset Import Job
* </p>
* </li>
* <li>
* <p>
* Predictor Job
* </p>
* </li>
* <li>
* <p>
* Forecast Job
* </p>
* </li>
* <li>
* <p>
* Forecast Export Job
* </p>
* </li>
* <li>
* <p>
* Predictor Backtest Export Job
* </p>
* </li>
* <li>
* <p>
* Explainability Job
* </p>
* </li>
* <li>
* <p>
* Explainability Export Job
* </p>
* </li>
* </ul>
*
* @param stopResourceRequest
* @return Result of the StopResource operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @sample AmazonForecast.StopResource
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/StopResource" target="_top">AWS API
* Documentation</a>
*/
@Override
public StopResourceResult stopResource(StopResourceRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeStopResource(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code StopResource}: marshals the request,
 * attaches handler context and per-request metrics, invokes the service, and
 * unmarshals the JSON response. Metric events are closed in {@code finally}
 * blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final StopResourceResult executeStopResource(StopResourceRequest stopResourceRequest) {
    ExecutionContext executionContext = createExecutionContext(stopResourceRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<StopResourceRequest> request = null;
    Response<StopResourceResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new StopResourceRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(stopResourceRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "StopResource");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<StopResourceResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new StopResourceResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Associates the specified tags to a resource with the specified <code>resourceArn</code>. If existing tags on a
* resource are not specified in the request parameters, they are not changed. When a resource is deleted, the tags
* associated with that resource are also deleted.
* </p>
*
* @param tagResourceRequest
* @return Result of the TagResource operation returned by the service.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws LimitExceededException
* The limit on the number of resources per account has been exceeded.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.TagResource
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/TagResource" target="_top">AWS API
* Documentation</a>
*/
@Override
public TagResourceResult tagResource(TagResourceRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeTagResource(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code TagResource}: marshals the request,
 * attaches handler context and per-request metrics, invokes the service, and
 * unmarshals the JSON response. Metric events are closed in {@code finally}
 * blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final TagResourceResult executeTagResource(TagResourceRequest tagResourceRequest) {
    ExecutionContext executionContext = createExecutionContext(tagResourceRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<TagResourceRequest> request = null;
    Response<TagResourceResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new TagResourceRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(tagResourceRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "TagResource");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<TagResourceResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new TagResourceResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Deletes the specified tags from a resource.
* </p>
*
* @param untagResourceRequest
* @return Result of the UntagResource operation returned by the service.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @sample AmazonForecast.UntagResource
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/UntagResource" target="_top">AWS API
* Documentation</a>
*/
@Override
public UntagResourceResult untagResource(UntagResourceRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeUntagResource(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code UntagResource}: marshals the request,
 * attaches handler context and per-request metrics, invokes the service, and
 * unmarshals the JSON response. Metric events are closed in {@code finally}
 * blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final UntagResourceResult executeUntagResource(UntagResourceRequest untagResourceRequest) {
    ExecutionContext executionContext = createExecutionContext(untagResourceRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<UntagResourceRequest> request = null;
    Response<UntagResourceResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new UntagResourceRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(untagResourceRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "UntagResource");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<UntagResourceResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new UntagResourceResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* <p>
* Replaces the datasets in a dataset group with the specified datasets.
* </p>
* <note>
* <p>
* The <code>Status</code> of the dataset group must be <code>ACTIVE</code> before you can use the dataset group to
* create a predictor. Use the <a>DescribeDatasetGroup</a> operation to get the status.
* </p>
* </note>
*
* @param updateDatasetGroupRequest
* @return Result of the UpdateDatasetGroup operation returned by the service.
* @throws InvalidInputException
* We can't process the request because it includes an invalid value or a value that exceeds the valid
* range.
* @throws ResourceNotFoundException
* We can't find a resource with that Amazon Resource Name (ARN). Check the ARN and try again.
* @throws ResourceInUseException
* The specified resource is in use.
* @sample AmazonForecast.UpdateDatasetGroup
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/UpdateDatasetGroup" target="_top">AWS
* API Documentation</a>
*/
@Override
public UpdateDatasetGroupResult updateDatasetGroup(UpdateDatasetGroupRequest request) {
    // Run pre-execution request handlers, then delegate to the internal implementation.
    return executeUpdateDatasetGroup(beforeClientExecution(request));
}
/**
 * Internal implementation of {@code UpdateDatasetGroup}: marshals the request,
 * attaches handler context and per-request metrics, invokes the service, and
 * unmarshals the JSON response. Metric events are closed in {@code finally}
 * blocks so timing is recorded even on failure.
 */
@SdkInternalApi
final UpdateDatasetGroupResult executeUpdateDatasetGroup(UpdateDatasetGroupRequest updateDatasetGroupRequest) {
    ExecutionContext executionContext = createExecutionContext(updateDatasetGroupRequest);
    AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
    awsRequestMetrics.startEvent(Field.ClientExecuteTime);
    Request<UpdateDatasetGroupRequest> request = null;
    Response<UpdateDatasetGroupResult> response = null;
    try {
        awsRequestMetrics.startEvent(Field.RequestMarshallTime);
        try {
            request = new UpdateDatasetGroupRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(updateDatasetGroupRequest));
            // Binds the request metrics to the current request.
            request.setAWSRequestMetrics(awsRequestMetrics);
            request.addHandlerContext(HandlerContextKey.CLIENT_ENDPOINT, endpoint);
            request.addHandlerContext(HandlerContextKey.ENDPOINT_OVERRIDDEN, isEndpointOverridden());
            request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
            request.addHandlerContext(HandlerContextKey.SERVICE_ID, "forecast");
            request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "UpdateDatasetGroup");
            request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
        } finally {
            awsRequestMetrics.endEvent(Field.RequestMarshallTime);
        }
        // Response payload is plain JSON (no streaming body).
        HttpResponseHandler<AmazonWebServiceResponse<UpdateDatasetGroupResult>> responseHandler = protocolFactory.createResponseHandler(
            new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new UpdateDatasetGroupResultJsonUnmarshaller());
        response = invoke(request, responseHandler, executionContext);
        return response.getAwsResponse();
    } finally {
        // Always close out client-execution metrics, even on failure.
        endClientExecution(awsRequestMetrics, request, response);
    }
}
/**
* Returns additional metadata for a previously executed successful, request, typically used for debugging issues
* where a service isn't acting as expected. This data isn't considered part of the result data returned by an
* operation, so it's available through this separate, diagnostic interface.
* <p>
* Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic
* information for an executed request, you should use this method to retrieve it as soon as possible after
* executing the request.
*
* @param request
* The originally executed request
*
* @return The response metadata for the specified request, or null if none is available.
*/
public ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request) {
    // Delegates to the underlying HTTP client's response-metadata cache;
    // returns null once the cache entry for this request has expired.
    return client.getResponseMetadataForRequest(request);
}
/**
* Normal invoke with authentication. Credentials are required and may be overriden at the request level.
**/
private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
        ExecutionContext executionContext) {
    // No discovered endpoint and no endpoint-trait URI: the client's configured
    // endpoint will be used by doInvoke.
    return invoke(request, responseHandler, executionContext, null, null);
}
/**
* Normal invoke with authentication. Credentials are required and may be overriden at the request level.
**/
private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
        ExecutionContext executionContext, URI cachedEndpoint, URI uriFromEndpointTrait) {
    // Resolve credentials, preferring any provider set on the individual request
    // over the client-level provider, then perform the actual invocation.
    executionContext.setCredentialsProvider(CredentialUtils.getCredentialsProvider(request.getOriginalRequest(), awsCredentialsProvider));
    return doInvoke(request, responseHandler, executionContext, cachedEndpoint, uriFromEndpointTrait);
}
/**
* Invoke with no authentication. Credentials are not required and any credentials set on the client or request will
* be ignored for this operation.
**/
private <X, Y extends AmazonWebServiceRequest> Response<X> anonymousInvoke(Request<Y> request,
        HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler, ExecutionContext executionContext) {
    // Skips credential resolution entirely; the request is sent unsigned.
    return doInvoke(request, responseHandler, executionContext, null, null);
}
/**
 * Invoke the request using the http client. Assumes credentials (or lack thereof) have been
 * configured in the ExecutionContext beforehand.
 **/
private <X, Y extends AmazonWebServiceRequest> Response<X> doInvoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
        ExecutionContext executionContext, URI discoveredEndpoint, URI uriFromEndpointTrait) {
    // Endpoint precedence: discovered endpoint, then endpoint-trait URI, then the client default.
    if (discoveredEndpoint != null) {
        request.getOriginalRequest().getRequestClientOptions().appendUserAgent("endpoint-discovery");
        request.setEndpoint(discoveredEndpoint);
    } else {
        request.setEndpoint(uriFromEndpointTrait != null ? uriFromEndpointTrait : endpoint);
    }
    request.setTimeOffset(timeOffset);

    final HttpResponseHandler<AmazonServiceException> errorHandler =
            protocolFactory.createErrorResponseHandler(new JsonErrorResponseMetadata());
    return client.execute(request, responseHandler, errorHandler, executionContext);
}
@com.amazonaws.annotation.SdkInternalApi
static com.amazonaws.protocol.json.SdkJsonProtocolFactory getProtocolFactory() {
    // SDK-internal accessor for the shared marshalling protocol factory.
    return protocolFactory;
}
@Override
public void shutdown() {
    // No client-specific resources to release; base class handles the rest.
    super.shutdown();
}
}
|
3e1e3c7c04bb82552911e4eb575351c6d8e03171 | 2,069 | java | Java | src/com/rb2750/lwjgl/graphics/Texture.java | rb2750/FourCorners | c61bfc15432dbb3d7c49ad4a1fe612487788daa2 | [
"MIT"
] | null | null | null | src/com/rb2750/lwjgl/graphics/Texture.java | rb2750/FourCorners | c61bfc15432dbb3d7c49ad4a1fe612487788daa2 | [
"MIT"
] | null | null | null | src/com/rb2750/lwjgl/graphics/Texture.java | rb2750/FourCorners | c61bfc15432dbb3d7c49ad4a1fe612487788daa2 | [
"MIT"
] | null | null | null | 23.511364 | 128 | 0.58434 | 12,800 | package com.rb2750.lwjgl.graphics;
import com.rb2750.lwjgl.util.BufferUtils;
import lombok.Getter;
import lombok.Setter;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.FileInputStream;
import java.io.IOException;
import static org.lwjgl.opengl.GL11.*;
public class Texture
{
    private int width, height;

    @Getter
    private int texture;

    @Getter
    @Setter
    private float shineDamper = 1.0f;

    @Getter
    @Setter
    private float reflectivity = 0.0f;

    /**
     * Loads the image at {@code path} and uploads it to OpenGL as a 2D texture.
     *
     * @param path file-system path to an image in any format ImageIO can decode
     */
    public Texture(String path)
    {
        texture = load(path);
    }

    /**
     * Decodes the image, converts its ARGB pixels to the RGBA byte order OpenGL
     * expects, and uploads the data as a GL_TEXTURE_2D with nearest filtering.
     * On decode failure the error is printed and an empty 0x0 texture is created,
     * preserving the previous best-effort behaviour.
     *
     * @return the OpenGL texture handle
     */
    private int load(String path)
    {
        int[] pixels = null;

        // try-with-resources: the stream was previously leaked, because
        // ImageIO.read(InputStream) does not close the stream it is handed.
        try (FileInputStream in = new FileInputStream(path))
        {
            BufferedImage image = ImageIO.read(in);
            if (image == null)
            {
                // ImageIO.read returns null for unrecognised formats; the old
                // code would have thrown an uncaught NPE here.
                throw new IOException("Unsupported or corrupt image format: " + path);
            }
            width = image.getWidth();
            height = image.getHeight();
            pixels = new int[width * height];
            image.getRGB(0, 0, width, height, pixels, 0, width);
        }
        catch (IOException e)
        {
            e.printStackTrace();
        }

        if (pixels == null)
        {
            // Decode failed: fall back to an empty texture instead of crashing,
            // matching the original behaviour (width/height stay 0).
            width = 0;
            height = 0;
            pixels = new int[0];
        }

        // Swizzle ARGB -> RGBA (OpenGL expects R in the lowest byte).
        int[] data = new int[width * height];
        for (int i = 0; i < width * height; i++)
        {
            // >>> for the alpha channel: the previous signed >> sign-extended
            // any alpha >= 0x80 into the upper bits and only produced the right
            // result by accident via the later << 24.
            int a = (pixels[i] & 0xff000000) >>> 24;
            int r = (pixels[i] & 0xff0000) >> 16;
            int g = (pixels[i] & 0xff00) >> 8;
            int b = (pixels[i] & 0xff);

            data[i] = a << 24 | b << 16 | g << 8 | r;
        }

        int result = glGenTextures();
        glBindTexture(GL_TEXTURE_2D, result);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, BufferUtils.createIntBuffer(data));
        glBindTexture(GL_TEXTURE_2D, 0);
        return result;
    }

    /** Binds this texture to the GL_TEXTURE_2D target. */
    public void bind()
    {
        glBindTexture(GL_TEXTURE_2D, texture);
    }

    /** Unbinds any texture from the GL_TEXTURE_2D target. */
    public void unbind()
    {
        glBindTexture(GL_TEXTURE_2D, 0);
    }

    /** Deletes the OpenGL texture; this object must not be used afterwards. */
    public void cleanUp()
    {
        glDeleteTextures(texture);
    }
}
|
3e1e3d01b0ca6cf6846e41f1b03d274d97872bfd | 16,201 | java | Java | newrelic-agent/src/test/java/com/newrelic/agent/config/ConfigServiceTest.java | brunolellis/newrelic-java-agent | 8f0708ce0f23407f8ed1993a21e524d3c0a362e3 | [
"Apache-2.0"
] | 119 | 2020-08-15T21:38:07.000Z | 2022-03-25T12:07:11.000Z | newrelic-agent/src/test/java/com/newrelic/agent/config/ConfigServiceTest.java | brunolellis/newrelic-java-agent | 8f0708ce0f23407f8ed1993a21e524d3c0a362e3 | [
"Apache-2.0"
] | 468 | 2020-08-07T17:56:39.000Z | 2022-03-31T17:33:45.000Z | newrelic-agent/src/test/java/com/newrelic/agent/config/ConfigServiceTest.java | brunolellis/newrelic-java-agent | 8f0708ce0f23407f8ed1993a21e524d3c0a362e3 | [
"Apache-2.0"
] | 91 | 2020-08-11T14:23:08.000Z | 2022-03-31T17:28:18.000Z | 50.470405 | 147 | 0.741744 | 12,801 | /*
*
* * Copyright 2020 New Relic Corporation. All rights reserved.
* * SPDX-License-Identifier: Apache-2.0
*
*/
package com.newrelic.agent.config;
import com.google.common.collect.ImmutableMap;
import com.newrelic.agent.ConnectionConfigListener;
import com.newrelic.agent.HarvestService;
import com.newrelic.agent.IRPMService;
import com.newrelic.agent.MockHarvestService;
import com.newrelic.agent.MockRPMService;
import com.newrelic.agent.MockRPMServiceManager;
import com.newrelic.agent.MockServiceManager;
import com.newrelic.agent.instrumentation.ClassTransformerService;
import com.newrelic.agent.service.ServiceFactory;
import com.newrelic.agent.service.ServiceManager;
import org.json.simple.JSONObject;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code ConfigService}: server-side config merging on connect,
 * local-file change detection, settings sanitisation and license-key
 * deobfuscation. Uses the Mock* service implementations, so each test tears
 * down the global ServiceManager afterwards.
 */
public class ConfigServiceTest {
    /** Stops the globally registered ServiceManager so state does not leak between tests. */
    @After
    public void afterTest() throws Exception {
        ServiceManager serviceManager = ServiceFactory.getServiceManager();
        if (serviceManager != null) {
            serviceManager.stop();
        }
    }
    /**
     * Builds and starts a MockServiceManager (config + RPM + harvest services)
     * from the given settings and registers it globally via ServiceFactory.
     */
    private void createServiceManager(Map<String, Object> configMap) throws Exception {
        ConfigService configService = ConfigServiceFactory.createConfigServiceUsingSettings(configMap);
        MockServiceManager serviceManager = new MockServiceManager(configService);
        ServiceFactory.setServiceManager(serviceManager);
        MockRPMServiceManager rpmServiceManager = new MockRPMServiceManager();
        serviceManager.setRPMServiceManager(rpmServiceManager);
        MockRPMService rpmService = new MockRPMService();
        rpmService.setApplicationName("Unit Test");
        rpmServiceManager.setRPMService(rpmService);
        HarvestService harvestService = new MockHarvestService();
        serviceManager.setHarvestService(harvestService);
        configService.start();
        serviceManager.start();
    }
    /** The service must start/stop cleanly with empty config, no file and no settings. */
    @Test
    public void constructedWithoutStuff() throws Exception {
        AgentConfig emptyConfig = AgentConfigFactory.createAgentConfig(
                Collections.<String, Object>emptyMap(),
                Collections.<String, Object>emptyMap(),
                Collections.<String, Boolean>emptyMap());
        File noConfigFile = null;
        Map<String, Object> noFileMeansNoSettings = null;
        ConfigService target = new ConfigServiceImpl(emptyConfig, noConfigFile, noFileMeansNoSettings, false);
        target.start();
        target.stop();
    }
    /** Default staging config should report the service as enabled. */
    @Test
    public void isEnabled() throws Exception {
        Map<String, Object> configMap = AgentConfigFactoryTest.createStagingMap();
        createServiceManager(configMap);
        ConfigService configService = ServiceFactory.getConfigService();
        assertTrue(configService.isEnabled());
    }
    /**
     * Server-side settings pushed on connect must only affect the connecting
     * application's config; other app names keep their defaults.
     */
    @Test
    public void connectionListener() throws Exception {
        Map<String, Object> configMap = AgentConfigFactoryTest.createStagingMap();
        createServiceManager(configMap);
        ConfigService configService = ServiceFactory.getConfigService();
        MockRPMServiceManager rpmServiceManager = (MockRPMServiceManager) ServiceFactory.getRPMServiceManager();
        ConnectionConfigListener connectionConfigListener = rpmServiceManager.getConnectionConfigListener();
        MockRPMService rpmService = (MockRPMService) rpmServiceManager.getRPMService();
        String appName = rpmService.getApplicationName();
        String appName2 = "bogus";
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> agentData = new HashMap<>();
        data.put(AgentConfigFactory.AGENT_CONFIG, agentData);
        data.put(AgentConfigImpl.APDEX_T, 0.500d);
        data.put(TransactionTracerConfigImpl.COLLECT_TRACES, true);
        data.put(ErrorCollectorConfigImpl.COLLECT_ERRORS, true);
        agentData.put(AgentConfigFactory.TRANSACTION_TRACER_PREFIX + TransactionTracerConfigImpl.ENABLED, true);
        agentData.put(AgentConfigFactory.ERROR_COLLECTOR_PREFIX + ErrorCollectorConfigImpl.ENABLED, true);
        // before connect: transaction tracer disabled, error collector at its default
        assertFalse(configService.getAgentConfig(appName).getTransactionTracerConfig().isEnabled());
        assertFalse(configService.getAgentConfig(appName2).getTransactionTracerConfig().isEnabled());
        assertTrue(configService.getAgentConfig(appName).getErrorCollectorConfig().isEnabled());
        assertTrue(configService.getAgentConfig(appName2).getErrorCollectorConfig().isEnabled());
        connectionConfigListener.connected(rpmService, data);
        // apdex_t of 0.5s applies to the connected app only; "bogus" keeps the 1s default
        assertEquals(500L, configService.getAgentConfig(appName).getApdexTInMillis());
        assertEquals(1000L, configService.getAgentConfig(appName2).getApdexTInMillis());
        assertTrue(configService.getAgentConfig(appName).getTransactionTracerConfig().isEnabled());
        assertFalse(configService.getAgentConfig(appName2).getTransactionTracerConfig().isEnabled());
        assertTrue(configService.getAgentConfig(appName).getErrorCollectorConfig().isEnabled());
        assertTrue(configService.getAgentConfig(appName2).getErrorCollectorConfig().isEnabled());
        // reconnecting with a new apdex_t updates the same app's config
        data.put(AgentConfigImpl.APDEX_T, 1.500d);
        connectionConfigListener.connected(rpmService, data);
        assertEquals(1500L, configService.getAgentConfig(appName).getApdexTInMillis());
        assertEquals(1000L, configService.getAgentConfig(appName2).getApdexTInMillis());
        // a different app connecting only updates its own apdex_t
        rpmService = new MockRPMService();
        rpmService.setApplicationName(appName2);
        data.put(AgentConfigImpl.APDEX_T, 2.000d);
        connectionConfigListener.connected(rpmService, data);
        assertEquals(1500L, configService.getAgentConfig(appName).getApdexTInMillis());
        assertEquals(2000L, configService.getAgentConfig(appName2).getApdexTInMillis());
    }
    /**
     * Error-event collection can be shut off by the collector (collect_error_events)
     * or by per-app agent config, and the max-samples limit is honoured per app.
     */
    @Test
    public void connectionListenerAndErrorEvents() throws Exception {
        Map<String, Object> configMap = AgentConfigFactoryTest.createStagingMap();
        createServiceManager(configMap);
        ConfigService configService = ServiceFactory.getConfigService();
        MockRPMServiceManager rpmServiceManager = (MockRPMServiceManager) ServiceFactory.getRPMServiceManager();
        ConnectionConfigListener connectionConfigListener = rpmServiceManager.getConnectionConfigListener();
        // test defaults
        MockRPMService rpmService = (MockRPMService) rpmServiceManager.getRPMService();
        String appName = rpmService.getApplicationName();
        String appName2 = "bogus";
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> agentData = new HashMap<>();
        data.put(AgentConfigFactory.AGENT_CONFIG, agentData);
        assertTrue(configService.getAgentConfig(appName).getErrorCollectorConfig().isEventsEnabled());
        assertTrue(configService.getAgentConfig(appName2).getErrorCollectorConfig().isEventsEnabled());
        assertEquals(100, configService.getAgentConfig(appName).getErrorCollectorConfig().getMaxSamplesStored());
        assertEquals(100, configService.getAgentConfig(appName2).getErrorCollectorConfig().getMaxSamplesStored());
        // test collector shut off
        data.put(ErrorCollectorConfigImpl.COLLECT_EVENTS, false);
        connectionConfigListener.connected(rpmService, data);
        assertFalse(configService.getAgentConfig(appName).getErrorCollectorConfig().isEventsEnabled());
        assertTrue(configService.getAgentConfig(appName2).getErrorCollectorConfig().isEventsEnabled());
        assertEquals(100, configService.getAgentConfig(appName).getErrorCollectorConfig().getMaxSamplesStored());
        assertEquals(100, configService.getAgentConfig(appName2).getErrorCollectorConfig().getMaxSamplesStored());
        // test config shut off and max event count
        rpmService = new MockRPMService();
        rpmService.setApplicationName(appName2);
        agentData.put(AgentConfigFactory.CAPTURE_ERROR_EVENTS, false);
        agentData.put(AgentConfigFactory.MAX_ERROR_EVENT_SAMPLES_STORED, 20);
        connectionConfigListener.connected(rpmService, data);
        assertFalse(configService.getAgentConfig(appName).getErrorCollectorConfig().isEventsEnabled());
        assertFalse(configService.getAgentConfig(appName2).getErrorCollectorConfig().isEventsEnabled());
        assertEquals(100, configService.getAgentConfig(appName).getErrorCollectorConfig().getMaxSamplesStored());
        assertEquals(20, configService.getAgentConfig(appName2).getErrorCollectorConfig().getMaxSamplesStored());
    }
    /** Without server data every app name (known or not) gets the default 1s apdex_t. */
    @Test
    public void apdexTInMillis() throws Exception {
        Map<String, Object> configMap = AgentConfigFactoryTest.createStagingMap();
        createServiceManager(configMap);
        ConfigService configService = ServiceFactory.getConfigService();
        assertEquals(1000L, configService.getAgentConfig(null).getApdexTInMillis());
        String appName = configService.getDefaultAgentConfig().getApplicationName();
        assertEquals(1000L, configService.getAgentConfig(appName).getApdexTInMillis());
        assertEquals(1000L, configService.getAgentConfig("bogus").getApdexTInMillis());
    }
    /** Malformed server values ("bad" for a boolean) must fall back to safe defaults, not blow up. */
    @Test
    public void badServerData() throws Exception {
        Map<String, Object> configMap = AgentConfigFactoryTest.createStagingMap();
        createServiceManager(configMap);
        MockRPMServiceManager rpmServiceManager = (MockRPMServiceManager) ServiceFactory.getRPMServiceManager();
        ConnectionConfigListener connectionConfigListener = rpmServiceManager.getConnectionConfigListener();
        MockRPMService rpmService = (MockRPMService) rpmServiceManager.getRPMService();
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> agentData = new HashMap<>();
        data.put(AgentConfigFactory.AGENT_CONFIG, agentData);
        data.put(AgentConfigImpl.APDEX_T, 0.5d);
        data.put(AgentConfigFactory.COLLECT_TRACES, true);
        agentData.put(AgentConfigFactory.TRANSACTION_TRACER_PREFIX + TransactionTracerConfigImpl.ENABLED, "bad");
        agentData.put(AgentConfigFactory.ERROR_COLLECTOR_PREFIX + ErrorCollectorConfigImpl.ENABLED,
                !ErrorCollectorConfigImpl.DEFAULT_ENABLED);
        connectionConfigListener.connected(rpmService, data);
        ConfigService configService = ServiceFactory.getConfigService();
        assertFalse(configService.getAgentConfig(null).getTransactionTracerConfig().isEnabled());
        assertEquals(ErrorCollectorConfigImpl.DEFAULT_ENABLED,
                configService.getAgentConfig(null).getErrorCollectorConfig().isEnabled());
    }
    /** Proxy credentials and host must be masked in the sanitized settings view. */
    @Test
    public void sanitizedSettings() throws Exception {
        Map<String, Object> configMap = AgentConfigFactoryTest.createStagingMap();
        configMap.put(AgentConfigImpl.PROXY_USER, "secret_user");
        configMap.put(AgentConfigImpl.PROXY_PASS, "secret_pass");
        configMap.put(AgentConfigImpl.PROXY_HOST, "secret_host");
        createServiceManager(configMap);
        ConfigService configService = ServiceFactory.getServiceManager().getConfigService();
        Map<String, Object> sanitizedSettings = configService.getSanitizedLocalSettings();
        assertEquals(sanitizedSettings.get(AgentConfigImpl.PROXY_USER), "****");
        assertEquals(sanitizedSettings.get(AgentConfigImpl.PROXY_PASS), "****");
        assertEquals(sanitizedSettings.get(AgentConfigImpl.PROXY_HOST), "****");
    }
    /** Proxy host/password are null when not configured. */
    @Test
    public void noUsernamePasswordProxy() throws Exception {
        Map<String, Object> configMap = AgentConfigFactoryTest.createStagingMap();
        createServiceManager(configMap);
        ConfigService configService = ServiceFactory.getServiceManager().getConfigService();
        Assert.assertNull(configService.getDefaultAgentConfig().getProxyHost());
        Assert.assertNull(configService.getDefaultAgentConfig().getProxyPassword());
    }
    /**
     * A local config-file change picked up after harvest must survive a later
     * connect: the connect must not revert the config to the pre-change file
     * contents.
     */
    @Test
    public void fileChangeAndThenConnectDoesActuallyChangeConfig() throws IOException {
        ServiceManager mockServiceManager = mock(ServiceManager.class);
        ClassTransformerService mockClassTransformerService = mock(ClassTransformerService.class);
        when(mockServiceManager.getClassTransformerService()).thenReturn(mockClassTransformerService);
        ServiceFactory.setServiceManager(mockServiceManager);
        String appName = "Unit Test";
        Map<String, Object> originalMap = ImmutableMap.<String, Object>of("app_name", appName);
        File mockConfigFile = File.createTempFile("ConfigServiceTest", null);
        try (OutputStream os = new FileOutputStream(mockConfigFile)) {
            os.write(JSONObject.toJSONString(Collections.singletonMap("common", originalMap)).getBytes());
        }
        // backdate the file so the later write registers as "modified"
        assertTrue(mockConfigFile.setLastModified(15L));
        AgentConfig originalConfig = AgentConfigImpl.createAgentConfig(originalMap);
        // single-element array so the anonymous listener below can write to it
        final Boolean[] circuitBreakerSetting = new Boolean[] { null };
        assertTrue("Default circuitbreaker was expected to be true; it was apparently not.", originalConfig.getCircuitBreakerConfig().isEnabled());
        ConfigServiceImpl target = new ConfigServiceImpl(originalConfig, mockConfigFile, originalMap, false);
        target.addIAgentConfigListener(new AgentConfigListener() {
            @Override
            public void configChanged(String appName, AgentConfig agentConfig) {
                circuitBreakerSetting[0] = agentConfig.getCircuitBreakerConfig().isEnabled();
            }
        });
        // step 1: modify the file.
        try (OutputStream os = new FileOutputStream(mockConfigFile)) {
            os.write(JSONObject.toJSONString(Collections.singletonMap("common", ImmutableMap.of(
                    "app_name", appName,
                    "circuitbreaker", Collections.singletonMap("enabled", false)))).getBytes());
        }
        assertTrue("unable to set the last modified time on the mock config file.", mockConfigFile.setLastModified(System.currentTimeMillis()));
        target.afterHarvest(appName);
        assertNotNull("circuitbreaker setting should have been set; it was not", circuitBreakerSetting[0]);
        assertFalse("circuitbreaker setting has not changed from true to false; it should have!", circuitBreakerSetting[0]);
        circuitBreakerSetting[0] = null;
        // step 2: trigger connect.
        IRPMService mockRPMService = mock(IRPMService.class);
        when(mockRPMService.getApplicationName()).thenReturn(appName);
        target.connected(mockRPMService, Collections.<String, Object>emptyMap());
        // this should not have reverted to the original contents.
        assertNotNull("circuitbreaker setting should have been set; it was not", circuitBreakerSetting[0]);
        assertFalse("circuitbreaker setting has changed from false; it should not have!", circuitBreakerSetting[0]);
    }
    /** An obscured license key must be deobfuscated with the configured obscuring key. */
    @Test
    public void shouldDeobfuscateLicenseKey() throws Exception {
        Map<String, Object> obscuringKeyConfigProps = new HashMap<>();
        obscuringKeyConfigProps.put("obscuring_key", "abc123");
        ObscuredYamlPropertyWrapper obfuscatedLicenseKey =
                new ObscuredYamlPropertyWrapper("NBFTAEprV1VbCFNRAgYGVwICU1FXBAQEWVsCU1FXBARTAAAAVVdVBg==");
        Map<String, Object> configMap = new HashMap<>();
        configMap.put(ObscuringConfig.OBSCURING_CONFIG, obscuringKeyConfigProps);
        configMap.put(AgentConfigImpl.LICENSE_KEY, obfuscatedLicenseKey);
        configMap.put(AgentConfigImpl.APP_NAME, "Test");
        createServiceManager(configMap);
        ConfigService configService = ServiceFactory.getConfigService();
        AgentConfig config = configService.getAgentConfig("Test");
        String expectedDeobfuscatedKey = "Us01xX6789abcdef0123456789abcdef01234567";
        assertEquals(expectedDeobfuscatedKey, config.getLicenseKey());
    }
}
3e1e3d0537928d21fa0cf394c8add055a4fc920c | 1,423 | java | Java | ocl-core/src/main/java/uk/ac/ukc/cs/kmf/ocl20/syntax/ast/contexts/ConstraintAS.java | opatrascoiu/jmf | be597da51fa5964f07ee74213640894af8fff535 | [
"Apache-2.0"
] | null | null | null | ocl-core/src/main/java/uk/ac/ukc/cs/kmf/ocl20/syntax/ast/contexts/ConstraintAS.java | opatrascoiu/jmf | be597da51fa5964f07ee74213640894af8fff535 | [
"Apache-2.0"
] | null | null | null | ocl-core/src/main/java/uk/ac/ukc/cs/kmf/ocl20/syntax/ast/contexts/ConstraintAS.java | opatrascoiu/jmf | be597da51fa5964f07ee74213640894af8fff535 | [
"Apache-2.0"
] | null | null | null | 29.645833 | 109 | 0.725228 | 12,802 | /**
*
* Class ConstraintAS.java
*
* Generated by KMFStudio at 13 April 2004 12:05:02
* Visit http://www.cs.ukc.ac.uk/kmf
*
*/
package uk.ac.ukc.cs.kmf.ocl20.syntax.ast.contexts;
/**
 * Abstract-syntax node for an OCL constraint: a named, kinded (inv/pre/post/def...)
 * constraint with a body expression and, for 'def' constraints, the operation or
 * variable being defined.
 */
public interface ConstraintAS
    extends
        uk.ac.ukc.cs.kmf.ocl20.syntax.SyntaxElement
{
    /** @return the name of this constraint (may be empty for anonymous constraints) */
    public String getName();
    /** @param name the name of this constraint */
    public void setName(String name);
    /** @return the kind of this constraint (e.g. invariant, pre, post, def) */
    public ConstraintKindAS getKind();
    /** @param kind the kind of this constraint */
    public void setKind(ConstraintKindAS kind);
    /** @return the operation introduced by a 'def' constraint, if any */
    public OperationAS getDefOperation();
    /** @param defOperation the operation introduced by a 'def' constraint */
    public void setDefOperation(OperationAS defOperation);
    /** @return the variable introduced by a 'def' constraint, if any */
    public VariableDeclarationAS getDefVariable();
    /** @param defVariable the variable introduced by a 'def' constraint */
    public void setDefVariable(VariableDeclarationAS defVariable);
    /** @return the OCL expression forming the body of this constraint */
    public uk.ac.ukc.cs.kmf.ocl20.syntax.ast.expressions.OclExpressionAS getBodyExpression();
    /** @param bodyExpression the OCL expression forming the body of this constraint */
    public void setBodyExpression(uk.ac.ukc.cs.kmf.ocl20.syntax.ast.expressions.OclExpressionAS bodyExpression);
    /** @return a human-readable rendering of this constraint */
    public String toString();
    /** @return a deep copy of this constraint node */
    public Object clone();
}
|
3e1e3f912c3dee697bb33f5bb049856f3877dd45 | 3,835 | java | Java | core/camel-api/src/main/java/org/apache/camel/health/HealthCheckService.java | mpaetzold/camel | 826a00507239cf8e97fd33f2827a41284f4d1349 | [
"Apache-2.0"
] | 4 | 2019-04-11T01:36:58.000Z | 2020-02-05T23:39:12.000Z | core/camel-api/src/main/java/org/apache/camel/health/HealthCheckService.java | mpaetzold/camel | 826a00507239cf8e97fd33f2827a41284f4d1349 | [
"Apache-2.0"
] | 14 | 2019-06-07T16:36:04.000Z | 2022-02-01T01:07:42.000Z | core/camel-api/src/main/java/org/apache/camel/health/HealthCheckService.java | mpaetzold/camel | 826a00507239cf8e97fd33f2827a41284f4d1349 | [
"Apache-2.0"
] | 3 | 2019-04-12T03:39:06.000Z | 2019-07-08T01:41:01.000Z | 35.183486 | 117 | 0.68605 | 12,803 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.health;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiConsumer;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Service;
/**
 * A health check service that invokes the checks registered on the {@link HealthCheckRegistry}
 * according to a schedule.
 */
public interface HealthCheckService extends Service, CamelContextAware {
    /**
     * Add a listener to invoke when the state of a check changes.
     *
     * @param consumer the event listener.
     */
    void addStateChangeListener(BiConsumer<HealthCheck.State, HealthCheck> consumer);
    /**
     * Remove the state change listener.
     *
     * @param consumer the event listener to remove.
     */
    void removeStateChangeListener(BiConsumer<HealthCheck.State, HealthCheck> consumer);
    /**
     * Sets the options to be used when invoking the check identified by the
     * given id.
     *
     * @param id the health check id.
     * @param options the health check options.
     */
    void setHealthCheckOptions(String id, Map<String, Object> options);
    /**
     * Invokes the check identified by the given <code>id</code> with no options.
     *
     * @see #call(String, Map)
     *
     * @param id the health check id.
     * @return the result of the check or {@link Optional#empty()} if the id is unknown.
     */
    default Optional<HealthCheck.Result> call(String id) {
        return call(id, Collections.emptyMap());
    }
    /**
     * Invokes the check identified by the given <code>id</code> with the given
     * <code>options</code>.
     *
     * @param id the health check id.
     * @param options the health check options.
     * @return the result of the check or {@link Optional#empty()} if the id is unknown.
     */
    Optional<HealthCheck.Result> call(String id, Map<String, Object> options);
    /**
     * Notify the service that a check has changed status. This may be useful for
     * stateful checks like checks relying on tcp/ip connections.
     *
     * @param check the health check.
     * @param result the health check result.
     */
    void notify(HealthCheck check, HealthCheck.Result result);
    /**
     * Return a list of the known checks status.
     *
     * @return the list of results.
     */
    Collection<HealthCheck.Result> getResults();
    /**
     * Access the underlying concrete HealthCheckService implementation to
     * provide access to further features.
     *
     * @param clazz the proprietary class or interface of the underlying concrete HealthCheckService.
     * @return an instance of the underlying concrete HealthCheckService as the required type.
     */
    default <T extends HealthCheckService> T unwrap(Class<T> clazz) {
        // NOTE(review): this only verifies clazz is a HealthCheckService subtype;
        // clazz.cast(this) can still throw ClassCastException when this instance
        // does not implement clazz — confirm whether that should instead fall
        // through to the IllegalArgumentException below.
        if (HealthCheckService.class.isAssignableFrom(clazz)) {
            return clazz.cast(this);
        }
        throw new IllegalArgumentException(
            "Unable to unwrap this HealthCheckService type (" + getClass() + ") to the required type (" + clazz + ")"
        );
    }
}
|
3e1e3fca93998059e2a75dc54bf1f753b5862e85 | 602 | java | Java | VLBusTracker/src/main/java/ru/vlbustracker/helpers/RouteDownloaderHelper.java | alexsheyko/VL-BusTracker-Android | 788aca690e11d21117ffdc5419fd6b31e10c93d8 | [
"Apache-2.0"
] | 1 | 2017-05-11T10:04:58.000Z | 2017-05-11T10:04:58.000Z | VLBusTracker/src/main/java/ru/vlbustracker/helpers/RouteDownloaderHelper.java | alexsheyko/VL-BusTracker-Android | 788aca690e11d21117ffdc5419fd6b31e10c93d8 | [
"Apache-2.0"
] | null | null | null | VLBusTracker/src/main/java/ru/vlbustracker/helpers/RouteDownloaderHelper.java | alexsheyko/VL-BusTracker-Android | 788aca690e11d21117ffdc5419fd6b31e10c93d8 | [
"Apache-2.0"
] | null | null | null | 27.363636 | 84 | 0.770764 | 12,804 | package ru.vlbustracker.helpers;
import ru.vlbustracker.models.Route;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
/** DownloaderHelper that parses downloaded route JSON and caches the raw payload. */
public class RouteDownloaderHelper implements DownloaderHelper {
    // Cache-file key under which the downloaded route JSON is stored.
    public static final String ROUTE_JSON_FILE = "routeJson";

    /**
     * Parses the route JSON into the Route model and caches the raw JSON
     * under {@link #ROUTE_JSON_FILE} for later offline use.
     */
    @Override
    public void parse(JSONObject jsonObject) throws JSONException, IOException {
        Route.parseJSON(jsonObject);
        Downloader.cache(ROUTE_JSON_FILE, jsonObject);
    }

    // NOTE(review): intentionally a no-op? Route payloads appear to arrive as a
    // JSON object, not an array — confirm whether array payloads need handling.
    public void parseArray(JSONArray jsonObject) throws JSONException, IOException {
    }
}
|
3e1e40a29bca9c85bdc4d49ec6f1e4876a0327b8 | 457 | java | Java | java/org/metasyntactic/automata/compiler/util/StringList.java | avoronchenko/metasyntactic | cea5408f8c575aae4f280df983f511b5fa7ed541 | [
"Apache-2.0"
] | 1 | 2016-01-02T10:37:45.000Z | 2016-01-02T10:37:45.000Z | java/org/metasyntactic/automata/compiler/util/StringList.java | avoronchenko/metasyntactic | cea5408f8c575aae4f280df983f511b5fa7ed541 | [
"Apache-2.0"
] | 1 | 2016-12-15T12:24:46.000Z | 2016-12-15T12:24:46.000Z | java/org/metasyntactic/automata/compiler/util/StringList.java | avoronchenko/metasyntactic | cea5408f8c575aae4f280df983f511b5fa7ed541 | [
"Apache-2.0"
] | null | null | null | 20.772727 | 57 | 0.730853 | 12,805 | package org.metasyntactic.automata.compiler.util;
import org.metasyntactic.common.base.Preconditions;
import java.util.AbstractList;
public class StringList extends AbstractList<Character> {
private final String string;
public StringList(String string) {
Preconditions.checkNotNull(string);
this.string = string;
}
public Character get(int i) {
return string.charAt(i);
}
public int size() {
return string.length();
}
} |
3e1e40bad5883a419796a9f08d80750faae1a99a | 3,033 | java | Java | SenSocialServer/src/com/ubhave/sensocial/server/filters/FrequencyFilterRegistrar.java | AbhinavMehrotra/SenSocial-Library | 7e4b4cc475ae9f5eec24badfcdc231c7f2a1d91a | [
"Unlicense"
] | 1 | 2021-08-31T09:53:36.000Z | 2021-08-31T09:53:36.000Z | SenSocialServer/src/com/ubhave/sensocial/server/filters/FrequencyFilterRegistrar.java | AbhinavMehrotra/SenSocial-Library | 7e4b4cc475ae9f5eec24badfcdc231c7f2a1d91a | [
"Unlicense"
] | null | null | null | SenSocialServer/src/com/ubhave/sensocial/server/filters/FrequencyFilterRegistrar.java | AbhinavMehrotra/SenSocial-Library | 7e4b4cc475ae9f5eec24badfcdc231c7f2a1d91a | [
"Unlicense"
] | 3 | 2016-04-27T04:24:58.000Z | 2019-04-23T03:54:06.000Z | 36.142857 | 82 | 0.703887 | 12,806 | /*******************************************************************************
*
* SenSocial Middleware
*
* Copyright (c) ${2014}, University of Birmingham
* Abhinav Mehrotra, kenaa@example.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University of Birmingham
* nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE ABOVE COPYRIGHT HOLDERS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*******************************************************************************/
package com.ubhave.sensocial.server.filters;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.ubhave.sensocial.server.data.SocialEvent;
/**
 * FrequencyFilterRegistrar keeps track of frequency-based streams: it maps a
 * client stream id to the server stream ids derived from it.
 */
public class FrequencyFilterRegistrar {

    // client stream id -> ids of the server streams built from it
    private static final Map<String, String> freqFilter = new HashMap<String, String>();

    protected static void add(String clientStreamId, String serverStreamIds) {
        freqFilter.put(clientStreamId, serverStreamIds);
    }

    protected static void remove(String aggregatedStreamId) {
        freqFilter.remove(aggregatedStreamId);
    }

    /**
     * Reports whether any registered client stream id contains the given id as
     * a substring, i.e. whether other streams are dependent on this stream.
     */
    public static Boolean isPresent(String streamId) {
        for (String registeredId : freqFilter.keySet()) {
            if (registeredId.contains(streamId)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns the server stream ids registered under the first client stream id
     * containing the given id, or null when there is no match.
     */
    public static String getStreamId(String streamId) {
        for (Map.Entry<String, String> entry : freqFilter.entrySet()) {
            if (entry.getKey().contains(streamId)) {
                return entry.getValue();
            }
        }
        return null;
    }
}
|
3e1e41a51322eba9e0d0e356b40c9b8375eb85a5 | 4,845 | java | Java | estatioapp/app/src/test/java/org/estatio/module/turnover/imports/TurnoverImport_Test.java | Gikabu/estatio | 0c645978c2f7768c71b0f3c0bd865e8bbca87c22 | [
"Apache-2.0"
] | 215 | 2015-01-06T15:34:10.000Z | 2021-05-22T08:18:02.000Z | estatioapp/app/src/test/java/org/estatio/module/turnover/imports/TurnoverImport_Test.java | NightDog/estatio | b81257ef3ad7dc75108b8b61ab08cf9554371677 | [
"Apache-2.0"
] | 52 | 2015-01-07T09:10:46.000Z | 2020-11-29T08:31:28.000Z | estatioapp/app/src/test/java/org/estatio/module/turnover/imports/TurnoverImport_Test.java | NightDog/estatio | b81257ef3ad7dc75108b8b61ab08cf9554371677 | [
"Apache-2.0"
] | 140 | 2015-01-11T21:53:35.000Z | 2022-01-17T11:58:00.000Z | 44.449541 | 110 | 0.699484 | 12,807 | package org.estatio.module.turnover.imports;
import java.math.BigDecimal;
import org.assertj.core.api.Assertions;
import org.junit.Test;
public class TurnoverImport_Test {
    /**
     * Verifies the year-over-year percentage difference for net and gross
     * amounts: -100% when only previous-year values exist, +100% when only
     * current values exist, 0% for equal values, and signed rounded deltas
     * otherwise.
     */
    @Test
    public void getAmountDivPercentage() {
        // given
        TurnoverImport turnoverImport = new TurnoverImport();
        // when only previous-year amounts are set (current amounts null)
        turnoverImport.setNetAmountPreviousYear(new BigDecimal("1000.00"));
        turnoverImport.setGrossAmountPreviousYear(new BigDecimal("1200.00"));
        // then the difference is a full drop: -100%
        Assertions.assertThat(turnoverImport.getNetAmountDivPercentage()).isEqualTo(new BigDecimal("-100"));
        Assertions.assertThat(turnoverImport.getGrossAmountDivPercentage()).isEqualTo(new BigDecimal("-100"));
        // and when only current amounts are set (previous-year amounts null)
        turnoverImport.setNetAmount(new BigDecimal("1234.56"));
        turnoverImport.setGrossAmount(new BigDecimal("1481.47"));
        turnoverImport.setNetAmountPreviousYear(null);
        turnoverImport.setGrossAmountPreviousYear(null);
        // then the difference is a full gain: +100%
        Assertions.assertThat(turnoverImport.getNetAmountDivPercentage()).isEqualTo(new BigDecimal("100"));
        Assertions.assertThat(turnoverImport.getGrossAmountDivPercentage()).isEqualTo(new BigDecimal("100"));
        // and when both years are equal
        turnoverImport.setNetAmountPreviousYear(new BigDecimal("1234.56"));
        turnoverImport.setGrossAmountPreviousYear(new BigDecimal("1481.47"));
        // then there is no difference
        Assertions.assertThat(turnoverImport.getNetAmountDivPercentage()).isEqualTo(new BigDecimal("0"));
        Assertions.assertThat(turnoverImport.getGrossAmountDivPercentage()).isEqualTo(new BigDecimal("0"));
        // and when the current year is higher than the previous year
        turnoverImport.setNetAmountPreviousYear(new BigDecimal("1000.00"));
        turnoverImport.setGrossAmountPreviousYear(new BigDecimal("1200.00"));
        // then the percentage is positive and rounded (1234.56/1000 -> 23%)
        Assertions.assertThat(turnoverImport.getNetAmountDivPercentage()).isEqualTo(new BigDecimal("23"));
        Assertions.assertThat(turnoverImport.getGrossAmountDivPercentage()).isEqualTo(new BigDecimal("23"));
        // and when the current year is lower than the previous year
        turnoverImport.setNetAmountPreviousYear(new BigDecimal("1234.56"));
        turnoverImport.setNetAmount(new BigDecimal("1000.00"));
        turnoverImport.setGrossAmountPreviousYear(new BigDecimal("1481.47"));
        turnoverImport.setGrossAmount(new BigDecimal("1200.00"));
        // then the percentage is negative and rounded
        Assertions.assertThat(turnoverImport.getNetAmountDivPercentage()).isEqualTo(new BigDecimal("-19"));
        Assertions.assertThat(turnoverImport.getGrossAmountDivPercentage()).isEqualTo(new BigDecimal("-19"));
    }
@Test
public void netAmountToUse_works() throws Exception {
// given
TurnoverImport turnoverImport = new TurnoverImport();
// when nothing set, then
Assertions.assertThat(turnoverImport.netAmountToUse()).isNull();
// when gross amount null
turnoverImport.setVatPercentage(new BigDecimal("25"));
// then
Assertions.assertThat(turnoverImport.netAmountToUse()).isNull();
// when net amount calculated
turnoverImport.setGrossAmount(new BigDecimal("500.55"));
turnoverImport.setVatPercentage(new BigDecimal("25"));
// then
Assertions.assertThat(turnoverImport.netAmountToUse()).isEqualTo(new BigDecimal("400.44"));
// when net amount calculated and rounded
turnoverImport.setGrossAmount(new BigDecimal("12345.67"));
turnoverImport.setVatPercentage(new BigDecimal("25"));
// then
Assertions.assertThat(turnoverImport.netAmountToUse()).isEqualTo(new BigDecimal("9876.54"));
// when gross amount zero
turnoverImport.setGrossAmount(new BigDecimal("0.00"));
turnoverImport.setVatPercentage(new BigDecimal("25"));
// then
Assertions.assertThat(turnoverImport.netAmountToUse()).isEqualTo(new BigDecimal("0.00"));
// when net amount set
turnoverImport.setGrossAmount(new BigDecimal("12345.67"));
turnoverImport.setNetAmount(new BigDecimal("123.45"));
turnoverImport.setVatPercentage(new BigDecimal("25"));
// then
Assertions.assertThat(turnoverImport.netAmountToUse()).isEqualTo(new BigDecimal("123.45"));
// when net amount set to 0
turnoverImport.setGrossAmount(new BigDecimal("12345.67"));
turnoverImport.setNetAmount(new BigDecimal("0.00"));
turnoverImport.setVatPercentage(new BigDecimal("25"));
// then
Assertions.assertThat(turnoverImport.netAmountToUse()).isEqualTo(new BigDecimal("0.00"));
// when no vat percentage set
turnoverImport.setGrossAmount(new BigDecimal("12345.67"));
turnoverImport.setNetAmount(null);
turnoverImport.setVatPercentage(null);
// then
Assertions.assertThat(turnoverImport.netAmountToUse()).isNull();
}
} |
3e1e421e909596f3847745203bc06507b9b18fad | 4,715 | java | Java | flutter-hms-ar/android/src/main/java/com/huawei/hms/plugin/ar/core/util/WorldShaderUtil.java | nestorsgarzonc/hms-flutter-plugin | 6af48a976d29659a8d7e0055a663af9f04802fc0 | [
"Apache-2.0"
] | 192 | 2020-05-24T00:47:03.000Z | 2022-03-26T13:36:08.000Z | flutter-hms-ar/android/src/main/java/com/huawei/hms/plugin/ar/core/util/WorldShaderUtil.java | nestorsgarzonc/hms-flutter-plugin | 6af48a976d29659a8d7e0055a663af9f04802fc0 | [
"Apache-2.0"
] | 145 | 2020-07-22T10:31:19.000Z | 2022-03-30T09:31:54.000Z | flutter-hms-ar/android/src/main/java/com/huawei/hms/plugin/ar/core/util/WorldShaderUtil.java | nestorsgarzonc/hms-flutter-plugin | 6af48a976d29659a8d7e0055a663af9f04802fc0 | [
"Apache-2.0"
] | 82 | 2020-07-20T07:30:16.000Z | 2022-03-28T07:04:03.000Z | 50.159574 | 114 | 0.534464 | 12,808 | /*
Copyright 2020-2021. Huawei Technologies Co., Ltd. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.huawei.hms.plugin.ar.core.util;
/**
 * Holder for the GLSL shader sources used by the AR world renderer, plus
 * factories that compile/link them into GL programs via
 * {@link OpenGLUtil#createGlProgram}.
 *
 * <p>The string constants below are runtime data (GLSL source) — do not
 * reformat or "fix" their contents without re-testing the rendering.
 */
public class WorldShaderUtil {
    // Platform line separator, used purely to keep the generated GLSL readable
    // when dumped/logged; GLSL itself is whitespace-insensitive.
    private static final String LS = System.lineSeparator();

    // Vertex shader for plane labels: lifts the 2D (x, z) position into 3D at
    // y = 0, applies inPlanUVMatrix to derive texture coordinates, and passes
    // the alpha through in varTexCoordAlpha.z.
    private static final String LABEL_VERTEX =
        "uniform mat2 inPlanUVMatrix;" + LS
            + "uniform mat4 inMVPMatrix;" + LS
            + "attribute vec3 inPosXZAlpha;" + LS
            + "varying vec3 varTexCoordAlpha;" + LS
            + "void main() {" + LS
            + "    vec4 tempPosition = vec4(inPosXZAlpha.x, 0.0, inPosXZAlpha.y, 1.0);" + LS
            + "    vec2 tempUV = inPlanUVMatrix * inPosXZAlpha.xy;" + LS
            + "    varTexCoordAlpha = vec3(tempUV.x + 0.5, tempUV.y + 0.5, inPosXZAlpha.z);" + LS
            + "    gl_Position = inMVPMatrix * tempPosition;" + LS
            + "}";

    // Fragment shader for plane labels: samples the label texture; note it
    // forces alpha to 1.0 rather than using varTexCoordAlpha.z.
    private static final String LABEL_FRAGMENT =
        "precision highp float;" + LS
            + "uniform sampler2D inTexture;" + LS
            + "varying vec3 varTexCoordAlpha;" + LS
            + "void main() {" + LS
            + "    vec4 control = texture2D(inTexture, varTexCoordAlpha.xy);" + LS
            + "    gl_FragColor = vec4(control.rgb, 1.0);" + LS
            + "}";

    // Vertex shader for virtual objects: transforms position/normal into clip
    // and camera space and forwards the texture coordinate.
    private static final String OBJECT_VERTEX =
        "uniform mat4 inMVPMatrix;" + LS
            + "uniform mat4 inViewMatrix;" + LS
            + "attribute vec3 inObjectNormalVector;" + LS
            + "attribute vec4 inObjectPosition;" + LS
            + "attribute vec2 inTexCoordinate;" + LS
            + "varying vec3 varCameraNormalVector;" + LS
            + "varying vec2 varTexCoordinate;" + LS
            + "varying vec3 varCameraPos;" + LS
            + "void main() {" + LS
            + "    gl_Position = inMVPMatrix * inObjectPosition;" + LS
            + "    varCameraNormalVector = (inViewMatrix * vec4(inObjectNormalVector, 0.0)).xyz;" + LS
            + "    varTexCoordinate = inTexCoordinate;" + LS
            + "    varCameraPos = (inViewMatrix * inObjectPosition).xyz;" + LS
            + "}";

    // Fragment shader for virtual objects: samples the object texture (with a
    // flipped v coordinate), overrides rgb with inObjectColor/255, and applies
    // a simple diffuse + specular lighting term driven by inLight.
    // NOTE(review): "objectColor.rgb * + diffuse + specular" contains a unary
    // plus, so it evaluates as rgb * diffuse + specular — presumably intended,
    // but worth confirming against the original renderer.
    private static final String OBJECT_FRAGMENT =
        "precision mediump float;" + LS
            + " uniform vec4 inLight;" + LS
            + "uniform vec4 inObjectColor;" + LS
            + "uniform sampler2D inObjectTexture;" + LS
            + "varying vec3 varCameraPos;" + LS
            + "varying vec3 varCameraNormalVector;" + LS
            + "varying vec2 varTexCoordinate;" + LS
            + "void main() {" + LS
            + "    vec4 objectColor = texture2D(inObjectTexture, vec2(varTexCoordinate.x, 1.0 - " +
            "varTexCoordinate.y));" + LS
            + "    objectColor.rgb = inObjectColor.rgb / 255.0;" + LS
            + "    vec3 viewNormal = normalize(varCameraNormalVector);" + LS
            + "    vec3 reflectedLightDirection = reflect(inLight.xyz, viewNormal);" + LS
            + "    vec3 normalCameraPos = normalize(varCameraPos);" + LS
            + "    float specularStrength = max(0.0, dot(normalCameraPos, reflectedLightDirection));" + LS
            + "    gl_FragColor.a = objectColor.a;" + LS
            + "    float diffuse = inLight.w * 3.5 *" + LS
            + "        0.5 * (dot(viewNormal, inLight.xyz) + 1.0);" + LS
            + "    float specular = inLight.w *" + LS
            + "        pow(specularStrength, 6.0);" + LS
            + "    gl_FragColor.rgb = objectColor.rgb * + diffuse + specular;" + LS
            + "}";

    // Utility class: not instantiable.
    private WorldShaderUtil() {
    }

    /**
     * Compiles and links the plane-label shader pair.
     *
     * @return the GL program handle produced by OpenGLUtil.createGlProgram
     */
    public static int getLabelProgram() {
        return OpenGLUtil.createGlProgram(LABEL_VERTEX, LABEL_FRAGMENT);
    }

    /**
     * Compiles and links the virtual-object shader pair.
     *
     * @return the GL program handle produced by OpenGLUtil.createGlProgram
     */
    public static int getObjectProgram() {
        return OpenGLUtil.createGlProgram(OBJECT_VERTEX, OBJECT_FRAGMENT);
    }
}
|
3e1e42c38575f751ebb902bcba353c0f7a928b2e | 242 | java | Java | src/main/java/heli/htweener/ease/impl/quint/QuintIn.java | DmitriiZheleznikov/mydict | 543875553ca99598a06fdaf223afc830f99233d1 | [
"MIT"
] | null | null | null | src/main/java/heli/htweener/ease/impl/quint/QuintIn.java | DmitriiZheleznikov/mydict | 543875553ca99598a06fdaf223afc830f99233d1 | [
"MIT"
] | null | null | null | src/main/java/heli/htweener/ease/impl/quint/QuintIn.java | DmitriiZheleznikov/mydict | 543875553ca99598a06fdaf223afc830f99233d1 | [
"MIT"
] | null | null | null | 22 | 47 | 0.739669 | 12,809 | package heli.htweener.ease.impl.quint;
import heli.htweener.ease.IEaseFunction;
/**
 * Quintic ease-in function: the eased value follows t^5, so the animation
 * starts slowly and accelerates toward the end.
 */
public class QuintIn implements IEaseFunction {

    /**
     * Raises the ratio to the fifth power.
     *
     * <p>The multiplications are performed left-to-right, matching the
     * associativity of a plain {@code r*r*r*r*r} expression, so the result is
     * bit-identical to the straightforward form.
     *
     * @param ratio normalized animation progress
     * @return {@code ratio} raised to the fifth power
     */
    @Override
    public double transform(double ratio) {
        double result = ratio;
        for (int i = 0; i < 4; i++) {
            result *= ratio;
        }
        return result;
    }
}
|
3e1e42d6494b0f4f338c421f18be932b8ebcdce6 | 3,133 | java | Java | libraries/general/text/src/main/java/com/googlecode/aluminumproject/libraries/text/actions/Mute.java | levi-h/aluminumproject | b243f09d81c7aa758a240e9d38a7277372543ecc | [
"Apache-2.0"
] | null | null | null | libraries/general/text/src/main/java/com/googlecode/aluminumproject/libraries/text/actions/Mute.java | levi-h/aluminumproject | b243f09d81c7aa758a240e9d38a7277372543ecc | [
"Apache-2.0"
] | null | null | null | libraries/general/text/src/main/java/com/googlecode/aluminumproject/libraries/text/actions/Mute.java | levi-h/aluminumproject | b243f09d81c7aa758a240e9d38a7277372543ecc | [
"Apache-2.0"
] | null | null | null | 37.746988 | 111 | 0.770508 | 12,810 | /*
* Copyright 2009-2012 Aluminum project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.aluminumproject.libraries.text.actions;
import com.googlecode.aluminumproject.AluminumException;
import com.googlecode.aluminumproject.annotations.Typed;
import com.googlecode.aluminumproject.context.Context;
import com.googlecode.aluminumproject.interceptors.AbstractActionInterceptor;
import com.googlecode.aluminumproject.libraries.actions.AbstractActionContribution;
import com.googlecode.aluminumproject.libraries.actions.ActionContributionOptions;
import com.googlecode.aluminumproject.libraries.actions.ActionParameter;
import com.googlecode.aluminumproject.templates.ActionContext;
import com.googlecode.aluminumproject.templates.ActionPhase;
import com.googlecode.aluminumproject.writers.DecorativeWriter;
import com.googlecode.aluminumproject.writers.ToggleableWriter;
import com.googlecode.aluminumproject.writers.Writer;
@SuppressWarnings("javadoc")
@Typed("boolean")
public class Mute extends AbstractActionContribution {
public void make(Context context, Writer writer, ActionParameter parameter, ActionContributionOptions options)
throws AluminumException {
final boolean mute = ((Boolean) parameter.getValue(Boolean.TYPE, context)).booleanValue();
options.addInterceptor(new AbstractActionInterceptor(ActionPhase.EXECUTION) {
public void intercept(ActionContext actionContext) throws AluminumException {
Writer originalWriter = actionContext.getWriter();
ToggleableWriter toggleableWriter = findToggleableWriter(originalWriter);
Boolean originalWrite = null;
if (toggleableWriter == null) {
actionContext.setWriter(new ToggleableWriter(originalWriter, !mute));
} else {
originalWrite = toggleableWriter.isWrite();
toggleableWriter.setWrite(!mute);
}
try {
actionContext.proceed();
} finally {
if (toggleableWriter == null) {
actionContext.setWriter(originalWriter);
} else {
toggleableWriter.setWrite(originalWrite);
}
}
}
private ToggleableWriter findToggleableWriter(Writer writer) {
ToggleableWriter toggleableWriter = null;
Writer currentWriter = writer;
do {
if (currentWriter instanceof ToggleableWriter) {
toggleableWriter = (ToggleableWriter) currentWriter;
} else if (currentWriter instanceof DecorativeWriter) {
currentWriter = ((DecorativeWriter) currentWriter).getWriter();
} else {
currentWriter = null;
}
} while ((toggleableWriter == null) && (currentWriter != null));
return toggleableWriter;
}
});
}
} |
3e1e4433241f20e51ad7ce33ce3cb31ffdaeb622 | 2,095 | java | Java | depmodule_dal/src/main/java/com/wangjiegulu/rapidooo/depmodule/dal/xdo/demo/User.java | wangjiegulu/RapidOOO | 569c31bcc7002923b9c8017377b47679e289a601 | [
"Apache-2.0"
] | 14 | 2018-04-12T12:44:57.000Z | 2021-07-12T01:35:21.000Z | depmodule_dal/src/main/java/com/wangjiegulu/rapidooo/depmodule/dal/xdo/demo/User.java | wangjiegulu/RapidOOO | 569c31bcc7002923b9c8017377b47679e289a601 | [
"Apache-2.0"
] | 1 | 2018-09-03T03:57:34.000Z | 2018-09-03T03:57:34.000Z | depmodule_dal/src/main/java/com/wangjiegulu/rapidooo/depmodule/dal/xdo/demo/User.java | wangjiegulu/RapidOOO | 569c31bcc7002923b9c8017377b47679e289a601 | [
"Apache-2.0"
] | 3 | 2018-04-23T02:28:07.000Z | 2018-05-06T02:56:56.000Z | 22.623656 | 91 | 0.612643 | 12,811 | package com.wangjiegulu.rapidooo.depmodule.dal.xdo.demo;
import android.os.Parcel;
import android.os.Parcelable;
/**
* Author: wangjie Email: efpyi@example.com Date: 2019-06-17.
*/
/**
 * Mutable DTO for a user (id, bot flag, first/last name, username).
 * Implements {@link Parcelable} so instances can be passed between Android
 * components. All fields are nullable boxed types; writeValue/readValue are
 * used for the non-String fields so that null survives a parcel round trip.
 */
public class User implements Parcelable {
    private Integer id;
    private Boolean isBot;
    private String firstName;
    private String lastName;
    private String username;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    // NOTE(review): accessor pair is getBot()/setBot(Boolean) for the field
    // named "isBot" — bean-introspecting tools may expect isBot()/getIsBot();
    // confirm against callers before renaming.
    public Boolean getBot() {
        return isBot;
    }

    public void setBot(Boolean bot) {
        isBot = bot;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    @Override
    public int describeContents() {
        // No special contents (e.g. file descriptors) in the parcel.
        return 0;
    }

    /** Write order here must match the read order in {@link #User(Parcel)}. */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeValue(this.id);
        dest.writeValue(this.isBot);
        dest.writeString(this.firstName);
        dest.writeString(this.lastName);
        dest.writeString(this.username);
    }

    public User() {
    }

    /** Reconstructs a User from a parcel; field order mirrors writeToParcel. */
    protected User(Parcel in) {
        this.id = (Integer) in.readValue(Integer.class.getClassLoader());
        this.isBot = (Boolean) in.readValue(Boolean.class.getClassLoader());
        this.firstName = in.readString();
        this.lastName = in.readString();
        this.username = in.readString();
    }

    // Required by the Parcelable contract; used by the framework to unmarshal.
    public static final Parcelable.Creator<User> CREATOR = new Parcelable.Creator<User>() {
        @Override
        public User createFromParcel(Parcel source) {
            return new User(source);
        }

        @Override
        public User[] newArray(int size) {
            return new User[size];
        }
    };
}
|
3e1e45cddd2f7bdbbed6133594ace412848024eb | 877 | java | Java | service/src/main/java/de/assertagile/workshop/agiletesting/api/customer/CustomerService.java | mkutz/agile-testing-workshop | ae2497e1b5d54f6b7ca0acf108d51a9a947bd88a | [
"Apache-2.0"
] | null | null | null | service/src/main/java/de/assertagile/workshop/agiletesting/api/customer/CustomerService.java | mkutz/agile-testing-workshop | ae2497e1b5d54f6b7ca0acf108d51a9a947bd88a | [
"Apache-2.0"
] | null | null | null | service/src/main/java/de/assertagile/workshop/agiletesting/api/customer/CustomerService.java | mkutz/agile-testing-workshop | ae2497e1b5d54f6b7ca0acf108d51a9a947bd88a | [
"Apache-2.0"
] | null | null | null | 27.40625 | 96 | 0.758267 | 12,812 | package de.assertagile.workshop.agiletesting.api.customer;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@Service
public class CustomerService {
private final CustomerRepository customerRepository;
@Inject
public CustomerService(final CustomerRepository customerRepository) {
this.customerRepository = customerRepository;
}
public List<Customer> getAllCustomers() {
return new ArrayList<Customer>();
}
public Optional<Customer> getCustomerById(String customerId) {
return Optional.ofNullable(Customer.fromEntity(customerRepository.findOne(customerId)));
}
public Customer saveCustomer(final Customer customer) {
return Customer.fromEntity(customerRepository.save(customer.toEntity()));
}
}
|
3e1e462611d66a8bfc1247432df16648cd114d66 | 2,012 | java | Java | searchdropdown-addon/src/main/java/org/vaadin/alump/searchdropdown/SearchEvent.java | alump/SearchDropDown | 44c3141b86bfa34fa911003adee7b99958b72173 | [
"Apache-2.0"
] | 3 | 2017-06-09T03:43:25.000Z | 2018-08-14T01:52:08.000Z | searchdropdown-addon/src/main/java/org/vaadin/alump/searchdropdown/SearchEvent.java | alump/SearchDropDown | 44c3141b86bfa34fa911003adee7b99958b72173 | [
"Apache-2.0"
] | 2 | 2017-05-09T14:59:28.000Z | 2017-09-27T13:23:17.000Z | searchdropdown-addon/src/main/java/org/vaadin/alump/searchdropdown/SearchEvent.java | alump/SearchDropDown | 44c3141b86bfa34fa911003adee7b99958b72173 | [
"Apache-2.0"
] | 4 | 2017-09-27T08:55:49.000Z | 2018-11-11T18:33:17.000Z | 28.041667 | 82 | 0.684002 | 12,813 | /**
* SearchEvent.java (SearchDropDown)
*
* Copyright 2017 Vaadin Ltd, Sami Viitanen <envkt@example.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vaadin.alump.searchdropdown;
import java.io.Serializable;
import java.util.Optional;
/**
* Event given when user has entered search
*/
/**
 * Event fired when the user performs a search in a {@code SearchDropDown}:
 * either free-form text was entered, a suggestion was picked, or the search
 * was cleared. Exactly one of the text/suggestion payloads is set per event.
 */
public class SearchEvent<T> implements Serializable {

    // Field names kept as-is: the class is Serializable, so renaming them
    // would change the serialized form.
    private final SearchDropDown<T> source;
    private final String text;
    private final SearchSuggestion<T> suggestion;
    private final boolean clearEvent;

    /** Creates a plain text-search event (not a clear event). */
    public SearchEvent(SearchDropDown<T> source, String text) {
        this(source, text, false);
    }

    /** Creates a text-search event, optionally flagged as a clear event. */
    public SearchEvent(SearchDropDown<T> source, String text, boolean clear) {
        this.source = source;
        this.text = text;
        this.suggestion = null;
        this.clearEvent = clear;
    }

    /** Creates an event for a picked suggestion. */
    public SearchEvent(SearchDropDown<T> source, SearchSuggestion<T> suggestion) {
        this.source = source;
        this.text = null;
        this.suggestion = suggestion;
        this.clearEvent = false;
    }

    /** @return the drop down that fired this event */
    public SearchDropDown<T> getSource() {
        return source;
    }

    /**
     * @return the search text: the suggestion's text when a suggestion was
     *         picked and yields non-null text, otherwise the plain text
     */
    public String getText() {
        String suggestionText = (suggestion == null) ? null : suggestion.getText();
        return (suggestionText != null) ? suggestionText : text;
    }

    /** @return the picked suggestion, if any */
    public Optional<SearchSuggestion<T>> getSuggestion() {
        return Optional.ofNullable(suggestion);
    }

    /** @return true when this event carries a suggestion */
    public boolean hasSuggestion() {
        return suggestion != null;
    }

    /** @return true when this event signals that the search was cleared */
    public boolean isClear() {
        return clearEvent;
    }
}
|
3e1e4738499e56eaf59f804347265bfc54ab44ec | 6,735 | java | Java | java-client/src/main/java/co/elastic/clients/elasticsearch/security/get_service_credentials/NodesCredentials.java | ksurendra/elasticsearch-java | 33cdf3a9916af93004aa25e8c6f7e2ab13db8ad9 | [
"Apache-2.0"
] | null | null | null | java-client/src/main/java/co/elastic/clients/elasticsearch/security/get_service_credentials/NodesCredentials.java | ksurendra/elasticsearch-java | 33cdf3a9916af93004aa25e8c6f7e2ab13db8ad9 | [
"Apache-2.0"
] | null | null | null | java-client/src/main/java/co/elastic/clients/elasticsearch/security/get_service_credentials/NodesCredentials.java | ksurendra/elasticsearch-java | 33cdf3a9916af93004aa25e8c6f7e2ab13db8ad9 | [
"Apache-2.0"
] | null | null | null | 30.613636 | 117 | 0.692502 | 12,814 | /*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
//----------------------------------------------------
// THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST.
//----------------------------------------------------
package co.elastic.clients.elasticsearch.security.get_service_credentials;
import co.elastic.clients.elasticsearch._types.NodeStatistics;
import co.elastic.clients.json.JsonpDeserializable;
import co.elastic.clients.json.JsonpDeserializer;
import co.elastic.clients.json.JsonpMapper;
import co.elastic.clients.json.JsonpSerializable;
import co.elastic.clients.json.ObjectBuilderDeserializer;
import co.elastic.clients.json.ObjectDeserializer;
import co.elastic.clients.util.ApiTypeHelper;
import co.elastic.clients.util.ObjectBuilder;
import co.elastic.clients.util.WithJsonObjectBuilderBase;
import jakarta.json.stream.JsonGenerator;
import java.lang.String;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import javax.annotation.Nullable;
// typedef: security.get_service_credentials.NodesCredentials
/**
*
* @see <a href=
* "../../doc-files/api-spec.html#security.get_service_credentials.NodesCredentials">API
* specification</a>
*/
// NOTE: generated code (see the file header: "THIS CODE IS GENERATED. MANUAL
// EDITS WILL BE LOST.") — any change here should be made in the generator, not
// in this file.
@JsonpDeserializable
public class NodesCredentials implements JsonpSerializable {
	private final NodeStatistics nodes;

	private final Map<String, NodesCredentialsFileToken> fileTokens;

	// ---------------------------------------------------------------------------------------------

	// Both fields are mandatory; ApiTypeHelper throws if the builder left them unset,
	// and fileTokens is wrapped unmodifiable.
	private NodesCredentials(Builder builder) {

		this.nodes = ApiTypeHelper.requireNonNull(builder.nodes, this, "nodes");
		this.fileTokens = ApiTypeHelper.unmodifiableRequired(builder.fileTokens, this, "fileTokens");

	}

	// Functional-style factory: configures a fresh Builder via the lambda and builds.
	public static NodesCredentials of(Function<Builder, ObjectBuilder<NodesCredentials>> fn) {
		return fn.apply(new Builder()).build();
	}

	/**
	 * Required - General status showing how nodes respond to the above collection
	 * request
	 * <p>
	 * API name: {@code _nodes}
	 */
	public final NodeStatistics nodes() {
		return this.nodes;
	}

	/**
	 * Required - File-backed tokens collected from all nodes
	 * <p>
	 * API name: {@code file_tokens}
	 */
	public final Map<String, NodesCredentialsFileToken> fileTokens() {
		return this.fileTokens;
	}

	/**
	 * Serialize this object to JSON.
	 */
	public void serialize(JsonGenerator generator, JsonpMapper mapper) {
		generator.writeStartObject();
		serializeInternal(generator, mapper);
		generator.writeEnd();
	}

	// Writes "_nodes" unconditionally and "file_tokens" as a nested object keyed by
	// token name (skipped only if the map is considered undefined).
	protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {

		generator.writeKey("_nodes");
		this.nodes.serialize(generator, mapper);

		if (ApiTypeHelper.isDefined(this.fileTokens)) {
			generator.writeKey("file_tokens");
			generator.writeStartObject();
			for (Map.Entry<String, NodesCredentialsFileToken> item0 : this.fileTokens.entrySet()) {
				generator.writeKey(item0.getKey());
				item0.getValue().serialize(generator, mapper);

			}
			generator.writeEnd();

		}

	}

	// ---------------------------------------------------------------------------------------------

	/**
	 * Builder for {@link NodesCredentials}.
	 */
	public static class Builder extends WithJsonObjectBuilderBase<Builder> implements ObjectBuilder<NodesCredentials> {
		private NodeStatistics nodes;

		private Map<String, NodesCredentialsFileToken> fileTokens;

		/**
		 * Required - General status showing how nodes respond to the above collection
		 * request
		 * <p>
		 * API name: {@code _nodes}
		 */
		public final Builder nodes(NodeStatistics value) {
			this.nodes = value;
			return this;
		}

		/**
		 * Required - General status showing how nodes respond to the above collection
		 * request
		 * <p>
		 * API name: {@code _nodes}
		 */
		public final Builder nodes(Function<NodeStatistics.Builder, ObjectBuilder<NodeStatistics>> fn) {
			return this.nodes(fn.apply(new NodeStatistics.Builder()).build());
		}

		/**
		 * Required - File-backed tokens collected from all nodes
		 * <p>
		 * API name: {@code file_tokens}
		 * <p>
		 * Adds all entries of <code>map</code> to <code>fileTokens</code>.
		 */
		public final Builder fileTokens(Map<String, NodesCredentialsFileToken> map) {
			this.fileTokens = _mapPutAll(this.fileTokens, map);
			return this;
		}

		/**
		 * Required - File-backed tokens collected from all nodes
		 * <p>
		 * API name: {@code file_tokens}
		 * <p>
		 * Adds an entry to <code>fileTokens</code>.
		 */
		public final Builder fileTokens(String key, NodesCredentialsFileToken value) {
			this.fileTokens = _mapPut(this.fileTokens, key, value);
			return this;
		}

		/**
		 * Required - File-backed tokens collected from all nodes
		 * <p>
		 * API name: {@code file_tokens}
		 * <p>
		 * Adds an entry to <code>fileTokens</code> using a builder lambda.
		 */
		public final Builder fileTokens(String key,
				Function<NodesCredentialsFileToken.Builder, ObjectBuilder<NodesCredentialsFileToken>> fn) {
			return fileTokens(key, fn.apply(new NodesCredentialsFileToken.Builder()).build());
		}

		@Override
		protected Builder self() {
			return this;
		}

		/**
		 * Builds a {@link NodesCredentials}.
		 *
		 * @throws NullPointerException
		 *             if some of the required fields are null.
		 */
		public NodesCredentials build() {
			_checkSingleUse();

			return new NodesCredentials(this);
		}
	}

	// ---------------------------------------------------------------------------------------------

	/**
	 * Json deserializer for {@link NodesCredentials}
	 */
	public static final JsonpDeserializer<NodesCredentials> _DESERIALIZER = ObjectBuilderDeserializer.lazy(Builder::new,
			NodesCredentials::setupNodesCredentialsDeserializer);

	// Maps the JSON field names back onto the builder setters used above.
	protected static void setupNodesCredentialsDeserializer(ObjectDeserializer<NodesCredentials.Builder> op) {

		op.add(Builder::nodes, NodeStatistics._DESERIALIZER, "_nodes");
		op.add(Builder::fileTokens, JsonpDeserializer.stringMapDeserializer(NodesCredentialsFileToken._DESERIALIZER),
				"file_tokens");

	}

}
|
3e1e47d4a36b95b38e15f8c8f8841a4731c46e6d | 1,000 | java | Java | common/src/main/java/com/shrimp/common/dao/BaseDao.java | Shrimp-xue/MyArchitectWay | 76384ef240438217b72f96fb1061fa88d357aa63 | [
"Apache-2.0"
] | null | null | null | common/src/main/java/com/shrimp/common/dao/BaseDao.java | Shrimp-xue/MyArchitectWay | 76384ef240438217b72f96fb1061fa88d357aa63 | [
"Apache-2.0"
] | 3 | 2019-12-21T05:21:55.000Z | 2020-01-09T13:06:19.000Z | common/src/main/java/com/shrimp/common/dao/BaseDao.java | Shrimp-xue/MyArchitectWay | 76384ef240438217b72f96fb1061fa88d357aa63 | [
"Apache-2.0"
] | null | null | null | 17.241379 | 45 | 0.533 | 12,815 | package com.shrimp.common.dao;
import java.util.List;
/**
 * Generic base DAO declaring the CRUD operations shared by concrete DAOs.
 *
 * <p>(Javadoc translated from Chinese; the original method docs said "user",
 * but the interface is generic over any entity type.)
 *
 * @param <M>  entity (model) type handled by the DAO
 * @param <QM> query-model type carrying filter/paging criteria
 * @author Shrimp
 * @date 2020.01.05 18:08
 */
public interface BaseDao<M, QM> {

    /**
     * Persists a new entity.
     *
     * @param m the entity to create
     */
    public void create(M m);

    /**
     * Updates an existing entity.
     *
     * @param m the entity carrying the new state
     */
    public void update(M m);

    /**
     * Deletes the entity identified by the given primary key.
     *
     * @param uuid primary key of the entity to delete
     */
    public void delete(int uuid);

    /**
     * Loads a single entity by primary key.
     *
     * @param uuid primary key
     * @return the matching entity — presumably null when none exists;
     *         TODO confirm with the implementations
     */
    public M getByUuid(int uuid);

    /**
     * Queries entities matching the given criteria, with paging.
     *
     * @param qm query model holding the filter and paging conditions
     * @return the matching entities
     */
    public List<M> getByConditionPage(QM qm);
}
|
3e1e49075f4c15102b3c3fbbbd1c30c519899d6b | 5,223 | java | Java | zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java | Cyft/zeppelin | d4a47a1739875a7cfd9d2adc73efb155d0d278b1 | [
"Apache-2.0"
] | 1 | 2022-03-16T08:35:03.000Z | 2022-03-16T08:35:03.000Z | zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java | Cyft/zeppelin | d4a47a1739875a7cfd9d2adc73efb155d0d278b1 | [
"Apache-2.0"
] | 6 | 2022-03-23T12:30:02.000Z | 2022-03-30T21:09:28.000Z | zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java | Cyft/zeppelin | d4a47a1739875a7cfd9d2adc73efb155d0d278b1 | [
"Apache-2.0"
] | 5 | 2016-06-08T11:35:45.000Z | 2021-11-23T08:29:29.000Z | 24.406542 | 97 | 0.701513 | 12,816 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import java.net.URL;
import java.util.List;
import java.util.Properties;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.scheduler.Scheduler;
/**
* Interpreter wrapper for lazy initialization
*/
public class LazyOpenInterpreter
extends Interpreter
implements WrappedInterpreter {
private Interpreter intp;
volatile boolean opened = false;
public LazyOpenInterpreter(Interpreter intp) {
super(new Properties());
this.intp = intp;
}
@Override
public Interpreter getInnerInterpreter() {
return intp;
}
@Override
public void setProperties(Properties properties) {
intp.setProperties(properties);
}
@Override
public Properties getProperties() {
return intp.getProperties();
}
@Override
public String getProperty(String key) {
return intp.getProperty(key);
}
@Override
public synchronized void open() throws InterpreterException {
if (opened == true) {
return;
}
synchronized (intp) {
if (opened == false) {
try {
intp.open();
opened = true;
} catch (Throwable e) {
// close interpreter to release resource,
// otherwise these resources may leak when open it again.
intp.close();
throw new InterpreterException(e);
}
}
}
}
@Override
public InterpreterResult executePrecode(InterpreterContext interpreterContext)
throws InterpreterException {
return intp.executePrecode(interpreterContext);
}
@Override
public void close() throws InterpreterException {
synchronized (intp) {
if (opened == true) {
intp.close();
opened = false;
}
}
}
public boolean isOpen() {
synchronized (intp) {
return opened;
}
}
@Override
public InterpreterResult interpret(String st, InterpreterContext context)
throws InterpreterException {
open();
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
try {
return intp.interpret(st, context);
} finally {
Thread.currentThread().setContextClassLoader(classLoader);
}
}
@Override
public void cancel(InterpreterContext context) throws InterpreterException {
open();
intp.cancel(context);
}
@Override
public FormType getFormType() throws InterpreterException {
return intp.getFormType();
}
@Override
public int getProgress(InterpreterContext context) throws InterpreterException {
if (opened) {
return intp.getProgress(context);
} else {
return 0;
}
}
@Override
public Scheduler getScheduler() {
return intp.getScheduler();
}
@Override
public List<InterpreterCompletion> completion(String buf, int cursor,
InterpreterContext interpreterContext) throws InterpreterException {
open();
List completion = intp.completion(buf, cursor, interpreterContext);
return completion;
}
@Override
public String getClassName() {
return intp.getClassName();
}
@Override
public InterpreterGroup getInterpreterGroup() {
return intp.getInterpreterGroup();
}
@Override
public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
intp.setInterpreterGroup(interpreterGroup);
}
// Exposes the wrapped interpreter's class-loader URLs.
@Override
public URL [] getClassloaderUrls() {
  return intp.getClassloaderUrls();
}
// Assigns class-loader URLs on the wrapped interpreter.
@Override
public void setClassloaderUrls(URL [] urls) {
  intp.setClassloaderUrls(urls);
}
// Registers a per-note hook command on the wrapped interpreter.
@Override
public void registerHook(String noteId, String event, String cmd) throws InvalidHookException {
  intp.registerHook(noteId, event, cmd);
}
// Registers a global (note-independent) hook command on the wrapped interpreter.
@Override
public void registerHook(String event, String cmd) throws InvalidHookException {
  intp.registerHook(event, cmd);
}
// Looks up a per-note hook command on the wrapped interpreter.
@Override
public String getHook(String noteId, String event) {
  return intp.getHook(noteId, event);
}
// Looks up a global hook command on the wrapped interpreter.
@Override
public String getHook(String event) {
  return intp.getHook(event);
}
// Removes a per-note hook command from the wrapped interpreter.
@Override
public void unregisterHook(String noteId, String event) {
  intp.unregisterHook(noteId, event);
}
// Removes a global hook command from the wrapped interpreter.
@Override
public void unregisterHook(String event) {
  intp.unregisterHook(event);
}
// Propagates the owning user's name to the wrapped interpreter.
@Override
public void setUserName(String userName) {
  this.intp.setUserName(userName);
}
// Exposes the owning user's name as known by the wrapped interpreter.
@Override
public String getUserName() {
  return this.intp.getUserName();
}
}
|
3e1e4955d11bebc5728e5239ade3c923c1f7531b | 6,772 | java | Java | src/test/org/apache/lucene/util/TestBitVector.java | adichad/lucene-new | ddfd6f2b12c34d97e70b1a92de69d138ee1b666d | [
"Apache-2.0"
] | 6 | 2020-10-27T06:11:59.000Z | 2021-09-09T13:52:42.000Z | src/test/org/apache/lucene/util/TestBitVector.java | adichad/lucene-new | ddfd6f2b12c34d97e70b1a92de69d138ee1b666d | [
"Apache-2.0"
] | 8 | 2020-11-16T20:41:38.000Z | 2022-02-01T01:05:45.000Z | sourcedata/lucene-solr-releases-lucene-2.2.0/src/test/org/apache/lucene/util/TestBitVector.java | DXYyang/SDP | 6ad0daf242d4062888ceca6d4a1bd4c41fd99b63 | [
"Apache-2.0"
] | null | null | null | 30.518018 | 90 | 0.589963 | 12,817 | package org.apache.lucene.util;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import junit.framework.TestCase;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
/**
* <code>TestBitVector</code> tests the <code>BitVector</code>, obviously.
*
* @author "Peter Mularien" <efpyi@example.com>
* @version $Id$
*/
public class TestBitVector extends TestCase
{
    public TestBitVector(String s) {
        super(s);
    }

    /**
     * Test the default constructor on BitVectors of various sizes.
     * @throws Exception
     */
    public void testConstructSize() throws Exception {
        doTestConstructOfSize(8);
        doTestConstructOfSize(20);
        doTestConstructOfSize(100);
        doTestConstructOfSize(1000);
    }

    // A freshly constructed vector must report exactly the requested size.
    private void doTestConstructOfSize(int n) {
        BitVector bv = new BitVector(n);
        assertEquals(n,bv.size());
    }

    /**
     * Test the get() and set() methods on BitVectors of various sizes.
     * @throws Exception
     */
    public void testGetSet() throws Exception {
        doTestGetSetVectorOfSize(8);
        doTestGetSetVectorOfSize(20);
        doTestGetSetVectorOfSize(100);
        doTestGetSetVectorOfSize(1000);
    }

    // Every bit starts clear; after set(i) it must read back as set.
    private void doTestGetSetVectorOfSize(int n) {
        BitVector bv = new BitVector(n);
        for(int i=0;i<bv.size();i++) {
            // ensure a set bit can be read back (bit starts clear)
            assertFalse(bv.get(i));
            bv.set(i);
            assertTrue(bv.get(i));
        }
    }

    /**
     * Test the clear() method on BitVectors of various sizes.
     * @throws Exception
     */
    public void testClear() throws Exception {
        doTestClearVectorOfSize(8);
        doTestClearVectorOfSize(20);
        doTestClearVectorOfSize(100);
        doTestClearVectorOfSize(1000);
    }

    // set(i) followed by clear(i) must leave the bit clear again.
    private void doTestClearVectorOfSize(int n) {
        BitVector bv = new BitVector(n);
        for(int i=0;i<bv.size();i++) {
            // ensure a set bit is cleared
            assertFalse(bv.get(i));
            bv.set(i);
            assertTrue(bv.get(i));
            bv.clear(i);
            assertFalse(bv.get(i));
        }
    }

    /**
     * Test the count() method on BitVectors of various sizes.
     * @throws Exception
     */
    public void testCount() throws Exception {
        doTestCountVectorOfSize(8);
        doTestCountVectorOfSize(20);
        doTestCountVectorOfSize(100);
        doTestCountVectorOfSize(1000);
    }

    // count() must track the cardinality through both set() and clear().
    private void doTestCountVectorOfSize(int n) {
        BitVector bv = new BitVector(n);
        // test count when incrementally setting bits
        for(int i=0;i<bv.size();i++) {
            assertFalse(bv.get(i));
            assertEquals(i,bv.count());
            bv.set(i);
            assertTrue(bv.get(i));
            assertEquals(i+1,bv.count());
        }

        bv = new BitVector(n);
        // test count when setting then clearing bits
        for(int i=0;i<bv.size();i++) {
            assertFalse(bv.get(i));
            assertEquals(0,bv.count());
            bv.set(i);
            assertTrue(bv.get(i));
            assertEquals(1,bv.count());
            bv.clear(i);
            assertFalse(bv.get(i));
            assertEquals(0,bv.count());
        }
    }

    /**
     * Test writing and construction to/from Directory.
     * @throws Exception
     */
    public void testWriteRead() throws Exception {
        doTestWriteRead(8);
        doTestWriteRead(20);
        doTestWriteRead(100);
        doTestWriteRead(1000);
    }

    // After each incremental set(), persist the vector and verify the copy
    // read back from the Directory is bit-for-bit identical.
    private void doTestWriteRead(int n) throws Exception {
        Directory d = new  RAMDirectory();

        BitVector bv = new BitVector(n);
        // set bits one at a time, writing and re-reading after each set
        for(int i=0;i<bv.size();i++) {
            assertFalse(bv.get(i));
            assertEquals(i,bv.count());
            bv.set(i);
            assertTrue(bv.get(i));
            assertEquals(i+1,bv.count());
            bv.write(d, "TESTBV");
            BitVector compare = new BitVector(d, "TESTBV");
            // compare bit vectors with bits set incrementally
            assertTrue(doCompare(bv,compare));
        }
    }

    /**
     * Test r/w when size/count cause switching between bit-set and d-gaps file formats.
     * @throws Exception
     */
    public void testDgaps() throws IOException {
        doTestDgaps(1,0,1);
        doTestDgaps(10,0,1);
        doTestDgaps(100,0,1);
        doTestDgaps(1000,4,7);
        doTestDgaps(10000,40,43);
        doTestDgaps(100000,415,418);
        doTestDgaps(1000000,3123,3126);
    }

    // Walks the set-bit count across [count1, count2) in both directions so
    // the on-disk representation crosses the bit-set/d-gaps format boundary.
    private void doTestDgaps(int size, int count1, int count2) throws IOException {
        Directory d = new  RAMDirectory();
        BitVector bv = new BitVector(size);
        for (int i=0; i<count1; i++) {
            bv.set(i);
            assertEquals(i+1,bv.count());
        }
        bv.write(d, "TESTBV");
        // gradually increase number of set bits
        for (int i=count1; i<count2; i++) {
            BitVector bv2 = new BitVector(d, "TESTBV");
            assertTrue(doCompare(bv,bv2));
            bv = bv2;
            bv.set(i);
            assertEquals(i+1,bv.count());
            bv.write(d, "TESTBV");
        }
        // now start decreasing number of set bits
        for (int i=count2-1; i>=count1; i--) {
            BitVector bv2 = new BitVector(d, "TESTBV");
            assertTrue(doCompare(bv,bv2));
            bv = bv2;
            bv.clear(i);
            assertEquals(i,bv.count());
            bv.write(d, "TESTBV");
        }
    }

    /**
     * Compare two BitVectors.
     * This should really be an equals method on the BitVector itself.
     * @param bv One bit vector
     * @param compare The second to compare
     * @return true if both vectors contain exactly the same bits
     */
    private boolean doCompare(BitVector bv, BitVector compare) {
        boolean equal = true;
        for(int i=0;i<bv.size();i++) {
            // bits must be equal
            if(bv.get(i)!=compare.get(i)) {
                equal = false;
                break;
            }
        }
        return equal;
    }
}
|
3e1e497944b7f09a68b75dde4f3dc387385d2154 | 4,178 | java | Java | pkts-core/src/main/java/io/pkts/framer/RTPFramer.java | dm3/pkts | 175bd09293ac8ef2dc9e17f0019b46ff15c32f87 | [
"MIT"
] | 173 | 2015-01-29T04:22:35.000Z | 2022-03-12T12:21:06.000Z | pkts-core/src/main/java/io/pkts/framer/RTPFramer.java | dm3/pkts | 175bd09293ac8ef2dc9e17f0019b46ff15c32f87 | [
"MIT"
] | 67 | 2015-04-25T10:23:54.000Z | 2021-09-23T08:35:17.000Z | pkts-core/src/main/java/io/pkts/framer/RTPFramer.java | dm3/pkts | 175bd09293ac8ef2dc9e17f0019b46ff15c32f87 | [
"MIT"
] | 97 | 2015-01-14T22:20:53.000Z | 2022-03-12T12:21:10.000Z | 34.586777 | 122 | 0.585185 | 12,818 | /**
*
*/
package io.pkts.framer;
import java.io.IOException;
import io.pkts.buffer.Buffer;
import io.pkts.packet.TransportPacket;
import io.pkts.packet.rtp.RtpPacket;
import io.pkts.packet.rtp.impl.RtpPacketImpl;
import io.pkts.protocol.Protocol;
/**
* @author lyhxr@example.com
*/
public final class RTPFramer implements Framer<TransportPacket, RtpPacket> {

    /**
     * Creates a new RTP framer.
     */
    public RTPFramer() {
        // left empty intentionally
    }

    @Override
    public Protocol getProtocol() {
        return Protocol.RTP;
    }

    /**
     * There is no real good test to make sure that the data indeed is an RTP
     * packet. Appendix 2 in RFC3550 describes one way of doing it but you
     * really need a sequence of packets in order to be able to determine if
     * this indeed is a RTP packet or not. The best is to analyze the session
     * negotiation but here we are just looking at a single packet so can't do
     * that.
     *
     * Also, RTP and RTCP packets are hard to distinguish from each other and
     * you really need help from outside such as from the Session Description
     * Protocol but until we connect things together we will do a simple check
     * so that we do not pick up RTCP packets. Note, this check is not super
     * safe, see RFC 5761.
     *
     * {@inheritDoc}
     */
    @Override
    public boolean accept(final Buffer data) throws IOException {
        // a RTP packet has at least 12 bytes. Check that
        if (data.getReadableBytes() < 12) {
            // not enough bytes but see if we actually could
            // get another 12 bytes by forcing the underlying
            // implementation to read further ahead
            data.markReaderIndex();
            try {
                final Buffer b = data.readBytes(12);
                if (b.capacity() < 12) {
                    return false;
                }
            } catch (final IndexOutOfBoundsException e) {
                // guess not...
                return false;
            }
            finally {
                // always rewind so the probe read leaves the buffer untouched
                data.resetReaderIndex();
            }
        }

        // both RTP and RTCP carry version 2 in the top two bits of the first
        // byte, so anything else can be neither RTP nor RTCP
        final byte b = data.getByte(0);
        if (!((b & 0xC0) >> 6 == 0x02)) {
            return false;
        }

        // The second byte is for RTCP equal to the Packet Type (pt)
        // and if following the guidelines of RFC 5761 (section 4) then
        // RTP Payload Type + 128 != RTCP Packet Type. Reject the known RTCP
        // packet types 200-204 (SR, RR, SDES, BYE, APP).
        final byte b2 = data.getByte(1);
        if (b2 == (byte) 0xc8 || b2 == (byte) 0xc9 || b2 == (byte) 0xca
                || b2 == (byte) 0xcb || b2 == (byte) 0xcc) {
            return false;
        }

        return true;
    }

    @Override
    public RtpPacket frame(final TransportPacket parent, final Buffer buffer) throws IOException {
        if (parent == null) {
            throw new IllegalArgumentException("The parent frame cannot be null");
        }
        final int index = buffer.getReaderIndex();
        try {
            // An RTP packet has a least 12 bytes but can contain more depending on
            // extensions, padding etc. Figure that out.
            final Buffer headers = buffer.readBytes(12);
            // Use the primitive byte: the original declared java.lang.Byte,
            // which only added pointless autoboxing before the bit masking.
            final byte b = headers.getByte(0);
            final boolean hasPadding = (b & 0x20) == 0x020;
            final boolean hasExtension = (b & 0x10) == 0x010;
            final int csrcCount = b & 0x0F;
            if (hasExtension) {
                // Consume the extension header: 16 bits of profile-defined
                // data, a 16-bit length, then 'length' bytes of extension
                // data. The values are not used yet, but the reads must
                // happen so the reader index advances past the extension.
                buffer.readShort();
                final int length = buffer.readUnsignedShort();
                buffer.readBytes(length);
            }
            if (hasPadding || hasExtension || csrcCount > 0) {
                // TODO: padding and CSRC entries are not handled yet; in
                // that case the payload below may still include padding
                // and/or CSRC bytes.
            }
            final Buffer payload = buffer.slice();
            return new RtpPacketImpl(parent, headers, payload);
        } catch (final IndexOutOfBoundsException e) {
            // restore the reader index so the caller sees an untouched buffer
            buffer.setReaderIndex(index);
            throw e;
        }
    }
}
|
3e1e497bcb30bec3335ee49019c8cd615e25d31c | 10,377 | java | Java | src/main/java/cn/sinlmao/commons/network/http/ImResponse.java | sinlmao/NetworkUtils | 86b53664024b7fe57dfdf3cd0559c6e786a40f6a | [
"RSA-MD"
] | 5 | 2019-08-17T18:32:54.000Z | 2021-11-07T20:28:19.000Z | src/main/java/cn/sinlmao/commons/network/http/ImResponse.java | sinlmao/NetworkUtils | 86b53664024b7fe57dfdf3cd0559c6e786a40f6a | [
"RSA-MD"
] | null | null | null | src/main/java/cn/sinlmao/commons/network/http/ImResponse.java | sinlmao/NetworkUtils | 86b53664024b7fe57dfdf3cd0559c6e786a40f6a | [
"RSA-MD"
] | 1 | 2019-09-02T09:58:08.000Z | 2019-09-02T09:58:08.000Z | 31.910769 | 133 | 0.608717 | 12,819 | /**
* Copyright (c) 2019, Sinlmao (upchh@example.com).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.sinlmao.commons.network.http;
import cn.sinlmao.commons.network.bean.ImResponseCookie;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* <b>HTTP Response类</b>
* <p>
* 该类为HTTP请求响应(Response)数据封装,所有与Response相关的数据均在该类封装提供。
* <br/><br/>
* <b>HTTP Response class</b>
* <p>
* This class is HTTP response (Response) data encapsulation, and all data related to Response is provided in this class package.
*
* @author Sinlmao
* @program Sinlmao Commons Network Utils
* @description HTTP Response类
* @create 2019-08-01 11:11
*/
public final class ImResponse {

    // HTTP status code returned by the server.
    private int responseCode;
    // HTTP reason phrase returned by the server.
    private String responseMessage;
    // Response body decoded as a String, if any.
    private String stringContent;
    // Raw response body bytes, if any.
    private byte[] bytesContent;

    // All response headers, keyed by header name.
    private Map<String, List<String>> headers = new HashMap<String, List<String>>();

    // The raw, unparsed cookie string from the response.
    private String cookieStr;
    // Parsed cookie name/value pairs.
    private Map<String, String> cookies = new HashMap<String, String>();
    // Parsed cookies with their full attribute data (since 1.4.1).
    private Map<String, ImResponseCookie> cookiePropertys = new HashMap<String, ImResponseCookie>();

    ///////////////////////////////////////////////////////////////////////

    /**
     * [Internal] Sets the Response status code.
     *
     * @param responseCode the Response status code
     * @return this ImResponse, for call chaining
     */
    protected ImResponse setResponseCode(int responseCode) {
        this.responseCode = responseCode;
        return this;
    }

    /**
     * [Internal] Sets the Response status message.
     *
     * @param responseMessage the Response status message
     * @return this ImResponse, for call chaining
     */
    protected ImResponse setResponseMessage(String responseMessage) {
        this.responseMessage = responseMessage;
        return this;
    }

    /**
     * [Internal] Sets the Response body as a String.
     *
     * @param stringContent the Response String body
     * @return this ImResponse, for call chaining
     */
    protected ImResponse setStringContent(String stringContent) {
        this.stringContent = stringContent;
        return this;
    }

    /**
     * [Internal] Sets the Response body as raw bytes.
     *
     * @param bytesContent the Response bytes body
     * @return this ImResponse, for call chaining
     */
    protected ImResponse setBytesContent(byte[] bytesContent) {
        this.bytesContent = bytesContent;
        return this;
    }

    /**
     * [Internal] Stores the complete, unparsed cookie string.
     *
     * @param data the full cookie data
     * @return this ImResponse, for call chaining
     */
    protected ImResponse setFullCookie(String data) {
        cookieStr = data;
        return this;
    }

    /**
     * [Internal] Adds a single parsed cookie.
     *
     * @param name the cookie name
     * @param data the cookie value
     * @return this ImResponse, for call chaining
     */
    protected ImResponse addCookie(String name, String data) {
        cookies.put(name, data);
        return this;
    }

    /**
     * [Internal] Adds the full attribute data of a parsed cookie.
     *
     * @param imResponseCookie the cookie attribute data
     * @return this ImResponse, for call chaining
     * @since 1.4.1
     */
    protected ImResponse addCookieProperty(ImResponseCookie imResponseCookie) {
        cookiePropertys.put(imResponseCookie.getName(), imResponseCookie);
        return this;
    }

    /**
     * [Internal] Stores the complete header map.
     *
     * @param headers the full header data
     * @return this ImResponse, for call chaining
     */
    protected ImResponse setFullHeaders(Map<String, List<String>> headers) {
        this.headers = headers;
        return this;
    }

    ///////////////////////////////////////////////////////////////////////

    /**
     * Returns the Response status code.
     *
     * @return the Response status code
     */
    public int getResponseCode() {
        return responseCode;
    }

    /**
     * Returns the Response status message.
     *
     * @return the Response status message
     */
    public String getResponseMessage() {
        return responseMessage;
    }

    /**
     * Returns the Response body as a String.
     *
     * @return the Response String body, or null if none was set
     */
    public String getStringContent() {
        return stringContent;
    }

    /**
     * Returns the Response body as raw bytes.
     *
     * @return the Response bytes body, or null if none was set
     */
    public byte[] getBytesContent() {
        return bytesContent;
    }

    /**
     * Copies the Response body bytes into the supplied, caller-allocated
     * array. The array should be at least as large as the content returned
     * by {@link #getBytesContent()}; extra trailing bytes are left untouched.
     * <p>
     * Note: the previous implementation only reassigned the parameter
     * variable, which under Java's pass-by-value semantics was a no-op for
     * the caller. The content is now actually copied into the given array.
     *
     * @param bytesContent an empty array that receives the Response bytes
     */
    public void get(byte[] bytesContent) {
        if (this.bytesContent == null || bytesContent == null) {
            return;
        }
        System.arraycopy(this.bytesContent, 0, bytesContent, 0,
                Math.min(this.bytesContent.length, bytesContent.length));
    }

    ///////////////////////////////////////////////////////////////////////

    /**
     * Returns the full, unparsed cookie string.
     *
     * @return the complete cookie data string
     */
    public String getCookieStr() {
        return this.cookieStr;
    }

    /**
     * Returns the cookie value for the given cookie name.
     *
     * @param name the cookie name
     * @return the cookie value, or null if no such cookie exists
     */
    public String getCookieData(String name) {
        return cookies.get(name);
    }

    /**
     * Returns the full cookie attribute data for the given cookie name.
     *
     * @param name the cookie name
     * @return the cookie attribute data, or null if no such cookie exists
     */
    public ImResponseCookie getCookieProperty(String name) {
        return cookiePropertys.get(name);
    }

    /**
     * Returns all cookie names.
     *
     * @return the set of all cookie names
     */
    public Set<String> getCookieNames() {
        return cookies.keySet();
    }

    /**
     * Returns the number of cookies.
     *
     * @return the cookie count
     */
    public int getCookieSize() {
        return cookies.size();
    }

    ///////////////////////////////////////////////////////////////////////

    /**
     * Returns the header values for the given header name.
     *
     * @param name the header name
     * @return the header values, or null if no such header exists
     */
    public List<String> getHeaderData(String name) {
        return headers.get(name);
    }

    /**
     * Returns all header names.
     *
     * @return the set of all header names
     */
    public Set<String> getHeaderNames() {
        return headers.keySet();
    }

    /**
     * Returns the number of headers.
     *
     * @return the header count
     */
    public int getHeaderSize() {
        return headers.size();
    }

    ///////////////////////////////////////////////////////////////////////

    /**
     * Instances are created by the framework only; external instantiation
     * is forbidden.
     */
    protected ImResponse() {
    }
}
|
3e1e4a08ae0a30afc073b6013849f6dd1ba8c57b | 1,268 | java | Java | discovery-plugin-strategy/discovery-plugin-strategy-starter-zuul/src/main/java/com/nepxion/discovery/plugin/strategy/zuul/processor/ZuulStrategyRouteApolloProcessor.java | zhaoyb/Discovery | 49a86b97c4646185d228827408e638d5db08d995 | [
"Apache-2.0"
] | 5,102 | 2018-06-22T06:42:43.000Z | 2022-03-30T11:56:05.000Z | discovery-plugin-strategy/discovery-plugin-strategy-starter-zuul/src/main/java/com/nepxion/discovery/plugin/strategy/zuul/processor/ZuulStrategyRouteApolloProcessor.java | zhaoyb/Discovery | 49a86b97c4646185d228827408e638d5db08d995 | [
"Apache-2.0"
] | 112 | 2018-07-08T07:05:54.000Z | 2022-03-13T11:03:07.000Z | discovery-plugin-strategy/discovery-plugin-strategy-starter-zuul/src/main/java/com/nepxion/discovery/plugin/strategy/zuul/processor/ZuulStrategyRouteApolloProcessor.java | zhaoyb/Discovery | 49a86b97c4646185d228827408e638d5db08d995 | [
"Apache-2.0"
] | 1,361 | 2018-06-28T16:03:28.000Z | 2022-03-31T06:34:10.000Z | 28.177778 | 88 | 0.742114 | 12,820 | package com.nepxion.discovery.plugin.strategy.zuul.processor;
/**
* <p>Title: Nepxion Discovery</p>
* <p>Description: Nepxion Discovery</p>
* <p>Copyright: Copyright (c) 2017-2050</p>
* <p>Company: Nepxion</p>
* @author Haojun Ren
* @version 1.0
*/
import org.springframework.beans.factory.annotation.Autowired;
import com.nepxion.discovery.common.apollo.proccessor.ApolloProcessor;
import com.nepxion.discovery.common.constant.DiscoveryConstant;
import com.nepxion.discovery.plugin.framework.adapter.PluginAdapter;
import com.nepxion.discovery.plugin.strategy.zuul.route.ZuulStrategyRoute;
public class ZuulStrategyRouteApolloProcessor extends ApolloProcessor {
@Autowired
private PluginAdapter pluginAdapter;
@Autowired
private ZuulStrategyRoute zuulStrategyRoute;
@Override
public String getGroup() {
return pluginAdapter.getGroup();
}
@Override
public String getDataId() {
return pluginAdapter.getServiceId() + "-" + DiscoveryConstant.DYNAMIC_ROUTE_KEY;
}
@Override
public String getDescription() {
return DiscoveryConstant.ZUUL_DYNAMIC_ROUTE_DESCRIPTION;
}
@Override
public void callbackConfig(String config) {
zuulStrategyRoute.updateAll(config);
}
} |
3e1e4a4b436316526144055bddb31a1669c71c75 | 1,928 | java | Java | src/main/java/io/github/mrdarcychen/arenas/SpawnPoint.java | darcy-chen/BlockyArena | 4666501f241b5cbfe9bad2b5d4554a5c0844d4ff | [
"Apache-2.0"
] | 3 | 2020-04-25T09:16:11.000Z | 2021-02-10T02:00:18.000Z | src/main/java/io/github/mrdarcychen/arenas/SpawnPoint.java | darcy-chen/BlockyArena | 4666501f241b5cbfe9bad2b5d4554a5c0844d4ff | [
"Apache-2.0"
] | 17 | 2017-09-24T06:32:24.000Z | 2018-04-11T21:49:36.000Z | src/main/java/io/github/mrdarcychen/arenas/SpawnPoint.java | Darcy-Chen/BlockyArena | 4666501f241b5cbfe9bad2b5d4554a5c0844d4ff | [
"Apache-2.0"
] | 3 | 2020-04-25T19:19:37.000Z | 2021-07-17T14:42:28.000Z | 30.125 | 97 | 0.711618 | 12,821 | /*
* Copyright 2017-2020 The BlockyArena Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.mrdarcychen.arenas;
import com.flowpowered.math.vector.Vector3d;
import org.spongepowered.api.entity.Transform;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
/**
* Represents a specific spawn location with head rotation for a Team.
*/
public class SpawnPoint {
private final Transform<World> transform;
/**
* Constructs a Team with the location of the spawn point and the rotation of the spawn point
*
* @param location the location that a player spawns
* @param rotation the heading of a player who spawns at the spawn location
*/
@Deprecated
public SpawnPoint(Location<World> location, Vector3d rotation) {
transform = new Transform<>(location.getExtent(), location.getPosition(), rotation);
}
private SpawnPoint(Transform<World> transform) {
this.transform = transform;
}
@Deprecated
public Location getSpawnLocation() {
return transform.getLocation();
}
@Deprecated
public Vector3d getSpawnRotation() {
return transform.getRotation();
}
public Transform<World> getTransform() {
return transform;
}
public static SpawnPoint of(Transform<World> transform) {
return new SpawnPoint(transform);
}
}
|
3e1e4ab76e62cb60b5b0b3c321ddf2b9afb6ea4f | 33,052 | java | Java | src/net/java/sip/communicator/impl/history/HistoryReaderImpl.java | mstyura/jitsi | 4dca2c5995be23b4380d9e30f4ddbe73fb7be3a9 | [
"Apache-2.0"
] | 3,442 | 2015-01-08T09:51:28.000Z | 2022-03-31T02:48:33.000Z | src/net/java/sip/communicator/impl/history/HistoryReaderImpl.java | fleapapa/jitsi | 805d76dad2d2218614de395f6a9c0cb3b582e664 | [
"Apache-2.0"
] | 577 | 2015-01-27T20:50:12.000Z | 2022-03-11T13:08:45.000Z | src/net/java/sip/communicator/impl/history/HistoryReaderImpl.java | fleapapa/jitsi | 805d76dad2d2218614de395f6a9c0cb3b582e664 | [
"Apache-2.0"
] | 953 | 2015-01-04T05:20:14.000Z | 2022-03-31T14:04:14.000Z | 33.151454 | 81 | 0.55337 | 12,822 | /*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.impl.history;
import static
net.java.sip.communicator.service.history.HistoryService.DATE_FORMAT;
import java.text.*;
import java.util.*;
import java.util.regex.*;
import net.java.sip.communicator.service.history.*;
import net.java.sip.communicator.service.history.event.*;
import net.java.sip.communicator.service.history.records.*;
import org.apache.commons.lang3.*;
import org.w3c.dom.*;
/**
* @author Alexander Pelov
* @author Damian Minkov
* @author Yana Stamcheva
*/
public class HistoryReaderImpl
implements HistoryReader
{
// The history store this reader searches.
private HistoryImpl historyImpl;

// Listeners notified about search progress (Vector: legacy synchronized list).
private Vector<HistorySearchProgressListener> progressListeners
    = new Vector<HistorySearchProgressListener>();

// regexp used for index of case(in)sensitive impl:
// (?s) makes '.' match line terminators too; (?i) adds case-insensitivity
private static String REGEXP_END = ".*$";
private static String REGEXP_SENSITIVE_START = "(?s)^.*";
private static String REGEXP_INSENSITIVE_START = "(?si)^.*";
/**
 * Creates an instance of <tt>HistoryReaderImpl</tt>. Protected so readers
 * are only created by the history implementation itself.
 * @param historyImpl the parent History implementation
 */
protected HistoryReaderImpl(HistoryImpl historyImpl)
{
    this.historyImpl = historyImpl;
}
/**
 * Searches the history for all records with timestamp after
 * <tt>startDate</tt>.
 *
 * @param startDate the date after which all records will be returned
 * @return the found records
 * @throws RuntimeException
 *             Thrown if an exception occurs during the execution of the
 *             query, such as internal IO error.
 */
public synchronized QueryResultSet<HistoryRecord> findByStartDate(
    Date startDate)
    throws RuntimeException
{
    // delegate to the common search with only a lower time bound
    return find(startDate, null, null, null, false);
}
/**
 * Searches the history for all records with timestamp before
 * <tt>endDate</tt>.
 *
 * @param endDate the date before which all records will be returned
 * @return the found records
 * @throws RuntimeException
 *             Thrown if an exception occurs during the execution of the
 *             query, such as internal IO error.
 */
public synchronized QueryResultSet<HistoryRecord> findByEndDate(Date endDate)
    throws RuntimeException
{
    // delegate to the common search with only an upper time bound
    return find(null, endDate, null, null, false);
}
/**
 * Searches the history for all records with timestamp between
 * <tt>startDate</tt> and <tt>endDate</tt>.
 *
 * @param startDate start of the interval in which we search
 * @param endDate end of the interval in which we search
 * @return the found records
 * @throws RuntimeException
 *             Thrown if an exception occurs during the execution of the
 *             query, such as internal IO error.
 */
public synchronized QueryResultSet<HistoryRecord>
    findByPeriod(Date startDate, Date endDate)
        throws RuntimeException
{
    // delegate to the common search bounded on both ends, no keyword filter
    return find(startDate, endDate, null, null, false);
}
/**
 * Searches the history for all records containing the <tt>keyword</tt>.
 *
 * @param keyword the keyword to search for
 * @param field the field where to look for the keyword
 * @return the found records
 * @throws RuntimeException
 *             Thrown if an exception occurs during the execution of the
 *             query, such as internal IO error.
 */
public synchronized QueryResultSet<HistoryRecord>
    findByKeyword(String keyword, String field)
        throws RuntimeException
{
    // single-keyword search is just the multi-keyword search with one entry
    return findByKeywords(new String[] { keyword }, field);
}
/**
 * Searches the history for all records containing all <tt>keywords</tt>.
 *
 * @param keywords array of keywords we search for
 * @param field the field where to look for the keyword
 * @return the found records
 * @throws RuntimeException
 *             Thrown if an exception occurs during the execution of the
 *             query, such as internal IO error.
 */
public synchronized QueryResultSet<HistoryRecord>
    findByKeywords(String[] keywords, String field)
        throws RuntimeException
{
    // delegate to the common search with a keyword filter, no time bounds
    return find(null, null, keywords, field, false);
}
/**
 * Searches for all history records containing all <tt>keywords</tt>,
 * with timestamp between <tt>startDate</tt> and <tt>endDate</tt>.
 *
 * @param startDate start of the interval in which we search
 * @param endDate end of the interval in which we search
 * @param keywords array of keywords we search for
 * @param field the field where to look for the keyword
 * @return the found records
 * @throws UnsupportedOperationException
 *             Thrown if an exception occurs during the execution of the
 *             query, such as internal IO error.
 */
public synchronized QueryResultSet<HistoryRecord>
    findByPeriod(Date startDate,
            Date endDate,
            String[] keywords,
            String field)
        throws UnsupportedOperationException
{
    // delegate to the common search with time bounds and keyword filter
    return find(startDate, endDate, keywords, field, false);
}
/**
 * Returns the last <tt>count</tt> messages.
 * No progress firing as this method is supposed to be used
 * in message windows and is supposed to be as quick as it can.
 *
 * @param count int number of records to return
 * @return QueryResultSet the most recent records
 * @throws RuntimeException on internal IO error
 */
public synchronized QueryResultSet<HistoryRecord> findLast(int count)
    throws RuntimeException
{
    // no keyword filter, case-sensitivity irrelevant
    return findLast(count, null, null, false);
}
/**
 * Returns the supplied number of recent messages
 * containing all <tt>keywords</tt>.
 * <p>
 * NOTE(review): <tt>leftCount</tt> counts records scanned, not records that
 * pass the keyword filter, so fewer than <tt>count</tt> records may be
 * returned when a keyword filter is active — confirm this is intended.
 *
 * @param count messages count
 * @param keywords array of keywords we search for
 * @param field the field where to look for the keyword
 * @param caseSensitive is keywords search case sensitive
 * @return the found records
 * @throws RuntimeException on internal IO error
 */
public synchronized QueryResultSet<HistoryRecord> findLast(
        int count,
        String[] keywords,
        String field,
        boolean caseSensitive)
    throws RuntimeException
{
    // the files are supposed to be ordered from oldest to newest
    Vector<String> filelist =
        filterFilesByDate(this.historyImpl.getFileList(), null, null);

    // keep results ordered by the record comparator (most-recent handling
    // is done by walking the files backwards below)
    TreeSet<HistoryRecord> result
        = new TreeSet<HistoryRecord>(new HistoryRecordComparator());
    int leftCount = count;
    int currentFile = filelist.size() - 1;

    SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
    // walk the files newest-to-oldest until enough records were scanned
    while(leftCount > 0 && currentFile >= 0)
    {
        Document doc = this.historyImpl.
            getDocumentForFile(filelist.get(currentFile));

        if(doc == null)
        {
            // unreadable/missing file — skip to the previous one
            currentFile--;
            continue;
        }

        // will get nodes and construct a List of nodes
        // so we can easily get sublist of it
        List<Node> nodes = new ArrayList<Node>();
        NodeList nodesList = doc.getElementsByTagName("record");
        for (int i = 0; i < nodesList.getLength(); i++)
        {
            nodes.add(nodesList.item(i));
        }

        List<Node> lNodes = null;

        if (nodes.size() > leftCount)
        {
            // only the newest 'leftCount' records of this file are needed
            lNodes = nodes.subList(nodes.size() - leftCount , nodes.size());
            leftCount = 0;
        }
        else
        {
            // take the whole file and keep scanning older files
            lNodes = nodes;
            leftCount -= nodes.size();
        }

        Iterator<Node> i = lNodes.iterator();
        while (i.hasNext())
        {
            Node node = i.next();

            NodeList propertyNodes = node.getChildNodes();

            Date timestamp;
            String ts = node.getAttributes().getNamedItem("timestamp")
                .getNodeValue();
            try
            {
                timestamp = sdf.parse(ts);
            }
            catch (ParseException e)
            {
                // legacy records store the timestamp as epoch milliseconds
                timestamp = new Date(Long.parseLong(ts));
            }

            // apply the keyword filter; null means the record was rejected
            HistoryRecord record =
                filterByKeyword(propertyNodes, timestamp,
                    keywords, field, caseSensitive);
            if(record != null)
            {
                result.add(record);
            }
        }

        currentFile--;
    }

    return new OrderedQueryResultSet<HistoryRecord>(result);
}
/**
 * Searches the history for all records containing the single
 * <tt>keyword</tt>.
 *
 * @param keyword the keyword to search for
 * @param field the field where to look for the keyword
 * @param caseSensitive is keywords search case sensitive
 * @return the found records
 * @throws RuntimeException
 *         Thrown if an exception occurs during the execution of the
 *         query, such as internal IO error.
 */
public synchronized QueryResultSet<HistoryRecord>
    findByKeyword(String keyword, String field, boolean caseSensitive)
        throws RuntimeException
{
    // Wrap the single keyword and run an unbounded-period query;
    // identical to routing through findByKeywords(...).
    String[] keywords = new String[] { keyword };
    return find(null, null, keywords, field, caseSensitive);
}
/**
 * Searches the history for all records containing all <tt>keywords</tt>.
 *
 * @param keywords array of keywords we search for
 * @param field the field where to look for the keyword
 * @param caseSensitive is keywords search case sensitive
 * @return the found records
 * @throws RuntimeException
 *         Thrown if an exception occurs during the execution of the
 *         query, such as internal IO error.
 */
public synchronized QueryResultSet<HistoryRecord>
    findByKeywords(String[] keywords, String field, boolean caseSensitive)
    throws RuntimeException
{
    // Unbounded period: the complete history is searched.
    return find(null, null, keywords, field, caseSensitive);
}
/**
 * Searches for all history records containing all <tt>keywords</tt>,
 * with timestamp between <tt>startDate</tt> and <tt>endDate</tt>.
 *
 * @param startDate start of the interval in which we search
 * @param endDate end of the interval in which we search
 * @param keywords array of keywords we search for
 * @param field the field where to look for the keyword
 * @param caseSensitive is keywords search case sensitive
 * @return the found records
 * @throws UnsupportedOperationException
 *         Thrown if an exception occurs during the execution of the
 *         query, such as internal IO error.
 */
public synchronized QueryResultSet<HistoryRecord>
    findByPeriod(Date startDate,
                 Date endDate,
                 String[] keywords,
                 String field,
                 boolean caseSensitive)
    throws UnsupportedOperationException
{
    // Thin wrapper over the core query routine.
    return find(startDate, endDate, keywords, field, caseSensitive);
}
/**
 * Returns the supplied number of recent messages after the given date,
 * i.e. at most <tt>count</tt> of the oldest records dated after
 * <tt>date</tt>, ordered by timestamp.
 *
 * @param date messages after date
 * @param count messages count
 * @return QueryResultSet the found records
 * @throws RuntimeException on internal IO error
 */
public QueryResultSet<HistoryRecord>
    findFirstRecordsAfter(Date date, int count)
    throws RuntimeException
{
    TreeSet<HistoryRecord> result
        = new TreeSet<HistoryRecord>(new HistoryRecordComparator());

    // Only the files that may contain records after the date are parsed.
    Vector<String> filelist =
        filterFilesByDate(this.historyImpl.getFileList(), date, null);

    int leftCount = count;
    int currentFile = 0;

    SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
    // Walk files oldest-first, stopping once enough records were found.
    while(leftCount > 0 && currentFile < filelist.size())
    {
        Document doc = this.historyImpl.
            getDocumentForFile(filelist.get(currentFile));

        if(doc == null)
        {
            // Unreadable/missing file - skip it.
            currentFile++;
            continue;
        }

        NodeList nodes = doc.getElementsByTagName("record");

        Node node;
        for (int i = 0; i < nodes.getLength() && leftCount > 0; i++)
        {
            node = nodes.item(i);

            NodeList propertyNodes = node.getChildNodes();

            Date timestamp;
            String ts = node.getAttributes().getNamedItem("timestamp")
                .getNodeValue();
            try
            {
                // Primary timestamp format; fall back to raw milliseconds.
                timestamp = sdf.parse(ts);
            }
            catch (ParseException e)
            {
                timestamp = new Date(Long.parseLong(ts));
            }

            // Skip records outside the requested period.
            if(!isInPeriod(timestamp, date, null))
                continue;

            // Collect name/value pairs of the record's property elements.
            ArrayList<String> nameVals = new ArrayList<String>();

            boolean isRecordOK = true;
            int len = propertyNodes.getLength();
            for (int j = 0; j < len; j++)
            {
                Node propertyNode = propertyNodes.item(j);
                if (propertyNode.getNodeType() == Node.ELEMENT_NODE)
                {
                    // Get nested TEXT node's value
                    Node nodeValue = propertyNode.getFirstChild();

                    if(nodeValue != null)
                    {
                        nameVals.add(propertyNode.getNodeName());
                        nameVals.add(nodeValue.getNodeValue());
                    }
                    else
                        isRecordOK = false;
                }
            }

            // if we found a broken record - just skip it
            if(!isRecordOK)
                continue;

            String[] propertyNames = new String[nameVals.size() / 2];
            String[] propertyValues = new String[propertyNames.length];
            for (int j = 0; j < propertyNames.length; j++)
            {
                propertyNames[j] = nameVals.get(j * 2);
                propertyValues[j] = nameVals.get(j * 2 + 1);
            }

            HistoryRecord record = new HistoryRecord(propertyNames,
                propertyValues, timestamp);

            result.add(record);
            leftCount--;
        }

        currentFile++;
    }

    return new OrderedQueryResultSet<HistoryRecord>(result);
}
/**
 * Returns the supplied number of recent messages before the given date,
 * i.e. at most <tt>count</tt> of the newest records dated before
 * <tt>date</tt>, ordered by timestamp.
 *
 * @param date messages before date
 * @param count messages count
 * @return QueryResultSet the found records
 * @throws RuntimeException on internal IO error
 */
public QueryResultSet<HistoryRecord>
    findLastRecordsBefore(Date date, int count)
    throws RuntimeException
{
    // the files are supposed to be ordered from oldest to newest
    Vector<String> filelist =
        filterFilesByDate(this.historyImpl.getFileList(), null, date);

    TreeSet<HistoryRecord> result
        = new TreeSet<HistoryRecord>(new HistoryRecordComparator());
    int leftCount = count;

    // Walk files newest-first and records last-to-first so the newest
    // qualifying records are collected; the TreeSet re-sorts ascending.
    int currentFile = filelist.size() - 1;

    SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
    while(leftCount > 0 && currentFile >= 0)
    {
        Document doc = this.historyImpl.
            getDocumentForFile(filelist.get(currentFile));

        if(doc == null)
        {
            // Unreadable/missing file - skip it.
            currentFile--;
            continue;
        }

        NodeList nodes = doc.getElementsByTagName("record");
        Node node;
        for (int i = nodes.getLength() - 1; i >= 0 && leftCount > 0; i--)
        {
            node = nodes.item(i);
            NodeList propertyNodes = node.getChildNodes();

            Date timestamp;
            String ts = node.getAttributes().getNamedItem("timestamp")
                .getNodeValue();
            try
            {
                // Primary timestamp format; fall back to raw milliseconds.
                timestamp = sdf.parse(ts);
            }
            catch (ParseException e)
            {
                timestamp = new Date(Long.parseLong(ts));
            }

            // Skip records outside the requested period.
            if(!isInPeriod(timestamp, null, date))
                continue;

            // Collect name/value pairs of the record's property elements.
            ArrayList<String> nameVals = new ArrayList<String>();

            boolean isRecordOK = true;
            int len = propertyNodes.getLength();
            for (int j = 0; j < len; j++)
            {
                Node propertyNode = propertyNodes.item(j);
                if (propertyNode.getNodeType() == Node.ELEMENT_NODE)
                {
                    // Get nested TEXT node's value
                    Node nodeValue = propertyNode.getFirstChild();

                    if(nodeValue != null)
                    {
                        nameVals.add(propertyNode.getNodeName());
                        nameVals.add(nodeValue.getNodeValue());
                    }
                    else
                        isRecordOK = false;
                }
            }

            // if we found a broken record - just skip it
            if(!isRecordOK)
                continue;

            String[] propertyNames = new String[nameVals.size() / 2];
            String[] propertyValues = new String[propertyNames.length];
            for (int j = 0; j < propertyNames.length; j++)
            {
                propertyNames[j] = nameVals.get(j * 2);
                propertyValues[j] = nameVals.get(j * 2 + 1);
            }

            HistoryRecord record = new HistoryRecord(propertyNames,
                propertyValues, timestamp);

            result.add(record);
            leftCount--;
        }

        currentFile--;
    }

    return new OrderedQueryResultSet<HistoryRecord>(result);
}
/**
 * Core query routine backing the public find* methods: scans every file
 * that may contain records in [startDate, endDate), filters each record
 * by the optional keywords, and fires progress events while working.
 *
 * @param startDate start of the interval, or null for unbounded
 * @param endDate end of the interval, or null for unbounded
 * @param keywords keywords that must all match, or null for no filter
 * @param field the record field the keywords are matched against
 * @param caseSensitive whether keyword matching respects letter case
 * @return the matching records, ordered by timestamp
 */
private QueryResultSet<HistoryRecord> find(
    Date startDate, Date endDate,
    String[] keywords, String field, boolean caseSensitive)
{
    TreeSet<HistoryRecord> result
        = new TreeSet<HistoryRecord>(new HistoryRecordComparator());

    Vector<String> filelist
        = filterFilesByDate(this.historyImpl.getFileList(),
            startDate, endDate);

    // Progress accounting: the full range is divided evenly among the
    // files, and each file's share evenly among its records.
    double currentProgress
        = HistorySearchProgressListener.PROGRESS_MINIMUM_VALUE;
    double fileProgressStep
        = HistorySearchProgressListener.PROGRESS_MAXIMUM_VALUE;

    if(filelist.size() != 0)
        fileProgressStep
            = HistorySearchProgressListener.PROGRESS_MAXIMUM_VALUE
                / filelist.size();

    // start progress - minimum value
    fireProgressStateChanged(startDate, endDate,
        keywords, HistorySearchProgressListener.PROGRESS_MINIMUM_VALUE);

    SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
    Iterator<String> fileIterator = filelist.iterator();
    while (fileIterator.hasNext())
    {
        String filename = fileIterator.next();

        Document doc = this.historyImpl.getDocumentForFile(filename);

        if(doc == null)
            continue;

        NodeList nodes = doc.getElementsByTagName("record");

        double nodesProgressStep = fileProgressStep;

        if(nodes.getLength() != 0)
            nodesProgressStep = fileProgressStep / nodes.getLength();

        Node node;
        for (int i = 0; i < nodes.getLength(); i++)
        {
            node = nodes.item(i);

            Date timestamp;
            String ts = node.getAttributes().getNamedItem("timestamp")
                .getNodeValue();
            try
            {
                // Primary timestamp format; fall back to raw milliseconds.
                timestamp = sdf.parse(ts);
            }
            catch (ParseException e)
            {
                timestamp = new Date(Long.parseLong(ts));
            }

            if(isInPeriod(timestamp, startDate, endDate))
            {
                NodeList propertyNodes = node.getChildNodes();

                HistoryRecord record =
                    filterByKeyword(propertyNodes, timestamp,
                        keywords, field, caseSensitive);
                if(record != null)
                {
                    result.add(record);
                }
            }

            currentProgress += nodesProgressStep;
            fireProgressStateChanged(
                startDate, endDate, keywords, (int)currentProgress);
        }
    }

    // if maximum value is not reached fire an event
    if((int)currentProgress
        < HistorySearchProgressListener.PROGRESS_MAXIMUM_VALUE)
    {
        fireProgressStateChanged(startDate, endDate, keywords,
            HistorySearchProgressListener.
                PROGRESS_MAXIMUM_VALUE);
    }

    return new OrderedQueryResultSet<HistoryRecord>(result);
}
/**
 * Evaluates whether <tt>timestamp</tt> lies in the given time period.
 * The interval is half-open: inclusive of <tt>startDate</tt>, exclusive
 * of <tt>endDate</tt>; a null bound means unbounded on that side.
 *
 * @param timestamp Date the moment to test (must not be null)
 * @param startDate Date the start of the period, or null
 * @param endDate Date the end of the period, or null
 * @return boolean true if startDate &lt;= timestamp &lt; endDate
 */
static boolean isInPeriod(Date timestamp, Date startDate, Date endDate)
{
    // Use primitive longs: the original boxed Long locals were
    // auto-unboxed on every comparison anyway.
    long startLong = (startDate == null)
        ? Long.MIN_VALUE : startDate.getTime();
    long endLong = (endDate == null)
        ? Long.MAX_VALUE : endDate.getTime();
    long tsLong = timestamp.getTime();

    return startLong <= tsLong && tsLong < endLong;
}
/**
 * If there is a keyword restriction and the record doesn't match the
 * conditions, returns null. Otherwise returns the HistoryRecord
 * corresponding to the given nodes.
 *
 * @param propertyNodes NodeList child nodes of one record element
 * @param timestamp Date timestamp of the record
 * @param keywords String[] keywords that must all match, or null
 * @param field String name of the element the keywords apply to
 * @param caseSensitive boolean whether matching respects letter case
 * @return HistoryRecord the assembled record, or null when filtered out
 */
static HistoryRecord filterByKeyword( NodeList propertyNodes,
                                      Date timestamp,
                                      String[] keywords,
                                      String field,
                                      boolean caseSensitive)
{
    ArrayList<String> nameVals = new ArrayList<String>();
    int len = propertyNodes.getLength();
    boolean targetNodeFound = false;

    for (int j = 0; j < len; j++)
    {
        Node propertyNode = propertyNodes.item(j);
        if (propertyNode.getNodeType() == Node.ELEMENT_NODE)
        {
            String nodeName = propertyNode.getNodeName();

            // Elements without a nested text node are silently skipped.
            Node nestedNode = propertyNode.getFirstChild();
            if(nestedNode == null)
                continue;

            // Get nested TEXT node's value
            String nodeValue = nestedNode.getNodeValue();

            // unescape xml chars, we have escaped when writing values
            nodeValue = StringEscapeUtils.unescapeXml(nodeValue);

            if(field != null && field.equals(nodeName))
            {
                targetNodeFound = true;

                if(!matchKeyword(nodeValue, keywords, caseSensitive))
                    return null; // doesn't match the given keyword(s)
                                 // so return nothing
            }

            nameVals.add(nodeName);
            // Get nested TEXT node's value
            nameVals.add(nodeValue);
        }
    }

    // if we need to find a particular record but the target node is not
    // present skip this record
    // NOTE(review): with keywords given but field == null,
    // targetNodeFound can never become true, so every record is
    // rejected - confirm callers always pass a field with keywords.
    if(keywords != null && keywords.length > 0 && !targetNodeFound)
    {
        return null;
    }

    // nameVals holds alternating name/value entries; split them back
    // into the two parallel arrays HistoryRecord expects.
    String[] propertyNames = new String[nameVals.size() / 2];
    String[] propertyValues = new String[propertyNames.length];
    for (int j = 0; j < propertyNames.length; j++)
    {
        propertyNames[j] = nameVals.get(j * 2);
        propertyValues[j] = nameVals.get(j * 2 + 1);
    }

    return new HistoryRecord(propertyNames, propertyValues, timestamp);
}
/**
 * Tells whether <tt>value</tt> matches every supplied keyword.
 * When no keyword array is given at all, nothing is filtered and the
 * method reports a match.
 *
 * @param value String the text to examine
 * @param keywords String[] keywords that must all be present, or null
 * @param caseSensitive boolean whether matching respects letter case
 * @return boolean true if no keywords were given or all of them match
 */
static boolean matchKeyword(String value, String[] keywords,
                            boolean caseSensitive)
{
    // No filter supplied - every record passes.
    if(keywords == null)
        return true;

    String regexpStart = caseSensitive
        ? REGEXP_SENSITIVE_START
        : REGEXP_INSENSITIVE_START;

    for (String keyword : keywords)
    {
        if(!value.matches(
                regexpStart + Pattern.quote(keyword) + REGEXP_END))
            return false;
    }

    // all keywords match
    return true;
}
/**
 * Used to limit the files if any starting or ending date exist,
 * so only few files need to be searched.
 *
 * @param filelist Iterator over all candidate file names
 * @param startDate Date start of the interval, or null
 * @param endDate Date end of the interval, or null
 * @return Vector the filtered file names in ascending (oldest-first) order
 */
static Vector<String> filterFilesByDate(
    Iterator<String> filelist, Date startDate, Date endDate)
{
    // Ascending order by default.
    return filterFilesByDate(filelist, startDate, endDate, false);
}
/**
 * Used to limit the files if any starting or ending date exist,
 * so only few files need to be searched. File names are expected to be
 * numeric timestamps with a four-character suffix (e.g. "12345.xml").
 *
 * @param filelist Iterator over all candidate file names
 * @param startDate Date start of the interval, or null
 * @param endDate Date end of the interval, or null
 * @param reverseOrder reverse order of files (newest first)
 * @return Vector the filtered file names, sorted per reverseOrder
 */
static Vector<String> filterFilesByDate(
    Iterator<String> filelist, Date startDate, Date endDate,
    final boolean reverseOrder)
{
    // Single comparator shared by both exit paths (previously the same
    // anonymous class was duplicated).
    Comparator<String> order = new Comparator<String>()
    {
        public int compare(String o1, String o2)
        {
            if(reverseOrder)
                return o2.compareTo(o1);
            else
                return o1.compareTo(o2);
        }
    };

    if(startDate == null && endDate == null)
    {
        // no filtering needed then just return the same list, sorted
        Vector<String> result = new Vector<String>();
        while (filelist.hasNext())
        {
            result.add(filelist.next());
        }

        Collections.sort(result, order);

        return result;
    }

    // first convert all files to long (strip the 4-char ".xml" suffix)
    TreeSet<Long> files = new TreeSet<Long>();
    while (filelist.hasNext())
    {
        String filename = filelist.next();

        files.add(
            Long.parseLong(filename.substring(0, filename.length() - 4)));
    }

    TreeSet<Long> resultAsLong = new TreeSet<Long>();

    // Temporary fix of a NoSuchElementException
    if(files.size() == 0)
    {
        return new Vector<String>();
    }

    Long startLong;
    Long endLong;

    if(startDate == null)
        startLong = Long.MIN_VALUE;
    else
        startLong = startDate.getTime();

    if(endDate == null)
        endLong = Long.MAX_VALUE;
    else
        endLong = endDate.getTime();

    // get all records inclusive the one before the startdate
    for(Long f : files)
    {
        if(startLong <= f
            && f <= endLong)
        {
            resultAsLong.add(f);
        }
    }

    // get the subset before the start date, to get its last element
    // if exists - that file may still contain records in the interval
    if(!files.isEmpty() && files.first() <= startLong)
    {
        SortedSet<Long> setBeforeTheInterval =
            files.subSet(files.first(), true, startLong, true);
        if(!setBeforeTheInterval.isEmpty())
            resultAsLong.add(setBeforeTheInterval.last());
    }

    // Convert the timestamps back to file names and sort.
    Vector<String> result = new Vector<String>();

    for (Long item : resultAsLong)
    {
        result.add(item.toString() + ".xml");
    }

    Collections.sort(result, order);

    return result;
}
/**
 * Notifies every registered search progress listener about the current
 * progress of a running query.
 *
 * @param startDate start of the searched interval, may be null
 * @param endDate end of the searched interval, may be null
 * @param keywords the keywords of the running query, may be null
 * @param progress the progress value to report
 */
private void fireProgressStateChanged(Date startDate, Date endDate,
                                      String[] keywords, int progress)
{
    ProgressEvent event =
        new ProgressEvent(this, startDate, endDate, keywords, progress);

    synchronized(progressListeners)
    {
        // Enhanced for replaces the explicit Iterator of the original.
        for (HistorySearchProgressListener listener : progressListeners)
        {
            listener.progressChanged(event);
        }
    }
}
/**
 * Adding progress listener for monitoring progress of search process
 *
 * @param listener HistorySearchProgressListener the listener to register
 */
public void addSearchProgressListener(HistorySearchProgressListener
                                      listener)
{
    // Synchronized to avoid racing fireProgressStateChanged()'s iteration.
    synchronized(progressListeners){
        progressListeners.add(listener);
    }
}
/**
 * Removing progress listener
 *
 * @param listener HistorySearchProgressListener the listener to remove
 */
public void removeSearchProgressListener(HistorySearchProgressListener
                                         listener)
{
    // Synchronized to avoid racing fireProgressStateChanged()'s iteration.
    synchronized(progressListeners){
        progressListeners.remove(listener);
    }
}
/**
 * Count the number of messages that a search will return.
 * Actually only the last file is parsed and its nodes are counted.
 * We accept that the other files are full with max records;
 * this way we escape parsing all files, which would significantly
 * slow the process, and for one search would parse the files twice.
 * The result is therefore an estimate, exact only for the last file.
 *
 * @return the estimated number of searched messages
 * @throws UnsupportedOperationException
 *         Thrown if an exception occurs during the execution of the
 *         query, such as internal IO error.
 */
public int countRecords()
    throws UnsupportedOperationException
{
    int result = 0;

    String lastFile = null;
    // Assume every file holds the maximum record count for now.
    Iterator<String> filelistIter = this.historyImpl.getFileList();
    while (filelistIter.hasNext())
    {
        lastFile = filelistIter.next();
        result += HistoryWriterImpl.MAX_RECORDS_PER_FILE;
    }

    if(lastFile == null)
        return result;

    Document doc = this.historyImpl.getDocumentForFile(lastFile);

    if(doc == null)
        return result;

    NodeList nodes = doc.getElementsByTagName("record");

    // Bug fix: the loop above already counted the last file as full;
    // replace that assumption with its actual record count instead of
    // adding the two together (the old code double-counted it).
    result = result - HistoryWriterImpl.MAX_RECORDS_PER_FILE
        + nodes.getLength();

    return result;
}
/**
 * Used to compare HistoryRecords and to be ordered in a TreeSet,
 * chronologically by timestamp.
 * NOTE(review): TreeSet treats compare() == 0 as equality, so two
 * records with an identical timestamp collapse into a single entry in
 * the result sets built above - confirm this is acceptable.
 */
private static class HistoryRecordComparator
    implements Comparator<HistoryRecord>
{
    public int compare(HistoryRecord h1, HistoryRecord h2)
    {
        return h1.getTimestamp().compareTo(h2.getTimestamp());
    }
}
}
|
3e1e4b2f7164e1919eb4d038acdd8166c1f46f54 | 947 | java | Java | src/test/java/com/rainbowpunch/jetedge/core/analyzer/DefaultPojoAnalyzerTest.java | Bekreth/jetedge | 72c84112d67fb87b45dc933de8f56c42432b3337 | [
"Apache-2.0"
] | 7 | 2018-07-03T16:08:08.000Z | 2018-09-07T13:54:40.000Z | src/test/java/com/rainbowpunch/jetedge/core/analyzer/DefaultPojoAnalyzerTest.java | Bekreth/jetedge | 72c84112d67fb87b45dc933de8f56c42432b3337 | [
"Apache-2.0"
] | 71 | 2017-10-27T02:07:56.000Z | 2021-02-13T13:45:51.000Z | src/test/java/com/rainbowpunch/jetedge/core/analyzer/DefaultPojoAnalyzerTest.java | Bekreth/jetedge | 72c84112d67fb87b45dc933de8f56c42432b3337 | [
"Apache-2.0"
] | 8 | 2017-11-07T03:51:50.000Z | 2018-09-07T13:54:44.000Z | 36.423077 | 112 | 0.737064 | 12,823 | package com.rainbowpunch.jetedge.core.analyzer;
import com.rainbowpunch.jetedge.core.reflection.ClassAttributes;
import com.rainbowpunch.jetedge.core.reflection.FieldAttributes;
import com.rainbowpunch.jetedge.test.Pojos.Vehicle;
import java.util.HashSet;
import java.util.Set;
import org.junit.Test;
import static java.util.Arrays.asList;
import static java.util.stream.Collectors.toSet;
import static org.junit.Assert.assertEquals;
public class DefaultPojoAnalyzerTest {

    /**
     * Verifies the default analyzer reports exactly the Vehicle fields,
     * regardless of iteration order.
     */
    @Test
    public void testDefaultPojoAnalyzer() {
        Set<String> expected = new HashSet<>(asList(
                "hasTintedWindows", "name", "numWheels", "engineType",
                "owners", "maxSpeed", "salesPerson"));

        Set<String> actual = Analyzers.DEFAULT
                .extractFields(ClassAttributes.create(Vehicle.class))
                .map(FieldAttributes::getName)
                .collect(toSet());

        assertEquals(expected, actual);
    }
}
|
3e1e4b96e2ca7f2c83fb596abe6e83340895a906 | 364 | java | Java | FlickrDemo/app/src/main/java/com/simplifynowsoftware/flickrdemo/retrofit/response/PhotoCommon.java | TimMackenzie/flickr-demo | c4f0014edcda6203c11b07c34840fd2e683c78b3 | [
"Apache-2.0"
] | 2 | 2016-07-08T02:33:07.000Z | 2018-08-13T21:07:41.000Z | FlickrDemo/app/src/main/java/com/simplifynowsoftware/flickrdemo/retrofit/response/PhotoCommon.java | TimMackenzie/flickr-demo | c4f0014edcda6203c11b07c34840fd2e683c78b3 | [
"Apache-2.0"
] | null | null | null | FlickrDemo/app/src/main/java/com/simplifynowsoftware/flickrdemo/retrofit/response/PhotoCommon.java | TimMackenzie/flickr-demo | c4f0014edcda6203c11b07c34840fd2e683c78b3 | [
"Apache-2.0"
] | 2 | 2015-08-10T06:55:52.000Z | 2019-02-13T17:35:16.000Z | 28 | 99 | 0.711538 | 12,824 | package com.simplifynowsoftware.flickrdemo.retrofit.response;
/**
 * This interface attempts to make it easier to deal with the auto-generated
 * collisions for Flickr data types (e.g. interestingness.Photo and
 * Photoset.Photo), letting callers treat them uniformly.
 */
public interface PhotoCommon {
    /** @return the photo's id */
    String getId();
    /** @return the photo's farm number */
    int getFarm();
    /** @return the photo's server id */
    String getServer();
    /** @return the photo's secret */
    String getSecret();
}
|
3e1e4cf017b0232deda3a9c027d31fd68e124af2 | 1,537 | java | Java | Source/Plugins/Core/com.equella.core/src/com/tle/web/notification/section/RootNotificationListSection.java | infiniticg/equella | 75ddd69bca3a94e213705dcfe90cf7077cc89872 | [
"Apache-2.0"
] | 1 | 2018-07-25T02:34:16.000Z | 2018-07-25T02:34:16.000Z | Source/Plugins/Core/com.equella.core/src/com/tle/web/notification/section/RootNotificationListSection.java | infiniticg/equella | 75ddd69bca3a94e213705dcfe90cf7077cc89872 | [
"Apache-2.0"
] | null | null | null | Source/Plugins/Core/com.equella.core/src/com/tle/web/notification/section/RootNotificationListSection.java | infiniticg/equella | 75ddd69bca3a94e213705dcfe90cf7077cc89872 | [
"Apache-2.0"
] | 1 | 2018-04-11T20:31:51.000Z | 2018-04-11T20:31:51.000Z | 26.050847 | 105 | 0.775537 | 12,825 | /*
* Copyright 2017 Apereo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.web.notification.section;
import javax.inject.Inject;
import com.tle.web.navigation.TopbarLinkService;
import com.tle.web.search.base.ContextableSearchSection;
import com.tle.web.sections.SectionInfo;
import com.tle.web.sections.annotations.DirectEvent;
import com.tle.web.sections.equella.annotation.PlugKey;
import com.tle.web.sections.render.Label;
/**
 * Root section backing the notifications list page at {@link #URL}.
 */
public class RootNotificationListSection extends ContextableSearchSection<ContextableSearchSection.Model>
{
    // Page title resolved from the "notification.title" resource key.
    @PlugKey("notification.title")
    private static Label LABEL_TITLE;

    // Servlet path of the notifications page; doubles as the session key.
    @SuppressWarnings("nls")
    public static final String URL = "/access/notifications.do";

    @Inject
    private TopbarLinkService topbarLinkService;

    // Search state is stored in the session keyed on this page's URL.
    @Override
    protected String getSessionKey()
    {
        return URL;
    }

    @Override
    public Label getTitle(SectionInfo info)
    {
        return LABEL_TITLE;
    }

    // Clears the top bar's cached data on each request - presumably so
    // any notification indicator there is refreshed; confirm intent.
    @DirectEvent
    public void updateTopbar(SectionInfo info)
    {
        topbarLinkService.clearCachedData();
    }
}
|
3e1e4d1e8fae9d3b0acfb786dcd20bfca59efe05 | 14,476 | java | Java | core/net/src/test/java/org/onosproject/net/device/impl/DeviceManagerTest.java | abruno06/onos | cc22546d624e6ee4f83cd12be237939873179ecc | [
"Apache-2.0"
] | 1,091 | 2015-01-06T11:10:40.000Z | 2022-03-30T09:09:05.000Z | core/net/src/test/java/org/onosproject/net/device/impl/DeviceManagerTest.java | abruno06/onos | cc22546d624e6ee4f83cd12be237939873179ecc | [
"Apache-2.0"
] | 39 | 2015-02-13T19:58:32.000Z | 2022-03-02T02:38:07.000Z | core/net/src/test/java/org/onosproject/net/device/impl/DeviceManagerTest.java | abruno06/onos | cc22546d624e6ee4f83cd12be237939873179ecc | [
"Apache-2.0"
] | 914 | 2015-01-05T19:42:35.000Z | 2022-03-30T09:25:18.000Z | 38.913978 | 95 | 0.698259 | 12,826 | /*
* Copyright 2014-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.device.impl;
import com.google.common.collect.Sets;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onlab.packet.ChassisId;
import org.onlab.packet.IpAddress;
import org.onosproject.cluster.ClusterServiceAdapter;
import org.onosproject.cluster.ControllerNode;
import org.onosproject.cluster.DefaultControllerNode;
import org.onosproject.cluster.NodeId;
import org.onosproject.common.event.impl.TestEventDispatcher;
import org.onosproject.event.Event;
import org.onosproject.net.config.NetworkConfigServiceAdapter;
import org.onosproject.mastership.MastershipServiceAdapter;
import org.onosproject.mastership.MastershipTerm;
import org.onosproject.mastership.MastershipTermService;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.MastershipRole;
import org.onosproject.net.Port;
import org.onosproject.net.PortNumber;
import org.onosproject.net.device.DefaultDeviceDescription;
import org.onosproject.net.device.DefaultPortDescription;
import org.onosproject.net.device.DeviceAdminService;
import org.onosproject.net.device.DeviceDescription;
import org.onosproject.net.device.DeviceEvent;
import org.onosproject.net.device.DeviceListener;
import org.onosproject.net.device.DeviceProvider;
import org.onosproject.net.device.DeviceProviderRegistry;
import org.onosproject.net.device.DeviceProviderService;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.device.PortDescription;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.store.cluster.messaging.ClusterCommunicationServiceAdapter;
import org.onosproject.store.trivial.SimpleDeviceStore;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import static org.junit.Assert.*;
import static org.onosproject.net.Device.Type.SWITCH;
import static org.onosproject.net.DeviceId.deviceId;
import static org.onosproject.net.NetTestTools.injectEventDispatcher;
import static org.onosproject.net.device.DeviceEvent.Type.*;
/**
* Test codifying the device service & device provider service contracts.
*/
public class DeviceManagerTest {
    // Provider and device identities used throughout the tests.
    private static final ProviderId PID = new ProviderId("of", "foo");
    private static final DeviceId DID1 = deviceId("of:foo");
    private static final DeviceId DID2 = deviceId("of:bar");

    // Fixed device-description attributes; SW1/SW2 are two software
    // versions used to exercise device updates.
    private static final String MFR = "whitebox";
    private static final String HW = "1.1.x";
    private static final String SW1 = "3.8.1";
    private static final String SW2 = "3.9.5";
    private static final String SN = "43311-12345";
    private static final ChassisId CID = new ChassisId();

    // Port numbers used by the port-related tests.
    private static final PortNumber P1 = PortNumber.portNumber(1);
    private static final PortNumber P2 = PortNumber.portNumber(2);
    private static final PortNumber P3 = PortNumber.portNumber(3);

    // Identity of the local (single) cluster node.
    private static final NodeId NID_LOCAL = new NodeId("local");
    private static final IpAddress LOCALHOST = IpAddress.valueOf("127.0.0.1");

    // Manager under test, also accessed through its service facets below.
    private DeviceManager mgr;

    protected DeviceService service;
    protected DeviceAdminService admin;
    protected DeviceProviderRegistry registry;
    protected DeviceProviderService providerService;
    protected TestProvider provider;
    protected TestListener listener = new TestListener();
    /**
     * Wires a fresh DeviceManager to trivial/in-memory implementations of
     * its dependencies, activates it and registers a test provider.
     */
    @Before
    public void setUp() {
        mgr = new DeviceManager();
        service = mgr;
        admin = mgr;
        registry = mgr;
        mgr.store = new SimpleDeviceStore();
        injectEventDispatcher(mgr, new TestEventDispatcher());
        TestMastershipManager mastershipManager = new TestMastershipManager();
        // Same object serves both mastership roles.
        mgr.mastershipService = mastershipManager;
        mgr.termService = mastershipManager;
        mgr.clusterService = new TestClusterService();
        mgr.networkConfigService = new TestNetworkConfigService();
        mgr.communicationService = new TestClusterCommunicationService();
        mgr.activate();

        service.addListener(listener);

        provider = new TestProvider();
        providerService = registry.register(provider);
        assertTrue("provider should be registered",
                   registry.getProviders().contains(provider.id()));
    }
    /**
     * Unregisters the provider, detaches the listener and deactivates
     * the manager.
     */
    @After
    public void tearDown() {
        registry.unregister(provider);
        assertFalse("provider should not be registered",
                    registry.getProviders().contains(provider.id()));
        service.removeListener(listener);
        mgr.deactivate();
    }
private void connectDevice(DeviceId deviceId, String swVersion) {
DeviceDescription description =
new DefaultDeviceDescription(deviceId.uri(), SWITCH, MFR,
HW, swVersion, SN, CID);
providerService.deviceConnected(deviceId, description);
assertNotNull("device should be found", service.getDevice(DID1));
}
    /**
     * Connecting one device makes it the sole, available entry in the
     * inventory and emits DEVICE_ADDED.
     */
    @Test
    public void deviceConnected() {
        assertNull("device should not be found", service.getDevice(DID1));
        connectDevice(DID1, SW1);
        validateEvents(DEVICE_ADDED);

        Iterator<Device> it = service.getDevices().iterator();
        assertNotNull("one device expected", it.next());
        assertFalse("only one device expected", it.hasNext());
        assertEquals("incorrect device count", 1, service.getDeviceCount());
        assertEquals("incorrect available device count", 1, service.getAvailableDeviceCount());
        assertTrue("device should be available", service.isAvailable(DID1));
    }
@Test
public void deviceDisconnected() {
connectDevice(DID1, SW1);
connectDevice(DID2, SW1);
validateEvents(DEVICE_ADDED, DEVICE_ADDED);
assertTrue("device should be available", service.isAvailable(DID1));
// Disconnect
providerService.deviceDisconnected(DID1);
assertNotNull("device should not be found", service.getDevice(DID1));
assertFalse("device should not be available", service.isAvailable(DID1));
validateEvents(DEVICE_AVAILABILITY_CHANGED);
// Reconnect
connectDevice(DID1, SW1);
validateEvents(DEVICE_AVAILABILITY_CHANGED);
assertEquals("incorrect device count", 2, service.getDeviceCount());
assertEquals("incorrect available device count", 2, service.getAvailableDeviceCount());
}
    /**
     * Re-announcing a device with a different software version emits
     * DEVICE_UPDATED rather than a second DEVICE_ADDED.
     */
    @Test
    public void deviceUpdated() {
        connectDevice(DID1, SW1);
        validateEvents(DEVICE_ADDED);

        connectDevice(DID1, SW2);
        validateEvents(DEVICE_UPDATED);
    }
    /**
     * On the single local node, a newly connected device is mastered by
     * this node, so the reported role is MASTER.
     */
    @Test
    public void getRole() {
        connectDevice(DID1, SW1);
        assertEquals("incorrect role", MastershipRole.MASTER, service.getRole(DID1));
    }
    /**
     * Pushing a full port list adds the ports; pushing a revised list
     * updates changed ports and removes the ones no longer listed.
     */
    @Test
    public void updatePorts() {
        connectDevice(DID1, SW1);
        List<PortDescription> pds = new ArrayList<>();
        pds.add(DefaultPortDescription.builder().withPortNumber(P1).isEnabled(true).build());
        pds.add(DefaultPortDescription.builder().withPortNumber(P2).isEnabled(true).build());
        pds.add(DefaultPortDescription.builder().withPortNumber(P3).isEnabled(true).build());
        providerService.updatePorts(DID1, pds);
        validateEvents(DEVICE_ADDED, PORT_ADDED, PORT_ADDED, PORT_ADDED);
        pds.clear();

        // P1 toggles to disabled (PORT_UPDATED), P2 disappears
        // (PORT_REMOVED), P3 is unchanged (no event).
        pds.add(DefaultPortDescription.builder().withPortNumber(P1).isEnabled(false).build());
        pds.add(DefaultPortDescription.builder().withPortNumber(P3).isEnabled(true).build());
        providerService.updatePorts(DID1, pds);
        validateEvents(PORT_UPDATED, PORT_REMOVED);
    }
    /**
     * A port status change emits PORT_UPDATED once; repeating the very
     * same status produces no further events.
     */
    @Test
    public void updatePortStatus() {
        connectDevice(DID1, SW1);
        List<PortDescription> pds = new ArrayList<>();
        pds.add(DefaultPortDescription.builder().withPortNumber(P1).isEnabled(true).build());
        pds.add(DefaultPortDescription.builder().withPortNumber(P2).isEnabled(true).build());
        providerService.updatePorts(DID1, pds);
        validateEvents(DEVICE_ADDED, PORT_ADDED, PORT_ADDED);

        providerService.portStatusChanged(DID1, DefaultPortDescription.builder()
                .withPortNumber(P1).isEnabled(false).build());
        validateEvents(PORT_UPDATED);
        // Identical status again - must be a no-op.
        providerService.portStatusChanged(DID1, DefaultPortDescription.builder()
                .withPortNumber(P1).isEnabled(false).build());
        assertTrue("no events expected", listener.events.isEmpty());
    }
// Connects a device with two enabled ports and verifies both the port
// collection and a single-port lookup reflect what the provider reported.
@Test
public void getPorts() {
    connectDevice(DID1, SW1);
    List<PortDescription> pds = new ArrayList<>();
    pds.add(DefaultPortDescription.builder().withPortNumber(P1).isEnabled(true).build());
    pds.add(DefaultPortDescription.builder().withPortNumber(P2).isEnabled(true).build());
    providerService.updatePorts(DID1, pds);
    validateEvents(DEVICE_ADDED, PORT_ADDED, PORT_ADDED);
    assertEquals("wrong port count", 2, service.getPorts(DID1).size());
    Port port = service.getPort(DID1, P1);
    assertEquals("incorrect port", P1, port.number());
    // assertTrue is the idiomatic form for a boolean check
    // (was assertEquals("incorrect state", true, port.isEnabled())).
    assertTrue("incorrect state", port.isEnabled());
}
// Administratively removing one of two devices must delete it completely
// while leaving the other device and its availability intact.
@Test
public void removeDevice() {
    connectDevice(DID1, SW1);
    connectDevice(DID2, SW2);
    assertEquals("incorrect device count", 2, service.getDeviceCount());
    assertEquals("incorrect available device count", 2, service.getAvailableDeviceCount());
    admin.removeDevice(DID1);
    assertNull("device should not be found", service.getDevice(DID1));
    assertNotNull("device should be found", service.getDevice(DID2));
    assertEquals("incorrect device count", 1, service.getDeviceCount());
    assertEquals("incorrect available device count", 1, service.getAvailableDeviceCount());
}
// Removing ports is only honored for a disconnected device; while the
// device is still available the call must be a no-op.
@Test
public void removeDevicePorts() {
    connectDevice(DID1, SW1);
    List<PortDescription> pds = new ArrayList<>();
    pds.add(DefaultPortDescription.builder().withPortNumber(P1).isEnabled(true).build());
    pds.add(DefaultPortDescription.builder().withPortNumber(P2).isEnabled(true).build());
    pds.add(DefaultPortDescription.builder().withPortNumber(P3).isEnabled(true).build());
    providerService.updatePorts(DID1, pds);
    validateEvents(DEVICE_ADDED, PORT_ADDED, PORT_ADDED, PORT_ADDED);
    // Try removing ports while device is available/connected; it should be a no-op.
    admin.removeDevicePorts(DID1);
    assertEquals("wrong port count", 3, service.getPorts(DID1).size());
    // Disconnect device
    providerService.deviceDisconnected(DID1);
    assertFalse("device should not be available", service.isAvailable(DID1));
    validateEvents(DEVICE_AVAILABILITY_CHANGED);
    // Now remove ports for real
    admin.removeDevicePorts(DID1);
    validateEvents(PORT_REMOVED, PORT_REMOVED, PORT_REMOVED);
    assertEquals("wrong port count", 0, service.getPorts(DID1).size());
}
/**
 * Asserts that the listener captured exactly the given event types, in
 * order, then clears the captured events ready for the next assertion.
 */
protected void validateEvents(Enum... types) {
    assertEquals("wrong events received", types.length, listener.events.size());
    // Sizes are equal at this point, so indexed access over either list is safe.
    for (int i = 0; i < types.length; i++) {
        assertEquals("incorrect event type", types[i], listener.events.get(i).type());
    }
    listener.events.clear();
}
// Minimal DeviceProvider stub: reports every device unreachable and records
// the most recent roleChanged() arguments (fields are write-only here;
// presumably kept for debugging/inspection — confirm before removing).
private class TestProvider extends AbstractProvider implements DeviceProvider {
    private DeviceId deviceReceived;      // last device passed to roleChanged()
    private MastershipRole roleReceived;  // last role passed to roleChanged()
    public TestProvider() {
        super(PID);
    }
    @Override
    public void triggerProbe(DeviceId deviceId) {
        // no-op: probing is irrelevant to these tests
    }
    @Override
    public void roleChanged(DeviceId device, MastershipRole newRole) {
        deviceReceived = device;
        roleReceived = newRole;
    }
    @Override
    public boolean isReachable(DeviceId device) {
        return false;
    }
    @Override
    public void changePortState(DeviceId deviceId, PortNumber portNumber,
                                boolean enable) {
        // no-op: port-state changes are driven through the provider service instead
    }
}
// Captures every DeviceEvent so tests can assert on the exact sequence
// (see validateEvents, which also clears the list between assertions).
private static class TestListener implements DeviceListener {
    final List<DeviceEvent> events = new ArrayList<>();
    @Override
    public void event(DeviceEvent event) {
        events.add(event);
    }
}
// Mastership stub: the local node is MASTER for everything, keeping the
// device manager on the "master" code path throughout these tests.
private static class TestMastershipManager
        extends MastershipServiceAdapter implements MastershipTermService {
    @Override
    public MastershipRole getLocalRole(DeviceId deviceId) {
        return MastershipRole.MASTER;
    }
    @Override
    public Set<DeviceId> getDevicesOf(NodeId nodeId) {
        return Sets.newHashSet(DID1, DID2);
    }
    @Override
    public CompletableFuture<MastershipRole> requestRoleFor(DeviceId deviceId) {
        // Grant mastership immediately.
        return CompletableFuture.completedFuture(MastershipRole.MASTER);
    }
    @Override
    public CompletableFuture<Void> relinquishMastership(DeviceId deviceId) {
        return CompletableFuture.completedFuture(null);
    }
    @Override
    public MastershipTerm getMastershipTerm(DeviceId deviceId) {
        // FIXME: just returning something not null
        return MastershipTerm.of(NID_LOCAL, 1);
    }
}
// code clone — the same single-node cluster stub appears in sibling manager
// tests; consider extracting a shared adapter.
private final class TestClusterService extends ClusterServiceAdapter {
    ControllerNode local = new DefaultControllerNode(NID_LOCAL, LOCALHOST);
    @Override
    public ControllerNode getLocalNode() {
        return local;
    }
}
// Default-behavior network-config stub; the adapter's no-op methods suffice.
private class TestNetworkConfigService extends NetworkConfigServiceAdapter {
}
// Default-behavior cluster-communication stub; no messaging occurs in these tests.
private class TestClusterCommunicationService extends ClusterCommunicationServiceAdapter {
}
}
|
3e1e4d8bd58df7a92745270a0a7a2d96c5a9f072 | 450 | java | Java | sources/p005cm/aptoide/p006pt/dataprovider/p010ws/p013v7/ReadPost.java | tusharchoudhary0003/Custom-Football-Game | 47283462b2066ad5c53b3c901182e7ae62a34fc8 | [
"MIT"
] | 1 | 2019-10-01T11:34:10.000Z | 2019-10-01T11:34:10.000Z | sources/p005cm/aptoide/p006pt/dataprovider/p010ws/p013v7/ReadPost.java | tusharchoudhary0003/Custom-Football-Game | 47283462b2066ad5c53b3c901182e7ae62a34fc8 | [
"MIT"
] | null | null | null | sources/p005cm/aptoide/p006pt/dataprovider/p010ws/p013v7/ReadPost.java | tusharchoudhary0003/Custom-Football-Game | 47283462b2066ad5c53b3c901182e7ae62a34fc8 | [
"MIT"
] | 1 | 2020-05-26T05:10:33.000Z | 2020-05-26T05:10:33.000Z | 26.470588 | 61 | 0.717778 | 12,827 | package p005cm.aptoide.p006pt.dataprovider.p010ws.p013v7;
import com.fasterxml.jackson.annotation.JsonProperty;
/* renamed from: cm.aptoide.pt.dataprovider.ws.v7.ReadPost */
public class ReadPost {

    /** Unique id of the post; serialized as "uid". */
    @JsonProperty("uid")
    private final String postId;

    /** Kind of post; serialized as "type". */
    @JsonProperty("type")
    private final String postType;

    public ReadPost(String postId, String postType) {
        this.postId = postId;
        this.postType = postType;
    }
}
|
3e1e4df77cfcd291e2424994729da36acc0feb67 | 1,813 | java | Java | molicode-common/src/main/groovy/com/shareyi/molicode/common/vo/replace/DirTrans.java | cn2oo8/molicode | 66940f13d91e7fc8ec4b4926a977296d3d75cc43 | [
"Apache-2.0"
] | 81 | 2019-04-10T01:09:52.000Z | 2020-12-08T03:30:46.000Z | molicode-common/src/main/groovy/com/shareyi/molicode/common/vo/replace/DirTrans.java | darryring102555/molicode | f117042f2d590ac7d2842834d4a183a9e23838ff | [
"Apache-2.0"
] | 12 | 2019-04-30T02:34:42.000Z | 2021-12-14T21:18:30.000Z | molicode-common/src/main/groovy/com/shareyi/molicode/common/vo/replace/DirTrans.java | darryring102555/molicode | f117042f2d590ac7d2842834d4a183a9e23838ff | [
"Apache-2.0"
] | 30 | 2019-04-10T00:24:14.000Z | 2021-12-24T11:06:15.000Z | 23.24359 | 161 | 0.651958 | 12,828 | package com.shareyi.molicode.common.vo.replace;
import com.shareyi.molicode.common.enums.DirTransType;
import org.apache.commons.lang.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * A directory-name transformation rule: maps an origin directory name to a
 * single target name, or to a list of target path segments, according to a
 * {@link DirTransType} strategy.
 *
 * <p>Created by david on 2016/1/5.
 */
public class DirTrans {

    /** Directory name to match in the source tree. */
    private String originDirName;
    /** Single target directory name, when the transform yields one name. */
    private String targetDirName;
    /** Target path segments, when the transform yields a multi-level path. */
    private List<String> targetDirList;
    /** Strategy describing how the origin name is transformed. */
    private DirTransType dirTransType;

    public String getOriginDirName() {
        return originDirName;
    }

    public void setOriginDirName(String originDirName) {
        this.originDirName = originDirName;
    }

    public String getTargetDirName() {
        return targetDirName;
    }

    public void setTargetDirName(String targetDirName) {
        this.targetDirName = targetDirName;
    }

    public List<String> getTargetDirList() {
        return targetDirList;
    }

    public void setTargetDirList(List<String> targetDirList) {
        this.targetDirList = targetDirList;
    }

    public DirTransType getDirTransType() {
        return dirTransType;
    }

    public void setDirTransType(DirTransType dirTransType) {
        this.dirTransType = dirTransType;
    }

    /**
     * Appends a target path segment, lazily creating the backing list.
     *
     * @param targetDir the segment to append
     */
    public void addTargetDir(String targetDir) {
        if (targetDirList == null) {
            targetDirList = new ArrayList<>(); // diamond instead of new ArrayList<String>()
        }
        targetDirList.add(targetDir);
    }

    @Override
    public String toString() {
        return "{originDirName:" + originDirName + ",targetDirName:" + targetDirName + ",targetDirList:" + targetDirList + ",dirTransType:" + dirTransType + "}";
    }

    /**
     * Joins the target path segments with "/".
     *
     * @return the joined path, or null when no segments have been added
     */
    public String getTargetDirListStr() {
        if (targetDirList != null) {
            return StringUtils.join(targetDirList, "/");
        }
        return null;
    }
}
|
3e1e4e320b539d353974e5428eaa0b8030c9f383 | 4,588 | java | Java | src/main/java/org/kuali/maven/ec2/DescribeInstancesMojo.java | infrared5/ec2-maven-plugin | 5dc5cb4acdb4a31f72f8304f090432bc7436765c | [
"ECL-2.0"
] | null | null | null | src/main/java/org/kuali/maven/ec2/DescribeInstancesMojo.java | infrared5/ec2-maven-plugin | 5dc5cb4acdb4a31f72f8304f090432bc7436765c | [
"ECL-2.0"
] | null | null | null | src/main/java/org/kuali/maven/ec2/DescribeInstancesMojo.java | infrared5/ec2-maven-plugin | 5dc5cb4acdb4a31f72f8304f090432bc7436765c | [
"ECL-2.0"
] | 1 | 2021-01-19T19:10:54.000Z | 2021-01-19T19:10:54.000Z | 32.771429 | 84 | 0.602441 | 12,829 | package org.kuali.maven.ec2;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.kuali.maven.ec2.pojo.Column;
import org.kuali.maven.ec2.pojo.Row;
import org.kuali.maven.ec2.pojo.RowComparator;
import org.kuali.maven.ec2.pojo.Table;
import com.amazonaws.services.ec2.model.GroupIdentifier;
import com.amazonaws.services.ec2.model.Instance;
/**
 * Connect to EC2 and list instances associated with this account
 *
 * @goal describeinstances
 */
public class DescribeInstancesMojo extends AbstractEC2Mojo {
    /**
     * The tag to display for each instance.
     *
     * @parameter property="ec2.tag" default-value="Name"
     */
    private String tag;
    /**
     * A list of instance id's to display information about
     *
     * @parameter
     */
    private List<String> instanceIds;
    // Fetches the (optionally filtered) instance list, lays it out as a
    // fixed-width table, and prints a header plus one line per instance.
    @Override
    public void execute(EC2Utils ec2Utils) throws MojoExecutionException {
        List<Instance> instances = ec2Utils.getEC2Instances(instanceIds);
        Table table = getTable(instances, ec2Utils);
        getLog().info(getDisplay(table.getColumns()));
        for (Row row : table.getRows()) {
            getLog().info(getDisplay(table, row));
        }
    }
    // Renders the header row: each title right-padded to its column width.
    protected String getDisplay(List<Column> columns) {
        StringBuilder sb = new StringBuilder();
        for (Column c : columns) {
            sb.append(StringUtils.rightPad(c.getTitle(), c.getWidth(), " ") + " ");
        }
        return sb.toString();
    }
    // Renders one data row, padding each cell to the matching column width.
    protected String getDisplay(Table table, Row row) {
        StringBuilder sb = new StringBuilder();
        List<String> elements = row.getElements();
        for (int i = 0; i < elements.size(); i++) {
            int width = table.getColumns().get(i).getWidth();
            sb.append(StringUtils.rightPad(elements.get(i), width, " ") + " ");
        }
        return sb.toString();
    }
    // Comma-joins the names of the instance's security groups.
    protected String getSecurityGroupsDisplay(Instance i) {
        List<GroupIdentifier> groups = i.getSecurityGroups();
        StringBuilder sb = new StringBuilder();
        for (int j = 0; j < groups.size(); j++) {
            if (j != 0) {
                sb.append(",");
            }
            GroupIdentifier group = groups.get(j);
            sb.append(group.getGroupName());
        }
        return sb.toString();
    }
    // Builds one row per instance; sorted descending (sort then reverse).
    protected List<Row> getRows(List<Instance> instances, EC2Utils ec2Utils) {
        List<Row> rows = new ArrayList<Row>();
        for (Instance i : instances) {
            Row row = new Row();
            List<String> elements = new ArrayList<String>();
            // Element order must match the column order in getColumns().
            elements.add(ec2Utils.getTagValue(i, tag));
            elements.add(i.getInstanceId());
            elements.add(i.getImageId());
            elements.add(i.getPlacement().getAvailabilityZone());
            elements.add(i.getInstanceType());
            elements.add(i.getState().getName());
            elements.add(getSecurityGroupsDisplay(i));
            elements.add(i.getKeyName());
            row.setElements(elements);
            rows.add(row);
        }
        Collections.sort(rows, new RowComparator());
        Collections.reverse(rows);
        return rows;
    }
    // Declares the table's columns (first column titled after the chosen tag)
    // and sizes every column to fit its widest cell.
    protected List<Column> getColumns(List<Row> rows) {
        List<Column> columns = new ArrayList<Column>();
        columns.add(new Column(tag));
        columns.add(new Column("Instance"));
        columns.add(new Column("AMI"));
        columns.add(new Column("Availability Zone"));
        columns.add(new Column("Type"));
        columns.add(new Column("State"));
        columns.add(new Column("Security Groups"));
        columns.add(new Column("Key Pair"));
        setWidths(columns, rows);
        return columns;
    }
    // Widens each column to the length of its longest cell value.
    protected void setWidths(List<Column> columns, List<Row> rows) {
        for (int i = 0; i < rows.size(); i++) {
            List<String> elements = rows.get(i).getElements();
            for (int j = 0; j < elements.size(); j++) {
                Column c = columns.get(j);
                c.setWidth(Math.max(c.getWidth(), elements.get(j).length()));
            }
        }
    }
    // Assembles rows first so the columns can be sized from the row data.
    protected Table getTable(List<Instance> instances, EC2Utils ec2Utils) {
        Table table = new Table();
        table.setRows(getRows(instances, ec2Utils));
        table.setColumns(getColumns(table.getRows()));
        return table;
    }
    public String getTag() {
        return tag;
    }
    public void setTag(String displayTag) {
        this.tag = displayTag;
    }
}
|
3e1e4ed7d5aae3b547a78e3fbd79e86dce5c7d5f | 1,493 | java | Java | fluat_auth/android/src/main/java/cn/mxhchina/fluat_auth/AppUtils.java | AlanJN/fluat_auth | f74a6499a1d8d5363f2f3ca23f5cb07043520ca8 | [
"Apache-2.0"
] | null | null | null | fluat_auth/android/src/main/java/cn/mxhchina/fluat_auth/AppUtils.java | AlanJN/fluat_auth | f74a6499a1d8d5363f2f3ca23f5cb07043520ca8 | [
"Apache-2.0"
] | null | null | null | fluat_auth/android/src/main/java/cn/mxhchina/fluat_auth/AppUtils.java | AlanJN/fluat_auth | f74a6499a1d8d5363f2f3ca23f5cb07043520ca8 | [
"Apache-2.0"
] | null | null | null | 29.86 | 92 | 0.633624 | 12,830 | package cn.mxhchina.fluat_auth;
import android.content.Context;
import android.util.DisplayMetrics;
import android.view.WindowManager;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
public class AppUtils {

    /** Converts density-independent pixels to raw pixels; falls back to the input value on failure. */
    public static int dp2px(Context context, float dipValue) {
        try {
            float density = context.getResources().getDisplayMetrics().density;
            return (int) (dipValue * density + 0.5f);
        } catch (Exception e) {
            return (int) dipValue;
        }
    }

    /** Converts raw pixels to density-independent pixels; falls back to the input value on failure. */
    public static int px2dp(Context context, float px) {
        try {
            float density = context.getResources().getDisplayMetrics().density;
            return (int) (px / density + 0.5f);
        } catch (Exception e) {
            return (int) px;
        }
    }

    /** Screen width in pixels (0 when the window service is unavailable). */
    public static int getPhoneWidthPixels(Context context) {
        return readDisplayMetrics(context).widthPixels;
    }

    /** Screen height in pixels (0 when the window service is unavailable). */
    public static int getPhoneHeightPixels(Context context) {
        return readDisplayMetrics(context).heightPixels;
    }

    // Reads the default display's metrics; returns zeroed metrics when the
    // WindowManager cannot be obtained.
    private static DisplayMetrics readDisplayMetrics(Context context) {
        WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        DisplayMetrics metrics = new DisplayMetrics();
        if (windowManager != null) {
            windowManager.getDefaultDisplay().getMetrics(metrics);
        }
        return metrics;
    }
}
|
3e1e506c3ff04abfce2ef8e24caba4239f1e64b0 | 31,924 | java | Java | jobs/pacman-rule-engine-2.0/src/main/java/com/tmobile/pacman/autofix/publicaccess/PublicAccessAutoFix.java | lifer84/pacbot | 997c240c123d81cf3f55ff5093127c5fda6119c3 | [
"Apache-2.0"
] | 1,165 | 2018-10-05T19:07:34.000Z | 2022-03-28T19:34:27.000Z | jobs/pacman-rule-engine-2.0/src/main/java/com/tmobile/pacman/autofix/publicaccess/PublicAccessAutoFix.java | lifer84/pacbot | 997c240c123d81cf3f55ff5093127c5fda6119c3 | [
"Apache-2.0"
] | 334 | 2018-10-10T14:00:41.000Z | 2022-03-19T16:32:08.000Z | jobs/pacman-rule-engine-2.0/src/main/java/com/tmobile/pacman/autofix/publicaccess/PublicAccessAutoFix.java | lifer84/pacbot | 997c240c123d81cf3f55ff5093127c5fda6119c3 | [
"Apache-2.0"
] | 268 | 2018-10-05T19:53:25.000Z | 2022-03-31T07:39:47.000Z | 42.452128 | 256 | 0.718644 | 12,831 | package com.tmobile.pacman.autofix.publicaccess;
import io.github.resilience4j.retry.Retry;
import io.github.resilience4j.retry.RetryConfig;
import io.github.resilience4j.retry.RetryRegistry;
import java.lang.reflect.Type;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.AuthorizeSecurityGroupIngressRequest;
import com.amazonaws.services.ec2.model.CreateSecurityGroupRequest;
import com.amazonaws.services.ec2.model.CreateSecurityGroupResult;
import com.amazonaws.services.ec2.model.CreateTagsRequest;
import com.amazonaws.services.ec2.model.DeleteSecurityGroupRequest;
import com.amazonaws.services.ec2.model.DescribeInstancesRequest;
import com.amazonaws.services.ec2.model.DescribeInstancesResult;
import com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest;
import com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult;
import com.amazonaws.services.ec2.model.Instance;
import com.amazonaws.services.ec2.model.IpPermission;
import com.amazonaws.services.ec2.model.IpRange;
import com.amazonaws.services.ec2.model.Ipv6Range;
import com.amazonaws.services.ec2.model.ModifyInstanceAttributeRequest;
import com.amazonaws.services.ec2.model.Reservation;
import com.amazonaws.services.ec2.model.SecurityGroup;
import com.amazonaws.services.ec2.model.Tag;
import com.amazonaws.services.ec2.model.UserIdGroupPair;
import com.amazonaws.services.elasticloadbalancing.AmazonElasticLoadBalancing;
import com.amazonaws.services.elasticloadbalancing.model.ApplySecurityGroupsToLoadBalancerRequest;
import com.amazonaws.services.elasticloadbalancing.model.LoadBalancerDescription;
import com.amazonaws.services.elasticloadbalancingv2.model.DescribeLoadBalancersRequest;
import com.amazonaws.services.elasticloadbalancingv2.model.DescribeLoadBalancersResult;
import com.amazonaws.services.elasticloadbalancingv2.model.LoadBalancer;
import com.amazonaws.services.elasticloadbalancingv2.model.SetSecurityGroupsRequest;
import com.amazonaws.services.elasticsearch.AWSElasticsearch;
import com.amazonaws.services.elasticsearch.model.DescribeElasticsearchDomainRequest;
import com.amazonaws.services.elasticsearch.model.DescribeElasticsearchDomainResult;
import com.amazonaws.services.elasticsearch.model.ElasticsearchDomainStatus;
import com.amazonaws.services.elasticsearch.model.UpdateElasticsearchDomainConfigRequest;
import com.amazonaws.services.elasticsearch.model.VPCOptions;
import com.amazonaws.services.rds.AmazonRDS;
import com.amazonaws.services.rds.model.DBInstance;
import com.amazonaws.services.rds.model.DescribeDBInstancesRequest;
import com.amazonaws.services.rds.model.DescribeDBInstancesResult;
import com.amazonaws.services.rds.model.ModifyDBInstanceRequest;
import com.amazonaws.services.redshift.AmazonRedshift;
import com.amazonaws.services.redshift.model.Cluster;
import com.amazonaws.services.redshift.model.DescribeClustersRequest;
import com.amazonaws.services.redshift.model.DescribeClustersResult;
import com.amazonaws.services.redshift.model.ModifyClusterRequest;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.reflect.TypeToken;
import com.tmobile.pacman.common.PacmanSdkConstants;
import com.tmobile.pacman.commons.AWSService;
import com.tmobile.pacman.commons.aws.clients.AWSClientManager;
import com.tmobile.pacman.commons.aws.clients.impl.AWSClientManagerImpl;
import com.tmobile.pacman.commons.exception.RuleExecutionFailedExeption;
import com.tmobile.pacman.commons.exception.UnableToCreateClientException;
import com.tmobile.pacman.util.CommonUtils;
public class PublicAccessAutoFix {
/** SLF4J logger for this auto-fix helper. */
private static final Logger logger = LoggerFactory.getLogger(PublicAccessAutoFix.class);
/** Seconds to wait between retry attempts for AWS describe calls (see RetryConfig usage). */
final static Long WAIT_INTERVAL= 50L;
/** Maximum number of retry attempts for AWS describe calls. */
final static int MAX_ATTEMPTS= 5;
/** Cached AWS client map. NOTE(review): not referenced by this class's visible methods — confirm before removing. */
Map<String, Object> clinetMap = null;
/** Name prefix applied to security groups created during auto-fix. */
static String pacTag = "PacBot created SG During Autofix ";
/** Open IPv4 CIDR that marks an inbound rule as publicly accessible. */
static String cidrIp = "0.0.0.0/0";
/** Open IPv6 CIDR that marks an inbound rule as publicly accessible. */
static String cidrIpv6 = "::/0";
/**
 * Builds the description for a PacBot-created security group, recording the
 * source group and the date the open inbound rule was removed.
 *
 * @param securityGroupId the security group that was copied
 * @return the description text
 */
private static String createSecurityGroupDescription(String securityGroupId) {
    return "PacBot copied this SG from " + securityGroupId
            + " and removed its inbound rule: 0.0.0.0/0 on " + new Date();
}
/**
 * Builds a unique security-group name from the given prefix, the resource id
 * and the current time in milliseconds.
 *
 * @param pacTag name prefix
 * @param resourceId id of the resource being fixed
 * @return the generated security group name
 */
private static String createSecurityGroupName(String pacTag, String resourceId) {
    // Long concatenation yields the same digits as Long.toString(millis).
    return pacTag + resourceId + System.currentTimeMillis();
}
/**
 * Builds an AWS client map for the account/region in the annotation by
 * assuming the role identified by {@code ruleIdentifyingString}.
 *
 * @param targetType the target type (must match an {@link AWSService} enum constant)
 * @param annotation holds the account id and region for the resource
 * @param ruleIdentifyingString role-name suffix appended to the role ARN
 * @return the client map produced by the client manager
 * @throws Exception when the client cannot be created
 */
public static Map<String, Object> getAWSClient(String targetType, Map<String, String> annotation, String ruleIdentifyingString) throws Exception {
    StringBuilder roleArn = new StringBuilder();
    Map<String, Object> clientMap = null;
    // arn:aws:iam::<accountId>:<ruleIdentifyingString>
    roleArn.append(PacmanSdkConstants.ROLE_ARN_PREFIX).append(annotation.get(PacmanSdkConstants.ACCOUNT_ID)).append(":").append(ruleIdentifyingString);
    AWSClientManager awsClientManager = new AWSClientManagerImpl();
    try {
        // Falls back to the SDK default region when the annotation has none.
        clientMap = awsClientManager.getClient(annotation.get(PacmanSdkConstants.ACCOUNT_ID),roleArn.toString(), AWSService.valueOf(targetType.toUpperCase()),Regions.fromName(annotation.get(PacmanSdkConstants.REGION) == null ? Regions.DEFAULT_REGION
                .getName() : annotation
                .get(PacmanSdkConstants.REGION)), ruleIdentifyingString);
    } catch (UnableToCreateClientException e1) {
        String msg = String.format("unable to create client for account %s and region %s",annotation.get(PacmanSdkConstants.ACCOUNT_ID), annotation.get(PacmanSdkConstants.REGION));
        logger.error(msg);
        throw new Exception(msg);
    }
    return clientMap;
}
/**
 * Inspects a security group's inbound rules, flags whether it is publicly
 * accessible (0.0.0.0/0 or ::/0 on the relevant port), and accumulates the
 * sanitized permissions (open ranges stripped) into
 * {@code ipPermissionstobeAdded}.
 *
 * <p>NOTE(review): {@code alreadyCheckedSgSet} is not used in this method
 * body — confirm whether nested-group recursion was intended.
 *
 * @param groupId the security group to inspect
 * @param ipPermissionstobeAdded out-parameter collecting sanitized permissions
 * @param ec2Client the EC2 client
 * @param publiclyAccessible out-parameter; "Yes" is added when an open rule matches
 * @param alreadyCheckedSgSet groups already visited (appears unused here)
 * @param portToCheck port that must match an open rule; 0 matches any port
 * @return the (possibly updated) publiclyAccessible set
 */
public static Set<String> nestedSecurityGroupDetails(String groupId, Collection<IpPermission> ipPermissionstobeAdded, AmazonEC2 ec2Client, Set<String> publiclyAccessible,Set<String> alreadyCheckedSgSet,Integer portToCheck) {
    Set<String> sgSet = new HashSet<>();
    sgSet.add(groupId);
    List<SecurityGroup> securityGroups = getExistingSecurityGroupDetails(sgSet, ec2Client);
    List<IpRange> updatedIpranges;
    List<Ipv6Range> updatedIp6ranges;
    for (SecurityGroup securityGroup : securityGroups) {
        for (IpPermission ipPermission : securityGroup.getIpPermissions()) {
            updatedIpranges = new ArrayList<>();
            updatedIp6ranges = new ArrayList<>();
            // Pass 1: classify IPv4 ranges; open ranges on the checked port are
            // dropped (replaced by an empty IpRange per referenced group pair),
            // everything else is kept in updatedIpranges.
            for (IpRange ipRangeValue : ipPermission.getIpv4Ranges()) {
                if (ipRangeValue.getCidrIp().equals(cidrIp)) {
                    if ((portToCheck > 0 && ipPermission.getFromPort().equals(portToCheck)) || portToCheck.equals(0)) {
                        publiclyAccessible.add("Yes");
                        for (UserIdGroupPair usergroupPair : ipPermission.getUserIdGroupPairs()) {
                            IpRange ipv4Ranges = new IpRange();
                            ipPermission.setIpv4Ranges(Arrays.asList(ipv4Ranges));
                            ipPermission.setUserIdGroupPairs(Arrays.asList(usergroupPair));
                            ipPermissionstobeAdded.add(ipPermission);
                        }
                    }else {
                        updatedIpranges.add(ipRangeValue);
                    }
                } else {
                    updatedIpranges.add(ipRangeValue);
                }
            }
            // Permissions defined purely via referenced security groups (no CIDR
            // ranges at all) are copied one-per-group-pair.
            if(ipPermission.getIpv4Ranges().isEmpty() && ipPermission.getIpv6Ranges().isEmpty() ){
                for (UserIdGroupPair usergroupPair : ipPermission.getUserIdGroupPairs()) {
                    IpPermission requiredIpPermission = new IpPermission();
                    requiredIpPermission.setFromPort(ipPermission.getFromPort());
                    requiredIpPermission.setToPort(ipPermission.getToPort());
                    requiredIpPermission.setIpProtocol(ipPermission.getIpProtocol());
                    requiredIpPermission.setIpRanges(ipPermission.getIpRanges());
                    requiredIpPermission.setIpv4Ranges(ipPermission.getIpv4Ranges());
                    requiredIpPermission.setIpv6Ranges(ipPermission.getIpv6Ranges());
                    requiredIpPermission.setUserIdGroupPairs(Arrays.asList(usergroupPair));
                    requiredIpPermission.setPrefixListIds(ipPermission.getPrefixListIds());
                    ipPermissionstobeAdded.add(requiredIpPermission);
                }
            }
            // Pass 2: same classification for IPv6 ranges.
            for (Ipv6Range ip6RangeValue : ipPermission.getIpv6Ranges()) {
                if (ip6RangeValue.getCidrIpv6()!=null && ip6RangeValue.getCidrIpv6().equals(cidrIpv6)) {
                    if((portToCheck > 0 && ipPermission.getFromPort().equals(portToCheck)) || portToCheck.equals(0)){
                        publiclyAccessible.add("Yes");
                        for (UserIdGroupPair usergroupPair : ipPermission.getUserIdGroupPairs()) {
                            Ipv6Range ipv6Ranges = new Ipv6Range();
                            ipPermission.setIpv6Ranges(Arrays.asList(ipv6Ranges));
                            ipPermission.setUserIdGroupPairs(Arrays.asList(usergroupPair));
                            ipPermissionstobeAdded.add(ipPermission);
                        }
                    }else{
                        updatedIp6ranges.add(ip6RangeValue);
                    }
                } else {
                    updatedIp6ranges.add(ip6RangeValue);
                }
            }
            // Write the kept (non-open) ranges back onto the permission.
            if (!updatedIpranges.isEmpty()) {
                ipPermission.setIpv4Ranges(updatedIpranges);
                // Any remaining open IPv6 range is cleared outright.
                for (Ipv6Range ip6RangeValue : ipPermission.getIpv6Ranges()) {
                    if (ip6RangeValue.getCidrIpv6()!=null && ip6RangeValue.getCidrIpv6().equals(cidrIpv6)) {
                        List<Ipv6Range> clearIpv6ranges = new ArrayList<>();
                        ipPermission.setIpv6Ranges(clearIpv6ranges);
                    }
                }
            }
            if (!updatedIp6ranges.isEmpty()) {
                ipPermission.setIpv6Ranges(updatedIp6ranges);
            }
            // Keep the sanitized permission once, avoiding duplicates.
            if ((!updatedIpranges.isEmpty() || !updatedIp6ranges.isEmpty()) && (!ipPermissionstobeAdded.contains(ipPermission))) {
                ipPermissionstobeAdded.add(ipPermission);
            }
        }
    }
    return publiclyAccessible;
}
/**
 * Copies the given security group into a new PacBot-managed group, authorizes
 * it with the sanitized inbound rules and tags it for later cleanup.
 *
 * <p>If {@code ipPermissionsToBeAdded} is empty, the source group's existing
 * permissions are reused with every open IPv4 range replaced by
 * {@code defaultCidrIp} and every IPv6 range cleared.
 *
 * @param sourceSecurityGroupId the security group being copied
 * @param vpcId the VPC in which the new group is created
 * @param ec2Client the EC2 client
 * @param ipPermissionsToBeAdded sanitized inbound rules to authorize
 * @param resourceId the resource being fixed (used in the new group's name)
 * @param defaultCidrIp CIDR that replaces open IPv4 ranges in the fallback path
 * @param existingIpPermissions the source group's current inbound rules
 * @return the id of the newly created security group
 * @throws Exception if the copy, authorization or tagging fails
 */
public static String createSecurityGroup(String sourceSecurityGroupId, String vpcId, AmazonEC2 ec2Client, Collection<IpPermission> ipPermissionsToBeAdded, String resourceId,String defaultCidrIp,List<IpPermission> existingIpPermissions) throws Exception {
    String createdSecurityGroupId = null;
    try {
        CreateSecurityGroupRequest createsgRequest = new CreateSecurityGroupRequest();
        createsgRequest.setGroupName(createSecurityGroupName(pacTag,resourceId));
        createsgRequest.setVpcId(vpcId);
        createsgRequest.setDescription(createSecurityGroupDescription(sourceSecurityGroupId));
        CreateSecurityGroupResult createResult = ec2Client.createSecurityGroup(createsgRequest);
        createdSecurityGroupId = createResult.getGroupId();
        if (!createdSecurityGroupId.isEmpty()) {
            logger.info("Security Group {} created successfully" ,createdSecurityGroupId);
            // Authorize newly created securityGroup with inbound rules.
            AuthorizeSecurityGroupIngressRequest authRequest = new AuthorizeSecurityGroupIngressRequest();
            authRequest.setGroupId(createdSecurityGroupId);
            if(ipPermissionsToBeAdded.isEmpty()){
                // Fallback: no sanitized rules supplied — reuse the source rules
                // with open IPv4 ranges swapped for the default CIDR and IPv6
                // ranges cleared.
                IpRange ipv4Ranges = new IpRange();
                ipv4Ranges.setCidrIp(defaultCidrIp);
                for (IpPermission ipPermission : existingIpPermissions) {
                    if (!ipPermission.getIpv4Ranges().isEmpty()) {
                        ipPermission.setIpv4Ranges(Arrays.asList(ipv4Ranges));
                    }
                    if (!ipPermission.getIpv6Ranges().isEmpty()) {
                        Ipv6Range ipv6Range = new Ipv6Range();
                        ipPermission.setIpv6Ranges(Arrays.asList(ipv6Range));
                    }
                    if (!ipPermission.getIpv4Ranges().isEmpty() || !ipPermission.getIpv6Ranges().isEmpty()) {
                        ipPermissionsToBeAdded.add(ipPermission);
                    }
                }
            }
            authRequest.setIpPermissions(ipPermissionsToBeAdded);
            ec2Client.authorizeSecurityGroupIngress(authRequest);
            // Tag the new group so the cleanup job can find and delete it later.
            String deleteSgTag = CommonUtils.getPropValue("deleteSgTag");
            Map<String, String> tagMap = new HashMap<>(); // was a raw HashMap
            tagMap.put(deleteSgTag, "true");
            CreateTagsRequest createTagsRequest = new CreateTagsRequest(Arrays.asList(createdSecurityGroupId), new ArrayList<>());
            createTagsRequest.setTags(tagMap.entrySet().stream().map(t -> new Tag(t.getKey(), t.getValue())).collect(Collectors.toList()));
            try {
                ec2Client.createTags(createTagsRequest);
            } catch (AmazonServiceException ase) {
                logger.error("error tagging sg - > " + resourceId, ase);
                throw ase;
            }
        }
    } catch (Exception e) {
        // Log with the stack trace and preserve the cause instead of discarding it.
        logger.error(e.getMessage(), e);
        throw new RuntimeException(sourceSecurityGroupId+ " SG copy failed", e);
    }
    return createdSecurityGroupId;
}
/**
 * Describes the given security groups, retrying the AWS call via resilience4j
 * (up to MAX_ATTEMPTS attempts, WAIT_INTERVAL seconds apart).
 *
 * @param securityGroupList the security group ids to describe
 * @param ec2Client the EC2 client
 * @return the matching security groups
 */
public static List<SecurityGroup> getExistingSecurityGroupDetails(Set<String> securityGroupList, AmazonEC2 ec2Client) {
    RetryConfig config = RetryConfig.custom().maxAttempts(MAX_ATTEMPTS).waitDuration(Duration.ofSeconds(WAIT_INTERVAL)).build();
    RetryRegistry registry = RetryRegistry.of(config);
    DescribeSecurityGroupsRequest securityGroups = new DescribeSecurityGroupsRequest();
    securityGroups.setGroupIds(securityGroupList);
    // Retry instance keyed on the request's string form.
    Retry retry = registry.retry(securityGroups.toString());
    // Wrap the describe call so transient failures are retried transparently.
    Function<Integer, List<SecurityGroup>> decorated
            = Retry.decorateFunction(retry, (Integer s) -> {
        DescribeSecurityGroupsResult groupsResult = ec2Client.describeSecurityGroups(securityGroups);
        return groupsResult.getSecurityGroups();
    });
    return decorated.apply(1); // the argument is ignored by the lambda
}
/**
 * Replaces the security groups attached to the given EC2 instance with the
 * supplied set.
 *
 * @param amazonEC2 the EC2 client
 * @param sgIdToBeAttached security group ids to attach
 * @param resourceId the instance id
 * @return true when the modify call succeeded
 * @throws Exception if the attribute modification fails
 */
public static boolean applySecurityGroupsToEc2(AmazonEC2 amazonEC2, Set<String> sgIdToBeAttached, String resourceId) throws Exception {
    try {
        ModifyInstanceAttributeRequest request = new ModifyInstanceAttributeRequest();
        request.setInstanceId(resourceId);
        request.setGroups(sgIdToBeAttached);
        amazonEC2.modifyInstanceAttribute(request);
        return true;
    } catch (Exception e) {
        logger.error("Apply Security Group operation failed for ec2 {}" , resourceId );
        throw new Exception(e);
    }
}
/**
 * Fetches the EC2 instance for the given id, retrying the describe call via
 * resilience4j (up to MAX_ATTEMPTS attempts, WAIT_INTERVAL seconds apart).
 *
 * <p>Assumes exactly one reservation/instance matches the id; throws
 * IndexOutOfBoundsException otherwise.
 *
 * @param clientMap map holding the EC2 client under the "client" key
 * @param resourceId the instance id
 * @return the first matching instance
 * @throws Exception if the lookup fails after all retries
 */
public static Instance getInstanceDetailsForEc2(Map<String,Object> clientMap,String resourceId) throws Exception {
    AmazonEC2 ec2Client = (AmazonEC2) clientMap.get("client");
    DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest();
    describeInstancesRequest.setInstanceIds(Arrays.asList(resourceId));
    RetryConfig config = RetryConfig.custom().maxAttempts(MAX_ATTEMPTS).waitDuration(Duration.ofSeconds(WAIT_INTERVAL)).build();
    RetryRegistry registry = RetryRegistry.of(config);
    Retry retry = registry.retry(describeInstancesRequest.toString());
    // Wrap the describe call so transient failures are retried.
    Function<Integer, Instance> decorated
            = Retry.decorateFunction(retry, (Integer s) -> {
        DescribeInstancesResult describeInstancesResult = ec2Client.describeInstances(describeInstancesRequest);
        List<Reservation> reservations = describeInstancesResult.getReservations();
        Reservation reservation = reservations.get(0);
        List<Instance> instances = reservation.getInstances();
        return instances.get(0);
    });
    return decorated.apply(1); // the argument is ignored by the lambda
}
/**
 * Returns the security group ids attached to the given application load
 * balancer (ELBv2).
 *
 * @param clientMap map holding the ELBv2 client under the "client" key
 * @param resourceId the load balancer ARN
 * @return the attached security group ids
 * @throws Exception if the describe call fails
 */
public static List<String> getSgListForAppElbResource(Map<String,Object> clientMap,String resourceId) throws Exception {
    com.amazonaws.services.elasticloadbalancingv2.AmazonElasticLoadBalancing elbClient =
            (com.amazonaws.services.elasticloadbalancingv2.AmazonElasticLoadBalancing) clientMap.get("client");
    DescribeLoadBalancersRequest request = new DescribeLoadBalancersRequest();
    request.setLoadBalancerArns(Arrays.asList(resourceId));
    List<LoadBalancer> loadBalancers = elbClient.describeLoadBalancers(request).getLoadBalancers();
    return loadBalancers.get(0).getSecurityGroups();
}
/**
 * Returns the security group ids attached to the given classic load balancer.
 *
 * @param clientMap map holding the classic ELB client under the "client" key
 * @param resourceId the load balancer name
 * @return the attached security group ids
 * @throws Exception if the describe call fails
 */
public static List<String> getSgListForClassicElbResource(Map<String,Object> clientMap,String resourceId) throws Exception {
    AmazonElasticLoadBalancing elbClient = (AmazonElasticLoadBalancing) clientMap.get("client");
    // The v1 model classes are referenced fully-qualified because the imports
    // in this file are taken by the ELBv2 equivalents.
    com.amazonaws.services.elasticloadbalancing.model.DescribeLoadBalancersRequest request =
            new com.amazonaws.services.elasticloadbalancing.model.DescribeLoadBalancersRequest();
    request.setLoadBalancerNames(Arrays.asList(resourceId));
    com.amazonaws.services.elasticloadbalancing.model.DescribeLoadBalancersResult result =
            elbClient.describeLoadBalancers(request);
    List<LoadBalancerDescription> descriptions = result.getLoadBalancerDescriptions();
    return descriptions.get(0).getSecurityGroups();
}
/**
 * Replaces the security groups attached to a classic (v1) load balancer.
 * Failures are logged and reported via the return value instead of an
 * exception (unlike the app-ELB variant, which rethrows).
 *
 * @param amazonClassicElasticLoadBalancing the classic ELB client
 * @param sgIdToBeAttached security group ids to attach
 * @param resourceId name of the classic load balancer
 * @return true if the groups were applied, false on failure
 * @throws Exception declared for interface compatibility; failures are
 *         currently reported via the return value
 */
public static boolean applySecurityGroupsToClassicELB(AmazonElasticLoadBalancing amazonClassicElasticLoadBalancing, List<String> sgIdToBeAttached, String resourceId) throws Exception {
    try {
        ApplySecurityGroupsToLoadBalancerRequest groupsRequest = new ApplySecurityGroupsToLoadBalancerRequest();
        groupsRequest.setSecurityGroups(sgIdToBeAttached);
        groupsRequest.setLoadBalancerName(resourceId);
        amazonClassicElasticLoadBalancing.applySecurityGroupsToLoadBalancer(groupsRequest);
        return true;
    } catch (Exception e) {
        // Keep the historical contract of returning false on failure, but log
        // the cause as well — the stack trace was previously swallowed.
        logger.error("Apply Security Group operation failed for classic ELB {}", resourceId, e);
        return false;
    }
}
/**
 * Replaces the security groups attached to an application (v2) load balancer.
 *
 * @param amazonApplicationElasticLoadBalancing the ELB v2 client
 * @param sgIdToBeAttached security group ids to attach
 * @param resourceId ARN of the application load balancer
 * @return true if the groups were applied
 * @throws Exception wrapping the underlying AWS failure
 */
public static boolean applySecurityGroupsToAppELB(com.amazonaws.services.elasticloadbalancingv2.AmazonElasticLoadBalancing amazonApplicationElasticLoadBalancing,Set<String> sgIdToBeAttached, String resourceId) throws Exception {
    try {
        SetSecurityGroupsRequest groupsRequest = new SetSecurityGroupsRequest();
        groupsRequest.setSecurityGroups(sgIdToBeAttached);
        groupsRequest.setLoadBalancerArn(resourceId);
        amazonApplicationElasticLoadBalancing.setSecurityGroups(groupsRequest);
        return true;
    } catch (Exception e) {
        // Log the cause before rethrowing; previously only the resource id was
        // logged and the stack trace was lost at this level.
        logger.error("Apply Security Group operation failed for app ELB {}", resourceId, e);
        throw new Exception(e);
    }
}
/**
 * Fetches the Redshift cluster(s) matching the given cluster identifier.
 * (Method name keeps the historical "Redhift" spelling for caller
 * compatibility.)
 *
 * @param clientMap map holding the Redshift client under the "client" key
 * @param resourceId the cluster identifier
 * @return clusters matching the identifier
 * @throws Exception if the AWS describe call fails
 */
public static List<Cluster> getClusterForRedhiftResource(Map<String,Object> clientMap,String resourceId) throws Exception {
    AmazonRedshift redshiftClient = (AmazonRedshift) clientMap.get("client");
    DescribeClustersRequest request = new DescribeClustersRequest();
    request.setClusterIdentifier(resourceId);
    return redshiftClient.describeClusters(request).getClusters();
}
/**
 * Replaces the VPC security groups attached to a Redshift cluster.
 *
 * @param amazonRedshift the Redshift client
 * @param sgIdToBeAttached security group ids to attach
 * @param resourceId the cluster identifier
 * @return true if the groups were applied
 * @throws Exception wrapping the underlying AWS failure
 */
public static boolean applySecurityGroupsToRedshift(AmazonRedshift amazonRedshift, Set<String> sgIdToBeAttached, String resourceId) throws Exception {
    try {
        ModifyClusterRequest clusterRequest = new ModifyClusterRequest();
        clusterRequest.setClusterIdentifier(resourceId);
        clusterRequest.setVpcSecurityGroupIds(sgIdToBeAttached);
        amazonRedshift.modifyCluster(clusterRequest);
        return true;
    } catch (Exception e) {
        // Log the cause before rethrowing; previously only the resource id was
        // logged here.
        logger.error("Apply Security Group operation failed for redshift {}", resourceId, e);
        throw new Exception(e);
    }
}
/**
 * Fetches the RDS DB instance(s) matching the given instance identifier.
 *
 * @param clientMap map holding the RDS client under the "client" key
 * @param resourceId the DB instance identifier
 * @return DB instances matching the identifier
 * @throws Exception if the AWS describe call fails
 */
public static List<DBInstance> getDBInstanceForRdsDbResource(Map<String,Object> clientMap,String resourceId) throws Exception {
    AmazonRDS rdsClient = (AmazonRDS) clientMap.get("client");
    DescribeDBInstancesRequest request = new DescribeDBInstancesRequest();
    request.setDBInstanceIdentifier(resourceId);
    return rdsClient.describeDBInstances(request).getDBInstances();
}
/**
 * Replaces the VPC security groups attached to an RDS DB instance.
 *
 * @param amazonRDS the RDS client
 * @param sgIdToBeAttached security group ids to attach
 * @param resourceId the DB instance identifier
 * @return true if the groups were applied
 * @throws Exception wrapping the underlying AWS failure
 */
public static boolean applySecurityGroupsToRdsDb(AmazonRDS amazonRDS, Set<String> sgIdToBeAttached, String resourceId) throws Exception {
    try {
        ModifyDBInstanceRequest instanceRequest = new ModifyDBInstanceRequest();
        instanceRequest.setDBInstanceIdentifier(resourceId);
        instanceRequest.setVpcSecurityGroupIds(sgIdToBeAttached);
        amazonRDS.modifyDBInstance(instanceRequest);
        return true;
    } catch (Exception e) {
        // Log the cause before rethrowing; previously only the resource id was
        // logged here.
        logger.error("Apply Security Group operation failed for rdsdb {}", resourceId, e);
        throw new Exception(e);
    }
}
/**
 * Deletes the EC2 security group with the given group id.
 *
 * @param resourceId the security group id to delete
 * @param ec2Client the EC2 client
 * @return Boolean.TRUE when the delete call returned without throwing
 * @throws Exception if the AWS delete call fails
 */
public static Boolean deleteSecurityGroup(String resourceId,AmazonEC2 ec2Client) throws Exception {
    DeleteSecurityGroupRequest request = new DeleteSecurityGroupRequest();
    request.setGroupId(resourceId);
    ec2Client.deleteSecurityGroup(request);
    return true;
}
/**
 * Fetches the domain status of an Elasticsearch domain.
 *
 * @param clientMap map holding the Elasticsearch client under the "client" key
 * @param resourceId the Elasticsearch domain name
 * @return the status of the requested domain
 * @throws Exception if the AWS describe call fails
 */
public static ElasticsearchDomainStatus getDomainStatusForEsResource(Map<String,Object> clientMap,String resourceId) throws Exception {
    AWSElasticsearch esClient = (AWSElasticsearch) clientMap.get("client");
    DescribeElasticsearchDomainRequest request = new DescribeElasticsearchDomainRequest();
    request.setDomainName(resourceId);
    return esClient.describeElasticsearchDomain(request).getDomainStatus();
}
/**
 * Checks if is es having public access.
 *
 * Scans an access-policy "Statement" array and reports true when some
 * statement grants access to principal "*" — either unconditionally, or with
 * an IpAddress/aws:SourceIp condition that matches the given cidrIp.
 *
 * @param jsonArray the json array of policy statements
 * @param cidrIp the cidr ip considered "open to the world" (e.g. 0.0.0.0/0)
 * @return true, if is es having public access
 */
public static boolean isEsHavingPublicAccess(JsonArray jsonArray,String cidrIp) {
boolean isPublicAccess = false;
// Accumulators for the three possible shapes of aws:SourceIp: object, array,
// or plain string. They are intentionally reused across loop iterations.
JsonObject conditionJsonObject = new JsonObject();
JsonArray conditionJsonArray = new JsonArray();
String conditionStr = null;
JsonObject principal = new JsonObject();
String effect = null;
String principalStr = null;
String aws = null;
if (jsonArray.size() > 0) {
for (int i = 0; i < jsonArray.size(); i++) {
JsonObject firstObject = (JsonObject) jsonArray.get(i);
// Principal can be an object ({"AWS": ...}) or the bare string "*".
// NOTE(review): a statement without any "Principal" key would NPE in the
// else branch — TODO confirm callers always pass principal-bearing statements.
if (firstObject.has("Principal") && firstObject.get("Principal").isJsonObject()) {
principal = firstObject.get("Principal").getAsJsonObject();
} else {
principalStr = firstObject.get("Principal").getAsString();
}
try {
if (principal.has("AWS") || "*".equals(principalStr)) {
JsonArray awsArray = null;
effect = firstObject.get("Effect").getAsString();
// Array-valued principals are deliberately skipped (only logged).
if (principal.has("AWS") && principal.get("AWS").isJsonArray()) {
awsArray = principal.get("AWS").getAsJsonArray();
if (awsArray.size() > 0) {
logger.debug("Not checking the s3 read/write public access for principal array values : {}",awsArray);
}
}
if (principal.has("AWS") && !principal.get("AWS").isJsonArray()) {
aws = principal.get("AWS").getAsString();
}
if ("*".equals(principalStr)) {
aws = firstObject.get("Principal").getAsString();
}
// Case 1: principal "*" with no condition at all — public if allowed.
if ("*".equals(aws) && !firstObject.has("Condition")) {
if (effect.equals("Allow")) {
isPublicAccess = true;
}
// Case 2: principal "*" with an Allow and an IpAddress condition —
// public only when the condition's aws:SourceIp matches cidrIp.
} else if ("*".equals(aws) && firstObject.has("Condition") && effect.equals("Allow")) {
if (firstObject.has("Condition")
&& (firstObject.get("Condition")
.getAsJsonObject().has("IpAddress"))
&& (firstObject.get("Condition")
.getAsJsonObject().get("IpAddress")
.getAsJsonObject()
.has("aws:SourceIp"))) {
// aws:SourceIp may be serialized as object, array, or string.
if (firstObject.get("Condition")
.getAsJsonObject().get("IpAddress")
.getAsJsonObject().get("aws:SourceIp")
.isJsonObject()) {
conditionJsonObject = firstObject
.get("Condition").getAsJsonObject()
.get("IpAddress").getAsJsonObject()
.get("aws:SourceIp")
.getAsJsonObject();
} else if (firstObject.get("Condition")
.getAsJsonObject().get("IpAddress")
.getAsJsonObject().get("aws:SourceIp")
.isJsonArray()) {
conditionJsonArray = firstObject
.get("Condition").getAsJsonObject()
.get("IpAddress").getAsJsonObject()
.get("aws:SourceIp")
.getAsJsonArray();
} else {
conditionStr = firstObject.get("Condition")
.getAsJsonObject().get("IpAddress")
.getAsJsonObject()
.get("aws:SourceIp").getAsString();
}
}
// Deserialize the array form into a List<String> for a contains check.
JsonElement cJson = conditionJsonArray;
Type listType = new TypeToken<List<String>>() {
}.getType();
List<String> conditionList = new Gson().fromJson(cJson, listType);
// NOTE(review): conditionJsonObject is a plain JsonObject, so
// isJsonNull() is presumably always false, and toString() compares the
// serialized object (e.g. {...}) against a CIDR string — verify this
// branch is ever effective.
if (!conditionJsonObject.isJsonNull() && conditionJsonObject.toString().equals(cidrIp)) {
isPublicAccess = true;
}
if (null != conditionStr && conditionStr.contains(cidrIp)) {
isPublicAccess = true;
}
if (conditionList.contains(cidrIp)) {
isPublicAccess = true;
}
}
} catch (Exception e1) {
// Any malformed statement aborts the whole check.
logger.error("error in public access autofic ", e1);
throw new RuleExecutionFailedExeption(e1.getMessage());
}
}
}
return isPublicAccess;
}
/**
 * Replaces the VPC security groups of an Elasticsearch domain by updating its
 * domain configuration.
 *
 * @param awsElasticsearch the aws elasticsearch client
 * @param sgIdToBeAttached security group ids to attach
 * @param resourceId the Elasticsearch domain name
 * @param domainConfigRequest request object to populate and submit
 * @return true if the configuration update was submitted
 * @throws Exception wrapping the underlying AWS failure
 */
public static boolean applySecurityGroupsToElacticSearch(AWSElasticsearch awsElasticsearch, List<String> sgIdToBeAttached,String resourceId, UpdateElasticsearchDomainConfigRequest domainConfigRequest) throws Exception {
    try {
        VPCOptions vPCOptions = new VPCOptions();
        vPCOptions.setSecurityGroupIds(sgIdToBeAttached);
        domainConfigRequest.setVPCOptions(vPCOptions);
        domainConfigRequest.setDomainName(resourceId);
        awsElasticsearch.updateElasticsearchDomainConfig(domainConfigRequest);
        return true;
    } catch (Exception e) {
        // Bug fix: the message previously said "elasticache" although this
        // method updates an Elasticsearch domain; also log the cause.
        logger.error("Apply Security Group operation failed for elasticsearch {}", resourceId, e);
        throw new Exception(e);
    }
}
}
|
3e1e5111d54352b6684f37b6abd3cc5defb5b759 | 515 | java | Java | chapter_003/src/test/java/ru/job4j/dictionary/PhoneDictionaryTest.java | Web-Programmer-B-P/job4j | 2a963b5cfe1cf1e4cea395dff8a7ec74545de1e9 | [
"Apache-2.0"
] | null | null | null | chapter_003/src/test/java/ru/job4j/dictionary/PhoneDictionaryTest.java | Web-Programmer-B-P/job4j | 2a963b5cfe1cf1e4cea395dff8a7ec74545de1e9 | [
"Apache-2.0"
] | 2 | 2021-12-10T01:25:54.000Z | 2022-02-16T01:08:36.000Z | chapter_003/src/test/java/ru/job4j/dictionary/PhoneDictionaryTest.java | Web-Programmer-B-P/job4j | 2a963b5cfe1cf1e4cea395dff8a7ec74545de1e9 | [
"Apache-2.0"
] | null | null | null | 27.105263 | 70 | 0.683495 | 12,832 | package ru.job4j.dictionary;
import org.junit.Test;
import ru.job4j.dictionary.model.Person;
import java.util.List;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
 * Verifies that {@code PhoneDictionary.find} matches an entry by a phone
 * number fragment.
 */
public class PhoneDictionaryTest {
    @Test
    public void whenFindByName() {
        PhoneDictionary dictionary = new PhoneDictionary();
        dictionary.add(new Person("This", "Is", "234329956", "Voronezh"));
        // "56" is a substring of the stored phone number, so the entry matches.
        var found = dictionary.find("56").iterator().next();
        assertThat(found.getSurname(), is("Is"));
    }
}
|
3e1e51e306dc7d472dda1c2558b66b4ff8d8d01f | 10,693 | java | Java | twister2/twister2/comms/src/java/edu/iu/dsc/tws/comms/dfw/DataFlowBroadcast.java | vibhatha/Twister2-Docker | 7557ad0e11d9fdc96c2a6bea01972ec567a27d19 | [
"Apache-2.0"
] | null | null | null | twister2/twister2/comms/src/java/edu/iu/dsc/tws/comms/dfw/DataFlowBroadcast.java | vibhatha/Twister2-Docker | 7557ad0e11d9fdc96c2a6bea01972ec567a27d19 | [
"Apache-2.0"
] | null | null | null | twister2/twister2/comms/src/java/edu/iu/dsc/tws/comms/dfw/DataFlowBroadcast.java | vibhatha/Twister2-Docker | 7557ad0e11d9fdc96c2a6bea01972ec567a27d19 | [
"Apache-2.0"
] | null | null | null | 34.605178 | 98 | 0.713364 | 12,833 | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package edu.iu.dsc.tws.comms.dfw;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import edu.iu.dsc.tws.common.config.Config;
import edu.iu.dsc.tws.comms.api.DataFlowOperation;
import edu.iu.dsc.tws.comms.api.MessageHeader;
import edu.iu.dsc.tws.comms.api.MessageReceiver;
import edu.iu.dsc.tws.comms.api.MessageType;
import edu.iu.dsc.tws.comms.api.TWSChannel;
import edu.iu.dsc.tws.comms.core.TaskPlan;
import edu.iu.dsc.tws.comms.dfw.io.MessageDeSerializer;
import edu.iu.dsc.tws.comms.dfw.io.MessageSerializer;
import edu.iu.dsc.tws.comms.dfw.io.SingleMessageDeSerializer;
import edu.iu.dsc.tws.comms.dfw.io.SingleMessageSerializer;
import edu.iu.dsc.tws.comms.routing.BinaryTreeRouter;
import edu.iu.dsc.tws.comms.utils.KryoSerializer;
/**
 * Broadcast data-flow operation: one source task sends each message to every
 * destination task, routed along a binary tree ({@link BinaryTreeRouter}).
 * Serialization, channel I/O and pending queues are delegated to
 * {@link ChannelDataFlowOperation}; completed messages are handed to the
 * configured {@link MessageReceiver}.
 */
public class DataFlowBroadcast implements DataFlowOperation, ChannelReceiver {
private static final Logger LOG = Logger.getLogger(DataFlowBroadcast.class.getName());
// Single source task id of the broadcast.
private int source;
// All destination task ids.
private Set<Integer> destinations;
// Binary-tree routing between source and destinations.
private BinaryTreeRouter router;
// Receives fully assembled messages at the destinations.
private MessageReceiver finalReceiver;
// Handles serialization and channel send/receive on our behalf.
private ChannelDataFlowOperation delegete;
private Config config;
private TaskPlan instancePlan;
// This worker's executor id.
private int executor;
// Communication edge id of this operation.
private int edge;
private MessageType type;
// Per-source bounded queues of messages waiting to be serialized and sent.
private Map<Integer, ArrayBlockingQueue<Pair<Object, OutMessage>>>
pendingSendMessagesPerSource = new HashMap<>();
// Guards finalReceiver.progress(), which is not assumed to be thread safe.
private Lock lock = new ReentrantLock();
// Routing parameters are identical per source, so they are computed once.
private Map<Integer, RoutingParameters> routingParametersCache = new HashMap<>();
public DataFlowBroadcast(TWSChannel channel, int src, Set<Integer> dests,
MessageReceiver finalRcvr) {
this.source = src;
this.destinations = dests;
this.finalReceiver = finalRcvr;
this.delegete = new ChannelDataFlowOperation(channel);
}
@Override
public void close() {
}
@Override
public void finish(int target) {
}
@Override
public TaskPlan getTaskPlan() {
return instancePlan;
}
/**
 * Delivers a fully received channel message to the final receiver, targeting
 * the main task of this executor.
 */
public boolean receiveMessage(ChannelMessage currentMessage, Object object) {
MessageHeader header = currentMessage.getHeader();
// we always receive to the main task
return finalReceiver.onMessage(
header.getSourceId(), DataFlowContext.DEFAULT_DESTINATION,
router.mainTaskOfExecutor(instancePlan.getThisExecutor(),
DataFlowContext.DEFAULT_DESTINATION), header.getFlags(), object);
}
/**
 * Initialize the broadcast: build the router, set up per-source send queues
 * and serializers, per-executor receive queues and deserializers, prime the
 * routing-parameter cache, and initialize the channel delegate.
 *
 * @param cfg job configuration
 * @param t message data type
 * @param tPlan task plan describing task-to-executor placement
 * @param ed communication edge id
 */
public void init(Config cfg, MessageType t, TaskPlan tPlan, int ed) {
this.config = cfg;
this.instancePlan = tPlan;
this.type = t;
this.edge = ed;
this.executor = tPlan.getThisExecutor();
// we will only have one distinct route
router = new BinaryTreeRouter(cfg, tPlan, source, destinations);
if (this.finalReceiver != null) {
this.finalReceiver.init(cfg, this, receiveExpectedTaskIds());
} else {
throw new RuntimeException("Final receiver is required");
}
LOG.log(Level.FINE, String.format("%d bast sources %d dest %s send tasks: %s", executor,
source, destinations, router.sendQueueIds()));
Map<Integer, Queue<Pair<Object, ChannelMessage>>> pendingReceiveMessagesPerSource =
new HashMap<>();
Map<Integer, Queue<ChannelMessage>> pendingReceiveDeSerializations = new HashMap<>();
Map<Integer, MessageSerializer> serializerMap = new HashMap<>();
Map<Integer, MessageDeSerializer> deSerializerMap = new HashMap<>();
// One bounded send queue and one serializer per sending task on this worker.
Set<Integer> srcs = router.sendQueueIds();
for (int s : srcs) {
// later look at how not to allocate pairs for this each time
ArrayBlockingQueue<Pair<Object, OutMessage>> pendingSendMessages =
new ArrayBlockingQueue<Pair<Object, OutMessage>>(
DataFlowContext.sendPendingMax(cfg));
pendingSendMessagesPerSource.put(s, pendingSendMessages);
serializerMap.put(s, new SingleMessageSerializer(new KryoSerializer()));
}
int maxReceiveBuffers = DataFlowContext.receiveBufferCount(cfg);
int receiveExecutorsSize = receivingExecutors().size();
if (receiveExecutorsSize == 0) {
receiveExecutorsSize = 1;
}
// One receive queue and one deserializer per executor we receive from.
Set<Integer> execs = router.receivingExecutors();
for (int e : execs) {
int capacity = maxReceiveBuffers * 2 * receiveExecutorsSize;
Queue<Pair<Object, ChannelMessage>> pendingReceiveMessages =
new ArrayBlockingQueue<Pair<Object, ChannelMessage>>(
capacity);
pendingReceiveMessagesPerSource.put(e, pendingReceiveMessages);
pendingReceiveDeSerializations.put(e, new ArrayBlockingQueue<ChannelMessage>(capacity));
deSerializerMap.put(e, new SingleMessageDeSerializer(new KryoSerializer()));
}
// Pre-compute routing parameters for each source (they never change).
for (Integer s : srcs) {
routingParametersCache.put(s, sendRoutingParameters(s, 0));
}
delegete.init(cfg, t, tPlan, ed,
router.receivingExecutors(), router.isLastReceiver(), this,
pendingSendMessagesPerSource, pendingReceiveMessagesPerSource,
pendingReceiveDeSerializations, serializerMap, deSerializerMap, false);
}
/** Broadcast has no partial (combine) phase. */
@Override
public boolean sendPartial(int src, Object message, int flags) {
throw new RuntimeException("Not supported method");
}
/**
 * True when the channel delegate has drained and the final receiver needs no
 * further progress.
 * NOTE(review): when the lock is contended this returns true without
 * consulting the receiver — confirm callers poll this repeatedly.
 */
public boolean isComplete() {
boolean done = delegete.isComplete();
if (lock.tryLock()) {
try {
boolean needsFurtherProgress = finalReceiver.progress();
return done && !needsFurtherProgress;
} finally {
lock.unlock();
}
}
return true;
}
/** Queues a message from src for broadcast; returns false when queues are full. */
@Override
public boolean send(int src, Object message, int flags) {
RoutingParameters routingParameters = sendRoutingParameters(src, 0);
return delegete.sendMessage(src, message, 0, flags, routingParameters);
}
/** Same as {@link #send(int, Object, int)} but with an explicit target id. */
@Override
public boolean send(int src, Object message, int flags, int target) {
RoutingParameters routingParameters = sendRoutingParameters(src, 0);
return delegete.sendMessage(src, message, target, flags, routingParameters);
}
@Override
public boolean sendPartial(int src, Object message, int flags, int target) {
return false;
}
/**
 * Advances the channel delegate and, when the lock is free, the final
 * receiver. Returns true while the receiver still needs progress calls.
 */
@Override
public boolean progress() {
boolean partialNeedsProgress = false;
try {
delegete.progress();
if (lock.tryLock()) {
try {
partialNeedsProgress = finalReceiver.progress();
} finally {
lock.unlock();
}
}
} catch (Throwable t) {
LOG.log(Level.SEVERE, "un-expected error", t);
throw new RuntimeException(t);
}
return partialNeedsProgress;
}
/**
 * Re-queues a message received from upstream so it continues down the
 * broadcast tree from this executor's main task. Returns false when the
 * send queue is full (caller must retry).
 */
public boolean passMessageDownstream(Object object, ChannelMessage currentMessage) {
int src = router.mainTaskOfExecutor(instancePlan.getThisExecutor(),
DataFlowContext.DEFAULT_DESTINATION);
RoutingParameters routingParameters;
if (routingParametersCache.containsKey(src)) {
routingParameters = routingParametersCache.get(src);
} else {
routingParameters = sendRoutingParameters(src, DataFlowContext.DEFAULT_DESTINATION);
}
ArrayBlockingQueue<Pair<Object, OutMessage>> pendingSendMessages =
pendingSendMessagesPerSource.get(src);
ChannelMessage channelMessage = new ChannelMessage(src, type,
MessageDirection.OUT, delegete);
// create a send message to keep track of the serialization
// at the intial stage the sub-edge is 0
int di = -1;
if (routingParameters.getExternalRoutes().size() > 0) {
di = routingParameters.getDestinationId();
}
OutMessage sendMessage = new OutMessage(src, channelMessage,
currentMessage.getHeader().getEdge(),
di, DataFlowContext.DEFAULT_DESTINATION, currentMessage.getHeader().getFlags(),
routingParameters.getInternalRoutes(),
routingParameters.getExternalRoutes());
// now try to put this into pending
return pendingSendMessages.offer(
new ImmutablePair<Object, OutMessage>(object, sendMessage));
}
/**
 * Computes (or returns the cached) internal and external routes for a
 * sending task. Internal routes target tasks on this worker, external routes
 * target other workers.
 */
private RoutingParameters sendRoutingParameters(int s, int path) {
if (routingParametersCache.containsKey(s)) {
return routingParametersCache.get(s);
} else {
RoutingParameters routingParameters = new RoutingParameters();
// get the expected routes
Map<Integer, Set<Integer>> internalRouting = router.getInternalSendTasks(source);
if (internalRouting == null) {
throw new RuntimeException("Un-expected message from source: " + s);
}
Set<Integer> internalSourceRouting = internalRouting.get(s);
if (internalSourceRouting != null) {
// we always use path 0 because only one path
routingParameters.addInternalRoutes(internalSourceRouting);
} else {
LOG.info(String.format("%d No internal routes for source %d", executor, s));
}
// get the expected routes
Map<Integer, Set<Integer>> externalRouting = router.getExternalSendTasks(s);
if (externalRouting == null) {
throw new RuntimeException("Un-expected message from source: " + s);
}
Set<Integer> externalSourceRouting = externalRouting.get(s);
if (externalSourceRouting != null) {
routingParameters.addExternalRoutes(externalSourceRouting);
}
return routingParameters;
}
}
/** Delivers a message sent between tasks on the same worker. */
@Override
public boolean receiveSendInternally(int src, int target, int path, int flags, Object message) {
return finalReceiver.onMessage(src, path, target, flags, message);
}
protected Set<Integer> receivingExecutors() {
return router.receivingExecutors();
}
/** @return for each local target task, the list of task ids it expects messages from */
public Map<Integer, List<Integer>> receiveExpectedTaskIds() {
return router.receiveExpectedTaskIds();
}
protected boolean isLast(int src, int path, int taskIdentifier) {
return false;
}
/** Every receiving task of a broadcast is a last receiver. */
protected boolean isLastReceiver() {
return true;
}
}
|
3e1e527db05e4666b46570dd0cef19c67d00e7ed | 2,538 | java | Java | ABAP Quick Fix Plugin/src/com/abapblog/adt/quickfix/assist/comments/TranslateCommentToEnglish.java | lc-leuc/ABAPQuickFix | deee5fad8cf2b413d454f0e49a8386139a875b09 | [
"MIT"
] | 20 | 2020-04-25T23:43:19.000Z | 2022-03-28T10:16:36.000Z | ABAP Quick Fix Plugin/src/com/abapblog/adt/quickfix/assist/comments/TranslateCommentToEnglish.java | lc-leuc/ABAPQuickFix | deee5fad8cf2b413d454f0e49a8386139a875b09 | [
"MIT"
] | 17 | 2019-06-14T10:51:04.000Z | 2021-12-20T09:36:20.000Z | ABAP Quick Fix Plugin/src/com/abapblog/adt/quickfix/assist/comments/TranslateCommentToEnglish.java | lc-leuc/ABAPQuickFix | deee5fad8cf2b413d454f0e49a8386139a875b09 | [
"MIT"
] | 8 | 2019-01-13T12:36:37.000Z | 2021-10-02T12:53:25.000Z | 32.538462 | 101 | 0.756501 | 12,834 | package com.abapblog.adt.quickfix.assist.comments;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.text.contentassist.CompletionProposal;
import org.eclipse.jface.text.contentassist.ICompletionProposal;
import org.eclipse.jface.text.quickassist.IQuickAssistInvocationContext;
import org.eclipse.jface.text.quickassist.IQuickAssistProcessor;
import org.eclipse.jface.text.source.Annotation;
import org.eclipse.swt.graphics.Image;
import com.abapblog.adt.quickfix.Activator;
import com.abapblog.adt.quickfix.preferences.PreferenceConstants;
/**
 * Quick-assist processor that offers to replace a selected ABAP comment with
 * its English translation produced by {@code Translator}.
 */
public class TranslateCommentToEnglish implements IQuickAssistProcessor {
    /** Parser used to detect whether the current selection contains a comment. */
    AbapQuickFixRemoveCommentsCodeParser commentParser;

    /**
     * The assist applies when the feature is enabled in the preferences and the
     * selected range contains at least one comment.
     */
    @Override
    public boolean canAssist(IQuickAssistInvocationContext context) {
        if (checkQuickFixAllowed()) {
            commentParser = new AbapQuickFixRemoveCommentsCodeParser();
            String sourceCode = context.getSourceViewer().getDocument().get();
            // Point.x is the selection offset, Point.y its length.
            int length = context.getSourceViewer().getSelectedRange().y;
            int offset = context.getSourceViewer().getSelectedRange().x;
            return commentParser.haveComment(sourceCode, offset, offset + length);
        }
        return false;
    }

    /** This processor only handles selection-based assists, not annotations. */
    @Override
    public boolean canFix(Annotation arg0) {
        return false;
    }

    /**
     * Builds a single proposal replacing the selected text with its English
     * translation. Returns {@code null} when the assist does not apply or the
     * translation service fails.
     */
    @Override
    public ICompletionProposal[] computeQuickAssistProposals(IQuickAssistInvocationContext context) {
        List<ICompletionProposal> proposals = new ArrayList<>();
        if (canAssist(context)) {
            int length = context.getSourceViewer().getSelectedRange().y;
            int offset = context.getSourceViewer().getSelectedRange().x;
            String sourceCode = context.getSourceViewer().getDocument().get();
            Image image = null;
            try {
                String translatedText = Translator.main(sourceCode.substring(offset, offset + length));
                CompletionProposal cPropSelectedComments = new CompletionProposal(
                        translatedText, offset, length, 0, image,
                        "Translate selection to English", null,
                        translatedText);
                proposals.add(cPropSelectedComments);
                // Size the array from the list instead of a hard-coded length.
                return proposals.toArray(new ICompletionProposal[0]);
            } catch (IOException e) {
                // Translation is best effort; fall through to "no proposals".
                e.printStackTrace();
            }
        }
        return null;
    }

    @Override
    public String getErrorMessage() {
        return null;
    }

    /** @return true when the translate-to-English quick fix is enabled in preferences. */
    private boolean checkQuickFixAllowed() {
        return Activator.getDefault().getPreferenceStore().getBoolean(PreferenceConstants.P_TCTE_ALLOWED);
    }
}
|
3e1e528df204135be49ea37fa5b511a2334566cf | 3,056 | java | Java | src/in/dreamlab/wicm/algorithms/icm_luds/UByteREACH_D.java | animeshbaranawal/wicm | 95547396e1de21f2afa8fe9088ad91d3ad60194c | [
"Apache-2.0"
] | null | null | null | src/in/dreamlab/wicm/algorithms/icm_luds/UByteREACH_D.java | animeshbaranawal/wicm | 95547396e1de21f2afa8fe9088ad91d3ad60194c | [
"Apache-2.0"
] | null | null | null | src/in/dreamlab/wicm/algorithms/icm_luds/UByteREACH_D.java | animeshbaranawal/wicm | 95547396e1de21f2afa8fe9088ad91d3ad60194c | [
"Apache-2.0"
] | null | null | null | 48.507937 | 193 | 0.76178 | 12,835 | package in.dreamlab.wicm.algorithms.icm_luds;
import in.dreamlab.graphite.graph.IntervalVertex;
import in.dreamlab.graphite.types.Interval;
import in.dreamlab.wicm.comm.messages.UByteBooleanStartSlimMessage;
import in.dreamlab.wicm.graph.computation.DebugBlockWarpBasicIntervalComputation;
import in.dreamlab.wicm.graphData.UByteBooleanIntervalData;
import in.dreamlab.wicm.types.UnsignedByte;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.giraph.conf.IntConfOption;
import org.apache.giraph.edge.Edge;
import org.apache.hadoop.io.IntWritable;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
/**
 * Temporal reachability with travel time 1 over unsigned-byte intervals: a
 * vertex becomes reachable once a message from an already-reachable neighbor
 * arrives, and then notifies its neighbors for the next time point.
 */
public class UByteREACH_D extends
DebugBlockWarpBasicIntervalComputation<IntWritable, UnsignedByte, Boolean, UByteBooleanIntervalData, Boolean, UByteBooleanIntervalData, Boolean, Boolean, UByteBooleanStartSlimMessage> {
    /** Configurable id of the vertex the reachability search starts from. */
    public static final IntConfOption SOURCE_ID = new IntConfOption("sourceId", 1, "Reachability Source Vertex ID");
    /** Time taken to traverse any edge. */
    public static final int travelTime = 1;

    /**
     * Clears previous state and marks the whole lifespan reachable only for
     * the configured source vertex; returns whether this vertex is the source.
     */
    @Override
    public boolean init(
            IntervalVertex<IntWritable, UnsignedByte, Boolean, UByteBooleanIntervalData, Boolean, UByteBooleanIntervalData, Boolean, Boolean, UByteBooleanStartSlimMessage> intervalVertex) {
        intervalVertex.getValue().getPropertyMap().clear();
        boolean isSource = intervalVertex.getId().get() == SOURCE_ID.get(getConf());
        intervalVertex.setState(intervalVertex.getLifespan(), isSource);
        return isSource;
    }

    /**
     * Adopts the candidate reachability state for an interval that was not yet
     * reachable; already-reachable intervals produce no change.
     */
    @Override
    public Collection<Pair<Interval<UnsignedByte>, Boolean>> compute(
            IntervalVertex<IntWritable, UnsignedByte, Boolean, UByteBooleanIntervalData, Boolean, UByteBooleanIntervalData, Boolean, Boolean, UByteBooleanStartSlimMessage> intervalVertex,
            Interval<UnsignedByte> interval, Boolean currentReachabilityState, Boolean candidateReachabilityState) throws IOException {
        if (currentReachabilityState) {
            // Already reachable: nothing new to propagate for this interval.
            return Collections.emptySet();
        }
        intervalVertex.setState(interval, candidateReachabilityState);
        return Collections.singleton(new ImmutablePair<>(interval, candidateReachabilityState));
    }

    /**
     * Emits a "reachable" message for the neighbor, timestamped at the start
     * of this interval plus the edge travel time.
     */
    @Override
    public Iterable<UByteBooleanStartSlimMessage> scatter(
            IntervalVertex<IntWritable, UnsignedByte, Boolean, UByteBooleanIntervalData, Boolean, UByteBooleanIntervalData, Boolean, Boolean, UByteBooleanStartSlimMessage> intervalVertex,
            Edge<IntWritable, UByteBooleanIntervalData> edge, Interval<UnsignedByte> interval, Boolean reachabilityState,
            Boolean nullProperty) {
        int arrivalTime = interval.getStart().intValue() + travelTime;
        return Collections.singleton(new UByteBooleanStartSlimMessage(arrivalTime, true));
    }

    /** No edge property is consulted during scatter. */
    @Override
    protected Character getPropertyLabelForScatter() {
        return null;
    }
}
|
3e1e52f41dc76bd8214c066f5b75e54895dd0f3a | 1,265 | java | Java | graduate-design-user-info-manage-service/src/main/java/fun/liwudi/graduatedesignuserinfomanage/domain/UserTokenInfo.java | Liwu-di/spring-cloud-check-system | b2871d4e0e913fad400220fb0929d9d352f7c679 | [
"Apache-2.0"
] | null | null | null | graduate-design-user-info-manage-service/src/main/java/fun/liwudi/graduatedesignuserinfomanage/domain/UserTokenInfo.java | Liwu-di/spring-cloud-check-system | b2871d4e0e913fad400220fb0929d9d352f7c679 | [
"Apache-2.0"
] | null | null | null | graduate-design-user-info-manage-service/src/main/java/fun/liwudi/graduatedesignuserinfomanage/domain/UserTokenInfo.java | Liwu-di/spring-cloud-check-system | b2871d4e0e913fad400220fb0929d9d352f7c679 | [
"Apache-2.0"
] | null | null | null | 21.440678 | 74 | 0.583399 | 12,836 | package fun.liwudi.graduatedesignuserinfomanage.domain;
import java.util.Objects;

import org.apache.commons.lang.StringUtils;
/**
* @author 李武第
*/
public class UserTokenInfo {
private String userName;
private String passWord;
private String time;
public Boolean isEqual(UserTokenInfo userTokenInfo){
if(StringUtils.equals(this.time,userTokenInfo.getTime())
&& StringUtils.equals(this.userName,userTokenInfo.getUserName())
&& StringUtils.equals(this.passWord,userTokenInfo.getPassWord())){
return true;
}
return false;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getPassWord() {
return passWord;
}
public void setPassWord(String passWord) {
this.passWord = passWord;
}
public String getTime() {
return time;
}
public void setTime(String time) {
this.time = time;
}
@Override
public String toString() {
return "UserTokenInfo{" +
"userName='" + userName + '\'' +
", passWord='" + passWord + '\'' +
", time='" + time + '\'' +
'}';
}
}
|
3e1e5327fce689650c0c0ecc30b38b64572ce53d | 1,048 | java | Java | UnitTest/Junit/src/test/java/exercise18_5/ProductStockTest.java | pluselc/SandBox | b27e02d6f5f8e2ea1d75743007549b3bde49e22c | [
"MIT"
] | null | null | null | UnitTest/Junit/src/test/java/exercise18_5/ProductStockTest.java | pluselc/SandBox | b27e02d6f5f8e2ea1d75743007549b3bde49e22c | [
"MIT"
] | null | null | null | UnitTest/Junit/src/test/java/exercise18_5/ProductStockTest.java | pluselc/SandBox | b27e02d6f5f8e2ea1d75743007549b3bde49e22c | [
"MIT"
] | null | null | null | 26.2 | 80 | 0.696565 | 12,837 | package exercise18_5;
import static exercise18_5.ProductStockTestHelper.*;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import java.lang.reflect.Field;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import exercise18_5.Product;
import exercise18_5.ProductStock;
public class ProductStockTest {

    /** Object under test, recreated before every test case. */
    private ProductStock sut;

    /** Reflective handle to ProductStock's private {@code productMap} field. */
    private Field field;

    @Before
    public void setUp() throws Exception {
        sut = new ProductStock();
        field = ProductStock.class.getDeclaredField("productMap");
        // The map is private, so open it up for verification.
        field.setAccessible(true);
    }

    @Test
    public void addで商品が追加される() throws Exception {
        // Exercise: register one product.
        sut.add(new Product("dummy1", 100));

        // Verify: the product is present in the internal map with the
        // expected name and price.
        @SuppressWarnings("unchecked")
        Map<String, Product> registered = (Map<String, Product>) field.get(sut);
        Product actual = registered.get("dummy1");
        Product expected = Productの生成_dummy1();

        assertThat(actual.name, is(expected.name));
        assertThat(actual.price, is(expected.price));
    }
}
|
3e1e5596b9f89541c2572f375cf1cbf7473864be | 1,014 | java | Java | _src/Chapter02/math/RandomNumberGenerator.java | paullewallencom/neural-java-978-1-7871-2605-3 | bd8a9bbfda361373ddcf8a7a368f83db333395e1 | [
"Apache-2.0"
] | 53 | 2017-04-02T05:27:26.000Z | 2021-11-08T13:13:26.000Z | _src/Chapter02/math/RandomNumberGenerator.java | paullewallencom/neural-java-978-1-7871-2605-3 | bd8a9bbfda361373ddcf8a7a368f83db333395e1 | [
"Apache-2.0"
] | 1 | 2018-07-02T09:24:18.000Z | 2018-07-02T09:24:18.000Z | _src/Chapter02/math/RandomNumberGenerator.java | paullewallencom/neural-java-978-1-7871-2605-3 | bd8a9bbfda361373ddcf8a7a368f83db333395e1 | [
"Apache-2.0"
] | 58 | 2017-04-09T16:57:21.000Z | 2022-02-06T11:52:01.000Z | 23.045455 | 80 | 0.621302 | 12,838 | package edu.packt.neuralnet.math;
import java.util.Random;
/**
*
* RandomNumberGenerator
* This class generates double precision random numbers according to a seed. It
* is used in weights initialization, for example.
*
* @author Alan de Souza, Fábio Soares
* @version 0.1
*/
public class RandomNumberGenerator {

    /**
     * Seed that is used for random number generation.
     */
    public static long seed = 0;

    /**
     * Random singleton object that actually generates the random numbers.
     */
    public static Random r;

    /**
     * GenerateNext
     * Static method that returns a newly generated random number, lazily
     * creating the underlying {@link Random} instance (seeded with
     * {@link #seed}) on first use.
     *
     * @return a uniformly distributed double in [0.0, 1.0)
     */
    public static double GenerateNext() {
        if (r == null)
            r = new Random(seed);
        return r.nextDouble();
    }

    /**
     * setSeed
     * Sets a new seed for the random generator and re-seeds the underlying
     * {@link Random} instance (creating it if it does not exist yet).
     *
     * @param seed new seed for random generator
     */
    public static void setSeed(long seed) {
        // Bug fix: the original "seed = seed;" was a self-assignment of the
        // parameter, so the static field was never updated. Qualify with the
        // class name to reach the shadowed field.
        RandomNumberGenerator.seed = seed;
        if (r == null) {
            // Bug fix: calling r.setSeed(seed) before the first GenerateNext()
            // threw a NullPointerException because r was still null.
            r = new Random(seed);
        } else {
            r.setSeed(seed);
        }
    }
}
|
3e1e55b75bd040c2de83119b2a3098ca4df362dd | 932 | java | Java | FindRestaurent/src/com/maya/findrestaurent/SplashActivity.java | jarinj/excercise | 496271e2b9977ef31a6c09038c7a3628aa80dfbd | [
"Apache-2.0"
] | null | null | null | FindRestaurent/src/com/maya/findrestaurent/SplashActivity.java | jarinj/excercise | 496271e2b9977ef31a6c09038c7a3628aa80dfbd | [
"Apache-2.0"
] | null | null | null | FindRestaurent/src/com/maya/findrestaurent/SplashActivity.java | jarinj/excercise | 496271e2b9977ef31a6c09038c7a3628aa80dfbd | [
"Apache-2.0"
] | null | null | null | 22.190476 | 70 | 0.701717 | 12,839 | package com.maya.findrestaurent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
/**
 * Splash screen shown for two seconds before navigating to {@link HomeActivity}.
 */
public class SplashActivity extends ActionBarActivity {

    // App settings; loaded here so they are warm before the home screen opens.
    // NOTE(review): this field is not read anywhere in this class — confirm it
    // is used by a subclass or can be removed.
    SharedPreferences prefs;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.splash);

        prefs = this.getSharedPreferences("settings", Context.MODE_PRIVATE);

        /****** Create Thread that will sleep for 2 seconds *************/
        Thread background = new Thread() {
            public void run() {
                try {
                    // Thread will sleep for 2 seconds
                    sleep(2 * 1000);

                    Intent i = new Intent(getBaseContext(), HomeActivity.class);
                    startActivity(i);

                    // Remove this activity so Back does not return to the splash.
                    finish();
                } catch (InterruptedException e) {
                    // Bug fix: the original empty catch (Exception) silently
                    // swallowed the interrupt. Restore the interrupted status
                    // so the interruption remains observable; other failures
                    // now surface instead of leaving a stuck splash screen.
                    Thread.currentThread().interrupt();
                }
            }
        };

        // start thread
        background.start();
    }
}
|
3e1e55c4b193bdbff7f2b118e2b74afff1ec6e06 | 788 | java | Java | src/main/java/com/alipay/api/domain/AlipayOpenPublicLabelUserCreateModel.java | alipay/alipay-sdk-java-all | e87bc8e7f6750e168a5f9d37221124c085d1e3c1 | [
"Apache-2.0"
] | 333 | 2018-08-28T09:26:55.000Z | 2022-03-31T07:26:42.000Z | src/main/java/com/alipay/api/domain/AlipayOpenPublicLabelUserCreateModel.java | alipay/alipay-sdk-java-all | e87bc8e7f6750e168a5f9d37221124c085d1e3c1 | [
"Apache-2.0"
] | 46 | 2018-09-27T03:52:42.000Z | 2021-08-10T07:54:57.000Z | src/main/java/com/alipay/api/domain/AlipayOpenPublicLabelUserCreateModel.java | alipay/alipay-sdk-java-all | e87bc8e7f6750e168a5f9d37221124c085d1e3c1 | [
"Apache-2.0"
] | 158 | 2018-12-07T17:03:43.000Z | 2022-03-17T09:32:43.000Z | 18.325581 | 73 | 0.677665 | 12,840 | package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
* 用户增加标签接口
*
* @author auto create
* @since 1.0, 2021-06-23 14:43:33
*/
public class AlipayOpenPublicLabelUserCreateModel extends AlipayObject {

	private static final long serialVersionUID = 7238424978933324768L;

	/**
	 * Id of the label to bind to the user.
	 */
	@ApiField("label_id")
	private Long labelId;

	/**
	 * Alipay user id: a 16-character string starting with "2088".
	 */
	@ApiField("user_id")
	private String userId;

	public Long getLabelId() {
		return this.labelId;
	}

	public void setLabelId(Long labelId) {
		this.labelId = labelId;
	}

	public String getUserId() {
		return this.userId;
	}

	public void setUserId(String userId) {
		this.userId = userId;
	}
}
|
3e1e55fa0a66fe05cb7199911c6b9fafa21a5b66 | 4,426 | java | Java | src/main/java/org/w3c/css/properties/css3/CssUserModify.java | lacostej/css-validator | 76f3948f0e96aab03dff49348eb05e8c218ee062 | [
"W3C-19980720"
] | 2 | 2017-09-09T16:38:11.000Z | 2017-11-02T14:23:44.000Z | src/main/java/org/w3c/css/properties/css3/CssUserModify.java | lacostej/css-validator | 76f3948f0e96aab03dff49348eb05e8c218ee062 | [
"W3C-19980720"
] | null | null | null | src/main/java/org/w3c/css/properties/css3/CssUserModify.java | lacostej/css-validator | 76f3948f0e96aab03dff49348eb05e8c218ee062 | [
"W3C-19980720"
] | null | null | null | 27.012195 | 83 | 0.66456 | 12,841 | //
// $Id: CssUserModify.java,v 1.3 2010-01-05 13:49:56 ylafon Exp $
// From Sijtsche de Jong (kenaa@example.com)
//
// (c) COPYRIGHT 1995-2000 World Wide Web Consortium (MIT, INRIA, Keio University)
// Please first read the full copyright statement at
// http://www.w3.org/Consortium/Legal/copyright-software-19980720
package org.w3c.css.properties.css3;
import org.w3c.css.parser.CssStyle;
import org.w3c.css.properties.css.CssProperty;
import org.w3c.css.util.ApplContext;
import org.w3c.css.util.InvalidParamException;
import org.w3c.css.values.CssExpression;
import org.w3c.css.values.CssIdent;
import org.w3c.css.values.CssValue;
/**
* <P>
* <EM>Value:</EM> read-only || read-write || write-only || inherit<BR>
* <EM>Initial:</EM>read-only<BR>
* <EM>Applies to:</EM>all elements<BR>
* <EM>Inherited:</EM>yes<BR>
* <EM>Percentages:</EM>no<BR>
* <EM>Media:</EM>:interactive
* <P>
* The purpose of this property is to allow finer control over which user
* interface elements are user modifiable. Input elements can be pre-filled
* in, with the user-modify property set to read-only so that the user cannot
* change them. This is useful for templates, or for update forms. The user
* can still activate a read-only element and copy content out of
* it, if that is appropriate for the content model of the element.
* This is different from making the element "user-input:disabled" because
* that would prevent the user from activating the element.
*/
public class CssUserModify extends CssProperty {

    /** Current value of the property: one of the idents below, or inherit. */
    CssValue um;

    // The three keywords accepted by 'user-modify'. They are constants, so
    // mark them final; identity comparison against them is used in isDefault().
    static final CssIdent readonly = new CssIdent("read-only");
    static final CssIdent readwrite = new CssIdent("read-write");
    static final CssIdent writeonly = new CssIdent("write-only");

    /**
     * Create a new CssUserModify with the initial value (read-only).
     */
    public CssUserModify() {
        um = readonly;
    }

    /**
     * Create a new CssUserModify from a parsed expression.
     *
     * @param ac the validation context
     * @param expression the expression for this property; the first value must
     *        be one of read-only | read-write | write-only | inherit
     * @param check unused here; kept for signature uniformity with other
     *        property classes
     * @exception InvalidParamException Incorrect value
     */
    public CssUserModify(ApplContext ac, CssExpression expression,
                         boolean check) throws InvalidParamException {
        setByUser();
        CssValue val = expression.getValue();

        if (val.equals(readonly)) {
            um = readonly;
        } else if (val.equals(readwrite)) {
            um = readwrite;
        } else if (val.equals(writeonly)) {
            um = writeonly;
        } else if (val.equals(inherit)) {
            um = inherit;
        } else {
            throw new InvalidParamException("value", expression.getValue(),
                                            getPropertyName(), ac);
        }
        // Every valid branch consumes exactly one value, so advance once here
        // instead of repeating expression.next() in each branch.
        expression.next();
    }

    public CssUserModify(ApplContext ac, CssExpression expression)
            throws InvalidParamException {
        this(ac, expression, false);
    }

    /**
     * Add this property to the CssStyle
     *
     * @param style The CssStyle
     */
    public void addToStyle(ApplContext ac, CssStyle style) {
        // Warn on duplicate declarations of this property in the same style.
        if (((Css3Style) style).cssUserModify != null)
            style.addRedefinitionWarning(ac, this);
        ((Css3Style) style).cssUserModify = this;
    }

    /**
     * Get this property in the style.
     *
     * @param style The style where the property is
     * @param resolve if true, resolve the style to find this property
     */
    public CssProperty getPropertyInStyle(CssStyle style, boolean resolve) {
        if (resolve) {
            return ((Css3Style) style).getUserModify();
        } else {
            return ((Css3Style) style).cssUserModify;
        }
    }

    /**
     * Compares two properties for equality.
     *
     * @param property The other property.
     */
    public boolean equals(CssProperty property) {
        return (property instanceof CssUserModify &&
                um.equals(((CssUserModify) property).um));
    }

    /**
     * Returns the name of this property
     */
    public String getPropertyName() {
        return "user-modify";
    }

    /**
     * Returns the value of this property
     */
    public Object get() {
        return um;
    }

    /**
     * Returns true if this property is "softly" inherited
     */
    public boolean isSoftlyInherited() {
        return um.equals(inherit);
    }

    /**
     * Returns a string representation of the object
     */
    public String toString() {
        return um.toString();
    }

    /**
     * Is the value of this property a default value.
     * It is used by all macros for the function <code>print</code>.
     */
    public boolean isDefault() {
        // Identity comparison is intentional: the default is the shared
        // 'readonly' constant assigned by the no-arg constructor.
        return um == readonly;
    }
}
|
3e1e5665daaefe6218ff460debf6fddd979f3d0d | 7,678 | java | Java | src/main/java/org/sinmetal/WordCount.java | sinmetal/irontiger | ecd227d1ca7cf094b46f44fafa4dda46e5485d50 | [
"MIT"
] | null | null | null | src/main/java/org/sinmetal/WordCount.java | sinmetal/irontiger | ecd227d1ca7cf094b46f44fafa4dda46e5485d50 | [
"MIT"
] | null | null | null | src/main/java/org/sinmetal/WordCount.java | sinmetal/irontiger | ecd227d1ca7cf094b46f44fafa4dda46e5485d50 | [
"MIT"
] | null | null | null | 37.453659 | 100 | 0.710081 | 12,842 | /*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.sinmetal;
import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.io.TextIO;
import com.google.cloud.dataflow.sdk.options.DataflowPipelineOptions;
import com.google.cloud.dataflow.sdk.options.Default;
import com.google.cloud.dataflow.sdk.options.DefaultValueFactory;
import com.google.cloud.dataflow.sdk.options.Description;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.transforms.Aggregator;
import com.google.cloud.dataflow.sdk.transforms.Count;
import com.google.cloud.dataflow.sdk.transforms.DoFn;
import com.google.cloud.dataflow.sdk.transforms.PTransform;
import com.google.cloud.dataflow.sdk.transforms.ParDo;
import com.google.cloud.dataflow.sdk.transforms.Sum;
import com.google.cloud.dataflow.sdk.util.gcsfs.GcsPath;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.PCollection;
/**
* An example that counts words in Shakespeare and includes Dataflow best practices.
*
* <p>This class, {@link WordCount}, is the second in a series of four successively more detailed
* 'word count' examples. You may first want to take a look at {@link MinimalWordCount}.
* After you've looked at this example, then see the {@link DebuggingWordCount}
* pipeline, for introduction of additional concepts.
*
* <p>For a detailed walkthrough of this example, see
* <a href="https://cloud.google.com/dataflow/java-sdk/wordcount-example">
* https://cloud.google.com/dataflow/java-sdk/wordcount-example
* </a>
*
* <p>Basic concepts, also in the MinimalWordCount example:
* Reading text files; counting a PCollection; writing to GCS.
*
* <p>New Concepts:
* <pre>
* 1. Executing a Pipeline both locally and using the Dataflow service
* 2. Using ParDo with static DoFns defined out-of-line
* 3. Building a composite transform
* 4. Defining your own pipeline options
* </pre>
*
* <p>Concept #1: you can execute this pipeline either locally or using the Dataflow service.
* These are now command-line options and not hard-coded as they were in the MinimalWordCount
* example.
* To execute this pipeline locally, specify general pipeline configuration:
* <pre>{@code
* --project=YOUR_PROJECT_ID
* }
* </pre>
* and a local output file or output prefix on GCS:
* <pre>{@code
* --output=[YOUR_LOCAL_FILE | gs://YOUR_OUTPUT_PREFIX]
* }</pre>
*
* <p>To execute this pipeline using the Dataflow service, specify pipeline configuration:
* <pre>{@code
* --project=YOUR_PROJECT_ID
* --stagingLocation=gs://YOUR_STAGING_DIRECTORY
* --runner=BlockingDataflowPipelineRunner
* }
* </pre>
* and an output prefix on GCS:
* <pre>{@code
* --output=gs://YOUR_OUTPUT_PREFIX
* }</pre>
*
* <p>The input file defaults to {@code gs://dataflow-samples/shakespeare/kinglear.txt} and can be
* overridden with {@code --inputFile}.
*/
public class WordCount {

  /**
   * Concept #2: You can make your pipeline code less verbose by defining your DoFns statically out-
   * of-line. This DoFn tokenizes lines of text into individual words; we pass it to a ParDo in the
   * pipeline.
   */
  static class ExtractWordsFn extends DoFn<String, String> {
    // Counts input lines that are empty/whitespace-only; surfaced as a custom
    // aggregator in the Dataflow monitoring UI.
    private final Aggregator<Long, Long> emptyLines =
        createAggregator("emptyLines", new Sum.SumLongFn());

    @Override
    public void processElement(ProcessContext c) {
      if (c.element().trim().isEmpty()) {
        emptyLines.addValue(1L);
      }

      // Split the line into words.
      // Anything that is not an ASCII letter or apostrophe is a separator.
      String[] words = c.element().split("[^a-zA-Z']+");

      // Output each word encountered into the output PCollection.
      for (String word : words) {
        if (!word.isEmpty()) {
          c.output(word);
        }
      }
    }
  }

  /** A DoFn that converts a Word and Count into a printable string. */
  public static class FormatAsTextFn extends DoFn<KV<String, Long>, String> {
    @Override
    public void processElement(ProcessContext c) {
      // Emit lines of the form "word: count".
      c.output(c.element().getKey() + ": " + c.element().getValue());
    }
  }

  /**
   * A PTransform that converts a PCollection containing lines of text into a PCollection of
   * formatted word counts.
   *
   * <p>Concept #3: This is a custom composite transform that bundles two transforms (ParDo and
   * Count) as a reusable PTransform subclass. Using composite transforms allows for easy reuse,
   * modular testing, and an improved monitoring experience.
   */
  public static class CountWords extends PTransform<PCollection<String>,
      PCollection<KV<String, Long>>> {
    @Override
    public PCollection<KV<String, Long>> apply(PCollection<String> lines) {

      // Convert lines of text into individual words.
      PCollection<String> words = lines.apply(
          ParDo.of(new ExtractWordsFn()));

      // Count the number of times each word occurs.
      PCollection<KV<String, Long>> wordCounts =
          words.apply(Count.<String>perElement());

      return wordCounts;
    }
  }

  /**
   * Options supported by {@link WordCount}.
   *
   * <p>Concept #4: Defining your own configuration options. Here, you can add your own arguments
   * to be processed by the command-line parser, and specify default values for them. You can then
   * access the options values in your pipeline code.
   *
   * <p>Inherits standard configuration options.
   */
  public interface WordCountOptions extends PipelineOptions {
    @Description("Path of the file to read from")
    @Default.String("gs://dataflow-samples/shakespeare/kinglear.txt")
    String getInputFile();
    void setInputFile(String value);

    @Description("Path of the file to write to")
    @Default.InstanceFactory(OutputFactory.class)
    String getOutput();
    void setOutput(String value);

    /**
     * Returns "gs://${YOUR_STAGING_DIRECTORY}/counts.txt" as the default destination.
     */
    class OutputFactory implements DefaultValueFactory<String> {
      @Override
      public String create(PipelineOptions options) {
        DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class);
        // Derive the default output path from the staging location; without a
        // staging location there is no sensible default, so fail fast.
        if (dataflowOptions.getStagingLocation() != null) {
          return GcsPath.fromUri(dataflowOptions.getStagingLocation())
              .resolve("counts.txt").toString();
        } else {
          throw new IllegalArgumentException("Must specify --output or --stagingLocation");
        }
      }
    }
  }

  public static void main(String[] args) {
    // Parse and validate command-line options into our custom options type.
    WordCountOptions options = PipelineOptionsFactory.fromArgs(args).withValidation()
      .as(WordCountOptions.class);
    Pipeline p = Pipeline.create(options);

    // Concepts #2 and #3: Our pipeline applies the composite CountWords transform, and passes the
    // static FormatAsTextFn() to the ParDo transform.
    p.apply(TextIO.Read.named("ReadLines").from(options.getInputFile()))
     .apply(new CountWords())
     .apply(ParDo.of(new FormatAsTextFn()))
     .apply(TextIO.Write.named("WriteCounts").to(options.getOutput()));

    p.run();
  }
}
|
3e1e56e20a7b7dde3a7c3553eaf8dbf8ecc1d000 | 1,096 | java | Java | openbp-cockpit/src/main/java/org/openbp/cockpit/modeler/figures/generic/Expandable.java | CleanCode1115/openbp | d2d655499247c8ca4b7d63e773235afa5e023b69 | [
"Apache-2.0"
] | null | null | null | openbp-cockpit/src/main/java/org/openbp/cockpit/modeler/figures/generic/Expandable.java | CleanCode1115/openbp | d2d655499247c8ca4b7d63e773235afa5e023b69 | [
"Apache-2.0"
] | null | null | null | openbp-cockpit/src/main/java/org/openbp/cockpit/modeler/figures/generic/Expandable.java | CleanCode1115/openbp | d2d655499247c8ca4b7d63e773235afa5e023b69 | [
"Apache-2.0"
] | null | null | null | 32.235294 | 94 | 0.703467 | 12,843 | /*
* Copyright 2007 skynamics AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openbp.cockpit.modeler.figures.generic;
import java.awt.Rectangle;
/**
* An expandable figure has sub figures that are attached to the figure.
* This interface defines a single method that returns the 'basic' display box of the figure.
*
* @author Stephan Moritz
*/
public interface Expandable
{
/**
* Returns the display box of the figure, excluding any appendices.
* @nowarn
*/
public Rectangle compactDisplayBox();
}
|
3e1e571ff2b2dd65231f1a4d6ff10e20eeecdd48 | 1,072 | java | Java | algorithms/java/src/test/java/org/jessenpan/leetcode/string/S165CompareVersionNumbersTest.java | JessenPan/leetcode-java | 7181adeb0a9f43a3ebb3d95f0ccf51e8ec99e1bf | [
"Apache-2.0"
] | 6 | 2019-05-04T09:02:41.000Z | 2022-01-09T00:26:34.000Z | algorithms/java/src/test/java/org/jessenpan/leetcode/string/S165CompareVersionNumbersTest.java | JessenPan/leetcode-java | 7181adeb0a9f43a3ebb3d95f0ccf51e8ec99e1bf | [
"Apache-2.0"
] | null | null | null | algorithms/java/src/test/java/org/jessenpan/leetcode/string/S165CompareVersionNumbersTest.java | JessenPan/leetcode-java | 7181adeb0a9f43a3ebb3d95f0ccf51e8ec99e1bf | [
"Apache-2.0"
] | 1 | 2019-11-06T08:14:24.000Z | 2019-11-06T08:14:24.000Z | 24.363636 | 94 | 0.647388 | 12,844 | package org.jessenpan.leetcode.string;
import org.junit.Assert;
import org.junit.Test;
/**
* @author jessenpan
*/
public class S165CompareVersionNumbersTest {
private S165CompareVersionNumbers compareVersionNumbers = new S165CompareVersionNumbers();
@Test
public void test1(){
int compared= compareVersionNumbers.compareVersion("0.1", "1.1");
Assert.assertEquals(-1, compared);
}
@Test
public void test2(){
int compared= compareVersionNumbers.compareVersion("1.0.1", "1");
Assert.assertEquals(1, compared);
}
@Test
public void test3(){
int compared= compareVersionNumbers.compareVersion("7.5.2.4", "7.5.3");
Assert.assertEquals(-1, compared);
}
@Test
public void test4(){
int compared= compareVersionNumbers.compareVersion("1.01", "1.001");
Assert.assertEquals(0, compared);
}
@Test
public void test5(){
int compared= compareVersionNumbers.compareVersion("1.0", "1.0.0");
Assert.assertEquals(0, compared);
}
}
|
3e1e57d12f261551c72fdaa2ab53ece58aee5a8a | 3,545 | java | Java | genson/src/main/java/com/owlike/genson/reflect/PropertyAccessor.java | azukovskij89/genson | ed60adc0fae99d6f27f9555a1dc3ebd3b6ee63a4 | [
"Apache-2.0"
] | 213 | 2015-01-01T19:35:30.000Z | 2022-03-16T20:31:36.000Z | genson/src/main/java/com/owlike/genson/reflect/PropertyAccessor.java | azukovskij89/genson | ed60adc0fae99d6f27f9555a1dc3ebd3b6ee63a4 | [
"Apache-2.0"
] | 126 | 2015-01-02T18:17:15.000Z | 2022-01-24T17:41:37.000Z | genson/src/main/java/com/owlike/genson/reflect/PropertyAccessor.java | azukovskij89/genson | ed60adc0fae99d6f27f9555a1dc3ebd3b6ee63a4 | [
"Apache-2.0"
] | 76 | 2015-01-02T12:59:11.000Z | 2022-03-16T19:12:57.000Z | 30.042373 | 115 | 0.685755 | 12,845 | package com.owlike.genson.reflect;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import com.owlike.genson.*;
import com.owlike.genson.stream.JsonWriter;
import com.owlike.genson.stream.ObjectWriter;
/**
 * A bean property used for serialization: knows how to read ("access") the
 * property value from an instance and write it to JSON. Concrete subclasses
 * read the value via a getter method or directly via a field.
 */
public abstract class PropertyAccessor extends BeanProperty implements Comparable<PropertyAccessor> {
  // Serializer for this property's value; expected to be injected after
  // construction (package-visible).
  Serializer<Object> propertySerializer;
  // Property name pre-escaped for JSON, computed once to avoid re-escaping
  // on every serialize call.
  private final char[] escapedName;

  protected PropertyAccessor(String name, Type type, Class<?> declaringClass, Class<?> concreteClass,
                             Annotation[] annotations, int modifiers) {
    super(name, type, declaringClass, concreteClass, annotations, modifiers);
    escapedName = JsonWriter.escapeString(name);
  }

  /**
   * Reads this property from {@code propertySource} and writes
   * "name": value to {@code writer}.
   */
  public void serialize(Object propertySource, ObjectWriter writer, Context ctx) {
    Object propertyValue = access(propertySource);
    writer.writeEscapedName(escapedName);
    try {
      propertySerializer.serialize(propertyValue, writer, ctx);
    } catch (Throwable th) {
      // Wrap anything thrown by the value serializer with property context.
      throw couldNotSerialize(th);
    }
  }

  /** Extracts this property's raw value from the target instance. */
  public abstract Object access(final Object target);

  /**
   * Orders accessors by descending priority (method accessors before field
   * accessors). Priorities are small constants (100/50), so the subtraction
   * cannot overflow.
   */
  public int compareTo(PropertyAccessor o) {
    return o.priority() - priority();
  }

  protected JsonBindingException couldNotAccess(Exception e) {
    return new JsonBindingException("Could not access value of property named '"
      + name + "' using accessor " + signature() + " from class "
      + declaringClass.getName(), e);
  }

  protected JsonBindingException couldNotSerialize(Throwable e) {
    return new JsonBindingException("Could not serialize property '" + name
      + "' from class " + declaringClass.getName(), e);
  }

  /** Accessor backed by a getter {@link Method}. */
  public static class MethodAccessor extends PropertyAccessor {
    protected final Method _getter;

    public MethodAccessor(String name, Method getter, Type type, Class<?> concreteClass) {
      super(name, type, getter.getDeclaringClass(), concreteClass, getter.getAnnotations(), getter.getModifiers());
      this._getter = getter;
      // Open up non-public getters so invoke() does not fail on access checks.
      if (!_getter.isAccessible()) {
        _getter.setAccessible(true);
      }
    }

    @Override
    public Object access(final Object target) {
      try {
        return _getter.invoke(target);
      } catch (IllegalArgumentException e) {
        throw couldNotAccess(e);
      } catch (IllegalAccessException e) {
        throw couldNotAccess(e);
      } catch (InvocationTargetException e) {
        // NOTE(review): the InvocationTargetException itself is wrapped, not
        // its target exception; the underlying cause is one level deeper.
        throw couldNotAccess(e);
      }
    }

    @Override
    String signature() {
      return _getter.toGenericString();
    }

    @Override
    int priority() {
      return 100;
    }
  }

  /** Accessor backed directly by a {@link Field}. */
  public static class FieldAccessor extends PropertyAccessor {
    protected final Field _field;

    public FieldAccessor(String name, Field field, Type type, Class<?> concreteClass) {
      super(name, type, field.getDeclaringClass(), concreteClass, field.getAnnotations(), field.getModifiers());
      this._field = field;
      // Open up non-public fields so get() does not fail on access checks.
      if (!_field.isAccessible()) {
        _field.setAccessible(true);
      }
    }

    @Override
    public Object access(final Object target) {
      try {
        return _field.get(target);
      } catch (IllegalArgumentException e) {
        throw couldNotAccess(e);
      } catch (IllegalAccessException e) {
        throw couldNotAccess(e);
      }
    }

    @Override
    public String signature() {
      return _field.toGenericString();
    }

    @Override
    public int priority() {
      return 50;
    }
  }
}
|
3e1e5a207788ef7980c89580bd20e5d7538d396d | 2,743 | java | Java | src/test/java/com/github/arangobee/dao/ChangeEntryIndexDaoTest.java | 5aboteur/arangobee | 4c2c14b0f76fd9a51cda29bb1232a312a0fccb91 | [
"Apache-2.0"
] | 4 | 2018-07-27T09:20:58.000Z | 2021-03-19T17:32:59.000Z | src/test/java/com/github/arangobee/dao/ChangeEntryIndexDaoTest.java | 5aboteur/arangobee | 4c2c14b0f76fd9a51cda29bb1232a312a0fccb91 | [
"Apache-2.0"
] | 1 | 2020-01-22T10:06:33.000Z | 2020-01-22T10:06:33.000Z | src/test/java/com/github/arangobee/dao/ChangeEntryIndexDaoTest.java | 5aboteur/arangobee | 4c2c14b0f76fd9a51cda29bb1232a312a0fccb91 | [
"Apache-2.0"
] | 3 | 2020-01-18T07:26:05.000Z | 2021-01-03T16:54:09.000Z | 38.633803 | 141 | 0.635436 | 12,846 | package com.github.arangobee.dao;
/**
* @author lstolowski
* @since 10.12.14
*/
public class ChangeEntryIndexDaoTest {

    // @TODO: write test for ArangoDB
    //
    // NOTE(review): the body below is the original MongoDB/Fongo test, kept
    // (commented out) as a porting reference until an ArangoDB equivalent
    // exists. Do not delete until the ArangoDB tests are written.
    //
    //    private static final String TEST_SERVER = "testServer";
    //    private static final String DB_NAME = "mongobeetest";
    //    private static final String CHANGEID_AUTHOR_INDEX_NAME = "changeId_1_author_1";
    //    private static final String CHANGELOG_COLLECTION_NAME = "dbchangelog";
    //
    //    private ChangeEntryIndexDao dao = new ChangeEntryIndexDao(CHANGELOG_COLLECTION_NAME);
    //
    //    @Test
    //    public void shouldCreateRequiredUniqueIndex() {
    //        // given
    //        MongoClient mongo = mock(MongoClient.class);
    //        MongoDatabase db = new Fongo(TEST_SERVER).getDatabase(DB_NAME);
    //        when(mongo.getDatabase(Mockito.anyString())).thenReturn(db);
    //
    //        // when
    //        dao.createRequiredUniqueIndex(db.getCollection(CHANGELOG_COLLECTION_NAME));
    //
    //        // then
    //        Document createdIndex = findIndex(db, CHANGEID_AUTHOR_INDEX_NAME);
    //        assertNotNull(createdIndex);
    //        assertTrue(dao.isUnique(createdIndex));
    //    }
    //
    //    @Test
    //    @Ignore("Fongo has not implemented dropIndex for MongoCollection object (issue with mongo driver 3.x)")
    //    public void shouldDropWrongIndex() {
    //        // init
    //        MongoClient mongo = mock(MongoClient.class);
    //        MongoDatabase db = new Fongo(TEST_SERVER).getDatabase(DB_NAME);
    //        when(mongo.getDatabase(Mockito.anyString())).thenReturn(db);
    //
    //        MongoCollection<Document> collection = db.getCollection(CHANGELOG_COLLECTION_NAME);
    //        collection.createIndex(new Document()
    //                .append(ChangeEntry.KEY_CHANGEID, 1)
    //                .append(ChangeEntry.KEY_AUTHOR, 1));
    //        Document index = new Document("name", CHANGEID_AUTHOR_INDEX_NAME);
    //
    //        // given
    //        Document createdIndex = findIndex(db, CHANGEID_AUTHOR_INDEX_NAME);
    //        assertNotNull(createdIndex);
    //        assertFalse(dao.isUnique(createdIndex));
    //
    //        // when
    //        dao.dropIndex(db.getCollection(CHANGELOG_COLLECTION_NAME), index);
    //
    //        // then
    //        assertNull(findIndex(db, CHANGEID_AUTHOR_INDEX_NAME));
    //    }
    //
    //    private Document findIndex(MongoDatabase db, String indexName) {
    //
    //        for (MongoCursor<Document> iterator = db.getCollection(CHANGELOG_COLLECTION_NAME).listIndexes().iterator(); iterator.hasNext(); ) {
    //            Document index = iterator.next();
    //            String name = (String) index.get("name");
    //            if (indexName.equals(name)) {
    //                return index;
    //            }
    //        }
    //        return null;
    //    }
}
|
3e1e5a7c98bf5534e8664027dcb4b4801553c1aa | 2,040 | java | Java | gosu-xml/src/main/java/gw/internal/schema/gw/xsd/w3c/soap12/enums/TStyleChoice.java | vakuum/gosu-lang-old | 48c598458abd412aa9f2d21b8088120e8aa9de00 | [
"Apache-2.0"
] | 1 | 2019-09-11T14:08:50.000Z | 2019-09-11T14:08:50.000Z | gosu-xml/src/main/java/gw/internal/schema/gw/xsd/w3c/soap12/enums/TStyleChoice.java | gosu-lang/old-gosu-repo | 48c598458abd412aa9f2d21b8088120e8aa9de00 | [
"Apache-2.0"
] | null | null | null | gosu-xml/src/main/java/gw/internal/schema/gw/xsd/w3c/soap12/enums/TStyleChoice.java | gosu-lang/old-gosu-repo | 48c598458abd412aa9f2d21b8088120e8aa9de00 | [
"Apache-2.0"
] | 2 | 2019-06-11T04:34:07.000Z | 2020-01-21T02:58:10.000Z | 30.447761 | 194 | 0.642647 | 12,847 | package gw.internal.schema.gw.xsd.w3c.soap12.enums;
/***************************************************************************/
/* THIS IS AUTOGENERATED CODE - DO NOT MODIFY OR YOUR CHANGES WILL BE LOST */
/* THIS CODE CAN BE REGENERATED USING 'xsd-codegen' */
/***************************************************************************/
public enum TStyleChoice implements gw.lang.reflect.gs.IGosuObject, gw.lang.reflect.IEnumValue, gw.xml.IXmlSchemaEnumValue, gw.internal.xml.IXmlGeneratedClass {

  // Generated from the SOAP 1.2 WSDL schema's tStyleChoice enumeration; per
  // the file header, edit the schema/generator, not this file.
  Rpc( "rpc" ), Document( "document" );

  // Lazily resolved Gosu type handle for this enum, guarded by the global
  // type-system lock.
  public static final gw.util.concurrent.LockingLazyVar<gw.lang.reflect.IType> TYPE = new gw.util.concurrent.LockingLazyVar<gw.lang.reflect.IType>( gw.lang.reflect.TypeSystem.getGlobalLock() ) {
    @Override
    protected gw.lang.reflect.IType init() {
      return gw.lang.reflect.TypeSystem.getByFullName( "gw.xsd.w3c.soap12.enums.TStyleChoice" );
    }
  };

  // Wire-format value as it appears in XML documents (e.g. "rpc").
  private final java.lang.String _serializedValue;

  private TStyleChoice( java.lang.String serializedValue ) {
    _serializedValue = serializedValue;
  }

  @Override
  public gw.lang.reflect.IType getIntrinsicType() {
    return TYPE.get();
  }

  // Returns the XML wire value, not the Java enum name.
  @Override
  public java.lang.String toString() {
    return _serializedValue;
  }

  @Override
  public java.lang.Object getValue() {
    return this;
  }

  @Override
  public java.lang.String getCode() {
    return name();
  }

  @Override
  public int getOrdinal() {
    return ordinal();
  }

  @Override
  public java.lang.String getDisplayName() {
    return name();
  }

  // Gosu-facing accessors resolved reflectively through the type system.
  public java.lang.String getGosuValue() {
    return (java.lang.String) TYPE.get().getTypeInfo().getProperty( "GosuValue" ).getAccessor().getValue( this );
  }

  public java.lang.String getSerializedValue() {
    return (java.lang.String) TYPE.get().getTypeInfo().getProperty( "SerializedValue" ).getAccessor().getValue( this );
  }

  // Generator fingerprint used to detect stale generated classes.
  @SuppressWarnings( {"UnusedDeclaration"} )
  private static final long FINGERPRINT = 2359489450031053463L;
}
|
3e1e5a9bb670da729555d359c34e51854b05f2ab | 214 | java | Java | steps/Exercices/exercice-3/src/test/java/com/sfeir/exercice3/Exercice3ApplicationTests.java | Schultz-Thomas/sfeir-school-spring-security | d29b9a7561021dfdbe7c7df6b70c932d6ae9dff6 | [
"Apache-2.0"
] | 1 | 2021-11-20T17:10:35.000Z | 2021-11-20T17:10:35.000Z | steps/Exercices/exercice-3/src/test/java/com/sfeir/exercice3/Exercice3ApplicationTests.java | Schultz-Thomas/sfeir-school-spring-security | d29b9a7561021dfdbe7c7df6b70c932d6ae9dff6 | [
"Apache-2.0"
] | 29 | 2021-02-23T05:04:34.000Z | 2021-11-20T17:06:40.000Z | steps/Exercices/exercice-3/src/test/java/com/sfeir/exercice3/Exercice3ApplicationTests.java | Schultz-Thomas/sfeir-school-spring-security | d29b9a7561021dfdbe7c7df6b70c932d6ae9dff6 | [
"Apache-2.0"
] | 1 | 2022-01-26T14:38:24.000Z | 2022-01-26T14:38:24.000Z | 15.285714 | 60 | 0.78972 | 12,848 | package com.sfeir.exercice3;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class Exercice3ApplicationTests {

	/**
	 * Smoke test: passes if the Spring application context starts without
	 * throwing, i.e. all beans can be created and wired.
	 */
	@Test
	void contextLoads() {
	}

}
|
3e1e5ab192a1ab0c74bdc893e93c5bb16deb54e0 | 3,868 | java | Java | engine/src/main/java/nl/inl/blacklab/searches/SearchHitGroupsFromHits.java | jan-niestadt/BlackLab | 4ce7c7018f75717c12be5c768f5b2f26c442b196 | [
"Apache-2.0"
] | 1 | 2018-12-10T17:58:23.000Z | 2018-12-10T17:58:23.000Z | engine/src/main/java/nl/inl/blacklab/searches/SearchHitGroupsFromHits.java | jan-niestadt/BlackLab | 4ce7c7018f75717c12be5c768f5b2f26c442b196 | [
"Apache-2.0"
] | null | null | null | engine/src/main/java/nl/inl/blacklab/searches/SearchHitGroupsFromHits.java | jan-niestadt/BlackLab | 4ce7c7018f75717c12be5c768f5b2f26c442b196 | [
"Apache-2.0"
] | null | null | null | 37.921569 | 155 | 0.656412 | 12,849 | package nl.inl.blacklab.searches;
import java.util.Objects;

import nl.inl.blacklab.exceptions.InvalidQuery;
import nl.inl.blacklab.resultproperty.HitProperty;
import nl.inl.blacklab.search.results.HitGroups;
import nl.inl.blacklab.search.results.HitGroupsTokenFrequencies;
import nl.inl.blacklab.search.results.QueryInfo;
/**
* A search operation that yields groups of hits.
*/
public class SearchHitGroupsFromHits extends SearchHitGroups {

    /** Search that produces the hits to be grouped. */
    private final SearchHits source;

    /** Property to group the hits on. */
    private final HitProperty property;

    /** Maximum number of hits to store per group (when hits are stored at all). */
    private final int maxResultsToStorePerGroup;

    /** If true, hits must be stored in the groups even when a faster hit-less path exists. */
    private final boolean mustStoreHits;

    /**
     * A hit-grouping search.
     *
     * NOTE: When using the fast path, backing hits are not stored in the groups.
     * This saves a large amount of memory and time, but transforms the query into more of a statistical view on the data
     * because the individual hits are lost. If this is a problem, set mustStoreHits to true.
     *
     * @param queryInfo query info
     * @param hitsSearch search to group hits from
     * @param groupBy what to group by
     * @param maxResultsToStorePerGroup maximum number of results to store (if any are stored)
     * @param mustStoreHits if true, up to maxResultsToStorePerGroup hits will be stored. If false, no hits may be
     *            stored, depending on how the grouping is performed.
     */
    public SearchHitGroupsFromHits(QueryInfo queryInfo, SearchHits hitsSearch, HitProperty groupBy, int maxResultsToStorePerGroup, boolean mustStoreHits) {
        super(queryInfo);
        this.source = hitsSearch;
        this.property = groupBy;
        this.maxResultsToStorePerGroup = maxResultsToStorePerGroup;
        this.mustStoreHits = mustStoreHits;
    }

    /**
     * Execute the search operation, returning the final response.
     *
     * @return result of the operation
     * @throws InvalidQuery if the query is invalid
     */
    @Override
    public HitGroups executeInternal() throws InvalidQuery {
        if (HitGroupsTokenFrequencies.canUse(mustStoreHits, source, property)) {
            // Fast path: any token query, grouped by hit text or doc metadata.
            // Just "look up" token frequencies in the forward index(es) instead of
            // materializing individual hits.
            return HitGroupsTokenFrequencies.get(source, property);
        } else {
            // Slow path: find all the hits, then group them.
            return HitGroups.fromHits(source.executeNoQueue(), property, maxResultsToStorePerGroup);
        }
    }

    @Override
    public int hashCode() {
        // Null-safe combination of superclass state and our own fields.
        return Objects.hash(super.hashCode(), source, property, maxResultsToStorePerGroup, mustStoreHits);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!super.equals(obj))
            return false;
        if (getClass() != obj.getClass())
            return false;
        SearchHitGroupsFromHits other = (SearchHitGroupsFromHits) obj;
        // Same fields as hashCode(), compared null-safely.
        return maxResultsToStorePerGroup == other.maxResultsToStorePerGroup
                && mustStoreHits == other.mustStoreHits
                && Objects.equals(property, other.property)
                && Objects.equals(source, other.source);
    }

    @Override
    public String toString() {
        return toString("group", source, property, maxResultsToStorePerGroup);
    }
}
|
3e1e5aee816fb8c83fe0fce92c2d5eff387793f4 | 1,148 | java | Java | softwaremill-test/softwaremill-test-ui-web/src/main/java/com/softwaremill/common/test/web/selenium/SeleniumBrowserProperties.java | softwaremill/softwaremill-common | c58895b43ab55c2ec969326f807e3db44ce826b9 | [
"Apache-2.0"
] | 10 | 2015-01-07T23:07:07.000Z | 2018-12-31T23:19:16.000Z | softwaremill-test/softwaremill-test-ui-web/src/main/java/com/softwaremill/common/test/web/selenium/SeleniumBrowserProperties.java | softwaremill/softwaremill-common | c58895b43ab55c2ec969326f807e3db44ce826b9 | [
"Apache-2.0"
] | 3 | 2016-04-03T16:24:58.000Z | 2019-07-02T17:29:49.000Z | softwaremill-test/softwaremill-test-ui-web/src/main/java/com/softwaremill/common/test/web/selenium/SeleniumBrowserProperties.java | softwaremill/softwaremill-common | c58895b43ab55c2ec969326f807e3db44ce826b9 | [
"Apache-2.0"
] | 11 | 2015-03-06T02:27:28.000Z | 2021-01-13T22:45:44.000Z | 22.509804 | 100 | 0.66899 | 12,850 | package com.softwaremill.common.test.web.selenium;
/**
 * Configuration for launching a Selenium browser session: the launcher
 * command, the URL under test and the Selenium server port.
 *
 * @author maciek
 */
public class SeleniumBrowserProperties {

    // Selenium launcher command, e.g. "*firefox".
    private String command;

    // Base URL the browser opens.
    private String url;

    // Port the Selenium server listens on.
    private String port;

    /**
     * Creates a new set of browser properties.
     *
     * @param browserCommand launcher command, for example "*firefox"
     * @param browserURL base URL under test
     * @param browserPort Selenium server port
     */
    public SeleniumBrowserProperties(String browserCommand, String browserURL, String browserPort) {
        command = browserCommand;
        url = browserURL;
        port = browserPort;
    }

    /** @return the browser launcher command */
    public String getBrowserCommand() {
        return command;
    }

    /** @param browserCommand the browser launcher command to use */
    public void setBrowserCommand(String browserCommand) {
        command = browserCommand;
    }

    /** @return the base URL under test */
    public String getBrowserURL() {
        return url;
    }

    /** @param browserURL the base URL under test */
    public void setBrowserURL(String browserURL) {
        url = browserURL;
    }

    /** @return the Selenium server port */
    public String getBrowserPort() {
        return port;
    }

    /** @param browserPort the Selenium server port */
    public void setBrowserPort(String browserPort) {
        port = browserPort;
    }
}
|
3e1e5b1c439207477fc97b8d9074e87b3a56de9f | 245 | java | Java | springboot-03-mybatis/src/test/java/org/example/springboot03mybatis/Springboot03MybatisApplicationTests.java | ouyangjunfei/SpringBootStudy | b6d75755b5428278c8e2898fb4fe15a3f35e15a0 | [
"Apache-2.0"
] | 3 | 2021-05-10T06:58:26.000Z | 2021-12-31T17:37:19.000Z | springboot-03-mybatis/src/test/java/org/example/springboot03mybatis/Springboot03MybatisApplicationTests.java | ouyangjunfei/SpringBootStudy | b6d75755b5428278c8e2898fb4fe15a3f35e15a0 | [
"Apache-2.0"
] | 1 | 2021-07-26T04:00:27.000Z | 2021-09-06T09:18:07.000Z | springboot-03-mybatis/src/test/java/org/example/springboot03mybatis/Springboot03MybatisApplicationTests.java | ouyangjunfei/SpringBootStudy | b6d75755b5428278c8e2898fb4fe15a3f35e15a0 | [
"Apache-2.0"
] | null | null | null | 17.5 | 60 | 0.779592 | 12,851 | package org.example.springboot03mybatis;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
// Smoke test: verifies that the Spring application context starts without errors.
@SpringBootTest
class Springboot03MybatisApplicationTests {
// Intentionally empty: the test passes if the context loads successfully.
@Test
void contextLoads() {
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.