text
stringlengths 7
1.01M
|
|---|
package com.cy.util;
import com.cy.util.image.ImageUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Manual / exploratory tests for HttpclientUtils: SMS-gateway form POSTs,
 * a plain HTTP GET, and a multipart file upload. Most scenarios are kept
 * disabled because they hit live external services and carry real account
 * credentials; only skeletons and summaries remain.
 *
 * Created by Kentun on 2015-08-28.
 */
public class HttpclientUtilsTest {

    @Test
    public void testPost() {
        // Disabled: POSTed a form (username/scode/mobile/content) to a live
        // SMS gateway via HttpclientUtils.post(...) and printed the response.
    }

    @Test
    public void testPostSMS() {
        // Disabled: sent an SMS through sms.chanzor.com with account
        // credentials and printed the gateway's response body.
    }

    @Test
    public void testPostSMS2() {
        // Disabled: queried SMS delivery status from sms.chanzor.com with the
        // same account credentials.
    }

    @Test
    public void testGet() {
        // Disabled: fetched http://www.ifeng.com via HttpclientUtils.get(...)
        // and printed the page content.
    }

    @Test
    public void testUpload() throws ClientProtocolException, IOException {
        // Target endpoint for the (currently disabled) multipart upload.
        String url = "http://192.168.0.24:8080/cy/userProfile/userProfileAction!doNotNeedSessionAndSecurity_userProfileHandler.action";
        // Disabled: built a browser-compatible multipart request carrying a
        // base64-encoded image (uploadFileBase64/uploadFileName) plus a JSON
        // command payload (jsonStr), POSTed it to the url above, and printed
        // the response body when the server answered HTTP 200.
    }

    public static void main(String[] args) throws Exception {
        // Sample JSON command payload matching what the upload test sends.
        String jsonData = "{\n" +
                " \"command\": \"1\",\n" +
                " \"content\": {\n" +
                " \"accountNum\": \"448\",\n" +
                " \"password\": \"111111\",\n" +
                " \"type\": \"1\"\n" +
                " }\n" +
                "}";
        System.out.println(jsonData);
    }
}
|
package asr.proyectoFinal.services;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.ibm.cloud.sdk.core.security.Authenticator;
import com.ibm.cloud.sdk.core.security.IamAuthenticator;
import com.ibm.watson.natural_language_understanding.v1.NaturalLanguageUnderstanding;
import com.ibm.watson.natural_language_understanding.v1.model.AnalysisResults;
import com.ibm.watson.natural_language_understanding.v1.model.AnalyzeOptions;
import com.ibm.watson.natural_language_understanding.v1.model.EntitiesOptions;
import com.ibm.watson.natural_language_understanding.v1.model.Features;
import com.ibm.watson.natural_language_understanding.v1.model.KeywordsOptions;
import com.ibm.watson.natural_language_understanding.v1.model.RelationsOptions;
public class AnalisisLP
{
    /** Environment variable consulted first for the IBM Cloud API key. */
    private static final String APIKEY_ENV = "NLU_APIKEY";

    // SECURITY NOTE(review): this credential was committed to source control
    // and should be rotated. It remains only as a fallback so existing
    // deployments keep working; prefer setting the NLU_APIKEY env variable.
    private static final String DEFAULT_APIKEY = "b9n5u524Bl2kbTIYiqBnm3DepxRKUgDyp3CJkDXnhI-M";

    /** Watson NLU service instance endpoint (eu-gb region). */
    private static final String SERVICE_URL =
            "https://api.eu-gb.natural-language-understanding.watson.cloud.ibm.com/instances/c60817dd-867f-47a4-89ea-8162faf33f44";

    /**
     * Analyzes the given text with IBM Watson Natural Language Understanding,
     * requesting entities and keywords (each with emotion and sentiment,
     * limited to 2 results per feature).
     *
     * @param text   the text to analyze
     * @param idioma language code; currently unused — the service autodetects
     *               the language (parameter kept for caller compatibility)
     * @return the raw analysis result serialized as a JSON string
     */
    public static String analizarLenguaje(String text, String idioma)
    {
        String apiKey = System.getenv(APIKEY_ENV);
        if (apiKey == null || apiKey.isEmpty()) {
            apiKey = DEFAULT_APIKEY;
        }
        IamAuthenticator authenticator = new IamAuthenticator(apiKey);
        NaturalLanguageUnderstanding naturalLanguageUnderstanding =
                new NaturalLanguageUnderstanding("2020-08-01", authenticator);
        naturalLanguageUnderstanding.setServiceUrl(SERVICE_URL);

        EntitiesOptions entitiesOptions = new EntitiesOptions.Builder()
                .emotion(true)
                .sentiment(true)
                .limit(2)
                .build();
        KeywordsOptions keywordsOptions = new KeywordsOptions.Builder()
                .emotion(true)
                .sentiment(true)
                .limit(2)
                .build();
        Features features = new Features.Builder()
                .entities(entitiesOptions)
                .keywords(keywordsOptions)
                .build();
        AnalyzeOptions parameters = new AnalyzeOptions.Builder()
                .text(text)
                .features(features)
                .build();

        AnalysisResults response = naturalLanguageUnderstanding
                .analyze(parameters)
                .execute()
                .getResult();

        // Callers only ever received the raw JSON string; the old dead code
        // that re-parsed it with gson (and the commented-out sentiment
        // extraction) has been removed.
        return response.toString();
    }
}
|
/*
* #%L
* BroadleafCommerce Profile Web
* %%
* Copyright (C) 2009 - 2013 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.common.dao;
import org.broadleafcommerce.common.persistence.EntityConfiguration;
import org.broadleafcommerce.common.util.dao.DynamicDaoHelperImpl;
import org.broadleafcommerce.common.util.dao.TypedQueryBuilder;
import org.hibernate.FlushMode;
import org.hibernate.Session;
import org.hibernate.ejb.HibernateEntityManager;
import org.hibernate.type.AbstractSingleColumnStandardBasicType;
import org.hibernate.type.IntegerType;
import org.hibernate.type.LongType;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Repository;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
@Repository("blGenericEntityDao")
public class GenericEntityDaoImpl implements GenericEntityDao, ApplicationContextAware {

    // NOTE(review): static context populated through the instance-level
    // setApplicationContext() Spring callback below; enables the static
    // bean lookup in getGenericEntityDao().
    private static ApplicationContext applicationContext;

    // Lazily-resolved handle to the Spring-managed instance of this DAO.
    private static GenericEntityDaoImpl dao;

    /**
     * Returns the Spring-managed instance of this DAO, or null when the
     * application context has not been injected yet.
     */
    public static GenericEntityDaoImpl getGenericEntityDao() {
        if (applicationContext == null) {
            return null;
        }
        if (dao == null) {
            dao = (GenericEntityDaoImpl) applicationContext.getBean("blGenericEntityDao");
        }
        return dao;
    }

    @PersistenceContext(unitName = "blPU")
    protected EntityManager em;

    @Resource(name = "blEntityConfiguration")
    protected EntityConfiguration entityConfiguration;

    // Helper encapsulating Hibernate metadata / reflection utilities.
    protected DynamicDaoHelperImpl daoHelper = new DynamicDaoHelperImpl();

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        // Intentionally writes the static field from an instance callback.
        this.applicationContext = applicationContext;
    }

    /**
     * Finds an entity by primary key, first resolving the non-proxy
     * implementation class, then coercing the id to the entity's declared
     * identifier type (Long or Integer) before calling em.find().
     */
    @Override
    public <T> T readGenericEntity(Class<T> clazz, Object id) {
        clazz = (Class<T>) DynamicDaoHelperImpl.getNonProxyImplementationClassIfNecessary(clazz);
        Map<String, Object> md = daoHelper.getIdMetadata(clazz, (HibernateEntityManager) em);
        AbstractSingleColumnStandardBasicType type = (AbstractSingleColumnStandardBasicType) md.get("type");
        if (type instanceof LongType) {
            id = Long.parseLong(String.valueOf(id));
        } else if (type instanceof IntegerType) {
            id = Integer.parseInt(String.valueOf(id));
        }
        return em.find(clazz, id);
    }

    /** Counts all rows of the given entity type. */
    @Override
    public <T> Long readCountGenericEntity(Class<T> clazz) {
        clazz = (Class<T>) DynamicDaoHelperImpl.getNonProxyImplementationClassIfNecessary(clazz);
        TypedQuery<Long> q = new TypedQueryBuilder<T>(clazz, "root").toCountQuery(em);
        return q.getSingleResult();
    }

    /** Reads a page of entities of the given type (limit/offset paging). */
    @Override
    public <T> List<T> readAllGenericEntity(Class<T> clazz, int limit, int offset) {
        clazz = (Class<T>) DynamicDaoHelperImpl.getNonProxyImplementationClassIfNecessary(clazz);
        TypedQuery<T> q = new TypedQueryBuilder<T>(clazz, "root").toQuery(em);
        q.setMaxResults(limit);
        q.setFirstResult(offset);
        return q.getResultList();
    }

    /** Reads all entities of the given type, without paging. */
    @Override
    public <T> List<T> readAllGenericEntity(Class<T> clazz) {
        clazz = (Class<T>) DynamicDaoHelperImpl.getNonProxyImplementationClassIfNecessary(clazz);
        TypedQuery<T> q = new TypedQueryBuilder<T>(clazz, "root").toQuery(em);
        return q.getResultList();
    }

    /**
     * Reads the ids of all entities of the given type, ordered ascending by
     * the id field. Assumes the id is convertible to Long via the JPA
     * criteria {@code as(Long.class)} conversion.
     */
    @Override
    public List<Long> readAllGenericEntityId(Class<?> clazz) {
        clazz = DynamicDaoHelperImpl.getNonProxyImplementationClassIfNecessary(clazz);
        CriteriaBuilder builder = em.getCriteriaBuilder();
        CriteriaQuery<Long> criteria = builder.createQuery(Long.class);
        Root root = criteria.from(clazz);
        criteria.select(root.get(getIdField(clazz).getName()).as(Long.class));
        criteria.orderBy(builder.asc(root.get(getIdField(clazz).getName())));
        return em.createQuery(criteria).getResultList();
    }

    /**
     * Resolves the implementation class for a class name, consulting the
     * entity configuration first and falling back to Class.forName.
     *
     * @throws RuntimeException wrapping ClassNotFoundException when the name
     *         cannot be loaded at all
     */
    @Override
    public Class<?> getImplClass(String className) {
        Class<?> clazz = null;
        try {
            clazz = entityConfiguration.lookupEntityClass(className);
        } catch (NoSuchBeanDefinitionException e) {
            //do nothing - fall back to direct class loading below
        }
        if (clazz == null) {
            try {
                clazz = Class.forName(className);
            } catch (ClassNotFoundException e) {
                throw new RuntimeException(e);
            }
            clazz = DynamicDaoHelperImpl.getNonProxyImplementationClassIfNecessary(clazz);
        }
        return clazz;
    }

    /**
     * Resolves the "ceiling" (most-derived polymorphic) implementation for a
     * class name, retrying with the non-proxy class when the first lookup
     * yields nothing.
     *
     * @throws RuntimeException wrapping ClassNotFoundException if the name
     *         cannot be loaded
     * @throws IllegalArgumentException when no ceiling implementation exists
     */
    @Override
    public Class<?> getCeilingImplClass(String className) {
        Class<?> clazz;
        try {
            clazz = Class.forName(className);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        Class<?>[] entitiesFromCeiling = daoHelper.getAllPolymorphicEntitiesFromCeiling(clazz, em.unwrap(Session.class).getSessionFactory(), true, true);
        if (entitiesFromCeiling == null || entitiesFromCeiling.length < 1) {
            clazz = DynamicDaoHelperImpl.getNonProxyImplementationClassIfNecessary(clazz);
            entitiesFromCeiling = daoHelper.getAllPolymorphicEntitiesFromCeiling(clazz, em.unwrap(Session.class).getSessionFactory(), true, true);
        }
        if (entitiesFromCeiling == null || entitiesFromCeiling.length < 1) {
            throw new IllegalArgumentException(String.format("Unable to find ceiling implementation for the requested class name (%s)", className));
        }
        // The most-derived entity is the last element of the returned array.
        clazz = entitiesFromCeiling[entitiesFromCeiling.length - 1];
        return clazz;
    }

    /** Returns the persistence identifier of the given entity. */
    @Override
    public Serializable getIdentifier(Object entity) {
        return daoHelper.getIdentifier(entity, em);
    }

    // Reflective lookup of the entity's id field, delegated to the helper.
    protected Field getIdField(Class<?> clazz) {
        return daoHelper.getIdField(clazz, em);
    }

    /** Merges the object into the persistence context and returns the managed copy. */
    @Override
    public <T> T save(T object) {
        return em.merge(object);
    }

    /** Persists a new entity instance. */
    @Override
    public void persist(Object object) {
        em.persist(object);
    }

    /** Removes the entity from the persistence context and the database. */
    @Override
    public void remove(Object object) {
        em.remove(object);
    }

    /** Flushes pending changes to the database. */
    @Override
    public void flush() {
        em.flush();
    }

    /** Switches the underlying Hibernate session to manual flushing. */
    @Override
    public void clearAutoFlushMode() {
        em.unwrap(Session.class).setFlushMode(FlushMode.MANUAL);
    }

    /** Restores automatic flushing on the underlying Hibernate session. */
    @Override
    public void enableAutoFlushMode() {
        em.unwrap(Session.class).setFlushMode(FlushMode.AUTO);
    }

    /** Clears the persistence context, detaching all managed entities. */
    @Override
    public void clear() {
        em.clear();
    }

    /** Returns true if the object is managed by the current persistence context. */
    @Override
    public boolean sessionContains(Object object) {
        return em.contains(object);
    }

    /** Returns true if the entity already has a persistence identifier assigned. */
    @Override
    public boolean idAssigned(Object object) {
        return getIdentifier(object) != null;
    }

    /** Exposes the underlying EntityManager. */
    @Override
    public EntityManager getEntityManager() {
        return em;
    }
}
|
/***** BEGIN LICENSE BLOCK *****
* Version: EPL 2.0/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Eclipse Public
* License Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.eclipse.org/legal/epl-v20.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
* Copyright (C) 2007 William N Dortch <bill.dortch@gmail.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the EPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the EPL, the GPL or the LGPL.
***** END LICENSE BLOCK *****/
package org.jruby.runtime.builtin;
/**
 * Represents a named variable slot on a Ruby object: an instance variable,
 * class variable, constant, or an internal (non-Ruby-visible) variable.
 *
 * @param <BaseObjectType> the type of value held by the variable
 */
public interface Variable<BaseObjectType> {
    /** Returns the variable's name. */
    String getName();

    /** Returns the value currently held by this variable. */
    BaseObjectType getValue();

    /** Returns true if this variable is an instance variable. */
    boolean isInstanceVariable();

    /** Returns true if this variable is a class variable. */
    boolean isClassVariable();

    /** Returns true if this variable is a constant. */
    boolean isConstant();

    /**
     * Returns true if the variable is an instance variable, class variable, or constant;
     * otherwise, false.
     *
     * @return true if the variable is an instance variable, class variable, or constant,
     * else false
     */
    boolean isRubyVariable();
}
|
/*
* The contents of this file are subject to the terms
* of the Common Development and Distribution License
* (the "License"). You may not use this file except
* in compliance with the License.
*
* You can obtain a copy of the license at
* glassfish/bootstrap/legal/CDDLv1.0.txt or
* https://glassfish.dev.java.net/public/CDDLv1.0.html.
* See the License for the specific language governing
* permissions and limitations under the License.
*
* When distributing Covered Code, include this CDDL
* HEADER in each file and include the License file at
* glassfish/bootstrap/legal/CDDLv1.0.txt. If applicable,
* add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your
* own identifying information: Portions Copyright [yyyy]
* [name of copyright owner]
*/
/*
* @(#)MimeTypeFile.java 1.8 05/11/16
*
* Copyright 1997-2005 Sun Microsystems, Inc. All Rights Reserved.
*/
package com.sun.activation.registries;
import java.io.*;
import java.util.*;
/**
 * A registry of MIME type mappings parsed from the {@code .mime.types}
 * format (both the classic "type ext1 ext2" form and the newer
 * "type=... exts=..." form).
 */
public class MimeTypeFile {
    // Name of the backing file, when constructed from a filename.
    private String fname = null;
    // Maps file extension (String) -> MimeTypeEntry.
    private Hashtable type_hash = new Hashtable();

    /**
     * The constructor that takes a filename as an argument.
     *
     * @param new_fname The file name of the mime types file.
     */
    public MimeTypeFile(String new_fname) throws IOException {
        fname = new_fname; // remember the file name
        // Decode as ISO-8859-1 explicitly, for consistency with the
        // InputStream constructor below; the previous FileReader used the
        // platform default charset, which varies between JVMs.
        InputStream is = new FileInputStream(fname);
        try {
            parse(new BufferedReader(new InputStreamReader(is, "iso-8859-1")));
        } finally {
            try {
                is.close(); // close it
            } catch (IOException e) {
                // ignore close failures; parsing has already completed
            }
        }
    }

    /**
     * Constructs a registry from mime.types data on the given stream,
     * decoded as ISO-8859-1.
     */
    public MimeTypeFile(InputStream is) throws IOException {
        parse(new BufferedReader(new InputStreamReader(is, "iso-8859-1")));
    }

    /**
     * Creates an empty DB.
     */
    public MimeTypeFile() {
    }

    /**
     * Get the MimeTypeEntry based on the file extension.
     *
     * @return the entry, or null if the extension is unknown
     */
    public MimeTypeEntry getMimeTypeEntry(String file_ext) {
        return (MimeTypeEntry)type_hash.get((Object)file_ext);
    }

    /**
     * Get the MIME type string corresponding to the file extension.
     *
     * @return the MIME type, or null if the extension is unknown
     */
    public String getMIMETypeString(String file_ext) {
        MimeTypeEntry entry = this.getMimeTypeEntry(file_ext);
        if (entry != null)
            return entry.getMIMEType();
        else
            return null;
    }

    /**
     * Appends string of entries to the types registry, must be valid
     * .mime.types format.
     * A mime.types entry is one of two forms:
     *
     * type/subtype ext1 ext2 ...
     * or
     * type=type/subtype desc="description of type" exts=ext1,ext2,...
     *
     * Example:
     * # this is a test
     * audio/basic au
     * text/plain txt text
     * type=application/postscript exts=ps,eps
     */
    public void appendToRegistry(String mime_types) {
        try {
            parse(new BufferedReader(new StringReader(mime_types)));
        } catch (IOException ex) {
            // can't happen: StringReader never throws IOException
        }
    }

    /**
     * Parse a stream of mime.types entries. A line ending in a backslash
     * is continued onto the next line.
     */
    private void parse(BufferedReader buf_reader) throws IOException {
        String line = null, prev = null;
        while ((line = buf_reader.readLine()) != null) {
            if (prev == null)
                prev = line;
            else
                prev += line;
            int end = prev.length();
            if (prev.length() > 0 && prev.charAt(end - 1) == '\\') {
                // drop the trailing backslash and keep accumulating
                prev = prev.substring(0, end - 1);
                continue;
            }
            this.parseEntry(prev);
            prev = null;
        }
        if (prev != null) // flush a final continued line with no terminator
            this.parseEntry(prev);
    }

    /**
     * Parse single mime.types entry (either format); registers one
     * MimeTypeEntry per extension found.
     */
    private void parseEntry(String line) {
        String mime_type = null;
        String file_ext = null;
        line = line.trim();
        if (line.length() == 0) // empty line...
            return; // BAIL!
        // check to see if this is a comment line?
        if (line.charAt(0) == '#')
            return; // then we are done!
        // is it a new format line or old format?
        if (line.indexOf('=') > 0) {
            // new format: name=value pairs
            LineTokenizer lt = new LineTokenizer(line);
            while (lt.hasMoreTokens()) {
                String name = lt.nextToken();
                String value = null;
                if (lt.hasMoreTokens() && lt.nextToken().equals("=") &&
                    lt.hasMoreTokens())
                    value = lt.nextToken();
                if (value == null) {
                    if (LogSupport.isLoggable())
                        LogSupport.log("Bad .mime.types entry: " + line);
                    return;
                }
                if (name.equals("type"))
                    mime_type = value;
                else if (name.equals("exts")) {
                    // comma-separated list of extensions for this type
                    StringTokenizer st = new StringTokenizer(value, ",");
                    while (st.hasMoreTokens()) {
                        file_ext = st.nextToken();
                        MimeTypeEntry entry =
                            new MimeTypeEntry(mime_type, file_ext);
                        type_hash.put(file_ext, entry);
                        if (LogSupport.isLoggable())
                            LogSupport.log("Added: " + entry.toString());
                    }
                }
            }
        } else {
            // old format: "type/subtype ext1 ext2 ..."
            StringTokenizer strtok = new StringTokenizer(line);
            int num_tok = strtok.countTokens();
            if (num_tok == 0) // empty line
                return;
            mime_type = strtok.nextToken(); // get the MIME type
            while (strtok.hasMoreTokens()) {
                MimeTypeEntry entry = null;
                file_ext = strtok.nextToken();
                entry = new MimeTypeEntry(mime_type, file_ext);
                type_hash.put(file_ext, entry);
                if (LogSupport.isLoggable())
                    LogSupport.log("Added: " + entry.toString());
            }
        }
    }

    // for debugging
    /*
    public static void main(String[] argv) throws Exception {
        MimeTypeFile mf = new MimeTypeFile(argv[0]);
        System.out.println("ext " + argv[1] + " type " +
            mf.getMIMETypeString(argv[1]));
        System.exit(0);
    }
    */
}
/**
 * A minimal tokenizer for .mime.types lines: yields whitespace-separated
 * words, treats '=' as a token of its own, and understands double-quoted
 * strings with backslash escapes. Tokens can be pushed back for lookahead.
 */
class LineTokenizer {
    private int currentPosition;
    private int maxPosition;
    private String str;
    // Pushed-back tokens, returned LIFO before scanning resumes.
    private Deque<String> pushback = new ArrayDeque<String>();
    // Characters that always form single-character tokens.
    private static final String singles = "=";

    /**
     * Constructs a tokenizer for the specified string.
     *
     * @param str a string to be parsed.
     */
    public LineTokenizer(String str) {
        this.str = str;
        this.currentPosition = 0;
        this.maxPosition = str.length();
    }

    /** Advances currentPosition past any whitespace. */
    private void skipWhiteSpace() {
        while (currentPosition < maxPosition
                && Character.isWhitespace(str.charAt(currentPosition))) {
            currentPosition++;
        }
    }

    /**
     * Tests if there are more tokens available from this tokenizer's string.
     *
     * @return <code>true</code> if another token is available,
     *         <code>false</code> otherwise.
     */
    public boolean hasMoreTokens() {
        if (!pushback.isEmpty())
            return true;
        skipWhiteSpace();
        return currentPosition < maxPosition;
    }

    /**
     * Returns the next token from this tokenizer.
     *
     * @return the next token from this tokenizer.
     * @exception NoSuchElementException if there are no more tokens in this
     *            tokenizer's string.
     */
    public String nextToken() {
        if (!pushback.isEmpty())
            return pushback.pop();
        skipWhiteSpace();
        if (currentPosition >= maxPosition)
            throw new NoSuchElementException();

        int tokenStart = currentPosition;
        char first = str.charAt(tokenStart);
        if (first == '"') {
            String quoted = scanQuoted(tokenStart);
            if (quoted != null)
                return quoted;
            // Unterminated quote: fall through and return the raw remainder,
            // matching the historical behavior.
        } else if (singles.indexOf(first) >= 0) {
            currentPosition++; // single-character token
        } else {
            // Ordinary word: run until whitespace or a single-char token.
            while (currentPosition < maxPosition
                    && singles.indexOf(str.charAt(currentPosition)) < 0
                    && !Character.isWhitespace(str.charAt(currentPosition))) {
                currentPosition++;
            }
        }
        return str.substring(tokenStart, currentPosition);
    }

    /**
     * Scans a double-quoted token whose opening quote sits at quoteStart.
     * Returns the unescaped contents, or null when no closing quote is
     * found (currentPosition is left wherever the scan stopped).
     */
    private String scanQuoted(int quoteStart) {
        currentPosition++; // step past the opening quote
        boolean sawEscape = false;
        while (currentPosition < maxPosition) {
            char c = str.charAt(currentPosition++);
            if (c == '\\') {
                currentPosition++; // skip the escaped character
                sawEscape = true;
            } else if (c == '"') {
                if (!sawEscape)
                    return str.substring(quoteStart + 1, currentPosition - 1);
                // Rebuild the contents with the escape backslashes removed.
                StringBuffer sb = new StringBuffer();
                for (int i = quoteStart + 1; i < currentPosition - 1; i++) {
                    char cc = str.charAt(i);
                    if (cc != '\\')
                        sb.append(cc);
                }
                return sb.toString();
            }
        }
        return null;
    }

    /** Pushes a token back; it is returned by the next nextToken() call. */
    public void pushToken(String token) {
        pushback.push(token);
    }
}
|
package org.nutz.dao.impl.jdbc;
import java.lang.reflect.Array;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.nutz.dao.Chain;
import org.nutz.dao.entity.Entity;
import org.nutz.dao.impl.sql.NutStatement;
import org.nutz.dao.jdbc.ValueAdaptor;
import org.nutz.dao.pager.Pager;
import org.nutz.dao.sql.PItem;
import org.nutz.dao.sql.Pojo;
import org.nutz.dao.sql.PojoCallback;
import org.nutz.dao.sql.SqlType;
import org.nutz.dao.util.Pojos;
import org.nutz.lang.Lang;
/**
 * A SQL statement assembled from PItem elements, carrying a parameter list
 * that is expanded into a parameter matrix (one row per parameter object)
 * at execution time.
 */
public class NutPojo extends NutStatement implements Pojo {

    private static final long serialVersionUID = -8499040181844973777L;

    // Callback invoked before the statement executes (may be null).
    private PojoCallback before;
    // Callback invoked after execution; its return value becomes the result.
    private PojoCallback after;

    /**
     * Parameter list: each element produces one row of the parameter matrix.
     */
    private LinkedList<Object> params;

    /**
     * Cached total number of parameters contained in the statement
     * (-1 until first computed by _params_count()).
     */
    private int _pmnum;

    /**
     * The object currently being operated on.
     */
    private Object obj;

    /**
     * Statement elements; protected so subclasses can access them.
     */
    protected ArrayList<PItem> items;

    public NutPojo() {
        super();
        params = new LinkedList<Object>();
        items = new ArrayList<PItem>(6);
        _pmnum = -1;
        // Every statement starts with a SQL-type marker item.
        append(Pojos.Items.sqlType());
    }

    /**
     * Collects the value adaptors for all parameters by letting each item
     * fill in its slice of the array.
     */
    @Override
    public ValueAdaptor[] getAdaptors() {
        ValueAdaptor[] adaptors = new ValueAdaptor[_params_count()];
        int i = 0;
        for (PItem item : items) {
            i = item.joinAdaptor(getEntity(), adaptors, i);
        }
        return adaptors;
    }

    /**
     * Builds the parameter matrix: one row per parameter object in
     * {@link #params}, each row filled column-by-column by the items.
     */
    @Override
    public Object[][] getParamMatrix() {
        Object[][] re;
        /*
         * No parameter objects, but the statement still declares parameters:
         * loop over the items once and see what values they produce.
         */
        if (_params_count() > 0 && params.isEmpty()) {
            re = new Object[1][_params_count()];
            int i = 0;
            for (PItem item : items) {
                i = item.joinParams(getEntity(), null, re[0], i);
            }
        }
        /*
         * Otherwise iterate over the parameter list and build one matrix
         * row per parameter object.
         */
        else {
            re = new Object[params.size()][_params_count()];
            int row = 0;
            for (Object obj : params) {
                int i = 0;
                for (PItem item : items) {
                    i = item.joinParams(getEntity(), obj, re[row], i);
                }
                row++;
            }
        }
        return re;
    }

    /** Concatenates the SQL fragments of all items into one statement string. */
    @Override
    public String toPreparedStatement() {
        StringBuilder sb = new StringBuilder();
        for (PItem item : items) {
            item.joinSql(getEntity(), sb);
        }
        return sb.toString();
    }

    @Override
    public void onBefore(Connection conn) throws SQLException {
        if (null != before) {
            before.invoke(conn, null, this, null);
        }
    }

    @Override
    public void onAfter(Connection conn, ResultSet rs, Statement stmt) throws SQLException {
        if (null != after) {
            // The after-callback's return value becomes the statement result.
            getContext().setResult(after.invoke(conn, rs, this, stmt));
        }
    }

    @Override
    public Pojo setBefore(PojoCallback before) {
        this.before = before;
        return this;
    }

    @Override
    public Pojo setAfter(PojoCallback after) {
        this.after = after;
        return this;
    }

    @Override
    public Pojo setPager(Pager pager) {
        this.getContext().setPager(pager);
        return this;
    }

    /**
     * Adds parameter objects, flattening collections, arrays and iterators;
     * a Chain is converted to a Map row. Null is ignored.
     */
    @Override
    public Pojo addParamsBy(Object obj) {
        if (null == obj) {
            return this;
        }
        // Collection: flatten each element recursively.
        if (obj instanceof Collection<?>) {
            for (Object ele : (Collection<?>) obj) {
                addParamsBy(ele);
            }
        }// Array: flatten each element recursively.
        else if (obj.getClass().isArray()) {
            int len = Array.getLength(obj);
            for (int i = 0; i < len; i++) {
                addParamsBy(Array.get(obj, i));
            }
        }
        // Chain: convert to a Map.
        else if (obj instanceof Chain) {
            params.add(((Chain) obj).updateBy(this.getEntity()).toMap());
        }// Iterator. TODO: consider making params itself an iterator so an
         // unbounded number of objects could be processed ...
        else if (obj instanceof Iterator<?>) {
            Iterator<?> it = (Iterator<?>) obj;
            while (it.hasNext()) {
                addParamsBy(it.next());
            }
        }
        // Any other object is stored directly and occupies one row.
        else {
            params.add(obj);
        }
        return this;
    }

    @Override
    public Object getLastParams() {
        return params.isEmpty() ? null : params.getLast();
    }

    @Override
    public List<Object> params() {
        return params;
    }

    @Override
    public Object getOperatingObject() {
        return obj;
    }

    @Override
    public Pojo setOperatingObject(Object obj) {
        this.obj = obj;
        return this;
    }

    /** Clears only the parameter list; the statement items are kept. */
    @Override
    public Pojo clear() {
        this.params.clear();
        return this;
    }

    /** Appends items (nulls skipped), binding each to this statement. */
    @Override
    public Pojo append(PItem... itemAry) {
        if (null != itemAry) {
            for (PItem item : itemAry) {
                if (null != item) {
                    items.add(item);
                    item.setPojo(this);
                }
            }
        }
        return this;
    }

    /** Inserts items at the front, binding each to this statement. */
    @Override
    public Pojo insertFirst(PItem... itemAry) {
        items.addAll(0, Lang.list(itemAry));
        for (PItem pi : itemAry) {
            pi.setPojo(this);
        }
        return this;
    }

    @Override
    public Pojo setItem(int index, PItem pi) {
        items.set(index, pi);
        pi.setPojo(this);
        return this;
    }

    @Override
    public PItem getItem(int index) {
        return items.get(index);
    }

    @Override
    public Pojo removeItem(int index) {
        items.remove(index);
        return this;
    }

    @Override
    public NutPojo setSqlType(SqlType sqlType) {
        return (NutPojo) super.setSqlType(sqlType);
    }

    @Override
    public String toString() {
        // RUN statements have no SQL text; show the type and callback markers.
        if (SqlType.RUN == this.getSqlType()) {
            return this.getSqlType().name()
                   + (null == before ? "" : " :before{...}")
                   + (null == after ? "" : " :after{...}");
        }
        return super.toString();
    }

    @Override
    public Pojo duplicate() {
        throw Lang.noImplement();
    }

    // Lazily computes and caches the total parameter count across all items.
    private int _params_count() {
        if (_pmnum < 0) {
            _pmnum = 0;
            Entity<?> en = getEntity();
            for (PItem item : items) {
                _pmnum += item.paramCount(en);
            }
        }
        return _pmnum;
    }
}
|
/*
* Copyright (c) 2001-2008 Caucho Technology, Inc. All rights reserved.
*
* The Apache Software License, Version 1.1
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Caucho Technology (http://www.caucho.com/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "Burlap", "Resin", and "Caucho" must not be used to
* endorse or promote products derived from this software without prior
* written permission. For written permission, please contact
* info@caucho.com.
*
* 5. Products derived from this software may not be called "Resin"
* nor may "Resin" appear in their names without prior written
* permission of Caucho Technology.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL CAUCHO TECHNOLOGY OR ITS CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
* OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* @author Scott Ferguson
*/
package com.caucho.hessian.io;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
/**
* Serializing an object for known object types.
*/
/**
 * Deserializer for the basic Hessian types: the primitive wrappers,
 * {@code String}, {@code Date}, {@code Object}, and arrays of those types.
 *
 * <p>The concrete type an instance handles is selected by the {@code code}
 * constructor argument, one of the {@link BasicSerializer} constants
 * mirrored below.</p>
 */
public class BasicDeserializer extends AbstractDeserializer {
  // Type codes, mirrored from BasicSerializer so both sides agree.
  public static final int NULL = BasicSerializer.NULL;
  public static final int BOOLEAN = BasicSerializer.BOOLEAN;
  public static final int BYTE = BasicSerializer.BYTE;
  public static final int SHORT = BasicSerializer.SHORT;
  public static final int INTEGER = BasicSerializer.INTEGER;
  public static final int LONG = BasicSerializer.LONG;
  public static final int FLOAT = BasicSerializer.FLOAT;
  public static final int DOUBLE = BasicSerializer.DOUBLE;
  public static final int CHARACTER = BasicSerializer.CHARACTER;
  public static final int CHARACTER_OBJECT = BasicSerializer.CHARACTER_OBJECT;
  public static final int STRING = BasicSerializer.STRING;
  public static final int DATE = BasicSerializer.DATE;
  public static final int NUMBER = BasicSerializer.NUMBER;
  public static final int OBJECT = BasicSerializer.OBJECT;

  public static final int BOOLEAN_ARRAY = BasicSerializer.BOOLEAN_ARRAY;
  public static final int BYTE_ARRAY = BasicSerializer.BYTE_ARRAY;
  public static final int SHORT_ARRAY = BasicSerializer.SHORT_ARRAY;
  public static final int INTEGER_ARRAY = BasicSerializer.INTEGER_ARRAY;
  public static final int LONG_ARRAY = BasicSerializer.LONG_ARRAY;
  public static final int FLOAT_ARRAY = BasicSerializer.FLOAT_ARRAY;
  public static final int DOUBLE_ARRAY = BasicSerializer.DOUBLE_ARRAY;
  public static final int CHARACTER_ARRAY = BasicSerializer.CHARACTER_ARRAY;
  public static final int STRING_ARRAY = BasicSerializer.STRING_ARRAY;
  public static final int OBJECT_ARRAY = BasicSerializer.OBJECT_ARRAY;

  // Which of the codes above this instance deserializes.
  private int _code;

  /**
   * @param code one of the type codes above, selecting the type this
   *             deserializer produces
   */
  public BasicDeserializer(int code)
  {
    _code = code;
  }

  /**
   * Returns the Java class produced by this deserializer.
   * (Raw {@code Class} return type kept to match the inherited signature.)
   *
   * @throws UnsupportedOperationException for an unknown type code
   */
  public Class getType()
  {
    switch (_code) {
    case NULL:
      return void.class;
    case BOOLEAN:
      return Boolean.class;
    case BYTE:
      return Byte.class;
    case SHORT:
      return Short.class;
    case INTEGER:
      return Integer.class;
    case LONG:
      return Long.class;
    case FLOAT:
      return Float.class;
    case DOUBLE:
      return Double.class;
    case CHARACTER:
      return Character.class;
    case CHARACTER_OBJECT:
      return Character.class;
    case STRING:
      return String.class;
    case DATE:
      return Date.class;
    case NUMBER:
      return Number.class;
    case OBJECT:
      return Object.class;
    case BOOLEAN_ARRAY:
      return boolean[].class;
    case BYTE_ARRAY:
      return byte[].class;
    case SHORT_ARRAY:
      return short[].class;
    case INTEGER_ARRAY:
      return int[].class;
    case LONG_ARRAY:
      return long[].class;
    case FLOAT_ARRAY:
      return float[].class;
    case DOUBLE_ARRAY:
      return double[].class;
    case CHARACTER_ARRAY:
      return char[].class;
    case STRING_ARRAY:
      return String[].class;
    case OBJECT_ARRAY:
      return Object[].class;
    default:
      throw new UnsupportedOperationException();
    }
  }

  /**
   * Reads one value of this deserializer's type from the input stream.
   *
   * @param in the Hessian input stream
   * @return the decoded value, possibly {@code null}
   * @throws IOException on a stream/protocol error
   * @throws UnsupportedOperationException for an unknown type code
   */
  public Object readObject(AbstractHessianInput in)
    throws IOException
  {
    switch (_code) {
    case NULL:
      // hessian/3490: the encoded null must still be consumed
      in.readObject();
      return null;

    case BOOLEAN:
      return Boolean.valueOf(in.readBoolean());

    case BYTE:
      return Byte.valueOf((byte) in.readInt());

    case SHORT:
      return Short.valueOf((short) in.readInt());

    case INTEGER:
      return Integer.valueOf(in.readInt());

    case LONG:
      return Long.valueOf(in.readLong());

    case FLOAT:
      // floats travel as doubles on the wire; narrow on read
      return Float.valueOf((float) in.readDouble());

    case DOUBLE:
      return Double.valueOf(in.readDouble());

    case STRING:
      return in.readString();

    case OBJECT:
      return in.readObject();

    case CHARACTER:
    {
      // chars travel as strings; an empty/null string maps to '\0'
      String s = in.readString();
      if (s == null || s.equals(""))
        return Character.valueOf((char) 0);
      else
        return Character.valueOf(s.charAt(0));
    }

    case CHARACTER_OBJECT:
    {
      // like CHARACTER, but an empty/null string maps to null
      String s = in.readString();
      if (s == null || s.equals(""))
        return null;
      else
        return Character.valueOf(s.charAt(0));
    }

    case DATE:
      return new Date(in.readUTCDate());

    case NUMBER:
      return in.readObject();

    case BYTE_ARRAY:
      return in.readBytes();

    case CHARACTER_ARRAY:
    {
      // char arrays travel as strings
      String s = in.readString();

      if (s == null)
        return null;
      else {
        int len = s.length();
        char []chars = new char[len];
        s.getChars(0, len, chars, 0);
        return chars;
      }
    }

    case BOOLEAN_ARRAY:
    case SHORT_ARRAY:
    case INTEGER_ARRAY:
    case LONG_ARRAY:
    case FLOAT_ARRAY:
    case DOUBLE_ARRAY:
    case STRING_ARRAY:
    {
      int code = in.readListStart();

      switch (code) {
      case 'N':
        return null;

      case 0x10: case 0x11: case 0x12: case 0x13:
      case 0x14: case 0x15: case 0x16: case 0x17:
      case 0x18: case 0x19: case 0x1a: case 0x1b:
      case 0x1c: case 0x1d: case 0x1e: case 0x1f:
        // compact fixed-length list: the length is encoded in the tag
        int length = code - 0x10;
        // NOTE(review): this int appears to be the encoded list type;
        // it is read only to advance the stream and its value is unused
        in.readInt();

        return readLengthList(in, length);

      default:
        // variable list: the type and length are read explicitly; the
        // type string is not needed here but must be consumed
        String type = in.readType();
        length = in.readLength();

        return readList(in, length);
      }
    }

    default:
      throw new UnsupportedOperationException();
    }
  }

  /**
   * Reads a (possibly variable-length) list as a typed Java array.
   *
   * @param in the Hessian input stream
   * @param length expected element count, or negative when unknown
   *               (elements are then read until the end marker)
   * @return the decoded array
   * @throws IOException on a stream/protocol error
   * @throws UnsupportedOperationException when this deserializer's type
   *         is not an array type
   */
  public Object readList(AbstractHessianInput in, int length)
    throws IOException
  {
    switch (_code) {
    case BOOLEAN_ARRAY: {
      if (length >= 0) {
        boolean []data = new boolean[length];

        // register before filling so back-references resolve
        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = in.readBoolean();

        in.readEnd();

        return data;
      }
      else {
        ArrayList list = new ArrayList();

        while (! in.isEnd())
          list.add(Boolean.valueOf(in.readBoolean()));

        in.readEnd();

        boolean []data = new boolean[list.size()];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = ((Boolean) list.get(i)).booleanValue();

        return data;
      }
    }

    case SHORT_ARRAY: {
      if (length >= 0) {
        short []data = new short[length];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = (short) in.readInt();

        in.readEnd();

        return data;
      }
      else {
        ArrayList list = new ArrayList();

        while (! in.isEnd())
          list.add(Short.valueOf((short) in.readInt()));

        in.readEnd();

        short []data = new short[list.size()];
        for (int i = 0; i < data.length; i++)
          data[i] = ((Short) list.get(i)).shortValue();

        in.addRef(data);

        return data;
      }
    }

    case INTEGER_ARRAY: {
      if (length >= 0) {
        int []data = new int[length];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = in.readInt();

        in.readEnd();

        return data;
      }
      else {
        ArrayList list = new ArrayList();

        while (! in.isEnd())
          list.add(Integer.valueOf(in.readInt()));

        in.readEnd();

        int []data = new int[list.size()];
        for (int i = 0; i < data.length; i++)
          data[i] = ((Integer) list.get(i)).intValue();

        in.addRef(data);

        return data;
      }
    }

    case LONG_ARRAY: {
      if (length >= 0) {
        long []data = new long[length];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = in.readLong();

        in.readEnd();

        return data;
      }
      else {
        ArrayList list = new ArrayList();

        while (! in.isEnd())
          list.add(Long.valueOf(in.readLong()));

        in.readEnd();

        long []data = new long[list.size()];
        for (int i = 0; i < data.length; i++)
          data[i] = ((Long) list.get(i)).longValue();

        in.addRef(data);

        return data;
      }
    }

    case FLOAT_ARRAY: {
      if (length >= 0) {
        float []data = new float[length];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = (float) in.readDouble();

        in.readEnd();

        return data;
      }
      else {
        ArrayList list = new ArrayList();

        while (! in.isEnd())
          // valueOf instead of the deprecated Float(double) constructor,
          // consistent with the boxing used elsewhere in this class
          list.add(Float.valueOf((float) in.readDouble()));

        in.readEnd();

        float []data = new float[list.size()];
        for (int i = 0; i < data.length; i++)
          data[i] = ((Float) list.get(i)).floatValue();

        in.addRef(data);

        return data;
      }
    }

    case DOUBLE_ARRAY: {
      if (length >= 0) {
        double []data = new double[length];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = in.readDouble();

        in.readEnd();

        return data;
      }
      else {
        ArrayList list = new ArrayList();

        while (! in.isEnd())
          // valueOf instead of the deprecated Double(double) constructor
          list.add(Double.valueOf(in.readDouble()));

        in.readEnd();

        double []data = new double[list.size()];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = ((Double) list.get(i)).doubleValue();

        return data;
      }
    }

    case STRING_ARRAY: {
      if (length >= 0) {
        String []data = new String[length];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = in.readString();

        in.readEnd();

        return data;
      }
      else {
        ArrayList list = new ArrayList();

        while (! in.isEnd())
          list.add(in.readString());

        in.readEnd();

        String []data = new String[list.size()];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = (String) list.get(i);

        return data;
      }
    }

    case OBJECT_ARRAY: {
      if (length >= 0) {
        Object []data = new Object[length];

        in.addRef(data);

        for (int i = 0; i < data.length; i++)
          data[i] = in.readObject();

        in.readEnd();

        return data;
      }
      else {
        ArrayList list = new ArrayList();

        in.addRef(list); // XXX: potential issues here

        while (! in.isEnd())
          list.add(in.readObject());

        in.readEnd();

        Object []data = new Object[list.size()];

        for (int i = 0; i < data.length; i++)
          data[i] = (Object) list.get(i);

        return data;
      }
    }

    default:
      throw new UnsupportedOperationException(String.valueOf(this));
    }
  }

  /**
   * Reads a fixed-length list as a typed Java array. Unlike
   * {@link #readList}, no end marker follows the elements.
   *
   * @param in the Hessian input stream
   * @param length exact element count (non-negative)
   * @return the decoded array
   * @throws IOException on a stream/protocol error
   * @throws UnsupportedOperationException when this deserializer's type
   *         is not an array type
   */
  public Object readLengthList(AbstractHessianInput in, int length)
    throws IOException
  {
    switch (_code) {
    case BOOLEAN_ARRAY: {
      boolean []data = new boolean[length];

      in.addRef(data);

      for (int i = 0; i < data.length; i++)
        data[i] = in.readBoolean();

      return data;
    }

    case SHORT_ARRAY: {
      short []data = new short[length];

      in.addRef(data);

      for (int i = 0; i < data.length; i++)
        data[i] = (short) in.readInt();

      return data;
    }

    case INTEGER_ARRAY: {
      int []data = new int[length];

      in.addRef(data);

      for (int i = 0; i < data.length; i++)
        data[i] = in.readInt();

      return data;
    }

    case LONG_ARRAY: {
      long []data = new long[length];

      in.addRef(data);

      for (int i = 0; i < data.length; i++)
        data[i] = in.readLong();

      return data;
    }

    case FLOAT_ARRAY: {
      float []data = new float[length];

      in.addRef(data);

      for (int i = 0; i < data.length; i++)
        data[i] = (float) in.readDouble();

      return data;
    }

    case DOUBLE_ARRAY: {
      double []data = new double[length];

      in.addRef(data);

      for (int i = 0; i < data.length; i++)
        data[i] = in.readDouble();

      return data;
    }

    case STRING_ARRAY: {
      String []data = new String[length];

      in.addRef(data);

      for (int i = 0; i < data.length; i++)
        data[i] = in.readString();

      return data;
    }

    case OBJECT_ARRAY: {
      Object []data = new Object[length];

      in.addRef(data);

      for (int i = 0; i < data.length; i++)
        data[i] = in.readObject();

      return data;
    }

    default:
      throw new UnsupportedOperationException(String.valueOf(this));
    }
  }
}
|
/*
* 07/23/2011
*
* EditMacrosAction.java - Action that opens the Options dialog to the "macros"
* panel.
* Copyright (C) 2011 Robert Futrell
* http://fifesoft.com/rtext
* Licensed under a modified BSD license.
* See the included license file for details.
*/
package org.fife.rtext.plugins.macros;
import java.awt.event.ActionEvent;
import java.util.ResourceBundle;
import org.fife.rtext.RText;
import org.fife.ui.OptionsDialog;
import org.fife.ui.app.AppAction;
import org.fife.ui.app.themes.FlatDarkTheme;
import org.fife.ui.app.themes.FlatLightTheme;
import javax.swing.*;
/**
* Action that opens the Options dialog to the "Macros" panel.
*
* @author Robert Futrell
* @version 1.0
*/
class EditMacrosAction extends AppAction<RText> {
/**
* Constructor.
*
* @param owner The parent RText instance.
* @param msg The resource bundle to use for localization.
*/
EditMacrosAction(RText owner, ResourceBundle msg) {
super(owner, msg, "EditMacrosAction");
}
@Override
public void actionPerformed(ActionEvent e) {
RText owner = getApplication();
OptionsDialog od = owner.getOptionsDialog();
ResourceBundle msg = MacroPlugin.MSG;
od.setSelectedOptionsPanel(msg.getString(MacroOptionPanel.TITLE_KEY));
od.initialize();
od.setVisible(true);
}
void restoreDefaultIcon() {
// In flat themes, cog_add === just a cog, so show no icon for this action
switch (getApplication().getTheme().getId()) {
case FlatDarkTheme.ID, FlatLightTheme.ID -> setIcon((Icon)null);
default -> setIcon("eclipse/cog.png");
}
}
}
|
package com.box.l10n.mojito.service.cli;
import com.box.l10n.mojito.mustache.MustacheBaseContext;
import java.net.URL;
/**
 * Mustache context holding the values needed to render the CLI install
 * instructions: where to install and how to reach the server.
 *
 * <p>NOTE(review): the package-private field names are presumably resolved
 * by the Mustache template — keep them stable.</p>
 */
public class InstallCliContext extends MustacheBaseContext {

  String installDirectory;
  String scheme;
  String host;
  String port;

  /**
   * @param installDirectory directory the CLI should be installed into
   * @param scheme URL scheme used to reach the server
   * @param host server host name
   * @param port server port
   */
  public InstallCliContext(String installDirectory, String scheme, String host, String port) {
    this.port = port;
    this.host = host;
    this.scheme = scheme;
    this.installDirectory = installDirectory;
  }
}
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.restapi;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriBuilder;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.util.EntityUtils;
import org.assertj.core.api.Assertions;
import org.junit.Assert;
import org.junit.Test;
import org.olat.basesecurity.Group;
import org.olat.basesecurity.OrganisationService;
import org.olat.core.commons.persistence.DB;
import org.olat.core.id.Identity;
import org.olat.core.id.Organisation;
import org.olat.core.logging.OLog;
import org.olat.core.logging.Tracing;
import org.olat.course.CourseFactory;
import org.olat.course.ICourse;
import org.olat.modules.curriculum.Curriculum;
import org.olat.modules.curriculum.CurriculumCalendars;
import org.olat.modules.curriculum.CurriculumElement;
import org.olat.modules.curriculum.CurriculumElementStatus;
import org.olat.modules.curriculum.CurriculumLectures;
import org.olat.modules.curriculum.CurriculumService;
import org.olat.modules.lecture.LectureBlock;
import org.olat.modules.lecture.LectureBlockStatus;
import org.olat.modules.lecture.LectureBlockToTaxonomyLevel;
import org.olat.modules.lecture.LectureRollCallStatus;
import org.olat.modules.lecture.LectureService;
import org.olat.modules.lecture.RepositoryEntryLectureConfiguration;
import org.olat.modules.lecture.manager.LectureBlockToTaxonomyLevelDAO;
import org.olat.modules.lecture.model.LectureBlockRefImpl;
import org.olat.modules.lecture.restapi.LectureBlockVO;
import org.olat.modules.lecture.restapi.RepositoryEntryLectureConfigurationVO;
import org.olat.modules.taxonomy.Taxonomy;
import org.olat.modules.taxonomy.TaxonomyLevel;
import org.olat.modules.taxonomy.manager.TaxonomyDAO;
import org.olat.modules.taxonomy.manager.TaxonomyLevelDAO;
import org.olat.modules.taxonomy.restapi.TaxonomyLevelVO;
import org.olat.repository.RepositoryEntry;
import org.olat.repository.RepositoryService;
import org.olat.test.JunitTestHelper;
import org.olat.test.OlatJerseyTestCase;
import org.springframework.beans.factory.annotation.Autowired;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
*
*
* Initial date: 8 juin 2017<br>
* @author srosse, stephane.rosse@frentix.com, http://www.frentix.com
*
*/
public class LecturesBlocksTest extends OlatJerseyTestCase {
private static final OLog log = Tracing.createLoggerFor(LecturesBlocksTest.class);
@Autowired
private DB dbInstance;
@Autowired
private TaxonomyDAO taxonomyDao;
@Autowired
private TaxonomyLevelDAO taxonomyLevelDao;
@Autowired
private LectureService lectureService;
@Autowired
private RepositoryService repositoryService;
@Autowired
private CurriculumService curriculumService;
@Autowired
private OrganisationService organisationService;
@Autowired
private LectureBlockToTaxonomyLevelDAO lectureBlockToTaxonomyLevelDao;
/**
 * Get the list of lecture blocks through the course resource.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void getLecturesBlock_course()
throws IOException, URISyntaxException {
	// an author's course with a single lecture block
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry courseEntry = JunitTestHelper.deployBasicCourse(author);
	ICourse course = CourseFactory.loadCourse(courseEntry);
	RepositoryEntry entry = course.getCourseEnvironment().getCourseGroupManager().getCourseEntry();
	LectureBlock block = createLectureBlock(entry);
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("courses")
			.path(course.getResourceableId().toString()).path("lectureblocks").build();
	HttpGet getRequest = connection.createGet(requestUri, MediaType.APPLICATION_JSON, true);
	HttpResponse httpResponse = connection.execute(getRequest);
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());

	// the created block is the only one returned, attached to the entry
	List<LectureBlockVO> blockVos = parseLectureBlockArray(httpResponse.getEntity());
	Assert.assertNotNull(blockVos);
	Assert.assertEquals(1, blockVos.size());
	LectureBlockVO blockVo = blockVos.get(0);
	Assert.assertEquals(block.getKey(), blockVo.getKey());
	Assert.assertEquals(entry.getKey(), blockVo.getRepoEntryKey());
}
/**
 * Get the list of lecture blocks through the repository entry resource.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void getLecturesBlock_repository()
throws IOException, URISyntaxException {
	// an author's course with a single lecture block
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry courseEntry = JunitTestHelper.deployBasicCourse(author);
	ICourse course = CourseFactory.loadCourse(courseEntry);
	RepositoryEntry entry = course.getCourseEnvironment().getCourseGroupManager().getCourseEntry();
	LectureBlock block = createLectureBlock(entry);
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString()).path("lectureblocks").build();
	HttpGet getRequest = connection.createGet(requestUri, MediaType.APPLICATION_JSON, true);
	HttpResponse httpResponse = connection.execute(getRequest);
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());

	// the created block is the only one returned, attached to the entry
	List<LectureBlockVO> blockVos = parseLectureBlockArray(httpResponse.getEntity());
	Assert.assertNotNull(blockVos);
	Assert.assertEquals(1, blockVos.size());
	LectureBlockVO blockVo = blockVos.get(0);
	Assert.assertEquals(block.getKey(), blockVo.getKey());
	Assert.assertEquals(entry.getKey(), blockVo.getRepoEntryKey());
}
/**
 * Create a lecture block via PUT on the repository entry resource and
 * verify the returned VO and the persisted block.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void putLecturesBlock_repository()
throws IOException, URISyntaxException {
	// setup: a course without any lecture block yet
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry courseEntry = JunitTestHelper.deployBasicCourse(author);
	ICourse course = CourseFactory.loadCourse(courseEntry);
	RepositoryEntry entry = course.getCourseEnvironment().getCourseGroupManager().getCourseEntry();
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	// the block to create
	String externalId = UUID.randomUUID().toString();
	LectureBlockVO newBlockVo = new LectureBlockVO();
	newBlockVo.setTitle("New block");
	newBlockVo.setDescription("A little description");
	newBlockVo.setComment("A comment");
	newBlockVo.setLocation("The secret location");
	newBlockVo.setManagedFlagsString("all");
	newBlockVo.setPreparation("Lot of");
	newBlockVo.setPlannedLectures(4);
	newBlockVo.setExternalId(externalId);
	newBlockVo.setStartDate(new Date());
	newBlockVo.setEndDate(new Date());

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString()).path("lectureblocks").build();
	HttpPut putRequest = connection.createPut(requestUri, MediaType.APPLICATION_JSON, true);
	connection.addJsonEntity(putRequest, newBlockVo);
	HttpResponse httpResponse = connection.execute(putRequest);

	// check the response
	Assertions.assertThat(httpResponse.getStatusLine().getStatusCode()).isIn(200, 201);
	LectureBlockVO blockVo = connection.parse(httpResponse.getEntity(), LectureBlockVO.class);
	Assert.assertNotNull(blockVo);
	Assert.assertEquals(entry.getKey(), blockVo.getRepoEntryKey());
	Assert.assertEquals("New block", blockVo.getTitle());
	Assert.assertEquals("A little description", blockVo.getDescription());
	Assert.assertEquals("A comment", blockVo.getComment());
	Assert.assertEquals("The secret location", blockVo.getLocation());
	Assert.assertEquals("all", blockVo.getManagedFlagsString());
	Assert.assertEquals(4, blockVo.getPlannedLectures());
	Assert.assertEquals(externalId, blockVo.getExternalId());
	Assert.assertNotNull(blockVo.getStartDate());
	Assert.assertNotNull(blockVo.getEndDate());

	// check the database
	LectureBlock dbBlock = lectureService.getLectureBlock(new LectureBlockRefImpl(blockVo.getKey()));
	Assert.assertNotNull(dbBlock);
	Assert.assertEquals("New block", dbBlock.getTitle());
	Assert.assertEquals("A little description", dbBlock.getDescription());
	Assert.assertEquals("A comment", dbBlock.getComment());
	Assert.assertEquals("The secret location", dbBlock.getLocation());
	Assert.assertEquals("all", dbBlock.getManagedFlagsString());
	Assert.assertEquals(4, dbBlock.getPlannedLecturesNumber());
	Assert.assertEquals(externalId, dbBlock.getExternalId());
	Assert.assertNotNull(dbBlock.getStartDate());
	Assert.assertNotNull(dbBlock.getEndDate());
}
/**
 * Check that the done and autoclosed status are set.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void putLecturesBlock_autoclosed()
throws IOException, URISyntaxException {
	// setup: a plain course
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry courseEntry = JunitTestHelper.deployBasicCourse(author);
	ICourse course = CourseFactory.loadCourse(courseEntry);
	RepositoryEntry entry = course.getCourseEnvironment().getCourseGroupManager().getCourseEntry();
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	// a block already flagged as done / autoclosed
	String externalId = UUID.randomUUID().toString();
	LectureBlockVO newBlockVo = new LectureBlockVO();
	newBlockVo.setTitle("A block to close");
	newBlockVo.setDescription("A description");
	newBlockVo.setManagedFlagsString("all");
	newBlockVo.setPlannedLectures(4);
	newBlockVo.setExternalId(externalId);
	newBlockVo.setStartDate(new Date());
	newBlockVo.setEndDate(new Date());
	newBlockVo.setStatus("done");
	newBlockVo.setRollCallStatus("autoclosed");

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString()).path("lectureblocks").build();
	HttpPut putRequest = connection.createPut(requestUri, MediaType.APPLICATION_JSON, true);
	connection.addJsonEntity(putRequest, newBlockVo);
	HttpResponse httpResponse = connection.execute(putRequest);

	// check the response
	Assertions.assertThat(httpResponse.getStatusLine().getStatusCode()).isIn(200, 201);
	LectureBlockVO blockVo = connection.parse(httpResponse.getEntity(), LectureBlockVO.class);
	Assert.assertNotNull(blockVo);

	// check the database, especially the two status fields
	LectureBlock dbBlock = lectureService.getLectureBlock(new LectureBlockRefImpl(blockVo.getKey()));
	Assert.assertNotNull(dbBlock);
	Assert.assertEquals("A block to close", dbBlock.getTitle());
	Assert.assertEquals("A description", dbBlock.getDescription());
	Assert.assertEquals("all", dbBlock.getManagedFlagsString());
	Assert.assertEquals(4, dbBlock.getPlannedLecturesNumber());
	Assert.assertEquals(externalId, dbBlock.getExternalId());
	Assert.assertNotNull(dbBlock.getStartDate());
	Assert.assertNotNull(dbBlock.getEndDate());
	Assert.assertEquals(LectureBlockStatus.done, dbBlock.getStatus());
	Assert.assertEquals(LectureRollCallStatus.autoclosed, dbBlock.getRollCallStatus());
}
/**
 * Fetch the lecture configuration of a repository entry; a configuration
 * VO is returned even for a freshly deployed course.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void getLecturesBlockConfiguration()
throws IOException, URISyntaxException {
	// a course without an explicitly saved lecture configuration
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry courseEntry = JunitTestHelper.deployBasicCourse(author);
	ICourse course = CourseFactory.loadCourse(courseEntry);
	RepositoryEntry entry = course.getCourseEnvironment().getCourseGroupManager().getCourseEntry();
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString()).path("lectureblocks").path("configuration").build();
	HttpGet getRequest = connection.createGet(requestUri, MediaType.APPLICATION_JSON, true);
	HttpResponse httpResponse = connection.execute(getRequest);
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());

	RepositoryEntryLectureConfigurationVO configVo = connection.parse(httpResponse, RepositoryEntryLectureConfigurationVO.class);
	Assert.assertNotNull(configVo);
}
/**
 * Update the lecture configuration of a repository entry and verify both
 * the returned VO and the persisted configuration.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void updateLecturesBlockConfiguration()
throws IOException, URISyntaxException {
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	// enable every lecture related option
	RepositoryEntryLectureConfigurationVO configurationVo = new RepositoryEntryLectureConfigurationVO();
	configurationVo.setLectureEnabled(Boolean.TRUE);
	configurationVo.setCalculateAttendanceRate(Boolean.TRUE);
	configurationVo.setOverrideModuleDefault(Boolean.TRUE);
	configurationVo.setCourseCalendarSyncEnabled(Boolean.TRUE);
	configurationVo.setRequiredAttendanceRate(34.0d);
	configurationVo.setRollCallEnabled(Boolean.TRUE);
	configurationVo.setTeacherCalendarSyncEnabled(Boolean.TRUE);

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString()).path("lectureblocks").path("configuration").build();
	HttpPost postRequest = connection.createPost(requestUri, MediaType.APPLICATION_JSON);
	connection.addJsonEntity(postRequest, configurationVo);
	HttpResponse httpResponse = connection.execute(postRequest);

	// check the response
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	RepositoryEntryLectureConfigurationVO updatedVo = connection.parse(httpResponse, RepositoryEntryLectureConfigurationVO.class);
	Assert.assertNotNull(updatedVo);
	Assert.assertEquals(Boolean.TRUE, updatedVo.getLectureEnabled());
	Assert.assertEquals(Boolean.TRUE, updatedVo.getCalculateAttendanceRate());
	Assert.assertEquals(Boolean.TRUE, updatedVo.getOverrideModuleDefault());
	Assert.assertEquals(Boolean.TRUE, updatedVo.getCourseCalendarSyncEnabled());
	Assert.assertEquals(34.0d, updatedVo.getRequiredAttendanceRate(), 0.1);
	Assert.assertEquals(Boolean.TRUE, updatedVo.getRollCallEnabled());
	Assert.assertEquals(Boolean.TRUE, updatedVo.getTeacherCalendarSyncEnabled());

	// check the database
	RepositoryEntryLectureConfiguration dbConfig = lectureService.getRepositoryEntryLectureConfiguration(entry);
	Assert.assertNotNull(dbConfig);
	Assert.assertTrue(dbConfig.isLectureEnabled());
	Assert.assertEquals(Boolean.TRUE, dbConfig.getCalculateAttendanceRate());
	Assert.assertTrue(dbConfig.isOverrideModuleDefault());
	Assert.assertEquals(Boolean.TRUE, dbConfig.getCourseCalendarSyncEnabled());
	Assert.assertEquals(34.0d, dbConfig.getRequiredAttendanceRate(), 0.1);
	Assert.assertEquals(Boolean.TRUE, dbConfig.getRollCallEnabled());
	Assert.assertEquals(Boolean.TRUE, dbConfig.getTeacherCalendarSyncEnabled());
}
/**
 * Fetch a single lecture block by its key.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void getLectureBlock()
throws IOException, URISyntaxException {
	// a course with a single lecture block
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock block = createLectureBlock(entry);
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString())
			.path("lectureblocks").path(block.getKey().toString()).build();
	HttpGet getRequest = connection.createGet(requestUri, MediaType.APPLICATION_JSON, true);
	HttpResponse httpResponse = connection.execute(getRequest);

	// check the response
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	LectureBlockVO blockVo = connection.parse(httpResponse, LectureBlockVO.class);
	Assert.assertNotNull(blockVo);
	Assert.assertEquals(block.getKey(), blockVo.getKey());
	Assert.assertEquals(entry.getKey(), blockVo.getRepoEntryKey());
}
/**
 * Delete a lecture block and verify it is gone from the database.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void deleteLectureBlock()
throws IOException, URISyntaxException {
	// a course with a single lecture block
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock block = createLectureBlock(entry);
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString())
			.path("lectureblocks").path(block.getKey().toString()).build();
	HttpDelete deleteRequest = connection.createDelete(requestUri, MediaType.APPLICATION_JSON);
	HttpResponse httpResponse = connection.execute(deleteRequest);

	// check the response and that the block was removed
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	LectureBlock deletedBlock = lectureService.getLectureBlock(block);
	Assert.assertNull(deletedBlock);
}
/**
 * Add the repository entry's default group as participant source of a
 * lecture block.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void addRepositoryEntryDefaultGroupToLectureBlock()
throws IOException, URISyntaxException {
	// a course with a single lecture block
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock block = createLectureBlock(entry);
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString())
			.path("lectureblocks").path(block.getKey().toString())
			.path("participants").path("repositoryentry").build();
	HttpPut putRequest = connection.createPut(requestUri, MediaType.APPLICATION_JSON, true);
	HttpResponse httpResponse = connection.execute(putRequest);

	// check the response
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	EntityUtils.consume(httpResponse.getEntity());

	//check the database: the block's only group is the entry's default group
	List<Group> groups = lectureService.getLectureBlockToGroups(block);
	Assert.assertNotNull(groups);
	Assert.assertEquals(1, groups.size());
	Group defGroup = repositoryService.getDefaultGroup(entry);
	Assert.assertEquals(defGroup, groups.get(0));
}
/**
 * Remove the repository entry's default group from a lecture block that
 * currently uses it as participant source.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void removeRepositoryEntryDefaultGroupToLectureBlock()
throws IOException, URISyntaxException {
	// a course with a lecture block linked to the entry's default group
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock block = createLectureBlock(entry);
	Group defGroup = repositoryService.getDefaultGroup(entry);
	lectureService.save(block, Collections.singletonList(defGroup));
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString())
			.path("lectureblocks").path(block.getKey().toString())
			.path("participants").path("repositoryentry").build();
	HttpDelete deleteRequest = connection.createDelete(requestUri, MediaType.APPLICATION_JSON);
	HttpResponse httpResponse = connection.execute(deleteRequest);

	// check the response
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	EntityUtils.consume(httpResponse.getEntity());

	//check the database: the block has no group anymore
	List<Group> groups = lectureService.getLectureBlockToGroups(block);
	Assert.assertNotNull(groups);
	Assert.assertEquals(0, groups.size());
}
/**
 * Add the curriculum element of a course as participant source of one of
 * its lecture blocks.
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void addRepositoryEntryCurriculumElementToLectureBlock()
throws IOException, URISyntaxException {
	// prepare a course with a curriculum element and a lecture block
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock block = createLectureBlock(entry);
	dbInstance.commit();
	Organisation defOrganisation = organisationService.getDefaultOrganisation();
	Curriculum curriculum = curriculumService.createCurriculum("add-group", "Add group REST", "", defOrganisation);
	CurriculumElement curriculumElement = curriculumService.createCurriculumElement("add-group", "Add element group", CurriculumElementStatus.active,
			null, null, null, null, CurriculumCalendars.disabled, CurriculumLectures.disabled, curriculum);
	curriculumService.addRepositoryEntry(curriculumElement, entry, true);
	dbInstance.commit();

	RestConnection connection = new RestConnection();
	Assert.assertTrue(connection.login("administrator", "openolat"));

	URI requestUri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
			.path(entry.getKey().toString())
			.path("lectureblocks").path(block.getKey().toString())
			.path("participants").path("curriculum").build();
	HttpPut putRequest = connection.createPut(requestUri, MediaType.APPLICATION_JSON, true);
	HttpResponse httpResponse = connection.execute(putRequest);

	// check the response
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	EntityUtils.consume(httpResponse.getEntity());

	//check the database: the element's group is now linked to the block
	List<Group> groups = lectureService.getLectureBlockToGroups(block);
	Assert.assertNotNull(groups);
	Assert.assertEquals(1, groups.size());
	Assert.assertTrue(groups.contains(curriculumElement.getGroup()));
}
@Test
public void removeRepositoryEntryCurriculumElementToLectureBlock()
throws IOException, URISyntaxException {
	// Course with a lecture block whose participants already come from a curriculum element
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock lectureBlock = createLectureBlock(entry);
	dbInstance.commit();
	Organisation organisation = organisationService.getDefaultOrganisation();
	Curriculum curriculum = curriculumService.createCurriculum("rm-group", "Remove group REST", "", organisation);
	CurriculumElement element = curriculumService.createCurriculumElement("rm-group", "Remove element group", CurriculumElementStatus.active,
			null, null, null, null, CurriculumCalendars.disabled, CurriculumLectures.disabled, curriculum);
	curriculumService.addRepositoryEntry(element, entry, true);
	dbInstance.commit();
	lectureService.save(lectureBlock, Collections.singletonList(element.getGroup()));
	dbInstance.commit();

	// DELETE the curriculum element group from the block's participant sources
	RestConnection restConnection = new RestConnection();
	Assert.assertTrue(restConnection.login("administrator", "openolat"));
	URI requestUri = UriBuilder.fromUri(getContextURI())
			.path("repo").path("entries").path(entry.getKey().toString())
			.path("lectureblocks").path(lectureBlock.getKey().toString())
			.path("participants").path("curriculum").build();
	HttpDelete deleteMethod = restConnection.createDelete(requestUri, MediaType.APPLICATION_JSON);
	HttpResponse httpResponse = restConnection.execute(deleteMethod);
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	EntityUtils.consume(httpResponse.getEntity());

	// No group is linked to the block anymore
	List<Group> blockGroups = lectureService.getLectureBlockToGroups(lectureBlock);
	Assert.assertNotNull(blockGroups);
	Assert.assertTrue(blockGroups.isEmpty());
}
@Test
public void addTeacherToLectureBlock()
throws IOException, URISyntaxException {
	// A course, a lecture block, and an identity that will become teacher
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	Identity teacher = JunitTestHelper.createAndPersistIdentityAsRndUser("teacher-1");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock lectureBlock = createLectureBlock(entry);
	dbInstance.commit();

	// PUT the identity as teacher of the lecture block
	RestConnection restConnection = new RestConnection();
	Assert.assertTrue(restConnection.login("administrator", "openolat"));
	URI requestUri = UriBuilder.fromUri(getContextURI())
			.path("repo").path("entries").path(entry.getKey().toString())
			.path("lectureblocks").path(lectureBlock.getKey().toString())
			.path("teachers").path(teacher.getKey().toString()).build();
	HttpPut putMethod = restConnection.createPut(requestUri, MediaType.APPLICATION_JSON, true);
	HttpResponse httpResponse = restConnection.execute(putMethod);
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	EntityUtils.consume(httpResponse.getEntity());

	// The identity now shows up in the block's teacher list
	List<Identity> teachers = lectureService.getTeachers(lectureBlock);
	Assert.assertTrue(teachers.contains(teacher));
}
@Test
public void removeTeacherToLectureBlock()
throws IOException, URISyntaxException {
	// A lecture block with two teachers
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	Identity teacher1 = JunitTestHelper.createAndPersistIdentityAsRndUser("teacher-2");
	Identity teacher2 = JunitTestHelper.createAndPersistIdentityAsRndUser("teacher-3");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock lectureBlock = createLectureBlock(entry);
	dbInstance.commit();
	lectureService.addTeacher(lectureBlock, teacher1);
	lectureService.addTeacher(lectureBlock, teacher2);
	dbInstance.commit();

	// DELETE the first teacher over REST
	RestConnection restConnection = new RestConnection();
	Assert.assertTrue(restConnection.login("administrator", "openolat"));
	URI requestUri = UriBuilder.fromUri(getContextURI())
			.path("repo").path("entries").path(entry.getKey().toString())
			.path("lectureblocks").path(lectureBlock.getKey().toString())
			.path("teachers").path(teacher1.getKey().toString()).build();
	HttpDelete deleteMethod = restConnection.createDelete(requestUri, MediaType.APPLICATION_JSON);
	HttpResponse httpResponse = restConnection.execute(deleteMethod);
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	EntityUtils.consume(httpResponse.getEntity());

	// Only the second teacher remains
	List<Identity> teachers = lectureService.getTeachers(lectureBlock);
	Assert.assertEquals(1, teachers.size());
	Assert.assertFalse(teachers.contains(teacher1));
	Assert.assertTrue(teachers.contains(teacher2));
}
/**
 * Move a lecture block from one course to the other. Asserts both the
 * returned VO (references the target entry) and the database state
 * (origin has no blocks left, target holds exactly the moved one).
 *
 * @throws IOException
 * @throws URISyntaxException
 */
@Test
public void moveLectureBlock()
throws IOException, URISyntaxException {
// Origin and target courses owned by the same author
Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
RepositoryEntry entryOrigin = JunitTestHelper.deployBasicCourse(author);
RepositoryEntry entryTarget = JunitTestHelper.deployBasicCourse(author);
ICourse courseOrigin = CourseFactory.loadCourse(entryOrigin);
// NOTE(review): origin entry is re-read through the course environment,
// presumably to use the same entry instance the course holds — confirm
entryOrigin = courseOrigin.getCourseEnvironment().getCourseGroupManager().getCourseEntry();
LectureBlock block = createLectureBlock(entryOrigin);
dbInstance.commit();
RestConnection conn = new RestConnection();
Assert.assertTrue(conn.login("administrator", "openolat"));
// POST .../courses/{courseId}/lectureblocks/{blockKey}/entry/{targetEntryKey}
URI uri = UriBuilder.fromUri(getContextURI()).path("repo").path("courses").path(courseOrigin.getResourceableId().toString())
.path("lectureblocks").path(block.getKey().toString())
.path("entry").path(entryTarget.getKey().toString()).build();
HttpPost method = conn.createPost(uri, MediaType.APPLICATION_JSON);
HttpResponse response = conn.execute(method);
Assert.assertEquals(200, response.getStatusLine().getStatusCode());
// the returned VO must reference the target entry
LectureBlockVO movedBlock = conn.parse(response, LectureBlockVO.class);
Assert.assertNotNull(movedBlock);
Assert.assertEquals(entryTarget.getKey(), movedBlock.getRepoEntryKey());
// check lecture blocks of origin
List<LectureBlock> originBlocks = lectureService.getLectureBlocks(entryOrigin);
Assert.assertNotNull(originBlocks);
Assert.assertTrue(originBlocks.isEmpty());
// check lecture block of target
List<LectureBlock> targetBlocks = lectureService.getLectureBlocks(entryTarget);
Assert.assertNotNull(targetBlocks);
Assert.assertEquals(1, targetBlocks.size());
Assert.assertEquals(block.getKey(), targetBlocks.get(0).getKey());
}
@Test
public void getTaxonomyLevels()
throws IOException, URISyntaxException {
	// A lecture block related to a single taxonomy level
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	Identity teacher = JunitTestHelper.createAndPersistIdentityAsRndUser("teacher-2");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock lectureBlock = createLectureBlock(entry);
	dbInstance.commit();
	lectureService.addTeacher(lectureBlock, teacher);
	Taxonomy taxonomy = taxonomyDao.createTaxonomy("ID-200", "Leveled taxonomy", null, null);
	TaxonomyLevel level = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy);
	lectureBlockToTaxonomyLevelDao.createRelation(lectureBlock, level);
	dbInstance.commit();

	// GET the taxonomy levels of the block
	RestConnection restConnection = new RestConnection();
	Assert.assertTrue(restConnection.login("administrator", "openolat"));
	URI requestUri = UriBuilder.fromUri(getContextURI())
			.path("repo").path("entries").path(entry.getKey().toString())
			.path("lectureblocks").path(lectureBlock.getKey().toString())
			.path("taxonomy").path("levels").build();
	HttpGet getMethod = restConnection.createGet(requestUri, MediaType.APPLICATION_JSON, true);
	HttpResponse httpResponse = restConnection.execute(getMethod);
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());

	// Exactly the one related level is returned
	List<TaxonomyLevelVO> levelVoes = parseTaxonomyLevelArray(httpResponse.getEntity());
	Assert.assertNotNull(levelVoes);
	Assert.assertEquals(1, levelVoes.size());
}
@Test
public void addTaxonomyLevels()
throws IOException, URISyntaxException {
	// A lecture block and a taxonomy level not yet related to each other
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	Identity teacher = JunitTestHelper.createAndPersistIdentityAsRndUser("teacher-2");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock lectureBlock = createLectureBlock(entry);
	dbInstance.commit();
	lectureService.addTeacher(lectureBlock, teacher);
	Taxonomy taxonomy = taxonomyDao.createTaxonomy("ID-200", "Leveled taxonomy", null, null);
	TaxonomyLevel level = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy);
	dbInstance.commitAndCloseSession();

	// PUT the level on the block
	RestConnection restConnection = new RestConnection();
	Assert.assertTrue(restConnection.login("administrator", "openolat"));
	URI requestUri = UriBuilder.fromUri(getContextURI())
			.path("repo").path("entries").path(entry.getKey().toString())
			.path("lectureblocks").path(lectureBlock.getKey().toString())
			.path("taxonomy").path("levels").path(level.getKey().toString()).build();
	HttpPut putMethod = restConnection.createPut(requestUri, MediaType.APPLICATION_JSON, true);
	HttpResponse httpResponse = restConnection.execute(putMethod);
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	EntityUtils.consume(httpResponse.getEntity());

	// Exactly one relation was persisted, pointing to the level
	LectureBlock reloadedBlock = lectureService.getLectureBlock(lectureBlock);
	Set<LectureBlockToTaxonomyLevel> relationToLevels = reloadedBlock.getTaxonomyLevels();
	Assert.assertNotNull(relationToLevels);
	Assert.assertEquals(1, relationToLevels.size());
	Assert.assertEquals(level, relationToLevels.iterator().next().getTaxonomyLevel());
}
/**
 * Adding a taxonomy level that is already related to the lecture block
 * must answer 304 (not modified) and leave the single relation untouched.
 */
@Test
public void addTwiceTaxonomyLevels()
throws IOException, URISyntaxException {
Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
Identity teacher = JunitTestHelper.createAndPersistIdentityAsRndUser("teacher-2");
RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
LectureBlock block = createLectureBlock(entry);
dbInstance.commit();
lectureService.addTeacher(block, teacher);
Taxonomy taxonomy = taxonomyDao.createTaxonomy("ID-200", "Leveled taxonomy", null, null);
TaxonomyLevel level = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy);
// the relation already exists before the REST call
lectureBlockToTaxonomyLevelDao.createRelation(block, level);
dbInstance.commitAndCloseSession();
RestConnection conn = new RestConnection();
Assert.assertTrue(conn.login("administrator", "openolat"));
URI uri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries")
.path(entry.getKey().toString())
.path("lectureblocks").path(block.getKey().toString())
.path("taxonomy").path("levels").path(level.getKey().toString()).build();
HttpPut method = conn.createPut(uri, MediaType.APPLICATION_JSON, true);
HttpResponse response = conn.execute(method);
// 304: the level was already set on the block
Assert.assertEquals(304, response.getStatusLine().getStatusCode());
EntityUtils.consume(response.getEntity());
// still exactly one relation, pointing to the same level
LectureBlock reloadedBlock = lectureService.getLectureBlock(block);
Set<LectureBlockToTaxonomyLevel> relationToLevels = reloadedBlock.getTaxonomyLevels();
Assert.assertNotNull(relationToLevels);
Assert.assertEquals(1, relationToLevels.size());
LectureBlockToTaxonomyLevel relationToLevel = relationToLevels.iterator().next();
Assert.assertEquals(level, relationToLevel.getTaxonomyLevel());
}
@Test
public void deleteTaxonomyLevel()
throws IOException, URISyntaxException {
	// A lecture block related to two taxonomy levels
	Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("lect-1");
	Identity teacher = JunitTestHelper.createAndPersistIdentityAsRndUser("teacher-2");
	RepositoryEntry entry = JunitTestHelper.deployBasicCourse(author);
	LectureBlock lectureBlock = createLectureBlock(entry);
	dbInstance.commit();
	lectureService.addTeacher(lectureBlock, teacher);
	Taxonomy taxonomy = taxonomyDao.createTaxonomy("ID-202", "Leveled taxonomy", null, null);
	TaxonomyLevel level1 = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy);
	TaxonomyLevel level2 = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy);
	lectureBlockToTaxonomyLevelDao.createRelation(lectureBlock, level1);
	lectureBlockToTaxonomyLevelDao.createRelation(lectureBlock, level2);
	dbInstance.commitAndCloseSession();

	// Guard: both relations exist before the call
	List<TaxonomyLevel> levels = lectureBlockToTaxonomyLevelDao.getTaxonomyLevels(lectureBlock);
	Assert.assertEquals(2, levels.size());
	dbInstance.commitAndCloseSession();

	// DELETE the first level from the block
	RestConnection restConnection = new RestConnection();
	Assert.assertTrue(restConnection.login("administrator", "openolat"));
	URI requestUri = UriBuilder.fromUri(getContextURI())
			.path("repo").path("entries").path(entry.getKey().toString())
			.path("lectureblocks").path(lectureBlock.getKey().toString())
			.path("taxonomy").path("levels").path(level1.getKey().toString()).build();
	HttpDelete deleteMethod = restConnection.createDelete(requestUri, MediaType.APPLICATION_JSON);
	HttpResponse httpResponse = restConnection.execute(deleteMethod);
	Assert.assertEquals(200, httpResponse.getStatusLine().getStatusCode());
	EntityUtils.consume(httpResponse.getEntity());

	// Only the second relation survived
	List<TaxonomyLevel> survivingLevels = lectureBlockToTaxonomyLevelDao.getTaxonomyLevels(lectureBlock);
	Assert.assertEquals(1, survivingLevels.size());
	Assert.assertEquals(level2, survivingLevels.get(0));
}
/**
 * Builds and persists a minimal lecture block attached to the given entry
 * (title "Hello lecturers", 4 planned lectures, start/end set to now).
 */
private LectureBlock createLectureBlock(RepositoryEntry entry) {
	LectureBlock newBlock = lectureService.createLectureBlock(entry);
	newBlock.setTitle("Hello lecturers");
	newBlock.setPlannedLecturesNumber(4);
	newBlock.setStartDate(new Date());
	newBlock.setEndDate(new Date());
	return lectureService.save(newBlock, null);
}
/**
 * Maps the JSON array in the response entity to a list of {@link TaxonomyLevelVO}.
 *
 * @param entity the HTTP response entity holding the JSON array
 * @return the parsed list, or {@code null} if reading or mapping fails
 */
protected List<TaxonomyLevelVO> parseTaxonomyLevelArray(HttpEntity entity) {
	try(InputStream in = entity.getContent()) {
		ObjectMapper mapper = new ObjectMapper(jsonFactory);
		return mapper.readValue(in, new TypeReference<List<TaxonomyLevelVO>>(){/* */});
	} catch (Exception e) {
		// was log.error("", e): an empty message makes failures hard to trace in the logs
		log.error("Cannot parse array of TaxonomyLevelVO", e);
		return null;
	}
}
/**
 * Maps the JSON array in the response entity to a list of {@link LectureBlockVO}.
 *
 * @param entity the HTTP response entity holding the JSON array
 * @return the parsed list, or {@code null} if reading or mapping fails
 */
protected List<LectureBlockVO> parseLectureBlockArray(HttpEntity entity) {
	try(InputStream in = entity.getContent()) {
		ObjectMapper mapper = new ObjectMapper(jsonFactory);
		return mapper.readValue(in, new TypeReference<List<LectureBlockVO>>(){/* */});
	} catch (Exception e) {
		// was log.error("", e): an empty message makes failures hard to trace in the logs
		log.error("Cannot parse array of LectureBlockVO", e);
		return null;
	}
}
}
|
package gson.gson2.serialiser;
import com.google.gson.*;
import gson.gson2.bean.Book;
import java.lang.reflect.Type;
/**
 * Custom Gson deserializer handling the special fields of the book JSON
 * (the hyphenated keys "isbn-10"/"isbn-13" cannot be mapped by field name)
 * and collecting the "authors" string array into the {@link Book} bean.
 */
public class BookDeserializer implements JsonDeserializer<Book> {

	@Override
	public Book deserialize(final JsonElement json, final Type typeOfT, final JsonDeserializationContext context)
			throws JsonParseException {
		// removed leftover debug output (System.out.println("---"))
		final JsonObject jsonObject = json.getAsJsonObject();
		final String title = jsonObject.get("title").getAsString();
		final String isbn10 = jsonObject.get("isbn-10").getAsString();
		final String isbn13 = jsonObject.get("isbn-13").getAsString();

		// "authors" is a JSON array of plain strings
		final JsonArray jsonAuthorsArray = jsonObject.get("authors").getAsJsonArray();
		final String[] authors = new String[jsonAuthorsArray.size()];
		for (int i = 0; i < authors.length; i++) {
			authors[i] = jsonAuthorsArray.get(i).getAsString();
		}

		final Book book = new Book();
		book.setTitle(title);
		book.setIsbn10(isbn10);
		book.setIsbn13(isbn13);
		book.setAuthors(authors);
		return book;
	}
}
|
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/chat/trunk/chat-tool/tool/src/java/org/sakaiproject/chat/tool/ColorMapper.java $
* $Id: ColorMapper.java 8206 2006-04-24 19:40:15Z ggolden@umich.edu $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.chat2.tool;
import java.lang.reflect.Array;
import java.util.List;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map;
/**
* <p>ColorMapper is a wrapper for a Hashtable that maps user names (or any set of Strings) to colors.</p>
* <p>The colors are standard names for HTML colors.</p>
*/
/**
 * ColorMapper wraps a map from names (any Strings) to standard HTML color
 * names, handed out round-robin. The same name always gets the same color.
 */
public class ColorMapper
{
	// The index of the next color in the COLORS array that will be assigned to a name
	protected int m_next = 0;

	// A mapping of names to colors (Hashtable keeps the original synchronized, null-hostile behavior)
	protected Map<String, String> m_map;

	// An array of Strings representing standard HTML colors.
	protected static final String[] COLORS =
	{ "red", "blue", "green", "orange", "firebrick", "teal", "goldenrod",
			"darkgreen", "darkviolet", "lightslategray", "peru", "deeppink", "dodgerblue",
			"limegreen", "rosybrown", "cornflowerblue", "crimson", "turquoise", "darkorange",
			"blueviolet", "royalblue", "brown", "magenta", "olive", "saddlebrown", "purple",
			"coral", "mediumslateblue", "sienna", "mediumturquoise", "hotpink", "lawngreen",
			"mediumvioletred", "slateblue", "indianred", "slategray", "indigo", "darkcyan",
			"springgreen", "darkgoldenrod", "steelblue", "darkgray", "orchid", "darksalmon",
			"lime", "gold", "darkturquoise", "navy", "orangered", "darkkhaki", "darkmagenta",
			"darkolivegreen", "tomato", "aqua", "darkred", "olivedrab"
	};

	// the size of the COLORS array (plain array length, no need for reflection)
	protected static final int NumColors = COLORS.length;

	/**
	 * Construct the ColorMapper.
	 */
	public ColorMapper()
	{
		m_map = new Hashtable<String, String>();
	} // ColorMapper

	/**
	 * Get the color associated with a string. If the name is not yet associated
	 * with a color, a new association is made using the next color (wrapping
	 * around at the end of COLORS). The same string always yields the same color.
	 *
	 * @param name the key to map (must not be null — Hashtable rejects null keys)
	 * @return the HTML color name for the key
	 */
	public String getColor(String name)
	{
		// single lookup instead of containsKey + get; values are never null in a Hashtable
		String color = m_map.get(name);
		if (color == null)
		{
			color = COLORS[m_next++];
			m_map.put(name, color);
			// wrap around and start re-using colors
			if (m_next >= NumColors)
			{
				m_next = 0;
			}
		}
		return color;
	} // getColor

	/**
	 * Returns the live mapping of names to colors (not a copy).
	 */
	public Map<String, String> getMapping()
	{
		return m_map;
	} // getMapping

	/** Simple name/color pair used by {@link #getMappingList()}. */
	public class KeyValue {
		String k,v;
		public KeyValue(String k, String v){this.k = k; this.v = v;}
		public String getKey() {return k;}
		public String getValue() {return v;}
	}

	/**
	 * Returns the mapping of names to colors as a list of key/value pairs.
	 */
	public List<KeyValue> getMappingList()
	{
		List<KeyValue> mapList = new ArrayList<KeyValue>();
		// iterate entries directly instead of a key lookup per element
		for (Map.Entry<String, String> entry : m_map.entrySet()) {
			mapList.add(new KeyValue(entry.getKey(), entry.getValue()));
		}
		return mapList;
	} // getMappingList

	/**
	 * Returns the index of the next color in the COLORS array that will be assigned to a name.
	 */
	public int getNext()
	{
		return m_next;
	} // getNext

	/**
	 * Returns the entire array of color names (the internal array, not a copy).
	 */
	public String[] getColors()
	{
		return COLORS;
	} // getColors

	/**
	 * Returns the size of the array of color names.
	 */
	public int getNum_colors()
	{
		return NumColors;
	} // getNum_colors
} // ColorMapper
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.smartsuites.python;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Properties;
import com.smartsuites.display.AngularObjectRegistry;
import com.smartsuites.display.GUI;
import com.smartsuites.interpreter.Interpreter;
import com.smartsuites.interpreter.InterpreterContext;
import com.smartsuites.interpreter.InterpreterContextRunner;
import com.smartsuites.interpreter.InterpreterException;
import com.smartsuites.interpreter.InterpreterGroup;
import com.smartsuites.interpreter.InterpreterOutput;
import com.smartsuites.interpreter.InterpreterOutputListener;
import com.smartsuites.interpreter.InterpreterResult;
import com.smartsuites.interpreter.InterpreterResultMessageOutput;
import com.smartsuites.resource.LocalResourcePool;
import com.smartsuites.user.AuthenticationInfo;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class PythonInterpreterTest implements InterpreterOutputListener {
// interpreter under test; created fresh for every test in beforeTest()
PythonInterpreter pythonInterpreter = null;
// NOTE(review): reset in beforeTest() but never appended to — appears unused
String cmdHistory;
private InterpreterContext context;
// captures interpreter output; this test class is registered as its listener
InterpreterOutput out;
/**
 * Properties shared by the python interpreter tests: default python binary,
 * result limit of 1000, and IPython explicitly disabled.
 */
public static Properties getPythonTestProperties() {
Properties p = new Properties();
p.setProperty(PythonInterpreter.ZEPPELIN_PYTHON, PythonInterpreter.DEFAULT_ZEPPELIN_PYTHON);
p.setProperty(PythonInterpreter.MAX_RESULT, "1000");
p.setProperty("zeppelin.python.useIPython", "false");
return p;
}
/**
 * Creates the interpreter, wires it into a fresh interpreter group under
 * the "note" key, and opens it with a minimal InterpreterContext.
 */
@Before
public void beforeTest() throws IOException, InterpreterException {
cmdHistory = "";
// python interpreter
pythonInterpreter = new PythonInterpreter(getPythonTestProperties());
// create interpreter group
InterpreterGroup group = new InterpreterGroup();
group.put("note", new LinkedList<Interpreter>());
group.get("note").add(pythonInterpreter);
pythonInterpreter.setInterpreterGroup(group);
out = new InterpreterOutput(this);
context = new InterpreterContext("note", "id", null, "title", "text",
new AuthenticationInfo(),
new HashMap<String, Object>(),
new GUI(),
new AngularObjectRegistry(group.getId(), null),
new LocalResourcePool("id"),
new LinkedList<InterpreterContextRunner>(),
out);
InterpreterContext.set(context);
pythonInterpreter.open();
}
// close the interpreter after each test
@After
public void afterTest() throws IOException {
pythonInterpreter.close();
}
// a plain print statement must succeed
@Test
public void testInterpret() throws InterruptedException, IOException, InterpreterException {
InterpreterResult result = pythonInterpreter.interpret("print (\"hi\")", context);
assertEquals(InterpreterResult.Code.SUCCESS, result.code());
}
// NOTE(review): despite the name, the snippet is valid python; the test
// actually asserts that a one-line for loop runs and prints "hi" three times
@Test
public void testInterpretInvalidSyntax() throws IOException, InterpreterException {
InterpreterResult result = pythonInterpreter.interpret("for x in range(0,3): print (\"hi\")\n", context);
assertEquals(InterpreterResult.Code.SUCCESS, result.code());
assertTrue(new String(out.getOutputAt(0).toByteArray()).contains("hi\nhi\nhi"));
}
/**
 * Overwriting the predefined variable z breaks the zeppelin context API
 * (z.input fails), and restoring it from __zeppelin__ brings it back.
 */
@Test
public void testRedefinitionZeppelinContext() throws InterpreterException {
String pyRedefinitionCode = "z = 1\n";
String pyRestoreCode = "z = __zeppelin__\n";
String pyValidCode = "z.input(\"test\")\n";
assertEquals(InterpreterResult.Code.SUCCESS, pythonInterpreter.interpret(pyValidCode, context).code());
assertEquals(InterpreterResult.Code.SUCCESS, pythonInterpreter.interpret(pyRedefinitionCode, context).code());
assertEquals(InterpreterResult.Code.ERROR, pythonInterpreter.interpret(pyValidCode, context).code());
assertEquals(InterpreterResult.Code.SUCCESS, pythonInterpreter.interpret(pyRestoreCode, context).code());
assertEquals(InterpreterResult.Code.SUCCESS, pythonInterpreter.interpret(pyValidCode, context).code());
}
// InterpreterOutputListener callbacks — intentionally empty; the tests
// read the output buffer directly instead
@Override
public void onUpdateAll(InterpreterOutput out) {
}
@Override
public void onAppend(int index, InterpreterResultMessageOutput out, byte[] line) {
}
@Override
public void onUpdate(int index, InterpreterResultMessageOutput out) {
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.runtimeservices.recognizer.dae;
import java.util.Optional;
import javax.inject.Inject;
import javax.inject.Named;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.annotation.Order;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.dao.NonTransientDataAccessException;
import org.springframework.dao.RecoverableDataAccessException;
import org.springframework.dao.TransientDataAccessException;
import org.springframework.stereotype.Service;
import org.apache.isis.applib.annotation.OrderPrecedence;
import org.apache.isis.applib.services.exceprecog.Category;
import org.apache.isis.applib.services.exceprecog.ExceptionRecognizer;
import org.apache.isis.applib.services.exceprecog.Recognition;
import org.apache.isis.commons.internal.base._Strings;
import org.apache.isis.core.config.IsisConfiguration;
import lombok.Getter;
import lombok.val;
/**
* Translates {@link DataAccessException}(s) to {@link Recognition}(s),
* unless disabled via {@link IsisConfiguration}.
*/
@Service
@Named("isis.runtime.ExceptionRecognizerForDataAccessException")
@Order(OrderPrecedence.MIDPOINT)
@Qualifier("Default")
public class ExceptionRecognizerForDataAccessException
implements ExceptionRecognizer {
// when true (driven by configuration), recognize(..) always answers empty
@Getter
private final boolean disabled;
/**
 * Reads the disable flag from the configured
 * exception-recognizer "dae" settings.
 */
@Inject
public ExceptionRecognizerForDataAccessException(IsisConfiguration conf) {
this.disabled = conf.getCore().getRuntimeServices()
.getExceptionRecognizer().getDae().isDisable();
}
/**
 * Recognizes only {@link DataAccessException}s, and only while not disabled.
 */
@Override
public Optional<Recognition> recognize(Throwable ex) {
if(ex instanceof DataAccessException
&& !isDisabled()) {
return recognizeDae((DataAccessException)ex);
}
return Optional.empty();
}
// -- HELPER
// Maps the Spring DataAccessException hierarchy onto recognition categories.
// NOTE(review): the instanceof checks appear ordered most-specific first —
// reordering them would change which category wins for subclasses; confirm
// against the Spring dao exception hierarchy before touching.
private Optional<Recognition> recognizeDae(DataAccessException ex) {
if(ex instanceof ConcurrencyFailureException) {
return recognitionOf(Category.CONCURRENCY, ex);
}
if(ex instanceof TransientDataAccessException
|| ex instanceof RecoverableDataAccessException) {
return recognitionOf(Category.RETRYABLE, ex);
}
if(ex instanceof DataIntegrityViolationException) {
// eg. Data or related data already exists
return recognitionOf(Category.CONSTRAINT_VIOLATION, ex);
}
if(ex instanceof DataRetrievalFailureException) {
// Unable to load object. eg. Has it been deleted by someone else?
return recognitionOf(Category.NOT_FOUND, ex);
}
if(ex instanceof NonTransientDataAccessException) {
// eg. Unable to save changes. Does similar data already exist,
// or has referenced data been deleted?"
return recognitionOf(Category.SERVER_ERROR, ex);
}
return recognitionOf(Category.OTHER, ex);
}
/**
 * Builds a friendly message of the form
 * "{category} ({lower-cased exception name}): {root-cause message}",
 * falling back to a generic sentence when the cause carries no message.
 */
private Optional<Recognition> recognitionOf(Category category, DataAccessException ex) {
val causeMessage = _Strings.nullToEmpty(ex.getMostSpecificCause().getMessage()).trim();
val exceptionFriendlyName = _Strings.asNaturalName2
.apply(ex.getClass().getSimpleName())
.toLowerCase();
val friendlyMessage = String.format("%s (%s): %s",
category.getFriendlyName(),
exceptionFriendlyName,
_Strings.isEmpty(causeMessage)
? "Cannot find any details for what is causing the issue."
: causeMessage);
return Recognition.of(category, friendlyMessage);
}
}
|
/*
*
*
* Copyright 2016 Symphony Communication Services, LLC
*
* Licensed to Symphony Communication Services, LLC under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package com.symphony.clients;
import com.symphony.api.auth.model.Token;
import com.symphony.configurations.IConfigurationProvider;
import com.symphony.models.ISymphonyUser;
import com.symphony.models.Message;
import com.symphony.models.SymphonyUser;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
/**
* Created by ryan.dsouza on 7/26/16.
*
* Simple class to make calls to Symphony's REST API - used to augment Symphony SDK
*/
public class SymphonyRESTClient {
protected static final Logger LOG = LoggerFactory.getLogger(SymphonyRESTClient.class);
private final IConfigurationProvider configurationProvider;
// the bot's own user id, used to skip messages the bot itself sent
private final long myUserId;
// authentication tokens attached to every REST call
private Token sessionToken;
private Token keyManagerToken;
/**
 * @param configurationProvider supplies endpoint URLs and the bot user id
 * @param sessionToken session token for the Symphony pod
 * @param keyManagerToken token for the key manager
 */
public SymphonyRESTClient(IConfigurationProvider configurationProvider, Token sessionToken,
Token keyManagerToken) {
this.configurationProvider = configurationProvider;
this.myUserId = configurationProvider.getBotUserId();
this.sessionToken = sessionToken;
this.keyManagerToken = keyManagerToken;
}
/**
 * Fetches the profile for the given user id from the Symphony user-info
 * endpoint and wraps it as an {@link ISymphonyUser}.
 *
 * @param userId id of the user to look up
 * @return the enriched user, or {@code null} if the response is non-OK or unparsable
 */
public ISymphonyUser getUserForId(Long userId) {
	String fullURL = configurationProvider.getSymphonyUserInfoPath();

	// form-encoded payload expected by the "usercurrent" action (typed map, was raw)
	MultivaluedMap<String, String> formData = new MultivaluedHashMap<String, String>();
	formData.add("action", "usercurrent");
	formData.add("userid", String.valueOf(userId));
	formData.add("includeFollowing", String.valueOf(false));

	String jsonResponse = ClientBuilder.newClient()
		.target(fullURL)
		.request(MediaType.APPLICATION_JSON)
		.header("X-Symphony-CSRF-Token", sessionToken.getToken())
		.cookie("skey", sessionToken.getToken())
		.post(Entity.entity(formData, MediaType.APPLICATION_FORM_URLENCODED_TYPE), String.class);

	try {
		JSONObject response = new JSONObject(jsonResponse);
		if (response.getString("status").equals("OK")) {
			JSONObject person = response.getJSONObject("person");
			String emailAddress = person.getString("emailAddress");
			String displayName = person.getString("prettyName");
			return new SymphonyUser(userId, emailAddress, displayName);
		}
	} catch (JSONException e) {
		// parameterized SLF4J logging instead of string concatenation
		LOG.error("Error parsing JSON when getting user {}", jsonResponse, e);
	}
	return null;
}
/**
* Returns messages from long polling the Symphony DataFeed API
* @param datafeedId
* @return
*/
public List<Message> getMessagesForDataFeed(String datafeedId) {
String url =
configurationProvider.getSymphonyAgentPath() + "/v2/datafeed/" + datafeedId + "/read";
List<Message> messages = new ArrayList<>();
String jsonResponse = ClientBuilder.newClient()
.target(url)
.request(MediaType.APPLICATION_JSON)
.header("sessionToken", sessionToken.getToken())
.header("keyManagerToken", keyManagerToken.getToken())
.get(String.class);
if (jsonResponse == null || jsonResponse.length() == 0) {
return messages;
}
try {
JSONArray array = new JSONArray(jsonResponse);
for (int i = 0; i < array.length(); i++) {
JSONObject object = array.getJSONObject(i);
if (object.getString("v2messageType").equals("V2Message")) {
long senderId = object.getLong("fromUserId");
if (senderId != this.myUserId) {
Message message = new Message(object);
messages.add(message);
}
}
}
} catch (JSONException exception) {
LOG.error(exception.toString());
}
return messages;
}
public void setSessionToken(Token sessionToken) {
this.sessionToken = sessionToken;
}
public void setKeyManagerToken(Token keyManagerToken) {
this.keyManagerToken = keyManagerToken;
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package bean;
import datos.Usuario;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
/**
 * Stateless session bean facade exposing CRUD operations for {@link datos.Usuario}
 * entities via {@link AbstractFacade}.
 */
@Stateless
public class UsuarioFacade extends AbstractFacade<Usuario> {

    /** Container-managed entity manager for the GC_DAIPU persistence unit. */
    @PersistenceContext(unitName = "GC_DAIPU")
    private EntityManager em;

    /** Creates a facade bound to the {@link Usuario} entity type. */
    public UsuarioFacade() {
        super(Usuario.class);
    }

    /** Supplies the injected entity manager to the generic facade operations. */
    @Override
    protected EntityManager getEntityManager() {
        return em;
    }
}
|
/**
* ProposalLineItemPage.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.dfp.axis.v201308;
/**
 * Captures a page of {@link ProposalLineItem} objects.
 *
 * <p>NOTE: this class was generated by Apache Axis WSDL2Java (see file header);
 * avoid hand edits — they will be lost on regeneration.
 */
public class ProposalLineItemPage implements java.io.Serializable {

    /* The collection of proposal line items contained within this
     * page. */
    private com.google.api.ads.dfp.axis.v201308.ProposalLineItem[] results;

    /* The absolute index in the total result set on which this page
     * begins. */
    private java.lang.Integer startIndex;

    /* The size of the total result set to which this page belongs. */
    private java.lang.Integer totalResultSetSize;

    /** No-arg constructor required by Axis (de)serialization. */
    public ProposalLineItemPage() {
    }

    /** Constructs a fully populated page. */
    public ProposalLineItemPage(
           com.google.api.ads.dfp.axis.v201308.ProposalLineItem[] results,
           java.lang.Integer startIndex,
           java.lang.Integer totalResultSetSize) {
        this.results = results;
        this.startIndex = startIndex;
        this.totalResultSetSize = totalResultSetSize;
    }

    /**
     * Gets the results value for this ProposalLineItemPage.
     *
     * @return results * The collection of proposal line items contained within this
     * page.
     */
    public com.google.api.ads.dfp.axis.v201308.ProposalLineItem[] getResults() {
        return results;
    }

    /**
     * Sets the results value for this ProposalLineItemPage.
     *
     * @param results * The collection of proposal line items contained within this
     * page.
     */
    public void setResults(com.google.api.ads.dfp.axis.v201308.ProposalLineItem[] results) {
        this.results = results;
    }

    /** Indexed accessor into {@link #results}; throws NPE/AIOOBE if absent. */
    public com.google.api.ads.dfp.axis.v201308.ProposalLineItem getResults(int i) {
        return this.results[i];
    }

    /** Indexed mutator into {@link #results}; the array must already exist. */
    public void setResults(int i, com.google.api.ads.dfp.axis.v201308.ProposalLineItem _value) {
        this.results[i] = _value;
    }

    /**
     * Gets the startIndex value for this ProposalLineItemPage.
     *
     * @return startIndex * The absolute index in the total result set on which this page
     * begins.
     */
    public java.lang.Integer getStartIndex() {
        return startIndex;
    }

    /**
     * Sets the startIndex value for this ProposalLineItemPage.
     *
     * @param startIndex * The absolute index in the total result set on which this page
     * begins.
     */
    public void setStartIndex(java.lang.Integer startIndex) {
        this.startIndex = startIndex;
    }

    /**
     * Gets the totalResultSetSize value for this ProposalLineItemPage.
     *
     * @return totalResultSetSize * The size of the total result set to which this page belongs.
     */
    public java.lang.Integer getTotalResultSetSize() {
        return totalResultSetSize;
    }

    /**
     * Sets the totalResultSetSize value for this ProposalLineItemPage.
     *
     * @param totalResultSetSize * The size of the total result set to which this page belongs.
     */
    public void setTotalResultSetSize(java.lang.Integer totalResultSetSize) {
        this.totalResultSetSize = totalResultSetSize;
    }

    // Re-entrancy guard used by the generated equals() to terminate on cyclic
    // object graphs: while a comparison is in flight, a nested call returns
    // identity against the object being compared.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof ProposalLineItemPage)) return false;
        ProposalLineItemPage other = (ProposalLineItemPage) obj;
        // NOTE(review): unreachable — a null obj already fails the instanceof
        // check above; kept as emitted by the code generator.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.results==null && other.getResults()==null) ||
             (this.results!=null &&
              java.util.Arrays.equals(this.results, other.getResults()))) &&
            ((this.startIndex==null && other.getStartIndex()==null) ||
             (this.startIndex!=null &&
              this.startIndex.equals(other.getStartIndex()))) &&
            ((this.totalResultSetSize==null && other.getTotalResultSetSize()==null) ||
             (this.totalResultSetSize!=null &&
              this.totalResultSetSize.equals(other.getTotalResultSetSize())));
        __equalsCalc = null;
        return _equals;
    }

    // Re-entrancy guard for hashCode(): a nested call during computation
    // contributes 0 instead of recursing forever on cyclic graphs.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getResults() != null) {
            // Generated code walks the array reflectively and skips nested
            // arrays to stay cycle-safe.
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getResults());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getResults(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getStartIndex() != null) {
            _hashCode += getStartIndex().hashCode();
        }
        if (getTotalResultSetSize() != null) {
            _hashCode += getTotalResultSetSize().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata describing how Axis maps this bean to/from XML.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ProposalLineItemPage.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201308", "ProposalLineItemPage"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("results");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201308", "results"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201308", "ProposalLineItem"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("startIndex");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201308", "startIndex"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("totalResultSetSize");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201308", "totalResultSetSize"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
|
package com.aoyanhao.coder.utils;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.TypedValue;
/** Miscellaneous display-metric and bitmap helpers. */
public class Utils {

    /**
     * Converts a size in density-independent pixels to raw pixels using the
     * system display metrics.
     *
     * @param dp size in dip
     * @return the equivalent size in pixels
     */
    public static float dp2px(float dp) {
        return TypedValue.applyDimension(
                TypedValue.COMPLEX_UNIT_DIP, dp, Resources.getSystem().getDisplayMetrics());
    }

    /**
     * Decodes a drawable resource, letting the decoder scale it so that the
     * resulting bitmap's width is {@code width} pixels.
     *
     * @param context context whose resources are used for decoding
     * @param drawble drawable resource id
     * @param width   desired bitmap width in pixels
     * @return the decoded, scaled bitmap
     */
    public static Bitmap getDrawableBitmap(Context context, int drawble, float width) {
        final BitmapFactory.Options opts = new BitmapFactory.Options();

        // First pass: bounds only — measures outWidth without allocating pixels.
        opts.inJustDecodeBounds = true;
        BitmapFactory.decodeResource(context.getResources(), drawble, opts);

        // Second pass: use the density fields so the decoder itself scales the
        // image by the ratio outWidth -> width.
        opts.inJustDecodeBounds = false;
        opts.inDensity = opts.outWidth;
        opts.inTargetDensity = (int) width;
        return BitmapFactory.decodeResource(context.getResources(), drawble, opts);
    }
}
|
/**
* Daniel Ricci {@literal <thedanny09@icloud.com>}
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so, subject
* to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
package editor.controllers;
import java.util.Optional;
import java.util.logging.Level;
import framework.communication.internal.signal.ISignalListener;
import framework.communication.internal.signal.ISignalReceiver;
import framework.communication.internal.signal.arguments.ControllerEventArgs;
import framework.communication.internal.signal.arguments.ModelEventArgs;
import framework.core.factories.AbstractFactory;
import framework.core.factories.ControllerFactory;
import framework.core.mvc.controller.BaseController;
import framework.utils.logging.Tracelog;
import editor.models.TileMapModel;
import editor.models.TileModel;
/**
* This controller provides functionality that deals with a single tile map and the tiles that it occupies
*
* @author Daniel Ricci {@literal <thedanny09@icloud.com>}
*
*/
public class TileMapController extends BaseController {

    /**
     * Signal name indicating that this tile map is to clear its currently selected tile
     */
    private final String EVENT_CLEAR_SELECTION = "EVENT_CLEAR_SELECTION";

    /**
     * The setup model associated to the tile map
     */
    private final TileMapModel _tileMapModel;

    /**
     * The currently selected tile
     */
    private TileModel _currentlySelectedTile;

    /**
     * The currently selected tile map
     */
    private TileMapModel _currentlySelectedTileMap;

    /**
     * Constructs a new instance of this class type
     *
     * @param setupModel The tile map setup parameters
     */
    public TileMapController(TileMapModel setupModel) {
        _tileMapModel = setupModel;
        setupModel.addListener(this);
        registerAllSignals();
    }

    /**
     * Adds the specified tile model to the list of tile models owned by this controller
     *
     * Note: Adding the tile model to this controller will cause the model to change sizes
     * to match the cell width and cell height of the setup associated to this controller
     *
     * @param tileModel The tile model to add to this controller
     */
    public void addTileEntity(TileModel tileModel) {
        // Normalized the specified tile model to the dimensions specified
        // by this tile map
        tileModel.setWidth(_tileMapModel.getWidth());
        tileModel.setHeight(_tileMapModel.getHeight());
        // Add the tile model to the list of tiles
        _tileMapModel.getTiles().add(tileModel);
    }

    /**
     * Gets the setup model associated to this controller
     *
     * @return The setup model
     */
    public TileMapModel getSetupDetails() {
        return _tileMapModel;
    }

    /**
     * Toggles the selection state of this tile map
     *
     * @return TRUE if the selection is now on, FALSE otherwise
     */
    public boolean toggleSelected() {
        _tileMapModel.setSelected(!_tileMapModel.getSelected());
        return _tileMapModel.getSelected();
    }

    /**
     * Toggles the selection state of a tile associated to this tile map
     *
     * @param listener The listener of the entity to be selected
     */
    public void toggleSelectedTile(ISignalListener listener) {
        // Get the tile model associated to the specified listener
        // NOTE(review): Optional.get() without isPresent() — throws
        // NoSuchElementException if no tile is listening to this listener;
        // presumably callers guarantee a match — confirm.
        TileModel tileModel = _tileMapModel.getTiles().stream().filter(z -> z.isModelListening(listener)).findFirst().get();
        // Get the value of the tile model found and switch its selection state
        tileModel.setSelected(!tileModel.getSelected());
    }

    /**
     * Updates the properties window with the contents of this tile
     *
     * @param listener The listener associated to where the event for updating took place
     */
    public void updateProperties(ISignalListener listener) {
        // Get the properties controller
        PropertiesController properties = AbstractFactory.getFactory(ControllerFactory.class).get(PropertiesController.class);
        // If the properties controller exists
        if(properties != null) {
            TileModel tileModel = null;
            if(listener instanceof TileModel) {
                tileModel = (TileModel) listener;
            }
            else {
                // Look for a tile that is selected
                Optional<TileModel> optionalTileModel = null;
                // Attempt to get the tile that was listened in on
                if(listener != null) {
                    optionalTileModel = _tileMapModel.getTiles().stream().filter(z -> z.isModelListening(listener)).findFirst();
                }
                // If there was no tile that could be found that was being listened in on, try and
                // find the currently selected tile
                if(optionalTileModel == null || !optionalTileModel.isPresent()) {
                    optionalTileModel = _tileMapModel.getTiles().stream().filter(z -> z.getSelected()).findFirst();
                }
                // Get the tile model that was found (if any)
                if(optionalTileModel.isPresent()) {
                    tileModel = optionalTileModel.get();
                }
            }
            // Intentionally empty: the properties window already shows this
            // tile, so there is nothing to reload.
            if(properties.getSelectedTile() == tileModel) {
            }
            else if(!properties.isTileSelected()) {
                properties.loadContents(tileModel);
            }
        }
    }

    /**
     * Sets the location of this tile map
     *
     * @param x The x-coordinate
     * @param y The y-coordinate
     */
    public void updateTileMapPosition(int x, int y) {
        _tileMapModel.setCoordinate(x, y);
    }

    /**
     * Registers the signal handlers for selection-clearing and for tile /
     * tile-map selection changes. The change handlers temporarily disable
     * their own signal while mutating selection state to avoid re-entrant
     * event loops — the disable/enable bracketing order is load-bearing.
     */
    private void registerAllSignals() {
        addSignal(EVENT_CLEAR_SELECTION, new ISignalReceiver<ControllerEventArgs>() {
            @Override public void signalReceived(ControllerEventArgs event) {
                // Deselect whichever of tile / tile map is held, then drop both references
                if(_currentlySelectedTile != null && _currentlySelectedTile.getSelected()) {
                    _currentlySelectedTile.setSelected(false);
                }
                if(_currentlySelectedTileMap != null && _currentlySelectedTileMap.getSelected()) {
                    _currentlySelectedTileMap.setSelected(false);
                }
                _currentlySelectedTile = null;
                _currentlySelectedTileMap = null;
            }
        });
        addSignal(TileModel.EVENT_TILE_SELECTION_CHANGED, new ISignalReceiver<ModelEventArgs>() {
            @Override public void signalReceived(ModelEventArgs event) {
                // Unregister this controller from listening to this event, to prevent an event from being re-fired back here
                setSignalEnabled(event.getOperationName(), false);
                TileModel tileModel = (TileModel) event.getSource();
                if(tileModel.getSelected()) {
                    // If there is a tile held by this controller then remove it's selection. This occurs
                    // when a tile within the same tile map is selected, when there was already a tile selected
                    // within the same tile map prior
                    if(_currentlySelectedTile != null && _currentlySelectedTile != tileModel) {
                        _currentlySelectedTile.setSelected(false);
                    }
                    // Assign over the new selection
                    _currentlySelectedTile = tileModel;
                    // Inform all the tile map controllers to clear their selection (if any)
                    AbstractFactory.getFactory(ControllerFactory.class).multicastSignalListeners(
                        TileMapController.class,
                        new ControllerEventArgs(TileMapController.this, EVENT_CLEAR_SELECTION)
                    );
                    // If this tile map is already selected, then remove it's selection to allow the tile
                    // to be selected.
                    //
                    // Note: If a tile map is selected, then selecting on a tile should remove the tile map selection
                    if(_currentlySelectedTileMap != null && _currentlySelectedTileMap.getSelected()) {
                        _currentlySelectedTileMap.setSelected(false);
                        _currentlySelectedTileMap = null;
                    }
                    // Update the properties window
                    updateProperties(_currentlySelectedTile);
                }
                // If we receive the same tile, then at this point it is already unselected so just clear the reference
                else if(tileModel.equals(_currentlySelectedTile)){
                    // Update the properties window
                    updateProperties(_currentlySelectedTile);
                    _currentlySelectedTile = null;
                }
                else {
                    Tracelog.log(Level.SEVERE, true, "Something bad happened...we need to handle the tile selection cases better");
                }
                // Register back the original listener so that we can continue to receive signals
                setSignalEnabled(event.getOperationName(), true);
            }
        });
        addSignal(TileMapModel.EVENT_TILEMAP_SELECTION_CHANGED, new ISignalReceiver<ModelEventArgs>() {
            @Override public void signalReceived(ModelEventArgs event) {
                // Unregister this controller from listening to this event, to prevent
                // an event from being re-fired back here
                setSignalEnabled(event.getOperationName(), false);
                TileMapModel tileMapModel = (TileMapModel) event.getSource();
                if(tileMapModel.getSelected()) {
                    // Assign over the new selection
                    _currentlySelectedTileMap = tileMapModel;
                    // Inform all the tile map controllers to clear their selection (if any)
                    AbstractFactory.getFactory(ControllerFactory.class).multicastSignalListeners(
                        TileMapController.class,
                        new ControllerEventArgs(TileMapController.this, EVENT_CLEAR_SELECTION)
                    );
                    // If a tile within this tile map is selected, remove it's selection to allow the tile map selection to occur
                    //
                    // Note: If a tile is selected, then selecting on a tile map should remove the tile selection
                    if(_currentlySelectedTile != null && _currentlySelectedTile.getSelected()) {
                        _currentlySelectedTile.setSelected(false);
                        _currentlySelectedTile = null;
                    }
                }
                // If we receive the same tile, then at this point it is already unselected so just clear the reference
                else if(tileMapModel.equals(_currentlySelectedTileMap)){
                    _currentlySelectedTileMap = null;
                }
                else {
                    Tracelog.log(Level.SEVERE, true, "Something bad happened...we need to handle the tile selection cases better");
                }
                // Register back the original listener so that we can continue to receive signals
                setSignalEnabled(event.getOperationName(), true);
            }
        });
    }
}
|
/*
* Copyright (C) 2010 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.airline.args;
import io.airlift.airline.Arguments;
import io.airlift.airline.Option;
import java.util.ArrayList;
import java.util.List;
/**
 * Airline test fixture: a command definition whose options all rely on
 * field-initializer defaults (log level 1, debug false, level 0).
 */
public class ArgsDefault
{
    // Positional (non-option) arguments collected by airline.
    @Arguments
    public List<String> parameters = new ArrayList<>();

    // Defaults to 1 when -log is not supplied.
    @Option(name = "-log", description = "Level of verbosity")
    public Integer log = 1;

    @Option(name = "-groups", description = "Comma-separated list of group names to be run")
    public String groups;

    // Boolean flag: present means true.
    @Option(name = "-debug", description = "Debug mode")
    public boolean debug;

    @Option(name = "-level", description = "A long number")
    public long level;
}
|
package io.github.rpc.spring;
import feign.*;
import feign.codec.Decoder;
import feign.codec.Encoder;
import feign.codec.ErrorDecoder;
import feign.httpclient.ApacheHttpClient;
import io.github.rpc.feign.DefaultLogFactory;
import io.github.rpc.feign.FeignLogFactory;
import okhttp3.OkHttpClient;
import org.apache.http.client.HttpClient;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.util.Assert;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* @Author:pjs
* @Date:2020/4/27
* @description:
* @ModifiedBy:
*/
/**
 * Spring {@link FactoryBean} that builds a Feign proxy for a given interface
 * type and URL. Collaborators (codecs, error decoder, contract, HTTP client,
 * interceptors) are resolved from the application context during
 * {@link #afterPropertiesSet()}.
 */
public class MmFeignFactoryBean implements FactoryBean<Object>, InitializingBean, ApplicationContextAware {

    /**
     * Spring application context, injected via {@link ApplicationContextAware}.
     */
    private ApplicationContext applicationContext;

    // Underlying HTTP client (Apache or OkHttp), resolved in realClient().
    private Client client;

    /**
     * The interface type to proxy.
     */
    private Class<?> type;

    /**
     * Base request URL for the proxied interface.
     */
    private String url;

    /**
     * Response decoder (deserialization).
     */
    private Decoder decoder;

    /**
     * Request encoder (serialization).
     */
    private Encoder encoder;

    /**
     * Feign logger.
     */
    private Logger logger;

    /**
     * Feign log level; defaults to BASIC if not set.
     */
    private Logger.Level logLevel;

    /**
     * Error decoder for non-2xx responses.
     */
    private ErrorDecoder errorDecoder;

    /**
     * Contract describing how interface annotations are processed.
     */
    private Contract contract;

    /**
     * Feign request interceptors, populated from the context.
     */
    private List<RequestInterceptor> interceptors = new LinkedList<>();

    /**
     * Factory that creates per-type loggers.
     */
    private FeignLogFactory feignLogFactory;

    /**
     * Returns the Feign proxy for {@link #type}.
     * NOTE(review): a new proxy is built on every call even though
     * {@link #isSingleton()} returns true — presumably Spring caches the
     * first result; confirm.
     */
    @Override
    public Object getObject() throws Exception {
        return createFeignTarget();
    }

    // Assembles the Feign builder from the configured collaborators and
    // targets it at (type, url). Optional pieces (client, errorDecoder,
    // contract) are only applied when present.
    private Object createFeignTarget() {
        Feign.Builder builder = new Feign.Builder();
        builder = builder
                .decoder(decoder)
                .encoder(encoder)
                .logger(logger)
                .logLevel(logLevel)
                .requestInterceptors(interceptors);
        if (client != null) {
            builder.client(client);
        }
        if (errorDecoder != null) {
            builder.errorDecoder(errorDecoder);
        }
        if (contract != null) {
            builder.contract(contract);
        }
        return builder.target(type, url);
    }

    @Override
    public Class<?> getObjectType() {
        return this.type;
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    /**
     * Resolves all collaborators from the application context.
     * NOTE(review): values previously supplied through the setters
     * (decoder, encoder, errorDecoder, client, feignLogFactory) are
     * unconditionally overwritten here; only logLevel is preserved.
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        Assert.hasText(url, "远程调用接口必须设置url地址");
        Assert.notNull(type, "type must not be null");
        this.decoder = applicationContext.getBean(Decoder.class);
        this.encoder = applicationContext.getBean(Encoder.class);
        this.errorDecoder = applicationContext.getBean(ErrorDecoder.class);
        this.client = realClient();
        this.contract = applicationContext.getBean(Contract.class);
        // The factory wraps the currently-set logger, then replaces the
        // logger with a per-type one created by that factory.
        this.feignLogFactory = new DefaultLogFactory(logger);
        this.logger = this.feignLogFactory.create(type);
        if (null == this.logLevel) {
            this.logLevel = Logger.Level.BASIC;
        }
        // Register every RequestInterceptor bean found in the context.
        Map<String, RequestInterceptor> beans = applicationContext.getBeansOfType(RequestInterceptor.class);
        for (RequestInterceptor interceptor : beans.values()) {
            addInterceptor(interceptor);
        }
    }

    // Picks the transport: tries an Apache HttpClient bean first, falling
    // back to an OkHttpClient bean; throws when neither exists. Note that if
    // an Apache bean is found, OkHttp is never considered.
    private Client realClient() {
        HttpClient httpClient = null;
        OkHttpClient okHttpClient = null;
        try {
            httpClient = applicationContext.getBean(HttpClient.class);
        } catch (BeansException e) {
            try {
                okHttpClient = applicationContext.getBean(OkHttpClient.class);
            } catch (BeansException e1) {
                // ignore — absence is handled by the null check below
            }
        }
        if (Objects.isNull(httpClient) && Objects.isNull(okHttpClient)) {
            throw new IllegalStateException("can not found any client");
        } else if (null != okHttpClient) {
            return new feign.okhttp.OkHttpClient(okHttpClient);
        } else {
            return new ApacheHttpClient(httpClient);
        }
    }

    public void setType(Class<?> type) {
        this.type = type;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public void setDecoder(Decoder decoder) {
        this.decoder = decoder;
    }

    public void setEncoder(Encoder encoder) {
        this.encoder = encoder;
    }

    public void setLogger(Logger logger) {
        this.logger = logger;
    }

    public void setLogLevel(Logger.Level logLevel) {
        this.logLevel = logLevel;
    }

    public void setErrorDecoder(ErrorDecoder errorDecoder) {
        this.errorDecoder = errorDecoder;
    }

    public Class<?> getType() {
        return type;
    }

    public String getUrl() {
        return url;
    }

    public Decoder getDecoder() {
        return decoder;
    }

    public Encoder getEncoder() {
        return encoder;
    }

    public Logger getLogger() {
        return logger;
    }

    public Logger.Level getLogLevel() {
        return logLevel;
    }

    public ErrorDecoder getErrorDecoder() {
        return errorDecoder;
    }

    public void addInterceptor(RequestInterceptor interceptor) {
        this.interceptors.add(interceptor);
    }

    public Client getClient() {
        return client;
    }

    public void setClient(Client client) {
        this.client = client;
    }

    public FeignLogFactory getFeignLogFactory() {
        return feignLogFactory;
    }

    public void setFeignLogFactory(FeignLogFactory feignLogFactory) {
        this.feignLogFactory = feignLogFactory;
    }

    // equals/hashCode compare the mutable configuration fields; contract,
    // interceptors and feignLogFactory are deliberately excluded.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        MmFeignFactoryBean that = (MmFeignFactoryBean) o;
        if (type != null ? !type.equals(that.type) : that.type != null) return false;
        if (url != null ? !url.equals(that.url) : that.url != null) return false;
        if (decoder != null ? !decoder.equals(that.decoder) : that.decoder != null) return false;
        if (encoder != null ? !encoder.equals(that.encoder) : that.encoder != null) return false;
        if (logger != null ? !logger.equals(that.logger) : that.logger != null) return false;
        if (logLevel != that.logLevel) return false;
        return errorDecoder != null ? errorDecoder.equals(that.errorDecoder) : that.errorDecoder == null;
    }

    @Override
    public int hashCode() {
        int result = type != null ? type.hashCode() : 0;
        result = 31 * result + (url != null ? url.hashCode() : 0);
        result = 31 * result + (decoder != null ? decoder.hashCode() : 0);
        result = 31 * result + (encoder != null ? encoder.hashCode() : 0);
        result = 31 * result + (logger != null ? logger.hashCode() : 0);
        result = 31 * result + (logLevel != null ? logLevel.hashCode() : 0);
        result = 31 * result + (errorDecoder != null ? errorDecoder.hashCode() : 0);
        return result;
    }

    // NOTE(review): the literal says "QmpFeignFactoryBean" although the class
    // is MmFeignFactoryBean — left untouched here since it is runtime output.
    @Override
    public String toString() {
        return "QmpFeignFactoryBean{" +
                "type=" + type +
                ", url='" + url + '\'' +
                ", decoder=" + decoder +
                ", encoder=" + encoder +
                ", logger=" + logger +
                ", logLevel=" + logLevel +
                ", errorDecoder=" + errorDecoder +
                '}';
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }
}
|
/*
* Copyright 2014 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.openbidder.bench;
import com.google.caliper.Benchmark;
import com.google.caliper.runner.CaliperMain;
import com.google.common.collect.ImmutableList;
import com.google.doubleclick.openrtb.DoubleClickOpenRtbMapper;
import com.google.doubleclick.openrtb.ExtMapper;
import com.google.doubleclick.util.DoubleClickMetadata;
import com.google.openrtb.json.OpenRtbJsonFactory;
import com.google.openrtb.json.OpenRtbJsonReader;
import com.google.openrtb.json.OpenRtbJsonWriter;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.codahale.metrics.MetricRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;
/**
* Benchmark for OpenRTB models, DoubleClick model, JSON serialization.
*/
public class Bench extends Benchmark {
private static final Logger logger = LoggerFactory.getLogger(Bench.class);
// DoubleClick/Proto
static final DoubleClickOpenRtbMapper mapper = new DoubleClickOpenRtbMapper(
new MetricRegistry(),
new DoubleClickMetadata(new DoubleClickMetadata.ResourceTransport()),
null,
ImmutableList.<ExtMapper>of());
static final com.google.protos.adx.NetworkBid.BidRequest
PROTODC = DoubleClickData.newRequest(true);
static final ByteString PROTODC_BUF = DoubleClickData.newRequest(true).toByteString();
static final ByteString PROTODC_BUF_GZ = zip(PROTODC_BUF);
// OpenRTB/Proto
static final com.google.openrtb.OpenRtb.BidRequest
PROTORTB = OpenRtbData.newBidRequest(true);
static final ByteString PROTORTB_BUF = PROTORTB.toByteString();
static final ByteString PROTORTB_BUF_GZ = zip(PROTORTB_BUF);
// OpenRTB/JSON
static final OpenRtbJsonFactory jsonFactory = OpenRtbJsonFactory.create();
static final OpenRtbJsonWriter protortbJsonWriter = jsonFactory.newWriter();
static final OpenRtbJsonReader protortbJsonReader = jsonFactory.newReader();
static final String ORTB_JSON;
static final ByteString ORTB_JSON_BS;
static final ByteString ORTB_JSON_GZ;
static {
ByteString json = null;
try {
ByteString.Output bos = ByteString.newOutput();
protortbJsonWriter.writeBidRequest(PROTORTB, bos);
json = bos.toByteString();
} catch (IOException e) {
throw new IllegalStateException(e);
}
ORTB_JSON_BS = json;
ORTB_JSON_GZ = zip(json);
ORTB_JSON = json.toStringUtf8();
}
public static void main(String[] args) {
logger.info(
"\nDoubleClick/Protobuf : {} bytes"
+ "\nDoubleClick/Protobuf/gzip: {} bytes"
+ "\nOpenRTB/Protobuf : {} bytes"
+ "\nOpenRTB/Protobuf/gzip : {} bytes"
+ "\nOpenRTB/JSON : {} bytes"
+ "\nOpenRTB/JSON/gzip : {} bytes"
+ "\n"
+ "\nOpenRTB/Protobuf Model:\n{}"
+ "\n"
+ "\nOpenRTB/JSON Model:\n{}\n",
PROTODC_BUF.size(), PROTODC_BUF_GZ.size(),
PROTORTB_BUF.size(), PROTORTB_BUF_GZ.size(),
ORTB_JSON.length(), ORTB_JSON_GZ.size(),
PROTORTB, ORTB_JSON);
CaliperMain.main(Bench.class, args);
}
public int time_build_ProtoOrtb(int reps) {
int dummy = 0;
for (int i = 0; i < reps; ++i) {
dummy += OpenRtbData.newBidRequest(true).getUser().getId().length();
}
return dummy;
}
public int time_buf_protoDc(int reps) throws InvalidProtocolBufferException {
int dummy = 0;
for (int i = 0; i < reps; ++i) {
dummy += com.google.protos.adx.NetworkBid.BidRequest
.parseFrom(PROTODC_BUF).getGoogleUserId().length();
}
return dummy;
}
public int time_buf_protoOrtb(int reps) throws InvalidProtocolBufferException {
int dummy = 0;
for (int i = 0; i < reps; ++i) {
dummy += com.google.openrtb.OpenRtb.BidRequest
.parseFrom(PROTORTB_BUF).getUser().getId().length();
}
return dummy;
}
public int time_json_protoOrtb(int reps) throws IOException {
int dummy = 0;
for (int i = 0; i < reps; ++i) {
dummy += protortbJsonReader.readBidRequest(ORTB_JSON_BS).getUser().getId().length();
}
return dummy;
}
/** Benchmarks mapping the DoubleClick request model to the OpenRTB model. */
public int time_protoDc_protoOrtb(int reps) {
  int dummy = 0;
  for (int i = 0; i < reps; ++i) {
    dummy += mapper.toOpenRtbBidRequest(PROTODC).getUser().getId().length();
  }
  return dummy;
}
/**
 * Benchmarks serializing the OpenRTB request model to protobuf bytes.
 *
 * @param reps number of iterations
 * @return accumulated byte-count sum so the serialization is not eliminated
 */
public int protoOrtb_buf(int reps) {
  int sink = 0;
  int remaining = reps;
  while (remaining-- > 0) {
    sink += PROTORTB.toByteString().size();
  }
  return sink;
}
/** Benchmarks serializing the OpenRTB request model to JSON. */
public int protoOrtb_json(int reps) throws IOException {
  int dummy = 0;
  for (int i = 0; i < reps; ++i) {
    dummy += protortbJsonWriter.writeBidRequest(PROTORTB).length();
  }
  return dummy;
}
/**
 * Gzip-compresses {@code data} and returns the compressed bytes.
 *
 * <p>Bug fix: the previous version snapshotted the output with {@code toByteString()} before the
 * {@link GZIPOutputStream} was finished (try-with-resources closes it only after the return value
 * is computed), so the gzip trailer (CRC-32 + length) was missing and the result was not a valid
 * gzip stream. Calling {@code finish()} before the snapshot completes the stream; the subsequent
 * close is a no-op for an already-finished stream.
 */
static ByteString zip(ByteString data) {
  try (ByteString.Output bso = ByteString.newOutput(data.size());
      GZIPOutputStream zos = new GZIPOutputStream(bso, true)) {
    data.writeTo(zos);
    zos.finish();  // write final deflate block + gzip trailer before snapshotting
    return bso.toByteString();
  } catch (IOException e) {
    throw new IllegalStateException(e);
  }
}
}
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;
/**
 * Request object for method compute.targetPools.removeHealthCheck. Removes health check URL from a
 * target pool.
 */
@Generated("by GAPIC")
@BetaApi
public final class RemoveHealthCheckTargetPoolHttpRequest implements ApiMessage {
  // Standard Compute API query parameters plus the request-specific fields.
  private final String access_token;
  private final String callback;
  private final String fields;
  private final String key;
  private final String prettyPrint;
  private final String quotaUser;
  private final String requestId;
  private final String targetPool;
  private final TargetPoolsRemoveHealthCheckRequest targetPoolsRemoveHealthCheckRequestResource;
  private final String userIp;

  // All-null instance backing DEFAULT_INSTANCE.
  private RemoveHealthCheckTargetPoolHttpRequest() {
    this.access_token = null;
    this.callback = null;
    this.fields = null;
    this.key = null;
    this.prettyPrint = null;
    this.quotaUser = null;
    this.requestId = null;
    this.targetPool = null;
    this.targetPoolsRemoveHealthCheckRequestResource = null;
    this.userIp = null;
  }

  private RemoveHealthCheckTargetPoolHttpRequest(
      String access_token,
      String callback,
      String fields,
      String key,
      String prettyPrint,
      String quotaUser,
      String requestId,
      String targetPool,
      TargetPoolsRemoveHealthCheckRequest targetPoolsRemoveHealthCheckRequestResource,
      String userIp) {
    this.access_token = access_token;
    this.callback = callback;
    this.fields = fields;
    this.key = key;
    this.prettyPrint = prettyPrint;
    this.quotaUser = quotaUser;
    this.requestId = requestId;
    this.targetPool = targetPool;
    this.targetPoolsRemoveHealthCheckRequestResource = targetPoolsRemoveHealthCheckRequestResource;
    this.userIp = userIp;
  }

  /** Returns the value of the named field, or null for an unknown field name. */
  @Override
  public Object getFieldValue(String fieldName) {
    if ("access_token".equals(fieldName)) {
      return access_token;
    }
    if ("callback".equals(fieldName)) {
      return callback;
    }
    if ("fields".equals(fieldName)) {
      return fields;
    }
    if ("key".equals(fieldName)) {
      return key;
    }
    if ("prettyPrint".equals(fieldName)) {
      return prettyPrint;
    }
    if ("quotaUser".equals(fieldName)) {
      return quotaUser;
    }
    if ("requestId".equals(fieldName)) {
      return requestId;
    }
    if ("targetPool".equals(fieldName)) {
      return targetPool;
    }
    if ("targetPoolsRemoveHealthCheckRequestResource".equals(fieldName)) {
      return targetPoolsRemoveHealthCheckRequestResource;
    }
    if ("userIp".equals(fieldName)) {
      return userIp;
    }
    return null;
  }

  /** Returns the resource sent as the HTTP request body, if any. */
  @Nullable
  @Override
  public TargetPoolsRemoveHealthCheckRequest getApiMessageRequestBody() {
    return targetPoolsRemoveHealthCheckRequestResource;
  }

  /**
   * The fields that should be serialized (even if they have empty values). If the containing
   * message object has a non-null fieldmask, then all the fields in the field mask (and only those
   * fields in the field mask) will be serialized. If the containing object does not have a
   * fieldmask, then only non-empty fields will be serialized.
   */
  @Nullable
  @Override
  public List<String> getFieldMask() {
    return null;
  }

  /** OAuth 2.0 token for the current user. */
  public String getAccessToken() {
    return access_token;
  }

  /** Name of the JavaScript callback function that handles the response. */
  public String getCallback() {
    return callback;
  }

  /** Selector specifying a subset of fields to include in the response. */
  public String getFields() {
    return fields;
  }

  /** API key. Required unless you provide an OAuth 2.0 token. */
  public String getKey() {
    return key;
  }

  /** Returns response with indentations and line breaks. */
  public String getPrettyPrint() {
    return prettyPrint;
  }

  /** Alternative to userIp. */
  public String getQuotaUser() {
    return quotaUser;
  }

  /**
   * An optional request ID to identify requests. Specify a unique request ID so that if you must
   * retry your request, the server will know to ignore the request if it has already been
   * completed.
   *
   * <p>For example, consider a situation where you make an initial request and the request times
   * out. If you make the request again with the same request ID, the server can check if original
   * operation with the same request ID was received, and if so, will ignore the second request.
   * This prevents clients from accidentally creating duplicate commitments.
   *
   * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
   * (00000000-0000-0000-0000-000000000000).
   */
  public String getRequestId() {
    return requestId;
  }

  /**
   * Name of the target pool to remove health checks from. It must have the format
   * `{project}/regions/{region}/targetPools/{targetPool}/removeHealthCheck`. \`{targetPool}\` must
   * start with a letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes
   * (\`-\`), underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent signs
   * (\`%\`). It must be between 3 and 255 characters in length, and it must not start with
   * \`"goog"\`.
   */
  public String getTargetPool() {
    return targetPool;
  }

  /** The request body resource for this call. */
  public TargetPoolsRemoveHealthCheckRequest getTargetPoolsRemoveHealthCheckRequestResource() {
    return targetPoolsRemoveHealthCheckRequestResource;
  }

  /** IP address of the end user for whom the API call is being made. */
  public String getUserIp() {
    return userIp;
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(RemoveHealthCheckTargetPoolHttpRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  public static RemoveHealthCheckTargetPoolHttpRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared all-null instance; also used as the "empty" sentinel in toBuilder()/mergeFrom().
  private static final RemoveHealthCheckTargetPoolHttpRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new RemoveHealthCheckTargetPoolHttpRequest();
  }

  /** Builder for {@link RemoveHealthCheckTargetPoolHttpRequest}. */
  public static class Builder {
    private String access_token;
    private String callback;
    private String fields;
    private String key;
    private String prettyPrint;
    private String quotaUser;
    private String requestId;
    private String targetPool;
    private TargetPoolsRemoveHealthCheckRequest targetPoolsRemoveHealthCheckRequestResource;
    private String userIp;

    Builder() {}

    /** Copies every non-null field of {@code other} into this builder. */
    public Builder mergeFrom(RemoveHealthCheckTargetPoolHttpRequest other) {
      if (other == RemoveHealthCheckTargetPoolHttpRequest.getDefaultInstance()) return this;
      if (other.getAccessToken() != null) {
        this.access_token = other.access_token;
      }
      if (other.getCallback() != null) {
        this.callback = other.callback;
      }
      if (other.getFields() != null) {
        this.fields = other.fields;
      }
      if (other.getKey() != null) {
        this.key = other.key;
      }
      if (other.getPrettyPrint() != null) {
        this.prettyPrint = other.prettyPrint;
      }
      if (other.getQuotaUser() != null) {
        this.quotaUser = other.quotaUser;
      }
      if (other.getRequestId() != null) {
        this.requestId = other.requestId;
      }
      if (other.getTargetPool() != null) {
        this.targetPool = other.targetPool;
      }
      if (other.getTargetPoolsRemoveHealthCheckRequestResource() != null) {
        this.targetPoolsRemoveHealthCheckRequestResource =
            other.targetPoolsRemoveHealthCheckRequestResource;
      }
      if (other.getUserIp() != null) {
        this.userIp = other.userIp;
      }
      return this;
    }

    Builder(RemoveHealthCheckTargetPoolHttpRequest source) {
      this.access_token = source.access_token;
      this.callback = source.callback;
      this.fields = source.fields;
      this.key = source.key;
      this.prettyPrint = source.prettyPrint;
      this.quotaUser = source.quotaUser;
      this.requestId = source.requestId;
      this.targetPool = source.targetPool;
      this.targetPoolsRemoveHealthCheckRequestResource =
          source.targetPoolsRemoveHealthCheckRequestResource;
      this.userIp = source.userIp;
    }

    /** OAuth 2.0 token for the current user. */
    public String getAccessToken() {
      return access_token;
    }

    /** OAuth 2.0 token for the current user. */
    public Builder setAccessToken(String access_token) {
      this.access_token = access_token;
      return this;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public String getCallback() {
      return callback;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public Builder setCallback(String callback) {
      this.callback = callback;
      return this;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public String getFields() {
      return fields;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public Builder setFields(String fields) {
      this.fields = fields;
      return this;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public String getKey() {
      return key;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public Builder setKey(String key) {
      this.key = key;
      return this;
    }

    /** Returns response with indentations and line breaks. */
    public String getPrettyPrint() {
      return prettyPrint;
    }

    /** Returns response with indentations and line breaks. */
    public Builder setPrettyPrint(String prettyPrint) {
      this.prettyPrint = prettyPrint;
      return this;
    }

    /** Alternative to userIp. */
    public String getQuotaUser() {
      return quotaUser;
    }

    /** Alternative to userIp. */
    public Builder setQuotaUser(String quotaUser) {
      this.quotaUser = quotaUser;
      return this;
    }

    /**
     * An optional request ID to identify requests. Specify a unique request ID so that if you must
     * retry your request, the server will know to ignore the request if it has already been
     * completed.
     *
     * <p>For example, consider a situation where you make an initial request and the request times
     * out. If you make the request again with the same request ID, the server can check if original
     * operation with the same request ID was received, and if so, will ignore the second request.
     * This prevents clients from accidentally creating duplicate commitments.
     *
     * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
     * (00000000-0000-0000-0000-000000000000).
     */
    public String getRequestId() {
      return requestId;
    }

    /**
     * An optional request ID to identify requests. Specify a unique request ID so that if you must
     * retry your request, the server will know to ignore the request if it has already been
     * completed.
     *
     * <p>For example, consider a situation where you make an initial request and the request times
     * out. If you make the request again with the same request ID, the server can check if original
     * operation with the same request ID was received, and if so, will ignore the second request.
     * This prevents clients from accidentally creating duplicate commitments.
     *
     * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
     * (00000000-0000-0000-0000-000000000000).
     */
    public Builder setRequestId(String requestId) {
      this.requestId = requestId;
      return this;
    }

    /**
     * Name of the target pool to remove health checks from. It must have the format
     * `{project}/regions/{region}/targetPools/{targetPool}/removeHealthCheck`. \`{targetPool}\`
     * must start with a letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`),
     * dashes (\`-\`), underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or
     * percent signs (\`%\`). It must be between 3 and 255 characters in length, and it must not
     * start with \`"goog"\`.
     */
    public String getTargetPool() {
      return targetPool;
    }

    /**
     * Name of the target pool to remove health checks from. It must have the format
     * `{project}/regions/{region}/targetPools/{targetPool}/removeHealthCheck`. \`{targetPool}\`
     * must start with a letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`),
     * dashes (\`-\`), underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or
     * percent signs (\`%\`). It must be between 3 and 255 characters in length, and it must not
     * start with \`"goog"\`.
     */
    public Builder setTargetPool(String targetPool) {
      this.targetPool = targetPool;
      return this;
    }

    /** The request body resource for this call. */
    public TargetPoolsRemoveHealthCheckRequest getTargetPoolsRemoveHealthCheckRequestResource() {
      return targetPoolsRemoveHealthCheckRequestResource;
    }

    /** The request body resource for this call. */
    public Builder setTargetPoolsRemoveHealthCheckRequestResource(
        TargetPoolsRemoveHealthCheckRequest targetPoolsRemoveHealthCheckRequestResource) {
      this.targetPoolsRemoveHealthCheckRequestResource =
          targetPoolsRemoveHealthCheckRequestResource;
      return this;
    }

    /** IP address of the end user for whom the API call is being made. */
    public String getUserIp() {
      return userIp;
    }

    /** IP address of the end user for whom the API call is being made. */
    public Builder setUserIp(String userIp) {
      this.userIp = userIp;
      return this;
    }

    /**
     * Builds the request.
     *
     * @throws IllegalStateException if the required {@code targetPool} field was not set
     */
    public RemoveHealthCheckTargetPoolHttpRequest build() {
      String missing = "";
      if (targetPool == null) {
        missing += " targetPool";
      }
      if (!missing.isEmpty()) {
        throw new IllegalStateException("Missing required properties:" + missing);
      }
      return new RemoveHealthCheckTargetPoolHttpRequest(
          access_token,
          callback,
          fields,
          key,
          prettyPrint,
          quotaUser,
          requestId,
          targetPool,
          targetPoolsRemoveHealthCheckRequestResource,
          userIp);
    }

    /** Returns an independent copy of this builder. */
    public Builder clone() {
      Builder newBuilder = new Builder();
      newBuilder.setAccessToken(this.access_token);
      newBuilder.setCallback(this.callback);
      newBuilder.setFields(this.fields);
      newBuilder.setKey(this.key);
      newBuilder.setPrettyPrint(this.prettyPrint);
      newBuilder.setQuotaUser(this.quotaUser);
      newBuilder.setRequestId(this.requestId);
      newBuilder.setTargetPool(this.targetPool);
      newBuilder.setTargetPoolsRemoveHealthCheckRequestResource(
          this.targetPoolsRemoveHealthCheckRequestResource);
      newBuilder.setUserIp(this.userIp);
      return newBuilder;
    }
  }

  @Override
  public String toString() {
    return "RemoveHealthCheckTargetPoolHttpRequest{"
        + "access_token="
        + access_token
        + ", "
        + "callback="
        + callback
        + ", "
        + "fields="
        + fields
        + ", "
        + "key="
        + key
        + ", "
        + "prettyPrint="
        + prettyPrint
        + ", "
        + "quotaUser="
        + quotaUser
        + ", "
        + "requestId="
        + requestId
        + ", "
        + "targetPool="
        + targetPool
        + ", "
        + "targetPoolsRemoveHealthCheckRequestResource="
        + targetPoolsRemoveHealthCheckRequestResource
        + ", "
        + "userIp="
        + userIp
        + "}";
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o instanceof RemoveHealthCheckTargetPoolHttpRequest) {
      RemoveHealthCheckTargetPoolHttpRequest that = (RemoveHealthCheckTargetPoolHttpRequest) o;
      return Objects.equals(this.access_token, that.getAccessToken())
          && Objects.equals(this.callback, that.getCallback())
          && Objects.equals(this.fields, that.getFields())
          && Objects.equals(this.key, that.getKey())
          && Objects.equals(this.prettyPrint, that.getPrettyPrint())
          && Objects.equals(this.quotaUser, that.getQuotaUser())
          && Objects.equals(this.requestId, that.getRequestId())
          && Objects.equals(this.targetPool, that.getTargetPool())
          && Objects.equals(
              this.targetPoolsRemoveHealthCheckRequestResource,
              that.getTargetPoolsRemoveHealthCheckRequestResource())
          && Objects.equals(this.userIp, that.getUserIp());
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        access_token,
        callback,
        fields,
        key,
        prettyPrint,
        quotaUser,
        requestId,
        targetPool,
        targetPoolsRemoveHealthCheckRequestResource,
        userIp);
  }
}
|
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.tests.event;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.HashSet;
import org.junit.Test;
import com.vaadin.event.ShortcutAction;
import com.vaadin.shared.util.SharedUtil;
import com.vaadin.tests.design.DesignFormatterTest;
/**
 * Tests various things about shortcut actions.
 *
 * @since 7.4
 * @author Vaadin Ltd
 */
public class ShortcutActionTest {

    private static final String[] KEYS = "a b c d e f g h i j k l m n o p q r s t u v w x y z"
            .split("\\s+");

    @Test
    public void testHashCodeUniqueness() {
        HashSet<ShortcutAction> seen = new HashSet<>();
        String[] modifierCombos = { "^", "&", "_", "&^", "&_", "_^", "&^_" };
        for (String modifiers : modifierCombos) {
            for (String key : KEYS) {
                ShortcutAction candidate = new ShortcutAction(modifiers + key);
                // Every new action must differ from all previously created ones.
                for (ShortcutAction earlier : seen) {
                    assertFalse(equals(candidate, earlier));
                }
                seen.add(candidate);
            }
        }
    }

    @Test
    public void testModifierOrderIrrelevant() {
        String[] twoModifierCombos = { "&^", "&_", "_^" };
        String[] threeModifierPermutations = { "&_^", "^&_", "^_&", "_^&",
                "_&^" };
        for (String key : KEYS) {
            // Two modifiers: swapping their order must yield an equal action.
            for (String combo : twoModifierCombos) {
                ShortcutAction first = new ShortcutAction(combo + key);
                String swapped = combo.substring(1) + combo.substring(0, 1);
                ShortcutAction second = new ShortcutAction(swapped + key);
                assertTrue(combo + key, equals(first, second));
            }
            // Three modifiers: every permutation must equal the canonical form.
            ShortcutAction canonical = new ShortcutAction("&^_" + key);
            for (String permutation : threeModifierPermutations) {
                ShortcutAction permuted = new ShortcutAction(permutation + key);
                assertTrue(permutation + key, equals(canonical, permuted));
            }
        }
    }

    @Test
    public void testSameKeycodeDifferentCaptions() {
        ShortcutAction first = new ShortcutAction("E&xit");
        ShortcutAction second = new ShortcutAction("Lu&xtorpeda - Autystyczny");
        assertFalse(equals(first, second));
    }

    /**
     * A static method to allow comparison of two different actions.
     *
     * @see DesignFormatterTest
     *
     * @param act
     *            One action to compare.
     * @param other
     *            Second action to compare.
     * @return <b>true</b> when both actions are the same (caption, icon, and
     *         key combination).
     */
    public static final boolean equals(ShortcutAction act,
            ShortcutAction other) {
        if (!SharedUtil.equals(other.getCaption(), act.getCaption())) {
            return false;
        }
        if (!SharedUtil.equals(other.getIcon(), act.getIcon())) {
            return false;
        }
        if (act.getKeyCode() != other.getKeyCode()
                || act.getModifiers().length != other.getModifiers().length) {
            return false;
        }
        // Set-based comparison of the modifier arrays; there is no nice way of
        // turning an int[] into a Set directly.
        HashSet<Integer> modifierSet = new HashSet<>(act.getModifiers().length);
        for (int mod : act.getModifiers()) {
            modifierSet.add(mod);
        }
        for (int mod : other.getModifiers()) {
            modifierSet.remove(mod);
        }
        return modifierSet.isEmpty();
    }
}
|
package domain;
import java.util.ArrayList;
import java.util.List;
/**
 * {@link Observable} implementation that broadcasts player-related events to registered
 * {@link Observer}s via {@code update(key, action, obj)}.
 */
public class PlayerObservable implements Observable {

	private static PlayerObservable instance;

	// Kept public for backward compatibility with callers that access the list directly.
	public List<Observer> observers = new ArrayList<>();

	public PlayerObservable() {
	}

	/**
	 * Returns the lazily created shared instance. Note that the public constructor still
	 * permits independent instances to exist alongside it.
	 */
	public synchronized static PlayerObservable getInstance() {
		if (instance == null) {
			instance = new PlayerObservable();
		}
		return instance;
	}

	@Override
	public void notifyObservers(String key, String action, Object obj) {
		// Iterate over a snapshot so an observer may register or unregister observers
		// from within update() without triggering a ConcurrentModificationException.
		for (Observer o : new ArrayList<>(observers)) {
			o.update(key, action, obj);
		}
	}

	@Override
	public void addObserver(Observer o) {
		observers.add(o);
	}

	@Override
	public void removeObserver(Observer o) {
		observers.remove(o);
	}
}
|
package org.batfish.question;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.batfish.common.Answerer;
import org.batfish.common.BatfishException;
import org.batfish.common.Pair;
import org.batfish.common.plugin.IBatfish;
import org.batfish.datamodel.Configuration;
import org.batfish.datamodel.Interface;
import org.batfish.datamodel.Ip;
import org.batfish.datamodel.Prefix;
import org.batfish.datamodel.Vrf;
import org.batfish.datamodel.answers.AnswerElement;
import org.batfish.datamodel.collections.MultiSet;
import org.batfish.datamodel.collections.TreeMultiSet;
import org.batfish.datamodel.questions.Question;
/** Question plugin that finds interfaces of the same node/VRF sharing a subnet. */
public class SelfAdjacenciesQuestionPlugin extends QuestionPlugin {

  /** Answer: for each node, maps shared (base) prefixes to the interface/IP pairs on them. */
  public static class SelfAdjacenciesAnswerElement implements AnswerElement {

    /** A JSON-serializable (interfaceName, ip) pair. */
    public static class InterfaceIpPair extends Pair<String, Ip> {
      private static final String PROP_INTERFACE_NAME = "interfaceName";
      private static final String PROP_IP = "ip";

      /** Serialization version. */
      private static final long serialVersionUID = 1L;

      @JsonCreator
      public InterfaceIpPair(
          @JsonProperty(PROP_INTERFACE_NAME) String t1, @JsonProperty(PROP_IP) Ip t2) {
        super(t1, t2);
      }

      @JsonProperty(PROP_INTERFACE_NAME)
      public String getInterfaceName() {
        return _first;
      }

      @JsonProperty(PROP_IP)
      public Ip getIp() {
        return _second;
      }
    }

    // hostname -> base prefix -> set of (interface, address) pairs on that prefix
    private SortedMap<String, SortedMap<Prefix, SortedSet<InterfaceIpPair>>> _selfAdjacencies;

    public SelfAdjacenciesAnswerElement() {
      _selfAdjacencies = new TreeMap<>();
    }

    /** Records that {@code interfaceName} on {@code hostname} has {@code address} in {@code prefix}. */
    public void add(String hostname, Prefix prefix, String interfaceName, Ip address) {
      SortedMap<Prefix, SortedSet<InterfaceIpPair>> prefixMap =
          _selfAdjacencies.computeIfAbsent(hostname, k -> new TreeMap<>());
      SortedSet<InterfaceIpPair> interfaces =
          prefixMap.computeIfAbsent(prefix, k -> new TreeSet<>());
      interfaces.add(new InterfaceIpPair(interfaceName, address));
    }

    public SortedMap<String, SortedMap<Prefix, SortedSet<InterfaceIpPair>>> getSelfAdjacencies() {
      return _selfAdjacencies;
    }

    public void setSelfAdjacencies(
        SortedMap<String, SortedMap<Prefix, SortedSet<InterfaceIpPair>>> selfAdjacencies) {
      _selfAdjacencies = selfAdjacencies;
    }
  }

  /** Computes the self-adjacencies answer for nodes matching the question's regex. */
  public static class SelfAdjacenciesAnswerer extends Answerer {

    public SelfAdjacenciesAnswerer(Question question, IBatfish batfish) {
      super(question, batfish);
    }

    @Override
    public AnswerElement answer() {
      SelfAdjacenciesQuestion question = (SelfAdjacenciesQuestion) _question;
      Pattern nodeRegex;
      try {
        nodeRegex = Pattern.compile(question.getNodeRegex());
      } catch (PatternSyntaxException e) {
        throw new BatfishException(
            "Supplied regex for nodes is not a valid java regex: \""
                + question.getNodeRegex()
                + "\"",
            e);
      }
      SelfAdjacenciesAnswerElement answerElement = new SelfAdjacenciesAnswerElement();
      Map<String, Configuration> configurations = _batfish.loadConfigurations();
      configurations.forEach(
          (hostname, c) -> {
            if (nodeRegex.matcher(hostname).matches()) {
              for (Vrf vrf : c.getVrfs().values()) {
                // First pass: count, per VRF, how many active interfaces carry each
                // base prefix (each interface counted at most once per prefix).
                MultiSet<Prefix> nodePrefixes = new TreeMultiSet<>();
                for (Interface iface : vrf.getInterfaces().values()) {
                  Set<Prefix> ifaceBasePrefixes = new HashSet<>();
                  if (iface.getActive()) {
                    for (Prefix prefix : iface.getAllPrefixes()) {
                      Prefix basePrefix = prefix.getNetworkPrefix();
                      if (!ifaceBasePrefixes.contains(basePrefix)) {
                        ifaceBasePrefixes.add(basePrefix);
                        nodePrefixes.add(basePrefix);
                      }
                    }
                  }
                }
                // Second pass: report every interface/address on a base prefix that
                // more than one interface of this VRF carries.
                for (Interface iface : vrf.getInterfaces().values()) {
                  for (Prefix prefix : iface.getAllPrefixes()) {
                    Prefix basePrefix = prefix.getNetworkPrefix();
                    if (nodePrefixes.count(basePrefix) > 1) {
                      Ip address = prefix.getAddress();
                      String interfaceName = iface.getName();
                      answerElement.add(hostname, basePrefix, interfaceName, address);
                    }
                  }
                }
              }
            }
          });
      return answerElement;
    }
  }

  // <question_page_comment>
  /**
   * Outputs cases where two interfaces on the same node are in the same subnet.
   *
   * <p>This occurrence likely indicates an error in IP address assignment.
   *
   * @type SelfAdjacencies onefile
   * @param nodeRegex Regular expression for names of nodes to include. Default value is '.*' (all
   *     nodes).
   * @example bf_answer("SelfAdjacencies", nodeRegex="as1.*") Analyze nodes whose names begin with
   *     "as1".
   */
  public static class SelfAdjacenciesQuestion extends Question {
    private static final String PROP_NODE_REGEX = "nodeRegex";

    private String _nodeRegex;

    public SelfAdjacenciesQuestion() {
      _nodeRegex = ".*";
    }

    @Override
    public boolean getDataPlane() {
      return false;
    }

    @Override
    public String getName() {
      return "selfadjacencies";
    }

    @JsonProperty(PROP_NODE_REGEX)
    public String getNodeRegex() {
      return _nodeRegex;
    }

    @Override
    public boolean getTraffic() {
      return false;
    }

    @JsonProperty(PROP_NODE_REGEX)
    public void setNodeRegex(String nodeRegex) {
      _nodeRegex = nodeRegex;
    }
  }

  @Override
  protected Answerer createAnswerer(Question question, IBatfish batfish) {
    return new SelfAdjacenciesAnswerer(question, batfish);
  }

  @Override
  protected Question createQuestion() {
    return new SelfAdjacenciesQuestion();
  }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Delayed;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.geode.internal.monitoring.ThreadsMonitoring;
/**
 * A ScheduledThreadPoolExecutor which allows threads to time out after the keep alive time. With
 * the normal ScheduledThreadPoolExecutor, there is no way to configure it such that it only adds
 * threads as needed.
 *
 * This executor is not very useful if you only want to have 1 thread. Use the
 * ScheduledThreadPoolExecutor in that case. This class will throw an exception if you try to
 * configure it with only one thread.
 *
 *
 */
@SuppressWarnings("synthetic-access")
public class ScheduledThreadPoolExecutorWithKeepAlive extends ThreadPoolExecutor
implements ScheduledExecutorService {
private final ScheduledThreadPoolExecutor timer;
private final ThreadsMonitoring threadMonitoring;
/**
 * Creates the executor. One thread is dedicated to the internal timer; the remaining
 * {@code corePoolSize - 1} threads form the worker pool (hence, per the class docs,
 * corePoolSize must be greater than 1).
 */
public ScheduledThreadPoolExecutorWithKeepAlive(int corePoolSize, long keepAlive,
    TimeUnit timeUnit, ThreadFactory threadFactory, ThreadsMonitoring tMonitoring) {
  // Worker pool: 0 core threads plus a SynchronousQueue means threads are created
  // on demand and die after keepAlive idle time.
  super(0, corePoolSize - 1, keepAlive, timeUnit, new SynchronousQueue(), threadFactory,
      new BlockCallerPolicy());
  timer = new ScheduledThreadPoolExecutor(1, threadFactory) {
    @Override
    protected void terminated() {
      super.terminated();
      // Once the timer terminates, shut down the worker pool as well.
      ScheduledThreadPoolExecutorWithKeepAlive.super.shutdown();
    }
  };
  this.threadMonitoring = tMonitoring;
}
@Override
public void execute(Runnable command) {
  // Route through the timer thread, which hands the task off to the worker pool.
  timer.execute(new HandOffTask(command));
}
@Override
protected void beforeExecute(Thread t, Runnable r) {
  // Begin per-task thread monitoring, when monitoring is configured.
  if (this.threadMonitoring != null) {
    threadMonitoring.startMonitor(ThreadsMonitoring.Mode.ScheduledThreadExecutor);
  }
}
@Override
protected void afterExecute(Runnable r, Throwable ex) {
  // End the monitoring started in beforeExecute, when monitoring is configured.
  if (this.threadMonitoring != null) {
    threadMonitoring.endMonitor();
  }
}
/** Submits the task for immediate execution (a zero-delay schedule). */
@Override
public Future submit(Callable task) {
  return schedule(task, 0, TimeUnit.NANOSECONDS);
}
/** Submits the task for immediate execution; the future yields {@code result} on completion. */
@Override
public Future submit(Runnable task, Object result) {
  return schedule(task, 0, TimeUnit.NANOSECONDS, result);
}
/** Submits the task for immediate execution (a zero-delay schedule). */
@Override
public Future submit(Runnable task) {
  return schedule(task, 0, TimeUnit.NANOSECONDS);
}
@Override
public ScheduledFuture schedule(Callable callable, long delay, TimeUnit unit) {
  // The returned future completes when the task runs in the worker pool; it also
  // holds the timer's future so the associated timer task can be cancelled.
  DelegatingScheduledFuture future = new DelegatingScheduledFuture(callable);
  ScheduledFuture timerFuture = timer.schedule(new HandOffTask(future), delay, unit);
  future.setDelegate(timerFuture);
  return future;
}
@Override
public ScheduledFuture schedule(Runnable command, long delay, TimeUnit unit) {
  // Delegate to the private overload with a null completion result.
  return schedule(command, delay, unit, null);
}
/** Common implementation for the Runnable-based submit/schedule methods. */
private ScheduledFuture schedule(Runnable command, long delay, TimeUnit unit, Object result) {
  DelegatingScheduledFuture future = new DelegatingScheduledFuture(command, result);
  ScheduledFuture timerFuture = timer.schedule(new HandOffTask(future), delay, unit);
  future.setDelegate(timerFuture);
  return future;
}
@Override
public ScheduledFuture scheduleAtFixedRate(Runnable command, long initialDelay, long period,
    TimeUnit unit) {
  // periodic=true: the future resets after each run so it can execute again.
  DelegatingScheduledFuture future = new DelegatingScheduledFuture(command, null, true);
  ScheduledFuture timerFuture =
      timer.scheduleAtFixedRate(new HandOffTask(future), initialDelay, period, unit);
  future.setDelegate(timerFuture);
  return future;
}
@Override
public ScheduledFuture scheduleWithFixedDelay(Runnable command, long initialDelay, long delay,
    TimeUnit unit) {
  // periodic=true: the future resets after each run so it can execute again.
  DelegatingScheduledFuture future = new DelegatingScheduledFuture(command, null, true);
  ScheduledFuture timerFuture =
      timer.scheduleWithFixedDelay(new HandOffTask(future), initialDelay, delay, unit);
  future.setDelegate(timerFuture);
  return future;
}
@Override
public void shutdown() {
  // note - the timer has a "hook" (its terminated() override) which will shutdown
  // our worker pool once the timer is shutdown.
  timer.shutdown();
}
/**
 * Shutdown the executor immediately, returning a list of tasks that haven't been run. Like
 * ScheduledThreadPoolExecutor, this returns a list of RunnableScheduledFuture objects, instead of
 * the actual tasks submitted. However, these Future objects are even less useful than the ones
 * returned by ScheduledThreadPoolExecutor. In particular, they don't match the future returned by
 * the {{@link #submit(Runnable)} method, and the run method won't do anything useful. This list
 * should only be used as a count of the number of tasks that didn't execute.
 *
 * @see ScheduledThreadPoolExecutor#shutdownNow()
 */
@Override
public List shutdownNow() {
  List tasks = timer.shutdownNow();
  // Also stop the worker pool immediately; the timer's terminated() hook only
  // performs a graceful shutdown.
  super.shutdownNow();
  return tasks;
}
/**
 * Waits for both the timer and the worker pool to terminate, splitting the single timeout
 * budget across the two waits.
 */
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
  final long deadline = System.nanoTime() + unit.toNanos(timeout);
  if (!timer.awaitTermination(timeout, unit)) {
    return false;
  }
  final long nanosLeft = deadline - System.nanoTime();
  if (nanosLeft < 0) {
    // The timer consumed the whole budget.
    return false;
  }
  return super.awaitTermination(nanosLeft, TimeUnit.NANOSECONDS);
}
/** Core pool size including the dedicated timer thread (hence the +1). */
@Override
public int getCorePoolSize() {
  return super.getCorePoolSize() + 1;
}
/** Largest pool size including the dedicated timer thread (hence the +1). */
@Override
public int getLargestPoolSize() {
  return super.getLargestPoolSize() + 1;
}
/** Maximum pool size including the dedicated timer thread (hence the +1). */
@Override
public int getMaximumPoolSize() {
  return super.getMaximumPoolSize() + 1;
}
/** Current pool size including the dedicated timer thread (hence the +1). */
@Override
public int getPoolSize() {
  return super.getPoolSize() + 1;
}
@Override
public boolean isShutdown() {
  // shutdown() only shuts down the timer (the worker pool follows via the timer's
  // terminated() hook), so the timer's state is authoritative here.
  return timer.isShutdown();
}
@Override
public boolean isTerminated() {
return super.isTerminated() && timer.isTerminated();
}
// method that is in ScheduledThreadPoolExecutor that we should expose here
/** Forwards the policy flag to the backing timer; see ScheduledThreadPoolExecutor for semantics. */
public void setContinueExistingPeriodicTasksAfterShutdownPolicy(boolean b) {
  timer.setContinueExistingPeriodicTasksAfterShutdownPolicy(b);
}
// method that is in ScheduledThreadPoolExecutor that we should expose here
/**
 * Sets whether already-scheduled delayed tasks should still execute after shutdown.
 * Fix: forward the caller's flag {@code b} to the timer — the original ignored the
 * parameter and unconditionally passed {@code false}, so the policy could never be enabled.
 */
public void setExecuteExistingDelayedTasksAfterShutdownPolicy(boolean b) {
  timer.setExecuteExistingDelayedTasksAfterShutdownPolicy(b);
}
/**
 * A Runnable which we put in the timer which simply hands off the contained task for execution
 * in the thread pool when the timer fires.
 *
 */
private class HandOffTask implements Runnable {
  // The task to submit to the worker pool when the timer fires.
  private final Runnable task;
  public HandOffTask(Runnable task) {
    this.task = task;
  }
  @Override
  public void run() {
    try {
      // Qualified super call: submit directly to the enclosing executor's worker pool,
      // bypassing any execute() override on the enclosing class.
      ScheduledThreadPoolExecutorWithKeepAlive.super.execute(task);
    } catch (RejectedExecutionException e) {
      // do nothing, we'll only get this if we're shutting down.
    }
  }
}
/**
 * The future returned by the schedule* methods on this class. This future will not return a value
 * until the task has actually executed in the thread pool, but it allows us to cancel the
 * associated timer task.
 */
private static class DelegatingScheduledFuture<V> extends FutureTask<V>
    implements ScheduledFuture<V> {
  // The timer-side future for the hand-off task; assigned via setDelegate() immediately
  // after the timer accepts the task.
  private ScheduledFuture<V> delegate;
  // True for repeating tasks, which must be reset after each run so they can run again.
  private final boolean periodic;
  public DelegatingScheduledFuture(Runnable runnable, V result) {
    this(runnable, result, false);
  }
  public DelegatingScheduledFuture(Callable<V> callable) {
    this(callable, false);
  }
  public DelegatingScheduledFuture(Runnable runnable, V result, boolean periodic) {
    super(runnable, result);
    this.periodic = periodic;
  }
  public DelegatingScheduledFuture(Callable<V> callable, boolean periodic) {
    super(callable);
    this.periodic = periodic;
  }
  @Override
  public void run() {
    if (periodic) {
      // runAndReset leaves the future in its initial state so the next firing can run again.
      super.runAndReset();
    } else {
      super.run();
    }
  }
  public void setDelegate(ScheduledFuture<V> future) {
    this.delegate = future;
  }
  @Override
  public boolean cancel(boolean mayInterruptIfRunning) {
    // Also cancel the timer-side task, honoring the caller's interrupt preference.
    // Fix: the original hard-coded delegate.cancel(true), which could interrupt the
    // hand-off task even when the caller explicitly requested a non-interrupting cancel.
    delegate.cancel(mayInterruptIfRunning);
    return super.cancel(mayInterruptIfRunning);
  }
  @Override
  public long getDelay(TimeUnit unit) {
    return delegate.getDelay(unit);
  }
  @Override
  public int compareTo(Delayed o) {
    return delegate.compareTo(o);
  }
  // NOTE(review): equals/hashCode delegate to the timer future, so equality is not symmetric
  // with respect to this wrapper — confirm no caller relies on wrapper identity semantics.
  @Override
  public boolean equals(Object o) {
    return delegate.equals(o);
  }
  @Override
  public int hashCode() {
    return delegate.hashCode();
  }
}
/**
 * A RejectedExecutionHandler that makes the submitting thread block until the executor's
 * work queue has room for the rejected task.
 */
protected static class BlockCallerPolicy implements RejectedExecutionHandler {
  @Override
  public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
    // Guard clause: a shut-down executor will never drain the queue, so fail fast.
    if (executor.isShutdown()) {
      throw new RejectedExecutionException("executor has been shutdown");
    }
    try {
      // Blocking put: waits for queue capacity instead of rejecting.
      executor.getQueue().put(r);
    } catch (InterruptedException ie) {
      // Restore the interrupt flag and surface the failure to the submitter.
      Thread.currentThread().interrupt();
      throw new RejectedExecutionException("interrupted", ie);
    }
  }
}
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author william.scosta2
*/
/**
 * Tiny smoke-test class (apparently used to verify a Git setup); prints a fixed message.
 */
public class TesteGit {

    /** Entry point: prints the test message and exits. */
    public static void main(String[] args) {
        String message = "TESTE GI";
        System.out.println(message);
    }
}
|
// ============================================================================
//
// Copyright (C) 2006-2016 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.cwm.db.connection.file;
import java.io.IOException;
import org.talend.dataquality.matchmerge.Record;
/**
 * created by yyin on 2014-9-22 Detailed comment
 *
 * Sequential, forward-only reader of match/merge {@link Record}s from a file-like source.
 * Implementations hold an underlying resource and must be closed via {@link #close()}.
 */
public interface IFileReader {
    /** Returns true if another record is available to read. */
    boolean hasNext() throws IOException;
    /** Reads and returns the next record. */
    Record next() throws IOException;
    /** Releases the underlying resource. */
    void close() throws IOException;
}
|
package com.javarush.task.task12.task1204;
/*
То ли птица, то ли лампа
(Either a bird, or a lamp — JavaRush task description)
*/
/**
 * Prints a localized label for each known type of object.
 */
public class Solution {
    public static void main(String[] args) {
        Object[] samples = {new Cat(), new Bird(), new Lamp(), new Cat(), new Dog()};
        for (Object sample : samples) {
            printObjectType(sample);
        }
    }

    /**
     * Prints one line per known type {@code o} is an instance of, in a fixed order.
     * Unknown types produce no output.
     */
    public static void printObjectType(Object o) {
        // (class, label) pairs, checked in the same order as the original if-chain.
        Object[][] table = {
                {Cat.class, "Кошка"},
                {Bird.class, "Птица"},
                {Lamp.class, "Лампа"},
                {Dog.class, "Собака"},
        };
        for (Object[] entry : table) {
            if (((Class<?>) entry[0]).isInstance(o)) {
                System.out.println((String) entry[1]);
            }
        }
    }

    public static class Cat {
    }

    public static class Dog {
    }

    public static class Bird {
    }

    public static class Lamp {
    }
}
|
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.lib.http;
import com.streamsets.pipeline.api.ErrorCode;
import com.streamsets.pipeline.api.GenerateResourceBundle;
/**
 * Error codes for the HTTP stages. The code is the enum constant's name; the message is a
 * template whose '{}' placeholders are filled in when the error is reported.
 */
@GenerateResourceBundle
public enum Errors implements ErrorCode {
  HTTP_00("Cannot parse record. HTTP-Status: {} Reason: {}"),
  HTTP_01("Error fetching resource. HTTP-Status: {} Reason: {}"),
  HTTP_02("JSON parser found more than one record in chunk. Verify that the correct delimiter is configured."),
  HTTP_03("Error fetching resource. HTTP-Status: {} Reason: {}"),
  HTTP_04("The file '{}' does not exist or is inaccessible."),
  HTTP_05("Password is required for Key Store/Trust Store."),
  HTTP_06("Error evaluating expression: {}"),
  HTTP_07("Vault EL is only available when the resource scheme is https."),
  HTTP_08("HTTP-Status: {}. When using pagination, the results field must be a list but a {} was found."),
  HTTP_09("Chunked transfer encoding is not supported when using pagination."),
  HTTP_10("{} is not a supported data format when using pagination"),
  HTTP_11("HTTP-Status: {}. Record already contains field {}, cannot write response header."),
  HTTP_12("HTTP-Status: {}. Record does not contain result field path '{}'"),
  HTTP_13("Invalid Proxy URI. Reason : {}"),
  HTTP_14("Failing stage as per configuration. Status {}. Reason : {}"),
  HTTP_15("When using backoff, base interval must be greater than 0"),
  HTTP_16("Actions can only be configured for non-OK statuses (i.e. not in the [200,300) range)"),
  HTTP_17("A particular status code can only be mapped to one action. Code {} was mapped more than once."),
  HTTP_18("Failing stage as per configuration for read timeout"),
  HTTP_19("Failing stage because number of request retries exceeded configured maximum of {}"),
  HTTP_20("Content-Type header was present but was a {}, not a String"),
  HTTP_21("OAuth2 authentication failed. Please make sure the credentials are valid."),
  HTTP_22("OAuth2 authentication response does not contain access token"),
  HTTP_23("Token returned by authorization service does not have the authority to access the service. Please verify the credentials provided."),
  HTTP_24("Token URL was not found. Please verify that the URL: '{}' is correct, and the transfer encoding: '{}' is accepted"),
  HTTP_25("Unable to parse expression"), // Don't log expression as it could contain secure data
  HTTP_26("Algorithm '{}' is unavailable"),
  HTTP_27("Key is invalid: {}"),
  HTTP_28("Exception in post processing or cleanup. HTTP-Status: {} Reason: {}"),
  HTTP_29("Can't resolve credential value for {}: {}"),
  HTTP_30("Can't resolve OAuth2 credentials: {}"),
  HTTP_31("Can't resolve OAuth1 credentials: {}"),
  HTTP_32("Error executing request. HTTP-Status: {} Reason: {}"),
  HTTP_33("Null authorization token - checked for '{}', '{}' and '{}'"),
  HTTP_34("HTTP-Status: {}. Received no entity in the HTTP message body."),
  HTTP_35("Batch size greater than maximal batch size allowed in sdc.properties, maxBatchSize: {}"),
  // HTTP Target
  HTTP_40("Error sending resource. HTTP-Status: {} Reason: {}"),
  HTTP_41("Error sending resource. Reason: {}"),
  // WebSocket Target
  HTTP_50("Error sending resource. Reason: {}"),
  HTTP_51("Invalid Resource URI."),
  HTTP_52("Invalid Resource URI. Reason : {}"),
  HTTP_53("Invalid header: {}"),
  // HTTP Processor
  HTTP_61("HTTP-Status: {}. Cannot parse the field '{}' for record '{}': {}"),
  HTTP_62("Cannot parse the field '{}' as type {} is not supported"),
  HTTP_63("{} parsing the field '{}' as type {} for record '{}': {}"),
  HTTP_64("HTTP-Status: {}. IOException attempting to parse whole file field '{}' for record '{}': {}"),
  HTTP_65("HTTP-Status: {}. Input field '{}' does not exist in record '{}'"),
  HTTP_66("HTTP-Status: {}. Link field '{}' does not exist in record"),
  ;
  // Message template for this error code; '{}' placeholders are resolved at report time.
  private final String msg;
  Errors(String msg) {
    this.msg = msg;
  }
  /** The error code is simply the constant's name. */
  @Override
  public String getCode() {
    return name();
  }
  /** The raw message template (placeholders unresolved). */
  @Override
  public String getMessage() {
    return msg;
  }
}
|
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sample.data.mongo;
import org.springframework.data.annotation.Id;
/**
 * A customer document persisted by Spring Data MongoDB. Fields are populated via reflection
 * by the mapping framework, so no getters/setters are declared.
 */
public class Customer {
    // Document id; @Id marks it as the MongoDB _id field.
    @Id
    private String id;
    private String firstName;
    private String lastName;
    // No-arg constructor required by the mapping framework.
    public Customer() {
    }
    public Customer(String firstName, String lastName) {
        this.firstName = firstName;
        this.lastName = lastName;
    }
    @Override
    public String toString() {
        return String.format("Customer[id=%s, firstName='%s', lastName='%s']", this.id,
                this.firstName, this.lastName);
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.gateway.launcher;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
public class Streamer extends Thread {
InputStream input;
OutputStream output;
byte[] buffer;
private Streamer( String name, InputStream input, OutputStream output, int buffer, int priority ) {
this.buffer = new byte[ buffer ];
this.setName( name );
this.setPriority( priority );
this.start();
this.input = input;
this.output = output;
}
@Override
public void run() {
try {
int read = input.read( buffer );
while( read >= 0 ) {
output.write( buffer, 0, read );
read = input.read( buffer );
}
} catch( IOException e ) {
e.printStackTrace(); // I18N not possible/required.
}
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.table;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanerPlan;
import org.apache.hudi.avro.model.HoodieClusteringPlan;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.avro.model.HoodieRestoreMetadata;
import org.apache.hudi.avro.model.HoodieRollbackMetadata;
import org.apache.hudi.avro.model.HoodieRollbackPlan;
import org.apache.hudi.avro.model.HoodieSavepointMetadata;
import org.apache.hudi.common.config.HoodieMetadataConfig;
import org.apache.hudi.common.config.SerializableConfiguration;
import org.apache.hudi.common.engine.HoodieEngineContext;
import org.apache.hudi.common.engine.HoodieLocalEngineContext;
import org.apache.hudi.common.engine.TaskContextSupplier;
import org.apache.hudi.common.fs.ConsistencyGuard;
import org.apache.hudi.common.fs.ConsistencyGuard.FileVisibility;
import org.apache.hudi.common.fs.ConsistencyGuardConfig;
import org.apache.hudi.common.fs.FailSafeConsistencyGuard;
import org.apache.hudi.common.fs.OptimisticConsistencyGuard;
import org.apache.hudi.common.model.HoodieFileFormat;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.model.HoodieWriteStat;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.TableSchemaResolver;
import org.apache.hudi.common.table.log.block.HoodieLogBlock.HoodieLogBlockType;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.view.FileSystemViewManager;
import org.apache.hudi.common.table.view.FileSystemViewStorageConfig;
import org.apache.hudi.common.table.view.HoodieTableFileSystemView;
import org.apache.hudi.common.table.view.SyncableFileSystemView;
import org.apache.hudi.common.table.view.TableFileSystemView;
import org.apache.hudi.common.table.view.TableFileSystemView.BaseFileOnlyView;
import org.apache.hudi.common.table.view.TableFileSystemView.SliceView;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.exception.HoodieInsertException;
import org.apache.hudi.exception.HoodieUpsertException;
import org.apache.hudi.index.HoodieIndex;
import org.apache.hudi.metadata.HoodieTableMetadata;
import org.apache.hudi.table.action.HoodieWriteMetadata;
import org.apache.hudi.table.action.bootstrap.HoodieBootstrapWriteMetadata;
import org.apache.hudi.table.marker.WriteMarkers;
import org.apache.hudi.table.marker.WriteMarkersFactory;
import org.apache.avro.Schema;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Abstract implementation of a HoodieTable.
 *
 * @param <T> Sub type of HoodieRecordPayload
 * @param <I> Type of inputs
 * @param <K> Type of keys
 * @param <O> Type of outputs
 */
public abstract class HoodieTable<T extends HoodieRecordPayload, I, K, O> implements Serializable {
  private static final Logger LOG = LogManager.getLogger(HoodieTable.class);
  // Write-path configuration for this table.
  protected final HoodieWriteConfig config;
  // Gives access to the table's timeline and storage layout.
  protected final HoodieTableMetaClient metaClient;
  // Index used to map records to file groups (engine-specific, built in the constructor).
  protected final HoodieIndex<T, I, K, O> index;
  // Serializable wrapper around the Hadoop configuration captured from the engine context.
  private SerializableConfiguration hadoopConfiguration;
  // Supplies engine-specific task context information.
  protected final TaskContextSupplier taskContextSupplier;
  // Handle to the table metadata; also feeds the file-system view manager.
  private final HoodieTableMetadata metadata;
  // Transient: rebuilt lazily after deserialization — see getViewManager().
  private transient FileSystemViewManager viewManager;
  protected final transient HoodieEngineContext context;
/**
 * Builds the table handle from the write config, engine context and meta client.
 */
protected HoodieTable(HoodieWriteConfig config, HoodieEngineContext context, HoodieTableMetaClient metaClient) {
  this.config = config;
  this.hadoopConfiguration = context.getHadoopConf();
  this.context = context;
  // Re-materialize the metadata config from the write config's raw properties.
  HoodieMetadataConfig metadataConfig = HoodieMetadataConfig.newBuilder().fromProperties(config.getMetadataConfig().getProps())
      .build();
  this.metadata = HoodieTableMetadata.create(context, metadataConfig, config.getBasePath(),
      FileSystemViewStorageConfig.SPILLABLE_DIR.defaultValue());
  this.viewManager = FileSystemViewManager.createViewManager(context, config.getMetadataConfig(), config.getViewStorageConfig(), config.getCommonConfig(), () -> metadata);
  this.metaClient = metaClient;
  // NOTE(review): calls the abstract getIndex() from the constructor — subclasses must not
  // rely on their own (not-yet-initialized) state inside getIndex().
  this.index = getIndex(config, context);
  this.taskContextSupplier = context.getTaskContextSupplier();
}
/** Creates the engine-specific index implementation for this table. */
protected abstract HoodieIndex<T, I, K, O> getIndex(HoodieWriteConfig config, HoodieEngineContext context);
// viewManager is transient, so it must be rebuilt lazily after deserialization;
// synchronized so only one manager is ever created per table instance.
private synchronized FileSystemViewManager getViewManager() {
  if (null == viewManager) {
    viewManager = FileSystemViewManager.createViewManager(getContext(), config.getMetadataConfig(), config.getViewStorageConfig(), config.getCommonConfig(), () -> metadata);
  }
  return viewManager;
}
/**
 * Upsert a batch of new records into Hoodie table at the supplied instantTime.
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for the action
 * @param records hoodieRecords to upsert
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata<O> upsert(HoodieEngineContext context, String instantTime,
    I records);
/**
 * Insert a batch of new records into Hoodie table at the supplied instantTime.
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for the action
 * @param records hoodieRecords to insert
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata<O> insert(HoodieEngineContext context, String instantTime,
    I records);
/**
 * Bulk Insert a batch of new records into Hoodie table at the supplied instantTime.
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for the action
 * @param records hoodieRecords to bulk-insert
 * @param bulkInsertPartitioner User Defined Partitioner
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata<O> bulkInsert(HoodieEngineContext context, String instantTime,
    I records, Option<BulkInsertPartitioner<I>> bulkInsertPartitioner);
/**
 * Deletes a list of {@link HoodieKey}s from the Hoodie table, at the supplied instantTime {@link HoodieKey}s will be
 * de-duped and non existent keys will be removed before deleting.
 *
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for the action
 * @param keys {@link List} of {@link HoodieKey}s to be deleted
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata<O> delete(HoodieEngineContext context, String instantTime, K keys);
/**
 * Deletes all data of partitions.
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for the action
 * @param partitions {@link List} of partition to be deleted
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata deletePartitions(HoodieEngineContext context, String instantTime, List<String> partitions);
/**
 * Upserts the given prepared records into the Hoodie table, at the supplied instantTime.
 * <p>
 * This implementation requires that the input records are already tagged, and de-duped if needed.
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for the action
 * @param preppedRecords hoodieRecords to upsert
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata<O> upsertPrepped(HoodieEngineContext context, String instantTime,
    I preppedRecords);
/**
 * Inserts the given prepared records into the Hoodie table, at the supplied instantTime.
 * <p>
 * This implementation requires that the input records are already tagged, and de-duped if needed.
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for the action
 * @param preppedRecords hoodieRecords to insert
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata<O> insertPrepped(HoodieEngineContext context, String instantTime,
    I preppedRecords);
/**
 * Bulk Insert the given prepared records into the Hoodie table, at the supplied instantTime.
 * <p>
 * This implementation requires that the input records are already tagged, and de-duped if needed.
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for the action
 * @param preppedRecords hoodieRecords to bulk-insert
 * @param bulkInsertPartitioner User Defined Partitioner
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata<O> bulkInsertPrepped(HoodieEngineContext context, String instantTime,
    I preppedRecords, Option<BulkInsertPartitioner<I>> bulkInsertPartitioner);
/**
 * Replaces all the existing records and inserts the specified new records into Hoodie table at the supplied instantTime,
 * for the partition paths contained in input records.
 *
 * @param context HoodieEngineContext
 * @param instantTime Instant time for the replace action
 * @param records input records
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata<O> insertOverwrite(HoodieEngineContext context, String instantTime, I records);
/**
 * Delete all the existing records of the Hoodie table and inserts the specified new records into Hoodie table at the supplied instantTime,
 * for the partition paths contained in input records.
 *
 * @param context HoodieEngineContext
 * @param instantTime Instant time for the replace action
 * @param records input records
 * @return HoodieWriteMetadata
 */
public abstract HoodieWriteMetadata<O> insertOverwriteTable(HoodieEngineContext context, String instantTime, I records);
/** Returns the write configuration backing this table. */
public HoodieWriteConfig getConfig() {
  return config;
}
/** Returns the meta client for this table's timeline and storage layout. */
public HoodieTableMetaClient getMetaClient() {
  return metaClient;
}
/** Returns the Hadoop configuration held by the meta client. */
public Configuration getHadoopConf() {
  return metaClient.getHadoopConf();
}
/**
 * Get the view of the file system for this table.
 */
public TableFileSystemView getFileSystemView() {
  return new HoodieTableFileSystemView(metaClient, getCompletedCommitsTimeline());
}
/**
 * Get the base file only view of the file system for this table.
 */
public BaseFileOnlyView getBaseFileOnlyView() {
  return getViewManager().getFileSystemView(metaClient);
}
/**
 * Get the full view of the file system for this table.
 */
public SliceView getSliceView() {
  return getViewManager().getFileSystemView(metaClient);
}
/**
 * Get complete view of the file system for this table with ability to force sync.
 */
public SyncableFileSystemView getHoodieView() {
  return getViewManager().getFileSystemView(metaClient);
}
/**
 * Get only the completed (no-inflights) commit + deltacommit timeline.
 */
public HoodieTimeline getCompletedCommitsTimeline() {
  return metaClient.getCommitsTimeline().filterCompletedInstants();
}
/**
 * Get only the completed (no-inflights) commit timeline.
 */
public HoodieTimeline getCompletedCommitTimeline() {
  return metaClient.getCommitTimeline().filterCompletedInstants();
}
/**
 * Get only the inflights (no-completed) commit timeline.
 */
public HoodieTimeline getPendingCommitTimeline() {
  return metaClient.getCommitsTimeline().filterPendingExcludingCompaction();
}
/**
 * Get only the completed (no-inflights) clean timeline.
 */
public HoodieTimeline getCompletedCleanTimeline() {
  return getActiveTimeline().getCleanerTimeline().filterCompletedInstants();
}
/**
 * Get clean timeline.
 */
public HoodieTimeline getCleanTimeline() {
  return getActiveTimeline().getCleanerTimeline();
}
/**
 * Get rollback timeline.
 */
public HoodieTimeline getRollbackTimeline() {
  return getActiveTimeline().getRollbackTimeline();
}
/**
 * Get only the completed (no-inflights) savepoint timeline.
 */
public HoodieTimeline getCompletedSavepointTimeline() {
  return getActiveTimeline().getSavePointTimeline().filterCompletedInstants();
}
/**
 * Get the list of savepoints in this table.
 */
public List<String> getSavepoints() {
  return getCompletedSavepointTimeline().getInstants().map(HoodieInstant::getTimestamp).collect(Collectors.toList());
}
/** Returns the active timeline of this table. */
public HoodieActiveTimeline getActiveTimeline() {
  return metaClient.getActiveTimeline();
}
/**
 * Return the index.
 */
public HoodieIndex<T, I, K, O> getIndex() {
  return index;
}
/**
 * Schedule compaction for the instant time.
 *
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for scheduling compaction
 * @param extraMetadata additional metadata to write into plan
 * @return HoodieCompactionPlan, if compaction was scheduled
 */
public abstract Option<HoodieCompactionPlan> scheduleCompaction(HoodieEngineContext context,
                                                                String instantTime,
                                                                Option<Map<String, String>> extraMetadata);
/**
 * Run Compaction on the table. Compaction arranges the data so that it is optimized for data access.
 *
 * @param context HoodieEngineContext
 * @param compactionInstantTime Instant Time
 */
public abstract HoodieWriteMetadata<O> compact(HoodieEngineContext context,
                                               String compactionInstantTime);
/**
 * Schedule clustering for the instant time.
 *
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for scheduling clustering
 * @param extraMetadata additional metadata to write into plan
 * @return HoodieClusteringPlan, if there is enough data for clustering.
 */
public abstract Option<HoodieClusteringPlan> scheduleClustering(HoodieEngineContext context,
                                                                String instantTime,
                                                                Option<Map<String, String>> extraMetadata);
/**
 * Execute Clustering on the table. Clustering re-arranges the data so that it is optimized for data access.
 *
 * @param context HoodieEngineContext
 * @param clusteringInstantTime Instant Time
 */
public abstract HoodieWriteMetadata<O> cluster(HoodieEngineContext context, String clusteringInstantTime);
/**
 * Perform metadata/full bootstrap of a Hudi table.
 * @param context HoodieEngineContext
 * @param extraMetadata Additional Metadata for storing in commit file.
 * @return HoodieBootstrapWriteMetadata
 */
public abstract HoodieBootstrapWriteMetadata<O> bootstrap(HoodieEngineContext context, Option<Map<String, String>> extraMetadata);
/**
 * Perform rollback of bootstrap of a Hudi table.
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for the rollback action
 */
public abstract void rollbackBootstrap(HoodieEngineContext context, String instantTime);
/**
 * Schedule cleaning for the instant time.
 *
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for scheduling cleaning
 * @param extraMetadata additional metadata to write into plan
 * @return HoodieCleanerPlan, if there is anything to clean.
 */
public abstract Option<HoodieCleanerPlan> scheduleCleaning(HoodieEngineContext context,
                                                           String instantTime,
                                                           Option<Map<String, String>> extraMetadata);
/**
 * Executes a new clean action.
 *
 * @param context HoodieEngineContext
 * @param cleanInstantTime Instant Time for the clean action
 * @return information on cleaned file slices
 */
public abstract HoodieCleanMetadata clean(HoodieEngineContext context, String cleanInstantTime);
/**
 * Schedule rollback for the instant time.
 *
 * @param context HoodieEngineContext
 * @param instantTime Instant Time for scheduling rollback
 * @param instantToRollback instant to be rolled back
 * @param skipTimelinePublish if true, do not publish the rollback on the timeline
 * @return HoodieRollbackPlan containing info on rollback.
 */
public abstract Option<HoodieRollbackPlan> scheduleRollback(HoodieEngineContext context,
                                                            String instantTime,
                                                            HoodieInstant instantToRollback,
                                                            boolean skipTimelinePublish);
/**
 * Rollback the (inflight/committed) record changes with the given commit time.
 * <pre>
 * Steps:
 * (1) Atomically unpublish this commit
 * (2) clean indexing data
 * (3) clean new generated parquet files.
 * (4) Finally delete .commit or .inflight file, if deleteInstants = true
 * </pre>
 */
public abstract HoodieRollbackMetadata rollback(HoodieEngineContext context,
                                                String rollbackInstantTime,
                                                HoodieInstant commitInstant,
                                                boolean deleteInstants);
/**
 * Create a savepoint at the specified instant, so that the table can be restored
 * to this point-in-timeline later if needed.
 */
public abstract HoodieSavepointMetadata savepoint(HoodieEngineContext context,
                                                  String instantToSavepoint,
                                                  String user,
                                                  String comment);
/**
 * Restore the table to the given instant. Note that this is a admin table recovery operation
 * that would cause any running queries that are accessing file slices written after the instant to fail.
 */
public abstract HoodieRestoreMetadata restore(HoodieEngineContext context,
                                              String restoreInstantTime,
                                              String instantToRestore);
/**
 * Finalize the written data onto storage. Perform any final cleanups.
 *
 * @param context HoodieEngineContext
 * @param stats List of HoodieWriteStats
 * @throws HoodieIOException if some paths can't be finalized on storage
 */
public void finalizeWrite(HoodieEngineContext context, String instantTs, List<HoodieWriteStat> stats) throws HoodieIOException {
  reconcileAgainstMarkers(context, instantTs, stats, config.getConsistencyGuardConfig().isConsistencyCheckEnabled());
}
/**
 * Deletes the given (partition, file) pairs in parallel across the engine context.
 * Failures surface as HoodieIOException.
 */
private void deleteInvalidFilesByPartitions(HoodieEngineContext context, Map<String, List<Pair<String, String>>> invalidFilesByPartition) {
  // Now delete partially written files
  context.setJobStatus(this.getClass().getSimpleName(), "Delete invalid files generated during the write operation");
  context.map(new ArrayList<>(invalidFilesByPartition.values()), partitionWithFileList -> {
    final FileSystem fileSystem = metaClient.getFs();
    LOG.info("Deleting invalid data files=" + partitionWithFileList);
    if (partitionWithFileList.isEmpty()) {
      return true;
    }
    // Delete
    partitionWithFileList.stream().map(Pair::getValue).forEach(file -> {
      try {
        // Non-recursive delete: each entry is an individual data file.
        fileSystem.delete(new Path(file), false);
      } catch (IOException e) {
        throw new HoodieIOException(e.getMessage(), e);
      }
    });
    return true;
  }, config.getFinalizeWriteParallelism());
}
/**
 * Returns the possible invalid data file name with given marker files.
 */
protected Set<String> getInvalidDataPaths(WriteMarkers markers) throws IOException {
  return markers.createdAndMergedDataPaths(context, config.getFinalizeWriteParallelism());
}
/**
 * Reconciles WriteStats and marker files to detect and safely delete duplicate data files created because of Spark
 * retries.
 *
 * @param context HoodieEngineContext
 * @param instantTs Instant Timestamp
 * @param stats Hoodie Write Stat
 * @param consistencyCheckEnabled Consistency Check Enabled
 * @throws HoodieIOException if the reconciliation or the deletes fail
 */
protected void reconcileAgainstMarkers(HoodieEngineContext context,
                                       String instantTs,
                                       List<HoodieWriteStat> stats,
                                       boolean consistencyCheckEnabled) throws HoodieIOException {
  try {
    // Reconcile marker and data files with WriteStats so that partially written data-files due to failed
    // (but succeeded on retry) tasks are removed.
    String basePath = getMetaClient().getBasePath();
    WriteMarkers markers = WriteMarkersFactory.get(config.getMarkersType(), this, instantTs);
    if (!markers.doesMarkerDirExist()) {
      // can happen if it was an empty write say.
      return;
    }
    // we are not including log appends here, since they are already fail-safe.
    // invalid = all data paths recorded by markers; valid = paths actually committed in stats.
    Set<String> invalidDataPaths = getInvalidDataPaths(markers);
    Set<String> validDataPaths = stats.stream()
        .map(HoodieWriteStat::getPath)
        .filter(p -> p.endsWith(this.getBaseFileExtension()))
        .collect(Collectors.toSet());
    // Contains list of partially created files. These needs to be cleaned up.
    invalidDataPaths.removeAll(validDataPaths);
    if (!invalidDataPaths.isEmpty()) {
      LOG.info("Removing duplicate data files created due to spark retries before committing. Paths=" + invalidDataPaths);
      Map<String, List<Pair<String, String>>> invalidPathsByPartition = invalidDataPaths.stream()
          .map(dp -> Pair.of(new Path(basePath, dp).getParent().toString(), new Path(basePath, dp).toString()))
          .collect(Collectors.groupingBy(Pair::getKey));
      // Ensure all files in delete list is actually present. This is mandatory for an eventually consistent FS.
      // Otherwise, we may miss deleting such files. If files are not found even after retries, fail the commit
      if (consistencyCheckEnabled) {
        // This will either ensure all files to be deleted are present.
        waitForAllFiles(context, invalidPathsByPartition, FileVisibility.APPEAR);
      }
      // Now delete partially written files
      context.setJobStatus(this.getClass().getSimpleName(), "Delete all partially written files");
      deleteInvalidFilesByPartitions(context, invalidPathsByPartition);
      // Now ensure the deleted files disappear
      if (consistencyCheckEnabled) {
        // This will either ensure all files to be deleted are absent.
        waitForAllFiles(context, invalidPathsByPartition, FileVisibility.DISAPPEAR);
      }
    }
  } catch (IOException ioe) {
    throw new HoodieIOException(ioe.getMessage(), ioe);
  }
}
/**
* Ensures all files passed either appear or disappear.
*
* @param context HoodieEngineContext
* @param groupByPartition Files grouped by partition
* @param visibility Appear/Disappear
*/
private void waitForAllFiles(HoodieEngineContext context, Map<String, List<Pair<String, String>>> groupByPartition, FileVisibility visibility) {
// This will either ensure all files to be deleted are present.
context.setJobStatus(this.getClass().getSimpleName(), "Wait for all files to appear/disappear");
boolean checkPassed =
context.map(new ArrayList<>(groupByPartition.entrySet()), partitionWithFileList -> waitForCondition(partitionWithFileList.getKey(),
partitionWithFileList.getValue().stream(), visibility), config.getFinalizeWriteParallelism())
.stream().allMatch(x -> x);
if (!checkPassed) {
throw new HoodieIOException("Consistency check failed to ensure all files " + visibility);
}
}
private boolean waitForCondition(String partitionPath, Stream<Pair<String, String>> partitionFilePaths, FileVisibility visibility) {
final FileSystem fileSystem = metaClient.getRawFs();
List<String> fileList = partitionFilePaths.map(Pair::getValue).collect(Collectors.toList());
try {
getConsistencyGuard(fileSystem, config.getConsistencyGuardConfig()).waitTill(partitionPath, fileList, visibility);
} catch (IOException | TimeoutException ioe) {
LOG.error("Got exception while waiting for files to show up", ioe);
return false;
}
return true;
}
/**
* Instantiate {@link ConsistencyGuard} based on configs.
* <p>
* Default consistencyGuard class is {@link OptimisticConsistencyGuard}.
*/
public static ConsistencyGuard getConsistencyGuard(FileSystem fs, ConsistencyGuardConfig consistencyGuardConfig) throws IOException {
try {
return consistencyGuardConfig.shouldEnableOptimisticConsistencyGuard()
? new OptimisticConsistencyGuard(fs, consistencyGuardConfig) : new FailSafeConsistencyGuard(fs, consistencyGuardConfig);
} catch (Throwable e) {
throw new IOException("Could not load ConsistencyGuard ", e);
}
}
  /**
   * Returns the task context supplier held by this table.
   */
  public TaskContextSupplier getTaskContextSupplier() {
    return taskContextSupplier;
  }
/**
* Ensure that the current writerSchema is compatible with the latest schema of this dataset.
*
* When inserting/updating data, we read records using the last used schema and convert them to the
* GenericRecords with writerSchema. Hence, we need to ensure that this conversion can take place without errors.
*/
private void validateSchema() throws HoodieUpsertException, HoodieInsertException {
if (!config.getAvroSchemaValidate() || getActiveTimeline().getCommitsTimeline().filterCompletedInstants().empty()) {
// Check not required
return;
}
Schema tableSchema;
Schema writerSchema;
boolean isValid;
try {
TableSchemaResolver schemaUtil = new TableSchemaResolver(getMetaClient());
writerSchema = HoodieAvroUtils.createHoodieWriteSchema(config.getSchema());
tableSchema = HoodieAvroUtils.createHoodieWriteSchema(schemaUtil.getTableAvroSchemaWithoutMetadataFields());
isValid = TableSchemaResolver.isSchemaCompatible(tableSchema, writerSchema);
} catch (Exception e) {
throw new HoodieException("Failed to read schema/check compatibility for base path " + metaClient.getBasePath(), e);
}
if (!isValid) {
throw new HoodieException("Failed schema compatibility check for writerSchema :" + writerSchema
+ ", table schema :" + tableSchema + ", base path :" + metaClient.getBasePath());
}
}
  /**
   * Validates that the writer schema is compatible with the table schema before an upsert.
   *
   * @throws HoodieUpsertException if the schema compatibility check fails
   */
  public void validateUpsertSchema() throws HoodieUpsertException {
    try {
      validateSchema();
    } catch (HoodieException e) {
      throw new HoodieUpsertException("Failed upsert schema compatibility check.", e);
    }
  }
public void validateInsertSchema() throws HoodieInsertException {
try {
validateSchema();
} catch (HoodieException e) {
throw new HoodieInsertException("Failed insert schema compability check.", e);
}
}
  /**
   * Returns the base file format configured for this table (from the table config).
   */
  public HoodieFileFormat getBaseFileFormat() {
    return metaClient.getTableConfig().getBaseFileFormat();
  }
  /**
   * Returns the log file format configured for this table (from the table config).
   */
  public HoodieFileFormat getLogFileFormat() {
    return metaClient.getTableConfig().getLogFileFormat();
  }
public HoodieLogBlockType getLogDataBlockFormat() {
switch (getBaseFileFormat()) {
case PARQUET:
case ORC:
return HoodieLogBlockType.AVRO_DATA_BLOCK;
case HFILE:
return HoodieLogBlockType.HFILE_DATA_BLOCK;
default:
throw new HoodieException("Base file format " + getBaseFileFormat()
+ " does not have associated log block format");
}
}
  /**
   * Returns the file extension associated with this table's base file format.
   */
  public String getBaseFileExtension() {
    return getBaseFileFormat().getFileExtension();
  }
  /**
   * Returns true iff the base file format is HFILE. Per the method name, writers are then
   * expected to emit records in sorted order (HFile-backed storage).
   */
  public boolean requireSortedRecords() {
    return getBaseFileFormat() == HoodieFileFormat.HFILE;
  }
public HoodieEngineContext getContext() {
// This is to handle scenarios where this is called at the executor tasks which do not have access
// to engine context, and it ends up being null (as its not serializable and marked transient here).
return context == null ? new HoodieLocalEngineContext(hadoopConfiguration.get()) : context;
}
}
|
package org.bouncycastle.operator.bc;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.cryptopro.CryptoProObjectIdentifiers;
import org.bouncycastle.asn1.nist.NISTObjectIdentifiers;
import org.bouncycastle.asn1.oiw.OIWObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.rosstandart.RosstandartObjectIdentifiers;
import org.bouncycastle.asn1.teletrust.TeleTrusTObjectIdentifiers;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.crypto.ExtendedDigest;
import org.bouncycastle.crypto.digests.GOST3411Digest;
import org.bouncycastle.crypto.digests.GOST3411_2012_256Digest;
import org.bouncycastle.crypto.digests.GOST3411_2012_512Digest;
import org.bouncycastle.crypto.digests.MD2Digest;
import org.bouncycastle.crypto.digests.MD4Digest;
import org.bouncycastle.crypto.digests.MD5Digest;
import org.bouncycastle.crypto.digests.RIPEMD128Digest;
import org.bouncycastle.crypto.digests.RIPEMD160Digest;
import org.bouncycastle.crypto.digests.RIPEMD256Digest;
import org.bouncycastle.crypto.digests.SHA1Digest;
import org.bouncycastle.crypto.digests.SHA224Digest;
import org.bouncycastle.crypto.digests.SHA256Digest;
import org.bouncycastle.crypto.digests.SHA384Digest;
import org.bouncycastle.crypto.digests.SHA3Digest;
import org.bouncycastle.crypto.digests.SHA512Digest;
import org.bouncycastle.operator.OperatorCreationException;
/**
 * Default {@link BcDigestProvider}: maps well-known digest algorithm OIDs to
 * factories that construct the corresponding lightweight-API digest.
 */
public class BcDefaultDigestProvider
    implements BcDigestProvider
{
    /**
     * Immutable lookup table from digest algorithm OID to digest factory.
     * Previously declared as a raw {@code Map}, which forced an unchecked cast
     * on every lookup; now fully parameterized.
     */
    private static final Map<ASN1ObjectIdentifier, BcDigestProvider> lookup = createTable();

    private static Map<ASN1ObjectIdentifier, BcDigestProvider> createTable()
    {
        Map<ASN1ObjectIdentifier, BcDigestProvider> table = new HashMap<ASN1ObjectIdentifier, BcDigestProvider>();

        // SHA-1 and SHA-2 family
        table.put(OIWObjectIdentifiers.idSHA1, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new SHA1Digest();
            }
        });
        table.put(NISTObjectIdentifiers.id_sha224, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new SHA224Digest();
            }
        });
        table.put(NISTObjectIdentifiers.id_sha256, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new SHA256Digest();
            }
        });
        table.put(NISTObjectIdentifiers.id_sha384, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new SHA384Digest();
            }
        });
        table.put(NISTObjectIdentifiers.id_sha512, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new SHA512Digest();
            }
        });

        // SHA-3 family (bit length passed to the constructor)
        table.put(NISTObjectIdentifiers.id_sha3_224, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new SHA3Digest(224);
            }
        });
        table.put(NISTObjectIdentifiers.id_sha3_256, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new SHA3Digest(256);
            }
        });
        table.put(NISTObjectIdentifiers.id_sha3_384, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new SHA3Digest(384);
            }
        });
        table.put(NISTObjectIdentifiers.id_sha3_512, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new SHA3Digest(512);
            }
        });

        // Legacy MD family
        table.put(PKCSObjectIdentifiers.md5, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new MD5Digest();
            }
        });
        table.put(PKCSObjectIdentifiers.md4, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new MD4Digest();
            }
        });
        table.put(PKCSObjectIdentifiers.md2, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new MD2Digest();
            }
        });

        // GOST digests
        table.put(CryptoProObjectIdentifiers.gostR3411, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new GOST3411Digest();
            }
        });
        table.put(RosstandartObjectIdentifiers.id_tc26_gost_3411_12_256, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new GOST3411_2012_256Digest();
            }
        });
        table.put(RosstandartObjectIdentifiers.id_tc26_gost_3411_12_512, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new GOST3411_2012_512Digest();
            }
        });

        // RIPEMD family
        table.put(TeleTrusTObjectIdentifiers.ripemd128, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new RIPEMD128Digest();
            }
        });
        table.put(TeleTrusTObjectIdentifiers.ripemd160, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new RIPEMD160Digest();
            }
        });
        table.put(TeleTrusTObjectIdentifiers.ripemd256, new BcDigestProvider()
        {
            public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
            {
                return new RIPEMD256Digest();
            }
        });

        return Collections.unmodifiableMap(table);
    }

    public static final BcDigestProvider INSTANCE = new BcDefaultDigestProvider();

    /** Singleton: use {@link #INSTANCE}. */
    private BcDefaultDigestProvider()
    {
    }

    /**
     * Returns a fresh digest instance for the given algorithm identifier.
     *
     * @throws OperatorCreationException if the algorithm OID is not in the table
     */
    public ExtendedDigest get(AlgorithmIdentifier digestAlgorithmIdentifier)
        throws OperatorCreationException
    {
        // No cast needed now that the lookup map is parameterized.
        BcDigestProvider extProv = lookup.get(digestAlgorithmIdentifier.getAlgorithm());

        if (extProv == null)
        {
            throw new OperatorCreationException("cannot recognise digest");
        }

        return extProv.get(digestAlgorithmIdentifier);
    }
}
|
/**
 * Computes the pairwise dissimilarity matrix of the rows of A:
 * entry (i1, i2) is 0.5 times the squared Euclidean distance between
 * rows i1 and i2. The result is symmetric with a zero diagonal.
 *
 * @param nrow number of rows of A (and dimension of the returned square matrix)
 * @param ncol number of columns of A
 * @param mass row masses; NOTE(review): not used by this computation — kept for
 *             interface compatibility with existing callers
 * @param A    input matrix (nrow x ncol)
 * @return symmetric nrow x nrow dissimilarity matrix
 */
public static double[][] dissim(int nrow, int ncol, double[] mass, double[][] A) {
    // Java arrays are zero-initialized on allocation, so the original explicit
    // clearing loop was redundant and has been removed.
    double[][] Adiss = new double[nrow][nrow];
    for (int i1 = 0; i1 < nrow; i1++) {
        for (int i2 = 0; i2 < i1; i2++) {
            double sum = 0.0;
            for (int j = 0; j < ncol; j++) {
                double diff = A[i1][j] - A[i2][j];
                // Plain multiplication instead of Math.pow(diff, 2.0): same value, cheaper.
                sum += 0.5 * diff * diff;
            }
            Adiss[i1][i2] = sum;
            Adiss[i2][i1] = sum; // symmetry
        }
    }
    return Adiss;
}
|
/* This software was developed by employees of the National Institute of
* Standards and Technology (NIST), an agency of the Federal Government.
* Pursuant to title 15 United States Code Section 105, works of NIST
* employees are not subject to copyright protection in the United States
* and are considered to be in the public domain. As a result, a formal
* license is not needed to use the software.
*
* This software is provided by NIST as a service and is expressly
* provided "AS IS". NIST MAKES NO WARRANTY OF ANY KIND, EXPRESS, IMPLIED
* OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTY OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT
* AND DATA ACCURACY. NIST does not warrant or make any representations
* regarding the use of the software or the results thereof including, but
* not limited to, the correctness, accuracy, reliability or usefulness of
* the software.
*
* Permission to use this software is contingent upon your acceptance
* of the terms of this agreement.
*/
package gov.nist.csd.pm.common.net;
/**
* This class defines an enumeration of PM item types. The following types are: <ul> <li><tt>BINARY</tt> A binary payload stream sent by either a client or a server. <li><tt>CMD_ARG</tt> A command argument sent by a client to a server. <li><tt>CMD_CODE</tt> A command sent by a client to a server. <li><tt>RESPONSE_ERROR</tt> A response sent by a server to a client indicating that an error occurred on the server with respect to the client's previous request. <li><tt>RESPONSE_SUCCESS</tt> A response sent by a server to a client indicating that an operation was successful with respect to the client's previous request. <li><tt>RESPONSE_TEXT</tt> A response sent by a server to a client containing data relevant to the client's previous request. </ul>
* @author steveq@nist.gov
* @version $Revision: 1.1 $, $Date: 2008/07/16 17:02:57 $
* @since 6.0
*/
public enum ItemType {

    /* Since commands and responses are basically the same now that we
     * are allowing clients to send files to the server, this could be simplified
     * to a single 'MESSAGE'/'MSG' kind; kept as-is for integration compatibility.
     */

    /** A binary payload stream sent by either a client or a server. */
    BINARY(0, "BINARY"),

    /** A command argument sent by a client to a server. */
    CMD_ARG(1, "CMD_ARG"),

    /**
     * A command sent by a client to a server.
     * NOTE(review): unlike the other constants, the description here is prose
     * rather than the constant name; preserved because {@link #toString()}
     * exposes it to callers.
     */
    CMD_CODE(2, "Used to indicate a command."),

    /** Server response: an error occurred handling the client's previous request. */
    RESPONSE_ERROR(3, "RESPONSE_ERROR"),

    /** Server response: the client's previous request succeeded. */
    RESPONSE_SUCCESS(4, "RESPONSE_SUCCESS"),

    /** Server response carrying data relevant to the client's previous request. */
    RESPONSE_TEXT(5, "RESPONSE_TEXT"),

    /** Server response indicating the receiver should not respond. */
    RESPONSE_DNR(6, "RESPONSE_DNR"); // do not respond

    /** The int (wire) value for this item type. */
    private final int type;

    /** The description of this item type, returned by {@link #toString()}. */
    private final String description;

    /**
     * Construct an item type.
     *
     * @param type the int value of this item type
     * @param description the description of this item type
     */
    ItemType(int type, String description) {
        this.type = type;
        this.description = description;
    }

    /**
     * Get the int value of this item type.
     *
     * @return The int value of this item type.
     */
    protected int intValue() {
        return type;
    }

    /**
     * Get the description of this item type.
     *
     * @return The description of this item type.
     */
    @Override
    public String toString() {
        return description;
    }

    /** Returns a fixed three-letter prefix identifying this item type. */
    public String toPrefix() {
        switch (type) {
            case 0:
                return "BIN";
            case 1:
                return "ARG";
            case 2:
                return "CMD";
            case 3:
                return "ERR";
            case 4:
                return "SUC";
            case 5:
                return "TXT";
            case 6:
                return "DNR";
            default:
                return "UNK";
        }
    }

    /**
     * Get the item type given its byte representation.
     *
     * <p>Iterates {@link #values()} and matches on each constant's int value,
     * so newly added constants are picked up automatically — the original
     * hand-maintained switch had to be updated in lock-step with the constant
     * list (as its warning comment admitted).
     *
     * @param b The byte representation of the item type.
     * @return The item type for the byte representation, or null if unknown.
     */
    public static ItemType getType(byte b) {
        int value = b & 0xFF;
        for (ItemType candidate : values()) {
            if (candidate.type == value) {
                return candidate;
            }
        }
        return null;
    }
}
|
import java.io.PrintWriter;
import java.io.StringWriter;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
public class StressTest extends BaseProjectTest {

    /** Number of untimed warm-up runs before timing starts. */
    private static final int WARM_RUNS = 3;

    /** Number of timed runs averaged per benchmark. */
    private static final int TIME_RUNS = 5;

    /** Number of worker threads used for the multithreaded runs. */
    private static final int THREADS = 5;

    /** Fails the test instead of hanging forever if a run deadlocks. */
    @Rule
    public Timeout globalTimeout = Timeout.seconds(60 * TIME_RUNS);

    /** Repeats the multithreaded index test several times to surface rare races. */
    @Test
    public void testIndexConsistency() {
        ThreadTest.OutputTest outputTest = new ThreadTest.OutputTest(String.valueOf(THREADS));
        for (int run = 0; run < TIME_RUNS; run++) {
            outputTest.test02IndexComplex();
        }
    }

    /** Repeats the multithreaded search test several times to surface rare races. */
    @Test
    public void testSearchConsistency() {
        ThreadTest.OutputTest outputTest = new ThreadTest.OutputTest(String.valueOf(THREADS));
        for (int run = 0; run < TIME_RUNS; run++) {
            outputTest.test04SearchComplex();
        }
    }

    /** Asserts the multithreaded run is faster, on average, than the single-threaded run. */
    @Test
    public void testRuntime() {
        double singleSeconds = benchmark(String.valueOf(1)) / 1000000000.0;
        double multiSeconds = benchmark(String.valueOf(THREADS)) / 1000000000.0;

        System.out.println();
        System.out.printf("%d Threads: %.2f s%n", 1, singleSeconds);
        System.out.printf("%d Threads: %.2f s%n", THREADS, multiSeconds);
        System.out.printf("  Speedup: %.2f %n%n", singleSeconds / multiSeconds);

        Assert.assertTrue(singleSeconds - multiSeconds > 0);
    }

    /**
     * Runs the driver WARM_RUNS times untimed, then TIME_RUNS times timed,
     * and returns the average elapsed nanoseconds per timed run. Any exception
     * from the driver fails the test with its full stack trace.
     */
    private double benchmark(String numThreads) {
        String[] args = {
                DIR_FLAG, INDEX_DIR.toString(), //
                QUERY_FLAG, QUERY_DIR.resolve("complex.txt").toString(), //
                THREAD_NUM, numThreads
        };

        long elapsed = 0;
        try {
            for (int warm = 0; warm < WARM_RUNS; warm++) {
                Driver.main(args);
            }
            for (int run = 0; run < TIME_RUNS; run++) {
                long begin = System.nanoTime();
                Driver.main(args);
                elapsed += System.nanoTime() - begin;
            }
        }
        catch (Exception e) {
            StringWriter writer = new StringWriter();
            e.printStackTrace(new PrintWriter(writer));
            Assert.fail(errorMessage("Benchmark: " + numThreads, args, writer.toString()));
        }

        return (double) elapsed / TIME_RUNS;
    }
}
|
package com.intkr.saas.manager.item.impl;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Resource;
import org.springframework.stereotype.Repository;
import com.intkr.saas.dao.BaseDAO;
import com.intkr.saas.dao.item.AuctionDAO;
import com.intkr.saas.domain.bo.item.AuctionBO;
import com.intkr.saas.domain.bo.item.AuctionDetailBO;
import com.intkr.saas.domain.bo.item.ItemBO;
import com.intkr.saas.domain.bo.order.OrderBO;
import com.intkr.saas.domain.bo.order.OrderDetailBO;
import com.intkr.saas.domain.bo.order.PaticipateBO;
import com.intkr.saas.domain.bo.shop.ShopCommentBO;
import com.intkr.saas.domain.bo.shopping.ItemCollectBO;
import com.intkr.saas.domain.bo.sns.AttentionBO;
import com.intkr.saas.domain.bo.sns.CollectBO;
import com.intkr.saas.domain.bo.sns.LikeBO;
import com.intkr.saas.domain.bo.sns.MsgBO;
import com.intkr.saas.domain.bo.sns.PraiseDownBO;
import com.intkr.saas.domain.bo.sns.PraiseUpBO;
import com.intkr.saas.domain.dbo.item.AuctionDO;
import com.intkr.saas.domain.type.order.OrderDetailType;
import com.intkr.saas.domain.type.sns.AttentionType;
import com.intkr.saas.domain.type.sns.CollectType;
import com.intkr.saas.domain.type.sns.CommentType;
import com.intkr.saas.domain.type.sns.LikeType;
import com.intkr.saas.domain.type.sns.PaticipateType;
import com.intkr.saas.domain.type.sns.PraiseDownType;
import com.intkr.saas.domain.type.sns.PraiseUpType;
import com.intkr.saas.manager.BaseManagerImpl;
import com.intkr.saas.manager.item.AuctionManager;
/**
*
* @author Beiden
* @date 2016-5-18 上午10:32:19
* @version 1.0
*/
@Repository("AuctionManager")
public class AuctionManagerImpl extends BaseManagerImpl<AuctionBO, AuctionDO> implements AuctionManager {

    @Resource
    private AuctionDAO auctionDAO;

    /** Supplies the auction DAO to the generic base manager for CRUD operations. */
    public BaseDAO<AuctionDO> getBaseDAO() {
        return auctionDAO;
    }

    /**
     * Attaches the referenced auction (as property "auction") to an order detail
     * of auction type; details of other types pass through untouched.
     */
    public OrderDetailBO fill(OrderDetailBO orderDetail) {
        if (orderDetail == null) {
            return null;
        }
        if (!OrderDetailType.Auction.getCode().equals(orderDetail.getType())) {
            return orderDetail;
        }
        orderDetail.setProperty("auction", get(orderDetail.getRelatedId()));
        return orderDetail;
    }

    /** Fills every detail of the order; null-safe on the order and its detail list. */
    public OrderBO fill(OrderBO order) {
        if (order == null || order.getOrderDetails() == null) {
            return order;
        }
        for (OrderDetailBO detail : order.getOrderDetails()) {
            fill(detail);
        }
        return order;
    }

    /** Attaches the referenced auction to a collect entry of auction type. */
    public CollectBO fill(CollectBO collect) {
        if (collect == null) {
            return collect;
        }
        if (!CollectType.Auction.getCode().equals(collect.getType())) {
            return collect;
        }
        collect.setRelatedObject(get(collect.getRelatedId()));
        return collect;
    }

    /**
     * Dispatches each supported element of the list to the matching fill overload;
     * unsupported elements are left untouched. A null list yields an empty list.
     */
    public List<?> fill(List<?> list) {
        if (list == null) {
            return new ArrayList<MsgBO>();
        }
        for (Object obj : list) {
            if (obj instanceof CollectBO) {
                fill((CollectBO) obj);
            } else if (obj instanceof OrderBO) {
                fill((OrderBO) obj);
            } else if (obj instanceof OrderDetailBO) {
                fill((OrderDetailBO) obj);
            } else if (obj instanceof AuctionDetailBO) {
                fill((AuctionDetailBO) obj);
            } else if (obj instanceof PaticipateBO) {
                fill((PaticipateBO) obj);
            } else if (obj instanceof AttentionBO) {
                fill((AttentionBO) obj);
            }
        }
        return list;
    }

    /** Attaches the referenced auction to attention entries of Auction or Bids type. */
    public AttentionBO fill(AttentionBO attention) {
        if (attention == null) {
            return attention;
        }
        // The original condition tested AttentionType.Auction twice; deduplicated.
        if (AttentionType.Auction.getCode().equals(attention.getType())
                || AttentionType.Bids.getCode().equals(attention.getType())) {
            attention.setRelatedObject(get(attention.getRelatedId()));
        }
        return attention;
    }

    /** Attaches the referenced auction to a like entry of auction type. */
    public LikeBO fill(LikeBO like) {
        if (like == null) {
            return like;
        }
        if (!LikeType.Auction.getCode().equals(like.getType())) {
            return like;
        }
        like.setRelatedObject(get(like.getRelatedId()));
        return like;
    }

    /** Attaches the referenced auction to an up-vote entry of auction type. */
    public PraiseUpBO fill(PraiseUpBO praiseUp) {
        if (praiseUp == null) {
            return praiseUp;
        }
        if (!PraiseUpType.Auction.getCode().equals(praiseUp.getType())) {
            return praiseUp;
        }
        praiseUp.setRelatedObject(get(praiseUp.getRelatedId()));
        return praiseUp;
    }

    /** Attaches the referenced auction to a down-vote entry of auction type. */
    public PraiseDownBO fill(PraiseDownBO praiseDown) {
        if (praiseDown == null) {
            return praiseDown;
        }
        if (!PraiseDownType.Auction.getCode().equals(praiseDown.getType())) {
            return praiseDown;
        }
        praiseDown.setRelatedObject(get(praiseDown.getRelatedId()));
        return praiseDown;
    }

    /** Attaches the referenced auction (as property "auction") to a shop comment of auction type. */
    public ShopCommentBO fill(ShopCommentBO comment) {
        if (comment == null) {
            return comment;
        }
        if (!CommentType.Auction.getCode().equals(comment.getType())) {
            return comment;
        }
        comment.setProperty("auction", get(comment.getRelatedId()));
        return comment;
    }

    /**
     * Not supported for items (removed a block of dead commented-out code that
     * hinted at a previous implementation).
     * NOTE(review): the exception message contains a pre-existing typo
     * ("donot supported!"); preserved since callers/tests may match on it.
     */
    public ItemBO fill(ItemBO item) {
        throw new RuntimeException("donot supported!");
    }

    /** Attaches the parent auction to an auction detail; null-safe. */
    public AuctionDetailBO fill(AuctionDetailBO auctionDetail) {
        if (auctionDetail == null) {
            return null;
        }
        auctionDetail.setAuction(get(auctionDetail.getAuctionId()));
        return auctionDetail;
    }

    /** Attaches the referenced auction to participation entries of the auction/bids families. */
    public PaticipateBO fill(PaticipateBO paticipate) {
        if (paticipate == null) {
            return paticipate;
        }
        // The original condition tested PaticipateType.Auction twice; deduplicated.
        if (PaticipateType.Auction.getCode().equals(paticipate.getType()) //
                || PaticipateType.AuctionHit.getCode().equals(paticipate.getType()) //
                || PaticipateType.BidsHit.getCode().equals(paticipate.getType()) //
                || PaticipateType.Bids.getCode().equals(paticipate.getType())) {
            paticipate.setRelatedObject(get(paticipate.getRelatedId()));
        }
        return paticipate;
    }

    /** Not supported for item collects. */
    public ItemCollectBO fill(ItemCollectBO collect) {
        throw new RuntimeException("donot supported!");
    }
}
|
package umontreal.ssj.networks.staticreliability;
import umontreal.ssj.util.Tools;
import umontreal.ssj.rng.*;
import umontreal.ssj.networks.GraphReliability;
import umontreal.ssj.probdist.*;
import umontreal.ssj.util.*;
import java.util.*;
/**
* This class implements the Permutation Monte Carlo method with shocks to
* estimate the reliability of a network. A shock corresponds to a list of links
* that all fail simultaneously with a given probability. The method uses the
* destructive schema: it assumes that all links are working initially, then the
* links start failing (by experiencing one shock after another) until the
* network fails, i.e., the subset of nodes V0 becomes disconnected.<br />
*
* <p>
* It also uses a reverse scan of the shocks similar to a constructive schema
* where all the shocks have struck initially. Then the shocks are removed one
* by one until the network becomes operational.
* </p>
*
*/
public class PMCShocks extends PMC {

    // The list of shocks; each shock is a set of links that fail together.
    protected ShockList shocks;
    // Number of shocks (size of the shock list).
    protected int kappa;
    protected double[] rates; // shock rates

    /**
     * Builds a PMC-with-shocks estimator. The concrete forest type must match the
     * scan direction selected by the inherited {@code antiScanFlag}: the reverse
     * (anti) scan needs a {@code GraphWithForestAntiShocks}, the destructive scan
     * a {@code GraphWithForestShocks}.
     */
    public PMCShocks(GraphReliability graph, GraphWithForest forest,
            ShockList shocks) {
        super(graph, forest);
        if (antiScanFlag) {
            if (!(forest instanceof GraphWithForestAntiShocks))
                throw new IllegalArgumentException(
                        "forest must be an instance of ForestAntiShocks");
        } else {
            if (!(forest instanceof GraphWithForestShocks))
                throw new IllegalArgumentException(
                        "forest must be an instance of ForestShocks");
        }
        this.shocks = shocks;
        shockFlag = true;
        kappa = shocks.getShocks().size();
        rates = shocks.getRates();
        // Lam[0..kappa]: compound rates, filled per run by computeRates().
        Lam = new double[kappa + 1];
    }

    /**
     * Performs one Monte Carlo replication: samples shock times, locates the
     * critical shock b (where the network fails — or, in the anti-scan case,
     * is repaired), computes the compound rates up to b, and returns the
     * estimate obtained from the hypoexponential CDF.
     */
    @Override
    protected double doOneRun(RandomStream stream) {
        // Draw independent shock times
        ((GraphWithForestShocks) forest).sampleShockTimes(stream);
        int b = -1; // critical shock
        int[] ranks; // shock ranks
        if (antiScanFlag) {
            ranks = computeRanks();
            ((GraphWithForestAntiShocks) forest).initForestAntiShocksNotWeights();
            b = getCriticalShock(ranks);
        } else {
            double[] A = new double[kappa];
            double[] res = ((GraphWithForestShocks) forest).getFailTime(A);
            b = (int) (res[1]); // sorted rank of critical shock
            // now find shock indices by sorted weights
            ranks = ((GraphWithForestShocks) forest).findShockRanks(A);
        }
        computeRates(ranks, b);
        criticalLink.add(b + 1); // critical shock; b counts from 0
        double ell = computeCDF(Lam, b + 1);
        return ell;
    }

    /**
     * Evaluates the hypoexponential CDF at time 1 using the first {@code b}
     * compound rates of {@code Lambda}; the evaluation algorithm is chosen by
     * {@code getHypoExpKind()}.
     *
     * @param Lambda compound rates
     * @param b number of leading rates to use
     * @return the CDF value, or -1.0 when the kind is unrecognized
     */
    protected double computeCDF(double[] Lambda, int b) {
        double[] tLam = trimLam(Lambda, b);
        int flag = getHypoExpKind();
        switch (flag) {
        case 0: return HypoExponentialDistEqual.cdf(kappa, b, shocks.getRate(0), 1.0);
        case 1: return HypoExponentialDistQuick.cdf(tLam, 1.0);
        case 2: return HypoExponentialDist.cdf2(tLam, 1.0);
        case 3: return HypoExponentialDist.cdf(tLam, 1.0);
        default: return -1.0;
        }
    }

    /**
     * Fills the compound rates Lam: Lam[0] is the sum of all shock rates (set by
     * initLamShock) and Lam[j+1] = Lam[j] minus the rate of the j-th sorted shock.
     * The shocks are sorted according to their weight.
     * (Doc fix: the original comment claimed a return value, but this method is void.)
     *
     * @param rank
     *            shock ranks
     * @param b
     *            critical shock rank at which network fails
     */
    protected void computeRates(int[] rank, int b) {
        initLamShock();
        int r; // shock number
        for (int j = 0; j < b; j++) {
            r = rank[j];
            Lam[j + 1] = Lam[j] - shocks.getRate(r);
        }
    }

    /**
     * Sorts a copy of the shock weights in ascending order and finds the
     * ranks of the shock weights compared to the original shocks order.
     *
     * @return the shocks ranks
     */
    protected int[] computeRanks() {
        double[] W = ((GraphWithForestShocks) forest).getShockWeight();
        // kappa = W.length = number of shocks
        double[] A = new double[kappa];
        System.arraycopy(W, 0, A, 0, kappa);
        Arrays.sort(A); // sorted weights A
        // find ranks of sorted shocks
        int[] ranks = ((GraphWithForestShocks) forest).findShockRanks(A);
        return ranks;
    }

    /**
     * Computes the rank of the critical shock for which the network becomes
     * operational. Shock weights (or times) have been sampled randomly; the
     * forest has been initialized to all broken links and all shocks on. Repair
     * the shocks in reverse order of sorted weights until network is repaired.
     *
     * @return rank of critical shock which repairs the network, or -1 if the
     *         network never becomes connected
     * @see getCriticalShockAndRates
     */
    protected int getCriticalShock(int[] rank) {
        int s; // current shock
        // Add sorted shocks (from largest time to smallest) until network is
        // repaired
        for (int j = kappa - 1; j >= 0; j--) {
            s = rank[j];
            if (s < 0) // shock already visited before
                continue;
            ((GraphWithForestShocks) forest).repairLinksOfShock(s);
            if (forest.isConnected())
                return j;
        }
        return -1;
    }

    /**
     * Computes the first compound Lambda, which is the sum of all lambdas,
     * and resets the rest of the Lam array to zero.
     *
     * @return the first compound Lambda.
     */
    protected double initLamShock() {
        double x = 0;
        for (int j = 0; j < kappa; j++) {
            x += rates[j]; // Sum all shock rates
            Lam[j] = 0;
        }
        Lam[0] = x; // first Lambda
        Lam[kappa] = 0;
        return x;
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: GenericShorthandParser.java 679326 2008-07-24 09:35:34Z vhennebert $ */
package org.apache.fop.fo.properties;
import java.util.Iterator;
import org.apache.fop.fo.PropertyList;
import org.apache.fop.fo.expr.PropertyException;
/**
* Generic shorthand parser for ListProperties
*/
public class GenericShorthandParser implements ShorthandParser {

    /**
     * Constructor.
     */
    public GenericShorthandParser() {
    }

    /**
     * @param list the ListProperty
     * @param index the index into the List of properties
     * @return the property from the List of properties at the index parameter,
     * or null when the list has fewer than index + 1 elements
     */
    protected Property getElement(Property list, int index) {
        if (list.getList().size() > index) {
            return (Property) list.getList().get(index);
        } else {
            return null;
        }
    }

    /**
     * {@inheritDoc}
     */
    public Property getValueForProperty(int propId,
                                        Property property,
                                        PropertyMaker maker,
                                        PropertyList propertyList)
                    throws PropertyException {
        // Check for keyword "inherit": a single-element list holding "inherit"
        // resolves to the parent's value for this property.
        if (property.getList().size() == 1) {
            String sval = getElement(property, 0).getString();
            if (sval != null && sval.equals("inherit")) {
                return propertyList.getFromParent(propId);
            }
        }
        return convertValueForProperty(propId, property, maker, propertyList);
    }

    /**
     * Converts a property name into a Property
     * @param propId the property ID in the Constants interface
     * @param maker the Property.Maker to be used in the conversion
     * @param property the list-valued shorthand property whose elements are tried in turn
     * @param propertyList the PropertyList from which the Property should be
     * extracted
     * @return the first successfully converted Property, or null if none converts
     * @throws PropertyException if the shorthand conversion reports an error
     */
    protected Property convertValueForProperty(int propId,
                                               Property property,
                                               PropertyMaker maker,
                                               PropertyList propertyList)
                    throws PropertyException {
        // Try each of the stored values in turn, stopping at the first
        // successful conversion. (Replaced the raw-typed Iterator/while loop
        // with a for-each and early return — identical behavior, no raw types.)
        for (Object element : property.getList()) {
            Property prop = maker.convertShorthandProperty(propertyList, (Property) element, null);
            if (prop != null) {
                return prop;
            }
        }
        return null;
    }
}
|
package io.symphonia;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.geojson.Feature;
import org.geojson.FeatureCollection;
import org.geojson.Point;
import java.util.List;
import java.util.Map;
import java.util.stream.Collector;
/**
 * Maps DynamoDB items to GeoJSON point features.
 */
public class GeoJsonFeatureMapper {

    // DynamoDB attribute names read from each item.
    // Declared static final (the originals were mutable non-final statics).
    private static final String LONGITUDE = "longitude";
    private static final String LATITUDE = "latitude";
    private static final String TEMPERATURE = "temperature";

    /**
     * Converts DynamoDB items into a GeoJSON {@link FeatureCollection}, silently
     * skipping items that lack longitude/latitude attributes.
     *
     * @param items DynamoDB items (attribute name to value)
     * @return a FeatureCollection containing one point Feature per mappable item
     * @throws JsonProcessingException kept in the signature for caller compatibility
     */
    public static FeatureCollection toFeatureCollection(List<Map<String, AttributeValue>> items) throws JsonProcessingException {
        return items.stream()
                .filter(GeoJsonFeatureMapper::hasLngLat)
                .map(GeoJsonFeatureMapper::toFeature)
                .collect(Collector.of(FeatureCollection::new, FeatureCollection::add,
                        (left, right) -> {
                            left.addAll(right.getFeatures());
                            return left;
                        }));
    }

    /** Returns true when the item carries both coordinate attributes. */
    private static boolean hasLngLat(Map<String, AttributeValue> item) {
        return item.containsKey(LONGITUDE) && item.containsKey(LATITUDE);
    }

    /**
     * Maps one item to a GeoJSON point Feature; copies the temperature attribute
     * into the feature's properties when present. Coordinates are read via the
     * DynamoDB numeric accessor {@code getN()}.
     */
    private static Feature toFeature(Map<String, AttributeValue> item) {
        Double longitude = Double.parseDouble(item.get(LONGITUDE).getN());
        Double latitude = Double.parseDouble(item.get(LATITUDE).getN());
        Feature feature = new Feature();
        feature.setGeometry(new Point(longitude, latitude));
        if (item.containsKey(TEMPERATURE)) {
            Double temperature = Double.parseDouble(item.get(TEMPERATURE).getN());
            feature.setProperty(TEMPERATURE, temperature);
        }
        return feature;
    }
}
|
package mruk.metarreader;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class MetarReaderApplication {
    /**
     * Application entry point: bootstraps the Spring Boot context.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        // Equivalent to the static SpringApplication.run(Class, String...) helper.
        new SpringApplication(MetarReaderApplication.class).run(args);
    }
}
|
package com.prowidesoftware.swift.model.mx.dic;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
/**
 * Details of the intra-position movement.
 * <p>
 * JAXB data holder for the {@code IntraPositionDetails4} message component.
 * Setters return {@code this} to allow fluent chaining. {@code equals},
 * {@code hashCode} and {@code toString} are implemented reflectively with
 * Commons Lang builders, so all fields participate in them.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "IntraPositionDetails4", propOrder = {
    "poolId",
    "acctOwnr",
    "sfkpgAcct",
    "finInstrmId",
    "sttldQty",
    "sttlmDt",
    "balFr",
    "balTo"
})
public class IntraPositionDetails4 {
    @XmlElement(name = "PoolId")
    protected String poolId;
    @XmlElement(name = "AcctOwnr")
    protected PartyIdentification13Choice acctOwnr;
    @XmlElement(name = "SfkpgAcct", required = true)
    protected SecuritiesAccount13 sfkpgAcct;
    @XmlElement(name = "FinInstrmId", required = true)
    protected SecurityIdentification11 finInstrmId;
    @XmlElement(name = "SttldQty", required = true)
    protected FinancialInstrumentQuantity1Choice sttldQty;
    @XmlElement(name = "SttlmDt", required = true)
    protected DateAndDateTimeChoice sttlmDt;
    @XmlElement(name = "BalFr")
    protected SecuritiesBalanceType2Choice balFr;
    @XmlElement(name = "BalTo")
    protected SecuritiesBalanceType2Choice balTo;
    /**
     * Gets the value of the poolId property.
     *
     * @return possible object is {@link String }
     */
    public String getPoolId() {
        return poolId;
    }
    /**
     * Sets the value of the poolId property.
     *
     * @param value allowed object is {@link String }
     * @return this instance, to allow fluent chaining
     */
    public IntraPositionDetails4 setPoolId(String value) {
        this.poolId = value;
        return this;
    }
    /**
     * Gets the value of the acctOwnr property.
     *
     * @return possible object is {@link PartyIdentification13Choice }
     */
    public PartyIdentification13Choice getAcctOwnr() {
        return acctOwnr;
    }
    /**
     * Sets the value of the acctOwnr property.
     *
     * @param value allowed object is {@link PartyIdentification13Choice }
     * @return this instance, to allow fluent chaining
     */
    public IntraPositionDetails4 setAcctOwnr(PartyIdentification13Choice value) {
        this.acctOwnr = value;
        return this;
    }
    /**
     * Gets the value of the sfkpgAcct property.
     *
     * @return possible object is {@link SecuritiesAccount13 }
     */
    public SecuritiesAccount13 getSfkpgAcct() {
        return sfkpgAcct;
    }
    /**
     * Sets the value of the sfkpgAcct property.
     *
     * @param value allowed object is {@link SecuritiesAccount13 }
     * @return this instance, to allow fluent chaining
     */
    public IntraPositionDetails4 setSfkpgAcct(SecuritiesAccount13 value) {
        this.sfkpgAcct = value;
        return this;
    }
    /**
     * Gets the value of the finInstrmId property.
     *
     * @return possible object is {@link SecurityIdentification11 }
     */
    public SecurityIdentification11 getFinInstrmId() {
        return finInstrmId;
    }
    /**
     * Sets the value of the finInstrmId property.
     *
     * @param value allowed object is {@link SecurityIdentification11 }
     * @return this instance, to allow fluent chaining
     */
    public IntraPositionDetails4 setFinInstrmId(SecurityIdentification11 value) {
        this.finInstrmId = value;
        return this;
    }
    /**
     * Gets the value of the sttldQty property.
     *
     * @return possible object is {@link FinancialInstrumentQuantity1Choice }
     */
    public FinancialInstrumentQuantity1Choice getSttldQty() {
        return sttldQty;
    }
    /**
     * Sets the value of the sttldQty property.
     *
     * @param value allowed object is {@link FinancialInstrumentQuantity1Choice }
     * @return this instance, to allow fluent chaining
     */
    public IntraPositionDetails4 setSttldQty(FinancialInstrumentQuantity1Choice value) {
        this.sttldQty = value;
        return this;
    }
    /**
     * Gets the value of the sttlmDt property.
     *
     * @return possible object is {@link DateAndDateTimeChoice }
     */
    public DateAndDateTimeChoice getSttlmDt() {
        return sttlmDt;
    }
    /**
     * Sets the value of the sttlmDt property.
     *
     * @param value allowed object is {@link DateAndDateTimeChoice }
     * @return this instance, to allow fluent chaining
     */
    public IntraPositionDetails4 setSttlmDt(DateAndDateTimeChoice value) {
        this.sttlmDt = value;
        return this;
    }
    /**
     * Gets the value of the balFr property.
     *
     * @return possible object is {@link SecuritiesBalanceType2Choice }
     */
    public SecuritiesBalanceType2Choice getBalFr() {
        return balFr;
    }
    /**
     * Sets the value of the balFr property.
     *
     * @param value allowed object is {@link SecuritiesBalanceType2Choice }
     * @return this instance, to allow fluent chaining
     */
    public IntraPositionDetails4 setBalFr(SecuritiesBalanceType2Choice value) {
        this.balFr = value;
        return this;
    }
    /**
     * Gets the value of the balTo property.
     *
     * @return possible object is {@link SecuritiesBalanceType2Choice }
     */
    public SecuritiesBalanceType2Choice getBalTo() {
        return balTo;
    }
    /**
     * Sets the value of the balTo property.
     *
     * @param value allowed object is {@link SecuritiesBalanceType2Choice }
     * @return this instance, to allow fluent chaining
     */
    public IntraPositionDetails4 setBalTo(SecuritiesBalanceType2Choice value) {
        this.balTo = value;
        return this;
    }
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
    }
    @Override
    public boolean equals(Object that) {
        return EqualsBuilder.reflectionEquals(this, that);
    }
    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this);
    }
}
|
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.core5.http.impl;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CodingErrorAction;
import org.apache.hc.core5.http.config.CharCodingConfig;
public final class CharCodingSupport {
    private CharCodingSupport() {
        // Utility class: no instances.
    }
    /**
     * Creates a {@link CharsetDecoder} from the given configuration.
     * Missing error actions default to {@link CodingErrorAction#REPORT}.
     *
     * @param cconfig the char coding configuration; may be {@code null}
     * @return a configured decoder, or {@code null} when the config or its
     *         charset is absent
     */
    public static CharsetDecoder createDecoder(final CharCodingConfig cconfig) {
        if (cconfig == null) {
            return null;
        }
        final Charset charset = cconfig.getCharset();
        if (charset == null) {
            return null;
        }
        // Consistency fix: read the error actions only after the charset check,
        // matching createEncoder's structure (they were previously read even
        // when no charset was configured and the result was discarded).
        final CodingErrorAction malformed = cconfig.getMalformedInputAction();
        final CodingErrorAction unmappable = cconfig.getUnmappableInputAction();
        return charset.newDecoder()
                .onMalformedInput(malformed != null ? malformed : CodingErrorAction.REPORT)
                .onUnmappableCharacter(unmappable != null ? unmappable : CodingErrorAction.REPORT);
    }
    /**
     * Creates a {@link CharsetEncoder} from the given configuration.
     * Missing error actions default to {@link CodingErrorAction#REPORT}.
     *
     * @param cconfig the char coding configuration; may be {@code null}
     * @return a configured encoder, or {@code null} when the config or its
     *         charset is absent
     */
    public static CharsetEncoder createEncoder(final CharCodingConfig cconfig) {
        if (cconfig == null) {
            return null;
        }
        final Charset charset = cconfig.getCharset();
        if (charset == null) {
            return null;
        }
        final CodingErrorAction malformed = cconfig.getMalformedInputAction();
        final CodingErrorAction unmappable = cconfig.getUnmappableInputAction();
        return charset.newEncoder()
                .onMalformedInput(malformed != null ? malformed : CodingErrorAction.REPORT)
                .onUnmappableCharacter(unmappable != null ? unmappable : CodingErrorAction.REPORT);
    }
}
|
package com.xoolibeut.wolma.model.transform;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
/**
 * A geographic point (graph node) with deliberately short, XML-serialized
 * field names. Equality is based solely on {@link #getId()}.
 *
 * @author rgayeelhadji
 */
@XmlAccessorType(XmlAccessType.FIELD)
public class PointGeographique {
    private String id;
    /** Latitude (serialized as "a"). */
    private Double a;
    /** Longitude (serialized as "o"). */
    private Double o;
    /** Weight (serialized as "p"). */
    private int p;
    /** Intersection weight (serialized as "pi"). */
    private int pi;
    /** Sector (serialized as "s"). */
    private String s;
    /** Intersection flag; defaults to "n" — presumably "o"/"n" for yes/no. TODO confirm. */
    private String i = "n";
    /** Neighbour point ids (serialized as "v"). */
    private Set<String> v = new HashSet<String>();
    /** Route positions this point belongs to (serialized as "r"). */
    private List<RoutePosition> r = new ArrayList<RoutePosition>();
    /**
     * Two points are equal when they have the same class and equal ids;
     * two null ids compare equal.
     */
    @Override
    public boolean equals(Object object) {
        if (null == object || object.getClass() != this.getClass()) {
            return false;
        }
        PointGeographique node = (PointGeographique) object;
        return equals(this.id, node.getId());
    }
    /** Null-safe string comparison used by {@link #equals(Object)}. */
    private boolean equals(String id, String idRef) {
        if (null == id) {
            return null == idRef;
        } else {
            return id.equals(idRef);
        }
    }
    /**
     * Null-safe hash code consistent with {@link #equals(Object)}.
     * Fix: the previous implementation dereferenced a null {@code id} (NPE)
     * even though {@code equals} explicitly supports null ids.
     */
    @Override
    public int hashCode() {
        return id == null ? 0 : id.hashCode();
    }
    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
    public Double getLat() {
        return a;
    }
    public void setLat(Double lat) {
        this.a = lat;
    }
    public Double getLon() {
        return o;
    }
    public void setLon(Double lon) {
        this.o = lon;
    }
    public int getPoids() {
        return p;
    }
    public void setPoids(int poids) {
        this.p = poids;
    }
    public String getS() {
        return s;
    }
    public void setSecteur(String secteur) {
        this.s = secteur;
    }
    public String getI() {
        return i;
    }
    public void setIn(String in) {
        this.i = in;
    }
    public int getPoidsIntersection() {
        return pi;
    }
    public void setPoidsIntersection(int poidsIntersection) {
        this.pi = poidsIntersection;
    }
    public List<RoutePosition> getRoutes() {
        return r;
    }
    public void setRoutes(List<RoutePosition> routes) {
        this.r = routes;
    }
    public Set<String> getVoisins() {
        return v;
    }
    public void setVoisins(Set<String> v) {
        this.v = v;
    }
}
|
package com.sim.core.Sensors;
/**
* Created by kirill-good on 6.2.15.
*/
/**
 * Sharp distance-sensor model: fixed minimum range, configurable maximum
 * range and field-of-view angle, plus the last stored reading.
 */
public class Sharp {
    /** Maximum measurable distance. */
    public final double max;
    /** Minimum measurable distance (fixed at 1). */
    public final double min = 1;
    /** Sensor field-of-view angle. */
    public final double angle;
    /** Last reading; 0.0 until a measurement is stored. */
    protected double value;
    public Sharp(double max, double angle) {
        this.max = max;
        this.angle = angle;
    }
    /** Returns the last stored reading. */
    public double getValue() {
        return this.value;
    }
    /**
     * Returns a new sensor with the same configuration.
     * NOTE(review): the current {@code value} is intentionally not carried
     * over — the copy starts with a fresh reading of 0.0; confirm intent.
     */
    public Sharp getCopy() {
        return new Sharp(this.max, this.angle);
    }
}
|
package com.redhat.cajun.navy.mission;
/**
 * Error conditions reported by the mission service's request handling.
 */
public enum ErrorCodes {
    // NOTE(review): presumably raised when the request carries no action field — confirm against the handler.
    NO_ACTION_SPECIFIED,
    // NOTE(review): presumably raised when the action value is unrecognized — confirm against the handler.
    BAD_ACTION
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tomee.chatterbox.xmpp.api;
/**
 * Checked exception for message-handling failures in the XMPP chatterbox API.
 * <p>
 * Mirrors the standard {@link Exception} constructors; always prefer the
 * cause-preserving variants when wrapping another throwable.
 */
public class MessageException extends Exception {
    /** Creates an exception with no detail message or cause. */
    public MessageException() {
    }
    /** Creates an exception with the given detail message. */
    public MessageException(String message) {
        super(message);
    }
    /** Creates an exception with the given detail message and cause. */
    public MessageException(String message, Throwable cause) {
        super(message, cause);
    }
    /** Creates an exception wrapping the given cause. */
    public MessageException(Throwable cause) {
        super(cause);
    }
    /** Full-control constructor exposing suppression and stack-trace writability. */
    public MessageException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
|
package pl.fakturomat.database.modelmanagers;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import pl.fakturomat.database.dao.ClientDao;
import pl.fakturomat.database.models.Client;
import pl.fakturomat.database.modelsfx.ClientFx;
import pl.fakturomat.tools.ApplicationException;
import pl.fakturomat.tools.converters.ClientConverter;
import java.util.List;
public class ClientModel {
    // Shared, observable cache of clients consumed by the JavaFX UI layer.
    private static ObservableList<ClientFx> clientFxList = FXCollections.observableArrayList();
    /**
     * Reloads the shared client list from the database, converting each
     * persisted {@code Client} to its {@code ClientFx} view model.
     *
     * @throws ApplicationException presumably on a data-access failure inside
     *         {@code queryForAll} — TODO confirm against ClientDao
     */
    public void init() throws ApplicationException {
        final ClientDao clientDao = new ClientDao();
        final List<Client> clients = clientDao.queryForAll();
        clientFxList.clear();
        for (final Client client : clients) {
            clientFxList.add(ClientConverter.convertToClientFx(client));
        }
    }
    /** Returns the shared observable list of client view models. */
    public static ObservableList<ClientFx> getClientFxList() {
        return clientFxList;
    }
}
|
package com.malyvoj3.csvwvalidator.domain.metadata;
import com.malyvoj3.csvwvalidator.domain.ValidationError;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/**
 * Abstract base class for CSV on the Web (CSVW) metadata properties.
 * <p>
 * Lombok's {@code @Data} supplies the getter/setter, equals/hashCode and
 * toString for {@link #value}.
 *
 * @param <T> the Java type of the property's value
 */
@Data
public abstract class Property<T> implements Normalizable {
    // The raw (possibly not-yet-normalized) property value.
    protected T value;
    public Property(T value) {
        this.value = value;
    }
    /**
     * Default normalization: performs no changes and reports no errors
     * (returns a new, empty, mutable list). Subclasses override as needed.
     */
    @Override
    public List<ValidationError> normalize(Context context) {
        return new ArrayList<>();
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry;
/**
 * A {@link WALEntryFilter} which contains multiple filters and applies them
 * in chain order. Filtering short-circuits as soon as any filter in the
 * chain drops the entry (returns null).
 */
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION)
public class ChainWALEntryFilter implements WALEntryFilter {
  private final WALEntryFilter[] filters;
  /**
   * @param filters the filters to apply, in order. Fix: the array is now
   * defensively copied so later mutation by the caller cannot alter the chain.
   */
  public ChainWALEntryFilter(WALEntryFilter...filters) {
    this.filters = filters.clone();
  }
  /**
   * Builds a chain from the given list, flattening any nested
   * ChainWALEntryFilter instances one level into this chain.
   */
  public ChainWALEntryFilter(List<WALEntryFilter> filters) {
    ArrayList<WALEntryFilter> rawFilters = new ArrayList<WALEntryFilter>(filters.size());
    // flatten the chains
    for (WALEntryFilter filter : filters) {
      if (filter instanceof ChainWALEntryFilter) {
        for (WALEntryFilter f : ((ChainWALEntryFilter) filter).filters) {
          rawFilters.add(f);
        }
      } else {
        rawFilters.add(filter);
      }
    }
    this.filters = rawFilters.toArray(new WALEntryFilter[rawFilters.size()]);
  }
  /**
   * Applies each filter in order; returns null (entry dropped) as soon as
   * any filter drops it, otherwise the (possibly transformed) entry.
   */
  @Override
  public Entry filter(Entry entry) {
    for (WALEntryFilter filter : filters) {
      if (entry == null) {
        return null;
      }
      entry = filter.filter(entry);
    }
    return entry;
  }
}
|
package com.capitalone.dashboard.rest;
import com.capitalone.dashboard.config.TestConfig;
import com.capitalone.dashboard.config.WebMVCConfig;
import com.capitalone.dashboard.model.AuthType;
import com.capitalone.dashboard.model.Dashboard;
import com.capitalone.dashboard.model.DashboardType;
import com.capitalone.dashboard.request.DashboardRemoteRequest;
import com.capitalone.dashboard.service.DashboardRemoteService;
import com.google.gson.Gson;
import org.bson.types.ObjectId;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentMatchers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import static com.capitalone.dashboard.fixture.DashboardFixture.makeDashboard;
import static com.capitalone.dashboard.fixture.DashboardFixture.makeDashboardRemoteRequest;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = {TestConfig.class, WebMVCConfig.class})
@WebAppConfiguration
public class DashboardRemoteControllerTest {
    private MockMvc mockMvc;
    @Autowired private WebApplicationContext wac;
    @Autowired private DashboardRemoteService dashboardRemoteService;
    private String configItemBusServName = "ASVTEST";
    private String configItemBusAppName = "BAPTEST";
    @Before
    public void before() {
        // Each test starts with a clean security context and a fresh MockMvc.
        SecurityContextHolder.clearContext();
        mockMvc = MockMvcBuilders.webAppContextSetup(wac).build();
    }
    /** POST /dashboard/remoteCreate (update=false) should answer 201 Created. */
    @Test
    public void createTeamDashboardRemote() throws Exception {
        Dashboard dashboard = makeDashboard("t1", "title", "app", "comp", "someuser", DashboardType.Team, configItemBusServName, configItemBusAppName);
        dashboard.setId(ObjectId.get());
        DashboardRemoteRequest request = makeDashboardRemoteRequest("template", "dashboardtitle", "app", "comp", "someuser", null, "team", configItemBusServName, configItemBusAppName);
        initiateSecurityContext("someuser", AuthType.STANDARD);
        when(dashboardRemoteService.remoteCreate(ArgumentMatchers.any(DashboardRemoteRequest.class), eq(false))).thenReturn(dashboard);
        mockMvc.perform(post("/dashboard/remoteCreate")
                .contentType(MediaType.APPLICATION_JSON)
                .content(new Gson().toJson(request)))
                .andExpect(status().isCreated());
    }
    /** POST /dashboard/remoteUpdate (update=true) should answer 201 Created. */
    @Test
    public void updateTeamDashboardRemote() throws Exception {
        Dashboard dashboard = makeDashboard("t1", "title", "app", "comp", "someuser", DashboardType.Team, configItemBusServName, configItemBusAppName);
        dashboard.setId(ObjectId.get());
        DashboardRemoteRequest request = makeDashboardRemoteRequest("template", "dashboardtitle", "app", "comp", "someuser", null, "team", configItemBusServName, configItemBusAppName);
        initiateSecurityContext("someuser", AuthType.STANDARD);
        when(dashboardRemoteService.remoteCreate(ArgumentMatchers.any(DashboardRemoteRequest.class), eq(true))).thenReturn(dashboard);
        mockMvc.perform(post("/dashboard/remoteUpdate")
                .contentType(MediaType.APPLICATION_JSON)
                .content(new Gson().toJson(request)))
                .andExpect(status().isCreated());
    }
    /**
     * Installs an authenticated user into the security context.
     * Fix: the AuthType parameter was previously ignored and STANDARD was
     * hard-coded; the parameter is now honored (existing callers pass
     * STANDARD, so observed behavior is unchanged).
     */
    private void initiateSecurityContext(String username, AuthType authType) {
        UsernamePasswordAuthenticationToken authentication = new UsernamePasswordAuthenticationToken(username, "password");
        authentication.setDetails(authType.name());
        SecurityContextHolder.getContext().setAuthentication(authentication);
    }
}
|
package com.company;
public class Square extends Rectangle{
    /**
     * Creates a square with the given side length.
     *
     * @param side the side length, used for both rectangle dimensions
     */
    public Square(double side) {
        super(side, side);
    }
    /**
     * Backward-compatible constructor retained for existing callers.
     * NOTE(review): forwarding two independent sides allows a "square" with
     * a != b; prefer {@link #Square(double)}.
     *
     * @param a first side (the one used by {@link #area()})
     * @param b second side, forwarded unchanged to Rectangle
     */
    public Square(double a, double b) {
        super(a, b);
    }
    /** Prints the square's area (side a squared) to standard output. */
    @Override
    public void area() {
        System.out.println(super.getA() * super.getA());
    }
}
|
package demo.domain;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.rest.core.annotation.RepositoryRestResource;
import org.springframework.data.rest.core.annotation.RestResource;
import java.util.List;
/**
 * Spring Data REST repository for {@code RunningInfo} entities, exported
 * under the {@code /repo} path.
 */
@RepositoryRestResource(path = "repo")
public interface RunningInfoRepository extends JpaRepository<RunningInfo, Long> {
    // Derived query (per Spring Data method-name conventions): pages all
    // records ordered by healthWarningLevel desc, then heartRate desc.
    @RestResource(rel = "PageByWarning")
    Page<RunningInfo> findAllByOrderByHealthWarningLevelDescHeartRateDesc(Pageable pageable);
    // Derived query: all records whose runningId equals the argument.
    @RestResource(rel = "findByRunningId")
    List<RunningInfo> findAllByRunningId(String runningId);
}
|
package cn.iocoder.mall.pay.biz.service;
import cn.iocoder.common.framework.util.DateUtil;
import cn.iocoder.common.framework.util.MathUtil;
import cn.iocoder.common.framework.util.ServiceExceptionUtil;
import cn.iocoder.common.framework.vo.CommonResult;
import cn.iocoder.mall.pay.api.PayRefundService;
import cn.iocoder.mall.pay.api.bo.refund.PayRefundPageBO;
import cn.iocoder.mall.pay.api.bo.refund.PayRefundSubmitBO;
import cn.iocoder.mall.pay.api.constant.PayErrorCodeEnum;
import cn.iocoder.mall.pay.api.constant.PayRefundStatus;
import cn.iocoder.mall.pay.api.constant.PayTransactionStatusEnum;
import cn.iocoder.mall.pay.api.dto.refund.PayRefundPageDTO;
import cn.iocoder.mall.pay.api.dto.refund.PayRefundSubmitDTO;
import cn.iocoder.mall.pay.biz.client.AbstractPaySDK;
import cn.iocoder.mall.pay.biz.client.PaySDKFactory;
import cn.iocoder.mall.pay.biz.client.RefundSuccessBO;
import cn.iocoder.mall.pay.biz.convert.PayRefundConvert;
import cn.iocoder.mall.pay.biz.dao.PayRefundMapper;
import cn.iocoder.mall.pay.biz.dataobject.PayAppDO;
import cn.iocoder.mall.pay.biz.dataobject.PayRefundDO;
import cn.iocoder.mall.pay.biz.dataobject.PayTransactionDO;
import cn.iocoder.mall.pay.biz.dataobject.PayTransactionExtensionDO;
import org.apache.rocketmq.spring.core.RocketMQTemplate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.util.Date;
@Service
@org.apache.dubbo.config.annotation.Service(validation = "true", version = "${dubbo.provider.PayRefundService.version}")
public class PayRefundServiceImpl implements PayRefundService {
    private Logger logger = LoggerFactory.getLogger(getClass());
    @Autowired
    private PayRefundMapper payRefundMapper;
    @Autowired
    private PayAppServiceImpl payAppService;
    @Autowired
    private PayNotifyServiceImpl payNotifyService;
    @Autowired
    private PayTransactionServiceImpl payTransactionService;
    @Resource
    private RocketMQTemplate rocketMQTemplate;
    /**
     * Submits a refund for a successfully paid transaction: validates the app,
     * the transaction and its extension, persists a PayRefundDO in WAITING
     * state, then invokes the third-party payment SDK.
     */
    @Override
    public CommonResult<PayRefundSubmitBO> submitRefund(PayRefundSubmitDTO payRefundSubmitDTO) {
        // Validate that the app is valid
        PayAppDO payAppDO = payAppService.validPayApp(payRefundSubmitDTO.getAppId());
        // Load the PayTransactionDO and validate it
        PayTransactionDO payTransaction = payTransactionService.getTransaction(payRefundSubmitDTO.getAppId(), payRefundSubmitDTO.getOrderId());
        if (payTransaction == null) { // must exist
            return ServiceExceptionUtil.error(PayErrorCodeEnum.PAY_TRANSACTION_NOT_FOUND.getCode());
        }
        if (!PayTransactionStatusEnum.SUCCESS.getValue().equals(payTransaction.getStatus())) { // status must be SUCCESS (paid)
            return ServiceExceptionUtil.error(PayErrorCodeEnum.PAY_TRANSACTION_STATUS_IS_NOT_SUCCESS.getCode());
        }
        if (payRefundSubmitDTO.getPrice() > payTransaction.getPrice() - payTransaction.getRefundTotal()) { // amount check: refund must not exceed the remaining refundable amount
            return ServiceExceptionUtil.error(PayErrorCodeEnum.PAY_REFUND_PRICE_EXCEED.getCode());
        }
        // Load the PayTransactionExtensionDO and validate it
        PayTransactionExtensionDO payTransactionExtension = payTransactionService.getPayTransactionExtension(payTransaction.getExtensionId());
        if (payTransactionExtension == null) { // must exist
            return ServiceExceptionUtil.error(PayErrorCodeEnum.PAY_TRANSACTION_EXTENSION_NOT_FOUND.getCode());
        }
        if (!PayTransactionStatusEnum.SUCCESS.getValue().equals(payTransactionExtension.getStatus())) { // status must be SUCCESS (paid)
            return ServiceExceptionUtil.error(PayErrorCodeEnum.PAY_TRANSACTION_EXTENSION_STATUS_IS_NOT_SUCCESS.getCode());
        }
        // Insert the PayRefundDO
        PayRefundDO payRefundDO = PayRefundConvert.INSTANCE.convert(payRefundSubmitDTO)
                .setTransactionId(payTransaction.getId())
                .setRefundCode(generateTransactionCode()) // TODO: refund-code generation to be revisited later
                .setStatus(PayRefundStatus.WAITING.getValue())
                .setNotifyUrl(payAppDO.getRefundNotifyUrl())
                .setRefundChannel(payTransaction.getPayChannel());
        payRefundDO.setCreateTime(new Date());
        payRefundMapper.insert(payRefundDO);
        // Invoke the third-party payment SDK
        AbstractPaySDK paySDK = PaySDKFactory.getSDK(payTransaction.getPayChannel());
        CommonResult<String> invokeResult = paySDK.submitRefund(payRefundDO, payTransactionExtension, null); // TODO: extra is passed as null for now
        if (invokeResult.isError()) {
            return CommonResult.error(invokeResult);
        }
        // Return success
        PayRefundSubmitBO payRefundSubmitBO = new PayRefundSubmitBO()
                .setId(payRefundDO.getId());
        return CommonResult.success(payRefundSubmitBO);
    }
    /**
     * Handles the refund callback from the payment channel: marks the refund
     * as SUCCESS/FAILURE, increases the transaction's refunded total and
     * schedules the refund notification task. Runs in one transaction.
     */
    @Override
    @Transactional
    public CommonResult<Boolean> updateRefundSuccess(Integer payChannel, String params) {
        // TODO: record the callback payload for auditing
        // Parse the raw callback params into a RefundSuccessBO
        AbstractPaySDK paySDK = PaySDKFactory.getSDK(payChannel);
        CommonResult<RefundSuccessBO> paySuccessResult = paySDK.parseRefundSuccessParams(params);
        if (paySuccessResult.isError()) {
            return CommonResult.error(paySuccessResult);
        }
        // TODO: strictest validation first. If the caller retries a callback that was already processed, returning false is acceptable, since the original callback already succeeded.
        // 1.1 Query the PayRefundDO
        PayRefundDO payRefund = payRefundMapper.selectByRefundCode(paySuccessResult.getData().getRefundCode());
        if (payRefund == null) {
            return ServiceExceptionUtil.error(PayErrorCodeEnum.PAY_REFUND_NOT_FOUND.getCode());
        }
        if (!PayRefundStatus.WAITING.getValue().equals(payRefund.getStatus())) { // status must be WAITING
            return ServiceExceptionUtil.error(PayErrorCodeEnum.PAY_REFUND_STATUS_NOT_WAITING.getCode());
        }
        // 1.2 Update the PayRefundDO
        Integer status = paySuccessResult.getData().getSuccess() ? PayRefundStatus.SUCCESS.getValue() : PayRefundStatus.FAILURE.getValue();
        PayRefundDO updatePayRefundDO = new PayRefundDO()
                .setId(payRefund.getId())
                .setStatus(status)
                .setTradeNo(paySuccessResult.getData().getTradeNo())
                .setExtensionData(params);
        int updateCounts = payRefundMapper.update(updatePayRefundDO, PayRefundStatus.WAITING.getValue());
        if (updateCounts == 0) { // optimistic update failed: the status was no longer WAITING
            throw ServiceExceptionUtil.exception(PayErrorCodeEnum.PAY_REFUND_STATUS_NOT_WAITING.getCode());
        }
        // 2.1 Validate the PayTransactionDO and the refund amount
        PayTransactionDO payTransaction = payTransactionService.getTransaction(payRefund.getTransactionId());
        if (payTransaction == null) {
            return ServiceExceptionUtil.error(PayErrorCodeEnum.PAY_TRANSACTION_NOT_FOUND.getCode());
        }
        if (!PayTransactionStatusEnum.SUCCESS.getValue().equals(payTransaction.getStatus())) { // status must be SUCCESS (paid)
            throw ServiceExceptionUtil.exception(PayErrorCodeEnum.PAY_TRANSACTION_STATUS_IS_NOT_SUCCESS.getCode());
        }
        if (payRefund.getPrice() + payTransaction.getRefundTotal() > payTransaction.getPrice()) {
            throw ServiceExceptionUtil.exception(PayErrorCodeEnum.PAY_REFUND_PRICE_EXCEED.getCode());
        }
        // 2.2 Update the PayTransactionDO (increase the refunded total)
        updateCounts = payTransactionService.updateTransactionPriceTotalIncr(payRefund.getTransactionId(), payRefund.getPrice());
        if (updateCounts == 0) { // guards against over-refunding. TODO: needs thought — should surface an error while still rolling back the transaction
            throw ServiceExceptionUtil.exception(PayErrorCodeEnum.PAY_REFUND_PRICE_EXCEED.getCode());
        }
        // 3. Insert the PayNotifyTaskDO
        payNotifyService.addRefundNotifyTask(payRefund);
        // Return the result
        return CommonResult.success(true);
    }
    /**
     * Returns one page of refunds matching the given time/status/channel
     * filters, plus the total matching count.
     */
    @Override
    public PayRefundPageBO getRefundPage(PayRefundPageDTO payRefundPageDTO) {
        PayRefundPageBO payRefundPageBO = new PayRefundPageBO();
        // Query the page of records
        int offset = (payRefundPageDTO.getPageNo() - 1) * payRefundPageDTO.getPageSize();
        payRefundPageBO.setList(PayRefundConvert.INSTANCE.convertList(payRefundMapper.selectListByPage(
                payRefundPageDTO.getCreateBeginTime(), payRefundPageDTO.getCreateEndTime(),
                payRefundPageDTO.getFinishBeginTime(), payRefundPageDTO.getFinishEndTime(),
                payRefundPageDTO.getStatus(), payRefundPageDTO.getPayChannel(),
                offset, payRefundPageDTO.getPageSize())));
        // Query the total count
        payRefundPageBO.setTotal(payRefundMapper.selectCountByPage(
                payRefundPageDTO.getCreateBeginTime(), payRefundPageDTO.getCreateEndTime(),
                payRefundPageDTO.getFinishBeginTime(), payRefundPageDTO.getFinishEndTime(),
                payRefundPageDTO.getStatus(), payRefundPageDTO.getPayChannel()));
        return payRefundPageBO;
    }
    /**
     * Generates a refund code: a 14-digit timestamp (yyyyMMddHHmmss) followed
     * by a 6-digit random suffix.
     */
    private String generateTransactionCode() {
        // Segments of a WeChat-style example code, kept for reference:
        // wx
        // 2014
        // 10
        // 27
        // 20
        // 09
        // 39
        // 5522657
        // a690389285100
        // Current algorithm:
        // - timestamp, yyyyMMddHHmmss, 14 digits
        // - 6 random digits. TODO: collisions are possible with pure randomness; revisit later
        return DateUtil.format(new Date(), "yyyyMMddHHmmss") + // timestamp
                MathUtil.random(100000, 999999) // random suffix (range kept simple on purpose)
                ;
    }
}
|
package com.iamschingie.EmployeeManagementSystemAPI;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
/**
 * Smoke test: verifies the Spring application context starts successfully.
 */
@SpringBootTest
class EmployeeManagementSystemApiApplicationTests {
    // Passes when the context loads without throwing; no assertions needed.
    @Test
    void contextLoads() {
    }
}
|
package p01_Geometry.Abstract;
import p01_Geometry.Interfaces.AreaMeasurable;
import p01_Geometry.Interfaces.PerimeterMeasurable;
import p01_Geometry.Vertices.Vertex2D;
/**
 * Base class for 2D shapes: stores the vertex array and provides a common
 * textual representation built from the subclass's area and perimeter.
 */
public abstract class PlaneShape extends Shape implements AreaMeasurable, PerimeterMeasurable {
    /**
     * @param vertices the 2D vertices defining this shape; stored as-is
     */
    public PlaneShape(Vertex2D[] vertices) {
        this.coordinates = vertices;
    }
    /** Formats type, area and perimeter on indented CRLF-separated lines. */
    @Override
    public String toString() {
        final String template =
                "Shape Type: %s\r\n\t" +
                "Area: %.2f\r\n\t" +
                "Perimeter: %.2f\r\n";
        final String typeName = this.getClassName();
        final double area = this.getArea();
        final double perimeter = this.getPerimeter();
        return String.format(template, typeName, area, perimeter);
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.broker.jmx;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.jms.Connection;
import javax.jms.InvalidSelectorException;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeDataSupport;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.OpenDataException;
import javax.management.openmbean.TabularData;
import javax.management.openmbean.TabularDataSupport;
import javax.management.openmbean.TabularType;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.broker.jmx.OpenTypeSupport.OpenTypeFactory;
import org.apache.activemq.broker.region.Destination;
import org.apache.activemq.broker.region.Subscription;
import org.apache.activemq.broker.region.policy.AbortSlowConsumerStrategy;
import org.apache.activemq.broker.region.policy.SlowConsumerStrategy;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.ActiveMQMessage;
import org.apache.activemq.command.ActiveMQTextMessage;
import org.apache.activemq.command.Message;
import org.apache.activemq.filter.BooleanExpression;
import org.apache.activemq.filter.MessageEvaluationContext;
import org.apache.activemq.selector.SelectorParser;
import org.apache.activemq.store.MessageStore;
import org.apache.activemq.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * JMX MBean view onto a single broker {@link Destination}, exposing its
 * statistics, tuning properties, browse operations and a helper for
 * injecting text messages via an embedded {@code vm://} connection.
 */
public class DestinationView implements DestinationViewMBean {

    // Fixed: the logger was registered under the MBean interface
    // (DestinationViewMBean.class); key it on the implementation class.
    private static final Logger LOG = LoggerFactory.getLogger(DestinationView.class);

    /** Destination this view wraps; nearly every method delegates to it. */
    protected final Destination destination;
    /** Broker used for MBean registration and to resolve the vm:// URL. */
    protected final ManagedRegionBroker broker;

    public DestinationView(ManagedRegionBroker broker, Destination destination) {
        this.broker = broker;
        this.destination = destination;
    }

    /** Triggers garbage collection on the underlying destination. */
    public void gc() {
        destination.gc();
    }

    @Override
    public String getName() {
        return destination.getName();
    }

    /** Resets all statistics counters for this destination to zero. */
    @Override
    public void resetStatistics() {
        destination.getDestinationStatistics().reset();
    }

    // --- Simple statistics delegates -------------------------------------

    @Override
    public long getEnqueueCount() {
        return destination.getDestinationStatistics().getEnqueues().getCount();
    }

    @Override
    public long getDequeueCount() {
        return destination.getDestinationStatistics().getDequeues().getCount();
    }

    @Override
    public long getForwardCount() {
        return destination.getDestinationStatistics().getForwards().getCount();
    }

    @Override
    public long getDispatchCount() {
        return destination.getDestinationStatistics().getDispatched().getCount();
    }

    @Override
    public long getInFlightCount() {
        return destination.getDestinationStatistics().getInflight().getCount();
    }

    @Override
    public long getExpiredCount() {
        return destination.getDestinationStatistics().getExpired().getCount();
    }

    @Override
    public long getConsumerCount() {
        return destination.getDestinationStatistics().getConsumers().getCount();
    }

    @Override
    public long getQueueSize() {
        return destination.getDestinationStatistics().getMessages().getCount();
    }

    /**
     * @return total persisted size of all messages in the store, or 0 when
     *         this destination has no message store (e.g. non-persistent).
     */
    @Override
    public long getStoreMessageSize() {
        MessageStore messageStore = destination.getMessageStore();
        return messageStore != null ? messageStore.getMessageStoreStatistics().getMessageSize().getTotalSize() : 0;
    }

    public long getMessagesCached() {
        return destination.getDestinationStatistics().getMessagesCached().getCount();
    }

    // --- Memory usage ----------------------------------------------------

    @Override
    public int getMemoryPercentUsage() {
        return destination.getMemoryUsage().getPercentUsage();
    }

    @Override
    public long getMemoryUsageByteCount() {
        return destination.getMemoryUsage().getUsage();
    }

    @Override
    public long getMemoryLimit() {
        return destination.getMemoryUsage().getLimit();
    }

    @Override
    public void setMemoryLimit(long limit) {
        destination.getMemoryUsage().setLimit(limit);
    }

    // --- Enqueue timing --------------------------------------------------

    @Override
    public double getAverageEnqueueTime() {
        return destination.getDestinationStatistics().getProcessTime().getAverageTime();
    }

    @Override
    public long getMaxEnqueueTime() {
        return destination.getDestinationStatistics().getProcessTime().getMaxTime();
    }

    @Override
    public long getMinEnqueueTime() {
        return destination.getDestinationStatistics().getProcessTime().getMinTime();
    }

    /**
     * @return the average size of a message (bytes)
     */
    @Override
    public long getAverageMessageSize() {
        // we are okay with the size without decimals so cast to long
        return (long) destination.getDestinationStatistics().getMessageSize().getAverageSize();
    }

    /**
     * @return the max size of a message (bytes)
     */
    @Override
    public long getMaxMessageSize() {
        return destination.getDestinationStatistics().getMessageSize().getMaxSize();
    }

    /**
     * @return the min size of a message (bytes)
     */
    @Override
    public long getMinMessageSize() {
        return destination.getDestinationStatistics().getMessageSize().getMinSize();
    }

    @Override
    public boolean isPrioritizedMessages() {
        return destination.isPrioritizedMessages();
    }

    // --- Browsing --------------------------------------------------------

    @Override
    public CompositeData[] browse() throws OpenDataException {
        try {
            return browse(null);
        } catch (InvalidSelectorException e) {
            // A null selector can never be invalid, so this cannot happen.
            throw new RuntimeException(e);
        }
    }

    /**
     * Browses the destination, returning each matching message converted to
     * open-MBean {@link CompositeData}. A {@code null} selector matches all.
     * Messages that fail conversion or selector evaluation are logged and
     * skipped rather than aborting the whole browse.
     */
    @Override
    public CompositeData[] browse(String selector) throws OpenDataException, InvalidSelectorException {
        Message[] messages = destination.browse();
        ArrayList<CompositeData> c = new ArrayList<CompositeData>();
        MessageEvaluationContext ctx = new MessageEvaluationContext();
        ctx.setDestination(destination.getActiveMQDestination());
        BooleanExpression selectorExpression = selector == null ? null : SelectorParser.parse(selector);
        for (Message message : messages) {
            try {
                if (selectorExpression == null) {
                    c.add(OpenTypeSupport.convert(message));
                } else {
                    ctx.setMessageReference(message);
                    if (selectorExpression.matches(ctx)) {
                        c.add(OpenTypeSupport.convert(message));
                    }
                }
            } catch (Throwable e) {
                // best effort: a single bad message must not break the browse
                LOG.warn("exception browsing destination", e);
            }
        }
        return c.toArray(new CompositeData[c.size()]);
    }

    /**
     * Browses the current destination returning a list of messages
     */
    @Override
    public List<Object> browseMessages() throws InvalidSelectorException {
        return browseMessages(null);
    }

    /**
     * Browses the current destination with the given selector returning a list
     * of messages
     */
    @Override
    public List<Object> browseMessages(String selector) throws InvalidSelectorException {
        Message[] messages = destination.browse();
        ArrayList<Object> answer = new ArrayList<Object>();
        MessageEvaluationContext ctx = new MessageEvaluationContext();
        ctx.setDestination(destination.getActiveMQDestination());
        BooleanExpression selectorExpression = selector == null ? null : SelectorParser.parse(selector);
        for (Message message : messages) {
            try {
                // mark read-only so callers cannot mutate the broker's copy
                message.setReadOnlyBody(true);
                if (selectorExpression == null) {
                    answer.add(message);
                } else {
                    ctx.setMessageReference(message);
                    if (selectorExpression.matches(ctx)) {
                        answer.add(message);
                    }
                }
            } catch (Throwable e) {
                LOG.warn("exception browsing destination", e);
            }
        }
        return answer;
    }

    @Override
    public TabularData browseAsTable() throws OpenDataException {
        try {
            return browseAsTable(null);
        } catch (InvalidSelectorException e) {
            // null selector can never be invalid
            throw new RuntimeException(e);
        }
    }

    /**
     * Browses the destination into a {@link TabularData} keyed by
     * {@code JMSMessageID}; a {@code null} selector matches everything.
     */
    @Override
    public TabularData browseAsTable(String selector) throws OpenDataException, InvalidSelectorException {
        OpenTypeFactory factory = OpenTypeSupport.getFactory(ActiveMQMessage.class);
        Message[] messages = destination.browse();
        CompositeType ct = factory.getCompositeType();
        TabularType tt = new TabularType("MessageList", "MessageList", ct, new String[] { "JMSMessageID" });
        TabularDataSupport rc = new TabularDataSupport(tt);
        MessageEvaluationContext ctx = new MessageEvaluationContext();
        ctx.setDestination(destination.getActiveMQDestination());
        BooleanExpression selectorExpression = selector == null ? null : SelectorParser.parse(selector);
        for (Message message : messages) {
            try {
                if (selectorExpression == null) {
                    rc.put(new CompositeDataSupport(ct, factory.getFields(message)));
                } else {
                    ctx.setMessageReference(message);
                    if (selectorExpression.matches(ctx)) {
                        rc.put(new CompositeDataSupport(ct, factory.getFields(message)));
                    }
                }
            } catch (Throwable e) {
                LOG.warn("exception browsing destination", e);
            }
        }
        return rc;
    }

    // --- Message sending -------------------------------------------------

    /**
     * Sends a text message described by a single comma-separated
     * {@code key=value} string. The reserved keys {@code body},
     * {@code username} and {@code password} are consumed; everything else
     * becomes a message property.
     * <p>
     * NOTE: because the string is split on "," and "=", values containing
     * either character cannot be expressed with this method — use
     * {@link #sendTextMessage(Map, String, String, String)} instead.
     *
     * @return the JMS message id of the sent message
     */
    @Override
    public String sendTextMessageWithProperties(String properties) throws Exception {
        String[] kvs = properties.split(",");
        Map<String, String> props = new HashMap<String, String>();
        for (String kv : kvs) {
            String[] it = kv.split("=");
            if (it.length == 2) {
                props.put(it[0], it[1]);
            }
        }
        return sendTextMessage(props, props.remove("body"), props.remove("username"), props.remove("password"));
    }

    @Override
    public String sendTextMessage(String body) throws Exception {
        return sendTextMessage(Collections.EMPTY_MAP, body);
    }

    @Override
    public String sendTextMessage(Map headers, String body) throws Exception {
        return sendTextMessage(headers, body, null, null);
    }

    @Override
    public String sendTextMessage(String body, String user, @Sensitive String password) throws Exception {
        return sendTextMessage(Collections.EMPTY_MAP, body, user, password);
    }

    /**
     * Sends a text message to this destination over a short-lived embedded
     * {@code vm://} connection.
     *
     * @param headers  message properties; the special key {@code timeToLive}
     *                 (milliseconds, parsed as a long) sets the TTL when the
     *                 message has no explicit expiration
     * @param body     the text body
     * @param userName optional user name for the connection (may be null)
     * @param password optional password for the connection (may be null)
     * @return the JMS message id of the sent message
     */
    @Override
    public String sendTextMessage(Map<String, String> headers, String body, String userName, @Sensitive String password) throws Exception {
        String brokerUrl = "vm://" + broker.getBrokerName();
        ActiveMQDestination dest = destination.getActiveMQDestination();
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory(brokerUrl);
        Connection connection = null;
        try {
            connection = cf.createConnection(userName, password);
            Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
            MessageProducer producer = session.createProducer(dest);
            ActiveMQTextMessage msg = (ActiveMQTextMessage) session.createTextMessage(body);
            for (Entry<String, String> entry : headers.entrySet()) {
                msg.setObjectProperty(entry.getKey(), entry.getValue());
            }
            producer.setDeliveryMode(msg.getJMSDeliveryMode());
            producer.setPriority(msg.getPriority());
            long ttl = 0;
            if (msg.getExpiration() != 0) {
                ttl = msg.getExpiration() - System.currentTimeMillis();
            } else {
                String timeToLive = headers.get("timeToLive");
                if (timeToLive != null) {
                    // Parse as long (was Integer.valueOf) so TTLs beyond
                    // Integer.MAX_VALUE milliseconds are accepted.
                    ttl = Long.parseLong(timeToLive);
                }
            }
            producer.setTimeToLive(ttl > 0 ? ttl : 0);
            producer.send(msg);
            return msg.getJMSMessageID();
        } finally {
            // Closing the connection also closes its sessions and producers.
            if (connection != null) {
                connection.close();
            }
        }
    }

    // --- Audit / tuning properties ---------------------------------------

    @Override
    public int getMaxAuditDepth() {
        return destination.getMaxAuditDepth();
    }

    @Override
    public int getMaxProducersToAudit() {
        return destination.getMaxProducersToAudit();
    }

    public boolean isEnableAudit() {
        return destination.isEnableAudit();
    }

    public void setEnableAudit(boolean enableAudit) {
        destination.setEnableAudit(enableAudit);
    }

    @Override
    public void setMaxAuditDepth(int maxAuditDepth) {
        destination.setMaxAuditDepth(maxAuditDepth);
    }

    @Override
    public void setMaxProducersToAudit(int maxProducersToAudit) {
        destination.setMaxProducersToAudit(maxProducersToAudit);
    }

    @Override
    public float getMemoryUsagePortion() {
        return destination.getMemoryUsage().getUsagePortion();
    }

    @Override
    public long getProducerCount() {
        return destination.getDestinationStatistics().getProducers().getCount();
    }

    @Override
    public boolean isProducerFlowControl() {
        return destination.isProducerFlowControl();
    }

    @Override
    public void setMemoryUsagePortion(float value) {
        destination.getMemoryUsage().setUsagePortion(value);
    }

    @Override
    public void setProducerFlowControl(boolean producerFlowControl) {
        destination.setProducerFlowControl(producerFlowControl);
    }

    @Override
    public boolean isAlwaysRetroactive() {
        return destination.isAlwaysRetroactive();
    }

    @Override
    public void setAlwaysRetroactive(boolean alwaysRetroactive) {
        destination.setAlwaysRetroactive(alwaysRetroactive);
    }

    /**
     * Set's the interval at which warnings about producers being blocked by
     * resource usage will be triggered. Values of 0 or less will disable
     * warnings
     *
     * @param blockedProducerWarningInterval the interval at which warning about
     *            blocked producers will be triggered.
     */
    @Override
    public void setBlockedProducerWarningInterval(long blockedProducerWarningInterval) {
        destination.setBlockedProducerWarningInterval(blockedProducerWarningInterval);
    }

    /**
     * @return the interval at which warning about blocked producers will be
     *         triggered.
     */
    @Override
    public long getBlockedProducerWarningInterval() {
        return destination.getBlockedProducerWarningInterval();
    }

    @Override
    public int getMaxPageSize() {
        return destination.getMaxPageSize();
    }

    @Override
    public void setMaxPageSize(int pageSize) {
        destination.setMaxPageSize(pageSize);
    }

    @Override
    public boolean isUseCache() {
        return destination.isUseCache();
    }

    @Override
    public void setUseCache(boolean value) {
        destination.setUseCache(value);
    }

    /**
     * @return the JMX ObjectNames of every subscription currently consuming
     *         from this destination.
     */
    @Override
    public ObjectName[] getSubscriptions() throws IOException, MalformedObjectNameException {
        List<Subscription> subscriptions = destination.getConsumers();
        ObjectName[] answer = new ObjectName[subscriptions.size()];
        ObjectName brokerObjectName = broker.getBrokerService().getBrokerObjectName();
        int index = 0;
        for (Subscription subscription : subscriptions) {
            String connectionClientId = subscription.getContext().getClientId();
            answer[index++] = BrokerMBeanSupport.createSubscriptionName(brokerObjectName, connectionClientId, subscription.getConsumerInfo());
        }
        return answer;
    }

    /**
     * @return the ObjectName of the registered slow-consumer strategy MBean,
     *         or null when none is configured or it is not an
     *         {@link AbortSlowConsumerStrategy}.
     */
    @Override
    public ObjectName getSlowConsumerStrategy() throws IOException, MalformedObjectNameException {
        ObjectName result = null;
        SlowConsumerStrategy strategy = destination.getSlowConsumerStrategy();
        // instanceof is already null-safe; the explicit null check was redundant
        if (strategy instanceof AbortSlowConsumerStrategy) {
            result = broker.registerSlowConsumerStrategy((AbortSlowConsumerStrategy) strategy);
        }
        return result;
    }

    /**
     * @return the destination's URI options rendered as a query string, or the
     *         empty string when there are none or they cannot be encoded.
     */
    @Override
    public String getOptions() {
        Map<String, String> options = destination.getActiveMQDestination().getOptions();
        String optionsString = "";
        try {
            if (options != null) {
                optionsString = URISupport.createQueryString(options);
            }
        } catch (URISyntaxException ignored) {
            // unencodable options: fall through and report an empty string
        }
        return optionsString;
    }

    @Override
    public boolean isDLQ() {
        return destination.getActiveMQDestination().isDLQ();
    }

    @Override
    public long getBlockedSends() {
        return destination.getDestinationStatistics().getBlockedSends().getCount();
    }

    @Override
    public double getAverageBlockedTime() {
        return destination.getDestinationStatistics().getBlockedTime().getAverageTime();
    }

    @Override
    public long getTotalBlockedTime() {
        return destination.getDestinationStatistics().getBlockedTime().getTotalTime();
    }
}
|
/*
* Copyright (c) 1994, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.lang;
/**
* Thrown to indicate some unexpected internal error has occurred in
* the Java Virtual Machine.
*
* @author unascribed
* @since 1.0
*/
public class InternalError extends VirtualMachineError {
    private static final long serialVersionUID = -9062593416125562365L;

    /**
     * Creates an {@code InternalError} carrying no detail message.
     */
    public InternalError() {
    }

    /**
     * Creates an {@code InternalError} carrying the given detail message.
     *
     * @param message the detail message.
     */
    public InternalError(String message) {
        super(message);
    }

    /**
     * Creates an {@code InternalError} with both a detail message and a
     * cause. <p>The message associated with {@code cause} is <i>not</i>
     * folded into this error's own detail message.
     *
     * @param message the detail message (which is saved for later retrieval
     *        by the {@link #getMessage()} method).
     * @param cause the cause (which is saved for later retrieval by the
     *        {@link #getCause()} method). (A {@code null} value is
     *        permitted, and indicates that the cause is nonexistent or
     *        unknown.)
     * @since 1.8
     */
    public InternalError(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates an {@code InternalError} from a cause alone; the detail
     * message becomes {@code (cause==null ? null : cause.toString())},
     * which typically names the cause's class and message.
     *
     * @param cause the cause (which is saved for later retrieval by the
     *        {@link #getCause()} method). (A {@code null} value is
     *        permitted, and indicates that the cause is nonexistent or
     *        unknown.)
     * @since 1.8
     */
    public InternalError(Throwable cause) {
        super(cause);
    }
}
|
package Try.action;
public class Dog {
public void shout(){
System.out.println("wang~");
}
}
|
package io.github.luzzu.linkeddata.qualitymetrics.accessibility.performance;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.sparql.core.Quad;
import org.apache.jena.vocabulary.RDF;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.github.luzzu.exceptions.MetricProcessingException;
import io.github.luzzu.linkeddata.qualitymetrics.commons.AbstractQualityMetric;
import io.github.luzzu.linkeddata.qualitymetrics.commons.HTTPRetriever;
import io.github.luzzu.linkeddata.qualitymetrics.vocabulary.DQM;
import io.github.luzzu.qualitymetrics.algorithms.ReservoirSampler;
import io.github.luzzu.qualityproblems.ProblemCollection;
import io.github.luzzu.semantics.commons.ResourceCommons;
import io.github.luzzu.semantics.vocabularies.DAQ;
/**
* @author Santiago Londono
* Estimates the efficiency with which a system can bind to the dataset, by measuring the delay between
* the submission of a request for that very dataset and reception of the respective response (or part of it)
*/
public class LowLatency extends AbstractQualityMetric<Double> {

    private final Resource METRIC_URI = DQM.LowLatencyMetric;

    private static Logger logger = LoggerFactory.getLogger(LowLatency.class);

    /**
     * Amount of HTTP requests that will be sent to the data source in order to determine its latency, the
     * resulting delays of all of these requests will be averaged to obtain the final latency measure
     */
    private static final int NUM_HTTP_SAMPLES = 1;

    /**
     * Accumulated request delay in milliseconds across all sampled URIs.
     * Fixed: previously initialised to -1, which silently subtracted one
     * millisecond from the measured total; an accumulator must start at 0.
     */
    private long totalDelay = 0;

    /**
     * Lazily computed metric value; null until {@link #metricValue()} runs.
     */
    private Double metricValue = null;

    /**
     * Response time that is considered ideal for a resource: the number of
     * milliseconds below which a resource earns a perfect score of 1.0.
     */
    private static final double NORM_TOTAL_RESPONSE_TIME = 1000.0;

    // Reservoir of up to 15 candidate subject URIs to probe for latency.
    ReservoirSampler<String> resSamp = new ReservoirSampler<String>(15, true);

    /**
     * Collects candidate URIs: every URI subject belonging to the dataset
     * (or, when no dataset URI is configured, any non-file URI) is offered
     * to the reservoir sampler.
     */
    public void compute(Quad quad) throws MetricProcessingException {
        if (quad.getSubject().isURI()) {
            if (this.getDatasetURI().equals("")) {
                if (!(quad.getSubject().getURI().startsWith("file")))
                    resSamp.add(quad.getSubject().getURI());
            } else if (quad.getSubject().getURI().startsWith(this.getDatasetURI()))
                resSamp.add(quad.getSubject().getURI());
        }
    }

    /**
     * Returns the current value of the Low Latency Metric as a ranking in the range [0, 1], with 1.0 the top ranking.
     * It does so by computing the average of the time elapsed between the instant when a request is sent to the URI
     * of the dataset and the instant when any response is received. Then this average response time is normalized by
     * dividing NORM_TOTAL_RESPONSE_TIME, the ideal response time, by it.
     * @return Current value of the Low Latency metric, measured with respect to the dataset's URI
     */
    @Override
    public Double metricValue() {
        if (this.metricValue == null) {
            if (resSamp.getItems().size() == 0) {
                // No sampled URIs: nothing to measure, so report the worst
                // score. (The former arithmetic degenerated to 0.0 here only
                // by accident, via a -1/0 -> -Infinity division.)
                this.metricValue = 0.0;
                return this.metricValue;
            }
            for (String s : resSamp.getItems()) {
                totalDelay += HTTPRetriever.measureReqsBurstDelay(s, NUM_HTTP_SAMPLES);
                logger.trace("Total delay for dataset {} was {}", s, totalDelay);
            }
            double avgRespTime = ((double) totalDelay / resSamp.getItems().size()) / ((double) NUM_HTTP_SAMPLES);
            // Normalise: faster-than-ideal responses are capped at 1.0.
            this.metricValue = Math.min(1.0, Math.max(0, NORM_TOTAL_RESPONSE_TIME / avgRespTime));
            statsLogger.info("LowLatency. Dataset: {}; - Total Delay (millisecs) : {}; " +
                "# HTTP Samples : {}; Norm Total Response Milliseconds : {};",
                this.getDatasetURI(),
                totalDelay, NUM_HTTP_SAMPLES, NORM_TOTAL_RESPONSE_TIME);
        }
        return this.metricValue;
    }

    public Resource getMetricURI() {
        return METRIC_URI;
    }

    @Override
    public boolean isEstimate() {
        // Latency is probed on a sampled subset of URIs, not the whole dataset.
        return true;
    }

    @Override
    public Resource getAgentURI() {
        return DQM.LuzzuProvenanceAgent;
    }

    @Override
    public ProblemCollection<?> getProblemCollection() {
        // This metric does not report per-triple quality problems.
        return null;
    }

    @Override
    public Model getObservationActivity() {
        Model activity = ModelFactory.createDefaultModel();
        Resource mp = ResourceCommons.generateURI();
        activity.add(mp, RDF.type, DAQ.MetricProfile);
        //TODO: Information on the methods used to measure
        return activity;
    }
}
|
/**
* The MIT License
* Copyright (c) 2014-2016 Ilkka Seppälä
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.azureyjt.smartrest.service;
import com.azureyjt.smartrest.service.exception.NoSuchResourceException;
/**
* Interface of common REST API service.
*/
public interface CommonApiService {
    /**
     * Execute GET request.
     *
     * @param uri Request uri.
     * @return Response body data.
     * @throws NoSuchResourceException if no resource exists at {@code uri}.
     */
    Object executeGet(String uri) throws NoSuchResourceException;

    /**
     * Execute POST request.
     *
     * @param uri Request uri.
     * @param body Request body.
     * @return Response body data.
     */
    String executePost(String uri, String body);

    /**
     * Execute PUT request.
     *
     * @param uri Request uri.
     * @param body Request body.
     * @return Response body data.
     */
    String executePut(String uri, String body);

    /**
     * Execute DELETE request.
     *
     * @param uri Request uri.
     * @return Response body data.
     */
    String executeDelete(String uri);
}
|
package uk.gov.hmcts.reform.professionalapi;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import net.serenitybdd.junit.spring.integration.SpringIntegrationSerenityRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.http.HttpStatus;
import uk.gov.hmcts.reform.professionalapi.controller.request.OrganisationCreationRequest;
import uk.gov.hmcts.reform.professionalapi.util.AuthorizationEnabledIntegrationTest;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static uk.gov.hmcts.reform.professionalapi.controller.constants.ProfessionalApiConstants.LENGTH_OF_ORGANISATION_IDENTIFIER;
import static uk.gov.hmcts.reform.professionalapi.controller.constants.ProfessionalApiConstants.ORGANISATION_IDENTIFIER_FORMAT_REGEX;
import static uk.gov.hmcts.reform.professionalapi.helper.OrganisationFixtures.organisationRequestWithAllFields;
import static uk.gov.hmcts.reform.professionalapi.helper.OrganisationFixtures.someMinimalOrganisationRequest;
@RunWith(SpringIntegrationSerenityRunner.class)
public class DeleteOrganisationIntTest extends AuthorizationEnabledIntegrationTest {
private String orgIdentifier;
@Test
public void returns_204_when_delete_minimal_pending_organisation_successfully() {
Map<String, Object> deleteResponse = deleteOrganization();
assertThat(deleteResponse.get("http_status")).isEqualTo(204);
Map<String, Object> orgResponse = professionalReferenceDataClient.retrieveSingleOrganisation(orgIdentifier,
hmctsAdmin);
assertThat(orgResponse.get("http_status").toString().contains("OK"));
}
@Test
public void return_forbidden_when_no_role_associated_with_end_point_to_delete_pending_organisation() {
OrganisationCreationRequest organisationCreationRequest = organisationRequestWithAllFields().build();
Map<String, Object> response =
professionalReferenceDataClient.createOrganisation(organisationCreationRequest);
String orgIdentifier = (String) response.get(ORG_IDENTIFIER);
assertThat(orgIdentifier).isNotNull();
Map<String, Object> deleteResponse =
professionalReferenceDataClient.deleteOrganisation(puiCaseManager, orgIdentifier);
assertThat(deleteResponse.get("http_status")).isEqualTo("403");
}
@Test
public void return_404_when_un_known_org_identifier_in_the_request_to_delete_pending_organisation() {
Map<String, Object> deleteResponse =
professionalReferenceDataClient.deleteOrganisation(hmctsAdmin, "O12DEF3");
assertThat(deleteResponse.get("http_status")).isEqualTo("404");
}
@Test
public void return_400_when_invalid_org_identifier_in_the_request_to_delete_pending_organisation() {
Map<String, Object> deleteResponse =
professionalReferenceDataClient.deleteOrganisation(hmctsAdmin, "O12DEF");
assertThat(deleteResponse.get("http_status")).isEqualTo("400");
}
@Test
public void returns_400_with_error_msg_when_delete_active_organisation_with_active_user_profile() {
userProfileCreateUserWireMock(HttpStatus.resolve(201));
String orgIdentifier = createAndActivateOrganisation();
Map<String, Object> deleteResponse =
professionalReferenceDataClient.deleteOrganisation(hmctsAdmin, orgIdentifier);
assertThat(deleteResponse.get("http_status")).isEqualTo("400");
assertThat((String) deleteResponse.get("response_body"))
.contains("The organisation admin is not in Pending state");
}
@Test
public void returns_204_when_delete_active_organisation_with_one_pending_user_profile() {
userProfileCreateUserWireMock(HttpStatus.resolve(201));
String orgIdentifier = createAndActivateOrganisation();
getUserProfileByEmailWireMock(HttpStatus.resolve(200));
deleteUserProfileMock(HttpStatus.resolve(204));
Map<String, Object> deleteResponse =
professionalReferenceDataClient.deleteOrganisation(hmctsAdmin, orgIdentifier);
assertThat(deleteResponse.get("http_status")).isEqualTo(204);
}
@Test
public void returns_400_when_delete_active_organisation_with_more_than_one__user_profile() {
List<String> userRoles = new ArrayList<>();
userRoles.add("pui-user-manager");
userProfileCreateUserWireMock(HttpStatus.resolve(201));
String orgIdentifier = createAndActivateOrganisation();
Map<String, Object> newUserResponse = professionalReferenceDataClient
.addUserToOrganisation(orgIdentifier,
inviteUserCreationRequest("somenewuser@email.com", userRoles), hmctsAdmin);
String userIdentifierResponse = (String) newUserResponse.get(USER_IDENTIFIER);
assertThat(newUserResponse).isNotNull();
assertEquals(newUserResponse.get(USER_IDENTIFIER), userIdentifierResponse);
Map<String, Object> deleteResponse =
professionalReferenceDataClient.deleteOrganisation(hmctsAdmin, orgIdentifier);
assertThat(deleteResponse.get("http_status")).isEqualTo("400");
}
@Test
public void returns_404_when_delete_active_organisation_with_external_endpoint() {
userProfileCreateUserWireMock(HttpStatus.resolve(201));
String orgIdentifier = createAndActivateOrganisation();
getUserProfileByEmailWireMock(HttpStatus.resolve(200));
deleteUserProfileMock(HttpStatus.resolve(204));
Map<String, Object> deleteResponse =
professionalReferenceDataClient.deleteOrganisationExternal(hmctsAdmin, orgIdentifier);
assertThat(deleteResponse.get("http_status")).isEqualTo("404");
}
private Map<String, Object> deleteOrganization() {
OrganisationCreationRequest organisationCreationRequest = someMinimalOrganisationRequest().build();
Map<String, Object> response =
professionalReferenceDataClient.createOrganisation(organisationCreationRequest);
orgIdentifier = (String) response.get(ORG_IDENTIFIER);
assertThat(orgIdentifier).isNotNull();
assertThat(orgIdentifier.length()).isEqualTo(LENGTH_OF_ORGANISATION_IDENTIFIER);
assertThat(orgIdentifier.matches(ORGANISATION_IDENTIFIER_FORMAT_REGEX)).isTrue();
return professionalReferenceDataClient.deleteOrganisation(hmctsAdmin, orgIdentifier);
}
}
|
package com.aoc.days2015.day19;
import com.aoc.solutionbase.SolutionBase;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/**
* @author Llamadmiral.
*/
/**
 * Advent of Code 2015, day 19: molecule replacements.
 * Part one counts the distinct molecules producible with a single forward
 * replacement; part two greedily reverses the rules to reduce the target
 * molecule back to the electron "e", counting the steps.
 *
 * @author Llamadmiral.
 */
class SolutionNineteen extends SolutionBase {

    /** Forward replacement rules: source element -> possible expansions. */
    private final Map<String, List<String>> rules = new HashMap<>();

    /**
     * Reverse rules, ordered longest-target-first (ties broken reverse
     * lexicographically) so the greedy reduction always collapses the
     * largest matching fragment first.
     */
    private final Map<String, String> reverseRulebook = new TreeMap<>((s, t1) -> {
        int comparison = Integer.compare(t1.length(), s.length());
        if (comparison == 0) {
            comparison = t1.compareTo(s);
        }
        return comparison;
    });

    /** The target molecule, read from the last line of the input. */
    private String starting = "";
    /** Set once the reduction to "e" succeeds, to stop further recursion. */
    private boolean foundIt;

    SolutionNineteen(final String day) {
        super(day);
    }

    @Override
    public void init() {
        input = input.replaceAll(" => ", " ");
        boolean readingStarting = false;
        for (final String row : input.split("\n")) {
            if (readingStarting) {
                starting = row;
            } else if (row.isEmpty()) {
                // blank line separates the rule list from the molecule
                readingStarting = true;
            } else {
                final String[] parts = row.split(" ");
                final String from = parts[0];
                final String to = parts[1];
                // computeIfAbsent replaces the containsKey/put/get dance
                // (which also inconsistently mixed parts[0] and 'from').
                rules.computeIfAbsent(from, key -> new ArrayList<>()).add(to);
                reverseRulebook.put(to, from);
            }
        }
    }

    @Override
    protected void solvePartOne() {
        final MoleculeGenerator generator = new MoleculeGenerator(rules, starting);
        final List<String> molecules = generator.generateUniqueMolecules();
        setSolutionOne(molecules.size());
    }

    @Override
    protected void solvePartTwo() {
        reverseEngineerMolecule(starting, 0);
    }

    /**
     * Greedily applies reverse rules (longest target first) until the
     * molecule collapses to "e", recording the recursion depth as the answer.
     *
     * @param molecule current molecule being reduced
     * @param i number of reverse steps applied so far
     */
    private void reverseEngineerMolecule(final String molecule, final int i) {
        for (final Map.Entry<String, String> entry : reverseRulebook.entrySet()) {
            if (!foundIt) {
                final String changedMolecule = generateSingleChange(entry.getKey(), entry.getValue(), molecule);
                if (changedMolecule != null && !"e".equals(changedMolecule)) {
                    reverseEngineerMolecule(changedMolecule, i + 1);
                } else if ("e".equals(changedMolecule)) {
                    setSolutionTwo(i);
                    foundIt = true;
                    break;
                }
            }
        }
    }

    /**
     * Replaces the first occurrence of {@code ruleFrom} in {@code molecule}
     * with {@code ruleTo}.
     *
     * @return the changed molecule, or null when {@code ruleFrom} is absent
     */
    private String generateSingleChange(final String ruleFrom, final String ruleTo, final String molecule) {
        String newMolecule = null;
        if (molecule.contains(ruleFrom)) {
            final int i = molecule.indexOf(ruleFrom);
            newMolecule = molecule.substring(0, i) + ruleTo + molecule.substring(i + ruleFrom.length());
        }
        return newMolecule;
    }
}
|
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.mock.mockito;
import java.util.Arrays;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.exceptions.misusing.UnfinishedVerificationException;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.cache.interceptor.CacheResolver;
import org.springframework.cache.interceptor.SimpleCacheResolver;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.stereotype.Service;
import org.springframework.test.context.junit4.SpringRunner;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* Test {@link SpyBean} when mixed with Spring AOP.
*
* @author Phillip Webb
* @see <a href="https://github.com/spring-projects/spring-boot/issues/5837">5837</a>
*/
@RunWith(SpringRunner.class)
public class SpyBeanWithAopProxyAndNotProxyTargetAwareTests {

    // proxyTargetAware=false means the spy is NOT unwrapped from Spring's
    // caching AOP proxy, so Mockito interacts with the proxy, not the spy.
    @SpyBean(proxyTargetAware = false)
    private DateService dateService;

    // With the raw proxy in play, verify(...) never reaches the Mockito spy,
    // leaving the verification unfinished; the subsequent reset(...) then
    // triggers UnfinishedVerificationException. The verify-then-reset order
    // is essential to this test — do not reorder these statements.
    @Test(expected = UnfinishedVerificationException.class)
    public void verifyShouldUseProxyTarget() throws Exception {
        this.dateService.getDate();
        verify(this.dateService, times(1)).getDate();
        reset(this.dateService);
    }

    // Minimal caching configuration using class-based (CGLIB) proxies so the
    // DateService bean is wrapped in an AOP proxy.
    @Configuration
    @EnableCaching(proxyTargetClass = true)
    @Import(DateService.class)
    static class Config {

        @Bean
        public CacheResolver cacheResolver(CacheManager cacheManager) {
            SimpleCacheResolver resolver = new SimpleCacheResolver();
            resolver.setCacheManager(cacheManager);
            return resolver;
        }

        @Bean
        public ConcurrentMapCacheManager cacheManager() {
            ConcurrentMapCacheManager cacheManager = new ConcurrentMapCacheManager();
            cacheManager.setCacheNames(Arrays.asList("test"));
            return cacheManager;
        }
    }

    // Cacheable service under test; caching is what pulls in the AOP proxy.
    @Service
    static class DateService {

        @Cacheable(cacheNames = "test")
        public Long getDate() {
            return System.nanoTime();
        }
    }
}
|
package com.example.application.knowledge;
import java.util.Arrays;
import java.util.List;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.jpa.boot.spi.IntegratorProvider;
/**
 * Hibernate {@link IntegratorProvider} that contributes the application's
 * custom event-listener integrator to the bootstrap process.
 */
public class CustomIntegratorProvider implements IntegratorProvider {

  /** Supplies the single {@link CustomEventListenerIntegrator} to Hibernate. */
  @Override
  public List<Integrator> getIntegrators() {
    Integrator eventListenerIntegrator = new CustomEventListenerIntegrator();
    return Arrays.asList(eventListenerIntegrator);
  }
}
|
package wa.server;
import io.vertx.core.Vertx;
import io.vertx.junit5.VertxExtension;
import io.vertx.junit5.VertxTestContext;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(VertxExtension.class)
public class TestMainVerticle {

  /**
   * Deploys a fresh {@link MainVerticle} before every test; the test context
   * is completed only when the deployment succeeds.
   */
  @BeforeEach
  void deploy_verticle(Vertx vertx, VertxTestContext testContext) {
    MainVerticle verticle = new MainVerticle();
    vertx.deployVerticle(verticle,
        testContext.succeeding(deploymentId -> testContext.completeNow()));
  }

  /**
   * Smoke test: succeeds as soon as the {@code @BeforeEach} deployment has
   * completed; the body only marks the context as done.
   */
  @Test
  void verticle_deployed(Vertx vertx, VertxTestContext testContext) throws Throwable {
    testContext.completeNow();
  }
}
|
/*
*
* Copyright (c) 2013 - 2020 Lijun Liao
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xipki.ocsp.client;
/**
 * Exception related to the OCSP requestor.
 *
 * @author Lijun Liao
 * @since 2.0.0
 */
public class OcspRequestorException extends Exception {

  private static final long serialVersionUID = 1L;

  /** Creates an exception with neither detail message nor cause. */
  public OcspRequestorException() {
    super();
  }

  /**
   * Creates an exception carrying only a detail message.
   *
   * @param message the detail message
   */
  public OcspRequestorException(String message) {
    super(message);
  }

  /**
   * Creates an exception wrapping a cause; the detail message defaults to
   * {@code cause.toString()} (standard {@link Exception} behavior).
   *
   * @param cause the underlying cause
   */
  public OcspRequestorException(Throwable cause) {
    super(cause);
  }

  /**
   * Creates an exception with both a detail message and a cause.
   *
   * @param message the detail message
   * @param cause   the underlying cause
   */
  public OcspRequestorException(String message, Throwable cause) {
    super(message, cause);
  }
}
|
package com.atguigu.gmall.sms.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.atguigu.gmall.sms.entity.HomeAdvEntity;
import com.atguigu.core.bean.PageVo;
import com.atguigu.core.bean.QueryCondition;
/**
 * Service for home-page carousel advertisements.
 *
 * @author lixianfeng
 * @email lxf@atguigu.com
 * @date 2020-01-02 15:55:46
 */
public interface HomeAdvService extends IService<HomeAdvEntity> {
/**
 * Queries one page of home advertisements.
 *
 * @param params paging/filter condition; exact fields depend on
 *               {@link QueryCondition} — confirm against callers
 * @return page wrapper containing the matching {@link HomeAdvEntity} records
 */
PageVo queryPage(QueryCondition params);
}
|
package com.vmware.vim25;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
 * <p>Java class for InvalidDatastore complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="InvalidDatastore"&gt;
 *   &lt;complexContent&gt;
 *     &lt;extension base="{urn:vim25}VimFault"&gt;
 *       &lt;sequence&gt;
 *         &lt;element name="datastore" type="{urn:vim25}ManagedObjectReference" minOccurs="0"/&gt;
 *         &lt;element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt;
 *       &lt;/sequence&gt;
 *     &lt;/extension&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 *
 * <p>NOTE(review): appears to be JAXB-generated from the vim25 schema —
 * prefer regenerating over hand-editing; confirm with the build setup.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "InvalidDatastore", propOrder = {
"datastore",
"name"
})
@XmlSeeAlso({
InvalidDatastorePath.class,
InaccessibleDatastore.class,
DatastoreNotWritableOnHost.class
})
public class InvalidDatastore
extends VimFault
{
// Optional (minOccurs="0") elements per the schema fragment above.
protected ManagedObjectReference datastore;
protected String name;
/**
 * Gets the value of the datastore property.
 *
 * @return
 *     possible object is
 *     {@link ManagedObjectReference }
 *
 */
public ManagedObjectReference getDatastore() {
return datastore;
}
/**
 * Sets the value of the datastore property.
 *
 * @param value
 *     allowed object is
 *     {@link ManagedObjectReference }
 *
 */
public void setDatastore(ManagedObjectReference value) {
this.datastore = value;
}
/**
 * Gets the value of the name property.
 *
 * @return
 *     possible object is
 *     {@link String }
 *
 */
public String getName() {
return name;
}
/**
 * Sets the value of the name property.
 *
 * @param value
 *     allowed object is
 *     {@link String }
 *
 */
public void setName(String value) {
this.name = value;
}
}
|
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.apache.hop.trans.steps.textfileoutput;
import org.apache.hop.core.injection.Injection;
import org.apache.hop.core.row.value.ValueMetaFactory;
import org.apache.hop.core.row.value.ValueMetaString;
/**
* Describes a single field in a text file
*
* @author Matt
* @since 11-05-2005
*
*/
/**
 * Describes a single field in a text file
 *
 * @author Matt
 * @since 11-05-2005
 */
public class TextFileField implements Cloneable {

  /** Name of the field on the output row. */
  @Injection( name = "OUTPUT_FIELDNAME", group = "OUTPUT_FIELDS" )
  private String name;

  /** Value-meta type id; settable either directly or by description. */
  private int type;

  /** Formatting mask applied when the value is rendered as text. */
  @Injection( name = "OUTPUT_FORMAT", group = "OUTPUT_FIELDS" )
  private String format;

  /** Field length; -1 means "not specified". */
  @Injection( name = "OUTPUT_LENGTH", group = "OUTPUT_FIELDS" )
  private int length = -1;

  /** Numeric precision; -1 means "not specified". */
  @Injection( name = "OUTPUT_PRECISION", group = "OUTPUT_FIELDS" )
  private int precision = -1;

  /** Currency symbol used when formatting numeric values. */
  @Injection( name = "OUTPUT_CURRENCY", group = "OUTPUT_FIELDS" )
  private String currencySymbol;

  /** Decimal separator used when formatting numeric values. */
  @Injection( name = "OUTPUT_DECIMAL", group = "OUTPUT_FIELDS" )
  private String decimalSymbol;

  /** Grouping (thousands) separator used when formatting numeric values. */
  @Injection( name = "OUTPUT_GROUP", group = "OUTPUT_FIELDS" )
  private String groupingSymbol;

  /** Literal written to the file when the value is null. */
  @Injection( name = "OUTPUT_NULL", group = "OUTPUT_FIELDS" )
  private String nullString;

  /** Trim type id (see the ValueMetaString trim-type helpers below). */
  private int trimType;

  /**
   * Fully-initializing constructor.
   *
   * @param name           field name
   * @param type           value-meta type id
   * @param format         formatting mask
   * @param length         field length (-1 = unspecified)
   * @param precision      numeric precision (-1 = unspecified)
   * @param currencySymbol currency symbol
   * @param decimalSymbol  decimal separator
   * @param groupSymbol    grouping separator
   * @param nullString     literal to emit for null values
   */
  public TextFileField( String name, int type, String format, int length, int precision, String currencySymbol,
    String decimalSymbol, String groupSymbol, String nullString ) {
    this.name = name;
    this.type = type;
    this.format = format;
    this.length = length;
    this.precision = precision;
    this.currencySymbol = currencySymbol;
    this.decimalSymbol = decimalSymbol;
    this.groupingSymbol = groupSymbol;
    this.nullString = nullString;
  }

  /** No-arg constructor for bean-style population. */
  public TextFileField() {
  }

  /**
   * Orders fields by name. Assumes {@code obj} is a {@code TextFileField};
   * anything else throws {@link ClassCastException}.
   */
  public int compare( Object obj ) {
    TextFileField other = (TextFileField) obj;
    return name.compareTo( other.getName() );
  }

  /**
   * Name-based equality helper (intentionally NOT {@link Object#equals}).
   * Assumes {@code obj} is a {@code TextFileField}.
   */
  public boolean equal( Object obj ) {
    TextFileField other = (TextFileField) obj;
    return name.equals( other.getName() );
  }

  /** Shallow copy via {@link Object#clone()}. */
  @Override
  public Object clone() {
    try {
      return super.clone();
    } catch ( CloneNotSupportedException e ) {
      // Should be unreachable since this class implements Cloneable; the
      // original contract of returning null (rather than throwing) is kept.
      return null;
    }
  }

  public int getLength() {
    return length;
  }

  public void setLength( int length ) {
    this.length = length;
  }

  public String getName() {
    return name;
  }

  public void setName( String fieldname ) {
    this.name = fieldname;
  }

  public int getType() {
    return type;
  }

  /** Human-readable name for the current type id. */
  public String getTypeDesc() {
    return ValueMetaFactory.getValueMetaName( type );
  }

  public void setType( int type ) {
    this.type = type;
  }

  /** Sets the type from its textual description (used by metadata injection). */
  @Injection( name = "OUTPUT_TYPE", group = "OUTPUT_FIELDS" )
  public void setType( String typeDesc ) {
    this.type = ValueMetaFactory.getIdForValueMeta( typeDesc );
  }

  public String getFormat() {
    return format;
  }

  public void setFormat( String format ) {
    this.format = format;
  }

  public String getGroupingSymbol() {
    return groupingSymbol;
  }

  public void setGroupingSymbol( String groupSymbol ) {
    this.groupingSymbol = groupSymbol;
  }

  public String getDecimalSymbol() {
    return decimalSymbol;
  }

  public void setDecimalSymbol( String decimalSymbol ) {
    this.decimalSymbol = decimalSymbol;
  }

  public String getCurrencySymbol() {
    return currencySymbol;
  }

  public void setCurrencySymbol( String currencySymbol ) {
    this.currencySymbol = currencySymbol;
  }

  public int getPrecision() {
    return precision;
  }

  public void setPrecision( int precision ) {
    this.precision = precision;
  }

  public String getNullString() {
    return nullString;
  }

  public void setNullString( String nullStr ) {
    this.nullString = nullStr;
  }

  /** "name:typeDescription", e.g. for logging and UI display. */
  @Override
  public String toString() {
    return name + ":" + getTypeDesc();
  }

  public int getTrimType() {
    return trimType;
  }

  public void setTrimType( int trimType ) {
    this.trimType = trimType;
  }

  /** Sets the trim type from its textual description (metadata injection). */
  @Injection( name = "OUTPUT_TRIM", group = "OUTPUT_FIELDS" )
  public void setTrimTypeByDesc( String value ) {
    this.trimType = ValueMetaString.getTrimTypeByDesc( value );
  }

  public String getTrimTypeCode() {
    return ValueMetaString.getTrimTypeCode( trimType );
  }

  public String getTrimTypeDesc() {
    return ValueMetaString.getTrimTypeDesc( trimType );
  }
}
|
/*
* Copyright (c) 2008-2015, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.impl.protocol.parameters;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.ClientMessageType;
import com.hazelcast.nio.Bits;
import com.hazelcast.transaction.impl.xa.SerializableXID;
@edu.umd.cs.findbugs.annotations.SuppressWarnings({"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"})
public class XATransactionCreateParameters {

  /** Client message type identifier for XA transaction creation. */
  public static final ClientMessageType TYPE = ClientMessageType.XA_TRANSACTION_CREATE;

  /** XA transaction identifier carried by the message. */
  public SerializableXID xid;
  /** Transaction timeout value carried by the message. */
  public long timeout;

  /** Reads the parameter fields, in wire order, from the given message. */
  private XATransactionCreateParameters(ClientMessage message) {
    this.xid = SerializableXIDCodec.decode(message);
    this.timeout = message.getLong();
  }

  /** Decodes the parameters from an incoming client message. */
  public static XATransactionCreateParameters decode(ClientMessage clientMessage) {
    return new XATransactionCreateParameters(clientMessage);
  }

  /**
   * Encodes the given XID and timeout into a new client message: sizes the
   * buffer, writes the payload in wire order, then stamps the message type
   * and frame length.
   */
  public static ClientMessage encode(SerializableXID xid, long timeout) {
    int dataSize = calculateDataSize(xid, timeout);
    ClientMessage message = ClientMessage.createForEncode(dataSize);
    SerializableXIDCodec.encode(xid, message);
    message.set(timeout);
    message.setMessageType(TYPE.id());
    message.updateFrameLength();
    return message;
  }

  /** Number of bytes required to encode the payload, including the header. */
  public static int calculateDataSize(SerializableXID xid, long timeout) {
    return ClientMessage.HEADER_SIZE
        + SerializableXIDCodec.calculateDataSize(xid)
        + Bits.LONG_SIZE_IN_BYTES;
  }
}
|
/**
* Copyright © 2014-2021 The SiteWhere Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sitewhere.spi.common;
/**
 * Entity which provides a color scheme (background, foreground and border
 * colors) for user-interface rendering.
 */
public interface IColorProvider {
/**
 * Background color for user interface.
 *
 * @return background color value; format (e.g. hex string) not fixed here —
 *         confirm against implementations
 */
String getBackgroundColor();
/**
 * Foreground color for user interface.
 *
 * @return foreground color value
 */
String getForegroundColor();
/**
 * Border color for user interface.
 *
 * @return border color value
 */
String getBorderColor();
}
|
package com.foodies.services.crud;
import com.foodies.models.Restaurant;
import com.foodies.models.User;
import com.foodies.services.common.CrudService;
import java.util.List;
/**
 * CRUD service for {@link User} entities plus social-graph operations
 * (user-to-user follows and user-to-restaurant follows).
 */
public interface UserCrudService extends CrudService<User> {
/**
 * Creates a follow relation between the two users.
 * NOTE(review): which of {@code id}/{@code idUser} is follower vs. followee
 * is not visible here — confirm against the implementation.
 *
 * @return the resulting list of users (presumably the updated following list)
 */
List<User> follow(Long id, Long idUser);
/** Removes a follow relation between the two users; see note on {@link #follow}. */
List<User> unfollow(Long id, Long idUser);
/** Makes {@code userFollowing} follow {@code restaurantToFollow}. */
void addFollowingRestaurant(User userFollowing, Restaurant restaurantToFollow);
/** Stops {@code userUnfollowing} following {@code restaurantToUnfollow}. */
void removeFollowingRestaurant(User userUnfollowing, Restaurant restaurantToUnfollow);
/** Looks up a user by username. Null-vs-exception on miss not visible here. */
User findByUsername(String username);
/** Returns the users that user {@code id} is following. */
List<User> getFollowing(Long id);
}
|
/*
* Copyright 2016-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.server.config;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ForkJoinPool;
import javax.sql.DataSource;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.analytics.metrics.AggregateCounterRepository;
import org.springframework.analytics.metrics.FieldValueCounterRepository;
import org.springframework.analytics.rest.controller.AggregateCounterController;
import org.springframework.analytics.rest.controller.CounterController;
import org.springframework.analytics.rest.controller.FieldValueCounterController;
import org.springframework.batch.admin.service.JobService;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.actuate.metrics.repository.MetricRepository;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.cloud.client.circuitbreaker.EnableCircuitBreaker;
import org.springframework.cloud.common.security.AuthorizationProperties;
import org.springframework.cloud.common.security.support.FileSecurityProperties;
import org.springframework.cloud.common.security.support.LdapSecurityProperties;
import org.springframework.cloud.common.security.support.OnSecurityEnabledAndOAuth2Disabled;
import org.springframework.cloud.common.security.support.SecurityStateBean;
import org.springframework.cloud.dataflow.completion.CompletionConfiguration;
import org.springframework.cloud.dataflow.completion.StreamCompletionProvider;
import org.springframework.cloud.dataflow.completion.TaskCompletionProvider;
import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver;
import org.springframework.cloud.dataflow.registry.AppRegistry;
import org.springframework.cloud.dataflow.registry.AppRegistryCommon;
import org.springframework.cloud.dataflow.registry.RdbmsUriRegistry;
import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepository;
import org.springframework.cloud.dataflow.registry.service.AppRegistryService;
import org.springframework.cloud.dataflow.registry.service.DefaultAppRegistryService;
import org.springframework.cloud.dataflow.server.ConditionalOnSkipperDisabled;
import org.springframework.cloud.dataflow.server.ConditionalOnSkipperEnabled;
import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties;
import org.springframework.cloud.dataflow.server.config.features.FeaturesProperties;
import org.springframework.cloud.dataflow.server.controller.AboutController;
import org.springframework.cloud.dataflow.server.controller.AppRegistryController;
import org.springframework.cloud.dataflow.server.controller.CompletionController;
import org.springframework.cloud.dataflow.server.controller.JobExecutionController;
import org.springframework.cloud.dataflow.server.controller.JobInstanceController;
import org.springframework.cloud.dataflow.server.controller.JobStepExecutionController;
import org.springframework.cloud.dataflow.server.controller.JobStepExecutionProgressController;
import org.springframework.cloud.dataflow.server.controller.MetricsController;
import org.springframework.cloud.dataflow.server.controller.RestControllerAdvice;
import org.springframework.cloud.dataflow.server.controller.RootController;
import org.springframework.cloud.dataflow.server.controller.RuntimeAppInstanceController;
import org.springframework.cloud.dataflow.server.controller.RuntimeAppsController;
import org.springframework.cloud.dataflow.server.controller.SkipperAppRegistryController;
import org.springframework.cloud.dataflow.server.controller.SkipperStreamDeploymentController;
import org.springframework.cloud.dataflow.server.controller.StreamDefinitionController;
import org.springframework.cloud.dataflow.server.controller.StreamDeploymentController;
import org.springframework.cloud.dataflow.server.controller.TaskDefinitionController;
import org.springframework.cloud.dataflow.server.controller.TaskExecutionController;
import org.springframework.cloud.dataflow.server.controller.TaskSchedulerController;
import org.springframework.cloud.dataflow.server.controller.ToolsController;
import org.springframework.cloud.dataflow.server.controller.UiController;
import org.springframework.cloud.dataflow.server.controller.security.LoginController;
import org.springframework.cloud.dataflow.server.controller.security.SecurityController;
import org.springframework.cloud.dataflow.server.controller.support.MetricStore;
import org.springframework.cloud.dataflow.server.repository.DeploymentIdRepository;
import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository;
import org.springframework.cloud.dataflow.server.repository.StreamDeploymentRepository;
import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository;
import org.springframework.cloud.dataflow.server.service.SchedulerService;
import org.springframework.cloud.dataflow.server.service.SkipperStreamService;
import org.springframework.cloud.dataflow.server.service.StreamService;
import org.springframework.cloud.dataflow.server.service.TaskJobService;
import org.springframework.cloud.dataflow.server.service.TaskService;
import org.springframework.cloud.dataflow.server.service.impl.AppDeployerStreamService;
import org.springframework.cloud.dataflow.server.service.impl.AppDeploymentRequestCreator;
import org.springframework.cloud.dataflow.server.service.impl.DefaultSkipperStreamService;
import org.springframework.cloud.dataflow.server.stream.AppDeployerStreamDeployer;
import org.springframework.cloud.dataflow.server.stream.SkipperStreamDeployer;
import org.springframework.cloud.dataflow.server.stream.StreamDeployer;
import org.springframework.cloud.deployer.resource.maven.MavenProperties;
import org.springframework.cloud.deployer.resource.maven.MavenResourceLoader;
import org.springframework.cloud.deployer.resource.registry.UriRegistry;
import org.springframework.cloud.deployer.resource.support.DelegatingResourceLoader;
import org.springframework.cloud.deployer.spi.app.AppDeployer;
import org.springframework.cloud.deployer.spi.task.TaskLauncher;
import org.springframework.cloud.skipper.client.DefaultSkipperClient;
import org.springframework.cloud.skipper.client.SkipperClient;
import org.springframework.cloud.skipper.client.SkipperClientProperties;
import org.springframework.cloud.skipper.client.SkipperClientResponseErrorHandler;
import org.springframework.cloud.task.repository.TaskExplorer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.io.ResourceLoader;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.hateoas.EntityLinks;
import org.springframework.hateoas.hal.Jackson2HalModule;
import org.springframework.http.converter.StringHttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.scheduling.concurrent.ForkJoinPoolFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.web.client.RestTemplate;
/**
* Configuration for the Data Flow Server Controllers.
*
* @author Mark Fisher
* @author Gunnar Hillert
* @author Ilayaperumal Gopinathan
* @author Andy Clement
* @author Glenn Renfro
* @author Christian Tzolov
*/
@SuppressWarnings("all")
@Configuration
@Import(CompletionConfiguration.class)
@ConditionalOnBean({ EnableDataFlowServerConfiguration.Marker.class, TaskLauncher.class })
@EnableConfigurationProperties({ FeaturesProperties.class, VersionInfoProperties.class, MetricsProperties.class })
@ConditionalOnProperty(prefix = "dataflow.server", name = "enabled", havingValue = "true", matchIfMissing = true)
@EnableCircuitBreaker
@EntityScan({ "org.springframework.cloud.dataflow.registry.domain" })
@EnableJpaRepositories(basePackages = "org.springframework.cloud.dataflow.registry.repository")
@EnableTransactionManagement
public class DataFlowControllerAutoConfiguration {
// Shared logger; also used by the nested Skipper configuration below.
private static Log logger = LogFactory.getLog(DataFlowControllerAutoConfiguration.class);
// Root endpoint exposing HAL links to all other REST resources.
@Bean
public RootController rootController(EntityLinks entityLinks) {
return new RootController(entityLinks);
}
// Runtime view of individual app instances; only when stream repositories exist.
@Bean
@ConditionalOnBean({ StreamDefinitionRepository.class, StreamDeploymentRepository.class })
public RuntimeAppInstanceController appInstanceController(StreamDeployer streamDeployer) {
return new RuntimeAppInstanceController(streamDeployer);
}
// Store for metrics collected from deployed apps, configured via MetricsProperties.
@Bean
public MetricStore metricStore(MetricsProperties metricsProperties) {
return new MetricStore(metricsProperties);
}
// Stream definition REST endpoint.
// NOTE(review): the StreamDefinitionRepository parameter is not used in the body;
// it appears to exist only to enforce bean presence/ordering — confirm before removing.
@Bean
@ConditionalOnBean(StreamDefinitionRepository.class)
public StreamDefinitionController streamDefinitionController(StreamDefinitionRepository repository,
StreamService streamService) {
return new StreamDefinitionController(streamService);
}
// Fork-join pool (parallelism 4) used by the app-registry controllers.
@Bean
@ConditionalOnMissingBean(name = "appRegistryFJPFB")
public ForkJoinPoolFactoryBean appRegistryFJPFB() {
ForkJoinPoolFactoryBean forkJoinPoolFactoryBean = new ForkJoinPoolFactoryBean();
forkJoinPoolFactoryBean.setParallelism(4);
return forkJoinPoolFactoryBean;
}
// Helper that turns stream/task definitions plus properties into deployment requests.
@Bean
public AppDeploymentRequestCreator streamDeploymentPropertiesUtils(AppRegistryCommon appRegistry,
CommonApplicationProperties commonApplicationProperties,
ApplicationConfigurationMetadataResolver applicationConfigurationMetadataResolver) {
return new AppDeploymentRequestCreator(appRegistry,
commonApplicationProperties,
applicationConfigurationMetadataResolver);
}
// Runtime view of deployed apps; only when stream repositories exist.
@Bean
@ConditionalOnBean({ StreamDefinitionRepository.class, StreamDeploymentRepository.class })
public RuntimeAppsController runtimeAppsController(StreamDeployer streamDeployer) {
return new RuntimeAppsController(streamDeployer);
}
// REST endpoint serving metrics from the MetricStore.
@Bean
public MetricsController metricsController(MetricStore metricStore) {
return new MetricsController(metricStore);
}
// Fork-join pool (parallelism 8) used for querying runtime app status in parallel.
@Bean
@ConditionalOnBean({ StreamDefinitionRepository.class, StreamDeploymentRepository.class })
@ConditionalOnMissingBean(name = "runtimeAppsStatusFJPFB")
public ForkJoinPoolFactoryBean runtimeAppsStatusFJPFB() {
ForkJoinPoolFactoryBean forkJoinPoolFactoryBean = new ForkJoinPoolFactoryBean();
forkJoinPoolFactoryBean.setParallelism(8);
return forkJoinPoolFactoryBean;
}
// Resolves maven:// resources using the configured Maven settings.
@Bean
public MavenResourceLoader mavenResourceLoader(MavenProperties properties) {
return new MavenResourceLoader(properties);
}
// Delegating loader; here only the "maven" scheme is registered explicitly.
@Bean
@ConditionalOnMissingBean(DelegatingResourceLoader.class)
public DelegatingResourceLoader delegatingResourceLoader(MavenResourceLoader mavenResourceLoader) {
Map<String, ResourceLoader> loaders = new HashMap<>();
loaders.put("maven", mavenResourceLoader);
return new DelegatingResourceLoader(loaders);
}
// Task definition REST endpoint; gated on the task repository being present.
@Bean
@ConditionalOnBean(TaskDefinitionRepository.class)
public TaskDefinitionController taskDefinitionController(TaskExplorer taskExplorer, TaskDefinitionRepository repository,
TaskService taskService) {
return new TaskDefinitionController(taskExplorer, repository, taskService);
}
// Task execution REST endpoint.
@Bean
@ConditionalOnBean(TaskDefinitionRepository.class)
public TaskExecutionController taskExecutionController(TaskExplorer explorer, TaskService taskService,
TaskDefinitionRepository taskDefinitionRepository) {
return new TaskExecutionController(explorer, taskService, taskDefinitionRepository);
}
// Task scheduling REST endpoint; only when a SchedulerService is configured.
@Bean
@ConditionalOnBean(SchedulerService.class)
public TaskSchedulerController taskSchedulerController(SchedulerService schedulerService) {
return new TaskSchedulerController(schedulerService);
}
// Batch job execution REST endpoint.
@Bean
@ConditionalOnBean(TaskDefinitionRepository.class)
public JobExecutionController jobExecutionController(TaskJobService repository) {
return new JobExecutionController(repository);
}
// Batch job step-execution REST endpoint.
@Bean
@ConditionalOnBean(TaskDefinitionRepository.class)
public JobStepExecutionController jobStepExecutionController(JobService service) {
return new JobStepExecutionController(service);
}
// Batch job step-execution progress REST endpoint.
@Bean
@ConditionalOnBean(TaskDefinitionRepository.class)
public JobStepExecutionProgressController jobStepExecutionProgressController(JobService service) {
return new JobStepExecutionProgressController(service);
}
// Batch job instance REST endpoint.
@Bean
@ConditionalOnBean(TaskDefinitionRepository.class)
public JobInstanceController jobInstanceController(TaskJobService repository) {
return new JobInstanceController(repository);
}
// Analytics: simple counters; gated on the corresponding repository bean.
@Bean
@ConditionalOnBean(MetricRepository.class)
public CounterController counterController(MetricRepository metricRepository) {
return new CounterController(metricRepository);
}
// Analytics: field-value counters.
@Bean
@ConditionalOnBean(FieldValueCounterRepository.class)
public FieldValueCounterController fieldValueCounterController(FieldValueCounterRepository repository) {
return new FieldValueCounterController(repository);
}
// Analytics: aggregate counters.
@Bean
@ConditionalOnBean(AggregateCounterRepository.class)
public AggregateCounterController aggregateCounterController(AggregateCounterRepository repository) {
return new AggregateCounterController(repository);
}
// Shell/UI tab-completion endpoint for stream and task DSL.
@Bean
public CompletionController completionController(StreamCompletionProvider completionProvider,
TaskCompletionProvider taskCompletionProvider) {
return new CompletionController(completionProvider, taskCompletionProvider);
}
// DSL parsing/graph tools endpoint.
@Bean
public ToolsController toolsController() {
return new ToolsController();
}
// Reports the server's security configuration to clients.
@Bean
public SecurityController securityController(SecurityStateBean securityStateBean) {
return new SecurityController(securityStateBean);
}
// Form-login endpoint; only when security is on and OAuth2 is off.
@Bean
@Conditional(OnSecurityEnabledAndOAuth2Disabled.class)
public LoginController loginController() {
return new LoginController();
}
// "/about" endpoint aggregating version, feature and security info.
@Bean
public AboutController aboutController(ObjectProvider<StreamDeployer> streamDeployer, TaskLauncher taskLauncher,
FeaturesProperties featuresProperties, VersionInfoProperties versionInfoProperties,
SecurityStateBean securityStateBean) {
return new AboutController(streamDeployer.getIfAvailable(), taskLauncher, featuresProperties,
versionInfoProperties,
securityStateBean);
}
// Serves the dashboard UI.
@Bean
public UiController uiController() {
return new UiController();
}
// Maps exceptions from all controllers onto HTTP responses.
@Bean
public RestControllerAdvice restControllerAdvice() {
return new RestControllerAdvice();
}
// Maven settings; MavenConfigurationProperties is presumably a
// @ConfigurationProperties subclass declared elsewhere in this file — confirm.
@Bean
public MavenProperties mavenProperties() {
return new MavenConfigurationProperties();
}
// Authorization rules bound from spring.cloud.dataflow.security.authorization.*.
@Bean
@ConfigurationProperties(prefix = "spring.cloud.dataflow.security.authorization")
public AuthorizationProperties authorizationProperties() {
return new AuthorizationProperties();
}
// File-based authentication settings; only when explicitly enabled.
@Bean
@ConditionalOnProperty(name = "spring.cloud.dataflow.security.authentication.file.enabled", havingValue = "true")
@ConfigurationProperties(prefix = "spring.cloud.dataflow.security.authentication.file")
public FileSecurityProperties fileSecurityProperties() {
return new FileSecurityProperties();
}
// LDAP authentication settings; only when explicitly enabled.
@Bean
@ConditionalOnProperty(name = "spring.cloud.dataflow.security.authentication.ldap.enabled", havingValue = "true")
@ConfigurationProperties(prefix = "spring.cloud.dataflow.security.authentication.ldap")
public LdapSecurityProperties ldapSecurityProperties() {
return new LdapSecurityProperties();
}
// Holds the effective security state shared with the controllers above.
@Bean
public SecurityStateBean securityStateBean() {
return new SecurityStateBean();
}
@Configuration
@ConditionalOnSkipperEnabled
@EnableConfigurationProperties(SkipperClientProperties.class)
public static class SkipperDeploymentConfiguration {
@Bean
@ConditionalOnBean({ StreamDefinitionRepository.class, StreamDeploymentRepository.class })
public SkipperStreamDeploymentController updatableStreamDeploymentController(
StreamDefinitionRepository repository, SkipperStreamService streamService) {
return new SkipperStreamDeploymentController(repository, streamService);
}
@Bean
@ConditionalOnBean(StreamDefinitionRepository.class)
public SkipperClient skipperClient(SkipperClientProperties properties,
RestTemplateBuilder restTemplateBuilder, ObjectMapper objectMapper) {
objectMapper.registerModule(new Jackson2HalModule());
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
RestTemplate restTemplate = restTemplateBuilder
.errorHandler(new SkipperClientResponseErrorHandler(objectMapper))
.interceptors(new OAuth2AccessTokenProvidingClientHttpRequestInterceptor())
.messageConverters(Arrays.asList(new StringHttpMessageConverter(),
new MappingJackson2HttpMessageConverter(objectMapper)))
.build();
return new DefaultSkipperClient(properties.getServerUri(), restTemplate);
}
@Bean
@ConditionalOnBean(StreamDefinitionRepository.class)
public SkipperStreamDeployer skipperStreamDeployer(SkipperClient skipperClient,
StreamDefinitionRepository streamDefinitionRepository,
SkipperClientProperties skipperClientProperties,
AppRegistryService appRegistryService,
ForkJoinPool runtimeAppsStatusFJPFB) {
logger.info("Skipper URI [" + skipperClientProperties.getServerUri() + "]");
return new SkipperStreamDeployer(skipperClient, streamDefinitionRepository, appRegistryService,
runtimeAppsStatusFJPFB);
}
@Bean
@ConditionalOnBean(StreamDefinitionRepository.class)
public SkipperStreamService skipperStreamDeploymentService(
StreamDefinitionRepository streamDefinitionRepository,
SkipperStreamDeployer skipperStreamDeployer, AppDeploymentRequestCreator appDeploymentRequestCreator,
AppRegistryCommon appRegistry) {
return new DefaultSkipperStreamService(streamDefinitionRepository, skipperStreamDeployer,
appDeploymentRequestCreator, appRegistry);
}
/**
 * Application registry backed by the {@link AppRegistrationRepository}, resolving
 * application artifacts through the delegating resource loader.
 *
 * @return the default {@link AppRegistryService} implementation
 */
@Bean
public AppRegistryService appRegistryService(AppRegistrationRepository appRegistrationRepository,
        DelegatingResourceLoader resourceLoader, MavenProperties mavenProperties) {
    AppRegistryService registryService =
            new DefaultAppRegistryService(appRegistrationRepository, resourceLoader, mavenProperties);
    return registryService;
}
/**
 * REST controller exposing application-registry operations when running in
 * Skipper mode.
 *
 * @return the configured {@link SkipperAppRegistryController}
 */
@Bean
public SkipperAppRegistryController skipperAppRegistryController(
        StreamDefinitionRepository streamDefinitionRepository,
        StreamService streamService,
        AppRegistryService appRegistry, ApplicationConfigurationMetadataResolver metadataResolver,
        ForkJoinPool appRegistryFJPFB, MavenProperties mavenProperties) {
    SkipperAppRegistryController controller = new SkipperAppRegistryController(
            streamDefinitionRepository,
            streamService,
            appRegistry,
            metadataResolver,
            appRegistryFJPFB,
            mavenProperties);
    return controller;
}
}
/**
 * Wires the classic (non-Skipper) stream deployment support. Only active when
 * Skipper mode is disabled and an {@link AppDeployer} bean is present.
 */
@Configuration
@ConditionalOnSkipperDisabled
@ConditionalOnBean({ AppDeployer.class })
public static class AppDeploymentConfiguration {

    /**
     * REST controller handling stream deployment requests against the
     * app-deployer-based {@link StreamService}.
     */
    @Bean
    @ConditionalOnBean({ StreamDefinitionRepository.class, StreamDeploymentRepository.class })
    public StreamDeploymentController streamDeploymentController(StreamDefinitionRepository repository,
            StreamService streamService) {
        StreamDeploymentController controller = new StreamDeploymentController(repository, streamService);
        return controller;
    }

    /**
     * Stream service that deploys directly through an {@link AppDeployer}
     * (the non-Skipper code path).
     */
    @Bean
    @ConditionalOnBean({ StreamDefinitionRepository.class, StreamDeploymentRepository.class })
    public StreamService simpleStreamDeploymentService(StreamDefinitionRepository streamDefinitionRepository,
            AppDeployerStreamDeployer appDeployerStreamDeployer,
            AppDeploymentRequestCreator appDeploymentRequestCreator, AppRegistryCommon appRegistry) {
        StreamService streamService = new AppDeployerStreamService(
                streamDefinitionRepository, appDeployerStreamDeployer, appDeploymentRequestCreator, appRegistry);
        return streamService;
    }

    /**
     * Deployer that pushes stream apps through the platform {@link AppDeployer}
     * and records deployment ids.
     */
    @Bean
    @ConditionalOnBean({ StreamDefinitionRepository.class, StreamDeploymentRepository.class })
    public AppDeployerStreamDeployer appDeployerStreamDeployer(AppDeployer appDeployer,
            DeploymentIdRepository deploymentIdRepository,
            StreamDefinitionRepository streamDefinitionRepository,
            StreamDeploymentRepository streamDeploymentRepository, ForkJoinPool appRegistryFJPFB) {
        AppDeployerStreamDeployer deployer = new AppDeployerStreamDeployer(
                appDeployer, deploymentIdRepository, streamDefinitionRepository,
                streamDeploymentRepository, appRegistryFJPFB);
        return deployer;
    }

    /** REST controller exposing the URI-registry-backed application registry. */
    @Bean
    public AppRegistryController appRegistryController(AppRegistry appRegistry,
            ApplicationConfigurationMetadataResolver metadataResolver, ForkJoinPool appRegistryFJPFB) {
        AppRegistryController controller =
                new AppRegistryController(appRegistry, metadataResolver, appRegistryFJPFB);
        return controller;
    }

    /** Relational-database-backed registry of app-name-to-URI mappings. */
    @Bean
    public UriRegistry uriRegistry(DataSource dataSource) {
        UriRegistry registry = new RdbmsUriRegistry(dataSource);
        return registry;
    }

    /** Application registry layered on top of the {@link UriRegistry}. */
    @Bean
    public AppRegistry appRegistry(UriRegistry uriRegistry, DelegatingResourceLoader resourceLoader,
            MavenProperties mavenProperties) {
        AppRegistry registry = new AppRegistry(uriRegistry, resourceLoader, mavenProperties);
        return registry;
    }
}
/**
 * Binds {@code maven.*} configuration properties onto {@link MavenProperties};
 * the subclass exists solely to attach the {@code @ConfigurationProperties} prefix.
 */
@ConfigurationProperties(prefix = "maven")
static class MavenConfigurationProperties extends MavenProperties {
}
}
|
package com.cms.controller.admin;
import java.math.BigDecimal;
import java.util.Date;
import java.util.HashMap;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang.StringUtils;
import com.cms.Feedback;
import com.cms.entity.Member;
import com.cms.routes.RouteMapping;
/**
 * Controller - Member (会员)
 */
@RouteMapping(url = "/admin/member")
public class MemberController extends BaseController{

    /**
     * Renders the "add member" form.
     */
    public void add(){
        render(getView("member/add"));
    }

    /**
     * Persists a new member submitted from the add form, then redirects to the list.
     * The raw password is hashed before storage and the balance starts at zero.
     */
    public void save(){
        Member member = getModel(Member.class,"",true);
        // NOTE(review): unsalted MD5 is a weak password hash (fast to brute-force);
        // prefer bcrypt/scrypt/argon2. Kept as-is so hashes stay comparable with
        // the ones written by update().
        member.setPassword(DigestUtils.md5Hex(member.getPassword()));
        // One timestamp so createDate and modifyDate are identical at creation time.
        Date now = new Date();
        member.setCreateDate(now);
        member.setModifyDate(now);
        member.setBalance(BigDecimal.ZERO);
        member.save();
        redirect(getListQuery("/admin/member/list"));
    }

    /**
     * Renders the edit form for the member identified by the "id" parameter.
     */
    public void edit(){
        Long id = getParaToLong("id");
        setAttr("member", new Member().dao().findById(id));
        render(getView("member/edit"));
    }

    /**
     * Updates a member's password. Only writes when the member exists and a
     * non-blank password was submitted; otherwise the record is left untouched.
     */
    public void update(){
        Long id = getParaToLong("id");
        String password = getPara("password");
        Member member = new Member().dao().findById(id);
        // Guard against a stale/unknown id: findById may return null.
        if(member != null && StringUtils.isNotBlank(password)){
            member.setPassword(DigestUtils.md5Hex(password));
            member.update();
        }
        redirect(getListQuery("/admin/member/list"));
    }

    /**
     * Lists members with paging, optionally filtered by mobile number.
     * Defaults to the first page when "pageNumber" is absent.
     */
    public void list(){
        String mobile = getPara("mobile");
        Integer pageNumber = getParaToInt("pageNumber");
        if(pageNumber == null){
            pageNumber = 1;
        }
        setAttr("page", new Member().dao().findPage(mobile, pageNumber, PAGE_SIZE));
        setAttr("mobile", mobile);
        render(getView("member/list"));
    }

    /**
     * Deletes the members whose ids are posted in the "ids" parameter and
     * responds with a JSON success payload.
     */
    public void delete(){
        Long[] ids = getParaValuesToLong("ids");
        // Guard: a request without any "ids" parameter must not throw an NPE.
        if(ids != null){
            for(Long id : ids){
                new Member().dao().deleteById(id);
            }
        }
        renderJson(Feedback.success(new HashMap<>()));
    }
}
|
package api.support.fixtures;
import static org.folio.circulation.support.JsonPropertyFetcher.getProperty;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
import org.folio.circulation.domain.RequestType;
import org.folio.circulation.support.http.client.IndividualResource;
import api.support.builders.RequestPolicyBuilder;
import api.support.http.ResourceClient;
public class RequestPoliciesFixture {
    // Creates policy records on demand, keyed by their "name" JSON property.
    private final RecordCreator recordCreator;

    /**
     * @param requestPoliciesClient client for the request-policies storage endpoint
     */
    public RequestPoliciesFixture(ResourceClient requestPoliciesClient) {
        recordCreator = new RecordCreator(requestPoliciesClient,
            json -> getProperty(json, "name"));
    }

    /**
     * Creates (or reuses) a policy permitting hold, page and recall requests.
     */
    public IndividualResource allowAllRequestPolicy()
        throws InterruptedException,
        MalformedURLException,
        TimeoutException,
        ExecutionException {

        return customRequestPolicy(typesOf(RequestType.HOLD, RequestType.PAGE, RequestType.RECALL));
    }

    /**
     * Creates (or reuses) a policy permitting exactly the given request types.
     */
    public IndividualResource customRequestPolicy(ArrayList<RequestType> types)
        throws InterruptedException,
        MalformedURLException,
        TimeoutException,
        ExecutionException {

        return recordCreator.createIfAbsent(new RequestPolicyBuilder(types));
    }

    /**
     * Creates (or reuses) a named, described policy permitting the given request types.
     */
    public IndividualResource customRequestPolicy(ArrayList<RequestType> types, String name, String description)
        throws InterruptedException,
        MalformedURLException,
        TimeoutException,
        ExecutionException {

        return recordCreator.createIfAbsent(new RequestPolicyBuilder(types, name, description));
    }

    /** Creates (or reuses) a recall-only policy. */
    public IndividualResource recallRequestPolicy()
        throws InterruptedException,
        MalformedURLException,
        TimeoutException,
        ExecutionException {

        return customRequestPolicy(typesOf(RequestType.RECALL), "Recall request policy", "sample recall policy");
    }

    /** Creates (or reuses) a hold-only policy. */
    public IndividualResource holdRequestPolicy()
        throws InterruptedException,
        MalformedURLException,
        TimeoutException,
        ExecutionException {

        return customRequestPolicy(typesOf(RequestType.HOLD));
    }

    /** Creates (or reuses) a page-only policy. */
    public IndividualResource pageRequestPolicy()
        throws InterruptedException,
        MalformedURLException,
        TimeoutException,
        ExecutionException {

        return customRequestPolicy(typesOf(RequestType.PAGE));
    }

    /** Deletes the given policy record. */
    public void deleteRequestPolicy(IndividualResource policyToDelete)
        throws InterruptedException,
        MalformedURLException,
        TimeoutException,
        ExecutionException {

        recordCreator.delete(policyToDelete);
    }

    /** Looks up an already-created policy by name, without creating one. */
    public IndividualResource findRequestPolicy(String requestPolicyName) {
        return recordCreator.getExistingRecord(requestPolicyName);
    }

    /** Builds a mutable list of the given request types, preserving order. */
    private static ArrayList<RequestType> typesOf(RequestType... types) {
        ArrayList<RequestType> list = new ArrayList<>();
        for (RequestType type : types) {
            list.add(type);
        }
        return list;
    }
}
|
package appmanager;
import java.io.File;

import org.openqa.selenium.By;
import org.openqa.selenium.NoAlertPresentException;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
/**
 * Base class for page-helper objects: thin, null-safe wrappers around common
 * WebDriver interactions (click, type, file attach, alert handling).
 */
public class HelperBase {
    protected WebDriver wd;

    public HelperBase(WebDriver wd) {
        this.wd = wd;
    }

    /** Clicks the element identified by {@code locator}. */
    protected void click(By locator) {
        wd.findElement(locator).click();
    }

    /**
     * Types {@code text} into the field identified by {@code locator}.
     * The field is clicked first; it is only cleared and re-typed when its
     * current value differs from {@code text}. A null {@code text} leaves the
     * field untouched (beyond the focusing click).
     */
    protected void type(By locator, String text) {
        click(locator);
        if (text != null) {
            // Resolve the element once instead of three separate lookups.
            WebElement field = wd.findElement(locator);
            String existingText = field.getAttribute("value");
            if (!text.equals(existingText)) {
                field.clear();
                field.sendKeys(text);
            }
        }
    }

    /** Attaches {@code file} to a file-input element; a null file is a no-op. */
    protected void attach(By locator, File file) {
        if (file != null) {
            wd.findElement(locator).sendKeys(file.getAbsolutePath());
        }
    }

    /** Accepts (confirms) the currently open alert/confirm dialog. */
    protected void closeDialogWindow() {
        wd.switchTo().alert().accept();
    }

    /**
     * @return true when a browser alert is currently open, false otherwise.
     */
    public boolean isAlertPresent() {
        try {
            wd.switchTo().alert();
            return true;
        } catch (NoAlertPresentException e) {
            // Bug fix: switchTo().alert() signals a missing alert with
            // NoAlertPresentException, not NoSuchElementException (they are
            // sibling types), so the previous catch clause never matched and
            // the exception escaped to the caller.
            return false;
        }
    }

    /** @return true when at least one element matches {@code locator}. */
    protected boolean isElementPresent(By locator) {
        try {
            wd.findElement(locator);
            return true;
        } catch (NoSuchElementException ex) {
            return false;
        }
    }
}
|
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.muzei.util;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.os.Handler;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.support.v4.os.ParcelableCompat;
import android.support.v4.os.ParcelableCompatCreatorCallbacks;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.widget.EdgeEffect;
import android.widget.OverScroller;
/**
* View which supports panning around an image larger than the screen size. Supports both scrolling
* and flinging
*/
public class PanView extends View {
    private static final String TAG = "PanView";

    // Source image exactly as handed to setImage().
    private Bitmap mImage;
    // mImage rescaled in updateScaledImage() so it fills the view along one axis.
    private Bitmap mScaledImage;
    // Pre-computed blurred copy of mScaledImage, cross-faded in during onDraw().
    private Bitmap mBlurredImage;
    // Cross-fade fraction: 0 = fully sharp, 1 = fully blurred.
    private float mBlurAmount = 0f;
    private Paint mDrawBlurredPaint;
    /**
     * Horizontal offset for painting the image. As this is used in a canvas.drawBitmap it ranges
     * from a negative value mWidth-image.getWidth() (remember the view is smaller than the image)
     * to zero. If it is zero that means the offsetX side of the image is visible otherwise it is
     * off screen and we are farther to the right.
     */
    private float mOffsetX;
    /**
     * Vertical offset for painting the image. As this is used in a canvas.drawBitmap it ranges
     * from a negative value mHeight-image.getHeight() (remember the view is smaller than the image)
     * to zero. If it is zero that means the offsetY side of the image is visible otherwise it is
     * off screen and we are farther down.
     */
    private float mOffsetY;
    /**
     * View width (kept >= 1 so scaling math never divides by zero)
     */
    private int mWidth = 1;
    /**
     * View height (kept >= 1 so scaling math never divides by zero)
     */
    private int mHeight = 1;

    // State objects and values related to gesture tracking.
    private GestureDetector mGestureDetector;
    private OverScroller mScroller;
    /**
     * Handler for posting fling animation updates
     */
    private Handler mHandler = new Handler();

    // Edge effect / overscroll tracking objects.
    private EdgeEffect mEdgeEffectTop;
    private EdgeEffect mEdgeEffectBottom;
    private EdgeEffect mEdgeEffectLeft;
    private EdgeEffect mEdgeEffectRight;
    // "Active" flags record that an edge glow was triggered by the current
    // gesture/fling, so it is not re-absorbed every animation tick.
    private boolean mEdgeEffectTopActive;
    private boolean mEdgeEffectBottomActive;
    private boolean mEdgeEffectLeftActive;
    private boolean mEdgeEffectRightActive;

    public PanView(Context context) {
        this(context, null, 0);
    }

    public PanView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public PanView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        // Sets up interactions
        mGestureDetector = new GestureDetector(context, new ScrollFlingGestureListener());
        mScroller = new OverScroller(context);
        mEdgeEffectLeft = new EdgeEffect(context);
        mEdgeEffectTop = new EdgeEffect(context);
        mEdgeEffectRight = new EdgeEffect(context);
        mEdgeEffectBottom = new EdgeEffect(context);
        mDrawBlurredPaint = new Paint();
        mDrawBlurredPaint.setDither(true);
    }

    /**
     * Sets an image to be displayed. Preferably this image should be larger than this view's size
     * to allow scrolling. Note that the image will be centered on first display
     * @param image Image to display
     */
    public void setImage(Bitmap image) {
        mImage = image;
        updateScaledImage();
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        // Clamp to >= 1: the scaling math below divides by these values.
        mWidth = Math.max(1, w);
        mHeight = Math.max(1, h);
        updateScaledImage();
    }

    /**
     * Rescales mImage to fill the view along one axis, regenerates the blurred
     * copy, and re-centers the pan offsets. No-op until both an image and a
     * non-degenerate size are available.
     */
    private void updateScaledImage() {
        if (mImage == null || mImage.getWidth() == 0 || mImage.getHeight() == 0) {
            return;
        }
        int width = mImage.getWidth();
        int height = mImage.getHeight();
        // NOTE(review): branch is chosen by the image's own aspect (landscape vs
        // portrait), not by comparing image and view aspect ratios — presumably
        // landscape images always fill the view height; confirm a very wide view
        // cannot be left with horizontal gaps.
        if (width > height) {
            float scalingFactor = mHeight * 1f / height;
            int scaledWidth = Math.max(1, (int) (scalingFactor * width));
            mScaledImage = Bitmap.createScaledBitmap(mImage, scaledWidth, mHeight, true);
        } else {
            float scalingFactor = mWidth * 1f / width;
            int scaledHeight = Math.max(1, (int) (scalingFactor * height));
            mScaledImage = Bitmap.createScaledBitmap(mImage, mWidth, scaledHeight, true);
        }
        ImageBlurrer blurrer = new ImageBlurrer(getContext(), mScaledImage);
        mBlurredImage = blurrer.blurBitmap(ImageBlurrer.MAX_SUPPORTED_BLUR_PIXELS, 0f);
        blurrer.destroy();
        // Center the image (integer division: offsets snap to whole pixels here)
        mOffsetX = (mWidth - mScaledImage.getWidth()) / 2;
        mOffsetY = (mHeight - mScaledImage.getHeight()) / 2;
        invalidate();
    }

    /**
     * Sets the sharp/blurred cross-fade fraction (0 = sharp, 1 = blurred) and
     * schedules a redraw on the next animation frame.
     */
    public void setBlurAmount(float blurAmount) {
        mBlurAmount = blurAmount;
        postInvalidateOnAnimation();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // Cross-fade: draw the sharp layer unless fully blurred, then the blurred
        // layer on top with alpha proportional to mBlurAmount.
        if (mBlurAmount < 1f) {
            if (mScaledImage != null) {
                canvas.drawBitmap(mScaledImage, mOffsetX, mOffsetY, null);
            }
        }
        if (mBlurAmount > 0f) {
            if (mBlurredImage != null) {
                mDrawBlurredPaint.setAlpha((int) (mBlurAmount * 255));
                canvas.drawBitmap(mBlurredImage, mOffsetX, mOffsetY, mDrawBlurredPaint);
            }
        }
        drawEdgeEffects(canvas);
    }

    /**
     * Draws the overscroll "glow" at the four edges, if necessary
     *
     * @see EdgeEffect
     */
    private void drawEdgeEffects(Canvas canvas) {
        // The methods below rotate and translate the canvas as needed before drawing the glow,
        // since EdgeEffect always draws a top-glow at 0,0.
        boolean needsInvalidate = false;
        if (!mEdgeEffectTop.isFinished()) {
            final int restoreCount = canvas.save();
            mEdgeEffectTop.setSize(mWidth, mHeight);
            if (mEdgeEffectTop.draw(canvas)) {
                needsInvalidate = true;
            }
            canvas.restoreToCount(restoreCount);
        }
        if (!mEdgeEffectBottom.isFinished()) {
            final int restoreCount = canvas.save();
            canvas.translate(-mWidth, mHeight);
            canvas.rotate(180, mWidth, 0);
            mEdgeEffectBottom.setSize(mWidth, mHeight);
            if (mEdgeEffectBottom.draw(canvas)) {
                needsInvalidate = true;
            }
            canvas.restoreToCount(restoreCount);
        }
        if (!mEdgeEffectLeft.isFinished()) {
            final int restoreCount = canvas.save();
            canvas.translate(0, mHeight);
            canvas.rotate(-90, 0, 0);
            //noinspection SuspiciousNameCombination
            mEdgeEffectLeft.setSize(mHeight, mWidth);
            if (mEdgeEffectLeft.draw(canvas)) {
                needsInvalidate = true;
            }
            canvas.restoreToCount(restoreCount);
        }
        if (!mEdgeEffectRight.isFinished()) {
            final int restoreCount = canvas.save();
            canvas.translate(mWidth, 0);
            canvas.rotate(90, 0, 0);
            //noinspection SuspiciousNameCombination
            mEdgeEffectRight.setSize(mHeight, mWidth);
            if (mEdgeEffectRight.draw(canvas)) {
                needsInvalidate = true;
            }
            canvas.restoreToCount(restoreCount);
        }
        if (needsInvalidate) {
            invalidate();
        }
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////
    //
    // Methods and objects related to gesture handling
    //
    ////////////////////////////////////////////////////////////////////////////////////////////////

    @Override
    public boolean onTouchEvent(@NonNull MotionEvent event) {
        return mGestureDetector.onTouchEvent(event) || super.onTouchEvent(event);
    }

    /**
     * Sets the pan offsets, clamping each axis to the legal range
     * [viewSize - imageSize, 0] so the image never detaches from an edge.
     */
    private void setOffset(float offsetX, float offsetY) {
        if (mScaledImage == null) {
            return;
        }
        // Constrain between mWidth - mScaledImage.getWidth() and 0
        // mWidth - mScaledImage.getWidth() -> right edge visible
        // 0 -> left edge visible
        mOffsetX = Math.min(0, Math.max(mWidth - mScaledImage.getWidth(), offsetX));
        // Constrain between mHeight - mScaledImage.getHeight() and 0
        // mHeight - mScaledImage.getHeight() -> bottom edge visible
        // 0 -> top edge visible
        mOffsetY = Math.min(0, Math.max(mHeight - mScaledImage.getHeight(), offsetY));
    }

    /**
     * The gesture listener, used for handling simple gestures such as scrolls and flings.
     */
    private class ScrollFlingGestureListener extends GestureDetector.SimpleOnGestureListener {
        @Override
        public boolean onDown(MotionEvent e) {
            // A new touch cancels any in-flight fling and edge glows.
            releaseEdgeEffects();
            mScroller.forceFinished(true);
            invalidate();
            return true;
        }

        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
            if (mScaledImage == null) {
                return true;
            }
            // Remember pre-clamp offsets: if the clamped result differs from the
            // requested (offset - distance) value, the scroll hit an edge and the
            // corresponding glow should be pulled.
            float offsetX = mOffsetX;
            float offsetY = mOffsetY;
            setOffset(mOffsetX - distanceX, mOffsetY - distanceY);
            if (Log.isLoggable(TAG, Log.VERBOSE)) {
                Log.v(TAG, "Scrolling to " + mOffsetX + ", " + mOffsetY);
            }
            // Each edge check also skips axes where the image exactly fits the view.
            if (mWidth != mScaledImage.getWidth() && mOffsetX < offsetX - distanceX) {
                if (Log.isLoggable(TAG, Log.VERBOSE)) {
                    Log.v(TAG, "Left edge pulled " + -distanceX);
                }
                mEdgeEffectLeft.onPull(-distanceX * 1f / mWidth);
                mEdgeEffectLeftActive = true;
            }
            if (mHeight != mScaledImage.getHeight() && mOffsetY < offsetY - distanceY) {
                if (Log.isLoggable(TAG, Log.VERBOSE)) {
                    Log.v(TAG, "Top edge pulled " + distanceY);
                }
                mEdgeEffectTop.onPull(-distanceY * 1f / mHeight);
                mEdgeEffectTopActive = true;
            }
            if (mHeight != mScaledImage.getHeight() && mOffsetY > offsetY - distanceY) {
                if (Log.isLoggable(TAG, Log.VERBOSE)) {
                    Log.v(TAG, "Bottom edge pulled " + -distanceY);
                }
                mEdgeEffectBottom.onPull(distanceY * 1f / mHeight);
                mEdgeEffectBottomActive = true;
            }
            if (mWidth != mScaledImage.getWidth() && mOffsetX > offsetX - distanceX) {
                if (Log.isLoggable(TAG, Log.VERBOSE)) {
                    Log.v(TAG, "Right edge pulled " + distanceX);
                }
                mEdgeEffectRight.onPull(distanceX * 1f / mWidth);
                mEdgeEffectRightActive = true;
            }
            invalidate();
            return true;
        }

        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            if (mScaledImage == null) {
                return true;
            }
            releaseEdgeEffects();
            mScroller.forceFinished(true);
            mScroller.fling(
                    (int) mOffsetX,
                    (int) mOffsetY,
                    (int) velocityX,
                    (int) velocityY,
                    mWidth - mScaledImage.getWidth(), 0, // mWidth - mScaledImage.getWidth() is negative
                    mHeight - mScaledImage.getHeight(), 0, // mHeight - mScaledImage.getHeight() is negative
                    mScaledImage.getWidth() / 2,
                    mScaledImage.getHeight() / 2);
            postAnimateTick();
            invalidate();
            return true;
        }

        /** Clears all edge-glow state ahead of a new gesture or fling. */
        private void releaseEdgeEffects() {
            mEdgeEffectLeftActive
                    = mEdgeEffectTopActive
                    = mEdgeEffectRightActive
                    = mEdgeEffectBottomActive
                    = false;
            mEdgeEffectLeft.onRelease();
            mEdgeEffectTop.onRelease();
            mEdgeEffectRight.onRelease();
            mEdgeEffectBottom.onRelease();
        }
    }

    /** (Re)schedules one fling-animation tick on the UI handler. */
    private void postAnimateTick() {
        mHandler.removeCallbacks(mAnimateTickRunnable);
        mHandler.post(mAnimateTickRunnable);
    }

    // Fling animation step: advances the scroller, absorbs velocity into the
    // appropriate edge glow when an edge is hit, and re-posts itself while the
    // fling is active.
    // NOTE(review): dereferences mScaledImage without a null check — a fling can
    // only start from onFling, which bails out when it is null; confirm no other
    // code path starts the scroller.
    private Runnable mAnimateTickRunnable = new Runnable() {
        @Override
        public void run() {
            boolean needsInvalidate = false;
            if (mScroller.computeScrollOffset()) {
                // The scroller isn't finished, meaning a fling is currently active.
                setOffset(mScroller.getCurrX(), mScroller.getCurrY());
                // If clamping moved us away from the scroller position, the fling
                // hit an edge: absorb the remaining velocity into that glow once.
                if (mWidth != mScaledImage.getWidth() && mOffsetX < mScroller.getCurrX()
                        && mEdgeEffectLeft.isFinished()
                        && !mEdgeEffectLeftActive) {
                    if (Log.isLoggable(TAG, Log.VERBOSE)) {
                        Log.v(TAG, "Left edge absorbing " + mScroller.getCurrVelocity());
                    }
                    mEdgeEffectLeft.onAbsorb((int) mScroller.getCurrVelocity());
                    mEdgeEffectLeftActive = true;
                } else if (mWidth != mScaledImage.getWidth() && mOffsetX > mScroller.getCurrX()
                        && mEdgeEffectRight.isFinished()
                        && !mEdgeEffectRightActive) {
                    if (Log.isLoggable(TAG, Log.VERBOSE)) {
                        Log.v(TAG, "Right edge absorbing " + mScroller.getCurrVelocity());
                    }
                    mEdgeEffectRight.onAbsorb((int) mScroller.getCurrVelocity());
                    mEdgeEffectRightActive = true;
                }
                if (mHeight != mScaledImage.getHeight() && mOffsetY < mScroller.getCurrY()
                        && mEdgeEffectTop.isFinished()
                        && !mEdgeEffectTopActive) {
                    if (Log.isLoggable(TAG, Log.VERBOSE)) {
                        Log.v(TAG, "Top edge absorbing " + mScroller.getCurrVelocity());
                    }
                    mEdgeEffectTop.onAbsorb((int) mScroller.getCurrVelocity());
                    mEdgeEffectTopActive = true;
                } else if (mHeight != mScaledImage.getHeight() && mOffsetY > mScroller.getCurrY()
                        && mEdgeEffectBottom.isFinished()
                        && !mEdgeEffectBottomActive) {
                    if (Log.isLoggable(TAG, Log.VERBOSE)) {
                        Log.v(TAG, "Bottom edge absorbing " + mScroller.getCurrVelocity());
                    }
                    mEdgeEffectBottom.onAbsorb((int) mScroller.getCurrVelocity());
                    mEdgeEffectBottomActive = true;
                }
                if (Log.isLoggable(TAG, Log.VERBOSE)) {
                    Log.v(TAG, "Flinging to " + mOffsetX + ", " + mOffsetY);
                }
                needsInvalidate = true;
            }
            if (needsInvalidate) {
                invalidate();
                postAnimateTick();
            }
        }
    };

    ////////////////////////////////////////////////////////////////////////////////////////////////
    //
    // Methods and classes related to view state persistence.
    //
    ////////////////////////////////////////////////////////////////////////////////////////////////

    @Override
    public Parcelable onSaveInstanceState() {
        Parcelable superState = super.onSaveInstanceState();
        SavedState ss = new SavedState(superState);
        ss.offsetX = mOffsetX;
        ss.offsetY = mOffsetY;
        return ss;
    }

    @Override
    public void onRestoreInstanceState(Parcelable state) {
        if (!(state instanceof SavedState)) {
            super.onRestoreInstanceState(state);
            return;
        }
        SavedState ss = (SavedState) state;
        super.onRestoreInstanceState(ss.getSuperState());
        mOffsetX = ss.offsetX;
        mOffsetY = ss.offsetY;
    }

    /**
     * Persistent state that is saved by PanView.
     */
    public static class SavedState extends BaseSavedState {
        private float offsetX;
        private float offsetY;

        public SavedState(Parcelable superState) {
            super(superState);
        }

        @Override
        public void writeToParcel(@NonNull Parcel out, int flags) {
            super.writeToParcel(out, flags);
            out.writeFloat(offsetX);
            out.writeFloat(offsetY);
        }

        @Override
        public String toString() {
            return "PanView.SavedState{"
                    + Integer.toHexString(System.identityHashCode(this))
                    + " offset=" + offsetX + ", " + offsetY + "}";
        }

        public static final Creator<SavedState> CREATOR
                = ParcelableCompat.newCreator(new ParcelableCompatCreatorCallbacks<SavedState>() {
            @Override
            public SavedState createFromParcel(Parcel in, ClassLoader loader) {
                return new SavedState(in);
            }

            @Override
            public SavedState[] newArray(int size) {
                return new SavedState[size];
            }
        });

        SavedState(Parcel in) {
            super(in);
            offsetX = in.readFloat();
            offsetY = in.readFloat();
        }
    }
}
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.security.keyvault.keys.cryptography;
import com.azure.core.annotation.ServiceClientBuilder;
import com.azure.core.credential.TokenCredential;
import com.azure.core.cryptography.AsyncKeyEncryptionKey;
import com.azure.core.cryptography.AsyncKeyEncryptionKeyResolver;
import com.azure.core.cryptography.KeyEncryptionKey;
import com.azure.core.cryptography.KeyEncryptionKeyResolver;
import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.policy.HttpLogDetailLevel;
import com.azure.core.http.policy.HttpLogOptions;
import com.azure.core.http.policy.HttpPipelinePolicy;
import com.azure.core.http.policy.HttpLoggingPolicy;
import com.azure.core.util.Configuration;
import com.azure.core.util.logging.ClientLogger;
import reactor.core.publisher.Mono;
/**
* This class provides a fluent builder API to help aid the configuration and instantiation of the {@link
* AsyncKeyEncryptionKey KeyEncryptionKey async client} and {@link KeyEncryptionKey KeyEncryptionKey sync client},
 * by calling {@link KeyEncryptionKeyClientBuilder#buildAsyncKeyEncryptionKey(String)} and {@link
 * KeyEncryptionKeyClientBuilder#buildKeyEncryptionKey(String)} respectively.
 * It constructs an instance of the desired client.
 *
 * <p> The minimal configuration options required by {@link KeyEncryptionKeyClientBuilder} to
 * build {@link AsyncKeyEncryptionKey} are a {@link String identifier} and a
 * {@link TokenCredential credential}.</p>
*
* <p>The {@link HttpLogDetailLevel log detail level}, multiple custom {@link HttpLoggingPolicy policies} and custom
* {@link HttpClient http client} can be optionally configured in the {@link KeyEncryptionKeyClientBuilder}.</p>
*
* <p>Alternatively, custom {@link HttpPipeline http pipeline} with custom {@link HttpPipelinePolicy} policies
* can be specified. It provides finer control over the construction of {@link AsyncKeyEncryptionKey} and {@link
* KeyEncryptionKey}</p>
*
 * <p> The minimal configuration options required by {@link KeyEncryptionKeyClientBuilder keyEncryptionKeyClientBuilder}
 * to build {@link KeyEncryptionKey} are a {@link String key identifier} and a
 * {@link TokenCredential credential}.</p>
*
* @see KeyEncryptionKeyAsyncClient
* @see KeyEncryptionKeyClient
*/
@ServiceClientBuilder(serviceClients = {KeyEncryptionKeyClient.class, KeyEncryptionKeyAsyncClient.class})
public final class KeyEncryptionKeyClientBuilder implements KeyEncryptionKeyResolver, AsyncKeyEncryptionKeyResolver {
private final ClientLogger logger = new ClientLogger(KeyEncryptionKeyClientBuilder.class);
private final CryptographyClientBuilder builder;
/**
* The constructor with defaults.
*/
public KeyEncryptionKeyClientBuilder() {
builder = new CryptographyClientBuilder();
}
/**
* Creates a {@link KeyEncryptionKey} based on options set in the builder.
* Every time {@code buildKeyEncryptionKey(String)} is called, a new instance of {@link KeyEncryptionKey}
* is created.
*
* <p>If {@link KeyEncryptionKeyClientBuilder#pipeline(HttpPipeline) pipeline} is set, then the {@code pipeline}
* and {@code keyId} are used to create the {@link KeyEncryptionKeyClient client}.
* All other builder settings are ignored. If {@code pipeline} is not set, then
* {@link KeyEncryptionKeyClientBuilder#credential(TokenCredential) vault credential} and {@code keyId}
* are required to build the {@link KeyEncryptionKeyClient client}.</p>
*
* @return A {@link KeyEncryptionKeyClient} with the options set from the builder.
* @throws IllegalStateException If {@link KeyEncryptionKeyClientBuilder#credential(TokenCredential)} or
* {@code keyId} have not been set.
*/
@Override
public KeyEncryptionKey buildKeyEncryptionKey(String keyId) {
return new KeyEncryptionKeyClient((KeyEncryptionKeyAsyncClient) buildAsyncKeyEncryptionKey(keyId).block());
}
/**
* Creates a {@link KeyEncryptionKeyAsyncClient} based on options set in the builder.
* Every time {@code buildAsyncKeyEncryptionKey(String)} is called, a new instance of
* {@link KeyEncryptionKeyAsyncClient} is created.
*
* <p>If {@link KeyEncryptionKeyClientBuilder#pipeline(HttpPipeline) pipeline} is set, then the {@code pipeline}
* and {@code keyId} are used to create the {@link KeyEncryptionKeyAsyncClient async client}.
* All other builder settings are ignored. If {@code pipeline} is not set, then
* ({@link KeyEncryptionKeyClientBuilder#credential(TokenCredential) jsonWebKey vault credential} and
* {@code keyId} are required to build the {@link KeyEncryptionKeyAsyncClient async client}.</p>
*
* @return A {@link KeyEncryptionKeyAsyncClient} with the options set from the builder.
* @throws IllegalStateException If {@link KeyEncryptionKeyClientBuilder#credential(TokenCredential)} or
* {@code keyId} have not been set.
*/
@Override
public Mono<? extends AsyncKeyEncryptionKey> buildAsyncKeyEncryptionKey(String keyId) {
builder.keyIdentifier(keyId);
if (Strings.isNullOrEmpty(keyId)) {
throw logger.logExceptionAsError(new IllegalStateException(
"Json Web Key or jsonWebKey identifier are required to create key encryption key async client"));
}
CryptographyServiceVersion serviceVersion = builder.getServiceVersion() != null ? builder.getServiceVersion() : CryptographyServiceVersion.getLatest();
if (builder.getPipeline() != null) {
return Mono.defer(() -> Mono.just(new KeyEncryptionKeyAsyncClient(keyId, builder.getPipeline(), serviceVersion)));
}
if (builder.getCredential() == null) {
throw logger.logExceptionAsError(new IllegalStateException(
"Key Vault credentials are required to build the key encryption key async client"));
}
HttpPipeline pipeline = builder.setupPipeline(serviceVersion);
return Mono.defer(() -> Mono.just(new KeyEncryptionKeyAsyncClient(keyId, pipeline, serviceVersion)));
}
/**
* Sets the credential to use when authenticating HTTP requests.
*
* @param credential The credential to use for authenticating HTTP requests.
* @return the updated builder object.
* @throws NullPointerException if {@code credential} is {@code null}.
*/
public KeyEncryptionKeyClientBuilder credential(TokenCredential credential) {
builder.credential(credential);
return this;
}
/**
* Sets the logging configuration for HTTP requests and responses.
*
* <p> If logLevel is not provided, default value of {@link HttpLogDetailLevel#NONE} is set.</p>
*
* @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return the updated builder object.
*/
public KeyEncryptionKeyClientBuilder httpLogOptions(HttpLogOptions logOptions) {
builder.httpLogOptions(logOptions);
return this;
}
/**
* Adds a policy to the set of existing policies that are executed after the client required policies.
*
* @param policy The {@link HttpPipelinePolicy policy} to be added.
* @return the updated builder object.
* @throws NullPointerException if {@code policy} is {@code null}.
*/
public KeyEncryptionKeyClientBuilder addPolicy(HttpPipelinePolicy policy) {
builder.addPolicy(policy);
return this;
}
/**
* Sets the HTTP client to use for sending and receiving requests to and from the service.
*
* @param client The HTTP client to use for requests.
* @return the updated builder object.
* @throws NullPointerException If {@code client} is {@code null}.
*/
public KeyEncryptionKeyClientBuilder httpClient(HttpClient client) {
builder.httpClient(client);
return this;
}
/**
* Sets the HTTP pipeline to use for the service client.
*
* If {@code pipeline} is set, all other settings are ignored, aside from jsonWebKey identifier
* or jsonWebKey to build the clients.
*
* @param pipeline The HTTP pipeline to use for sending service requests and receiving responses.
* @return the updated builder object.
*/
public KeyEncryptionKeyClientBuilder pipeline(HttpPipeline pipeline) {
builder.pipeline(pipeline);
return this;
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* The default configuration store is a clone of the {@link Configuration#getGlobalConfiguration() global
* configuration store}, use {@link Configuration#NONE} to bypass using configuration settings during construction.
*
* @param configuration The configuration store used to
* @return the updated builder object.
*/
public KeyEncryptionKeyClientBuilder configuration(Configuration configuration) {
builder.configuration(configuration);
return this;
}
/**
* Sets the {@link CryptographyServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version the client library will have the result of potentially moving to a newer service version.
*
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
* @return The updated builder object.
*/
public KeyEncryptionKeyClientBuilder serviceVersion(CryptographyServiceVersion version) {
builder.serviceVersion(version);
return this;
}
}
|
package me.erudev.ch02._0003;
/**
 * 0003. Remove the middle node of a linked list (and the node at position a/b).
 *
 * @author pengfei.zhao
 * @date 2020/12/6 12:59
 */
public class Solution {
    static class Node {
        private int value;
        private Node next;
        public Node(int value) {
            this.value = value;
        }
    }

    /**
     * Removes the middle node of a singly linked list and returns the new head.
     * For an even-length list the upper middle (the n/2-th node, 1-indexed from
     * the second half boundary) is removed; a list of zero or one node becomes
     * empty.
     *
     * @param head head of the list (may be {@code null})
     * @return the head of the list after removal, or {@code null} if nothing remains
     */
    public static Node removeMidNode(Node head) {
        // Empty list or a single node: removing the middle leaves nothing.
        if (head == null || head.next == null) {
            return null;
        }
        // Exactly two nodes: the first node is the middle; drop it.
        if (head.next.next == null) {
            return head.next;
        }
        // trail lags one node behind the middle while fast moves two steps
        // per iteration; when fast runs out, trail.next is the middle node.
        Node trail = head;
        for (Node fast = head.next.next; fast.next != null && fast.next.next != null; fast = fast.next.next) {
            trail = trail.next;
        }
        // Unlink the middle node.
        trail.next = trail.next.next;
        return head;
    }
}
|
package cn.springcloud.gray.server.event;
import cn.springcloud.gray.event.EventType;
import cn.springcloud.gray.event.SourceType;
/**
 * Converts a raw event source object into the representation appropriate for the
 * given event and source type.
 * <p>
 * NOTE(review): contract inferred from the signature only — confirm the expected
 * return type against the implementations of this interface.
 */
public interface EventSourceConverter {
    /**
     * @param eventType type of the event being processed
     * @param sourceType type of the originating source
     * @param source raw source object to convert
     * @return the converted source object
     */
    Object convert(EventType eventType, SourceType sourceType, Object source);
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.vxquery.runtime.functions.datetime;
import org.apache.vxquery.datamodel.accessors.TaggedValuePointable;
import org.apache.vxquery.datamodel.accessors.atomic.XSDatePointable;
import org.apache.vxquery.datamodel.values.ValueTag;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
/**
 * Scalar evaluator factory for fn:year-from-date: extracts the year component
 * of an xs:date value as an integer.
 */
public class FnYearFromDateScalarEvaluatorFactory extends AbstractValueFromDateTimeScalarEvaluatorFactory {
    private static final long serialVersionUID = 1L;

    public FnYearFromDateScalarEvaluatorFactory(IScalarEvaluatorFactory[] args) {
        super(args);
    }

    /** Restricts accepted input to xs:date values. */
    @Override
    protected int getInputTag() {
        return ValueTag.XS_DATE_TAG;
    }

    /** Decodes the tagged value as an xs:date and returns its year component. */
    @Override
    protected long getValueAsInteger(TaggedValuePointable tvp) {
        XSDatePointable date = (XSDatePointable) XSDatePointable.FACTORY.createPointable();
        tvp.getValue(date);
        return date.getYear();
    }
}
|
/*
* Copyright 2019 Mi&Jack
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mijack.ppms.token;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* @author Mi&Jack
*/
@SpringBootApplication()
public class TokenApplication {
    /**
     * Bootstraps the Spring application context for the token service.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        SpringApplication.run(TokenApplication.class, args);
    }
}
|
/*
* Copyright (c) 2010, 2014, Oracle and/or its affiliates.
* All rights reserved. Use is subject to license terms.
*
* This file is available and licensed under the following license:
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the distribution.
* - Neither the name of Oracle Corporation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.javafx.experiments.importers.maya.values.impl;
import java.util.Iterator;
import java.util.List;
import com.javafx.experiments.importers.maya.MEnv;
import com.javafx.experiments.importers.maya.types.MDataType;
import com.javafx.experiments.importers.maya.values.MData;
/**
 * Base implementation of {@link MData} that layers path-based dereferencing
 * ("field", "[index]" and "[start:end]" slice syntax) on top of the index,
 * slice and field accessors. Subclasses supporting arrays or compounds
 * override the relevant accessors; the defaults here treat the value as a
 * scalar (only index 0 / slice [0:0] / the empty field name resolve to it).
 */
public abstract class MDataImpl implements MData {

    private final MDataType dataType;

    public MDataImpl(MDataType type) {
        dataType = type;
    }

    // Index access for those values which support it, such as array values
    @Override
    public MData getData(int index) {
        if (index == 0) {
            return this;
        }
        return null;
    }

    // Slice access for those values which support it, such as array values
    @Override
    public MData getData(int start, int end) {
        if (start == 0 && end == 0) {
            return this;
        }
        return null;
    }

    // Get the data associated with the given string path
    @Override
    public MData getData(String path) {
        return doGet(path, 0);
    }

    @Override
    public MEnv getEnv() {
        return getType().getEnv();
    }

    // Field access for those values which support it, such as compound values
    @Override
    public MData getFieldData(String name) {
        if (name.length() == 0) {
            return this;
        }
        return null;
    }

    @Override
    public MDataType getType() {
        return dataType;
    }

    @Override
    public abstract void parse(Iterator<String> iter);

    @Override
    public void parse(List<String> values) {
        parse(values.iterator());
    }

    /**
     * Parses the given values into the sub-value addressed by {@code field}.
     * Unknown fields are skipped; previously the null check only wrapped a
     * commented-out log statement and the subsequent dereference threw a
     * NullPointerException.
     */
    @Override
    public void parse(String field, List<String> values) {
        MData value = doGet(field, 0);
        if (value == null) {
            // Field not found on this value (e.g. unsupported attribute in the
            // source file): nothing to parse into, so bail out quietly.
            return;
        }
        value.parse(values);
    }

    @Override
    public void setSize(int size) {
        // nothing
    }

    /**
     * Dereferences from this MData down {@code path}, starting at offset
     * {@code start}. Handles '.' separators, "[i]" index access and "[i:j]"
     * slice access; plain segments are resolved via {@link #getFieldData}.
     *
     * @return the addressed value, or {@code null} if a field is missing
     */
    protected MData doGet(String path, int start) {
        if (start == path.length()) {
            return this;
        }
        int dot = path.indexOf('.', start);
        int bracket = path.indexOf('[', start);
        if (dot == start) {
            // Leading separator: skip it and continue.
            return doGet(path, start + 1);
        } else if (bracket == start) {
            int endBracket = path.indexOf(']', start);
            // Parse the decimal index (and optional ":end" slice bound) by hand.
            int sliceStart = 0;
            int sliceEnd = 0;
            int i = start + 1;
            for (; i < endBracket; i++) {
                if (path.charAt(i) == ':') {
                    break;
                }
                sliceStart *= 10;
                sliceStart += path.charAt(i) - '0';
            }
            if (path.charAt(i) == ':') {
                i++;
                for (; i < endBracket; i++) {
                    sliceEnd *= 10;
                    sliceEnd += path.charAt(i) - '0';
                }
                // FIXME: downcast undesirable
                return ((MDataImpl) getData(sliceStart,
                        sliceEnd)).doGet(path,
                        endBracket + 1);
            } else {
                // FIXME: downcast undesirable
                return ((MDataImpl) getData(sliceStart)).doGet(path,
                        endBracket + 1);
            }
        } else {
            // Plain field segment: ends at the next '.', '[', or end of path.
            int endIdx;
            if (dot < 0 && bracket < 0) {
                endIdx = path.length();
            } else {
                if (dot < 0) {
                    endIdx = bracket;
                } else if (bracket < 0) {
                    endIdx = dot;
                } else {
                    endIdx = Math.min(dot, bracket);
                }
            }
            String field = path.substring(start, endIdx);
            MData data = getFieldData(field);
            if (data == null) {
                return null;
            } else {
                // FIXME: downcast undesirable
                return ((MDataImpl) data).doGet(path, endIdx);
            }
        }
    }
}
|
package ir.co.bayan.simorq.zal.nutch.plugin.css;
import ir.co.bayan.simorq.zal.nutch.plugin.css.config.SelectorConfiguration;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.bind.JAXBException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.apache.nutch.metadata.Metadata;
import org.apache.nutch.parse.HTMLMetaTags;
import org.apache.nutch.parse.HtmlParseFilter;
import org.apache.nutch.parse.ParseResult;
import org.apache.nutch.protocol.Content;
import org.w3c.dom.DocumentFragment;
/**
 * Nutch HTML parse filter that runs a CSS-selector based extractor over the raw
 * page content and copies the extracted fields into the parse metadata.
 */
public class CssSelectorParseFilter implements HtmlParseFilter {

    /** Metadata key marking documents that matched one of the configured URLs. */
    public static final String MATCHED_DOC = "matched-doc";

    private static final Logger logger = Logger.getLogger(CssSelectorParseFilter.class);

    private Configuration configuration;
    private CssSelectorBasedExtractor extractor;
    private String defaultEncoding;

    @Override
    public ParseResult filter(Content content, ParseResult parseResult, HTMLMetaTags metaTags,
            DocumentFragment documentFragment) {
        try {
            Metadata parseMeta = parseResult.get(content.getUrl()).getData().getParseMeta();
            // Prefer the charset detected during parsing; fall back to the configured default.
            String charset = StringUtils.defaultString(parseMeta.get(Metadata.ORIGINAL_CHAR_ENCODING), defaultEncoding);
            String html = new String(content.getContent(), charset);
            Map<String, String> extracted = extractor.extract(content.getUrl(), html);
            if (extracted != null) {
                // Indicates that this document is matched with one of urls defined in the config.
                // This will be used in ParseMetadataIndexingFilter to decide whether exclude document or not
                parseMeta.add(MATCHED_DOC, "true");
                for (Entry<String, String> field : extracted.entrySet()) {
                    parseMeta.add(field.getKey(), field.getValue());
                }
            }
        } catch (IOException e) {
            logger.warn("", e);
        }
        return parseResult;
    }

    @Override
    public Configuration getConf() {
        return configuration;
    }

    @Override
    public void setConf(Configuration configuration) {
        this.configuration = configuration;
        defaultEncoding = configuration.get("parser.character.encoding.default", "UTF-8");
        try {
            initConf();
        } catch (UnsupportedEncodingException | JAXBException e) {
            logger.error("", e);
        }
    }

    /** Builds the extractor from the selector configuration embedded in the Hadoop conf. */
    private void initConf() throws UnsupportedEncodingException, JAXBException {
        extractor = new CssSelectorBasedExtractor(SelectorConfiguration.readConfig(configuration));
    }
}
|
package com.sequenceiq.freeipa.service.client;
import java.util.Set;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.ws.rs.InternalServerErrorException;
import com.sequenceiq.freeipa.client.FreeIpaClientExceptionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.google.common.base.Joiner;
import com.sequenceiq.cloudbreak.auth.ThreadBasedUserCrnProvider;
import com.sequenceiq.freeipa.client.FreeIpaClient;
import com.sequenceiq.freeipa.client.FreeIpaClientException;
import com.sequenceiq.freeipa.client.FreeIpaClientExceptionWrapper;
import com.sequenceiq.freeipa.client.model.Group;
import com.sequenceiq.freeipa.client.model.User;
import com.sequenceiq.freeipa.service.freeipa.FreeIpaClientFactory;
@Service
public class FreeipaClientTestService {

    private static final Logger LOGGER = LoggerFactory.getLogger(FreeipaClientTestService.class);

    @Inject
    private FreeIpaClientFactory freeIpaClientFactory;

    /**
     * Looks up a single user in the FreeIPA instance of the given stack, logging the result.
     *
     * @param id stack id used to obtain the FreeIPA client
     * @param name login name of the user to show
     * @return "END" on success, otherwise a short failure marker string
     */
    public String userShow(Long id, String name) {
        FreeIpaClient freeIpaClient;
        try {
            freeIpaClient = freeIpaClientFactory.getFreeIpaClientForStackId(id);
        } catch (Exception e) {
            LOGGER.error("Error creating FreeIpaClient", e);
            return "FAILED TO CREATE CLIENT";
        }
        try {
            User user = freeIpaClient.userShow(name);
            LOGGER.info("Groups: {}", user.getMemberOfGroup());
            LOGGER.info("Success: {}", user);
        } catch (Exception e) {
            LOGGER.error("Error showing user {}", name, e);
            return "FAILED TO SHOW USER";
        }
        return "END";
    }

    /**
     * Checks whether every requested user exists in the environment's FreeIPA.
     *
     * @throws InternalServerErrorException if the FreeIPA call fails
     */
    public Boolean checkUsers(String environmentCrn, Set<String> requestedUsers) {
        FreeIpaClient freeIpaClient = getClientByEnvironmentCrn(environmentCrn);
        try {
            LOGGER.info("Checking for users [{}] in environment {}", Joiner.on(",").join(requestedUsers), environmentCrn);
            Set<String> freeipaUsers = freeIpaClient.userFindAll().stream().map(User::getUid).collect(Collectors.toSet());
            LOGGER.debug("Users in freeipa: [{}]", Joiner.on(",").join(freeipaUsers));
            return freeipaUsers.containsAll(requestedUsers);
        } catch (FreeIpaClientException e) {
            LOGGER.error("Find user FreeIPA call failed!", e);
            // Preserve the cause so the original stack trace survives past this boundary.
            throw new InternalServerErrorException("Find user FreeIPA call failed!", e);
        }
    }

    /**
     * Checks whether every requested group exists in the environment's FreeIPA.
     *
     * @throws InternalServerErrorException if the FreeIPA call fails
     */
    public Boolean checkGroups(String environmentCrn, Set<String> requestGroups) {
        FreeIpaClient freeIpaClient = getClientByEnvironmentCrn(environmentCrn);
        try {
            LOGGER.info("Checking for groups [{}] in environment {}", Joiner.on(",").join(requestGroups), environmentCrn);
            Set<String> freeipaGroups = freeIpaClient.groupFindAll().stream().map(Group::getCn).collect(Collectors.toSet());
            LOGGER.debug("Groups in freeipa: [{}]", Joiner.on(",").join(freeipaGroups));
            return freeipaGroups.containsAll(requestGroups);
        } catch (FreeIpaClientException e) {
            LOGGER.error("Find group FreeIPA call failed!", e);
            // Preserve the cause so the original stack trace survives past this boundary.
            throw new InternalServerErrorException("Find group FreeIPA call failed!", e);
        }
    }

    /**
     * Checks whether the given group contains all requested users. A missing
     * group is treated as "false" rather than an error.
     *
     * @throws InternalServerErrorException if the FreeIPA call fails for any other reason
     */
    public Boolean checkUsersInGroup(String environmentCrn, Set<String> requestedUsers, String requestedGroup) {
        FreeIpaClient freeIpaClient = getClientByEnvironmentCrn(environmentCrn);
        try {
            LOGGER.info("Checking if group [{}] has users [{}] in environment {}", requestedGroup,
                    Joiner.on(",").join(requestedUsers), environmentCrn);
            Group freeipaGroup = freeIpaClient.groupShow(requestedGroup);
            LOGGER.debug("Group [{}] found in freeipa, it has users [{}].", freeipaGroup.getCn(), freeipaGroup.getMemberUser());
            return freeipaGroup.getMemberUser() != null && freeipaGroup.getMemberUser().containsAll(requestedUsers);
        } catch (FreeIpaClientException e) {
            if (FreeIpaClientExceptionUtil.isNotFoundException(e)) {
                LOGGER.debug("Group [{}] not found in freeipa", requestedGroup);
                return false;
            }
            LOGGER.error("Show group FreeIPA call failed!", e);
            // Preserve the cause so the original stack trace survives past this boundary.
            throw new InternalServerErrorException("Show group FreeIPA call failed!", e);
        }
    }

    /** Creates a FreeIPA client for the environment of the current account, wrapping failures. */
    private FreeIpaClient getClientByEnvironmentCrn(String environmentCrn) {
        try {
            return freeIpaClientFactory.getFreeIpaClientByAccountAndEnvironment(
                    environmentCrn, ThreadBasedUserCrnProvider.getAccountId());
        } catch (FreeIpaClientException e) {
            LOGGER.error("FreeIPA client cannot be created!", e);
            throw new FreeIpaClientExceptionWrapper(e);
        }
    }
}
|
/*
* Copyright (C) 2006 The Android Open Source Project
* Copyright (c) 2014 Chukong Technologies Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cocos2dx.lib;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnErrorListener;
import android.net.Uri;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.FrameLayout;
import android.widget.MediaController.MediaPlayerControl;
import java.io.IOException;
import java.util.Map;
/**
 * SurfaceView-based video player backed by {@link MediaPlayer}, identified by a
 * Cocos2d-x view tag. Playback transitions are reported to an
 * {@link OnVideoEventListener}; sizing supports an optional keep-aspect-ratio
 * mode driven by {@link #setVideoRect} and {@link #setKeepRatio}.
 */
public class Cocos2dxVideoView extends SurfaceView implements MediaPlayerControl {
    private String TAG = "VideoView";

    private Uri mUri;
    private int mDuration;

    // all possible internal states
    private static final int STATE_ERROR = -1;
    private static final int STATE_IDLE = 0;
    private static final int STATE_PREPARING = 1;
    private static final int STATE_PREPARED = 2;
    private static final int STATE_PLAYING = 3;
    private static final int STATE_PAUSED = 4;
    private static final int STATE_PLAYBACK_COMPLETED = 5;

    // mCurrentState is a VideoView object's current state.
    // mTargetState is the state that a method caller intends to reach.
    // For instance, regardless the VideoView object's current state,
    // calling pause() intends to bring the object to a target state
    // of STATE_PAUSED.
    private int mCurrentState = STATE_IDLE;
    private int mTargetState = STATE_IDLE;

    // All the stuff we need for playing and showing a video
    private SurfaceHolder mSurfaceHolder = null;
    private MediaPlayer mMediaPlayer = null;
    private int mVideoWidth = 0;
    private int mVideoHeight = 0;
    private OnVideoEventListener mOnVideoEventListener;
    private MediaPlayer.OnPreparedListener mOnPreparedListener;
    private int mCurrentBufferPercentage;
    private OnErrorListener mOnErrorListener;
    // recording the seek position while preparing
    private int mSeekWhenPrepared;

    protected Context mContext = null;
    protected int mViewLeft = 0;
    protected int mViewTop = 0;
    protected int mViewWidth = 0;
    protected int mViewHeight = 0;
    protected int mVisibleLeft = 0;
    protected int mVisibleTop = 0;
    protected int mVisibleWidth = 0;
    protected int mVisibleHeight = 0;
    private int mViewTag = 0;

    public Cocos2dxVideoView(Context context,int tag) {
        super(context);
        mViewTag = tag;
        mContext = context;
        initVideoView();
    }

    public Cocos2dxVideoView(Context context, AttributeSet attrs) {
        // The delegated constructor already stores the context and runs
        // initVideoView(); repeating those steps here registered the
        // SurfaceHolder callback twice.
        this(context, attrs, 0);
    }

    public Cocos2dxVideoView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        mContext = context;
        initVideoView();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Before the video dimensions are known, measure with the requested
        // view rect; afterwards use the computed visible rect.
        if (mVideoWidth == 0 || mVideoHeight == 0) {
            setMeasuredDimension(mViewWidth, mViewHeight);
            Log.e(TAG, ""+mViewWidth+ ":" +mViewHeight);
        }
        else {
            setMeasuredDimension(mVisibleWidth, mVisibleHeight);
            Log.e(TAG, ""+mVisibleWidth+ ":" +mVisibleHeight);
        }
    }

    /** Sets the rectangle the video may occupy; re-fits if dimensions are known. */
    public void setVideoRect(int left,int top,int maxWidth,int maxHeight) {
        mViewLeft = left;
        mViewTop = top;
        mViewWidth = maxWidth;
        mViewHeight = maxHeight;

        if (mVideoWidth != 0 && mVideoHeight != 0) {
            fixSize();
        }
    }

    public int resolveAdjustedSize(int desiredSize, int measureSpec) {
        int result = desiredSize;
        int specMode = MeasureSpec.getMode(measureSpec);
        int specSize = MeasureSpec.getSize(measureSpec);

        switch (specMode) {
            case MeasureSpec.UNSPECIFIED:
                /* Parent says we can be as big as we want. Just don't be larger
                 * than max size imposed on ourselves.
                 */
                result = desiredSize;
                break;

            case MeasureSpec.AT_MOST:
                /* Parent says we can be as big as we want, up to specSize.
                 * Don't be larger than specSize, and don't be larger than
                 * the max size imposed on ourselves.
                 */
                result = Math.min(desiredSize, specSize);
                break;

            case MeasureSpec.EXACTLY:
                // No choice. Do what we are told.
                result = specSize;
                break;
        }
        return result;
    }

    private boolean mNeedResume = false;

    @Override
    public void setVisibility(int visibility) {
        if (visibility == INVISIBLE) {
            // Remember whether we were playing (and where) so we can resume
            // when the view becomes visible again.
            mNeedResume = isPlaying();
            if (mNeedResume) {
                mSeekWhenPrepared = getCurrentPosition();
            }
        }
        else if (mNeedResume){
            start();
            mNeedResume = false;
        }
        super.setVisibility(visibility);
    }

    private void initVideoView() {
        mVideoWidth = 0;
        mVideoHeight = 0;
        getHolder().addCallback(mSHCallback);
        setFocusable(true);
        setFocusableInTouchMode(true);
        mCurrentState = STATE_IDLE;
        mTargetState  = STATE_IDLE;
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Tap toggles pause/resume.
        if((event.getAction() & MotionEvent.ACTION_MASK) == MotionEvent.ACTION_UP)
        {
            if (isPlaying()) {
                pause();
            } else if(mCurrentState == STATE_PAUSED){
                resume();
            }
        }

        return true;
    }

    private boolean isAssetRouse = false;
    private String fileName = null;

    /** Plays from an absolute path, or from the app assets when the path is relative. */
    public void setVideoFileName(String path) {
        if (path.startsWith("/")) {
            isAssetRouse = false;
            setVideoURI(Uri.parse(path),null);
        }
        else {
            fileName = path;
            isAssetRouse = true;
            setVideoURI(Uri.parse(path),null);
        }
    }

    public void setVideoURL(String url) {
        isAssetRouse = false;
        setVideoURI(Uri.parse(url), null);
    }

    /**
     * @hide
     */
    private void setVideoURI(Uri uri, Map<String, String> headers) {
        mUri = uri;
        mSeekWhenPrepared = 0;
        mVideoWidth = 0;
        mVideoHeight = 0;
        openVideo();
        requestLayout();
        invalidate();
    }

    public void stopPlayback() {
        if (mMediaPlayer != null) {
            mMediaPlayer.stop();
            mMediaPlayer.release();
            mMediaPlayer = null;
            mCurrentState = STATE_IDLE;
            mTargetState  = STATE_IDLE;
        }
    }

    private void openVideo() {
        if (mSurfaceHolder == null) {
            // not ready for playback just yet, will try again later
            return;
        }
        if (isAssetRouse) {
            if(fileName == null)
                return;
        } else {
            if(mUri == null)
                return;
        }

        // Tell the music playback service to pause
        // TODO: these constants need to be published somewhere in the framework.
        Intent i = new Intent("com.android.music.musicservicecommand");
        i.putExtra("command", "pause");
        mContext.sendBroadcast(i);

        // we shouldn't clear the target state, because somebody might have
        // called start() previously
        release(false);
        try {
            mMediaPlayer = new MediaPlayer();
            mMediaPlayer.setOnPreparedListener(mPreparedListener);
            mMediaPlayer.setOnVideoSizeChangedListener(mSizeChangedListener);
            mMediaPlayer.setOnCompletionListener(mCompletionListener);
            mMediaPlayer.setOnErrorListener(mErrorListener);
            mMediaPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener);
            mMediaPlayer.setDisplay(mSurfaceHolder);
            mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
            mMediaPlayer.setScreenOnWhilePlaying(true);

            mDuration = -1;
            mCurrentBufferPercentage = 0;
            if (isAssetRouse) {
                AssetFileDescriptor afd = mContext.getAssets().openFd(fileName);
                mMediaPlayer.setDataSource(afd.getFileDescriptor(),afd.getStartOffset(),afd.getLength());
            } else {
                mMediaPlayer.setDataSource(mContext, mUri);
            }

            mMediaPlayer.prepareAsync();

            // we don't set the target state here either, but preserve the
            // target state that was there before.
            mCurrentState = STATE_PREPARING;
        } catch (IOException ex) {
            Log.w(TAG, "Unable to open content: " + mUri, ex);
            mCurrentState = STATE_ERROR;
            mTargetState = STATE_ERROR;
            mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0);
            return;
        } catch (IllegalArgumentException ex) {
            Log.w(TAG, "Unable to open content: " + mUri, ex);
            mCurrentState = STATE_ERROR;
            mTargetState = STATE_ERROR;
            mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0);
            return;
        }
    }

    private boolean mKeepRatio = false;

    public void setKeepRatio(boolean enabled) {
        mKeepRatio = enabled;
        fixSize();
    }

    /**
     * Recomputes the visible rectangle from the view rect, the video
     * dimensions, and the keep-ratio flag, then applies it to the holder
     * and the layout params.
     */
    public void fixSize() {
        if (mViewWidth != 0 && mViewHeight != 0) {
            if (mKeepRatio) {
                if ( mVideoWidth * mViewHeight > mViewWidth * mVideoHeight ) {
                    mVisibleWidth = mViewWidth;
                    mVisibleHeight = mViewWidth * mVideoHeight / mVideoWidth;
                } else if ( mVideoWidth * mViewHeight < mViewWidth * mVideoHeight ) {
                    mVisibleWidth = mViewHeight * mVideoWidth / mVideoHeight;
                    mVisibleHeight = mViewHeight;
                } else {
                    // Aspect ratios match exactly: fill the view. Previously this
                    // case left mVisibleWidth/mVisibleHeight stale (possibly 0).
                    mVisibleWidth = mViewWidth;
                    mVisibleHeight = mViewHeight;
                }
                mVisibleLeft = mViewLeft + (mViewWidth - mVisibleWidth) / 2;
                mVisibleTop = mViewTop + (mViewHeight - mVisibleHeight) / 2;
            } else {
                mVisibleLeft = mViewLeft;
                mVisibleTop = mViewTop;
                mVisibleWidth = mViewWidth;
                mVisibleHeight = mViewHeight;
            }
        }
        else {
            mVisibleLeft = mViewLeft;
            mVisibleTop = mViewTop;
            mVisibleWidth = mVideoWidth;
            mVisibleHeight = mVideoHeight;
        }

        getHolder().setFixedSize(mVisibleWidth, mVisibleHeight);

        FrameLayout.LayoutParams lParams = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.WRAP_CONTENT,
                FrameLayout.LayoutParams.WRAP_CONTENT);
        lParams.leftMargin = mVisibleLeft;
        lParams.topMargin = mVisibleTop;
        setLayoutParams(lParams);
    }

    protected
    MediaPlayer.OnVideoSizeChangedListener mSizeChangedListener =
        new MediaPlayer.OnVideoSizeChangedListener() {
            public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
                mVideoWidth = mp.getVideoWidth();
                mVideoHeight = mp.getVideoHeight();
                if (mVideoWidth != 0 && mVideoHeight != 0) {
                    getHolder().setFixedSize(mVideoWidth, mVideoHeight);
                }
            }
    };

    MediaPlayer.OnPreparedListener mPreparedListener = new MediaPlayer.OnPreparedListener() {
        public void onPrepared(MediaPlayer mp) {
            mCurrentState = STATE_PREPARED;

            if (mOnPreparedListener != null) {
                mOnPreparedListener.onPrepared(mMediaPlayer);
            }

            mVideoWidth = mp.getVideoWidth();
            mVideoHeight = mp.getVideoHeight();

            int seekToPosition = mSeekWhenPrepared;  // mSeekWhenPrepared may be changed after seekTo() call
            if (seekToPosition != 0) {
                seekTo(seekToPosition);
            }

            if (mVideoWidth != 0 && mVideoHeight != 0) {
                fixSize();
            }

            if (mTargetState == STATE_PLAYING) {
                start();
            }
        }
    };

    private MediaPlayer.OnCompletionListener mCompletionListener =
        new MediaPlayer.OnCompletionListener() {
        public void onCompletion(MediaPlayer mp) {
            mCurrentState = STATE_PLAYBACK_COMPLETED;
            mTargetState = STATE_PLAYBACK_COMPLETED;

            release(true);
            if (mOnVideoEventListener != null) {
                mOnVideoEventListener.onVideoEvent(mViewTag,EVENT_COMPLETED);
            }
        }
    };

    private static final int EVENT_PLAYING = 0;
    private static final int EVENT_PAUSED = 1;
    private static final int EVENT_STOPPED = 2;
    private static final int EVENT_COMPLETED = 3;

    /** Receives playback state transitions for the view identified by its tag. */
    public interface OnVideoEventListener
    {
        void onVideoEvent(int tag,int event);
    }

    private MediaPlayer.OnErrorListener mErrorListener =
        new MediaPlayer.OnErrorListener() {
        public boolean onError(MediaPlayer mp, int framework_err, int impl_err) {
            Log.d(TAG, "Error: " + framework_err + "," + impl_err);
            mCurrentState = STATE_ERROR;
            mTargetState = STATE_ERROR;

            /* If an error handler has been supplied, use it and finish. */
            if (mOnErrorListener != null) {
                if (mOnErrorListener.onError(mMediaPlayer, framework_err, impl_err)) {
                    return true;
                }
            }

            /* Otherwise, pop up an error dialog so the user knows that
             * something bad has happened. Only try and pop up the dialog
             * if we're attached to a window. When we're going away and no
             * longer have a window, don't bother showing the user an error.
             */
            if (getWindowToken() != null) {
                Resources r = mContext.getResources();
                int messageId;

                if (framework_err == MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK) {
                    // Resolve framework string resources by name; the internal
                    // R class is not accessible from application code.
                    messageId = r.getIdentifier("VideoView_error_text_invalid_progressive_playback", "string", "android");
                } else {
                    messageId = r.getIdentifier("VideoView_error_text_unknown", "string", "android");
                }

                int titleId = r.getIdentifier("VideoView_error_title", "string", "android");
                int buttonStringId = r.getIdentifier("VideoView_error_button", "string", "android");

                new AlertDialog.Builder(mContext)
                        .setTitle(r.getString(titleId))
                        .setMessage(messageId)
                        .setPositiveButton(r.getString(buttonStringId),
                                new DialogInterface.OnClickListener() {
                                    public void onClick(DialogInterface dialog, int whichButton) {
                                        /* If we get here, there is no onError listener, so
                                         * at least inform them that the video is over.
                                         */
                                        if (mOnVideoEventListener != null) {
                                            mOnVideoEventListener.onVideoEvent(mViewTag,EVENT_COMPLETED);
                                        }
                                    }
                                })
                        .setCancelable(false)
                        .show();
            }
            return true;
        }
    };

    private MediaPlayer.OnBufferingUpdateListener mBufferingUpdateListener =
        new MediaPlayer.OnBufferingUpdateListener() {
        public void onBufferingUpdate(MediaPlayer mp, int percent) {
            mCurrentBufferPercentage = percent;
        }
    };

    /**
     * Register a callback to be invoked when the media file
     * is loaded and ready to go.
     *
     * @param l The callback that will be run
     */
    public void setOnPreparedListener(MediaPlayer.OnPreparedListener l)
    {
        mOnPreparedListener = l;
    }

    /**
     * Register a callback to be invoked when the end of a media file
     * has been reached during playback.
     *
     * @param l The callback that will be run
     */
    public void setOnCompletionListener(OnVideoEventListener l)
    {
        mOnVideoEventListener = l;
    }

    /**
     * Register a callback to be invoked when an error occurs
     * during playback or setup.  If no listener is specified,
     * or if the listener returned false, VideoView will inform
     * the user of any errors.
     *
     * @param l The callback that will be run
     */
    public void setOnErrorListener(OnErrorListener l)
    {
        mOnErrorListener = l;
    }

    SurfaceHolder.Callback mSHCallback = new SurfaceHolder.Callback()
    {
        public void surfaceChanged(SurfaceHolder holder, int format,
                                    int w, int h)
        {
            boolean isValidState =  (mTargetState == STATE_PLAYING);
            boolean hasValidSize = (mVideoWidth == w && mVideoHeight == h);
            if (mMediaPlayer != null && isValidState && hasValidSize) {
                if (mSeekWhenPrepared != 0) {
                    seekTo(mSeekWhenPrepared);
                }
                start();
            }
        }

        public void surfaceCreated(SurfaceHolder holder)
        {
            mSurfaceHolder = holder;
            openVideo();
        }

        public void surfaceDestroyed(SurfaceHolder holder)
        {
            // after we return from this we can't use the surface any more
            mSurfaceHolder = null;
            release(true);
        }
    };

    /*
     * release the media player in any state
     */
    private void release(boolean cleartargetstate) {
        if (mMediaPlayer != null) {
            mMediaPlayer.reset();
            mMediaPlayer.release();
            mMediaPlayer = null;
            mCurrentState = STATE_IDLE;
            if (cleartargetstate) {
                mTargetState  = STATE_IDLE;
            }
        }
    }

    public void start() {
        if (isInPlaybackState()) {
            mMediaPlayer.start();
            mCurrentState = STATE_PLAYING;
            if (mOnVideoEventListener != null) {
                mOnVideoEventListener.onVideoEvent(mViewTag, EVENT_PLAYING);
            }
        }
        mTargetState = STATE_PLAYING;
    }

    public void pause() {
        if (isInPlaybackState()) {
            if (mMediaPlayer.isPlaying()) {
                mMediaPlayer.pause();
                mCurrentState = STATE_PAUSED;
                if (mOnVideoEventListener != null) {
                    mOnVideoEventListener.onVideoEvent(mViewTag, EVENT_PAUSED);
                }
            }
        }
        mTargetState = STATE_PAUSED;
    }

    public void stop() {
        if (isInPlaybackState()) {
            if (mMediaPlayer.isPlaying()) {
                stopPlayback();
                if (mOnVideoEventListener != null) {
                    mOnVideoEventListener.onVideoEvent(mViewTag, EVENT_STOPPED);
                }
            }
        }
    }

    public void suspend() {
        release(false);
    }

    public void resume() {
        if (isInPlaybackState()) {
            if (mCurrentState == STATE_PAUSED) {
                mMediaPlayer.start();
                mCurrentState = STATE_PLAYING;
                if (mOnVideoEventListener != null) {
                    mOnVideoEventListener.onVideoEvent(mViewTag, EVENT_PLAYING);
                }
            }
        }
    }

    public void restart() {
        if (isInPlaybackState()) {
            mMediaPlayer.seekTo(0);
            mMediaPlayer.start();
            mCurrentState = STATE_PLAYING;
            mTargetState = STATE_PLAYING;
        }
    }

    // cache duration as mDuration for faster access
    public int getDuration() {
        if (isInPlaybackState()) {
            if (mDuration > 0) {
                return mDuration;
            }
            mDuration = mMediaPlayer.getDuration();
            return mDuration;
        }
        mDuration = -1;
        return mDuration;
    }

    public int getCurrentPosition() {
        if (isInPlaybackState()) {
            return mMediaPlayer.getCurrentPosition();
        }
        return 0;
    }

    public void seekTo(int msec) {
        if (isInPlaybackState()) {
            mMediaPlayer.seekTo(msec);
            mSeekWhenPrepared = 0;
        } else {
            // Not ready yet: remember the seek and apply it once prepared.
            mSeekWhenPrepared = msec;
        }
    }

    public boolean isPlaying() {
        return isInPlaybackState() && mMediaPlayer.isPlaying();
    }

    public int getBufferPercentage() {
        if (mMediaPlayer != null) {
            return mCurrentBufferPercentage;
        }
        return 0;
    }

    public boolean isInPlaybackState() {
        return (mMediaPlayer != null &&
                mCurrentState != STATE_ERROR &&
                mCurrentState != STATE_IDLE &&
                mCurrentState != STATE_PREPARING);
    }

    @Override
    public boolean canPause() {
        return true;
    }

    @Override
    public boolean canSeekBackward() {
        return true;
    }

    @Override
    public boolean canSeekForward() {
        return true;
    }

    public int getAudioSessionId () {
        // Guard against calls before the player exists (previously NPE'd).
        return mMediaPlayer != null ? mMediaPlayer.getAudioSessionId() : 0;
    }
}
|
/* Copyright (c) 2014-2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.ims.vt;
import android.view.Surface;
import android.view.WindowManager;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraCharacteristics.Key;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Size;
import com.qualcomm.ims.utils.Log;
/**
* The class is used to hold an {@code android.hardware.Camera} instance.
* <p>
* The {@code open()} and {@code release()} calls are similar to the ones in
* {@code android.hardware.Camera}.
*/
public class ImsCamera extends Camera {
    private static final String TAG = "ImsCamera";
    private static final boolean DBG = true;
    private static final short IMS_CAMERA_OPERATION_SUCCESS = 0;
    private static final Size INVALID_SIZE = new Size(-1, -1);

    static {
        System.loadLibrary("imscamera_jni");
    }

    private String mPackageName;
    private WindowManager mWindowManager;
    private CameraManager mCameraManager;
    private boolean mIsOpen;
    private boolean mIsPreviewStarted;
    private boolean mIsRecordingStarted;
    private Surface mPreviewSurface;
    private ConfigIms mConfigIms;

    // @deprecated Use overloaded variant and explicitly pass the package name.
    public static native short native_open(int cameraId);
    public static native short native_open(int cameraId, String packageName);
    public native short native_release();
    public native short native_startPreview();
    public native short native_stopPreview();
    public native short native_startRecording();
    public native short native_stopRecording();
    public native short native_setPreviewTexture(Surface surface);
    public native short native_setDisplayOrientation(int rotation);
    public native boolean native_isZoomSupported();
    public native int native_getMaxZoom();
    public native void native_setZoom(int zoomValue);
    public native short native_setPreviewSize(int width, int height);
    public native short native_setPreviewFpsRange(short fps);

    /* package */
    ImsCamera(Context context, String id, Camera.Listener listener) throws CameraAccessException {
        super(context, id, listener);
        mPackageName = context.getPackageName();
        // TODO Maybe make this static.
        mWindowManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
        mCameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
        mIsOpen = false;
        mIsPreviewStarted = false;
        mIsRecordingStarted = false;
        mPreviewSurface = null;
        mConfigIms = new ConfigIms(getDefaultPreviewSize(), ImsMediaConstants.DEFAULT_FPS,
                ImsMediaConstants.PORTRAIT_MODE);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Size getPreviewSize() throws CameraAccessException {
        return mConfigIms.getSize();
    }

    /** Returns the first supported SurfaceTexture output size as the default. */
    private Size getDefaultPreviewSize() throws CameraAccessException {
        // Eventhough ImsCamera implementation should not access Camera2 APIs,
        // we query some of camera characteristics using Camera2 APIs.
        // Camera folks confirmed that this is OK, since all characteristics are
        // cached when the camera service comes up, so this won't really make any
        // access to camera.
        StreamConfigurationMap map = getCameraCharacteristic(mCameraId,
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        return map.getOutputSizes(SurfaceTexture.class)[0];
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void open() throws CameraAccessException {
        Log.i(this, "open");
        if (isOpen()) {
            Log.i(this, "open: Camera is already open.");
            return;
        }
        final int id = Integer.parseInt(getId());
        final short error = native_open(id, mPackageName);
        if (error != IMS_CAMERA_OPERATION_SUCCESS) {
            Log.v(this, "open: error=" + error);
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        mIsOpen = true;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void close() {
        Log.i(this, "close");
        if (!isOpen()) {
            Log.i(this, "close: Camera is already closed.");
            return;
        }
        try {
            stopPreview();
        } catch (Exception e) {
            // Best effort: release the camera even if stopping preview failed.
            Log.e(this, "close: Failed to close camera preview/recording, exception=" + e);
        }
        short error = native_release();
        logIfError("release", error);
        mIsOpen = false;
        mIsPreviewStarted = false;
        mIsRecordingStarted = false;
        mPreviewSurface = null;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setZoom(float v) throws CameraAccessException {
        Log.i(this, "setZoom " + v);
        if (!isOpen()) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        if (v < getMinZoom() || v > getMaxZoom()) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        native_setZoom((int) v);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void reconfigure(ConfigIms cfg) throws CameraAccessException {
        Log.i(this, "reconfigure " + cfg);
        if (!isOpen()) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        setPreviewFps(cfg.getFps());
        setFrameDimension(cfg.getWidth(), cfg.getHeight());
        mConfigIms = cfg;
    }

    /**
     * Sets output (recording) frames dimension.
     * @param w Width of the frame.
     * @param h Height of the frame.
     * @throws CameraAccessException if the camera is closed or streaming.
     */
    private void setFrameDimension(int w, int h) throws CameraAccessException {
        Log.i(this, "setPreviewSize");
        // Size can only be changed while nothing is streaming.
        if (!isOpen() || isPreviewStarted() || isRecordingStarted()) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        short error = native_setPreviewSize(w, h);
        logIfError("setPreviewSize", error);
    }

    /**
     * Sets output FPS.
     * @param fps New FPS value.
     * @throws CameraAccessException if the camera is closed or streaming.
     */
    private void setPreviewFps(int fps) throws CameraAccessException {
        Log.i(this, "setPreviewFps");
        if (!isOpen() || isPreviewStarted() || isRecordingStarted()) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        short error = native_setPreviewFpsRange((short) fps);
        logIfError("setPreviewFpsRange", error);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isZoomSupported() throws CameraAccessException {
        if (!isOpen()) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        boolean result = native_isZoomSupported();
        Log.v(this, "isZoomSupported result=" + result);
        return result;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public float getMaxZoom() throws CameraAccessException {
        if (!isOpen()) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        int result = native_getMaxZoom();
        Log.v(this, "getMaxZoom result = " + result);
        return result;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public float getMinZoom() throws CameraAccessException {
        if (!isOpen()) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        final int ZOOM_MIN_VALUE = 0;
        return ZOOM_MIN_VALUE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isFacingFront() throws CameraAccessException {
        if (!isOpen()) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
        android.hardware.Camera.getCameraInfo(Integer.parseInt(getId()), info);
        Log.v(this, "isFacingFront info.facing=" + info.facing);
        return (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void startPreview(Surface surface) throws CameraAccessException {
        Log.i(this, "startPreview: Surface=" + surface);
        if (!isOpen()) {
            Log.e(this, "startPreview: Error camera is closed");
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        } else if (isPreviewStarted()) {
            Log.i(this, "startPreview: Camera preview already started.");
            return;
        }
        mPreviewSurface = surface;
        short error = native_setPreviewTexture(surface);
        logIfError("setPreviewTexture", error);
        if (error == IMS_CAMERA_OPERATION_SUCCESS) {
            error = native_startPreview();
            logIfError("startPreview", error);
        }
        setDisplayOrientation();
        if (error == IMS_CAMERA_OPERATION_SUCCESS) {
            mIsPreviewStarted = true;
        } else {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
    }

    /** Stops the native preview stream; throws on failure. */
    private void doStopPreview() throws CameraAccessException {
        Log.i(this, "doStopPreview");
        short error = native_stopPreview();
        logIfError("doStopPreview", error);
        if (error != IMS_CAMERA_OPERATION_SUCCESS) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        mIsPreviewStarted = false;
    }

    /** Stops the native recording stream; throws on failure. */
    private void doStopRecording() throws CameraAccessException {
        Log.i(this, "doStopRecording");
        short error = native_stopRecording();
        logIfError("doStopRecording", error);
        if (error != IMS_CAMERA_OPERATION_SUCCESS) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        mIsRecordingStarted = false;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void stopPreview() throws CameraAccessException {
        if (!isPreviewStarted()) {
            Log.i(this, "stopPreview: Camera preview already stopped.");
            return;
        }
        Log.i(this, "stopPreview");
        // Recording must be stopped before the preview it depends on.
        if (isRecordingStarted()) {
            doStopRecording();
        }
        doStopPreview();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void startRecording(Surface previewSurface, Surface recordingSurface)
            throws CameraAccessException {
        Log.i(this, "startRecording: PreviewSurface=" + previewSurface + " RecordingSurface="
                + recordingSurface);
        if (isRecordingStarted()) {
            Log.i(this, "startRecording: Camera recording already started.");
            return;
        }
        mPreviewSurface = previewSurface;
        if (mPreviewSurface == null) {
            Log.e(this, "startRecording: Preview surface is null.");
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        // Recording requires a running preview.
        if (!isPreviewStarted()) {
            startPreview(mPreviewSurface);
        }
        short error = native_startRecording();
        logIfError("startRecording", error);
        if (error != IMS_CAMERA_OPERATION_SUCCESS) {
            throw new CameraAccessException(CameraAccessException.CAMERA_ERROR);
        }
        mIsRecordingStarted = true;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void stopRecording() throws CameraAccessException {
        if (!isRecordingStarted()) {
            Log.i(this, "stopRecording: Camera recording already stopped.");
            return;
        }
        Log.i(this, "stopRecording");
        doStopRecording();
        if (isPreviewStarted()) {
            doStopPreview();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isOpen() {
        return mIsOpen;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isPreviewStarted() {
        return mIsPreviewStarted;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isRecordingStarted() {
        return mIsRecordingStarted;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getSensorOrientation() throws CameraAccessException {
        try {
            return getCameraCharacteristic(mCameraId, CameraCharacteristics.SENSOR_ORIENTATION);
        } catch (CameraAccessException e) {
            Log.e(this, "getSensorOrientation: Failed to retrieve sensor orientation, " + e);
            throw e;
        }
    }

    /**
     * Looks up a single characteristic of the given camera.
     * @param cameraId id of the camera to query.
     * @param key characteristic to read.
     */
    private <T> T getCameraCharacteristic(String cameraId, Key<T> key)
            throws CameraAccessException {
        // Fix: query the camera named by the parameter. Previously this method
        // ignored cameraId and always used mCameraId, which would silently
        // return the wrong camera's data if a caller passed a different id.
        CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId);
        return characteristics.get(key);
    }

    /**
     * Set the camera display orientation based on the screen rotation and the camera direction
     */
    private void setDisplayOrientation() {
        if (mWindowManager == null) {
            Log.e(this, "WindowManager not available");
            return;
        }
        android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
        int result = 0, degrees = 0, rotation = 0;
        // We assume that the device will always be in it's native orientation.
        // The actual rotation is done at UI side. This way we avoid stoping and starting camera
        // preview and recording everytime UI gets rotated.
        rotation = Surface.ROTATION_0;
        switch (rotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
            default:
                Log.e(this, "setDisplayOrientation: Unexpected rotation: " + rotation);
        }
        final int id = Integer.parseInt(getId());
        android.hardware.Camera.getCameraInfo(id, info);
        if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360; // compensate the mirror
        } else { // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }
        Log.i(this, "setDisplayOrientation rotation=" + result);
        short error = native_setDisplayOrientation(result);
        logIfError("setDisplayOrientation", error);
    }

    /** Logs a native-call failure; success codes are silently ignored. */
    private void logIfError(String methodName, short error) {
        if (error != IMS_CAMERA_OPERATION_SUCCESS) {
            Log.e(this, methodName + " failed with error=" + error);
        }
    }
}
|
/*
Copyright 2013 Giovanni Bricconi
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.solr.kelvin;
import java.util.Iterator;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
/**
 * Loads {@link QueryPerformer} resources from configuration, falling back to a
 * default {@link URLQueryPerformer} when none are declared.
 */
public class QueryPerformerLoader extends ConfigurableLoader {

    public QueryPerformerLoader() {
        super(QueryPerformer.class);
    }

    /**
     * Ensures at least one performer is available: when the configuration
     * declared none, registers a {@link URLQueryPerformer} configured with an
     * empty JSON object.
     */
    @Override
    protected void addDefaults() {
        if (resources.isEmpty()) {
            URLQueryPerformer defaultPerformer = new URLQueryPerformer();
            try {
                defaultPerformer.configure(JsonNodeFactory.instance.objectNode());
                resources.add(defaultPerformer);
            } catch (Exception e) {
                // Best effort: continue without a default performer.
                e.printStackTrace();
            }
        }
    }

    /**
     * @return an iterator over the loaded performers; {@code remove()} is
     *         unsupported.
     */
    public Iterator<QueryPerformer> iterator() {
        return new Iterator<QueryPerformer>() {
            private final Iterator<IConfigurable> delegate = resources.iterator();

            public boolean hasNext() {
                return delegate.hasNext();
            }

            public QueryPerformer next() {
                // Loader is type-restricted to QueryPerformer, so the cast is safe.
                return (QueryPerformer) delegate.next();
            }

            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }
}
|
package com.devonfw.module.security.common.base.accesscontrol;
import com.devonfw.module.security.common.api.accesscontrol.AccessControlSchema;
/**
* This is the interface to {@link #loadSchema() load} the {@link AccessControlSchema} from an arbitrary source. The
* default implementation will load it from an XML file. You could create your own implementation to read from database
* or wherever if default is not suitable.
*
*/
public interface AccessControlSchemaProvider {
/**
 * Loads the access-control configuration from the implementation's source
 * (the default implementation reads an XML file; custom implementations may
 * read from a database or elsewhere).
 *
 * @return the loaded {@link AccessControlSchema}. May not be {@code null}.
 */
AccessControlSchema loadSchema();
}
|
/**
* This Source Code Form is subject to the terms of the Mozilla Public License,
* v. 2.0. If a copy of the MPL was not distributed with this file, You can
* obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
* the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
*
* Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
* graphic logo is a trademark of OpenMRS Inc.
*/
package org.openmrs.api.db;
/**
* Data access for custom datatypes
*
* @since 1.9
*/
public interface DatatypeDAO {
/**
 * Gets a clob storage object by its id
 *
 * @param id primary key of the clob storage row to fetch
 * @return clob storage object or null if no row has that id
 */
ClobDatatypeStorage getClobDatatypeStorage(Integer id);
/**
 * Gets a clob storage object by its uuid
 *
 * @param uuid uuid of the clob storage row to fetch
 * @return clob storage object or null if no row has that uuid
 */
ClobDatatypeStorage getClobDatatypeStorageByUuid(String uuid);
/**
 * Creates or updates a clob storage object
 *
 * @param storage the object to persist
 * @return the saved object
 */
ClobDatatypeStorage saveClobDatatypeStorage(ClobDatatypeStorage storage);
/**
 * Deletes a clob storage object from the database
 *
 * @param storage the object to delete
 */
void deleteClobDatatypeStorage(ClobDatatypeStorage storage);
}
|
package spotify.murari.controller;
import java.util.List;
import javax.validation.Valid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.ApiOperation;
import spotify.murari.payload.request.AlbumCategoryRequest;
import spotify.murari.payload.response.AlbumCategoryResponse;
import spotify.murari.service.IAlbumCategoryService;
@RestController
@CrossOrigin
@RequestMapping("/spotify")
public class AlbumCategoryController {

    @Autowired
    private IAlbumCategoryService categoryService;

    /**
     * Persists a new album category.
     *
     * @param categoryRequest validated category payload
     * @return 201 with a confirmation message, or 500 on failure
     */
    @ApiOperation(value = "SAVE ALBUM CATEGORY")
    @PostMapping("/category")
    public ResponseEntity<?> saveAlbumCategory(@Valid @RequestBody AlbumCategoryRequest categoryRequest) {
        ResponseEntity<String> resp = null;
        try {
            categoryService.saveCategory(categoryRequest);
            resp = new ResponseEntity<>("Album category saved " + categoryRequest.getTitle(), HttpStatus.CREATED);
        } catch (Exception e) {
            e.printStackTrace();
            resp = new ResponseEntity<>("Unable to process save", HttpStatus.INTERNAL_SERVER_ERROR);
        }
        return resp;
    }

    /**
     * Fetches all album categories.
     *
     * @return 200 with the category list, or 500 on failure
     */
    @ApiOperation(value = "FETCH ALL ALBUM CATEGORY")
    @GetMapping("/category")
    public ResponseEntity<?> getAlbumCategory() {
        ResponseEntity<?> resp = null;
        try {
            List<AlbumCategoryResponse> categoryList = categoryService.getAlbumCategory();
            resp = new ResponseEntity<>(categoryList, HttpStatus.OK);
        } catch (Exception e) {
            e.printStackTrace();
            resp = new ResponseEntity<>("Unable to fetch data", HttpStatus.INTERNAL_SERVER_ERROR);
        }
        return resp;
    }

    /**
     * Fetches a single album category by id.
     *
     * @param id category identifier
     * @return 200 with the category, or 500 on failure
     */
    @ApiOperation(value = "FETCH ALBUM CATEGORY BY ID")
    @GetMapping("/category/{id}")
    public ResponseEntity<?> getOneCategory(@PathVariable String id) {
        ResponseEntity<?> resp = null;
        try {
            AlbumCategoryResponse category = categoryService.getOneCategory(id);
            resp = new ResponseEntity<>(category, HttpStatus.OK);
        } catch (Exception e) {
            // Fix: the original caught the exception only to rethrow it,
            // inconsistent with every other endpoint here. Log and return 500
            // like the sibling handlers do.
            e.printStackTrace();
            resp = new ResponseEntity<>("Unable to fetch data", HttpStatus.INTERNAL_SERVER_ERROR);
        }
        return resp;
    }

    /**
     * Deletes an album category by id.
     *
     * @param id category identifier
     * @return 200 on success, or 500 on failure
     */
    @ApiOperation(value = "DELETE ALBUM CATEGORY BY ID")
    @DeleteMapping("/category/{id}")
    public ResponseEntity<String> removeOneCategory(@PathVariable String id) {
        ResponseEntity<String> resp = null;
        try {
            categoryService.deleteCategory(id);
            resp = new ResponseEntity<>("Category deleted", HttpStatus.OK);
        } catch (Exception e) {
            e.printStackTrace();
            resp = new ResponseEntity<>("Unable to delete data", HttpStatus.INTERNAL_SERVER_ERROR);
        }
        return resp;
    }

    /**
     * Updates an existing album category.
     *
     * @param req category payload with updated fields
     * @return 205 on success, or 500 on failure
     */
    @ApiOperation(value = "UPDATE ALBUM CATEGORY")
    @PutMapping("/category")
    public ResponseEntity<String> updateCategory(@RequestBody AlbumCategoryRequest req) {
        ResponseEntity<String> resp = null;
        try {
            categoryService.updateCategory(req);
            resp = new ResponseEntity<>("Category updated", HttpStatus.RESET_CONTENT);
        } catch (Exception e) {
            e.printStackTrace();
            resp = new ResponseEntity<>("Unable to update data", HttpStatus.INTERNAL_SERVER_ERROR);
        }
        return resp;
    }
}
|
package com.example.service.account;
import com.example.mapper.account.AdvancePaymentsMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* 预收款service
*/
@Service
public class AdvancePaymentsService {
// Data-access mapper for advance (pre-)payments, injected by Spring.
// NOTE(review): this service currently exposes no business methods —
// presumably a placeholder to be filled in; confirm before removing.
@Autowired
private AdvancePaymentsMapper advancePaymentsMapper;
}
|
package apron.test;
import apron.constraint.*;
import apron.permissionlanguage.*;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.tree.ParseTree;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
public class Test {

    /**
     * Parses a permission-language program from {@code inputFile} and wraps
     * the generated syntax tree in an {@link Evaluator}.
     *
     * @param inputFile path of the Apron source file to parse
     * @throws IOException if the file cannot be read
     */
    public static Evaluator CreateEvaluator(String inputFile) throws IOException {
        // Fix: the original leaked the FileInputStream; close it via
        // try-with-resources once parsing is done.
        try (InputStream is = new FileInputStream(inputFile)) {
            ANTLRInputStream input = new ANTLRInputStream(is);
            ApronLexer lexer = new ApronLexer(input);
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            ApronParser parser = new ApronParser(tokens);
            ParseTree tree = parser.program(); // parse
            SyntaxGenerator syn = new SyntaxGenerator();
            return new Evaluator(syn.visit(tree));
        }
    }

    /**
     * Parses a constraint file and returns a {@link ConstraintGenerator} that
     * has already visited the resulting parse tree.
     *
     * @param inputFile path of the constraint source file to parse
     * @throws IOException if the file cannot be read
     */
    public static ConstraintGenerator Create_Con_Visitor(String inputFile) throws IOException {
        // Fix: close the input stream (previously leaked).
        try (InputStream is = new FileInputStream(inputFile)) {
            ANTLRInputStream input = new ANTLRInputStream(is);
            ConstraintLexer lexer = new ConstraintLexer(input);
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            ConstraintParser parser = new ConstraintParser(tokens);
            ParseTree tree = parser.program(); // parse
            ConstraintGenerator con = new ConstraintGenerator();
            con.visit(tree);
            return con;
        }
    }

    /** Runs the flow pre-test and prints its boolean outcome. */
    public static void main(String[] args) throws Exception {
        System.out.println("Start!");
        System.out.println(new FlowPreTestor().execute() ? "True" : "False");
        System.out.println("Done!");
        /*
        SyntaxGenerator syn = new SyntaxGenerator();
        SyntaxTree syntree = syn.visit(tree);
        //syntree.print();
        syntree.rebuild();
        //syntree.reduce2dnf();
        //syntree.print();
        //syntree.reduce2cnf();
        //syntree.print();
        SyntaxTree a = Create_Syn_Tree("sample.a");
        a.rebuild();
        SyntaxTree b = Create_Syn_Tree("sample.b");
        System.out.println(a.is_include(b));
        ConsVisitor c = Create_Con_Visitor("sample.con");
        //c.execute();
        System.out.println(c.execute(a));
        */
    }
}
|
package org.batfish.representation.cisco;
import javax.annotation.Nonnull;
import org.batfish.datamodel.IpSpace;
import org.batfish.datamodel.IpSpaceReference;
public class NetworkObjectGroupAddressSpecifier implements AccessListAddressSpecifier {

    /** */
    private static final long serialVersionUID = 1L;

    /** Name of the referenced network object-group. */
    private final String _name;

    public NetworkObjectGroupAddressSpecifier(String name) {
        _name = name;
    }

    /** Resolves this specifier to a named {@link IpSpaceReference}. */
    @Override
    @Nonnull
    public IpSpace toIpSpace() {
        String description = String.format("Match network object-group: '%s'", _name);
        return new IpSpaceReference(_name, description);
    }
}
|
package com.example.android.onlineshoppingdemo.exceptions;
/**
 * Thrown when a user fails to authenticate.
 * <p>
 * Fix: the original declared no constructors, so callers could not attach a
 * detail message or a cause. The added constructors are backward compatible —
 * the no-arg form preserves the previous behavior.
 */
public class AuthenticationException extends Exception {

    private static final long serialVersionUID = 1L;

    /** Creates an exception with no detail message (original behavior). */
    public AuthenticationException() {
        super();
    }

    /**
     * @param message human-readable description of the authentication failure
     */
    public AuthenticationException(String message) {
        super(message);
    }

    /**
     * @param message human-readable description of the authentication failure
     * @param cause underlying cause, preserved for diagnostics
     */
    public AuthenticationException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
package ru.yandex.qatools.embed.postgresql.config;
import de.flapdoodle.embed.process.config.store.*;
import de.flapdoodle.embed.process.extract.UUIDTempNaming;
import de.flapdoodle.embed.process.io.directories.UserHome;
import de.flapdoodle.embed.process.io.progress.StandardConsoleProgressListener;
import ru.yandex.qatools.embed.postgresql.Command;
import ru.yandex.qatools.embed.postgresql.PackagePaths;
import ru.yandex.qatools.embed.postgresql.ext.SubdirTempDir;
/**
* Download config builder for postgres
*/
public class PostgresDownloadConfigBuilder {

    private ImmutableDownloadConfig.Builder builder;

    /** Seeds the builder with sensible defaults for the given {@code command}. */
    public ImmutableDownloadConfig.Builder defaultsForCommand(Command command) {
        final StandardConsoleProgressListener progress = new StandardConsoleProgressListener() {
            @Override
            public void info(String label, String message) {
                // Collapse the very chatty extraction log into a dot-per-entry bar.
                if (label.startsWith("Extract")) {
                    System.out.print(".");//NOSONAR
                } else {
                    super.info(label, message);//NOSONAR
                }
            }
        };
        builder = ImmutableDownloadConfig.builder()
                .fileNaming(new UUIDTempNaming())
                // I've found the only open and easy to use cross platform binaries
                .downloadPath(new SameDownloadPathForEveryDistribution("http://get.enterprisedb.com/postgresql/"))
                .packageResolver(new PackagePaths(command, SubdirTempDir.defaultInstance()))
                .artifactStorePath(new UserHome(".embedpostgresql"))
                .downloadPrefix("postgresql-download")
                .userAgent("Mozilla/5.0 (compatible; Embedded postgres; +https://github.com/yandex-qatools)")
                .progressListener(progress);
        return builder;
    }

    /** Builds a mutable copy of the accumulated download configuration. */
    public DownloadConfig build() {
        final ImmutableDownloadConfig cfg = builder.build();
        return new MutableDownloadConfig(cfg.getDownloadPath(), cfg.getDownloadPrefix(),
                cfg.getPackageResolver(), cfg.getArtifactStorePath(), cfg.getFileNaming(),
                cfg.getProgressListener(), cfg.getUserAgent(), cfg.getTimeoutConfig(),
                cfg.proxyFactory().orElse(null));
    }
}
|
package com.socrata.util.iterators;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
 * An {@link Iterator} decorator that yields at most a fixed number of elements
 * from an underlying iterator, then reports exhaustion.
 *
 * @param <T> element type
 */
public class TakeIterator<T> implements Iterator<T> {
    /** Number of elements still allowed to be returned. */
    private int remaining;
    private final Iterator<T> underlying;

    /**
     * @param howMany maximum number of elements to expose; values &lt;= 0 make
     *                the iterator immediately empty
     * @param underlying source iterator to draw elements from
     */
    public TakeIterator(int howMany, Iterator<T> underlying) {
        this.remaining = howMany;
        this.underlying = underlying;
    }

    @Override
    public boolean hasNext() {
        return remaining > 0 && underlying.hasNext();
    }

    @Override
    public T next() {
        if (remaining <= 0) {
            throw new NoSuchElementException();
        }
        T result = underlying.next();
        remaining -= 1;
        return result;
    }

    /** Delegates removal of the last returned element to the source iterator. */
    @Override
    public void remove() {
        underlying.remove();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.