gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.framework.security.util;
import java.security.Permission;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import org.apache.felix.framework.BundleRevisionImpl;
import org.apache.felix.framework.security.condpermadmin.ConditionalPermissionInfoImpl;
import org.apache.felix.framework.util.SecureAction;
import org.osgi.framework.Bundle;
import org.osgi.service.condpermadmin.Condition;
import org.osgi.service.condpermadmin.ConditionInfo;
/**
* This class caches conditions instances by their infos. Furthermore, it allows
* to eval postponed condition permission tuples as per spec (see 9.45).
*/
// TODO: maybe use bundle events instead of soft/weak references.
public final class Conditions
{
    // Per-thread recursion guard: holds either a single condition Class or a
    // HashSet of condition Classes currently being evaluated on this thread.
    // Seeing a condition type that is already on the stack aborts evaluation
    // (treated as "not satisfied") to break infinite recursion.
    private static final ThreadLocal m_conditionStack = new ThreadLocal();
    // Global cache: bundle revision -> (ConditionInfo[] -> Condition[]).
    // Weak keys let entries vanish when a revision is garbage collected.
    private static final Map m_conditionCache = new WeakHashMap();

    // Instance cache used by getConditions():
    // ConditionInfo[] -> (bundle revision -> Conditions).
    private final Map m_cache = new WeakHashMap();

    // Revision these conditions apply to; null for the factory instance
    // created via the public constructor.
    private final BundleRevisionImpl m_module;

    private final ConditionInfo[] m_conditionInfos;
    // Lazily created Condition instances, parallel to m_conditionInfos.
    // Immutable conditions are replaced by Condition.TRUE/FALSE once evaluated.
    private final Condition[] m_conditions;
    private final SecureAction m_action;

    /**
     * Creates the root factory instance; obtain per-revision instances via
     * {@link #getConditions(BundleRevisionImpl, ConditionInfo[])}.
     */
    public Conditions(SecureAction action)
    {
        this(null, null, action);
    }

    private Conditions(BundleRevisionImpl module, ConditionInfo[] conditionInfos,
        SecureAction action)
    {
        m_module = module;
        m_conditionInfos = conditionInfos;

        if ((module != null) && (conditionInfos != null))
        {
            // Look up (or create) the shared Condition[] for this
            // (module, conditionInfos) pair so that all Conditions instances
            // for the same pair share evaluation state.
            synchronized (m_conditionCache)
            {
                Map conditionMap = (Map) m_conditionCache.get(module);
                if (conditionMap == null)
                {
                    conditionMap = new HashMap();
                    conditionMap.put(m_conditionInfos,
                        new Condition[m_conditionInfos.length]);
                    m_conditionCache.put(module, conditionMap);
                }
                Condition[] conditions = (Condition[]) conditionMap
                    .get(m_conditionInfos);
                if (conditions == null)
                {
                    conditions = new Condition[m_conditionInfos.length];
                    conditionMap.put(m_conditionInfos, conditions);
                }
                m_conditions = conditions;
            }
        }
        else
        {
            m_conditions = null;
        }
        m_action = action;
    }

    /**
     * Returns a cached Conditions instance for the given revision and
     * condition infos, creating one on a cache miss.
     */
    public Conditions getConditions(BundleRevisionImpl key, ConditionInfo[] conditions)
    {
        Conditions result = null;
        Map index = null;
        synchronized (m_cache)
        {
            index = (Map) m_cache.get(conditions);
            if (index == null)
            {
                index = new WeakHashMap();
                m_cache.put(conditions, index);
            }
        }
        synchronized (index)
        {
            if (key != null)
            {
                result = (Conditions) index.get(key);
            }
        }

        if (result == null)
        {
            // NOTE(review): creation happens outside the lock, so two threads
            // may race and create distinct instances (last put wins). The
            // underlying Condition[] is still shared via m_conditionCache, so
            // this only costs a duplicate wrapper object.
            result = new Conditions(key, conditions, m_action);
            synchronized (index)
            {
                index.put(key, result);
            }
        }

        return result;
    }

    // See whether the given list is satisfied or not.
    // Immediate conditions are evaluated here; postponed conditions are
    // collected into "posts" as {Condition, index} tuples for later
    // evaluation via evalRecursive() (spec 9.45). Returns false as soon as
    // any immediate condition fails or recursion is detected.
    public boolean isSatisfied(List posts, Permissions permissions,
        Permission permission)
    {
        if (m_conditionInfos == null)
        {
            return true;
        }

        boolean check = true;
        for (int i = 0; i < m_conditionInfos.length; i++)
        {
            if (m_module == null)
            {
                // TODO: check whether this is correct!
                break;
            }
            try
            {
                Condition condition = null;
                boolean add = false; // NOTE(review): unused local
                Class clazz = Class.forName(m_conditionInfos[i].getType());
                // Lazily instantiate the condition for this info, sharing it
                // through the cached array.
                synchronized (m_conditions)
                {
                    if (m_conditions[i] == null)
                    {
                        m_conditions[i] = createCondition(m_module.getBundle(),
                            clazz, m_conditionInfos[i]);
                    }
                    condition = m_conditions[i];
                }

                // Recursion guard: if this condition type is already being
                // evaluated on this thread, fail the evaluation.
                Object current = m_conditionStack.get();
                if (current != null)
                {
                    if (current instanceof HashSet)
                    {
                        if (((HashSet) current).contains(clazz))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        if (current == clazz)
                        {
                            return false;
                        }
                    }
                }

                if (condition.isPostponed())
                {
                    // First postponed condition: pre-check that the permission
                    // would be implied at all; afterwards skip that check.
                    if (check && !permissions.implies(permission, null))
                    {
                        return false;
                    }
                    else
                    {
                        check = false;
                    }
                    posts.add(new Object[] { condition, new Integer(i) });
                }
                else
                {
                    // Push this condition type onto the per-thread stack,
                    // promoting a single Class to a HashSet when nesting.
                    if (current == null)
                    {
                        m_conditionStack.set(clazz);
                    }
                    else
                    {
                        if (current instanceof HashSet)
                        {
                            if (((HashSet) current).contains(clazz))
                            {
                                return false;
                            }
                            ((HashSet) current).add(clazz);
                        }
                        else
                        {
                            if (current == clazz)
                            {
                                return false;
                            }
                            HashSet frame = new HashSet();
                            frame.add(current);
                            frame.add(clazz);
                            m_conditionStack.set(frame);
                            current = frame;
                        }
                    }
                    try
                    {
                        boolean mutable = condition.isMutable();
                        boolean result = condition.isSatisfied();
                        // Immutable conditions can never change their answer,
                        // so memoize them as the TRUE/FALSE singletons.
                        if (!mutable
                            && ((condition != Condition.TRUE) && (condition != Condition.FALSE)))
                        {
                            synchronized (m_conditions)
                            {
                                m_conditions[i] = result ? Condition.TRUE
                                    : Condition.FALSE;
                            }
                        }
                        if (!result)
                        {
                            return false;
                        }
                    }
                    finally
                    {
                        // Pop this condition type off the per-thread stack.
                        if (current == null)
                        {
                            m_conditionStack.set(null);
                        }
                        else
                        {
                            ((HashSet) current).remove(clazz);
                            if (((HashSet) current).isEmpty())
                            {
                                m_conditionStack.set(null);
                            }
                        }
                    }
                }
            }
            catch (Exception e)
            {
                // TODO: log this as per spec
                e.printStackTrace();
                return false;
            }
        }

        return true;
    }

    // Evaluates postponed condition/permission tuples.
    // "entries" is a list of lists; each inner element is an Object[] of
    // {ConditionalPermissionInfoImpl, List of {Condition, Integer} or null,
    // Conditions owner}. An entry with a null condition list is decided
    // purely by its allow/deny flag. Per-condition-class Dictionary contexts
    // are shared across one recursive evaluation pass.
    public boolean evalRecursive(List entries)
    {
        Map contexts = new HashMap();
        outer: for (Iterator iter = entries.iterator(); iter.hasNext();)
        {
            List tuples = (List) iter.next();
            inner: for (Iterator inner = tuples.iterator(); inner.hasNext();)
            {
                Object[] entry = (Object[]) inner.next();
                List conditions = (List) entry[1];
                if (conditions == null)
                {
                    // No postponed conditions: the tuple's allow/deny flag
                    // decides immediately.
                    if (!((ConditionalPermissionInfoImpl) entry[0]).isAllow())
                    {
                        return false;
                    }
                    continue outer;
                }
                for (Iterator iter2 = conditions.iterator(); iter2.hasNext();)
                {
                    Object[] condEntry = (Object[]) iter2.next();
                    Condition cond = (Condition) condEntry[0];
                    // One shared mutable context per condition class, as the
                    // Condition API's multi-evaluation form expects.
                    Dictionary context = (Dictionary) contexts.get(cond
                        .getClass());
                    if (context == null)
                    {
                        context = new Hashtable();
                        contexts.put(cond.getClass(), context);
                    }
                    // Push onto the recursion-guard stack (same scheme as in
                    // isSatisfied()).
                    Object current = m_conditionStack.get();
                    if (current == null)
                    {
                        m_conditionStack.set(cond.getClass());
                    }
                    else
                    {
                        if (current instanceof HashSet)
                        {
                            ((HashSet) current).add(cond.getClass());
                        }
                        else
                        {
                            HashSet frame = new HashSet();
                            frame.add(current);
                            frame.add(cond.getClass());
                            m_conditionStack.set(frame);
                            current = frame;
                        }
                    }
                    boolean result;

                    boolean mutable = cond.isMutable();
                    try
                    {
                        result = cond.isSatisfied(new Condition[] { cond },
                            context);
                    }
                    finally
                    {
                        // Pop off the recursion-guard stack.
                        if (current == null)
                        {
                            m_conditionStack.set(null);
                        }
                        else
                        {
                            ((HashSet) current).remove(cond.getClass());
                            if (((HashSet) current).isEmpty())
                            {
                                m_conditionStack.set(null);
                            }
                        }
                    }
                    // Memoize immutable results into the owning Conditions'
                    // shared array at the original index.
                    if (!mutable && (cond != Condition.TRUE)
                        && (cond != Condition.FALSE))
                    {
                        synchronized (((Conditions) entry[2]).m_conditions)
                        {
                            ((Conditions) entry[2]).m_conditions[((Integer) condEntry[1])
                                .intValue()] = result ? Condition.TRUE
                                : Condition.FALSE;
                        }
                    }
                    if (!result)
                    {
                        // This tuple failed; try the next tuple in the group.
                        continue inner;
                    }
                }
                if (!((ConditionalPermissionInfoImpl) entry[0]).isAllow())
                {
                    return false;
                }
                continue outer;
            }
            // No tuple in this group was satisfied.
            return false;
        }
        return true;
    }

    /**
     * Instantiates a Condition for the given info: first via the static
     * getCondition(Bundle, ConditionInfo) factory method required by the
     * spec, falling back to a (Bundle, ConditionInfo) constructor.
     */
    private Condition createCondition(final Bundle bundle, final Class clazz,
        final ConditionInfo info) throws Exception
    {
        try
        {
            return (Condition) m_action.getMethod(clazz, "getCondition",
                new Class[] { Bundle.class, ConditionInfo.class }).invoke(null,
                new Object[] { bundle, info });
        }
        catch (Exception ex)
        {
            // NOTE(review): printStackTrace on the expected fallback path is
            // noisy; per the TODOs above, this should go through a logger.
            ex.printStackTrace();
            return (Condition) m_action.getConstructor(clazz,
                new Class[] { Bundle.class, ConditionInfo.class }).newInstance(
                new Object[] { bundle, info });
        }
    }
}
| |
package com.aitesam.slate_nuces;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.cookie.Cookie;
import org.apache.http.impl.client.AbstractHttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.HttpConnectionParams;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences.Editor;
import android.util.Log;
import android.view.View;
import android.webkit.CookieManager;
import android.webkit.CookieSyncManager;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.Toast;
// Headless-ish login activity: posts credentials to the Slate portal, counts
// announcements on a tool page, harvests session cookies into Config and
// shared preferences, then launches MainActivity.
// NOTE(review): uses long-deprecated Apache HttpClient and AsyncTask APIs.
public class LoginServic extends Activity {
    // UI Objects
    // Http Objects
    private HttpClient httpClient = new DefaultHttpClient();
    public Cookie cookie = null;
    public String cookies2;
    public String cookieString;
    private static final int TIMEOUT_MS = 3000;
    private static final String redirURL = "http://slateisb.nu.edu.pk/portal/relogin";
    // Static variables shared with other classes.
    // Index of "alertMessage" in the login response; -1 means login succeeded.
    public int login_pass;
    public Editor mSetting;
    public Editor preferenceEditor;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        //setContentView(R.layout.activity_login_page);
        //Setting View of UI
        mSetting = PreferenceManager.getDefaultSharedPreferences(this).edit();
        preferenceEditor = getSharedPreferences("com.mycompany.android.myapp",MODE_PRIVATE).edit();
        Config.test_web="abc";
        if(isNetworkAvailable()){
            //First Setting Keys
            // Kick off the login in the background; UI feedback via Toast only.
            do_login mLogin=new do_login();
            mLogin.execute();
            Toast.makeText(getApplicationContext(), "Logging In...", Toast.LENGTH_SHORT).show();
        }
        else{
            Toast.makeText(getApplicationContext(), "No Network", Toast.LENGTH_SHORT).show();
        }
    }

    // Performs the login POST and cookie harvesting off the UI thread.
    private class do_login extends AsyncTask<String,Void,String>{
        @Override
        protected String doInBackground(String... params) {
            // TODO Auto-generated method stub
            HttpConnectionParams.setConnectionTimeout(httpClient.getParams(), TIMEOUT_MS);
            HttpConnectionParams.setSoTimeout(httpClient.getParams(), TIMEOUT_MS);
            HttpPost httpPost = new HttpPost(redirURL);
            List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
            nameValuePairs.add(new BasicNameValuePair("curl", "varl"));
            nameValuePairs.add(new BasicNameValuePair("flags", "0"));
            nameValuePairs.add(new BasicNameValuePair("forcedownlevel", "0"));
            nameValuePairs.add(new BasicNameValuePair("formdir", "9"));
            // NOTE(review): hardcoded credentials in source — these must come
            // from user input / secure storage, never be committed.
            nameValuePairs.add(new BasicNameValuePair("eid", "i120515"));//mRollNumber
            nameValuePairs.add(new BasicNameValuePair("pw","password123"));//mPass
            nameValuePairs.add(new BasicNameValuePair("trusted", "1"));
            HttpResponse end = null;
            try {
                httpPost.setEntity(new UrlEncodedFormEntity(nameValuePairs));
                HttpResponse response = httpClient.execute(httpPost);
                // Fetch the announcements tool page with the logged-in session.
                HttpResponse tResponse=httpClient.execute(new HttpGet("http://slateisb.nu.edu.pk/portal/pda/~"+"i120515"+"/tool/fe9a4b7a-6ed7-4324-8a6b-8e35d612dd25"));
                StatusLine mStatusLine=tResponse.getStatusLine();
                Log.d("Http Status code", String.valueOf(mStatusLine.getStatusCode()));
                ByteArrayOutputStream out2 = new ByteArrayOutputStream();
                tResponse.getEntity().writeTo(out2);
                out2.close();
                String responseStr = out2.toString();
                //String responseString2=responseStr.replaceFirst("View announcement", "");
                int number=anucount(responseStr);
                Log.d("PageResult", String.valueOf(number));
                //After Login
                List<Cookie> cookies = ((AbstractHttpClient) httpClient).getCookieStore().getCookies();
                // NOTE(review): this loop only keeps the LAST cookie in the
                // store; confirm that is the session cookie.
                for (int i = 0; i < cookies.size(); i++) {
                    cookie = cookies.get(i);
                }
                cookies2 = ((AbstractHttpClient) httpClient).getCookieStore().getCookies().toString();
                Log.d("cookie", cookies2);
                end = response;
                String deviceVersion= Build.VERSION.RELEASE;
                int SDK_INT = android.os.Build.VERSION.SDK_INT;
                String rs=Integer.toString(SDK_INT,10);
                Log.d("sdk",rs);
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                response.getEntity().writeTo(out);
                out.close();
                String responseString = out.toString();
                // "alertMessage" in the response body marks a failed login.
                login_pass=responseString.indexOf("alertMessage");
                Log.d("Login Response", responseString);
            } catch (Exception e) {
                // TODO Auto-generated catch block
                // NOTE(review): failures are only logged; login_pass keeps its
                // default 0, which onPostExecute treats as "failed".
                e.printStackTrace();
                Log.d("Http Request", e.toString());
            }
            // Export cookies for other components (e.g. WebViews).
            Config.mCookies = ((AbstractHttpClient) httpClient).getCookieStore().getCookies();
            List<Cookie> cookies = Config.mCookies;
            if (cookies != null && !cookies.isEmpty()) {
                CookieSyncManager.createInstance(LoginServic.this);
                // NOTE(review): cookieManager is unused since setCookie below
                // is commented out.
                CookieManager cookieManager = CookieManager.getInstance();
                for (Cookie cookie : cookies) {
                    Cookie sessionInfo = cookie;
                    Config.mCookieString = sessionInfo.getName() + "="
                        + sessionInfo.getValue() + "; domain="
                        + sessionInfo.getDomain();
                    //cookieManager.setCookie("http://www.slateisb.nu.edu.pk/portal", cookieString);
                    CookieSyncManager.getInstance().sync();
                    Log.d("beforeWebView", Config.mCookieString);
                }
            }
            String ab=Config.mCookieString;
            return ab;
        }

        // Counts occurrences of "View announcement" between the announcement
        // list and the page footer of the given HTML response.
        private int anucount(String responseString2) {
            // TODO Auto-generated method stub
            Date test = new Date();
            // NOTE(review): indexOf may return -1 here, which would make
            // substring() throw — assumes both markers are present.
            int start=responseString2.indexOf("View announcement");
            int end=responseString2.indexOf("pda-footer");
            responseString2=responseString2.substring(start, end);
            Log.d("In Function", test.toString());
            int number=0;
            int index=0;
            while(true){
                index=responseString2.indexOf("View announcement");
                if(index==-1){
                    break;
                }
                else{
                    number=number+1;
                    responseString2=responseString2.replaceFirst("View announcement", "");
                }
                //return 0;
            }
            return number;
        }

        @Override
        protected void onPostExecute(String result){
            // Persist the cookie string to a private file and preferences.
            writeToFile(result,"config1.txt");
            preferenceEditor.putString("cookie", result);
            preferenceEditor.commit();
            //ab.setText(result);
            Log.d("Login Pass",String.valueOf(login_pass));
            // NOTE(review): both branches are identical — MainActivity is
            // launched whether login succeeded (-1) or failed; the failure
            // toast is commented out. Confirm this is intentional.
            if((login_pass==-1)){
                Intent mMainIntent= new Intent(LoginServic.this,MainActivity.class);
                startActivity(mMainIntent);
                finish();
            }
            else{
                Intent mMainIntent= new Intent(LoginServic.this,MainActivity.class);
                startActivity(mMainIntent);
                finish();
                //Toast.makeText(getApplicationContext(), "Login Failed Please Try Again", Toast.LENGTH_LONG).show();
            }
            //System.exit(1);
            //Intent mMainIntent= new Intent(LoginPage.this,MainActivity.class);
            //startActivity(mMainIntent);
        }
    }

    // True when a network interface is up and connected.
    private boolean isNetworkAvailable() {
        ConnectivityManager connectivityManager
            = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
        return activeNetworkInfo != null && activeNetworkInfo.isConnected();
    }

    // Writes data to a private app file; failures are toasted and logged.
    public void writeToFile(String data,String mFileName) {
        try {
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(openFileOutput(mFileName, Context.MODE_PRIVATE));
            outputStreamWriter.write(data);
            outputStreamWriter.close();
        }
        catch (IOException e) {
            Log.e("Exception", "File write failed: " + e.toString());
            Toast.makeText(getApplicationContext(), e.toString(), Toast.LENGTH_LONG).show();
        }
    }

    /*@Override
    public boolean onCreateOptionsMenu(Menu menu) {
        return super.onCreateOptionsMenu(menu);
    }*/
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PrefixCodedTerms;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.HasChildQueryBuilder;
import org.elasticsearch.index.query.HasParentQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.termsLookupQuery;
import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery;
import static org.elasticsearch.percolator.PercolatorFieldMapper.EXTRACTION_COMPLETE;
import static org.elasticsearch.percolator.PercolatorFieldMapper.EXTRACTION_FAILED;
import static org.elasticsearch.percolator.PercolatorFieldMapper.EXTRACTION_PARTIAL;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
// Type and field names randomized per test by addQueryMapping().
private String typeName;
private String fieldName;
// Index and mapper service created fresh for every test in init().
private IndexService indexService;
private MapperService mapperService;
// Field type of the percolator field registered by addQueryMapping().
private PercolatorFieldMapper.FieldType fieldType;
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
    // Percolator under test, plus internal-settings and mock-script helpers.
    final Collection<Class<? extends Plugin>> plugins =
        pluginList(InternalSettingsPlugin.class, PercolatorPlugin.class, FoolMeScriptPlugin.class);
    return plugins;
}
@Before
public void init() throws Exception {
    // Fresh index and mapper service per test; registers the document fields
    // that later extraction tests reference.
    indexService = createIndex("test", Settings.EMPTY);
    mapperService = indexService.mapperService();
    String mapper = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_field_names").field("enabled", false).endObject() // makes testing easier
            .startObject("properties")
            .startObject("field").field("type", "text").endObject()
            .startObject("field1").field("type", "text").endObject()
            .startObject("field2").field("type", "text").endObject()
            .startObject("_field3").field("type", "text").endObject()
            .startObject("field4").field("type", "text").endObject()
            .startObject("number_field").field("type", "long").endObject()
            .startObject("date_field").field("type", "date").endObject()
            .endObject().endObject().endObject().string();
    mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true);
}
// Registers a percolator field under freshly randomized type/field names so
// repeated merges never collide, and caches its field type.
private void addQueryMapping() throws Exception {
    typeName = randomAsciiOfLength(4);
    fieldName = randomAsciiOfLength(4);
    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
                .startObject(typeName)
                    .startObject("properties")
                        .startObject(fieldName).field("type", "percolator").endObject()
                    .endObject()
                .endObject()
            .endObject();
    mapperService.merge(typeName, new CompressedXContent(mapping.string()),
            MapperService.MergeReason.MAPPING_UPDATE, true);
    fieldType = (PercolatorFieldMapper.FieldType) mapperService.fullName(fieldName);
}
public void testExtractTerms() throws Exception {
    addQueryMapping();
    // Two SHOULD term clauses: extraction of a pure term query is complete.
    BooleanQuery.Builder bq = new BooleanQuery.Builder();
    TermQuery termQuery1 = new TermQuery(new Term("field", "term1"));
    bq.add(termQuery1, BooleanClause.Occur.SHOULD);
    TermQuery termQuery2 = new TermQuery(new Term("field", "term2"));
    bq.add(termQuery2, BooleanClause.Occur.SHOULD);
    DocumentMapper documentMapper = mapperService.documentMapper(typeName);
    PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
    ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
            mapperService.documentMapperParser(), documentMapper, null, null);
    fieldMapper.processQuery(bq.build(), parseContext);
    ParseContext.Document document = parseContext.doc();
    PercolatorFieldMapper.FieldType fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();
    assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE));
    // Terms are encoded as "<field>\0<term>"; sort for deterministic order.
    List<IndexableField> fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name())));
    Collections.sort(fields, (field1, field2) -> field1.binaryValue().compareTo(field2.binaryValue()));
    assertThat(fields.size(), equalTo(2));
    assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field\u0000term1"));
    assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field\u0000term2"));
}
public void testExtractTermsAndRanges_failed() throws Exception {
    addQueryMapping();
    // A term-range query cannot be decomposed into exact terms, so term
    // extraction is expected to fail outright.
    TermRangeQuery rangeQuery = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true);
    DocumentMapper docMapper = mapperService.documentMapper(typeName);
    PercolatorFieldMapper percolatorMapper = (PercolatorFieldMapper) docMapper.mappers().getMapper(fieldName);
    ParseContext.InternalParseContext context = new ParseContext.InternalParseContext(Settings.EMPTY,
            mapperService.documentMapperParser(), docMapper, null, null);
    percolatorMapper.processQuery(rangeQuery, context);
    ParseContext.Document doc = context.doc();
    PercolatorFieldMapper.FieldType ft = (PercolatorFieldMapper.FieldType) percolatorMapper.fieldType();
    // Only the extraction-result marker is written; no query terms at all.
    assertThat(doc.getFields().size(), equalTo(1));
    assertThat(doc.getField(ft.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_FAILED));
}
public void testExtractTermsAndRanges_partial() throws Exception {
    addQueryMapping();
    // A phrase query yields at least one usable term, but not a complete
    // decomposition, so extraction is marked partial.
    PhraseQuery phraseQuery = new PhraseQuery("field", "term");
    DocumentMapper documentMapper = mapperService.documentMapper(typeName);
    PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
    ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY,
            mapperService.documentMapperParser(), documentMapper, null, null);
    fieldMapper.processQuery(phraseQuery, parseContext);
    ParseContext.Document document = parseContext.doc();
    PercolatorFieldMapper.FieldType fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();
    // One extracted term plus the extraction-result marker field.
    assertThat(document.getFields().size(), equalTo(2));
    assertThat(document.getFields().get(0).binaryValue().utf8ToString(), equalTo("field\u0000term"));
    assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
}
public void testCreateCandidateQuery() throws Exception {
    addQueryMapping();
    // Index a small document into a MemoryIndex and verify the candidate
    // query built from its reader: one TermInSetQuery over all extracted
    // "<field>\0<term>" combinations plus a clause matching stored queries
    // whose extraction failed (those must always be candidates).
    MemoryIndex memoryIndex = new MemoryIndex(false);
    memoryIndex.addField("field1", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer());
    memoryIndex.addField("field2", "some more text", new WhitespaceAnalyzer());
    memoryIndex.addField("_field3", "unhide me", new WhitespaceAnalyzer());
    memoryIndex.addField("field4", "123", new WhitespaceAnalyzer());
    memoryIndex.addField(new LongPoint("number_field", 10L), new WhitespaceAnalyzer());
    IndexReader indexReader = memoryIndex.createSearcher().getIndexReader();
    BooleanQuery candidateQuery = (BooleanQuery) fieldType.createCandidateQuery(indexReader);
    assertEquals(2, candidateQuery.clauses().size());
    assertEquals(Occur.SHOULD, candidateQuery.clauses().get(0).getOccur());
    TermInSetQuery termsQuery = (TermInSetQuery) candidateQuery.clauses().get(0).getQuery();
    // 14 terms: 2 (_field3) + 8 (field1) + 3 (field2) + 1 (field4); the long
    // point field contributes no text terms. Iterated in sorted term order.
    PrefixCodedTerms terms = termsQuery.getTermData();
    assertThat(terms.size(), equalTo(14L));
    PrefixCodedTerms.TermIterator termIterator = terms.iterator();
    assertTermIterator(termIterator, "_field3\u0000me", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "_field3\u0000unhide", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field1\u0000brown", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field1\u0000dog", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field1\u0000fox", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field1\u0000jumps", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field1\u0000lazy", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field1\u0000over", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field1\u0000quick", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field1\u0000the", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field2\u0000more", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field2\u0000some", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field2\u0000text", fieldType.queryTermsField.name());
    assertTermIterator(termIterator, "field4\u0000123", fieldType.queryTermsField.name());
    // Second clause catches queries whose term extraction failed.
    assertEquals(Occur.SHOULD, candidateQuery.clauses().get(1).getOccur());
    assertEquals(new TermQuery(new Term(fieldType.extractionResultField.name(), EXTRACTION_FAILED)),
        candidateQuery.clauses().get(1).getQuery());
}
// Advances the iterator one step and checks both the term's UTF-8 value and
// the field that owns it.
private void assertTermIterator(PrefixCodedTerms.TermIterator termIterator, String expectedValue, String expectedField) {
    BytesRef actualTerm = termIterator.next();
    assertThat(actualTerm.utf8ToString(), equalTo(expectedValue));
    assertThat(termIterator.field(), equalTo(expectedField));
}
public void testPercolatorFieldMapper() throws Exception {
    addQueryMapping();
    // A plain term query: terms are fully extracted, and the query builder
    // source is stored verbatim.
    QueryBuilder queryBuilder = termQuery("field", "value");
    ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject()
            .field(fieldName, queryBuilder)
            .endObject().bytes());
    assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(1));
    assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name())[0].binaryValue().utf8ToString(), equalTo("field\0value"));
    assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(1));
    assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1));
    assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(),
        equalTo(EXTRACTION_COMPLETE));
    BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
    assertQueryBuilder(qbSource, queryBuilder);

    // add a query from which no terms can be extracted: extraction is marked
    // failed, no terms are indexed, but the query source is still stored.
    queryBuilder = rangeQuery("field").from("a").to("z");
    doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject()
            .field(fieldName, queryBuilder)
            .endObject().bytes());
    assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1));
    assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(),
        equalTo(EXTRACTION_FAILED));
    assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(0));
    assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(1));
    qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
    assertQueryBuilder(qbSource, queryBuilder);
}
public void testStoringQueries() throws Exception {
    addQueryMapping();
    // Round-trip a variety of query builders through the percolator field and
    // verify the stored source deserializes back to the same query.
    QueryBuilder[] queries = new QueryBuilder[]{
        termQuery("field", "value"), matchAllQuery(), matchQuery("field", "value"), matchPhraseQuery("field", "value"),
        prefixQuery("field", "v"), wildcardQuery("field", "v*"), rangeQuery("number_field").gte(0).lte(9),
        rangeQuery("date_field").from("2015-01-01T00:00").to("2015-01-01T00:00")
    };
    // note: it is important that range queries never rewrite, otherwise it will cause results to be wrong.
    // (it can't use shard data for rewriting purposes, because percolator queries run on MemoryIndex)
    for (QueryBuilder query : queries) {
        ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
                XContentFactory.jsonBuilder().startObject()
                .field(fieldName, query)
                .endObject().bytes());
        BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
        assertQueryBuilder(qbSource, query);
    }
}
public void testQueryWithRewrite() throws Exception {
    addQueryMapping();
    // A terms-lookup query must be rewritten (lookup resolved) at index time;
    // the stored source should equal the REWRITTEN query, not the original.
    client().prepareIndex("remote", "type", "1").setSource("field", "value").get();
    QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "type", "1", "field"));
    ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject()
            .field(fieldName, queryBuilder)
            .endObject().bytes());
    BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
    // The shard context's now-supplier must never be needed during rewrite.
    assertQueryBuilder(qbSource, queryBuilder.rewrite(indexService.newQueryShardContext(
        randomInt(20), null, () -> { throw new UnsupportedOperationException(); })));
}
public void testPercolatorFieldMapperUnMappedField() throws Exception {
    addQueryMapping();
    // Indexing a percolator query that references an unmapped field must be
    // rejected at parse time with a QueryShardException as the cause.
    MapperParsingException exception = expectThrows(MapperParsingException.class, () ->
            mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .field(fieldName, termQuery("unmapped_field", "value"))
                    .endObject().bytes()));
    Throwable cause = exception.getCause();
    assertThat(cause, instanceOf(QueryShardException.class));
    assertThat(cause.getMessage(), equalTo("No field mapping can be found for the field with name [unmapped_field]"));
}
public void testPercolatorFieldMapper_noQuery() throws Exception {
    addQueryMapping();
    // A document that simply omits the percolator field is allowed; no query is indexed.
    ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
            XContentFactory.jsonBuilder().startObject()
                    .endObject().bytes());
    assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0));

    // An explicit null value for the percolator field must be rejected.
    // Bug fix: the original try/catch silently PASSED when no exception was thrown;
    // expectThrows (used throughout this file) makes the missing failure explicit.
    MapperParsingException e = expectThrows(MapperParsingException.class, () ->
            mapperService.documentMapper(typeName).parse("test", typeName, "1",
                    XContentFactory.jsonBuilder().startObject()
                            .nullField(fieldName)
                            .endObject().bytes()));
    assertThat(e.getDetailedMessage(), containsString("query malformed, must start with start_object"));
}
public void testAllowNoAdditionalSettings() throws Exception {
    addQueryMapping();
    // Deliberately shadow the class-level indexService/mapperService with a fresh index so
    // the malformed mapping below does not pollute the shared mapper service.
    IndexService indexService = createIndex("test1", Settings.EMPTY);
    MapperService mapperService = indexService.mapperService();
    // "index: no" is not a supported option for a percolator field; merging must fail.
    String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName)
            .startObject("properties").startObject(fieldName).field("type", "percolator").field("index", "no").endObject().endObject()
            .endObject().endObject().string();
    MapperParsingException e = expectThrows(MapperParsingException.class, () ->
            mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true));
    assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]"));
}
// multiple percolator fields are allowed in the mapping, but only one field can be used at index time.
public void testMultiplePercolatorFields() throws Exception {
    String typeName = "another_type";
    String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName)
            .startObject("_field_names").field("enabled", false).endObject() // makes testing easier
            .startObject("properties")
                .startObject("query_field1").field("type", "percolator").endObject()
                .startObject("query_field2").field("type", "percolator").endObject()
            .endObject()
            .endObject().endObject().string();
    mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true);

    // Populate both percolator fields in a single document.
    QueryBuilder queryBuilder = matchQuery("field", "value");
    ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
            jsonBuilder().startObject()
                    .field("query_field1", queryBuilder)
                    .field("query_field2", queryBuilder)
                    .endObject().bytes()
    );
    assertThat(doc.rootDoc().getFields().size(), equalTo(14)); // also includes all other meta fields
    // Each percolator field stores its own serialized copy of the query.
    BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue();
    assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
    queryBuilderAsBytes = doc.rootDoc().getField("query_field2.query_builder_field").binaryValue();
    assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
}
// percolator field can be nested under an object field, but only one query can be specified per document
public void testNestedPercolatorField() throws Exception {
    String typeName = "another_type";
    String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName)
            .startObject("_field_names").field("enabled", false).endObject() // makes testing easier
            .startObject("properties")
                .startObject("object_field")
                    .field("type", "object")
                    .startObject("properties")
                        .startObject("query_field").field("type", "percolator").endObject()
                    .endObject()
                .endObject()
            .endObject()
            .endObject().endObject().string();
    mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true);

    QueryBuilder queryBuilder = matchQuery("field", "value");
    // Case 1: the percolator field inside a plain object field.
    ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
            jsonBuilder().startObject().startObject("object_field")
                    .field("query_field", queryBuilder)
                    .endObject().endObject().bytes()
    );
    assertThat(doc.rootDoc().getFields().size(), equalTo(11)); // also includes all other meta fields
    BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
    assertQueryBuilder(queryBuilderAsBytes, queryBuilder);

    // Case 2: the object field wrapped in an array with a single element — still one query, allowed.
    doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
            jsonBuilder().startObject()
                    .startArray("object_field")
                        .startObject().field("query_field", queryBuilder).endObject()
                    .endArray()
                    .endObject().bytes()
    );
    assertThat(doc.rootDoc().getFields().size(), equalTo(11)); // also includes all other meta fields
    queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
    assertQueryBuilder(queryBuilderAsBytes, queryBuilder);

    // Case 3: two array elements — two percolator queries in one document must be rejected.
    MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
            mapperService.documentMapper(typeName).parse("test", typeName, "1",
                    jsonBuilder().startObject()
                            .startArray("object_field")
                                .startObject().field("query_field", queryBuilder).endObject()
                                .startObject().field("query_field", queryBuilder).endObject()
                            .endArray()
                            .endObject().bytes()
            );
        }
    );
    assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
    assertThat(e.getCause().getMessage(), equalTo("a document can only contain one percolator query"));
}
public void testRangeQueryWithNowRangeIsForbidden() throws Exception {
    addQueryMapping();
    // Percolator queries run against an in-memory index at percolate time, so a range bound
    // based on `now` would yield time-dependent results; every now-based bound must be
    // rejected at index time. (Refactored: five copy-pasted expectThrows blocks → one helper.)
    assertNowBasedRangeQueryRejected(rangeQuery("date_field").from("2016-01-01||/D").to("now"));
    assertNowBasedRangeQueryRejected(rangeQuery("date_field").from("2016-01-01||/D").to("now/D"));
    assertNowBasedRangeQueryRejected(rangeQuery("date_field").from("now-1d").to("now"));
    assertNowBasedRangeQueryRejected(rangeQuery("date_field").from("now"));
    assertNowBasedRangeQueryRejected(rangeQuery("date_field").to("now"));
}

/**
 * Indexes a percolator document containing the given range query and asserts that parsing
 * fails with an {@link IllegalArgumentException} as the cause.
 */
private void assertNowBasedRangeQueryRejected(RangeQueryBuilder rangeQuery) throws IOException {
    MapperParsingException e = expectThrows(MapperParsingException.class, () ->
            mapperService.documentMapper(typeName).parse("test", typeName, "1",
                    jsonBuilder().startObject()
                            .field(fieldName, rangeQuery)
                            .endObject().bytes()));
    assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
}
// https://github.com/elastic/elasticsearch/issues/22355
public void testVerifyRangeQueryWithNullBounds() throws Exception {
    addQueryMapping();
    // A null bound must not bypass the now-based-range validation on the other bound.
    MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
            mapperService.documentMapper(typeName).parse("test", typeName, "1",
                    jsonBuilder().startObject()
                            .field(fieldName, rangeQuery("date_field").from("now").to(null))
                            .endObject().bytes());
        }
    );
    assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
    e = expectThrows(MapperParsingException.class, () -> {
            mapperService.documentMapper(typeName).parse("test", typeName, "1",
                    jsonBuilder().startObject()
                            .field(fieldName, rangeQuery("date_field").from(null).to("now"))
                            .endObject().bytes());
        }
    );
    assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));

    // No validation failures: null bounds combined with fixed dates are fine.
    mapperService.documentMapper(typeName).parse("test", typeName, "1",
            jsonBuilder().startObject()
                    .field(fieldName, rangeQuery("date_field").from("2016-01-01").to(null))
                    .endObject().bytes());
    mapperService.documentMapper(typeName).parse("test", typeName, "1",
            jsonBuilder().startObject()
                    .field(fieldName, rangeQuery("date_field").from(null).to("2016-01-01"))
                    .endObject().bytes());
}
public void testUnsupportedQueries() {
    // rangeQuery1 uses fixed dates (allowed); rangeQuery2 uses "now" (forbidden), also when
    // wrapped inside compound queries.
    RangeQueryBuilder rangeQuery1 = new RangeQueryBuilder("field").from("2016-01-01||/D").to("2017-01-01||/D");
    RangeQueryBuilder rangeQuery2 = new RangeQueryBuilder("field").from("2016-01-01||/D").to("now");
    PercolatorFieldMapper.verifyQuery(rangeQuery1);
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(rangeQuery2));
    PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(rangeQuery1));
    expectThrows(IllegalArgumentException.class, () ->
            PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(rangeQuery2)));
    PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder((rangeQuery1)));
    expectThrows(IllegalArgumentException.class, () ->
            PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder(rangeQuery2)));
    PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(rangeQuery1, new MatchAllQueryBuilder()));
    expectThrows(IllegalArgumentException.class, () ->
            PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(rangeQuery2, new MatchAllQueryBuilder())));
    PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(rangeQuery1, new RandomScoreFunctionBuilder()));
    expectThrows(IllegalArgumentException.class, () ->
            PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(rangeQuery2, new RandomScoreFunctionBuilder())));

    // has_child / has_parent need join data that a percolator's MemoryIndex cannot provide;
    // they are rejected outright, directly or nested.
    HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None);
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasChildQuery));
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery)));
    HasParentQueryBuilder hasParentQuery = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false);
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasParentQuery));
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasParentQuery)));
}
/**
 * Deserializes {@code actual} (the bytes stored in the query-builder field) back into a
 * {@link QueryBuilder} and asserts it equals {@code expected}.
 */
private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws IOException {
    XContentParser sourceParser = createParser(PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent(),
            new BytesArray(actual));
    QueryParseContext qsc = indexService.newQueryShardContext(
            randomInt(20), null, () -> { throw new UnsupportedOperationException(); })
            .newParseContext(sourceParser);
    assertThat(qsc.parseInnerQueryBuilder(), equalTo(expected));
}
public void testEmptyName() throws Exception {
    // after 5.x: an empty field name is rejected at mapping-parse time
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("").field("type", "percolator").endObject().endObject()
            .endObject().endObject().string();
    DocumentMapperParser parser = mapperService.documentMapperParser();
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> parser.parse("type1", new CompressedXContent(mapping))
    );
    assertThat(e.getMessage(), containsString("name cannot be empty string"));

    // before 5.x: the same mapping is still accepted for backwards compatibility
    Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
    Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
    DocumentMapperParser parser2x = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser();
    DocumentMapper defaultMapper = parser2x.parse("type1", new CompressedXContent(mapping));
    assertEquals(mapping, defaultMapper.mappingSource().string());
}
public void testImplicitlySetDefaultScriptLang() throws Exception {
    addQueryMapping();
    // Build a script query with no explicit "lang"; after indexing, the stored query must
    // carry the default script language made explicit.
    XContentBuilder query = jsonBuilder();
    query.startObject();
    query.startObject("script");
    if (randomBoolean()) {
        // short form: "script": "return true"
        query.field("script", "return true");
    } else {
        // object form: "script": { "inline": "return true" }
        query.startObject("script");
        query.field("inline", "return true");
        query.endObject();
    }
    query.endObject();
    query.endObject();

    ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
            XContentFactory.jsonBuilder().startObject()
                    .rawField(fieldName, new BytesArray(query.string()), query.contentType())
                    .endObject().bytes());
    BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
    Map<String, Object> parsedQuery = XContentHelper.convertToMap(new BytesArray(querySource), true).v2();
    assertEquals(Script.DEFAULT_SCRIPT_LANG, XContentMapValues.extractValue("script.script.lang", parsedQuery));

    // Same check for a script nested inside a function_score's script_score function.
    query = jsonBuilder();
    query.startObject();
    query.startObject("function_score");
    query.startArray("functions");
    query.startObject();
    query.startObject("script_score");
    if (randomBoolean()) {
        query.field("script", "return true");
    } else {
        query.startObject("script");
        query.field("inline", "return true");
        query.endObject();
    }
    query.endObject();
    query.endObject();
    query.endArray();
    query.endObject();
    query.endObject();

    doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
            XContentFactory.jsonBuilder().startObject()
                    .rawField(fieldName, new BytesArray(query.string()), query.contentType())
                    .endObject().bytes());
    querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
    parsedQuery = XContentHelper.convertToMap(new BytesArray(querySource), true).v2();
    assertEquals(Script.DEFAULT_SCRIPT_LANG,
            ((List) XContentMapValues.extractValue("function_score.functions.script_score.script.lang", parsedQuery)).get(0));
}
// Just so that we store scripts in percolator queries, but not really execute these scripts.
public static class FoolMeScriptPlugin extends MockScriptPlugin {

    // Maps the literal script source "return true" to a function that always returns true.
    @Override
    protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
        return Collections.singletonMap("return true", (vars) -> true);
    }

    // Registers under the default script language so the queries above resolve to this plugin.
    @Override
    public String pluginScriptLang() {
        return Script.DEFAULT_SCRIPT_LANG;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Ilya S. Okomin
*/
package net.windward.android.awt;
import net.windward.android.awt.font.FontRenderContext;
import net.windward.android.awt.font.LineMetrics;
import net.windward.android.awt.geom.Rectangle2D;
import org.apache.harmony.awt.internal.nls.Messages;
import java.io.Serializable;
import java.text.CharacterIterator;
/**
 * Abstract base class describing the metrics of a {@link Font}: ascent, descent,
 * leading, character advances and string bounds. Subclasses override the metric
 * getters; this base provides the composite calculations and argument checking.
 */
public abstract class FontMetrics implements Serializable {
    private static final long serialVersionUID = 1681126225205050147L;

    /** The font whose metrics this object describes. */
    protected Font font;

    /**
     * Creates metrics for the given font.
     *
     * @param fnt the font to be measured
     */
    protected FontMetrics(Font fnt) {
        this.font = fnt;
    }

    @Override
    public String toString() {
        // Fix: the original omitted the ", " separator before "ascent", producing
        // "[font=<font>ascent=..."; all fields now use a uniform separator.
        return this.getClass().getName() +
                "[font=" + this.getFont() + //$NON-NLS-1$
                ", ascent=" + this.getAscent() + //$NON-NLS-1$
                ", descent=" + this.getDescent() + //$NON-NLS-1$
                ", height=" + this.getHeight() + "]"; //$NON-NLS-1$ //$NON-NLS-2$
    }

    /** @return the font these metrics describe */
    public Font getFont() {
        return font;
    }

    /** @return the standard line height: ascent + descent + leading */
    public int getHeight() {
        return this.getAscent() + this.getDescent() + this.getLeading();
    }

    /** @return the font ascent; this base implementation returns 0 */
    public int getAscent() {
        return 0;
    }

    /** @return the font descent; this base implementation returns 0 */
    public int getDescent() {
        return 0;
    }

    /** @return the leading (inter-line spacing); this base implementation returns 0 */
    public int getLeading() {
        return 0;
    }

    public LineMetrics getLineMetrics(CharacterIterator ci, int beginIndex,
            int limit, Graphics context) {
        return font.getLineMetrics(ci, beginIndex, limit,
                this.getFRCFromGraphics(context));
    }

    public LineMetrics getLineMetrics(String str, Graphics context) {
        return font.getLineMetrics(str, this.getFRCFromGraphics(context));
    }

    public LineMetrics getLineMetrics(char[] chars, int beginIndex, int limit,
            Graphics context) {
        return font.getLineMetrics(chars, beginIndex, limit,
                this.getFRCFromGraphics(context));
    }

    public LineMetrics getLineMetrics(String str, int beginIndex, int limit,
            Graphics context) {
        return font.getLineMetrics(str, beginIndex, limit,
                this.getFRCFromGraphics(context));
    }

    public Rectangle2D getMaxCharBounds(Graphics context) {
        return this.font.getMaxCharBounds(this.getFRCFromGraphics(context));
    }

    public Rectangle2D getStringBounds(CharacterIterator ci, int beginIndex,
            int limit, Graphics context) {
        return font.getStringBounds(ci, beginIndex, limit,
                this.getFRCFromGraphics(context));
    }

    public Rectangle2D getStringBounds(String str, int beginIndex, int limit,
            Graphics context) {
        return font.getStringBounds(str, beginIndex, limit,
                this.getFRCFromGraphics(context));
    }

    public Rectangle2D getStringBounds(char[] chars, int beginIndex, int limit,
            Graphics context) {
        return font.getStringBounds(chars, beginIndex, limit,
                this.getFRCFromGraphics(context));
    }

    public Rectangle2D getStringBounds(String str, Graphics context) {
        return font.getStringBounds(str, this.getFRCFromGraphics(context));
    }

    public boolean hasUniformLineMetrics() {
        return this.font.hasUniformLineMetrics();
    }

    /**
     * Sums the advance widths of {@code len} bytes starting at {@code off}.
     * Note: bytes are widened to int, so values above 127 arrive as negative
     * code points — preserved from the original implementation.
     *
     * @throws IllegalArgumentException if the requested range is invalid
     */
    public int bytesWidth(byte[] data, int off, int len) {
        checkRange(off, len, data.length);
        int width = 0;
        for (int i = off; i < off + len; i++) {
            width += charWidth(data[i]);
        }
        return width;
    }

    /**
     * Sums the advance widths of {@code len} chars starting at {@code off}.
     *
     * @throws IllegalArgumentException if the requested range is invalid
     */
    public int charsWidth(char[] data, int off, int len) {
        checkRange(off, len, data.length);
        int width = 0;
        for (int i = off; i < off + len; i++) {
            width += charWidth(data[i]);
        }
        return width;
    }

    /**
     * Shared range validation, extracted from the duplicated checks that were
     * inlined in both bytesWidth and charsWidth. Semantics are unchanged.
     */
    private static void checkRange(int off, int len, int arrayLength) {
        if ((off >= arrayLength) || (off < 0)) {
            // awt.13B=offset off is out of range
            throw new IllegalArgumentException(Messages.getString("awt.13B")); //$NON-NLS-1$
        }
        if (off + len > arrayLength) {
            // awt.13C=number of elements len is out of range
            throw new IllegalArgumentException(Messages.getString("awt.13C")); //$NON-NLS-1$
        }
    }

    /** @return the advance of the given code point; this base implementation returns 0 */
    public int charWidth(int ch) {
        return 0;
    }

    /** @return the advance of the given char; this base implementation returns 0 */
    public int charWidth(char ch) {
        return 0;
    }

    public int getMaxAdvance() {
        return 0;
    }

    public int getMaxAscent() {
        return 0;
    }

    /**
     * @deprecated misspelled legacy alias kept for API compatibility; use
     *             {@link #getMaxDescent()} instead.
     */
    @Deprecated
    public int getMaxDecent() {
        return 0;
    }

    public int getMaxDescent() {
        return 0;
    }

    /** @return per-char widths table; this base implementation returns null */
    public int[] getWidths() {
        return null;
    }

    public int stringWidth(String str) {
        return 0;
    }

    /**
     * Returns FontRenderContext instance of the Graphics context specified.
     * Falls back to a default (identity transform, no antialiasing) context for
     * non-Graphics2D instances.
     *
     * @param context the specified Graphics context
     * @return a FontRenderContext of the specified Graphics context.
     */
    private FontRenderContext getFRCFromGraphics(Graphics context) {
        FontRenderContext frc;
        if (context instanceof Graphics2D) {
            frc = ((Graphics2D) context).getFontRenderContext();
        } else {
            frc = new FontRenderContext(null, false, false);
        }
        return frc;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.protocol.http;
import org.apache.wicket.request.ILogData;
import org.apache.wicket.request.ILoggableRequestHandler;
import org.apache.wicket.request.IRequestHandler;
import org.apache.wicket.request.handler.logger.NoLogData;
import org.apache.wicket.session.ISessionStore;
import org.apache.wicket.util.io.IClusterable;
import org.apache.wicket.util.string.Strings;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Interface for the request logger and viewer.
*
* @see org.apache.wicket.Application#newRequestLogger()
*
* @author jcompagner
*/
public interface IRequestLogger
{
	/**
	 * @return The total created sessions counter
	 */
	int getTotalCreatedSessions();

	/**
	 * @return The peak sessions counter
	 */
	int getPeakSessions();

	/**
	 * This method returns a List of the current requests that are held in memory. This is a
	 * read-only list.
	 *
	 * @return Collection of the current requests
	 */
	List<RequestData> getRequests();

	/**
	 * @return Collection of live Sessions Data
	 */
	SessionData[] getLiveSessions();

	/**
	 * @return The current active requests
	 */
	int getCurrentActiveRequestCount();

	/**
	 * @return The {@link org.apache.wicket.protocol.http.IRequestLogger.RequestData} for the current request.
	 */
	RequestData getCurrentRequest();

	/**
	 * @return The peak active requests
	 */
	int getPeakActiveRequestCount();

	/**
	 * @return The number of requests per minute.
	 */
	long getRequestsPerMinute();

	/**
	 * @return The average request time in milliseconds.
	 */
	long getAverageRequestTime();

	/**
	 * called when the session is created and has an id. (for http it means that the http session is
	 * created)
	 *
	 * @param id
	 *            the session id
	 */
	void sessionCreated(String id);

	/**
	 * Method used to clean up a live session when the session was invalidated by the web container
	 *
	 * @param sessionId
	 *            the session id
	 */
	void sessionDestroyed(String sessionId);

	/**
	 * This method is called when the request is over. This will set the total time a request takes
	 * and cleans up the current request data.
	 *
	 * @param timeTaken
	 *            the time taken in milliseconds
	 */
	void requestTime(long timeTaken);

	/**
	 * Called to monitor removals of objects out of the {@link ISessionStore}
	 *
	 * @param value
	 *            the object being removed
	 */
	void objectRemoved(Object value);

	/**
	 * Called to monitor updates of objects in the {@link ISessionStore}
	 *
	 * @param value
	 *            the object being updated
	 */
	void objectUpdated(Object value);

	/**
	 * Called to monitor additions of objects in the {@link ISessionStore}
	 *
	 * @param value
	 *            the object being created/added
	 */
	void objectCreated(Object value);

	/**
	 * Sets the target that was the response target for the current request
	 *
	 * @param target
	 *            the response target
	 */
	void logResponseTarget(IRequestHandler target);

	/**
	 * Sets the target that was the event target for the current request
	 *
	 * @param target
	 *            the event target
	 */
	void logEventTarget(IRequestHandler target);

	/**
	 * Logs the URL that was requested by the browser.
	 *
	 * @param url
	 *            the requested URL
	 */
	void logRequestedUrl(String url);

	/**
	 * Perform the actual logging
	 */
	void performLogging();
/**
* This class hold the information one request of a session has.
*
* @author jcompagner
*/
class SessionData implements IClusterable, Comparable<SessionData>
{
private static final long serialVersionUID = 1L;
private final String sessionId;
private final long startDate;
private long lastActive;
private long numberOfRequests;
private long totalTimeTaken;
private long sessionSize;
private Object sessionInfo;
/**
* Construct.
*
* @param sessionId
*/
public SessionData(String sessionId)
{
this.sessionId = sessionId;
startDate = System.currentTimeMillis();
numberOfRequests = 1;
}
/**
* @return The last active date.
*/
public Date getLastActive()
{
return new Date(lastActive);
}
/**
* @return The start date of this session
*/
public Date getStartDate()
{
return new Date(startDate);
}
/**
* @return The number of request for this session
*/
public long getNumberOfRequests()
{
return numberOfRequests;
}
/**
* @return Returns the session size.
*/
public long getSessionSize()
{
return sessionSize;
}
/**
* @return Returns the total time this session has spent in ms.
*/
public long getTotalTimeTaken()
{
return totalTimeTaken;
}
/**
* @return The session info object given by the {@link ISessionLogInfo#getSessionInfo()}
* session method.
*/
public Object getSessionInfo()
{
return sessionInfo;
}
/**
* @return The session id
*/
public String getSessionId()
{
return sessionId;
}
/**
* Adds {@code time} to the total server time.
*
* @param time
*/
public void addTimeTaken(long time)
{
lastActive = System.currentTimeMillis();
numberOfRequests++;
totalTimeTaken += time;
}
/**
* Sets additional session info (e.g. logged in user).
*
* @param sessionInfo
*/
public void setSessionInfo(Object sessionInfo)
{
this.sessionInfo = sessionInfo;
}
/**
* Sets the recorded session size.
*
* @param size
*/
public void setSessionSize(long size)
{
sessionSize = size;
}
@Override
public int compareTo(SessionData sd)
{
if (sd.startDate > startDate)
{
return 1;
}
else if (sd.startDate < startDate)
{
return -1;
}
return 0;
}
}
	/**
	 * This class holds the information one request of a session has.
	 *
	 * @author jcompagner
	 */
	class RequestData implements IClusterable
	{
		private static final long serialVersionUID = 1L;

		private long startDate;
		private long timeTaken;
		private final List<String> entries = new ArrayList<>(5);
		private Map<String, Object> userData;
		private String requestedUrl;
		private IRequestHandler eventTarget;
		private IRequestHandler responseTarget;
		private String sessionId;
		private long totalSessionSize;
		private Object sessionInfo;
		private int activeRequest;

		/**
		 * @return The time taken for this request in milliseconds
		 */
		public Long getTimeTaken()
		{
			return timeTaken;
		}

		/**
		 * @param activeRequest
		 *            The number of active request when this request happened
		 */
		public void setActiveRequest(int activeRequest)
		{
			this.activeRequest = activeRequest;
		}

		/**
		 * @return The number of active request when this request happened
		 */
		public int getActiveRequest()
		{
			return activeRequest;
		}

		/**
		 * @return The session object info, created by {@link ISessionLogInfo#getSessionInfo()}
		 */
		public Object getSessionInfo()
		{
			return sessionInfo;
		}

		/**
		 * Set the session info object of the session for this request.
		 *
		 * @param sessionInfo
		 */
		public void setSessionInfo(Object sessionInfo)
		{
			this.sessionInfo = sessionInfo;
		}

		/**
		 * @param sizeInBytes
		 *            the recorded session size in bytes
		 */
		public void setSessionSize(long sizeInBytes)
		{
			totalSessionSize = sizeInBytes;
		}

		/**
		 * @param id
		 *            the session id for this request
		 */
		public void setSessionId(String id)
		{
			sessionId = id;
		}

		/**
		 * @return The start date of this request (the original javadoc wrongly said
		 *         "time taken"; see setTimeTaken for how this is derived)
		 */
		public Date getStartDate()
		{
			return new Date(startDate);
		}

		/**
		 * @return The event target
		 */
		public IRequestHandler getEventTarget()
		{
			return eventTarget;
		}

		/**
		 * @return The class of the event target
		 */
		public Class<? extends IRequestHandler> getEventTargetClass()
		{
			return eventTarget == null ? null : eventTarget.getClass();
		}

		/**
		 * @return The log data for the eventTarget, or {@link NoLogData} if the request handler is
		 *         not loggable
		 */
		public ILogData getEventTargetLog()
		{
			if (eventTarget instanceof ILoggableRequestHandler)
				return ((ILoggableRequestHandler)eventTarget).getLogData();
			return new NoLogData();
		}

		/**
		 * @return The response target
		 */
		public IRequestHandler getResponseTarget()
		{
			return responseTarget;
		}

		/**
		 * @return The class of the response target
		 */
		public Class<? extends IRequestHandler> getResponseTargetClass()
		{
			return responseTarget == null ? null : responseTarget.getClass();
		}

		/**
		 * @return The log data for the responseTarget, or {@link NoLogData} if the request handler
		 *         is not loggable
		 */
		public ILogData getResponseTargetLog()
		{
			if (responseTarget instanceof ILoggableRequestHandler)
				return ((ILoggableRequestHandler)responseTarget).getLogData();
			return new NoLogData();
		}

		/**
		 * @return the requested URL by the browser
		 */
		public String getRequestedUrl()
		{
			return requestedUrl;
		}

		/**
		 * @param requestedUrl
		 *            the URL requested by the browser
		 */
		public void setRequestedUrl(String requestedUrl)
		{
			this.requestedUrl = requestedUrl;
		}

		/**
		 * @param target
		 *            the response target for this request
		 */
		public void setResponseTarget(IRequestHandler target)
		{
			responseTarget = target;
		}

		/**
		 * @param target
		 *            the event target for this request
		 */
		public void setEventTarget(IRequestHandler target)
		{
			eventTarget = target;
		}

		/**
		 * Sets the time taken and derives the start date by subtracting it from "now".
		 *
		 * @param timeTaken
		 *            the time taken in milliseconds
		 */
		public void setTimeTaken(long timeTaken)
		{
			this.timeTaken = timeTaken;
			startDate = System.currentTimeMillis() - timeTaken;
		}

		/**
		 * @param string
		 *            description of an altered object to record for this request
		 */
		public void addEntry(String string)
		{
			entries.add(string);
		}

		/**
		 * @param key
		 * @param value
		 */
		public void addUserData(String key, Object value)
		{
			getUserData().put(key, value);
		}

		/**
		 * @param key
		 * @return the user data stored under the given key, or null
		 */
		public Object getUserData(String key)
		{
			return getUserData().get(key);
		}

		/**
		 * @return the userData Map, lazily created on first access
		 */
		public Map<String, Object> getUserData()
		{
			if (userData == null) {
				userData = new HashMap<>();
			}
			return userData;
		}

		/**
		 * @return All entries of the objects that are created/updated or removed in this request
		 */
		public String getAlteredObjects()
		{
			return Strings.join(", ", entries);
		}

		/**
		 * @return The session id for this request
		 */
		public String getSessionId()
		{
			return sessionId;
		}

		/**
		 * @return The total session size.
		 */
		public Long getSessionSize()
		{
			return totalSessionSize;
		}

		@Override
		public String toString()
		{
			return "Request[timetaken=" + getTimeTaken() + ",sessioninfo=" + sessionInfo +
				",sessionid=" + sessionId + ",sessionsize=" + totalSessionSize + ",request=" +
				eventTarget + ",response=" + responseTarget + ",alteredobjects=" +
				getAlteredObjects() + ",activerequest=" + activeRequest + "]";
		}
	}
/**
* This interface can be implemented in a custom session object. to give an object that has more
* information for the current session (state of session).
*
* @author jcompagner
*/
interface ISessionLogInfo
{
/**
* If you use the request logger log functionality then this object should have a nice
* String representation. So make sure that the toString() is implemented for the returned
* object.
*
* @return The custom object stored in the request loggers current request.
*/
Object getSessionInfo();
}
}
| |
/*
* Copyright 2017 Riyaz Ahamed
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ahamed.multiviewadapter;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.RestrictTo;
import android.support.v7.util.ListUpdateCallback;
import com.ahamed.multiviewadapter.listener.ItemSelectionChangedListener;
import com.ahamed.multiviewadapter.listener.MultiSelectionChangedListener;
import java.util.ArrayList;
import java.util.List;
class BaseDataManager<M> implements ListUpdateCallback {

  // Adapter that owns this data manager; receives all list-update notifications.
  final RecyclerAdapter adapter;
  // Items managed by this instance (presumably the displayed data — confirm against subclasses).
  private List<M> dataList = new ArrayList<>();
  // Currently selected items; pruned against the live data in getSelectedItems().
  private List<M> selectedItems = new ArrayList<>();
  private ItemSelectionChangedListener<M> itemSelectionChangedListener;
  private MultiSelectionChangedListener<M> multiSelectionChangedListener;

  BaseDataManager(@NonNull RecyclerAdapter adapter) {
    this.adapter = adapter;
  }
  // The four ListUpdateCallback overrides below translate positions relative to this data
  // manager into adapter-wide notifications by passing `this` along.

  @RestrictTo(RestrictTo.Scope.LIBRARY) @Override
  public final void onInserted(int position, int count) {
    adapter.notifyBinderItemRangeInserted(this, position, count);
  }

  @RestrictTo(RestrictTo.Scope.LIBRARY) @Override
  public final void onRemoved(int position, int count) {
    adapter.notifyBinderItemRangeRemoved(this, position, count);
  }

  @RestrictTo(RestrictTo.Scope.LIBRARY) @Override
  public final void onMoved(int fromPosition, int toPosition) {
    adapter.notifyBinderItemMoved(this, fromPosition, toPosition);
  }

  @RestrictTo(RestrictTo.Scope.LIBRARY) @Override
  public final void onChanged(int position, int count, Object payload) {
    adapter.notifyBinderItemRangeChanged(this, position, count, payload);
  }
/**
* This method is used to get the selected items in a {@link DataListManager} or {@link
* DataItemManager}. It should be used in conjunction with the {@link SelectableAdapter}
*
* @return List of selected items or empty list
*/
public final List<M> getSelectedItems() {
List<M> selectedItemsList = new ArrayList<>();
for (M m : selectedItems) {
if (contains(m)) {
selectedItemsList.add(m);
}
}
selectedItems = selectedItemsList;
return selectedItemsList;
}
/**
* This method is used to set the selected items in a {@link DataListManager} or {@link
* DataItemManager}. It should be used in conjunction with the {@link SelectableAdapter}.
* Exception will be thrown if calling {@link DataListManager} is not used in the {@link
* SelectableAdapter}.
*
* @param selectedItems List of selected items
*/
public final void setSelectedItems(@NonNull List<M> selectedItems) {
if (!(adapter instanceof SelectableAdapter)) {
throw new IllegalStateException(
"Make sure your adapter extends from com.ahamed.multiviewadapter.SelectableAdapter");
}
if (size() < 0) {
return;
}
List<M> oldSelectedItems = new ArrayList<>(this.selectedItems);
this.selectedItems.clear();
for (M m : selectedItems) {
int index = indexOf(m);
if (!oldSelectedItems.contains(m)) {
onItemSelectionToggled(index, true);
} else {
this.selectedItems.add(m);
}
}
for (M m : oldSelectedItems) {
int index = indexOf(m);
if (!selectedItems.contains(m)) {
onItemSelectionToggled(index, false);
}
}
}
/**
* This method is used to clear the selected items in a {@link DataListManager} or {@link
* DataItemManager}. It should be used in conjunction with the {@link SelectableAdapter}.
* Exception will be thrown if calling {@link DataListManager} is not used in the {@link
* SelectableAdapter}.
*/
public final void clearSelectedItems() {
if (!(adapter instanceof SelectableAdapter)) {
throw new IllegalStateException(
"Make sure your adapter extends from com.ahamed.multiviewadapter.SelectableAdapter");
}
if (size() < 0) {
return;
}
this.selectedItems.clear();
onChanged(0, size(), null);
}
/**
* This method is used to get the selected item in a {@link DataListManager} or {@link
* DataItemManager}. It should be used in conjunction with the {@link SelectableAdapter}
*
* @return Selected item or null
*/
@Nullable public final M getSelectedItem() {
if (selectedItems.size() > 0) {
return selectedItems.get(0);
}
return null;
}
/**
* This method is used to set the selected item in a {@link DataListManager} or {@link
* DataItemManager}. It should be used in conjunction with the {@link SelectableAdapter}.
* Exception will be thrown if calling {@link DataListManager} is not used in the {@link
* SelectableAdapter}.
*
* @param selectedItem Selected item
*/
public final void setSelectedItem(@NonNull M selectedItem) {
if (!(adapter instanceof SelectableAdapter)) {
throw new IllegalStateException(
"Make sure your adapter extends from com.ahamed.multiviewadapter.SelectableAdapter");
}
if (size() < 0) {
return;
}
M previousSelectedItem = getSelectedItem();
int index = indexOf(selectedItem);
if (index != -1 && !selectedItem.equals(previousSelectedItem)) {
onItemSelectionToggled(index, true);
}
if (null != previousSelectedItem && !previousSelectedItem.equals(selectedItem)) {
onItemSelectionToggled(indexOf(previousSelectedItem), false);
}
}
/**
* Returns the number of elements in this data manager. If this list contains
* more than <tt>Integer.MAX_VALUE</tt> elements, returns
* <tt>Integer.MAX_VALUE</tt>.
*
* @return the number of elements in this list
*/
public final int getCount() {
return size();
}
/**
* Returns <tt>true</tt> if this data manager contains no elements.
*
* @return <tt>true</tt> if this data manager contains no elements
*/
public final boolean isEmpty() {
return dataList.isEmpty();
}
/**
* Returns <tt>true</tt> if this data manager contains the specified element.
* More formally, returns <tt>true</tt> if and only if this data manager contains
* at least one element <tt>e</tt> such that
* <tt>(item==null ? e==null : item.equals(e))</tt>.
*
* @param item element whose presence in this list is to be tested
* @return <tt>true</tt> if this list contains the specified element
*/
public final boolean contains(M item) {
return dataList.contains(item);
}
/**
* Returns the element at the specified position in this data manager.
*
* @param index index of the element to return
* @return the element at the specified position in this list
* @throws IndexOutOfBoundsException if the index is out of range
* (<tt>index < 0 || index >= size()</tt>)
*/
public final M get(int index) {
return dataList.get(index);
}
/**
* Returns the index of the first occurrence of the specified element
* in this data manager, or -1 if this data manager does not contain the element.
* More formally, returns the lowest index <tt>i</tt> such that
* <tt>(item==null ? get(i)==null : item.equals(get(i)))</tt>,
* or -1 if there is no such index.
*
* @param item element to search for
* @return the index of the first occurrence of the specified element in
* this data manager, or -1 if this data manager does not contain the element
*/
public final int indexOf(M item) {
return dataList.indexOf(item);
}
/**
* Returns the index of the last occurrence of the specified element
* in this list, or -1 if this data manager does not contain the element.
* More formally, returns the highest index <tt>i</tt> such that
* <tt>(item==null ? get(i)==null : item.equals(get(i)))</tt>,
* or -1 if there is no such index.
*
* @param item element to search for
* @return the index of the last occurrence of the specified element in
* this data manager, or -1 if this data manager does not contain the element
*/
public final int lastIndexOf(M item) {
return dataList.lastIndexOf(item);
}
/**
* Set listener to get notification when the selection changes
*
* @param itemSelectionChangedListener Listener for notify selection changes
*/
public final void setItemSelectionChangedListener(
ItemSelectionChangedListener<M> itemSelectionChangedListener) {
this.itemSelectionChangedListener = itemSelectionChangedListener;
}
/**
* Set listener to get notification when the selection changes
*
* @param multiSelectionChangedListener Listener for notify selection changes
*/
public final void setMultiSelectionChangedListener(
MultiSelectionChangedListener<M> multiSelectionChangedListener) {
this.multiSelectionChangedListener = multiSelectionChangedListener;
}
///////////////////////////////////////////
/////////// Internal API ahead. ///////////
///////////////////////////////////////////
void onItemSelectionToggled(int position, boolean isSelected) {
if (isSelected) {
selectedItems.add(getItem(position));
} else {
selectedItems.remove(getItem(position));
}
onChanged(position, 1, null);
if (adapter instanceof SelectableAdapter && (itemSelectionChangedListener != null
|| multiSelectionChangedListener != null)) {
SelectableAdapter adapter = (SelectableAdapter) this.adapter;
switch (adapter.getSelectionMode()) {
case SelectableAdapter.SELECTION_MODE_MULTIPLE:
if (null != multiSelectionChangedListener) {
multiSelectionChangedListener.onMultiSelectionChangedListener(getSelectedItems());
}
break;
case SelectableAdapter.SELECTION_MODE_SINGLE:
case SelectableAdapter.SELECTION_MODE_SINGLE_OR_NONE:
if (null != itemSelectionChangedListener) {
itemSelectionChangedListener.onItemSelectionChangedListener(getItem(position),
isSelected);
}
break;
case SelectableAdapter.SELECTION_MODE_NONE:
break;
}
}
}
void onItemExpansionToggled(int position) {
onChanged(position, 1, null);
}
void onGroupExpansionToggled() {
// Do nothing. Should be overridden by GroupDataManager
}
List<M> getDataList() {
return dataList;
}
void setDataList(List<M> dataList) {
this.dataList = new ArrayList<>(dataList);
}
int size() {
return dataList.size();
}
M getItem(int dataItemPosition) {
return get(dataItemPosition);
}
boolean isItemSelected(int dataItemPosition) {
return selectedItems.contains(getItem(dataItemPosition));
}
void onSwapped(int currentPosition, int targetPosition) {
M item = dataList.get(currentPosition);
dataList.remove(currentPosition);
dataList.add(targetPosition, item);
onMoved(currentPosition, targetPosition);
}
int getSelectedIndex() {
if (selectedItems.size() > 0 && contains(selectedItems.get(0))) {
return indexOf(selectedItems.get(0));
}
selectedItems.clear();
return -1;
}
}
| |
/*
* Copyright 2009 Kjetil Valstadsve
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package vanadis.extrt;
import org.osgi.framework.Bundle;
import vanadis.annopro.AnnotationDatum;
import vanadis.annopro.AnnotationsDigest;
import vanadis.annopro.AnnotationsDigests;
import vanadis.blueprints.ModuleSpecification;
import vanadis.concurrent.OperationQueuer;
import vanadis.core.collections.Generic;
import vanadis.common.io.Closeables;
import vanadis.core.lang.Strings;
import vanadis.common.test.ForTestingPurposes;
import vanadis.ext.AutoLaunch;
import vanadis.ext.Module;
import vanadis.ext.ModuleSystemException;
import vanadis.objectmanagers.ObjectManagerFactory;
import vanadis.osgi.Context;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Map;
/**
 * Scans a bundle's entries for classes annotated with {@link Module} and builds an
 * {@link ObjectManagerFactory} for each. Entry point is {@link #managedFactories}.
 */
final class ModulesProcessor {

    private static final String CLASS_SUFFIX = ".class";

    private static final String ROOT = "/";

    private static final String DIR = "/";

    /**
     * Recursively scans the bundle's class entries and returns the discovered factories,
     * keyed by module type.
     *
     * @throws IllegalStateException if two factories in the bundle share a type
     */
    static Map<String, ObjectManagerFactory> managedFactories(Context context, Bundle bundle,
                                                              ObjectManagerObserver observer,
                                                              OperationQueuer dispatch) {
        Map<String, ObjectManagerFactory> factories = Generic.map();
        collect(ROOT, context, bundle, factories, observer, dispatch);
        return factories;
    }

    @ForTestingPurposes
    static ObjectManagerFactory objectManagerFactory(Context context,
                                                     ClassLoader classLoader,
                                                     String className,
                                                     InputStream inputStream,
                                                     ObjectManagerObserver observer,
                                                     OperationQueuer dispatch) {
        return objectManagerFactory(context, classLoader, null, className, inputStream, observer, dispatch);
    }

    // Materializes the @Module annotation data as a Module proxy instance.
    private static Module module(AnnotationDatum<Class<?>> datum, Class<?> annotatedClass) {
        return datum.createProxy(annotatedClass.getClassLoader(), Module.class);
    }

    // Reads the annotation digest from the stream; always closes the stream.
    private static AnnotationsDigest annotations(InputStream inputStream) {
        try {
            return AnnotationsDigests.createFromStream(inputStream, Module.class.getName());
        } finally {
            Closeables.close(inputStream);
        }
    }

    private static AnnotationDatum<Class<?>> moduleData(Class<?> annotatedClass) {
        AnnotationsDigest digest = AnnotationsDigests.createFullFromType(annotatedClass);
        return digest.getClassDatum(Module.class);
    }

    /**
     * Loads the module class, preferring the bundle's class loader when a bundle is given.
     *
     * @throws ModuleSystemException if the class cannot be loaded
     */
    private static Class<?> loadClass(ClassLoader classLoader, Bundle bundle, String moduleClassName) {
        if (bundle != null) {
            try {
                return bundle.loadClass(moduleClassName);
            } catch (ClassNotFoundException e) {
                throw new ModuleSystemException
                        ("Unable to load module class " + moduleClassName + " from " + bundle, e);
            }
        }
        try {
            return Class.forName(moduleClassName, true, classLoader);
        } catch (ClassNotFoundException e) {
            throw new ModuleSystemException
                    ("Unable to load module class " + moduleClassName + " from " + classLoader, e);
        }
    }

    /**
     * Builds the module specifications implied by {@code autolaunch} and the explicit
     * {@link AutoLaunch} entries, avoiding a duplicate type-named launch.
     */
    private static Collection<ModuleSpecification> launches(String type, boolean autolaunch, AutoLaunch[] autoLaunches) {
        Collection<ModuleSpecification> moduleSpecifications = Generic.list();
        if (autolaunch) {
            moduleSpecifications.add(ModuleSpecification.create(type, type));
        }
        if (autoLaunches != null) {
            for (AutoLaunch autoLaunch : autoLaunches) {
                String name = autoLaunch.name();
                boolean useTypename = Strings.isBlank(name);
                // An unnamed AutoLaunch falls back to the type name; if autolaunch already
                // registered that name, skip it to avoid a duplicate specification.
                boolean typeNamedAlreadyLaunched = useTypename && autolaunch;
                if (!typeNamedAlreadyLaunched) {
                    moduleSpecifications.add(ModuleSpecification.create
                            (type, useTypename ? type : name, PropertyUtils.read(autoLaunch.properties())));
                }
            }
        }
        return moduleSpecifications;
    }

    private static ObjectManagerFactory objectManagerFactory(Context context,
                                                             Bundle bundle,
                                                             String className,
                                                             InputStream inputStream,
                                                             ObjectManagerObserver observer,
                                                             OperationQueuer dispatch) {
        return objectManagerFactory(context, null, bundle, className, inputStream, observer, dispatch);
    }

    /**
     * Creates a factory for the class if its bytes carry {@link Module} class data.
     *
     * @return the factory, or null if the class is not a module
     */
    private static ObjectManagerFactory objectManagerFactory(Context context,
                                                             ClassLoader classLoader, Bundle bundle,
                                                             String className,
                                                             InputStream inputStream,
                                                             ObjectManagerObserver observer,
                                                             OperationQueuer dispatch) {
        AnnotationsDigest digest = annotations(inputStream);
        if (digest.hasClassData(Module.class)) {
            Class<?> annotatedClass = loadClass(classLoader, bundle, className);
            AnnotationDatum<Class<?>> datum = moduleData(annotatedClass);
            Module module = module(datum, annotatedClass);
            String type = moduleType(bundle, annotatedClass, module);
            return new ObjectManagerFactoryImpl(context, annotatedClass, type,
                    launches(type, module.autolaunch(), module.launch()),
                    observer, dispatch);
        }
        return null;
    }

    // Module type: explicit annotation value, else bundle symbolic name, else package name.
    private static String moduleType(Bundle bundle, Class<?> annotatedClass, Module module) {
        String annotatedType = module.moduleType();
        if (Strings.isBlank(annotatedType)) {
            if (bundle != null) {
                return bundle.getSymbolicName();
            }
            return annotatedClass.getPackage().getName();
        }
        return annotatedType;
    }

    // Depth-first walk over the bundle's entry tree, registering each module class found.
    private static void collect(String prefix, Context context, Bundle bundle,
                                Map<String, ObjectManagerFactory> factories,
                                ObjectManagerObserver observer,
                                OperationQueuer dispatch) {
        // entryPaths() yields Strings already; no need to iterate as Object + toString().
        for (String path : entryPaths(prefix, bundle)) {
            if (isClassFile(path)) {
                ObjectManagerFactory factory =
                        objectManagerFactories(context, bundle, path, className(path), observer, dispatch);
                if (factory != null) {
                    map(bundle, factories, factory);
                }
            } else if (isDirectory(path)) {
                collect(path, context, bundle, factories, observer, dispatch);
            }
        }
    }

    private static boolean isClassFile(String path) {
        return path.endsWith(CLASS_SUFFIX);
    }

    private static boolean isDirectory(String path) {
        return path.endsWith(DIR);
    }

    // Registers the factory under its type, failing fast on duplicate types.
    private static void map(Bundle bundle, Map<String, ObjectManagerFactory> factories, ObjectManagerFactory factory) {
        ObjectManagerFactory existing = factories.put(factory.getType(), factory);
        if (existing != null) {
            throw new IllegalStateException
                    ("Multiple factories with type " + factory.getType() + " in bundle " +
                            bundle.getSymbolicName() + ": " + factory + " and " + existing);
        }
    }

    // "/foo/Bar.class" -> ".foo.Bar"-style dotted name (strip suffix, slashes to dots).
    private static String className(String pathName) {
        return pathName.substring(0, pathName.length() - CLASS_SUFFIX.length()).replace('/', '.');
    }

    // Snapshot of the bundle's entry paths under prefix; empty when the bundle has none.
    private static Iterable<String> entryPaths(String prefix, Bundle bundle) {
        Enumeration<?> enumeration = bundle.getEntryPaths(prefix);
        if (enumeration == null) {
            return Collections.emptyList();
        }
        Collection<String> entryPaths = Generic.list();
        while (enumeration.hasMoreElements()) {
            Object object = enumeration.nextElement();
            if (object != null) {
                entryPaths.add(object.toString());
            }
        }
        return entryPaths;
    }

    private static ObjectManagerFactory objectManagerFactories(Context context,
                                                               Bundle bundle,
                                                               String path,
                                                               String className,
                                                               ObjectManagerObserver observer,
                                                               OperationQueuer dispatch) {
        return objectManagerFactory(context, bundle, className, stream(bundle, path, className), observer, dispatch);
    }

    /**
     * Opens the bundle entry at {@code path}.
     *
     * @throws IllegalStateException if the entry is missing or cannot be opened
     */
    private static InputStream stream(Bundle bundle, String path, String className) {
        URL url = bundle.getEntry(path);
        // FIX: Bundle.getEntry returns null for a missing entry; the old code would
        // throw a bare NullPointerException here instead of a descriptive error.
        if (url == null) {
            throw new IllegalStateException
                    ("No entry " + path + " to load " + className + " from " + bundle.getSymbolicName());
        }
        try {
            return url.openStream();
        } catch (IOException e) {
            throw new IllegalStateException
                    ("Failed to open entry " + path + " to load " + className + " from " + bundle.getSymbolicName(), e);
        }
    }

    private ModulesProcessor() {
        // Don't make me.
    }
}
| |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.assistants;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEndpointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceFaultInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInSequenceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressingEndpointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BAMMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BeanMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorOperationInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ConditionalRouterMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DataMapperMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EJBMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FastXSLTMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ForEachMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.InboundEndpointOnErrorSequenceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.InboundEndpointOnErrorSequenceOutputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.InboundEndpointSequenceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.JsonTransformMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoopBackMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MergeNodeFirstInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MergeNodeSecondInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.MessageInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.NamedEndpointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyGroupMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyFaultInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInSequenceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ProxyInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PublishEventMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RespondMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RouterMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequencesInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.URLRewriteMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ValidateMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointInputConnector2EditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbModelingAssistantProvider;
/**
* @generated
*/
public class EsbModelingAssistantProviderOfInboundEndpointOnErrorSequenceOutputConnectorEditPart
extends EsbModelingAssistantProvider {
/**
* @generated
*/
@Override
public List<IElementType> getRelTypesOnSource(IAdaptable source) {
    // Resolve the adaptable to its edit part, then delegate to the typed overload.
    final IGraphicalEditPart editPart =
            (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class);
    return doGetRelTypesOnSource(
            (InboundEndpointOnErrorSequenceOutputConnectorEditPart) editPart);
}
/**
* @generated
*/
public List<IElementType> doGetRelTypesOnSource(InboundEndpointOnErrorSequenceOutputConnectorEditPart source) {
    // The only relationship this connector can originate is an ESB link.
    final List<IElementType> relTypes = new ArrayList<IElementType>(1);
    relTypes.add(EsbElementTypes.EsbLink_4001);
    return relTypes;
}
/**
* @generated
*/
@Override
public List<IElementType> getRelTypesOnSourceAndTarget(IAdaptable source, IAdaptable target) {
    // Resolve both adaptables to edit parts before delegating to the typed overload.
    final IGraphicalEditPart resolvedSource =
            (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class);
    final IGraphicalEditPart resolvedTarget =
            (IGraphicalEditPart) target.getAdapter(IGraphicalEditPart.class);
    return doGetRelTypesOnSourceAndTarget(
            (InboundEndpointOnErrorSequenceOutputConnectorEditPart) resolvedSource, resolvedTarget);
}
/**
* @generated
*/
public List<IElementType> doGetRelTypesOnSourceAndTarget(
InboundEndpointOnErrorSequenceOutputConnectorEditPart source, IGraphicalEditPart targetEditPart) {
List<IElementType> types = new LinkedList<IElementType>();
if (targetEditPart instanceof ProxyInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof ProxyFaultInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof DropMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof PropertyMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof PropertyGroupMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof ThrottleMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof FilterMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof LogMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof EnrichMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof XSLTMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof SwitchMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof SequenceInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof EventMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof EntitlementMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof ClassMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof SpringMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof ScriptMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof FaultMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof XQueryMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof CommandMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof DBLookupMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof DBReportMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof SmooksMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof SendMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof HeaderMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof CloneMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof CacheMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof IterateMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof CalloutMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof TransactionMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof RMSequenceMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof RuleMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof OAuthMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof AggregateMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof StoreMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof BuilderMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof CallTemplateMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof PayloadFactoryMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof EnqueueMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof URLRewriteMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof ValidateMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof RouterMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof ConditionalRouterMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof BAMMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof BeanMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof EJBMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof DefaultEndPointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof AddressEndPointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof FailoverEndPointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof RecipientListEndPointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof WSDLEndPointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof NamedEndpointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof LoadBalanceEndPointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof APIResourceEndpointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof AddressingEndpointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof HTTPEndPointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof TemplateEndpointInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof CloudConnectorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof CloudConnectorOperationInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof LoopBackMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof RespondMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof CallMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof DataMapperMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof FastXSLTMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof ForEachMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof PublishEventMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof JsonTransformMediatorInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof ProxyInSequenceInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof MessageInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof MergeNodeFirstInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof MergeNodeSecondInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof SequencesInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof DefaultEndPointInputConnector2EditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof AddressEndPointInputConnector2EditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof FailoverEndPointInputConnector2EditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof RecipientListEndPointInputConnector2EditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof WSDLEndPointInputConnector2EditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof LoadBalanceEndPointInputConnector2EditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof HTTPEndPointInputConnector2EditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof TemplateEndpointInputConnector2EditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof APIResourceInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof APIResourceFaultInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof APIResourceInSequenceInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof InboundEndpointSequenceInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
if (targetEditPart instanceof InboundEndpointOnErrorSequenceInputConnectorEditPart) {
types.add(EsbElementTypes.EsbLink_4001);
}
return types;
}
/**
* @generated
*/
@Override
public List<IElementType> getTypesForTarget(IAdaptable source, IElementType relationshipType) {
IGraphicalEditPart sourceEditPart = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class);
return doGetTypesForTarget((InboundEndpointOnErrorSequenceOutputConnectorEditPart) sourceEditPart,
relationshipType);
}
/**
* @generated
*/
public List<IElementType> doGetTypesForTarget(InboundEndpointOnErrorSequenceOutputConnectorEditPart source,
IElementType relationshipType) {
List<IElementType> types = new ArrayList<IElementType>();
if (relationshipType == EsbElementTypes.EsbLink_4001) {
types.add(EsbElementTypes.ProxyInputConnector_3003);
types.add(EsbElementTypes.ProxyFaultInputConnector_3489);
types.add(EsbElementTypes.DropMediatorInputConnector_3008);
types.add(EsbElementTypes.PropertyMediatorInputConnector_3033);
types.add(EsbElementTypes.PropertyGroupMediatorInputConnector_3789);
types.add(EsbElementTypes.ThrottleMediatorInputConnector_3121);
types.add(EsbElementTypes.FilterMediatorInputConnector_3010);
types.add(EsbElementTypes.LogMediatorInputConnector_3018);
types.add(EsbElementTypes.EnrichMediatorInputConnector_3036);
types.add(EsbElementTypes.XSLTMediatorInputConnector_3039);
types.add(EsbElementTypes.SwitchMediatorInputConnector_3042);
types.add(EsbElementTypes.SequenceInputConnector_3049);
types.add(EsbElementTypes.EventMediatorInputConnector_3052);
types.add(EsbElementTypes.EntitlementMediatorInputConnector_3055);
types.add(EsbElementTypes.ClassMediatorInputConnector_3058);
types.add(EsbElementTypes.SpringMediatorInputConnector_3061);
types.add(EsbElementTypes.ScriptMediatorInputConnector_3064);
types.add(EsbElementTypes.FaultMediatorInputConnector_3067);
types.add(EsbElementTypes.XQueryMediatorInputConnector_3070);
types.add(EsbElementTypes.CommandMediatorInputConnector_3073);
types.add(EsbElementTypes.DBLookupMediatorInputConnector_3076);
types.add(EsbElementTypes.DBReportMediatorInputConnector_3079);
types.add(EsbElementTypes.SmooksMediatorInputConnector_3082);
types.add(EsbElementTypes.SendMediatorInputConnector_3085);
types.add(EsbElementTypes.HeaderMediatorInputConnector_3100);
types.add(EsbElementTypes.CloneMediatorInputConnector_3103);
types.add(EsbElementTypes.CacheMediatorInputConnector_3106);
types.add(EsbElementTypes.IterateMediatorInputConnector_3109);
types.add(EsbElementTypes.CalloutMediatorInputConnector_3115);
types.add(EsbElementTypes.TransactionMediatorInputConnector_3118);
types.add(EsbElementTypes.RMSequenceMediatorInputConnector_3124);
types.add(EsbElementTypes.RuleMediatorInputConnector_3127);
types.add(EsbElementTypes.OAuthMediatorInputConnector_3130);
types.add(EsbElementTypes.AggregateMediatorInputConnector_3112);
types.add(EsbElementTypes.StoreMediatorInputConnector_3589);
types.add(EsbElementTypes.BuilderMediatorInputConnector_3592);
types.add(EsbElementTypes.CallTemplateMediatorInputConnector_3595);
types.add(EsbElementTypes.PayloadFactoryMediatorInputConnector_3598);
types.add(EsbElementTypes.EnqueueMediatorInputConnector_3601);
types.add(EsbElementTypes.URLRewriteMediatorInputConnector_3621);
types.add(EsbElementTypes.ValidateMediatorInputConnector_3624);
types.add(EsbElementTypes.RouterMediatorInputConnector_3629);
types.add(EsbElementTypes.ConditionalRouterMediatorInputConnector_3636);
types.add(EsbElementTypes.BAMMediatorInputConnector_3681);
types.add(EsbElementTypes.BeanMediatorInputConnector_3684);
types.add(EsbElementTypes.EJBMediatorInputConnector_3687);
types.add(EsbElementTypes.DefaultEndPointInputConnector_3021);
types.add(EsbElementTypes.AddressEndPointInputConnector_3030);
types.add(EsbElementTypes.FailoverEndPointInputConnector_3088);
types.add(EsbElementTypes.RecipientListEndPointInputConnector_3693);
types.add(EsbElementTypes.WSDLEndPointInputConnector_3092);
types.add(EsbElementTypes.NamedEndpointInputConnector_3661);
types.add(EsbElementTypes.LoadBalanceEndPointInputConnector_3095);
types.add(EsbElementTypes.APIResourceEndpointInputConnector_3675);
types.add(EsbElementTypes.AddressingEndpointInputConnector_3690);
types.add(EsbElementTypes.HTTPEndPointInputConnector_3710);
types.add(EsbElementTypes.TemplateEndpointInputConnector_3717);
types.add(EsbElementTypes.CloudConnectorInputConnector_3720);
types.add(EsbElementTypes.CloudConnectorOperationInputConnector_3723);
types.add(EsbElementTypes.LoopBackMediatorInputConnector_3737);
types.add(EsbElementTypes.RespondMediatorInputConnector_3740);
types.add(EsbElementTypes.CallMediatorInputConnector_3743);
types.add(EsbElementTypes.DataMapperMediatorInputConnector_3762);
types.add(EsbElementTypes.FastXSLTMediatorInputConnector_3765);
types.add(EsbElementTypes.ForEachMediatorInputConnector_3781);
types.add(EsbElementTypes.PublishEventMediatorInputConnector_3786);
types.add(EsbElementTypes.JsonTransformMediatorInputConnector_3792);
types.add(EsbElementTypes.ProxyInSequenceInputConnector_3731);
types.add(EsbElementTypes.MessageInputConnector_3046);
types.add(EsbElementTypes.MergeNodeFirstInputConnector_3014);
types.add(EsbElementTypes.MergeNodeSecondInputConnector_3015);
types.add(EsbElementTypes.SequencesInputConnector_3616);
types.add(EsbElementTypes.DefaultEndPointInputConnector_3644);
types.add(EsbElementTypes.AddressEndPointInputConnector_3647);
types.add(EsbElementTypes.FailoverEndPointInputConnector_3650);
types.add(EsbElementTypes.RecipientListEndPointInputConnector_3697);
types.add(EsbElementTypes.WSDLEndPointInputConnector_3654);
types.add(EsbElementTypes.LoadBalanceEndPointInputConnector_3657);
types.add(EsbElementTypes.HTTPEndPointInputConnector_3713);
types.add(EsbElementTypes.TemplateEndpointInputConnector_3726);
types.add(EsbElementTypes.APIResourceInputConnector_3670);
types.add(EsbElementTypes.APIResourceFaultInputConnector_3672);
types.add(EsbElementTypes.APIResourceInSequenceInputConnector_3747);
types.add(EsbElementTypes.InboundEndpointSequenceInputConnector_3768);
types.add(EsbElementTypes.InboundEndpointOnErrorSequenceInputConnector_3770);
}
return types;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.elasticsearch;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.annotations.Component;
import org.apache.camel.support.DefaultComponent;
import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
/**
* Represents the component that manages {@link ElasticsearchEndpoint}.
*/
@Component("elasticsearch-rest")
public class ElasticsearchComponent extends DefaultComponent {
@Metadata(label = "advanced")
private RestClient client;
@Metadata(label = "advanced")
private String hostAddresses;
@Metadata(label = "advanced", defaultValue = "" + ElasticsearchConstants.DEFAULT_SOCKET_TIMEOUT)
private int socketTimeout = ElasticsearchConstants.DEFAULT_SOCKET_TIMEOUT;
@Metadata(label = "advanced", defaultValue = "" + ElasticsearchConstants.MAX_RETRY_TIMEOUT)
private int maxRetryTimeout = ElasticsearchConstants.MAX_RETRY_TIMEOUT;
@Metadata(label = "advanced", defaultValue = "" + ElasticsearchConstants.DEFAULT_CONNECTION_TIMEOUT)
private int connectionTimeout = ElasticsearchConstants.DEFAULT_CONNECTION_TIMEOUT;
@Metadata(label = "security")
private String user;
@Metadata(label = "security", secret = true)
private String password;
@Metadata(label = "security", defaultValue = "false")
private boolean enableSSL;
@Metadata(label = "advanced", defaultValue = "false")
private boolean enableSniffer;
@Metadata(label = "advanced", defaultValue = "" + ElasticsearchConstants.DEFAULT_SNIFFER_INTERVAL)
private int snifferInterval = ElasticsearchConstants.DEFAULT_SNIFFER_INTERVAL;
@Metadata(label = "advanced", defaultValue = "" + ElasticsearchConstants.DEFAULT_AFTER_FAILURE_DELAY)
private int sniffAfterFailureDelay = ElasticsearchConstants.DEFAULT_AFTER_FAILURE_DELAY;
public ElasticsearchComponent() {
this(null);
}
public ElasticsearchComponent(CamelContext context) {
super(context);
registerExtension(new ElasticsearchRestComponentVerifierExtension());
}
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
ElasticsearchConfiguration config = new ElasticsearchConfiguration();
config.setHostAddresses(this.getHostAddresses());
config.setSocketTimeout(this.getSocketTimeout());
config.setMaxRetryTimeout(this.getMaxRetryTimeout());
config.setConnectionTimeout(this.getConnectionTimeout());
config.setUser(this.getUser());
config.setEnableSSL(this.getEnableSSL());
config.setPassword(this.getPassword());
config.setEnableSniffer(this.getEnableSniffer());
config.setSnifferInterval(this.getSnifferInterval());
config.setSniffAfterFailureDelay(this.getSniffAfterFailureDelay());
config.setClusterName(remaining);
setProperties(config, parameters);
config.setHostAddressesList(parseHostAddresses(config.getHostAddresses(), config));
Endpoint endpoint = new ElasticsearchEndpoint(uri, this, config, client);
return endpoint;
}
private List<HttpHost> parseHostAddresses(String ipsString, ElasticsearchConfiguration config) throws UnknownHostException {
if (ipsString == null || ipsString.isEmpty()) {
return null;
}
List<String> addressesStr = Arrays.asList(ipsString.split(","));
List<HttpHost> addressesTrAd = new ArrayList<>(addressesStr.size());
for (String address : addressesStr) {
String[] split = address.split(":");
String hostname;
if (split.length > 0) {
hostname = split[0];
} else {
throw new IllegalArgumentException();
}
Integer port = split.length > 1 ? Integer.parseInt(split[1]) : ElasticsearchConstants.DEFAULT_PORT;
addressesTrAd.add(new HttpHost(hostname, port, config.getEnableSSL() ? "HTTPS" : "HTTP"));
}
return addressesTrAd;
}
public RestClient getClient() {
return client;
}
/**
* To use an existing configured Elasticsearch client, instead of creating a client per endpoint.
* This allow to customize the client with specific settings.
*/
public void setClient(RestClient client) {
this.client = client;
}
/**
* Comma separated list with ip:port formatted remote transport addresses to use.
* The ip and port options must be left blank for hostAddresses to be considered instead.
*/
public String getHostAddresses() {
return hostAddresses;
}
public void setHostAddresses(String hostAddresses) {
this.hostAddresses = hostAddresses;
}
/**
* The timeout in ms to wait before the socket will timeout.
*/
public int getSocketTimeout() {
return socketTimeout;
}
public void setSocketTimeout(int socketTimeout) {
this.socketTimeout = socketTimeout;
}
/**
* The time in ms to wait before connection will timeout.
*/
public int getConnectionTimeout() {
return connectionTimeout;
}
public void setConnectionTimeout(int connectionTimeout) {
this.connectionTimeout = connectionTimeout;
}
/**
* Basic authenticate user
*/
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
/**
* Password for authenticate
*/
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
/**
* Enable SSL
*/
public Boolean getEnableSSL() {
return enableSSL;
}
public void setEnableSSL(Boolean enableSSL) {
this.enableSSL = enableSSL;
}
/**
* The time in ms before retry
*/
public int getMaxRetryTimeout() {
return maxRetryTimeout;
}
public void setMaxRetryTimeout(int maxRetryTimeout) {
this.maxRetryTimeout = maxRetryTimeout;
}
/**
* Enable automatically discover nodes from a running Elasticsearch cluster
*/
public Boolean getEnableSniffer() {
return enableSniffer;
}
public void setEnableSniffer(Boolean enableSniffer) {
this.enableSniffer = enableSniffer;
}
/**
* The interval between consecutive ordinary sniff executions in milliseconds. Will be honoured when
* sniffOnFailure is disabled or when there are no failures between consecutive sniff executions
*/
public int getSnifferInterval() {
return snifferInterval;
}
public void setSnifferInterval(int snifferInterval) {
this.snifferInterval = snifferInterval;
}
/**
* The delay of a sniff execution scheduled after a failure (in milliseconds)
*/
public int getSniffAfterFailureDelay() {
return sniffAfterFailureDelay;
}
public void setSniffAfterFailureDelay(int sniffAfterFailureDelay) {
this.sniffAfterFailureDelay = sniffAfterFailureDelay;
}
}
| |
/**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smack.packet;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import org.jivesoftware.smack.util.StringUtils;
import org.jivesoftware.smack.util.XmlStringBuilder;
/**
 * Represents an XMPP error sub-packet. Typically, a server responds to a request that has
 * problems by sending the packet back and including an error packet. Each error has a type,
 * error condition as well as as an optional text explanation. Typical errors are:<p>
 *
 * <table border=1>
 * <hr><td><b>XMPP Error Condition</b></td><td><b>Type</b></td><td><b>RFC 6120 Section</b></td></hr>
 * <tr><td>bad-request</td><td>MODIFY</td><td>8.3.3.1</td></tr>
 * <tr><td>conflict</td><td>CANCEL</td><td>8.3.3.2</td></tr>
 * <tr><td>feature-not-implemented</td><td>CANCEL</td><td>8.3.3.3</td></tr>
 * <tr><td>forbidden</td><td>AUTH</td><td>8.3.3.4</td></tr>
 * <tr><td>gone</td><td>MODIFY</td><td>8.3.3.5</td></tr>
 * <tr><td>internal-server-error</td><td>WAIT</td><td>8.3.3.6</td></tr>
 * <tr><td>item-not-found</td><td>CANCEL</td><td>8.3.3.7</td></tr>
 * <tr><td>jid-malformed</td><td>MODIFY</td><td>8.3.3.8</td></tr>
 * <tr><td>not-acceptable</td><td> MODIFY</td><td>8.3.3.9</td></tr>
 * <tr><td>not-allowed</td><td>CANCEL</td><td>8.3.3.10</td></tr>
 * <tr><td>not-authorized</td><td>AUTH</td><td>8.3.3.11</td></tr>
 * <tr><td>policy-violation</td><td>AUTH</td><td>8.3.3.12</td></tr>
 * <tr><td>recipient-unavailable</td><td>WAIT</td><td>8.3.3.13</td></tr>
 * <tr><td>redirect</td><td>MODIFY</td><td>8.3.3.14</td></tr>
 * <tr><td>registration-required</td><td>AUTH</td><td>8.3.3.15</td></tr>
 * <tr><td>remote-server-not-found</td><td>CANCEL</td><td>8.3.3.16</td></tr>
 * <tr><td>remote-server-timeout</td><td>WAIT</td><td>8.3.3.17</td></tr>
 * <tr><td>resource-constraint</td><td>WAIT</td><td>8.3.3.18</td></tr>
 * <tr><td>service-unavailable</td><td>CANCEL</td><td>8.3.3.19</td></tr>
 * <tr><td>subscription-required</td><td>AUTH</td><td>8.3.3.20</td></tr>
 * <tr><td>undefined-condition</td><td>WAIT</td><td>8.3.3.21</td></tr>
 * <tr><td>unexpected-request</td><td>WAIT</td><td>8.3.3.22</td></tr>
 * </table>
 *
 * @author Matt Tucker
 * @see <a href="http://xmpp.org/rfcs/rfc6120.html#stanzas-error-syntax">RFC 6120 - 8.3.2 Syntax: The Syntax of XMPP error stanzas</a>
 */
public class XMPPError extends AbstractError {
    public static final String NAMESPACE = "urn:ietf:params:xml:ns:xmpp-stanzas";
    public static final String ERROR = "error";

    private static final Logger LOGGER = Logger.getLogger(XMPPError.class.getName());

    // Default error type per condition, used when the constructor is given a null type.
    // NOTE(review): several entries (gone, internal-server-error, policy-violation,
    // service-unavailable, subscription-required, unexpected-request) differ from the
    // types listed in the class javadoc table above -- confirm which is intentional.
    private static final Map<Condition, Type> CONDITION_TO_TYPE = new HashMap<Condition, Type>();

    static {
        CONDITION_TO_TYPE.put(Condition.bad_request, Type.MODIFY);
        CONDITION_TO_TYPE.put(Condition.conflict, Type.CANCEL);
        CONDITION_TO_TYPE.put(Condition.feature_not_implemented, Type.CANCEL);
        CONDITION_TO_TYPE.put(Condition.forbidden, Type.AUTH);
        CONDITION_TO_TYPE.put(Condition.gone, Type.CANCEL);
        CONDITION_TO_TYPE.put(Condition.internal_server_error, Type.CANCEL);
        CONDITION_TO_TYPE.put(Condition.item_not_found, Type.CANCEL);
        CONDITION_TO_TYPE.put(Condition.jid_malformed, Type.MODIFY);
        CONDITION_TO_TYPE.put(Condition.not_acceptable, Type.MODIFY);
        CONDITION_TO_TYPE.put(Condition.not_allowed, Type.CANCEL);
        CONDITION_TO_TYPE.put(Condition.not_authorized, Type.AUTH);
        CONDITION_TO_TYPE.put(Condition.policy_violation, Type.MODIFY);
        CONDITION_TO_TYPE.put(Condition.recipient_unavailable, Type.WAIT);
        CONDITION_TO_TYPE.put(Condition.redirect, Type.MODIFY);
        CONDITION_TO_TYPE.put(Condition.registration_required, Type.AUTH);
        CONDITION_TO_TYPE.put(Condition.remote_server_not_found, Type.CANCEL);
        CONDITION_TO_TYPE.put(Condition.remote_server_timeout, Type.WAIT);
        CONDITION_TO_TYPE.put(Condition.resource_constraint, Type.WAIT);
        CONDITION_TO_TYPE.put(Condition.service_unavailable, Type.WAIT);
        CONDITION_TO_TYPE.put(Condition.subscription_required, Type.WAIT);
        CONDITION_TO_TYPE.put(Condition.unexpected_request, Type.MODIFY);
        // Condition.undefined_condition is deliberately absent: it falls through to the
        // constructor's warning + Type.CANCEL default.
    }

    private final Condition condition;
    private final String conditionText;
    private final String errorGenerator;
    private final Type type;

    public XMPPError(Condition condition) {
        this(condition, null, null, null, null, null);
    }

    public XMPPError(Condition condition, PacketExtension applicationSpecificCondition) {
        this(condition, null, null, null, null, Arrays.asList(applicationSpecificCondition));
    }

    /**
     * Creates a new error with the specified type, condition and message.
     * This constructor is used when the condition is not recognized automatically by XMPPError
     * i.e. there is not a defined instance of ErrorCondition or it does not apply the default
     * specification.
     *
     * @param condition the error condition.
     * @param conditionText optional text for the condition; only allowed for the
     *        'gone' and 'redirect' conditions. An empty string is treated as null.
     * @param errorGenerator the optional JID of the entity that generated the error
     *        (the 'by' attribute).
     * @param type the error type; if null it is derived from the condition
     *        (falling back to {@link Type#CANCEL} with a warning).
     * @param descriptiveTexts map from language to descriptive text.
     * @param extensions list of packet extensions.
     * @throws IllegalArgumentException if conditionText is given for a condition
     *         other than 'gone' or 'redirect'.
     */
    public XMPPError(Condition condition, String conditionText, String errorGenerator, Type type, Map<String, String> descriptiveTexts,
            List<PacketExtension> extensions) {
        super(descriptiveTexts, NAMESPACE, extensions);
        this.condition = condition;
        // Some implementations may send the condition as non-empty element containing the empty string, that is
        // <condition xmlns='foo'></condition>, in this case the parser may calls this constructor with the empty string
        // as conditionText, therefore reset it to null if it's the empty string
        if (StringUtils.isNullOrEmpty(conditionText)) {
            conditionText = null;
        }
        if (conditionText != null) {
            switch (condition) {
            case gone:
            case redirect:
                break;
            default:
                throw new IllegalArgumentException(
                        "Condition text can only be set with condition types 'gone' and 'redirect', not "
                        + condition);
            }
        }
        this.conditionText = conditionText;
        this.errorGenerator = errorGenerator;
        if (type == null) {
            Type determinedType = CONDITION_TO_TYPE.get(condition);
            if (determinedType == null) {
                LOGGER.warning("Could not determine type for condition: " + condition);
                determinedType = Type.CANCEL;
            }
            this.type = determinedType;
        } else {
            this.type = type;
        }
    }

    /**
     * Returns the error condition.
     *
     * @return the error condition.
     */
    public Condition getCondition() {
        return condition;
    }

    /**
     * Returns the error type.
     *
     * @return the error type.
     */
    public Type getType() {
        return type;
    }

    /**
     * Returns the JID of the entity that generated the error, or null.
     *
     * @return the error generator ('by' attribute) or null.
     */
    public String getErrorGenerator() {
        return errorGenerator;
    }

    /**
     * Returns the text of the condition element, or null. Only set for the
     * 'gone' and 'redirect' conditions.
     *
     * @return the condition text or null.
     */
    public String getConditionText() {
        return conditionText;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("XMPPError: ");
        sb.append(condition.toString()).append(" - ").append(type.toString());
        if (errorGenerator != null) {
            sb.append(". Generated by ").append(errorGenerator);
        }
        return sb.toString();
    }

    /**
     * Returns the error as XML.
     *
     * @return the error as XML.
     */
    public XmlStringBuilder toXML() {
        XmlStringBuilder xml = new XmlStringBuilder();
        xml.halfOpenElement(ERROR);
        xml.attribute("type", type.toString());
        xml.optAttribute("by", errorGenerator);
        xml.rightAngleBracket();
        xml.halfOpenElement(condition.toString());
        xml.xmlnsAttribute(NAMESPACE);
        xml.closeEmptyElement();
        addDescriptiveTextsAndExtensions(xml);
        xml.closeElement(ERROR);
        return xml;
    }

    /**
     * Creates an error for the given condition with a single English descriptive text.
     *
     * @param condition the error condition.
     * @param descriptiveText the descriptive text, stored under the "en" language key.
     * @return a new XMPPError instance.
     */
    public static XMPPError from(Condition condition, String descriptiveText) {
        Map<String, String> descriptiveTexts = new HashMap<String, String>();
        descriptiveTexts.put("en", descriptiveText);
        return new XMPPError(condition, null, null, null, descriptiveTexts, null);
    }

    /**
     * A class to represent the type of the Error. The types are:
     *
     * <ul>
     * <li>XMPPError.Type.WAIT - retry after waiting (the error is temporary)
     * <li>XMPPError.Type.CANCEL - do not retry (the error is unrecoverable)
     * <li>XMPPError.Type.MODIFY - retry after changing the data sent
     * <li>XMPPError.Type.AUTH - retry after providing credentials
     * <li>XMPPError.Type.CONTINUE - proceed (the condition was only a warning)
     * </ul>
     */
    public enum Type {
        WAIT,
        CANCEL,
        MODIFY,
        AUTH,
        CONTINUE;

        @Override
        public String toString() {
            // Locale.US not required, since Type consists only of ASCII chars
            return name().toLowerCase();
        }

        public static Type fromString(String string) {
            // Locale.US not required, since Type consists only of ASCII chars
            string = string.toUpperCase();
            return Type.valueOf(string);
        }
    }

    /**
     * The defined error conditions of RFC 6120 section 8.3.3. The enum constant
     * names use '_' where the wire format uses '-'; {@link #toString()} and
     * {@link #fromString(String)} translate between the two.
     */
    public enum Condition {
        bad_request,
        conflict,
        feature_not_implemented,
        forbidden,
        gone,
        internal_server_error,
        item_not_found,
        jid_malformed,
        not_acceptable,
        not_allowed,
        not_authorized,
        policy_violation,
        recipient_unavailable,
        redirect,
        registration_required,
        remote_server_not_found,
        remote_server_timeout,
        resource_constraint,
        service_unavailable,
        subscription_required,
        undefined_condition,
        unexpected_request;

        @Override
        public String toString() {
            return this.name().replace('_', '-');
        }

        public static Condition fromString(String string) {
            // Backwards compatibility for older implementations still using RFC 3920. RFC 6120
            // changed 'xml-not-well-formed' to 'not-well-formed'.
            if ("xml-not-well-formed".equals(string)) {
                string = "not-well-formed";
            }
            string = string.replace('-', '_');
            Condition condition = null;
            try {
                condition = Condition.valueOf(string);
            } catch (Exception e) {
                throw new IllegalStateException("Could not transform string '" + string + "' to XMPPErrorCondition", e);
            }
            return condition;
        }
    }
}
| |
package liquibase.statementexecute;
import liquibase.CatalogAndSchema;
import liquibase.Scope;
import liquibase.changelog.ChangeLogHistoryServiceFactory;
import liquibase.database.Database;
import liquibase.database.DatabaseConnection;
import liquibase.database.DatabaseFactory;
import liquibase.database.core.MockDatabase;
import liquibase.database.core.UnsupportedDatabase;
import liquibase.database.example.ExampleCustomDatabase;
import liquibase.database.jvm.JdbcConnection;
import liquibase.datatype.DataTypeFactory;
import liquibase.exception.DatabaseException;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.executor.ExecutorService;
import liquibase.extension.testing.testsystem.DatabaseTestSystem;
import liquibase.extension.testing.testsystem.TestSystemFactory;
import liquibase.listener.SqlListener;
import liquibase.lockservice.LockServiceFactory;
import liquibase.snapshot.SnapshotGeneratorFactory;
import liquibase.sql.Sql;
import liquibase.sqlgenerator.SqlGeneratorFactory;
import liquibase.statement.SqlStatement;
import liquibase.structure.core.Table;
import liquibase.test.TestContext;
import org.junit.After;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Base class for tests that verify the SQL generated for a single
 * {@link SqlStatement} against every configured database, and that execute the
 * generated SQL against any locally available live test database systems.
 */
public abstract class AbstractExecuteTest {

    /** The statement whose generated SQL is verified; subclasses must set this before asserting. */
    protected SqlStatement statementUnderTest;

    /** Database classes already verified in the current test, so each is asserted only once. */
    private Set<Class<? extends Database>> testedDatabases = new HashSet<Class<? extends Database>>();

    /**
     * Rolls back any open connections and clears per-test state after each test method.
     */
    @After
    public void reset() {
        for (Database database : TestContext.getInstance().getAllDatabases()) {
            if (database.getConnection() != null) {
                try {
                    database.rollback();
                } catch (DatabaseException e) {
                    //ok
                }
            }
        }
        testedDatabases = new HashSet<Class<? extends Database>>();
        this.statementUnderTest = null;
        SnapshotGeneratorFactory.resetAll();
    }

    /**
     * Returns statements to run against the given database before executing
     * {@link #statementUnderTest}, or null if no setup is required.
     */
    protected abstract List<? extends SqlStatement> setupStatements(Database database);

    /** Asserts the expected SQL on all databases. */
    protected void testOnAll(String expectedSql) throws Exception {
        test(expectedSql, null, null);
    }

    /** Asserts the expected SQL on all databases not yet covered in this test method. */
    protected void assertCorrectOnRest(String expectedSql) throws Exception {
        assertCorrect(expectedSql);
    }

    /**
     * Asserts that the statement under test generates the expected SQL on the
     * given databases (or on all databases if none are given).
     */
    @SafeVarargs
    protected final void assertCorrect(String expectedSql, Class<? extends Database>... includeDatabases) throws
            Exception {
        assertCorrect(new String[]{expectedSql}, includeDatabases);
    }

    @SafeVarargs
    protected final void assertCorrect(String[] expectedSql, Class<? extends Database>... includeDatabases) throws Exception {
        // Failure message must describe the problem (it is shown when the value IS null).
        assertNotNull("statementUnderTest must be set by the subclass before asserting", statementUnderTest);
        test(expectedSql, includeDatabases, null);
    }

    /** Asserts the expected SQL on every database except the excluded ones. */
    @SafeVarargs
    public final void testOnAllExcept(String expectedSql, Class<? extends Database>... excludedDatabases) throws Exception {
        test(expectedSql, null, excludedDatabases);
    }

    private void test(String expectedSql, Class<? extends Database>[] includeDatabases, Class<? extends Database>[] excludeDatabases) throws Exception {
        test(new String[]{expectedSql}, includeDatabases, excludeDatabases);
    }

    /**
     * Core verification: compares generated SQL against the expectations for all
     * matching offline databases, then executes the generated SQL against every
     * available live test system.
     */
    private void test(String[] expectedSql, Class<? extends Database>[] includeDatabases, Class<? extends Database>[] excludeDatabases) throws Exception {
        if (expectedSql != null) {
            for (Database database : TestContext.getInstance().getAllDatabases()) {
                if (shouldTestDatabase(database, includeDatabases, excludeDatabases)) {
                    testedDatabases.add(database.getClass());
                    if (database.getConnection() != null) {
                        ChangeLogHistoryServiceFactory.getInstance().getChangeLogService(database).init();
                        LockServiceFactory.getInstance().getLockService(database).init();
                    }
                    Sql[] sql = SqlGeneratorFactory.getInstance().generateSql(statementUnderTest, database);
                    assertNotNull("Null SQL for " + database, sql);
                    assertEquals("Unexpected number of SQL statements for " + database, expectedSql.length, sql.length);
                    int index = 0;
                    for (String convertedSql : expectedSql) {
                        convertedSql = replaceEscaping(convertedSql, database);
                        convertedSql = replaceDatabaseClauses(convertedSql, database);
                        convertedSql = replaceStandardTypes(convertedSql, database);
                        assertEquals("Incorrect SQL for " + database.getClass().getName(), convertedSql.toLowerCase().trim(), sql[index].toSql().toLowerCase());
                        index++;
                    }
                }
            }
        }
        resetAvailableDatabases();
        for (DatabaseTestSystem testSystem : Scope.getCurrentScope().getSingleton(TestSystemFactory.class).getAvailable(DatabaseTestSystem.class)) {
            testSystem.start();
            Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(testSystem.getConnection()));
            // try-with-resources: the Statement was previously leaked on every iteration.
            try (Statement statement = ((JdbcConnection) database.getConnection()).getUnderlyingConnection().createStatement()) {
                if (shouldTestDatabase(database, includeDatabases, excludeDatabases)) {
                    String sqlToRun = SqlGeneratorFactory.getInstance().generateSql(statementUnderTest, database)[0].toSql();
                    try {
                        for (SqlListener listener : Scope.getCurrentScope().getListeners(SqlListener.class)) {
                            listener.writeSqlWillRun(sqlToRun);
                        }
                        statement.execute(sqlToRun);
                    } catch (Exception e) {
                        System.out.println("Failed to execute against " + database.getShortName() + ": " + sqlToRun);
                        throw e;
                    }
                }
            }
        }
    }

    /** Replaces the generic type/function placeholders (int, datetime, boolean, TRUE/FALSE, NOW()) with database-specific SQL. */
    private String replaceStandardTypes(String convertedSql, Database database) {
        convertedSql = replaceType("int", convertedSql, database);
        convertedSql = replaceType("datetime", convertedSql, database);
        convertedSql = replaceType("boolean", convertedSql, database);
        convertedSql = convertedSql.replaceAll("FALSE", DataTypeFactory.getInstance().fromDescription("boolean", database).objectToSql(false, database));
        convertedSql = convertedSql.replaceAll("TRUE", DataTypeFactory.getInstance().fromDescription("boolean", database).objectToSql(true, database));
        convertedSql = convertedSql.replaceAll("NOW\\(\\)", database.getCurrentDateTimeFunction());
        return convertedSql;
    }

    /** Replaces one generic type token (delimited by space/comma) with the database-specific type name. */
    private String replaceType(String type, String baseString, Database database) {
        return baseString.replaceAll(" " + type + " ", " " + DataTypeFactory.getInstance().fromDescription(type, database).toDatabaseDataType(database).toString() + " ")
                .replaceAll(" " + type + ",", " " + DataTypeFactory.getInstance().fromDescription(type, database).toDatabaseDataType(database).toString() + ",");
    }

    /** Expands the auto_increment_clause placeholder for the given database. */
    private String replaceDatabaseClauses(String convertedSql, Database database) {
        return convertedSql.replaceFirst("auto_increment_clause", database.getAutoIncrementClause(null, null, null, null));
    }

    /**
     * Decides whether the given database should be exercised, honoring the
     * include/exclude lists, generator support, and databases already tested.
     */
    private boolean shouldTestDatabase(Database database, Class<? extends Database>[] includeDatabases, Class<? extends Database>[] excludeDatabases) {
        if ((database instanceof MockDatabase) || (database instanceof ExampleCustomDatabase) || (database instanceof
                UnsupportedDatabase)) {
            return false;
        }
        if (!SqlGeneratorFactory.getInstance().supports(statementUnderTest, database)
                || SqlGeneratorFactory.getInstance().validate(statementUnderTest, database).hasErrors()) {
            return false;
        }
        boolean shouldInclude = true;
        if ((includeDatabases != null) && (includeDatabases.length > 0)) {
            shouldInclude = Arrays.asList(includeDatabases).contains(database.getClass());
        }
        boolean shouldExclude = false;
        if ((excludeDatabases != null) && (excludeDatabases.length > 0)) {
            shouldExclude = Arrays.asList(excludeDatabases).contains(database.getClass());
        }
        return !shouldExclude && shouldInclude && !testedDatabases.contains(database.getClass());
    }

    /** Replaces every [objectName] placeholder with the database-escaped object name. */
    private String replaceEscaping(String expectedSql, Database database) {
        String convertedSql = expectedSql;
        int lastIndex = 0;
        while ((lastIndex = convertedSql.indexOf("[", lastIndex)) >= 0) {
            String objectName = convertedSql.substring(lastIndex + 1, convertedSql.indexOf("]", lastIndex));
            try {
                convertedSql = convertedSql.replace("[" + objectName + "]", database.escapeObjectName(objectName, Table.class));
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            lastIndex++;
        }
        return convertedSql;
    }

    /**
     * Drops all objects (including Liquibase bookkeeping tables) from every
     * available test database and re-runs the subclass setup statements, so
     * each test starts from a clean schema.
     */
    public void resetAvailableDatabases() throws Exception {
        for (DatabaseTestSystem testSystem : Scope.getCurrentScope().getSingleton(TestSystemFactory.class).getAvailable(DatabaseTestSystem.class)) {
            testSystem.start();
            Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(testSystem.getConnection()));
            DatabaseConnection connection = database.getConnection();
            // try-with-resources: previously the statement leaked when a drop threw.
            try (Statement connectionStatement = ((JdbcConnection) connection).getUnderlyingConnection().createStatement()) {
                connection.commit();
                try {
                    database.dropDatabaseObjects(CatalogAndSchema.DEFAULT);
                    CatalogAndSchema alt = new CatalogAndSchema(testSystem.getAltCatalog(), testSystem.getAltSchema());
                    database.dropDatabaseObjects(alt);
                } catch (Exception e) {
                    throw new UnexpectedLiquibaseException("Error dropping objects for database "+database.getShortName(), e);
                }
                try {
                    connectionStatement.executeUpdate("drop table " + database.escapeTableName(database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName(), database.getDatabaseChangeLogLockTableName()));
                } catch (SQLException e) {
                    //ok, the table may not exist
                }
                connection.commit();
                try {
                    connectionStatement.executeUpdate("drop table " + database.escapeTableName(database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName(), database.getDatabaseChangeLogTableName()));
                } catch (SQLException e) {
                    //ok, the table may not exist
                }
                connection.commit();
                if (database.supportsSchemas()) {
                    database.dropDatabaseObjects(new CatalogAndSchema(null, testSystem.getAltSchema()));
                    connection.commit();
                    try {
                        connectionStatement.executeUpdate("drop table " + database.escapeTableName(testSystem.getAltCatalog(), testSystem.getAltSchema(), database.getDatabaseChangeLogLockTableName()));
                    } catch (SQLException e) {
                        //ok
                    }
                    connection.commit();
                    try {
                        connectionStatement.executeUpdate("drop table " + database.escapeTableName(testSystem.getAltCatalog(), testSystem.getAltSchema(), database.getDatabaseChangeLogTableName()));
                    } catch (SQLException e) {
                        //ok
                    }
                    connection.commit();
                }
                List<? extends SqlStatement> setupStatements = setupStatements(database);
                if (setupStatements != null) {
                    for (SqlStatement statement : setupStatements) {
                        Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", database).execute(statement);
                    }
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.orc;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
import org.apache.orc.impl.OutStream;
import org.apache.orc.impl.RecordReaderImpl;
import org.apache.orc.impl.StreamName;
import org.apache.orc.impl.TestInStream;
import org.apache.orc.impl.writer.StreamOptions;
import org.apache.orc.impl.writer.StringTreeWriter;
import org.apache.orc.impl.writer.TreeWriter;
import org.apache.orc.impl.writer.WriterContext;
import org.apache.orc.impl.writer.WriterEncryptionVariant;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestStringDictionary {
private Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + File.separator + "test"
+ File.separator + "tmp"));
private Configuration conf;
private FileSystem fs;
private Path testFilePath;
@BeforeEach
public void openFileSystem(TestInfo testInfo) throws Exception {
conf = new Configuration();
fs = FileSystem.getLocal(conf);
testFilePath = new Path(workDir, "TestStringDictionary." +
testInfo.getTestMethod().get().getName() + ".orc");
fs.delete(testFilePath, false);
}
private static Stream<Arguments> data() {
return Stream.of(Arguments.of("RBTREE"), Arguments.of("HASH"));
}
@ParameterizedTest
@MethodSource("data")
public void testTooManyDistinct(String dictImpl) throws Exception {
OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
TypeDescription schema = TypeDescription.createString();
Writer writer = OrcFile.createWriter(
testFilePath,
OrcFile.writerOptions(conf).setSchema(schema)
.compress(CompressionKind.NONE)
.bufferSize(10000));
VectorizedRowBatch batch = schema.createRowBatch();
BytesColumnVector col = (BytesColumnVector) batch.cols[0];
for (int i = 0; i < 20000; i++) {
if (batch.size == batch.getMaxSize()) {
writer.addRowBatch(batch);
batch.reset();
}
col.setVal(batch.size++, String.valueOf(i).getBytes(StandardCharsets.UTF_8));
}
writer.addRowBatch(batch);
writer.close();
Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
RecordReader rows = reader.rows();
batch = reader.getSchema().createRowBatch();
col = (BytesColumnVector) batch.cols[0];
int idx = 0;
while (rows.nextBatch(batch)) {
for(int r=0; r < batch.size; ++r) {
assertEquals(String.valueOf(idx++), col.toString(r));
}
}
// make sure the encoding type is correct
for (StripeInformation stripe : reader.getStripes()) {
// hacky but does the job, this casting will work as long this test resides
// within the same package as ORC reader
OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
for (int i = 0; i < footer.getColumnsCount(); ++i) {
OrcProto.ColumnEncoding encoding = footer.getColumns(i);
assertEquals(OrcProto.ColumnEncoding.Kind.DIRECT_V2, encoding.getKind());
}
}
}
@ParameterizedTest
@MethodSource("data")
public void testHalfDistinct(String dictImpl) throws Exception {
OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
final int totalSize = 20000;
final int bound = 10000;
TypeDescription schema = TypeDescription.createString();
Writer writer = OrcFile.createWriter(
testFilePath,
OrcFile.writerOptions(conf).setSchema(schema).compress(CompressionKind.NONE)
.bufferSize(bound));
Random rand = new Random(123);
int[] input = new int[totalSize];
for (int i = 0; i < totalSize; i++) {
input[i] = rand.nextInt(bound);
}
VectorizedRowBatch batch = schema.createRowBatch();
BytesColumnVector col = (BytesColumnVector) batch.cols[0];
for (int i = 0; i < totalSize; i++) {
if (batch.size == batch.getMaxSize()) {
writer.addRowBatch(batch);
batch.reset();
}
col.setVal(batch.size++, String.valueOf(input[i]).getBytes(StandardCharsets.UTF_8));
}
writer.addRowBatch(batch);
writer.close();
Reader reader = OrcFile.createReader(testFilePath,
OrcFile.readerOptions(conf).filesystem(fs));
RecordReader rows = reader.rows();
batch = reader.getSchema().createRowBatch();
col = (BytesColumnVector) batch.cols[0];
int idx = 0;
while (rows.nextBatch(batch)) {
for(int r=0; r < batch.size; ++r) {
assertEquals(String.valueOf(input[idx++]), col.toString(r));
}
}
// make sure the encoding type is correct
for (StripeInformation stripe : reader.getStripes()) {
// hacky but does the job, this casting will work as long this test resides
// within the same package as ORC reader
OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
for (int i = 0; i < footer.getColumnsCount(); ++i) {
OrcProto.ColumnEncoding encoding = footer.getColumns(i);
assertEquals(OrcProto.ColumnEncoding.Kind.DICTIONARY_V2, encoding.getKind());
}
}
}
static class WriterContextImpl implements WriterContext {
private final TypeDescription schema;
private final Configuration conf;
private final Map<StreamName, TestInStream.OutputCollector> streams =
new HashMap<>();
WriterContextImpl(TypeDescription schema, Configuration conf) {
this.schema = schema;
this.conf = conf;
}
@Override
public OutStream createStream(StreamName name) {
TestInStream.OutputCollector collect = new TestInStream.OutputCollector();
streams.put(name, collect);
return new OutStream("test", new StreamOptions(1000), collect);
}
@Override
public int getRowIndexStride() {
return 10000;
}
@Override
public boolean buildIndex() {
return OrcConf.ENABLE_INDEXES.getBoolean(conf);
}
@Override
public boolean isCompressed() {
return false;
}
@Override
public OrcFile.EncodingStrategy getEncodingStrategy() {
return OrcFile.EncodingStrategy.SPEED;
}
@Override
public boolean[] getBloomFilterColumns() {
return new boolean[schema.getMaximumId() + 1];
}
@Override
public double getBloomFilterFPP() {
return 0;
}
@Override
public Configuration getConfiguration() {
return conf;
}
@Override
public OrcFile.Version getVersion() {
return OrcFile.Version.V_0_12;
}
@Override
public PhysicalWriter getPhysicalWriter() {
return null;
}
@Override
public void setEncoding(int column, WriterEncryptionVariant variant, OrcProto.ColumnEncoding encoding) {
}
@Override
public void writeStatistics(StreamName name, OrcProto.ColumnStatistics.Builder stats) {
}
@Override
public OrcFile.BloomFilterVersion getBloomFilterVersion() {
return OrcFile.BloomFilterVersion.UTF8;
}
@Override
public void writeIndex(StreamName name, OrcProto.RowIndex.Builder index) {
}
@Override
public void writeBloomFilter(StreamName name,
OrcProto.BloomFilterIndex.Builder bloom) {
}
@Override
public DataMask getUnencryptedMask(int columnId) {
return null;
}
@Override
public WriterEncryptionVariant getEncryption(int columnId) {
return null;
}
@Override
public boolean getUseUTCTimestamp() {
return true;
}
@Override
public double getDictionaryKeySizeThreshold(int column) {
return OrcConf.DICTIONARY_KEY_SIZE_THRESHOLD.getDouble(conf);
}
@Override
public boolean getProlepticGregorian() {
return false;
}
}
@ParameterizedTest
@MethodSource("data")
public void testNonDistinctDisabled(String dictImpl) throws Exception {
OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
TypeDescription schema = TypeDescription.createString();
conf.set(OrcConf.DICTIONARY_KEY_SIZE_THRESHOLD.getAttribute(), "0.0");
WriterContextImpl writerContext = new WriterContextImpl(schema, conf);
StringTreeWriter writer = (StringTreeWriter)
TreeWriter.Factory.create(schema, null, writerContext);
VectorizedRowBatch batch = schema.createRowBatch();
BytesColumnVector col = (BytesColumnVector) batch.cols[0];
batch.size = 1024;
col.isRepeating = true;
col.setVal(0, "foobar".getBytes(StandardCharsets.UTF_8));
writer.writeBatch(col, 0, batch.size);
TestInStream.OutputCollector output = writerContext.streams.get(
new StreamName(0, OrcProto.Stream.Kind.DATA));
// Check to make sure that the strings are being written to the stream,
// even before we get to the first rowGroup. (6 * 1024 / 1000 * 1000)
assertEquals(6000, output.buffer.size());
}
@ParameterizedTest
@MethodSource("data")
public void testTooManyDistinctCheckDisabled(String dictImpl) throws Exception {
OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
TypeDescription schema = TypeDescription.createString();
conf.setBoolean(OrcConf.ROW_INDEX_STRIDE_DICTIONARY_CHECK.getAttribute(), false);
Writer writer = OrcFile.createWriter(
testFilePath,
OrcFile.writerOptions(conf).setSchema(schema).compress(CompressionKind.NONE)
.bufferSize(10000));
VectorizedRowBatch batch = schema.createRowBatch();
BytesColumnVector string = (BytesColumnVector) batch.cols[0];
for (int i = 0; i < 20000; i++) {
if (batch.size == batch.getMaxSize()) {
writer.addRowBatch(batch);
batch.reset();
}
string.setVal(batch.size++, String.valueOf(i).getBytes(StandardCharsets.UTF_8));
}
writer.addRowBatch(batch);
writer.close();
Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
RecordReader rows = reader.rows();
batch = reader.getSchema().createRowBatch();
string = (BytesColumnVector) batch.cols[0];
int idx = 0;
while (rows.nextBatch(batch)) {
for(int r=0; r < batch.size; ++r) {
assertEquals(String.valueOf(idx++), string.toString(r));
}
}
// make sure the encoding type is correct
for (StripeInformation stripe : reader.getStripes()) {
// hacky but does the job, this casting will work as long this test resides
// within the same package as ORC reader
OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
for (int i = 0; i < footer.getColumnsCount(); ++i) {
OrcProto.ColumnEncoding encoding = footer.getColumns(i);
assertEquals(OrcProto.ColumnEncoding.Kind.DIRECT_V2, encoding.getKind());
}
}
}
@ParameterizedTest
@MethodSource("data")
public void testHalfDistinctCheckDisabled(String dictImpl) throws Exception {
OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
TypeDescription schema = TypeDescription.createString();
conf.setBoolean(OrcConf.ROW_INDEX_STRIDE_DICTIONARY_CHECK.getAttribute(),
false);
Writer writer = OrcFile.createWriter(
testFilePath,
OrcFile.writerOptions(conf).setSchema(schema)
.compress(CompressionKind.NONE)
.bufferSize(10000));
Random rand = new Random(123);
int[] input = new int[20000];
for (int i = 0; i < 20000; i++) {
input[i] = rand.nextInt(10000);
}
VectorizedRowBatch batch = schema.createRowBatch();
BytesColumnVector string = (BytesColumnVector) batch.cols[0];
for (int i = 0; i < 20000; i++) {
if (batch.size == batch.getMaxSize()) {
writer.addRowBatch(batch);
batch.reset();
}
string.setVal(batch.size++, String.valueOf(input[i]).getBytes(StandardCharsets.UTF_8));
}
writer.addRowBatch(batch);
writer.close();
Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
RecordReader rows = reader.rows();
batch = reader.getSchema().createRowBatch();
string = (BytesColumnVector) batch.cols[0];
int idx = 0;
while (rows.nextBatch(batch)) {
for(int r=0; r < batch.size; ++r) {
assertEquals(String.valueOf(input[idx++]), string.toString(r));
}
}
// make sure the encoding type is correct
for (StripeInformation stripe : reader.getStripes()) {
// hacky but does the job, this casting will work as long this test resides
// within the same package as ORC reader
OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
for (int i = 0; i < footer.getColumnsCount(); ++i) {
OrcProto.ColumnEncoding encoding = footer.getColumns(i);
assertEquals(OrcProto.ColumnEncoding.Kind.DICTIONARY_V2, encoding.getKind());
}
}
}
@ParameterizedTest
@MethodSource("data")
public void testTooManyDistinctV11AlwaysDictionary(String dictImpl) throws Exception {
OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
TypeDescription schema = TypeDescription.createString();
Writer writer = OrcFile.createWriter(
testFilePath,
OrcFile.writerOptions(conf).setSchema(schema)
.compress(CompressionKind.NONE)
.version(OrcFile.Version.V_0_11).bufferSize(10000));
VectorizedRowBatch batch = schema.createRowBatch();
BytesColumnVector string = (BytesColumnVector) batch.cols[0];
for (int i = 0; i < 20000; i++) {
if (batch.size == batch.getMaxSize()) {
writer.addRowBatch(batch);
batch.reset();
}
string.setVal(batch.size++, String.valueOf(i).getBytes(StandardCharsets.UTF_8));
}
writer.addRowBatch(batch);
writer.close();
Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
batch = reader.getSchema().createRowBatch();
string = (BytesColumnVector) batch.cols[0];
RecordReader rows = reader.rows();
int idx = 0;
while (rows.nextBatch(batch)) {
for(int r=0; r < batch.size; ++r) {
assertEquals(String.valueOf(idx++), string.toString(r));
}
}
// make sure the encoding type is correct
for (StripeInformation stripe : reader.getStripes()) {
// hacky but does the job, this casting will work as long this test resides
// within the same package as ORC reader
OrcProto.StripeFooter footer = ((RecordReaderImpl) rows).readStripeFooter(stripe);
for (int i = 0; i < footer.getColumnsCount(); ++i) {
OrcProto.ColumnEncoding encoding = footer.getColumns(i);
assertEquals(OrcProto.ColumnEncoding.Kind.DICTIONARY, encoding.getKind());
}
}
}
/**
* Test that dictionaries can be disabled, per column. In this test, we want to disable DICTIONARY_V2 for the
* `longString` column (presumably for a low hit-ratio), while preserving DICTIONARY_V2 for `shortString`.
* @throws Exception on unexpected failure
*/
@ParameterizedTest
@MethodSource("data")
public void testDisableDictionaryForSpecificColumn(String dictImpl) throws Exception {
OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
final String SHORT_STRING_VALUE = "foo";
final String LONG_STRING_VALUE = "BAAAAAAAAR!!";
TypeDescription schema =
TypeDescription.fromString("struct<shortString:string,longString:string>");
Writer writer = OrcFile.createWriter(
testFilePath,
OrcFile.writerOptions(conf).setSchema(schema)
.compress(CompressionKind.NONE)
.bufferSize(10000)
.directEncodingColumns("longString"));
VectorizedRowBatch batch = schema.createRowBatch();
BytesColumnVector shortStringColumnVector = (BytesColumnVector) batch.cols[0];
BytesColumnVector longStringColumnVector = (BytesColumnVector) batch.cols[1];
for (int i = 0; i < 20000; i++) {
if (batch.size == batch.getMaxSize()) {
writer.addRowBatch(batch);
batch.reset();
}
shortStringColumnVector.setVal(batch.size, SHORT_STRING_VALUE.getBytes(StandardCharsets.UTF_8));
longStringColumnVector.setVal( batch.size, LONG_STRING_VALUE.getBytes(StandardCharsets.UTF_8));
++batch.size;
}
writer.addRowBatch(batch);
writer.close();
Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
RecordReader recordReader = reader.rows();
batch = reader.getSchema().createRowBatch();
shortStringColumnVector = (BytesColumnVector) batch.cols[0];
longStringColumnVector = (BytesColumnVector) batch.cols[1];
while (recordReader.nextBatch(batch)) {
for(int r=0; r < batch.size; ++r) {
assertEquals(SHORT_STRING_VALUE, shortStringColumnVector.toString(r));
assertEquals(LONG_STRING_VALUE, longStringColumnVector.toString(r));
}
}
// make sure the encoding type is correct
for (StripeInformation stripe : reader.getStripes()) {
// hacky but does the job, this casting will work as long this test resides
// within the same package as ORC reader
OrcProto.StripeFooter footer = ((RecordReaderImpl) recordReader).readStripeFooter(stripe);
for (int i = 0; i < footer.getColumnsCount(); ++i) {
assertEquals(3, footer.getColumnsCount(),
"Expected 3 columns in the footer: One for the Orc Struct, and two for its members.");
assertEquals(
OrcProto.ColumnEncoding.Kind.DIRECT, footer.getColumns(0).getKind(),
"The ORC schema struct should be DIRECT encoded."
);
assertEquals(
OrcProto.ColumnEncoding.Kind.DICTIONARY_V2, footer.getColumns(1).getKind(),
"The shortString column must be DICTIONARY_V2 encoded"
);
assertEquals(
OrcProto.ColumnEncoding.Kind.DIRECT_V2, footer.getColumns(2).getKind(),
"The longString column must be DIRECT_V2 encoded"
);
}
}
}
@ParameterizedTest
@MethodSource("data")
public void testForcedNonDictionary(String dictImpl) throws Exception {
OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
// Set the row stride to 16k so that it is a multiple of the batch size
final int INDEX_STRIDE = 16 * 1024;
final int NUM_BATCHES = 50;
// Explicitly turn off dictionary encoding.
OrcConf.DICTIONARY_KEY_SIZE_THRESHOLD.setDouble(conf, 0);
TypeDescription schema = TypeDescription.fromString("struct<str:string>");
try (Writer writer = OrcFile.createWriter(testFilePath,
OrcFile.writerOptions(conf)
.setSchema(schema)
.rowIndexStride(INDEX_STRIDE))) {
// Write 50 batches where each batch has a single value for str.
VectorizedRowBatch batch = schema.createRowBatchV2();
BytesColumnVector col = (BytesColumnVector) batch.cols[0];
for(int b=0; b < NUM_BATCHES; ++b) {
batch.reset();
batch.size = 1024;
col.setVal(0, ("Value for " + b).getBytes(StandardCharsets.UTF_8));
col.isRepeating = true;
writer.addRowBatch(batch);
}
}
try (Reader reader = OrcFile.createReader(testFilePath,
OrcFile.readerOptions(conf));
RecordReaderImpl rows = (RecordReaderImpl) reader.rows()) {
VectorizedRowBatch batch = reader.getSchema().createRowBatchV2();
BytesColumnVector col = (BytesColumnVector) batch.cols[0];
// Get the index for the str column
OrcProto.RowIndex index = rows.readRowIndex(0, null, null)
.getRowGroupIndex()[1];
// We assume that it fits in a single stripe
assertEquals(1, reader.getStripes().size());
// There are 4 entries, because ceil(NUM_BATCHES * 1024 / INDEX_STRIDE) = 4.
assertEquals(4, index.getEntryCount());
for(int e=0; e < index.getEntryCount(); ++e) {
OrcProto.RowIndexEntry entry = index.getEntry(e);
// For a string column with direct encoding, compression & no nulls, we
// should have 5 positions in each entry.
assertEquals(5, entry.getPositionsCount(), "position count entry " + e);
// make sure we can seek and get the right data
int row = e * INDEX_STRIDE;
rows.seekToRow(row);
assertTrue(rows.nextBatch(batch), "entry " + e);
assertEquals(1024, batch.size, "entry " + e);
assertTrue(col.noNulls, "entry " + e);
assertEquals("Value for " + (row / 1024), col.toString(0), "entry " + e);
}
}
}
/**
 * That when we disable dictionaries, we don't get broken row indexes.
 */
@ParameterizedTest
@MethodSource("data")
public void testRowIndex(String dictImpl) throws Exception {
  OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
  TypeDescription schema = TypeDescription.fromString("struct<str:string>");
  // Disable dictionary encoding entirely by setting the key-size threshold to 0.
  OrcConf.DICTIONARY_KEY_SIZE_THRESHOLD.setDouble(conf, 0);
  Writer writer = OrcFile.createWriter(
      testFilePath,
      OrcFile.writerOptions(conf).setSchema(schema).rowIndexStride(4 * 1024));
  VectorizedRowBatch batch = schema.createRowBatch();
  BytesColumnVector strCol = (BytesColumnVector) batch.cols[0];
  // Write 32K rows of "row NNNNNN", flushing whenever the batch fills up.
  for (int row = 0; row < 32 * 1024; ++row) {
    if (batch.size == batch.getMaxSize()) {
      writer.addRowBatch(batch);
      batch.reset();
    }
    byte[] bytes = String.format("row %06d", row).getBytes(StandardCharsets.UTF_8);
    strCol.setRef(batch.size, bytes, 0, bytes.length);
    batch.size += 1;
  }
  writer.addRowBatch(batch);
  writer.close();
  // Read back with a predicate that only the first row group can satisfy.
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  SearchArgument sarg = SearchArgumentFactory.newBuilder(conf)
      .lessThan("str", PredicateLeaf.Type.STRING, "row 001000")
      .build();
  RecordReader recordReader = reader.rows(reader.options().searchArgument(sarg, null));
  batch = reader.getSchema().createRowBatch();
  strCol = (BytesColumnVector) batch.cols[0];
  long rowsRead = 0;
  while (recordReader.nextBatch(batch)) {
    for (int r = 0; r < batch.size; ++r) {
      long globalRow = rowsRead + r;
      assertEquals(String.format("row %06d", globalRow), strCol.toString(r),
          "row " + globalRow);
    }
    rowsRead += batch.size;
  }
  // We should only read the first row group.
  assertEquals(4 * 1024, rowsRead);
}
/**
 * Test that files written before ORC-569 are read correctly.
 */
@ParameterizedTest
@MethodSource("data")
public void testRowIndexPreORC569(String dictImpl) throws Exception {
  OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
  // Use a checked-in example file written by a pre-ORC-569 version.
  testFilePath = new Path(System.getProperty("example.dir"), "TestStringDictionary.testRowIndex.orc");
  SearchArgument sarg = SearchArgumentFactory.newBuilder(conf)
      .lessThan("str", PredicateLeaf.Type.STRING, "row 001000")
      .build();
  try (Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs))) {
    // Filtered read: only rows matching the predicate should come back.
    try (RecordReader recordReader = reader.rows(reader.options().searchArgument(sarg, null))) {
      VectorizedRowBatch batch = reader.getSchema().createRowBatch();
      BytesColumnVector strCol = (BytesColumnVector) batch.cols[0];
      long rowsRead = 0;
      while (recordReader.nextBatch(batch)) {
        for (int r = 0; r < batch.size; ++r) {
          long globalRow = rowsRead + r;
          assertEquals(String.format("row %06d", globalRow), strCol.toString(r),
              "row " + globalRow);
        }
        rowsRead += batch.size;
      }
      // We should only read the first row group.
      assertEquals(4 * 1024, rowsRead);
    }
    // Seeking must work in both directions against the legacy row index.
    try (RecordReader recordReader = reader.rows()) {
      VectorizedRowBatch batch = reader.getSchema().createRowBatch();
      recordReader.seekToRow(4 * 1024);
      assertTrue(recordReader.nextBatch(batch));
      recordReader.seekToRow(0);
      assertTrue(recordReader.nextBatch(batch));
    }
  }
}
}
| |
package org.apache.taverna.robundle.fs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.WatchEvent.Kind;
import java.nio.file.WatchEvent.Modifier;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.Iterator;
/**
 * A {@link Path} for a {@link BundleFileSystem}.
 *
 * <p>Every operation delegates to an underlying ZIP file-system {@link Path},
 * unwrapping incoming {@code BundlePath} arguments and wrapping returned paths
 * back into {@code BundlePath} instances via the owning file system.
 */
public class BundlePath implements Path {
    private final BundleFileSystem fs;
    private final Path zipPath;

    protected BundlePath(BundleFileSystem fs, Path zipPath) {
        if (fs == null || zipPath == null) {
            throw new NullPointerException();
        }
        this.fs = fs;
        this.zipPath = zipPath;
    }

    @Override
    public int compareTo(Path other) {
        Path unwrapped = fs.unwrap(other);
        return zipPath.compareTo(unwrapped);
    }

    @Override
    public boolean endsWith(Path other) {
        Path unwrapped = fs.unwrap(other);
        return zipPath.endsWith(unwrapped);
    }

    @Override
    public boolean endsWith(String other) {
        return zipPath.endsWith(other);
    }

    @Override
    public boolean equals(Object other) {
        if (other instanceof BundlePath) {
            // Delegate equality to the underlying ZIP paths.
            Path otherZip = fs.unwrap((BundlePath) other);
            return zipPath.equals(otherZip);
        }
        return false;
    }

    @Override
    public BundlePath getFileName() {
        Path name = zipPath.getFileName();
        return fs.wrap(name);
    }

    @Override
    public BundleFileSystem getFileSystem() {
        return fs;
    }

    @Override
    public BundlePath getName(int index) {
        Path name = zipPath.getName(index);
        return fs.wrap(name);
    }

    @Override
    public int getNameCount() {
        return zipPath.getNameCount();
    }

    @Override
    public BundlePath getParent() {
        Path parent = zipPath.getParent();
        return fs.wrap(parent);
    }

    @Override
    public BundlePath getRoot() {
        Path root = zipPath.getRoot();
        return fs.wrap(root);
    }

    /** The underlying ZIP file-system path this instance delegates to. */
    protected Path getZipPath() {
        return zipPath;
    }

    @Override
    public int hashCode() {
        // Consistent with equals(), which compares the underlying ZIP paths.
        return zipPath.hashCode();
    }

    @Override
    public boolean isAbsolute() {
        return zipPath.isAbsolute();
    }

    @Override
    public Iterator<Path> iterator() {
        Iterator<Path> zipIterator = zipPath.iterator();
        return fs.wrapIterator(zipIterator);
    }

    @Override
    public BundlePath normalize() {
        Path normalized = zipPath.normalize();
        return fs.wrap(normalized);
    }

    /** Watching a bundle path is not supported. */
    @Override
    public WatchKey register(WatchService watcher, Kind<?>... events)
            throws IOException {
        throw new UnsupportedOperationException();
    }

    /** Watching a bundle path is not supported. */
    @Override
    public WatchKey register(WatchService watcher, Kind<?>[] events,
            Modifier... modifiers) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public BundlePath relativize(Path other) {
        Path relative = zipPath.relativize(fs.unwrap(other));
        return fs.wrap(relative);
    }

    @Override
    public BundlePath resolve(Path other) {
        Path resolved = zipPath.resolve(fs.unwrap(other));
        return fs.wrap(resolved);
    }

    @Override
    public BundlePath resolve(String other) {
        Path resolved = zipPath.resolve(other);
        return fs.wrap(resolved);
    }

    @Override
    public BundlePath resolveSibling(Path other) {
        Path sibling = zipPath.resolveSibling(fs.unwrap(other));
        return fs.wrap(sibling);
    }

    @Override
    public BundlePath resolveSibling(String other) {
        Path sibling = zipPath.resolveSibling(other);
        return fs.wrap(sibling);
    }

    @Override
    public boolean startsWith(Path other) {
        Path unwrapped = fs.unwrap(other);
        return zipPath.startsWith(unwrapped);
    }

    @Override
    public boolean startsWith(String other) {
        return zipPath.startsWith(other);
    }

    @Override
    public BundlePath subpath(int beginIndex, int endIndex) {
        Path sub = zipPath.subpath(beginIndex, endIndex);
        return fs.wrap(sub);
    }

    @Override
    public BundlePath toAbsolutePath() {
        Path absolute = zipPath.toAbsolutePath();
        return fs.wrap(absolute);
    }

    /** Converting to a {@link File} is not supported for bundle paths. */
    @Override
    public File toFile() {
        throw new UnsupportedOperationException();
    }

    @Override
    public BundlePath toRealPath(LinkOption... options) throws IOException {
        Path real = zipPath.toRealPath(options);
        return fs.wrap(real);
    }

    /**
     * Note: This method is used by JSON serialization and should return a valid
     * relative path from .ro/ or /
     */
    @Override
    public String toString() {
        // Paths outside /.ro/ (or relative paths) render as the plain ZIP path.
        if (!zipPath.isAbsolute() || !zipPath.startsWith("/.ro/")) {
            return zipPath.toString();
        }
        // Paths under /.ro/ are rendered relative to that directory.
        Path roBase = fs.getRootDirectory().zipPath.resolve(".ro");
        return roBase.relativize(zipPath).toString();
    }

    @Override
    public URI toUri() {
        Path absolute = zipPath.toAbsolutePath();
        try {
            // Encode the absolute path as a URI path component, then resolve it
            // against the file system's base URI.
            URI relative = new URI(null, null, absolute.toString(), null);
            return fs.getBaseURI().resolve(relative);
        } catch (URISyntaxException e) {
            throw new IllegalStateException("Can't create URL for " + zipPath,
                    e);
        }
    }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.agera.rvdatabinding;
import static android.databinding.DataBinderMapper.setDataBinding;
import static com.google.android.agera.Result.failure;
import static com.google.android.agera.Result.present;
import static com.google.android.agera.Result.success;
import static com.google.android.agera.rvdatabinding.DataBindingRepositoryPresenters.dataBindingRepositoryPresenterOf;
import static com.google.android.agera.rvdatabinding.RecycleConfig.CLEAR_ALL;
import static com.google.android.agera.rvdatabinding.RecycleConfig.CLEAR_COLLECTION;
import static com.google.android.agera.rvdatabinding.RecycleConfig.CLEAR_HANDLERS;
import static com.google.android.agera.rvdatabinding.RecycleConfig.CLEAR_ITEM;
import static com.google.android.agera.rvdatabinding.RecycleConfig.DO_NOTHING;
import static com.google.android.agera.rvdatabinding.test.VerifyingWrappers.verifyingWrapper;
import static com.google.android.agera.rvdatabinding.test.matchers.HasPrivateConstructor.hasPrivateConstructor;
import static java.lang.String.valueOf;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import android.databinding.ViewDataBinding;
import android.support.annotation.LayoutRes;
import android.support.annotation.NonNull;
import android.support.v7.util.DiffUtil;
import android.support.v7.util.ListUpdateCallback;
import android.support.v7.widget.RecyclerView.ViewHolder;
import android.view.View;
import com.google.android.agera.Function;
import com.google.android.agera.Functions;
import com.google.android.agera.Result;
import com.google.android.agera.rvadapter.RepositoryPresenter;
import com.google.android.agera.rvdatabinding.test.DiffingLogic;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class DataBindingRepositoryPresentersTest {
// Fixture values that get bound through the presenters under test.
private static final String STRING = "string";
private static final String FIRST_STRING_CHARACTER = "s";
private static final String SECOND_STRING = "string2";
private static final Result<String> STRING_RESULT = present(STRING);
private static final List<String> STRING_LIST = asList(STRING, SECOND_STRING);
private static final Result<List<String>> STRING_LIST_RESULT = success(STRING_LIST);
private static final Result<String> FAILURE = failure();
private static final Result<List<String>> LIST_FAILURE = failure();
private static final Object HANDLER = new Object();
private static final Object SECOND_HANDLER = new Object();
// Layout resource and data-binding variable ids (arbitrary distinct ints).
@LayoutRes
private static final int LAYOUT_ID = 1;
private static final int DYNAMIC_LAYOUT_ID = 2;
private static final int ITEM_ID = 3;
private static final int HANDLER_ID = 4;
private static final int SECOND_HANDLER_ID = 5;
private static final int COLLECTION_ID = 6;
private static final long STABLE_ID = 2;
// Mocks initialized by initMocks(this) in setUp().
@Mock
private Function<String, Integer> layoutForItem;
@Mock
private Function<String, Integer> itemIdForItem;
@Mock
private ViewDataBinding viewDataBinding;
@Mock
private View view;
@Mock
private ListUpdateCallback listUpdateCallback;
private ViewHolder viewHolder;
@Before
public void setUp() {
  initMocks(this);
  viewHolder = new ViewHolder(view) {};
  // Associate the mock ViewDataBinding with both layout ids so presenters
  // built with either layout resolve to the same mock.
  setDataBinding(viewDataBinding, LAYOUT_ID);
  setDataBinding(viewDataBinding, DYNAMIC_LAYOUT_ID);
  when(view.getTag()).thenReturn("string");
  // Dynamic layout/item-id functions are only stubbed for SECOND_STRING.
  when(layoutForItem.apply(SECOND_STRING)).thenReturn(DYNAMIC_LAYOUT_ID);
  when(itemIdForItem.apply(SECOND_STRING)).thenReturn(ITEM_ID);
}
@Test
public void shouldBindRepositoryPresenterOfResult() {
  // A present result binds the item variable plus every registered handler.
  final RepositoryPresenter<Result<String>> presenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .forResult();
  presenter.bind(STRING_RESULT, 0, viewHolder);
  verify(view).setTag(R.id.agera__rvdatabinding__item_id, ITEM_ID);
  verify(viewDataBinding).setVariable(ITEM_ID, STRING);
  verify(viewDataBinding).setVariable(HANDLER_ID, HANDLER);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, SECOND_HANDLER);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Without an itemId, binding only flushes pending bindings.
@Test
public void shouldBindRepositoryPresenterWithoutItem() {
  final RepositoryPresenter<String> repositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .forItem();
  repositoryPresenter.bind(STRING, 0, viewHolder);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// A collection presenter binds the converted element at the given position.
@Test
public void shouldBindRepositoryPresenterOfCollection() {
  final RepositoryPresenter<String> repositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .forCollection(new Function<String, List<String>>() {
            @NonNull
            @Override
            public List<String> apply(@NonNull final String input) {
              return singletonList(valueOf(input.charAt(0)));
            }
          });
  repositoryPresenter.bind(STRING, 0, viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, FIRST_STRING_CHARACTER);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// With a collectionId, the whole collection is also bound as a variable.
@Test
public void shouldBindRepositoryPresenterCollectionOfCollection() {
  final RepositoryPresenter<String> repositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .collectionId(COLLECTION_ID)
          .forCollection(new StringToFirstCharStringList());
  repositoryPresenter.bind(STRING, 0, viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, FIRST_STRING_CHARACTER);
  verify(viewDataBinding).setVariable(COLLECTION_ID, STRING);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// CLEAR_ALL with no itemId configured: only pending bindings are executed.
@Test
public void shouldHandleRecycleOfRepositoryPresenterWithoutItemId() {
  final RepositoryPresenter<String> repositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .onRecycle(CLEAR_ALL)
          .forItem();
  repositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// DO_NOTHING: recycling must not touch the binding at all.
@Test
public void shouldNotRecycleRepositoryPresenterOfResultWithNoRecycling() {
  final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(DO_NOTHING)
          .forResult();
  resultRepositoryPresenter.recycle(viewHolder);
  verifyNoMoreInteractions(viewDataBinding);
}
// CLEAR_ITEM: only the item variable is nulled on recycle.
@Test
public void shouldRecycleRepositoryPresenterOfResultWithItemRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ITEM)
          .forResult();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// CLEAR_ALL: item and both handler variables are nulled on recycle.
@Test
public void shouldRecycleRepositoryPresenterOfResultWithAllRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ALL)
          .forResult();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// CLEAR_HANDLERS: only the handler variables are nulled on recycle.
@Test
public void shouldRecycleRepositoryPresenterOfResultWithHandlerRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_HANDLERS)
          .forResult();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Collection presenter, DO_NOTHING: recycling must not touch the binding.
@Test
public void shouldNotRecycleRepositoryPresenterOfCollectionWithNoRecycling() {
  final RepositoryPresenter<String> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(DO_NOTHING)
          .collectionId(COLLECTION_ID)
          .forCollection(new StringToFirstCharStringList());
  resultRepositoryPresenter.recycle(viewHolder);
  verifyNoMoreInteractions(viewDataBinding);
}
// Collection presenter, CLEAR_ITEM: only the item variable is nulled.
@Test
public void shouldRecycleRepositoryPresenterOfCollectionWithItemRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<String> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ITEM)
          .collectionId(COLLECTION_ID)
          .forCollection(new StringToFirstCharStringList());
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Collection presenter, CLEAR_ALL: item, handlers and collection are nulled.
@Test
public void shouldRecycleRepositoryPresenterOfCollectionWithAllRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  when(view.getTag(R.id.agera__rvdatabinding__collection_id)).thenReturn(COLLECTION_ID);
  final RepositoryPresenter<String> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ALL)
          .collectionId(COLLECTION_ID)
          .forCollection(new StringToFirstCharStringList());
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).setVariable(COLLECTION_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Collection presenter, CLEAR_COLLECTION: only the collection variable is nulled.
@Test
public void shouldRecycleRepositoryPresenterOfCollectionWithCollectionRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  when(view.getTag(R.id.agera__rvdatabinding__collection_id)).thenReturn(COLLECTION_ID);
  final RepositoryPresenter<String> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_COLLECTION)
          .collectionId(COLLECTION_ID)
          .forCollection(new StringToFirstCharStringList());
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(COLLECTION_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Collection presenter, CLEAR_HANDLERS: only handler variables are nulled.
@Test
public void shouldRecycleRepositoryPresenterOfCollectionWithHandlerRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  when(view.getTag(R.id.agera__rvdatabinding__collection_id)).thenReturn(COLLECTION_ID);
  final RepositoryPresenter<String> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_HANDLERS)
          .collectionId(COLLECTION_ID)
          .forCollection(new StringToFirstCharStringList());
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Result-list presenter: binding position 1 binds the second list element.
@Test
public void shouldBindRepositoryPresenterOfResultList() {
  final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .forResultList();
  resultListRepositoryPresenter.bind(STRING_LIST_RESULT, 1, viewHolder);
  verify(view).setTag(R.id.agera__rvdatabinding__item_id, ITEM_ID);
  verify(viewDataBinding).setVariable(ITEM_ID, SECOND_STRING);
  verify(viewDataBinding).setVariable(HANDLER_ID, HANDLER);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Result-list presenter, DO_NOTHING: recycling must not touch the binding.
@Test
public void shouldNotRecycleRepositoryPresenterOfResultListWithNoRecycling() {
  final RepositoryPresenter<Result<List<String>>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(DO_NOTHING)
          .forResultList();
  resultRepositoryPresenter.recycle(viewHolder);
  verifyNoMoreInteractions(viewDataBinding);
}
// Result-list presenter, CLEAR_ITEM: only the item variable is nulled.
@Test
public void shouldRecycleRepositoryPresenterOfResultListWithItemRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<Result<List<String>>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ITEM)
          .forResultList();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Result-list presenter, CLEAR_ALL: item and handler variables are nulled.
@Test
public void shouldRecycleRepositoryPresenterOfResultListWithAllRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<Result<List<String>>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ALL)
          .forResultList();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Result-list presenter, CLEAR_HANDLERS: only handler variables are nulled.
@Test
public void shouldRecycleRepositoryPresenterOfResultListWithHandlerRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<Result<List<String>>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_HANDLERS)
          .forResultList();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Item presenter, CLEAR_ITEM: only the item variable is nulled.
@Test
public void shouldRecycleRepositoryPresenterOfItemWithItemRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<String> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ITEM)
          .forItem();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Item presenter, CLEAR_ALL: item and handler variables are nulled.
@Test
public void shouldRecycleRepositoryPresenterOfItemWithAllRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<String> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ALL)
          .forItem();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Item presenter, CLEAR_HANDLERS: only handler variables are nulled.
@Test
public void shouldRecycleRepositoryPresenterOfItemWithHandlerRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<String> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_HANDLERS)
          .forItem();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// Item presenter bind smoke test (no verification beyond not throwing).
@Test
public void shouldBindRepositoryPresenterOfItem() {
  final RepositoryPresenter<String> itemRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .forItem();
  itemRepositoryPresenter.bind(STRING, 0, viewHolder);
}
// List presenter: binding position 1 binds the second list element.
@Test
public void shouldBindRepositoryPresenterOfList() {
  final RepositoryPresenter<List<String>> listRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .forList();
  listRepositoryPresenter.bind(STRING_LIST, 1, viewHolder);
  verify(view).setTag(R.id.agera__rvdatabinding__item_id, ITEM_ID);
  verify(viewDataBinding).setVariable(ITEM_ID, SECOND_STRING);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// List presenter, DO_NOTHING: recycling must not touch the binding.
@Test
public void shouldNotRecycleRepositoryPresenterOfListWithNoRecycling() {
  final RepositoryPresenter<List<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(DO_NOTHING)
          .forList();
  resultRepositoryPresenter.recycle(viewHolder);
  verifyNoMoreInteractions(viewDataBinding);
}
// List presenter, CLEAR_ITEM: only the item variable is nulled.
@Test
public void shouldRecycleRepositoryPresenterOfListWithItemRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<List<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ITEM)
          .forList();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// List presenter, CLEAR_ALL: item and handler variables are nulled.
@Test
public void shouldRecycleRepositoryPresenterOfListWithAllRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<List<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_ALL)
          .forList();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(ITEM_ID, null);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// List presenter, CLEAR_HANDLERS: only handler variables are nulled.
@Test
public void shouldRecycleRepositoryPresenterOfListWithHandlerRecycling() {
  when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
  final RepositoryPresenter<List<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
          .onRecycle(CLEAR_HANDLERS)
          .forList();
  resultRepositoryPresenter.recycle(viewHolder);
  verify(viewDataBinding).setVariable(HANDLER_ID, null);
  verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
  verify(viewDataBinding).executePendingBindings();
  verifyNoMoreInteractions(viewDataBinding);
}
// A failed result contributes zero items.
@Test
public void shouldReturnZeroForCountOfRepositoryPresenterOfFailedResult() {
  final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .handler(HANDLER_ID, HANDLER)
          .forResult();
  assertThat(resultRepositoryPresenter.getItemCount(FAILURE), is(0));
}
// A present result contributes exactly one item.
@Test
public void shouldReturnOneForCountOfRepositoryPresenterOfSuccessfulResult() {
  final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .forResult();
  assertThat(resultRepositoryPresenter.getItemCount(STRING_RESULT), is(1));
}
// A list contributes one item per element.
@Test
public void shouldReturnListSizeForCountOfRepositoryPresenterOfList() {
  final RepositoryPresenter<List<String>> listRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .forList();
  assertThat(listRepositoryPresenter.getItemCount(STRING_LIST), is(STRING_LIST.size()));
}
// A failed result list contributes zero items.
@Test
public void shouldReturnZeroForCountOfRepositoryPresenterOfFailedResultList() {
  final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .forResultList();
  assertThat(resultListRepositoryPresenter.getItemCount(LIST_FAILURE), is(0));
}
// A successful result list contributes one item per element.
@Test
public void shouldReturnListSizeForCountOfRepositoryPresenterOfSuccessfulResultList() {
  final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemId(ITEM_ID)
          .forResultList();
  assertThat(resultListRepositoryPresenter.getItemCount(STRING_LIST_RESULT),
      is(STRING_LIST.size()));
}
// layoutForItem: layout id is derived per item (stubbed for SECOND_STRING in setUp).
@Test
public void shouldGenerateLayoutForItemOfRepositoryPresenterOfResultList() {
  final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layoutForItem(layoutForItem)
          .itemId(ITEM_ID)
          .forResultList();
  assertThat(resultListRepositoryPresenter.getLayoutResId(STRING_LIST_RESULT, 1),
      is(DYNAMIC_LAYOUT_ID));
}
// itemIdForItem: item-id function is consulted when binding (smoke test).
@Test
public void shouldGenerateItemIdForItemOfRepositoryPresenterOfResultList() {
  final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
      dataBindingRepositoryPresenterOf(String.class)
          .layout(LAYOUT_ID)
          .itemIdForItem(itemIdForItem)
          .forResultList();
  resultListRepositoryPresenter.bind(STRING_LIST_RESULT, 1, viewHolder);
}
@Test
public void shouldReturnStableIdForRepositoryPresenterOfItem() {
final RepositoryPresenter<String> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableIdForItem(Functions.<String, Long>staticFunction(STABLE_ID))
.forItem();
assertThat(resultRepositoryPresenter.getItemId(STRING, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStableIdForRepositoryPresenterOfResult() {
final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableIdForItem(Functions.<String, Long>staticFunction(STABLE_ID))
.forResult();
assertThat(resultRepositoryPresenter.getItemId(STRING_RESULT, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStaticStableIdForRepositoryPresenterOfItem() {
final RepositoryPresenter<String> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableId(STABLE_ID)
.forItem();
assertThat(resultRepositoryPresenter.getItemId(STRING, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStaticStableIdForRepositoryPresenterOfResult() {
final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableId(STABLE_ID)
.forResult();
assertThat(resultRepositoryPresenter.getItemId(STRING_RESULT, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStableIdForRepositoryPresenterOfResultList() {
    // The stable-id function must be applied per element of a Result-wrapped list.
    final Function<String, Long> stableIdFunction =
        Functions.<String, Long>staticFunction(STABLE_ID);
    final RepositoryPresenter<Result<List<String>>> presenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .stableIdForItem(stableIdFunction)
            .forResultList();
    assertThat(presenter.getItemId(STRING_LIST_RESULT, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStableIdForRepositoryPresenterOfList() {
    // The stable-id function must be applied per element of a plain list.
    final Function<String, Long> stableIdFunction =
        Functions.<String, Long>staticFunction(STABLE_ID);
    final RepositoryPresenter<List<String>> presenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .stableIdForItem(stableIdFunction)
            .forList();
    assertThat(presenter.getItemId(STRING_LIST, 0), is(STABLE_ID));
}
@Test
public void shouldHandleRebindWithSameData() {
    final RepositoryPresenter<String> repositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .forItem();
    // First bind: the item is pushed into the binding and pending bindings are flushed.
    repositoryPresenter.bind(STRING, 0, viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, STRING);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
    reset(viewDataBinding);
    // Rebinding the exact same data must still set the variable and flush again —
    // the presenter does not short-circuit when the data is unchanged.
    repositoryPresenter.bind(STRING, 0, viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, STRING);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
}
@Test
public void shouldHandleRebindWithNewData() {
    final RepositoryPresenter<String> repositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .forItem();
    // First bind with the initial item.
    repositoryPresenter.bind(STRING, 0, viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, STRING);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
    reset(viewDataBinding);
    // Rebinding with different data must push the new item and flush again.
    repositoryPresenter.bind(SECOND_STRING, 0, viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, SECOND_STRING);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
}
@Test
public void shouldRefuseFineGrainedEventsWithoutDiffWith() {
    // Without a diffWith()/diff() configuration the presenter cannot compute
    // fine-grained updates and must report false.
    final RepositoryPresenter<String> presenter = dataBindingRepositoryPresenterOf(String.class)
        .layout(LAYOUT_ID)
        .itemId(ITEM_ID)
        .forItem();
    final boolean handledFineGrained =
        presenter.getUpdates("String1", "String2", listUpdateCallback);
    assertThat(handledFineGrained, is(false));
}
@Test
public void shouldNotifyFineGrainedEventsWithDiffWith() {
    final List<String> oldData = asList("A:1", "B:2", "C:3");
    final List<String> newData = asList("B:2", "A:4", "C:5");
    final DiffingLogic diffingLogic = new DiffingLogic(oldData, newData);
    final RepositoryPresenter<List<String>> diffingPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .diffWith(diffingLogic, false)
            .forList();
    // With diffWith() configured, getUpdates must report true and dispatch
    // per-item events instead of a blanket change.
    final boolean fineGrained = diffingPresenter.getUpdates(oldData, newData, listUpdateCallback);
    assertThat(fineGrained, is(true));
    // Replay the same diff (move detection off) through a verifying wrapper so the
    // callback must have received exactly the events DiffUtil would produce.
    DiffUtil.calculateDiff(diffingLogic, false).dispatchUpdatesTo(
        verifyingWrapper(listUpdateCallback));
    verifyNoMoreInteractions(listUpdateCallback);
}
@Test
public void shouldNotifyFineGrainedEventsWithDiffWithMoveDetection() {
    final List<String> oldData = asList("A:1", "B:2", "C:3", "D:0");
    final List<String> newData = asList("B:2", "D:0", "A:4", "C:5");
    final DiffingLogic diffingLogic = new DiffingLogic(oldData, newData);
    // forCollection with the identity function presents the List<String> directly,
    // mirroring forList() while exercising the generic collection entry point.
    final RepositoryPresenter<List<String>> diffingPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .diffWith(diffingLogic, true)
            .forCollection(Functions.<List<String>>identityFunction());
    final boolean fineGrained = diffingPresenter.getUpdates(oldData, newData, listUpdateCallback);
    assertThat(fineGrained, is(true));
    // Replay the diff with move detection enabled; the callback must have seen
    // exactly the events DiffUtil produces (including onMoved).
    DiffUtil.calculateDiff(diffingLogic, true).dispatchUpdatesTo(
        verifyingWrapper(listUpdateCallback));
    verifyNoMoreInteractions(listUpdateCallback);
}
@Test
public void shouldNotifySingleItemFineGrainedEventsWithDiff() {
    final Result<String> withA = success("A");
    final Result<String> withB = success("B");
    final Result<String> without = failure();
    final RepositoryPresenter<Result<String>> diffingPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .diff()
            .forResult();
    // present -> present with a different value: a single change event at position 0.
    boolean fineGrained = diffingPresenter.getUpdates(withA, withB, listUpdateCallback);
    assertThat(fineGrained, is(true));
    verify(listUpdateCallback).onChanged(0, 1, null);
    verifyNoMoreInteractions(listUpdateCallback);
    // present -> absent: a single removal.
    fineGrained = diffingPresenter.getUpdates(withA, without, listUpdateCallback);
    assertThat(fineGrained, is(true));
    verify(listUpdateCallback).onRemoved(0, 1);
    verifyNoMoreInteractions(listUpdateCallback);
    // absent -> present: a single insertion.
    fineGrained = diffingPresenter.getUpdates(without, withB, listUpdateCallback);
    assertThat(fineGrained, is(true));
    verify(listUpdateCallback).onInserted(0, 1);
    verifyNoMoreInteractions(listUpdateCallback);
}
@Test
public void shouldNotifyBlanketChangeEventForSameObjectForOldAndNewData() {
    final List<String> oneList = asList("A:0", "B:1");
    final DiffingLogic diffingLogic = new DiffingLogic(oneList, oneList);
    final RepositoryPresenter<List<String>> diffingPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .diffWith(diffingLogic, false)
            .forList();
    // When old and new data are the same instance, the presenter still claims
    // fine-grained handling but emits one blanket onChanged over the full range
    // rather than running an item-by-item diff.
    final boolean fineGrained = diffingPresenter.getUpdates(oneList, oneList, listUpdateCallback);
    assertThat(fineGrained, is(true));
    verify(listUpdateCallback).onChanged(0, oneList.size(), null);
    verifyNoMoreInteractions(listUpdateCallback);
}
@Test
public void shouldHavePrivateConstructor() {
    // The factory entry point is a utility class and must not be instantiable.
    final Class<?> utilityClass = DataBindingRepositoryPresenters.class;
    assertThat(utilityClass, hasPrivateConstructor());
}
/** Maps a string to a singleton list holding its first character as a one-char string. */
private static final class StringToFirstCharStringList implements Function<String, List<String>> {
    @NonNull
    @Override
    public List<String> apply(@NonNull final String input) {
        final String firstCharacter = valueOf(input.charAt(0));
        return singletonList(firstCharacter);
    }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.security.ChangePasswordRequest;
import org.elasticsearch.client.security.ClearApiKeyCacheRequest;
import org.elasticsearch.client.security.ClearPrivilegesCacheRequest;
import org.elasticsearch.client.security.ClearRealmCacheRequest;
import org.elasticsearch.client.security.ClearRolesCacheRequest;
import org.elasticsearch.client.security.CreateApiKeyRequest;
import org.elasticsearch.client.security.CreateTokenRequest;
import org.elasticsearch.client.security.DelegatePkiAuthenticationRequest;
import org.elasticsearch.client.security.DeletePrivilegesRequest;
import org.elasticsearch.client.security.DeleteRoleMappingRequest;
import org.elasticsearch.client.security.DeleteRoleRequest;
import org.elasticsearch.client.security.DeleteUserRequest;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.GetApiKeyRequest;
import org.elasticsearch.client.security.GetPrivilegesRequest;
import org.elasticsearch.client.security.GetRoleMappingsRequest;
import org.elasticsearch.client.security.GetRolesRequest;
import org.elasticsearch.client.security.GetUsersRequest;
import org.elasticsearch.client.security.HasPrivilegesRequest;
import org.elasticsearch.client.security.InvalidateApiKeyRequest;
import org.elasticsearch.client.security.InvalidateTokenRequest;
import org.elasticsearch.client.security.PutPrivilegesRequest;
import org.elasticsearch.client.security.PutRoleMappingRequest;
import org.elasticsearch.client.security.PutRoleRequest;
import org.elasticsearch.client.security.PutUserRequest;
import org.elasticsearch.client.security.SetUserEnabledRequest;
import org.elasticsearch.common.Strings;
import java.io.IOException;
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;
/**
 * Translates the high-level security request objects of the REST client into low-level
 * {@link Request}s: endpoint path, HTTP method, query parameters and (where applicable)
 * a JSON request body. All converters are stateless static factories.
 */
final class SecurityRequestConverters {

    /** Utility class: no instances. */
    private SecurityRequestConverters() {}

    /** POST {@code _security/user/{username}/_password} with the new password as body. */
    static Request changePassword(ChangePasswordRequest changePasswordRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/user")
            .addPathPart(changePasswordRequest.getUsername())
            .addPathPartAsIs("_password")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        request.setEntity(createEntity(changePasswordRequest, REQUEST_BODY_CONTENT_TYPE));
        RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(changePasswordRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** GET {@code _security/user[/{names}]}; omitting names fetches all users. */
    static Request getUsers(GetUsersRequest getUsersRequest) {
        RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/user");
        if (getUsersRequest.getUsernames().size() > 0) {
            builder.addPathPart(Strings.collectionToCommaDelimitedString(getUsersRequest.getUsernames()));
        }
        return new Request(HttpGet.METHOD_NAME, builder.build());
    }

    /** PUT {@code _security/user/{username}} creating or updating the user. */
    static Request putUser(PutUserRequest putUserRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/user")
            .addPathPart(putUserRequest.getUser().getUsername())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        request.setEntity(createEntity(putUserRequest, REQUEST_BODY_CONTENT_TYPE));
        RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(putUserRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** DELETE {@code _security/user/{username}}. */
    static Request deleteUser(DeleteUserRequest deleteUserRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            // Use the same single-segment literal form as the other converters in this
            // class (previously addPathPartAsIs("_security", "user") — equivalent path,
            // now consistent).
            .addPathPartAsIs("_security/user")
            .addPathPart(deleteUserRequest.getName())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(deleteUserRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** PUT {@code _security/role_mapping/{name}} creating or updating a role mapping. */
    static Request putRoleMapping(final PutRoleMappingRequest putRoleMappingRequest) throws IOException {
        final String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/role_mapping")
            .addPathPart(putRoleMappingRequest.getName())
            .build();
        final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        request.setEntity(createEntity(putRoleMappingRequest, REQUEST_BODY_CONTENT_TYPE));
        final RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(putRoleMappingRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** GET {@code _security/role_mapping[/{names}]}; omitting names fetches all mappings. */
    static Request getRoleMappings(final GetRoleMappingsRequest getRoleMappingRequest) throws IOException {
        RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder();
        builder.addPathPartAsIs("_security/role_mapping");
        if (getRoleMappingRequest.getRoleMappingNames().size() > 0) {
            builder.addPathPart(Strings.collectionToCommaDelimitedString(getRoleMappingRequest.getRoleMappingNames()));
        }
        return new Request(HttpGet.METHOD_NAME, builder.build());
    }

    /** PUT {@code _security/user/{username}/_enable}. */
    static Request enableUser(EnableUserRequest enableUserRequest) {
        return setUserEnabled(enableUserRequest);
    }

    /** PUT {@code _security/user/{username}/_disable}. */
    static Request disableUser(DisableUserRequest disableUserRequest) {
        return setUserEnabled(disableUserRequest);
    }

    /** Shared implementation for enable/disable: the last path segment selects the action. */
    private static Request setUserEnabled(SetUserEnabledRequest setUserEnabledRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/user")
            .addPathPart(setUserEnabledRequest.getUsername())
            .addPathPart(setUserEnabledRequest.isEnabled() ? "_enable" : "_disable")
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(setUserEnabledRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** GET {@code _security/user/_has_privileges} with the privilege set in the body. */
    static Request hasPrivileges(HasPrivilegesRequest hasPrivilegesRequest) throws IOException {
        Request request = new Request(HttpGet.METHOD_NAME, "/_security/user/_has_privileges");
        request.setEntity(createEntity(hasPrivilegesRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    /** POST {@code _security/realm/{realms|_all}/_clear_cache[?usernames=...]}. */
    static Request clearRealmCache(ClearRealmCacheRequest clearRealmCacheRequest) {
        RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/realm");
        if (clearRealmCacheRequest.getRealms().isEmpty() == false) {
            builder.addCommaSeparatedPathParts(clearRealmCacheRequest.getRealms().toArray(Strings.EMPTY_ARRAY));
        } else {
            // No realms given: clear every realm's cache.
            builder.addPathPart("_all");
        }
        final String endpoint = builder.addPathPartAsIs("_clear_cache").build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        if (clearRealmCacheRequest.getUsernames().isEmpty() == false) {
            RequestConverters.Params params = new RequestConverters.Params();
            params.putParam("usernames", Strings.collectionToCommaDelimitedString(clearRealmCacheRequest.getUsernames()));
            request.addParameters(params.asMap());
        }
        return request;
    }

    /** POST {@code _security/role/{names}/_clear_cache}. */
    static Request clearRolesCache(ClearRolesCacheRequest disableCacheRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/role")
            .addCommaSeparatedPathParts(disableCacheRequest.names())
            .addPathPart("_clear_cache")
            .build();
        return new Request(HttpPost.METHOD_NAME, endpoint);
    }

    /** POST {@code _security/privilege/{applications}/_clear_cache}. */
    static Request clearPrivilegesCache(ClearPrivilegesCacheRequest clearPrivilegesCacheRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/privilege")
            .addCommaSeparatedPathParts(clearPrivilegesCacheRequest.applications())
            .addPathPart("_clear_cache")
            .build();
        return new Request(HttpPost.METHOD_NAME, endpoint);
    }

    /** POST {@code _security/api_key/{ids}/_clear_cache}. */
    static Request clearApiKeyCache(ClearApiKeyCacheRequest clearApiKeyCacheRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/api_key")
            .addCommaSeparatedPathParts(clearApiKeyCacheRequest.ids())
            .addPathPart("_clear_cache")
            .build();
        return new Request(HttpPost.METHOD_NAME, endpoint);
    }

    /** DELETE {@code _security/role_mapping/{name}}. */
    static Request deleteRoleMapping(DeleteRoleMappingRequest deleteRoleMappingRequest) {
        final String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/role_mapping")
            .addPathPart(deleteRoleMappingRequest.getName())
            .build();
        final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        final RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(deleteRoleMappingRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** DELETE {@code _security/role/{name}}. */
    static Request deleteRole(DeleteRoleRequest deleteRoleRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/role")
            .addPathPart(deleteRoleRequest.getName())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(deleteRoleRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** GET {@code _security/role[/{names}]}; omitting names fetches all roles. */
    static Request getRoles(GetRolesRequest getRolesRequest) {
        RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder();
        builder.addPathPartAsIs("_security/role");
        if (getRolesRequest.getRoleNames().size() > 0) {
            builder.addPathPart(Strings.collectionToCommaDelimitedString(getRolesRequest.getRoleNames()));
        }
        return new Request(HttpGet.METHOD_NAME, builder.build());
    }

    /** POST {@code _security/oauth2/token} creating an OAuth2 token. */
    static Request createToken(CreateTokenRequest createTokenRequest) throws IOException {
        Request request = new Request(HttpPost.METHOD_NAME, "/_security/oauth2/token");
        request.setEntity(createEntity(createTokenRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    /** POST {@code _security/delegate_pki} for PKI-delegated authentication. */
    static Request delegatePkiAuthentication(DelegatePkiAuthenticationRequest delegatePkiAuthenticationRequest) throws IOException {
        Request request = new Request(HttpPost.METHOD_NAME, "/_security/delegate_pki");
        request.setEntity(createEntity(delegatePkiAuthenticationRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    /** DELETE {@code _security/oauth2/token} invalidating tokens selected by the body. */
    static Request invalidateToken(InvalidateTokenRequest invalidateTokenRequest) throws IOException {
        Request request = new Request(HttpDelete.METHOD_NAME, "/_security/oauth2/token");
        request.setEntity(createEntity(invalidateTokenRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    /** GET {@code _security/privilege/{application}/{privileges}}. */
    static Request getPrivileges(GetPrivilegesRequest getPrivilegesRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/privilege")
            .addPathPart(getPrivilegesRequest.getApplicationName())
            .addCommaSeparatedPathParts(getPrivilegesRequest.getPrivilegeNames())
            .build();
        return new Request(HttpGet.METHOD_NAME, endpoint);
    }

    /** PUT {@code _security/privilege} creating or updating application privileges. */
    static Request putPrivileges(final PutPrivilegesRequest putPrivilegesRequest) throws IOException {
        Request request = new Request(HttpPut.METHOD_NAME, "/_security/privilege");
        request.setEntity(createEntity(putPrivilegesRequest, REQUEST_BODY_CONTENT_TYPE));
        RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(putPrivilegesRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** DELETE {@code _security/privilege/{application}/{privileges}}. */
    static Request deletePrivileges(DeletePrivilegesRequest deletePrivilegeRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/privilege")
            .addPathPart(deletePrivilegeRequest.getApplication())
            .addCommaSeparatedPathParts(deletePrivilegeRequest.getPrivileges())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(deletePrivilegeRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** PUT {@code _security/role/{name}} creating or updating a role. */
    static Request putRole(final PutRoleRequest putRoleRequest) throws IOException {
        final String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_security/role")
            .addPathPart(putRoleRequest.getRole().getName())
            .build();
        final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        request.setEntity(createEntity(putRoleRequest, REQUEST_BODY_CONTENT_TYPE));
        final RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(putRoleRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** POST {@code _security/api_key} creating an API key. */
    static Request createApiKey(final CreateApiKeyRequest createApiKeyRequest) throws IOException {
        final Request request = new Request(HttpPost.METHOD_NAME, "/_security/api_key");
        request.setEntity(createEntity(createApiKeyRequest, REQUEST_BODY_CONTENT_TYPE));
        final RequestConverters.Params params = new RequestConverters.Params();
        params.withRefreshPolicy(createApiKeyRequest.getRefreshPolicy());
        request.addParameters(params.asMap());
        return request;
    }

    /** GET {@code _security/api_key} filtered by id/name/username/realm via query params. */
    static Request getApiKey(final GetApiKeyRequest getApiKeyRequest) throws IOException {
        final Request request = new Request(HttpGet.METHOD_NAME, "/_security/api_key");
        if (Strings.hasText(getApiKeyRequest.getId())) {
            request.addParameter("id", getApiKeyRequest.getId());
        }
        if (Strings.hasText(getApiKeyRequest.getName())) {
            request.addParameter("name", getApiKeyRequest.getName());
        }
        if (Strings.hasText(getApiKeyRequest.getUserName())) {
            request.addParameter("username", getApiKeyRequest.getUserName());
        }
        if (Strings.hasText(getApiKeyRequest.getRealmName())) {
            request.addParameter("realm_name", getApiKeyRequest.getRealmName());
        }
        // "owner" restricts results to keys owned by the authenticated user.
        request.addParameter("owner", Boolean.toString(getApiKeyRequest.ownedByAuthenticatedUser()));
        return request;
    }

    /** DELETE {@code _security/api_key} invalidating keys selected by the body. */
    static Request invalidateApiKey(final InvalidateApiKeyRequest invalidateApiKeyRequest) throws IOException {
        final Request request = new Request(HttpDelete.METHOD_NAME, "/_security/api_key");
        request.setEntity(createEntity(invalidateApiKeyRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }
}
| |
/*******************************************************************************
* Copyright 2015, The IKANOW Open Source Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.ikanow.aleph2.data_model.utils;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiFunction;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.google.inject.AbstractModule;
import com.google.inject.CreationException;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.Scopes;
import com.google.inject.name.Names;
import com.ikanow.aleph2.data_model.interfaces.data_services.IColumnarService;
import com.ikanow.aleph2.data_model.interfaces.data_services.IDocumentService;
import com.ikanow.aleph2.data_model.interfaces.data_services.IGeospatialService;
import com.ikanow.aleph2.data_model.interfaces.data_services.IGraphService;
import com.ikanow.aleph2.data_model.interfaces.data_services.IManagementDbService;
import com.ikanow.aleph2.data_model.interfaces.data_services.ISearchIndexService;
import com.ikanow.aleph2.data_model.interfaces.data_services.IStorageService;
import com.ikanow.aleph2.data_model.interfaces.data_services.ITemporalService;
import com.ikanow.aleph2.data_model.interfaces.shared_services.IExtraDependencyLoader;
import com.ikanow.aleph2.data_model.interfaces.shared_services.ISecurityService;
import com.ikanow.aleph2.data_model.interfaces.shared_services.IServiceContext;
import com.ikanow.aleph2.data_model.interfaces.shared_services.IUnderlyingService;
import com.ikanow.aleph2.data_model.interfaces.shared_services.IUuidService;
import com.ikanow.aleph2.data_model.objects.shared.ConfigDataServiceEntry;
import com.ikanow.aleph2.data_model.objects.shared.GlobalPropertiesBean;
import com.ikanow.aleph2.data_model.utils.Lambdas.ThrowableWrapper.Supplier;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import fj.data.Either;
/**
* Utility functions for loading modules into the system. Typically is used
* by calling {@link #loadModulesFromConfig(Config)} this will create all
* the service injectors needed. Services can then be retrieved from {@link #getService(Class, Optional)}
* or using any of the other Contexts that implement IServiceContext.
*
* @author Burch
*
*/
public class ModuleUtils {
/** Root config key under which service definitions live ({@code service.{name}.*}). */
private final static String SERVICES_PROPERTY = "service";
// Interfaces that already have a default (un-annotated) binding, used to reject duplicates.
// NOTE(review): null here — presumably (re)initialized by initialize(); confirm it is set
// before validateOnlyOneDefault() can run.
private static Set<Class<?>> interfaceHasDefault = null;
// Service annotation names implicitly treated as defaults even without default=true in config.
// NOTE(review): "ManagementDbService" is listed twice — harmless in a Set, but one of the
// duplicates may have been intended as a different service name; confirm.
private static Set<String> serviceDefaults = new HashSet<String>(Arrays.asList("SecurityService", "ColumnarService",
    "DataWarehouseService", "DocumentService", "GeospatialService", "GraphService", "ManagementDbService", "ManagementDbService",
    "SearchIndexService", "StorageService", "TemporalService", "CoreDistributedServices"));
private static Logger logger = LogManager.getLogger();
// Key -> child injector for every bound service; built by loadServicesFromConfig.
@SuppressWarnings("rawtypes")
private static Map<Key, Injector> serviceInjectors = null;
// Top-level injector all service child injectors hang off.
private static Injector parent_injector = null;
// Global properties bean; defaulted so tests don't break each time globals changes.
private static GlobalPropertiesBean globals = BeanTemplateUtils.build(GlobalPropertiesBean.class).done().get();
//(do it this way to avoid having to keep changing this test every time globals changes)
// Last config passed to loadModulesFromConfig/createInjector; null until then.
private static Config saved_config = null;
// Memoized per-(injector,key) instance lookup.
@SuppressWarnings("rawtypes")
private static BiFunction<Injector,Key,Object> getInstance = ModuleUtils.memoize(ModuleUtils::getInstance_onceOnly);
/** Returns the static config set up by a call to loadModulesFromConfig or createInjector
 * INTENDED TO BE CALLED FROM guice_submodule.configure() (or later of course, though you should be using injected beans by then)
 * @return the user config (or whatever is on the classpath as a fallback)
 */
public static Config getStaticConfig() {
    // orElseGet: only parse the classpath config when no saved config exists —
    // the previous orElse(ConfigFactory.load()) eagerly re-parsed it on every call.
    return Optional.ofNullable(saved_config).orElseGet(ConfigFactory::load);
}
/** Returns the global configuration bean associated with the last configuration generated
* @return
*/
/** Accessor for the global properties bean built from the last loaded configuration
 *  (or the defaults if no configuration has been loaded yet).
 * @return the current {@link GlobalPropertiesBean}
 */
public static GlobalPropertiesBean getGlobalProperties() {
    return globals;
}
/**
* Loads up all the services it can find in the given config file. Typically
* the config comes from ConfigFactory.load() which just loads the default
* environment config files and env vars. To override the config location
* call this function before requesting a getService.
*
* @param config
* @throws Exception
*/
// Thin entry point: delegates all work (injector creation, service binding) to initialize().
protected static void loadModulesFromConfig(Config config) throws Exception {
    initialize(config);
}
/**
* Reads in the config file for properties of the format:
* service.{Service name}.interface={full path to interface (optional)}
* service.{Service name}.service={full path to service}
* service.{Service name}.default={true|false (optional)}
*
* Will then try to create injectors for each of these services that can be retrieved by their
* Service name. If default is set to true they can be retrieved by their
* interface/service directly.
*
* If a service has already been created with the same {full path to service} as
* another service, the second service will use the first as an injector, then we go back
* and replace the original injectors with the new one.
*
* @param config
* @param parent_injector
* @return
* @throws Exception
*/
@SuppressWarnings("rawtypes")
private static Map<Key, Injector> loadServicesFromConfig(
        Config config, Injector parent_injector) throws Exception {
    //temporary map so we don't create multiple injectors for the same service class
    Map<String, Injector> service_class_injectors = new HashMap<String, Injector>();
    //actual list of key->injector we are returning
    Map<Key, Injector> injectors = new HashMap<Key, Injector>();
    List<ConfigDataServiceEntry> serviceProperties = PropertiesUtils.getDataServiceProperties(config, SERVICES_PROPERTY);
    // Collected (not thrown) so every entry is attempted before failing.
    List<Exception> exceptions = new ArrayList<Exception>();
    // NOTE: forEach mutates the three maps/lists above by side effect; the order of
    // entries matters because later entries may reuse an earlier sibling injector.
    serviceProperties.stream()
        .forEach( entry -> {
            try {
                Map<Key, Injector> injector_entries;
                Injector sibling_injector = service_class_injectors.get(entry.serviceName);
                //if there is not an injector for this serviceName, just use the parent binding
                if ( sibling_injector == null ) {
                    injector_entries = bindServiceEntry(entry, parent_injector, true);
                    if ( injector_entries.size() > 0 ) {
                        // Remember this service class's injector so further entries for the
                        // same class chain off it instead of creating a duplicate.
                        Injector injector_entry = injector_entries.entrySet().iterator().next().getValue();
                        service_class_injectors.put(entry.serviceName, injector_entry);
                    }
                } else {
                    //an injector already exists, use it to create the injector, then replace all existing entries w/ it
                    injector_entries = bindServiceEntry(entry, sibling_injector, false);
                    if ( injector_entries.size() > 0 ) {
                        Injector injector_entry = injector_entries.entrySet().iterator().next().getValue();
                        service_class_injectors.put(entry.serviceName, injector_entry);
                        //replace any existing entries in injectors w/ this new copy
                        for ( Entry<Key, Injector> inj : injectors.entrySet() ) {
                            if ( inj.getValue() == sibling_injector ) {
                                logger.info("replacing previous injector with child (note: this is expected if a single service handles multiple interfaces)");
                                injectors.put(inj.getKey(), injector_entry);
                            }
                        }
                    }
                }
                //always bind all the new entries we created
                injectors.putAll(injector_entries);
            } catch (Exception e) {
                if (e instanceof CreationException) { // (often fails to provide useful information, so we'll insert it ourselves..)
                    CreationException ce = (CreationException) e;
                    e = null;
                    // First Guice message becomes the replacement exception; the rest are logged.
                    // NOTE(review): if ce has no error messages, e stays null here and a null is
                    // added to 'exceptions' below — confirm whether that path can occur.
                    for (com.google.inject.spi.Message m: ce.getErrorMessages()) {
                        if (null == e) e = new RuntimeException(m.toString(), e);
                        else {
                            logger.error(ErrorUtils.get("Sub-Error during service {1}:{2} binding {0}", m.toString(), entry.interfaceName, entry.serviceName));
                        }
                    }
                }
                logger.error(ErrorUtils.getLongForm("Error during service {1}:{2} binding {0}",e, entry.interfaceName, entry.serviceName));
                exceptions.add(e);
            }
        });
    // Fail with the first collected exception as the cause (the rest were already logged).
    if ( exceptions.size() > 0 ){
        throw new Exception(exceptions.size() + " exceptions occured during loading services from config file, first shown", exceptions.get(0));
    }
    return injectors;
}
/**
* Creates a child injector w/ all the necessary bindings setup from
* a config entry. Most actions attempted in this method will throw
* an exception if something is wrong so it can be caught above.
*
* How this works:
* First we set this entry to a default one if they use the same name
* as the default hard coded names or set default to true in the config.
* Then we attempt to get the service class and interface class (if exists)
* to make sure they are valid.
* If the service class extends IExtraDependencyLoader we will try to grab its
* depedency modules.
*
* The injector is then creates from those extra depedencies and a custom one
* that creates the bindings from interface to service (w/ annotations). The
* injector is set in the service context then so it can be used in the future.
*
* @param entry
* @param parent_injector
* @param addExtraDependencies
* @param optional
* @throws Exception
*/
@SuppressWarnings("rawtypes")
private static Map<Key, Injector> bindServiceEntry(ConfigDataServiceEntry entry, Injector parent_injector, boolean addExtraDependencies) throws Exception {
    Map<Key, Injector> injectorMap = new HashMap<Key, Injector>();
    // Promote the entry to default if its annotation name matches one of the hard-coded
    // default service names (or the config explicitly said default=true).
    entry = new ConfigDataServiceEntry(entry.annotationName, entry.interfaceName, entry.serviceName, entry.isDefault || serviceDefaults.contains(entry.annotationName));
    logger.info("BINDING: " + entry.annotationName + " " + entry.interfaceName + " " + entry.serviceName + " " + entry.isDefault + " " + addExtraDependencies );
    // Throws ClassNotFoundException if the configured service class is invalid.
    Class serviceClazz = Class.forName(entry.serviceName);
    List<Module> modules = new ArrayList<Module>();
    // Extra dependency modules are only gathered for the first injector of a service class
    // (the sibling-injector path passes false to avoid re-binding them).
    if ( addExtraDependencies )
        modules.addAll(getExtraDepedencyModules(serviceClazz));
    Optional<Class> interfaceClazz = getInterfaceClass(entry.interfaceName);
    // Only one service may claim the un-annotated (default) binding per interface.
    if ( entry.isDefault && interfaceClazz.isPresent() )
        validateOnlyOneDefault(interfaceClazz);
    //add default service binding w/ annotation
    modules.add(new ServiceBinderModule(serviceClazz, interfaceClazz, Optional.ofNullable(entry.annotationName)));
    if ( entry.isDefault ) //if default, add service binding w/o annotation
        modules.add(new ServiceBinderModule(serviceClazz, interfaceClazz, Optional.empty()));
    //create the child injector
    Injector child_injector = parent_injector.createChildInjector(modules);
    //add injector to serviceContext for interface+annotation, interface w/o annotation, or service only
    if ( interfaceClazz.isPresent()) {
        injectorMap.put(getKey(interfaceClazz.get(), Optional.ofNullable(entry.annotationName)), child_injector);
        if ( entry.isDefault )
            injectorMap.put(getKey(interfaceClazz.get(), Optional.empty()), child_injector);
    } else {
        // No interface configured: key the injector by the concrete service class.
        injectorMap.put(getKey(serviceClazz, Optional.empty()), child_injector);
    }
    return injectorMap;
}
/**
* Throws an exception if this interface already has a default binding.
*
* If not, adds this interface to the list of default bindings (so future checks will throw an error).
*
* @param interfaceClazz An interface that wants to set the default binding.
* @throws Exception
*/
@SuppressWarnings("rawtypes")
private static void validateOnlyOneDefault(Optional<Class> interfaceClazz) throws Exception {
if (interfaceHasDefault.contains(interfaceClazz.get()))
throw new Exception(interfaceClazz.get() + " already had a default binding, there can be only one.");
else
interfaceHasDefault.add(interfaceClazz.get());
}
/**
* Returns back an optional with the interface class if interfaceName was not empty.
*
* Will throw an exception if interfaceName cannot be turned into a class. (via Class.forName(interfaceName);)
*
* @param interfaceName interface to try and get the Class of or empty
* @return
* @throws ClassNotFoundException
*/
@SuppressWarnings("rawtypes")
private static Optional<Class> getInterfaceClass(Optional<String> interfaceName) throws ClassNotFoundException {
if ( interfaceName.isPresent() )
return Optional.of(Class.forName(interfaceName.get()));
return Optional.empty();
}
/**
* If service class implements {@link com.ikanow.aleph2.data_model.interfaces.shared_services.IExtraDependencyLoader}
* then try to invoke the static method getExtraDepedencyModules to retrieve any additional dependencies this service needs.
*
* Otherwise return an empty list.
*
* @param serviceClazz
* @return
* @throws Exception
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
private static List<Module> getExtraDepedencyModules(Class<?> serviceClazz) throws Exception {
//if serviceClazz implements IExtraDepedency then add those bindings
if ( IExtraDependencyLoader.class.isAssignableFrom(serviceClazz) ) {
logger.debug("Loading Extra Depedency Modules");
List<Module> modules = new ArrayList<Module>();
Class[] param_types = new Class[0];
Object[] params = new Object[0];
try {
modules.addAll((List<Module>) serviceClazz.getMethod("getExtraDependencyModules", param_types).invoke(null, params));
} catch (IllegalAccessException | IllegalArgumentException
| InvocationTargetException | NoSuchMethodException
| SecurityException e) {
logger.error(ErrorUtils.getLongForm("Module: " + serviceClazz.getSimpleName() + " implemented IExtraDependencyModule but forgot to create the static method getExtraDependencyModules():List<Module> double check you have this set up correctly. {0}",e));
throw new Exception("Module: " + serviceClazz.getSimpleName() + " implemented IExtraDependencyModule but forgot to create the static method getExtraDependencyModules():List<Module> double check you have this set up correctly. \n" + e.getMessage());
}
return modules;
}
return Collections.emptyList();
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private static Key getKey(Class serviceClazz, Optional<String> serviceName) {
if ( serviceName.isPresent() )
return Key.get(serviceClazz, Names.named(serviceName.get()));
else
return Key.get(serviceClazz);
}
	/**
	 * Returns back an instance of the requested serviceClazz/annotation
	 * if an injector exists for it. If the injectors have not yet been
	 * created will try to load them from the default config.
	 *
	 * @param serviceClazz class of the service to look up
	 * @param serviceName optional annotation name the binding was registered under
	 * @return the service instance, or null if no binding exists for the key
	 */
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public static <I> I getService(Class<I> serviceClazz, Optional<String> serviceName) {
		// Lazy init: first caller triggers loading of bindings from the default config.
		// NOTE(review): this check-then-load is unsynchronized - concurrent first callers
		// could both attempt the load; confirm startup is effectively single-threaded.
		if ( serviceInjectors == null ) {
			try {
				loadModulesFromConfig(ConfigFactory.load());
			} catch (Exception e) {
				logger.error("Error loading modules", e);
			}
		}
		Key key = getKey(serviceClazz, serviceName);
		Injector injector = serviceInjectors.get(key);
		if ( injector != null ) {
			//return (I) getInstance_onceOnly(injector, key);
			// presumably the memoized resolver built via memoize(...) - each injector/key
			// then yields a single cached instance; confirm where getInstance is declared
			return (I) getInstance.apply(injector, key);
			//return (I) injector.getInstance(key);
		}
		else
			return null;
	}
/**
* Helper function to make creating an instance of our memoized bifunction easier to read.
*
* @param function
* @return
*/
private static <T, U, R> BiFunction<T, U, R> memoize(final BiFunction<T, U, R> function) {
return new BiFunctionMemoize<T, U, R>().doMemoizeIgnoreSecondArg(function);
}
/**
* Class to handle a bifunction memoize.
*
* @author Burch
*
* @param <T>
* @param <U>
* @param <R>
*/
private static class BiFunctionMemoize<T, U, R> {
protected BiFunctionMemoize() {}
private final Map<T, R> instance_cache = new ConcurrentHashMap<T, R>();
/**
* If T is in the cache, returns the instance, otherwise calls function with T,U.
*
* @param function
* @return
*/
public BiFunction<T, U, R> doMemoizeIgnoreSecondArg(final BiFunction<T, U, R> function) {
return (input1, input2) -> {
if (instance_cache.keySet().contains(input1)) {
return instance_cache.get(input1);
}
else {
final R res = function.apply(input1, input2);
instance_cache.put(input1, res);
return res;
}
};
//computeIfAbsent cannot be called recursively currently (oracle claims to fix that up later)
//if they fix it, this function can be minimized to this:
//return (input1, input2) -> instance_cache.computeIfAbsent(input1, ___ -> function.apply(input1, input2));
}
}
	/**
	 * Returns an instance of the key given an injector.
	 *
	 * NOTE(review): appears unused in the visible code - getService goes through
	 * the memoized getInstance BiFunction instead (its call here is commented out);
	 * seemingly kept as the non-memoized alternative.
	 *
	 * @param injector injector to resolve the key against
	 * @param key binding key to resolve
	 * @return the resolved instance
	 */
	@SuppressWarnings({ "unchecked", "rawtypes" })
	private static Object getInstance_onceOnly(Injector injector, Key key) {
		return injector.getInstance(key);
	}
	/**
	 * Initializes the module utils class.
	 *
	 * This includes reading in the properties and setting up all the initial bindings
	 * found in the config.
	 *
	 * @param config application config supplying global properties and service bindings
	 * @throws Exception if property parsing or service binding fails
	 */
	private static void initialize(Config config) throws Exception {
		saved_config = config;
		final Config subconfig = PropertiesUtils.getSubConfig(config, GlobalPropertiesBean.PROPERTIES_ROOT).orElse(null);
		// synchronized presumably to publish globals safely to other threads - confirm readers
		synchronized (ModuleUtils.class) {
			globals = BeanTemplateUtils.from(subconfig, GlobalPropertiesBean.class);
		}
		// Re-initialization is allowed (tests rely on it) but suspicious in production, hence the warning
		if ( parent_injector != null)
			logger.warn("Resetting default bindings, this could cause issues if it occurs after initialization and typically should not occur except during testing");
		// Reset default-binding bookkeeping before rebuilding all injectors from config
		interfaceHasDefault = new HashSet<Class<?>>();
		parent_injector = Guice.createInjector(new ServiceModule());
		serviceInjectors = loadServicesFromConfig(config, parent_injector);
	}
/** GENERIC - CALLED BY TEST / APP
* Creates a child injector from our parent configured injector to allow applications to take
* advantage of our injection without having to create a config file. The typical reason to
* do this is to inject the IServiceContext into your application so you can access the other
* configured services via {@link com.ikanow.aleph2.data_model.interface.data_access.IServiceContext#getService()}
*
* @param modules Any modules you wanted added to your child injector (put your bindings in these)
* @param config If exists will reset injectors to create defaults via the config
* @return
* @throws Exception
*/
private static Injector createInjector(List<Module> modules, Optional<Config> config) throws Exception {
try {
if ( parent_injector == null && !config.isPresent() )
config = Optional.of(ConfigFactory.load());
if ( config.isPresent() )
initialize(config.get());
} catch (Throwable e) {
System.out.println(e.getMessage());
e.printStackTrace();
throw e;
}
return parent_injector.createChildInjector(modules);
}
	/** THIS VERSION IS FOR TESTS - CAN BE RUN MULTIPLE TIMES
	 * Creates a child injector from our parent configured injector to allow applications to take
	 * advantage of our injection without having to create a config file. The typical reason to
	 * do this is to inject the IServiceContext into your application so you can access the other
	 * configured services via {@link com.ikanow.aleph2.data_model.interface.data_access.IServiceContext#getService()}
	 *
	 * @param modules Any modules you wanted added to your child injector (put your bindings in these)
	 * @param config If exists will reset injectors to create defaults via the config
	 * @return the newly created child injector
	 * @throws Exception if initialization from the config fails
	 */
	public static Injector createTestInjector(List<Module> modules, Optional<Config> config) throws Exception {
		_test_mode.set(true);
		final Injector i = createInjector(modules, config);
		// obtrudeValue (not complete) so repeated test runs overwrite any previously-set injector
		_test_injector.obtrudeValue(i);
		return i;
	}
	/** For tests not using Guice, this has to be called (or any modules that use the ModuleUtils.getAppInjector.thenRun() constructs will hang)
	 */
	public static void disableTestInjection() {
		_test_mode.set(true);
		// Complete with null so futures chained off getAppInjector() resolve instead of hanging
		_test_injector.complete(null);
	}
	// Some application level global state
	// True when createTestInjector/disableTestInjection was used; switches getAppInjector's source
	private static AtomicBoolean _test_mode = new AtomicBoolean();
	// Lifecycle of the one-time app-level Guice initialization (see initializeApplication)
	private enum GlobalGuiceState { idle, initializing, complete };
	private static GlobalGuiceState _module_state = GlobalGuiceState.idle;
	// Completed (or obtruded) by createTestInjector / disableTestInjection
	private static final CompletableFuture<Injector> _test_injector = new CompletableFuture<>();
	// Completed once the app-level injector has been built
	private static final CompletableFuture<Injector> _app_injector = new CompletableFuture<>();
	// Completed once the application object itself has been constructed/injected
	private static final CompletableFuture<Boolean> _called_ctor = new CompletableFuture<>();
	/** APP LEVEL VERSION - ONLY CREATES STUFF ONCE. CANNOT NORMALLY BE CALLED FROM JUNIT TESTS - USE createTestInjector for that
	 * Creates a single application level injector and either injects members into application, or
	 * If called multiple times, will wait for the first time to complete before performing the desired action.
	 * To get the created injector, use getAppInjector
	 *
	 * @param modules Any modules you wanted added to your child injector (put your bindings in these)
	 * @param config If exists will reset injectors to create defaults via the config
	 * @param application - either a class to create, or an object to inject
	 * @return the created/injected application object
	 * @throws Exception if injector creation or application construction fails
	 */
	public static <T> T initializeApplication(final List<Module> modules, final Optional<Config> config, final Either<Class<T>, T> application) throws Exception {
		_test_mode.set(false);
		boolean initializing = false;
		T return_val = null;
		// Deferred action: once the app injector exists, either instantiate the app class
		// or inject members into the supplied app object
		final Supplier<T> on_complete = () -> {
			try {
				return application.either(app_clazz -> _app_injector.join().getInstance(app_clazz), app_obj -> {
					_app_injector.join().injectMembers(app_obj);
					return app_obj;
				});
			}
			catch (Throwable t) { // These will normally be guice errors so will break on wrap
				logger.error(ErrorUtils.getLongForm("ModuleUtils.initializeApplication.onComplete {0}", t));
				throw t;
			}
		};
		// Decide this thread's role under the lock: first caller initializes,
		// concurrent callers wait, late callers run the deferred action immediately
		synchronized (ModuleUtils.class) {
			if (GlobalGuiceState.idle == _module_state) { // winner!
				_module_state = GlobalGuiceState.initializing;
			}
			else if (GlobalGuiceState.initializing == _module_state) { // wait for the initial guice module to complete
				initializing = true;
			}
			else { // (active)
				return Lambdas.wrap_u(on_complete).get();
			}
		}
		if (initializing) { // if here just wait for it to complete
			// Poll until the winner flips the state to complete (state is only read/written under the lock)
			for (;;) {
				Thread.sleep(250L);
				synchronized (ModuleUtils.class) {
					if (GlobalGuiceState.complete == _module_state) break;
				}
			}
			return_val = Lambdas.wrap_u(on_complete).get();
		}
		else { // this version actually gets to complete it
			try {
				// If here then we're the only person that gets to initialize guice
				_app_injector.complete(createInjector(modules, config));
				return_val = Lambdas.wrap_u(on_complete).get();
				_called_ctor.complete(true);
			}
			catch (Throwable t) {
				// Propagate the failure to anyone blocked on the futures
				_app_injector.completeExceptionally(t);
				_called_ctor.completeExceptionally(t);
				throw t;
			}
			finally {
				// Always mark complete (even on failure) so waiters stop polling
				synchronized (ModuleUtils.class) {
					_module_state = GlobalGuiceState.complete;
				}
			}
		}
		return return_val;
	}
/** Returns a future to an app injector that is only valid after the app injector has been created *and* the app has been initialized
* (via at least one call to app initialization)
* @return
*/
public static CompletableFuture<Injector> getAppInjector() {
return _test_mode.get()
? _test_injector
: _app_injector.thenCombine(_called_ctor, (i, b) -> i);
}
/**
* Implementation of the IServiceContext class for easy usage
* from the other contexts.
*
* @author Burch
*
*/
public static class ServiceContext implements IServiceContext {
/**
* Delegates to the ModuleUtils get service call for the
* requested class, serviceName.
*
*/
@Override
public <I extends IUnderlyingService> Optional<I> getService(Class<I> serviceClazz,
Optional<String> serviceName) {
return Optional.ofNullable(ModuleUtils.getService(serviceClazz, serviceName));
}
/**
* Utility function that just calls {@link #getService(Class, Optional)}
*
*/
@Override
public Optional<IColumnarService> getColumnarService() {
return getService(IColumnarService.class, Optional.empty());
}
/**
* Utility function that just calls {@link #getService(Class, Optional)}
*
*/
@Override
public Optional<IDocumentService> getDocumentService() {
return getService(IDocumentService.class, Optional.empty());
}
/**
* Utility function that just calls {@link #getService(Class, Optional)}
*
*/
@Override
public Optional<IGeospatialService> getGeospatialService() {
return getService(IGeospatialService.class, Optional.empty());
}
/**
* Utility function that just calls {@link #getService(Class, Optional)}
*
*/
@Override
public Optional<IGraphService> getGraphService() {
return getService(IGraphService.class, Optional.empty());
}
/**
* Utility function that just calls {@link #getService(Class, Optional)}
*
*/
@Override
public IManagementDbService getCoreManagementDbService() {
try {
return getService(IManagementDbService.class, IManagementDbService.CORE_MANAGEMENT_DB).get();
} catch (NoSuchElementException e) { throw new RuntimeException("Missing mandatory service: CoreManagementDbService", e); }
}
/**
* Utility function that just calls {@link #getService(Class, Optional)}
*
*/
@Override
public Optional<ISearchIndexService> getSearchIndexService() {
return getService(ISearchIndexService.class, Optional.empty());
}
/**
* Utility function that just calls {@link #getService(Class, Optional)}
*
*/
@Override
public IStorageService getStorageService() {
try {
return getService(IStorageService.class, Optional.empty()).get();
} catch (NoSuchElementException e) { throw new RuntimeException("Missing mandatory service: IStorageService", e); }
}
/**
* Utility function that just calls {@link #getService(Class, Optional)}
*
*/
@Override
public Optional<ITemporalService> getTemporalService() {
return getService(ITemporalService.class, Optional.empty());
}
/**
* Utility function that just calls {@link #getService(Class, Optional)}
*
*/
@Override
public ISecurityService getSecurityService() {
try {
return getService(ISecurityService.class, Optional.empty()).get();
} catch (NoSuchElementException e) { throw new RuntimeException("Missing mandatory service: ISecurityService", e); }
}
@Override
public GlobalPropertiesBean getGlobalProperties() {
return globals;
}
}
/**
* Module that the config loader uses to bind the configured interfaces
* to the given services.
*
* @author Burch
*
*/
public static class ServiceBinderModule extends AbstractModule {
@SuppressWarnings("rawtypes")
private Class serviceClass;
@SuppressWarnings("rawtypes")
private Optional<Class> interfaceClazz;
private Optional<String> annotationName;
@SuppressWarnings("rawtypes")
public ServiceBinderModule(Class serviceClazz, Optional<Class> interfaceClazz, Optional<String> annotationName) {
this.serviceClass = serviceClazz;
this.interfaceClazz = interfaceClazz;
this.annotationName = annotationName;
}
/**
* Configures the given interface/service against the following rules
* A. If an interface is present
* A1. Annotation Present: Bind to service with annotationName
* A2. No Annotation Present: Bind to service
* B. Otherwise just bind service, it cannot use the annotation because it must be requested directly from the classname
*/
@SuppressWarnings("unchecked")
@Override
protected void configure() {
if ( interfaceClazz.isPresent() ) {
if ( annotationName.isPresent() ) {
bind(interfaceClazz.get()).annotatedWith(Names.named(annotationName.get())).to(serviceClass).in(Scopes.SINGLETON);
} else
bind(interfaceClazz.get()).to(serviceClass).in(Scopes.SINGLETON);
} else {
bind(serviceClass).in(Scopes.SINGLETON); //you can't annotate a plain bind
}
}
}
	/**
	 * Default IServiceContext binding module.
	 *
	 * @author Burch
	 *
	 */
	public static class ServiceModule extends AbstractModule {
		@Override
		protected void configure() {
			// Core framework bindings that every child injector inherits
			bind(IServiceContext.class).to(ServiceContext.class).in(Scopes.SINGLETON);
			bind(IUuidService.class).toInstance(UuidUtils.get());
			// NOTE(review): assumes initialize() populated globals before this module is installed - confirm ordering
			bind(GlobalPropertiesBean.class).toInstance(globals);
		}
	}
}
| |
/*
Copyright (c) 2017 Ahome' Innovation Technologies. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// TODO - review DSJ
package com.ait.lienzo.client.core.shape.wires;
import com.ait.lienzo.client.core.event.NodeDragEndEvent;
import com.ait.lienzo.client.core.event.NodeDragEndHandler;
import com.ait.lienzo.client.core.shape.Layer;
import com.ait.lienzo.client.core.shape.wires.event.WiresResizeEndEvent;
import com.ait.lienzo.client.core.shape.wires.event.WiresResizeEndHandler;
import com.ait.lienzo.client.core.shape.wires.event.WiresResizeStartEvent;
import com.ait.lienzo.client.core.shape.wires.event.WiresResizeStartHandler;
import com.ait.lienzo.client.core.shape.wires.handlers.AlignAndDistributeControl;
import com.ait.lienzo.client.core.shape.wires.handlers.WiresConnectorControl;
import com.ait.lienzo.client.core.shape.wires.handlers.WiresConnectorHandler;
import com.ait.lienzo.client.core.shape.wires.handlers.WiresControl;
import com.ait.lienzo.client.core.shape.wires.handlers.WiresControlFactory;
import com.ait.lienzo.client.core.shape.wires.handlers.WiresHandlerFactory;
import com.ait.lienzo.client.core.shape.wires.handlers.WiresShapeControl;
import com.ait.lienzo.client.core.shape.wires.handlers.impl.WiresControlFactoryImpl;
import com.ait.lienzo.client.core.shape.wires.handlers.impl.WiresHandlerFactoryImpl;
import com.ait.lienzo.client.core.shape.wires.handlers.impl.WiresShapeHandler;
import com.ait.lienzo.client.core.types.OnLayerBeforeDraw;
import com.ait.lienzo.client.core.types.Point2D;
import com.ait.lienzo.client.widget.DragConstraintEnforcer;
import com.ait.lienzo.client.widget.DragContext;
import com.ait.tooling.common.api.java.util.function.Supplier;
import com.ait.tooling.nativetools.client.collection.NFastArrayList;
import com.ait.tooling.nativetools.client.collection.NFastStringMap;
import com.ait.tooling.nativetools.client.event.HandlerRegistrationManager;
import java.util.ArrayList;
import java.util.Collection;
/**
 * Central registry and lifecycle manager for wires shapes and connectors on a Layer.
 *
 * One WiresManager exists per Layer (see {@link #get(Layer)}); it owns the shape and
 * connector registries, their event-handler registrations, the align-and-distribute
 * index, the selection manager, and the pluggable acceptors/factories that customize
 * connection, containment, docking and control-point behavior.
 */
public final class WiresManager
{
    // Layer-uuid -> manager; ensures a single manager per layer
    private static final NFastStringMap<WiresManager> MANAGER_MAP = new NFastStringMap<WiresManager>();
    private final MagnetManager m_magnetManager = new MagnetManager();
    private final AlignAndDistribute m_index;
    // Registered shapes keyed by their uuid
    private final NFastStringMap<WiresShape> m_shapesMap = new NFastStringMap<WiresShape>();
    // Handler registrations per shape/connector uuid, so deregistration can remove them
    private final NFastStringMap<HandlerRegistrationManager> m_shapeHandlersMap = new NFastStringMap<HandlerRegistrationManager>();
    private final NFastArrayList<WiresConnector> m_connectorList = new NFastArrayList<WiresConnector>();
    private final WiresLayer m_layer;
    private WiresControlFactory m_controlFactory;
    private WiresHandlerFactory m_wiresHandlerFactory;
    // Acceptors default to permissive (ALL) except docking, which defaults to NONE
    private ILocationAcceptor m_locationAcceptor = ILocationAcceptor.ALL;
    private IConnectionAcceptor m_connectionAcceptor = IConnectionAcceptor.ALL;
    private IContainmentAcceptor m_containmentAcceptor = IContainmentAcceptor.ALL;
    private IControlPointsAcceptor m_controlPointsAcceptor = IControlPointsAcceptor.ALL;
    private IDockingAcceptor m_dockingAcceptor = IDockingAcceptor.NONE;
    private SelectionManager m_selectionManager;
    // The drag handler currently in flight (set in WiresDragHandler.startDrag, cleared on drag end/reset)
    private WiresDragHandler m_handler;
    private boolean m_spliceEnabled;
    /**
     * Returns the WiresManager for the given layer, creating and caching one on first use.
     */
    public static final WiresManager get(Layer layer)
    {
        final String uuid = layer.uuid();
        // NOTE(review): could reuse the uuid local instead of calling layer.uuid() again
        WiresManager manager = MANAGER_MAP.get(layer.uuid());
        if (null != manager)
        {
            return manager;
        }
        manager = new WiresManager(layer);
        MANAGER_MAP.put(uuid, manager);
        return manager;
    }
    public static void remove(Layer layer)
    {
        remove(get(layer));
    }
    /** Destroys the manager and removes it from the per-layer cache. */
    public static void remove(WiresManager manager)
    {
        final String uuid = manager.getLayer().getLayer().uuid();
        manager.destroy();
        MANAGER_MAP.remove(uuid);
    }
    private WiresManager(final Layer layer)
    {
        m_layer = new WiresLayer(layer);
        m_layer.setWiresManager(this);
        // Force connector re-parse just before each draw (see LinePreparer below)
        layer.setOnLayerBeforeDraw(new LinePreparer(this));
        m_index = new AlignAndDistribute(layer);
        m_handler = null;
        m_controlFactory = new WiresControlFactoryImpl();
        m_wiresHandlerFactory = new WiresHandlerFactoryImpl();
    }
    /** Lazily creates the selection manager on first call. */
    public SelectionManager enableSelectionManager()
    {
        if (m_selectionManager==null)
        {
            m_selectionManager = new SelectionManager(this);
        }
        return m_selectionManager;
    }
    public boolean isSpliceEnabled()
    {
        return m_spliceEnabled;
    }
    public void setSpliceEnabled(boolean spliceEnabled)
    {
        m_spliceEnabled = spliceEnabled;
    }
    public static class LinePreparer implements OnLayerBeforeDraw
    {
        private WiresManager m_wiresManager;
        public LinePreparer(WiresManager wiresManager)
        {
            m_wiresManager = wiresManager;
        }
        @Override
        public boolean onLayerBeforeDraw(Layer layer)
        {
            // this is necessary as the line decorator cannot be determined until line parse has been attempted
            // as this is expensive it's delayed until the last minute before draw. As drawing order is not guaranteed
            // this method is used to force a parse on any line that has been refreshed. Refreshed means its points were
            // changed and thus it will be reparsed.
            for (WiresConnector c : m_wiresManager.getConnectorList())
            {
                // Returning false cancels this draw; a refreshed connector triggers a redraw
                if (WiresConnector.updateHeadTailForRefreshedConnector(c))
                {
                    return false;
                }
            }
            return true;
        }
    }
    public MagnetManager getMagnetManager()
    {
        return m_magnetManager;
    }
    public SelectionManager getSelectionManager()
    {
        return m_selectionManager;
    }
    public WiresShapeControl register(final WiresShape shape)
    {
        return register(shape, true);
    }
    public WiresShapeControl register(final WiresShape shape,
                                      final boolean addIntoIndex)
    {
        return register(shape, addIntoIndex, true);
    }
    /**
     * Registers a shape with this manager: creates its control, optionally wires up
     * mouse/drag handlers and the align-and-distribute index, and adds it to the layer.
     *
     * @param shape the shape to register
     * @param addIntoIndex whether to add the shape to the align-and-distribute index
     * @param addHandlers whether to attach the default mouse/drag handlers
     * @return the control created for the shape
     */
    public WiresShapeControl register(final WiresShape shape,
                                      final boolean addIntoIndex,
                                      final boolean addHandlers)
    {
        shape.setWiresManager(this);
        final WiresShapeControl control = getControlFactory().newShapeControl(shape, this);
        shape.setControl(control);
        final HandlerRegistrationManager registrationManager = createHandlerRegistrationManager();
        if (addHandlers) {
            final WiresShapeHandler handler =
                    getWiresHandlerFactory()
                            .newShapeHandler(shape,
                                             getControlFactory().newShapeHighlight(this),
                                             this);
            addWiresShapeHandler(shape, registrationManager, handler);
        }
        if (addIntoIndex)
        {
            addAlignAndDistributeHandlers(shape, registrationManager);
        }
        // Shapes added to the canvas layer by default.
        getLayer().add(shape);
        final String uuid = shape.uuid();
        m_shapesMap.put(uuid, shape);
        m_shapeHandlersMap.put(uuid, registrationManager);
        return control;
    }
    private void addAlignAndDistributeHandlers(final WiresShape shape,
                                               final HandlerRegistrationManager registrationManager)
    {
        // Shapes added to the align and distribute index.
        // Treat a resize like a drag.
        // Except right now we cannot A&D during steps (TODO)
        final AlignAndDistributeControl alignAndDistrControl = addToIndex(shape);
        shape.getControl().setAlignAndDistributeControl(alignAndDistrControl);
        registrationManager.register(shape.addWiresResizeStartHandler(new WiresResizeStartHandler()
        {
            @Override public void onShapeResizeStart(final WiresResizeStartEvent event)
            {
                alignAndDistrControl.dragStart();
            }
        }));
        registrationManager.register(shape.addWiresResizeEndHandler(new WiresResizeEndHandler()
        {
            @Override
            public void onShapeResizeEnd(WiresResizeEndEvent event)
            {
                alignAndDistrControl.dragEnd();
            }
        }));
    }
    /**
     * Attaches the default mouse and drag handlers for a shape and installs the
     * handler as the shape group's drag-constraint enforcer.
     */
    public static void addWiresShapeHandler(final WiresShape shape,
                                            final HandlerRegistrationManager registrationManager,
                                            final WiresShapeHandler handler)
    {
        registrationManager.register(shape.getGroup().addNodeMouseClickHandler(handler));
        registrationManager.register(shape.getGroup().addNodeMouseDownHandler(handler));
        registrationManager.register(shape.getGroup().addNodeMouseUpHandler(handler));
        registrationManager.register(shape.getGroup().addNodeDragEndHandler(handler));
        shape.getGroup().setDragConstraints(handler);
    }
    /** Fully unregisters a shape: deselects it, removes handlers/index entries, destroys it. */
    public void deregister(final WiresShape shape)
    {
        final String uuid = shape.uuid();
        deselect(shape);
        removeHandlers(uuid);
        shape.destroy();
        removeFromIndex(shape);
        getLayer().remove(shape);
        m_shapesMap.remove(uuid);
    }
    public WiresConnectorControl register(final WiresConnector connector)
    {
        return register(connector, true);
    }
    /**
     * Registers a connector: creates its control, optionally attaches the default
     * mouse/drag handlers on the group, line, head and tail, and adds it to the layer.
     *
     * @param connector the connector to register
     * @param addHandlers whether to attach the default handlers
     * @return the control created for the connector
     */
    public WiresConnectorControl register(final WiresConnector connector,
                                          final boolean addHandlers)
    {
        final String uuid = connector.uuid();
        final WiresConnectorControl control = getControlFactory().newConnectorControl(connector, this);
        connector.setControl(control);
        final HandlerRegistrationManager m_registrationManager = createHandlerRegistrationManager();
        if (addHandlers) {
            final WiresConnectorHandler handler = getWiresHandlerFactory().newConnectorHandler(connector, this);
            m_registrationManager.register(connector.getGroup().addNodeDragStartHandler(handler));
            m_registrationManager.register(connector.getGroup().addNodeDragMoveHandler(handler));
            m_registrationManager.register(connector.getGroup().addNodeDragEndHandler(handler));
            m_registrationManager.register(connector.getLine().addNodeMouseClickHandler(handler));
            m_registrationManager.register(connector.getLine().addNodeMouseDownHandler(handler));
            m_registrationManager.register(connector.getLine().addNodeMouseMoveHandler(handler));
            m_registrationManager.register(connector.getLine().addNodeMouseEnterHandler(handler));
            m_registrationManager.register(connector.getLine().addNodeMouseExitHandler(handler));
            m_registrationManager.register(connector.getHead().addNodeMouseClickHandler(handler));
            m_registrationManager.register(connector.getHead().addNodeMouseEnterHandler(handler));
            m_registrationManager.register(connector.getHead().addNodeMouseMoveHandler(handler));
            m_registrationManager.register(connector.getHead().addNodeMouseExitHandler(handler));
            m_registrationManager.register(connector.getTail().addNodeMouseClickHandler(handler));
            m_registrationManager.register(connector.getTail().addNodeMouseEnterHandler(handler));
            m_registrationManager.register(connector.getTail().addNodeMouseMoveHandler(handler));
            m_registrationManager.register(connector.getTail().addNodeMouseExitHandler(handler));
        }
        getConnectorList().add(connector);
        m_shapeHandlersMap.put(uuid, m_registrationManager);
        connector.addToLayer(getLayer().getLayer());
        return control;
    }
    /** Fully unregisters a connector: deselects it, removes handlers, destroys it. */
    public void deregister(final WiresConnector connector)
    {
        final String uuid = connector.uuid();
        deselect(connector);
        removeHandlers(uuid);
        connector.destroy();
        getConnectorList().remove(connector);
    }
    /** Resets and clears any in-flight drag handler. */
    public void resetContext() {
        if (null != m_handler) {
            m_handler.reset();
            m_handler = null;
        }
    }
    // Tears down all registered shapes/connectors, the selection manager, any in-flight
    // drag, and nulls out factories/acceptors. Called from remove(WiresManager).
    private void destroy() {
        if (!m_shapesMap.isEmpty()) {
            // Copy first: deregister(shape) mutates m_shapesMap while we iterate
            final Collection<WiresShape> shapes = new ArrayList<>(m_shapesMap.values());
            for (WiresShape shape : shapes) {
                deregister(shape);
            }
            m_shapesMap.clear();
        }
        if (!m_connectorList.isEmpty()) {
            // Same copy-before-mutate pattern for connectors
            final NFastArrayList<WiresConnector> connectors = m_connectorList.copy();
            for (WiresConnector connector : connectors) {
                deregister(connector);
            }
            m_connectorList.clear();
        }
        if (null != m_selectionManager) {
            m_selectionManager.destroy();
            m_selectionManager = null;
        }
        if (null != m_handler) {
            m_handler.reset();
            m_handler = null;
        }
        m_shapeHandlersMap.clear();
        m_controlFactory = null;
        m_wiresHandlerFactory = null;
        m_locationAcceptor = null;
        m_connectionAcceptor = null;
        m_containmentAcceptor = null;
        m_controlPointsAcceptor = null;
        m_dockingAcceptor = null;
    }
    public WiresLayer getLayer()
    {
        return m_layer;
    }
    public WiresShape getShape(final String uuid)
    {
        return m_shapesMap.get(uuid);
    }
    private AlignAndDistributeControl addToIndex(final WiresShape shape)
    {
        return m_index.addShape(shape.getGroup());
    }
    private void removeFromIndex(final WiresShape shape)
    {
        m_index.removeShape(shape.getGroup());
    }
    public AlignAndDistribute getAlignAndDistribute()
    {
        return m_index;
    }
    public void setWiresControlFactory(final WiresControlFactory factory)
    {
        this.m_controlFactory = factory;
    }
    public void setWiresHandlerFactory(WiresHandlerFactory wiresHandlerFactory) {
        this.m_wiresHandlerFactory = wiresHandlerFactory;
    }
    public WiresControlFactory getControlFactory()
    {
        return m_controlFactory;
    }
    public WiresHandlerFactory getWiresHandlerFactory() {
        return m_wiresHandlerFactory;
    }
    public IConnectionAcceptor getConnectionAcceptor()
    {
        return m_connectionAcceptor;
    }
    public IControlPointsAcceptor getControlPointsAcceptor()
    {
        return m_controlPointsAcceptor;
    }
    public IContainmentAcceptor getContainmentAcceptor()
    {
        return m_containmentAcceptor;
    }
    public IDockingAcceptor getDockingAcceptor() {
        return m_dockingAcceptor;
    }
    // NOTE(review): unlike the other acceptor setters below, this one has no null check
    public void setConnectionAcceptor(IConnectionAcceptor connectionAcceptor)
    {
        m_connectionAcceptor = connectionAcceptor;
    }
    public void setControlPointsAcceptor(IControlPointsAcceptor controlPointsAcceptor)
    {
        if (controlPointsAcceptor == null)
        {
            throw new IllegalArgumentException("ControlPointsAcceptor cannot be null");
        }
        this.m_controlPointsAcceptor = controlPointsAcceptor;
    }
    public void setContainmentAcceptor(IContainmentAcceptor containmentAcceptor)
    {
        if (containmentAcceptor == null)
        {
            throw new IllegalArgumentException("ContainmentAcceptor cannot be null");
        }
        m_containmentAcceptor = containmentAcceptor;
    }
    public void setDockingAcceptor(IDockingAcceptor dockingAcceptor)
    {
        if (dockingAcceptor == null)
        {
            throw new IllegalArgumentException("DockingAcceptor cannot be null");
        }
        this.m_dockingAcceptor = dockingAcceptor;
    }
    public void setLocationAcceptor(ILocationAcceptor m_locationAcceptor) {
        if (m_locationAcceptor == null)
        {
            throw new IllegalArgumentException("LocationAcceptor cannot be null");
        }
        this.m_locationAcceptor = m_locationAcceptor;
    }
    public ILocationAcceptor getLocationAcceptor() {
        return m_locationAcceptor;
    }
    // Removes and detaches all handler registrations recorded for the given uuid
    private void removeHandlers(final String uuid)
    {
        final HandlerRegistrationManager m_registrationManager = m_shapeHandlersMap.get(uuid);
        if (null != m_registrationManager)
        {
            m_registrationManager.removeHandler();
            m_shapeHandlersMap.remove(uuid);
        }
    }
    public NFastArrayList<WiresConnector> getConnectorList()
    {
        return m_connectorList;
    }
    public NFastStringMap<WiresShape> getShapesMap()
    {
        return m_shapesMap;
    }
    // Package-private factory hook so tests can substitute a registration manager
    HandlerRegistrationManager createHandlerRegistrationManager()
    {
        return new HandlerRegistrationManager();
    }
    /**
     * Base class for drag handlers: tracks the active DragContext, registers itself
     * as the manager's in-flight handler on drag start, and clears it on drag end.
     */
    public static abstract class WiresDragHandler implements DragConstraintEnforcer,
                                                             NodeDragEndHandler {
        private final WiresManager wiresManager;
        private DragContext dragContext;
        protected WiresDragHandler(final WiresManager wiresManager) {
            this.wiresManager = wiresManager;
        }
        public abstract WiresControl getControl();
        protected abstract boolean doAdjust(Point2D dxy);
        protected abstract void doOnNodeDragEnd(NodeDragEndEvent event);
        @Override
        public void startDrag(DragContext dragContext) {
            this.dragContext = dragContext;
            wiresManager.m_handler = this;
        }
        @Override
        public boolean adjust(Point2D dxy) {
            // No active drag context: zero the adjustment and report it handled
            if (null == dragContext) {
                dxy.setX(0);
                dxy.setY(0);
                return true;
            }
            return doAdjust(dxy);
        }
        @Override
        public void onNodeDragEnd(NodeDragEndEvent event) {
            if (null != dragContext) {
                doOnNodeDragEnd(event);
                this.dragContext = null;
                wiresManager.m_handler = null;
            }
        }
        public void reset() {
            if (null != dragContext) {
                doReset();
            }
        }
        protected void doReset() {
            dragContext.reset();
            dragContext = null;
            getControl().reset();
        }
        protected WiresManager getWiresManager() {
            return wiresManager;
        }
    }
    private void deselect(final WiresShape shape)
    {
        if (null != getSelectionManager() &&
            getSelectionManager().getSelectedItems().isShapeSelected(shape))
        {
            getSelectionManager().getSelectedItems().remove(shape);
        }
    }
    private void deselect(final WiresConnector connector)
    {
        if (null != getSelectionManager() &&
            getSelectionManager().getSelectedItems().isConnectorSelected(connector))
        {
            getSelectionManager().getSelectedItems().remove(connector);
        }
    }
}
| |
package com.bq.corbel.resources.rem.service;
import com.bq.corbel.lib.queries.builder.QueryParametersBuilder;
import com.bq.corbel.lib.queries.builder.ResourceQueryBuilder;
import com.bq.corbel.lib.ws.api.error.ErrorResponseFactory;
import com.bq.corbel.resources.rem.Rem;
import com.bq.corbel.resources.rem.model.AclPermission;
import com.bq.corbel.resources.rem.model.ManagedCollection;
import com.bq.corbel.resources.rem.model.RemDescription;
import com.bq.corbel.resources.rem.request.CollectionParametersImpl;
import com.bq.corbel.resources.rem.request.RequestParameters;
import com.bq.corbel.resources.rem.request.ResourceId;
import com.bq.corbel.resources.rem.request.ResourceParameters;
import com.bq.corbel.resources.rem.request.builder.RequestParametersBuilder;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import javax.ws.rs.core.Response;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.util.StringUtils;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
public class DefaultAclConfigurationService implements AclConfigurationService {
public static final String COLLECTION_NAME_FIELD = "collectionName";
public static final String DOMAIN_FIELD = "domain";
public static final String DEFAULT_PERMISSION_FIELD = "defaultPermission";
private static final String ALL_URIS_REGEXP = "(/.*)?";
private static final Logger LOG = LoggerFactory.getLogger(DefaultAclConfigurationService.class);
public static final char JOIN_CHAR = ':';
public static final String REGISTRY_DOMAIN = "_silkroad";
public static final String RESMI_GET = "ResmiGetRem";
public static final String RESMI_PUT = "ResmiPutRem";
public static final String RESMI_POST = "ResmiPostRem";
private static final String RESMI_DELETE = "ResmiDeleteRem";
private final Gson gson;
private final String adminsCollection;
private List<Pair<Rem, HttpMethod>> remsAndMethods = Collections.emptyList();
private RemService remService;
private Rem resmiGetRem;
private Rem resmiPutRem;
private Rem resmiPostRem;
private Rem resmiDeleteRem;
private final AclResourcesService aclResourcesService;
public DefaultAclConfigurationService(Gson gson, String adminsCollection, AclResourcesService aclResourcesService) {
this.gson = gson;
this.adminsCollection = adminsCollection;
this.aclResourcesService = aclResourcesService;
}
@Override
public void setRemsAndMethods(List<Pair<Rem, HttpMethod>> remsAndMethods) {
this.remsAndMethods = remsAndMethods;
}
@Override
public Response getConfigurations(String domain) {
return aclResourcesService.getCollection(
getResmiGetRem(),
adminsCollection,
new RequestParametersBuilder(REGISTRY_DOMAIN).apiParameters(
new CollectionParametersImpl(new QueryParametersBuilder().queries(
new ResourceQueryBuilder().add(DOMAIN_FIELD, domain).build()).build())).build(), Collections.emptyList());
}
@Override
public Response getConfiguration(String id, String domain) {
return getResourceWithParameters(
id,
new RequestParametersBuilder(REGISTRY_DOMAIN).apiParameters(
new CollectionParametersImpl(new QueryParametersBuilder().condition(
new ResourceQueryBuilder().add(DOMAIN_FIELD, domain).build()).build())).build());
}
@Override
public Response getConfiguration(String id) {
return getResourceWithParameters(id, new RequestParametersBuilder(REGISTRY_DOMAIN).build());
}
private Response getResourceWithParameters(String id, RequestParameters<ResourceParameters> parameters) {
return aclResourcesService.getResource(getResmiGetRem(), adminsCollection, new ResourceId(id), parameters, Collections.emptyList());
}
@Override
public Response createConfiguration(URI uri, ManagedCollection managedCollection) {
JsonObject jsonObject = gson.toJsonTree(managedCollection).getAsJsonObject();
RequestParameters requestParameters = new RequestParametersBuilder(REGISTRY_DOMAIN).build();
return aclResourcesService.saveResource(getResmiPostRem(), requestParameters, adminsCollection, uri, jsonObject,
Collections.emptyList());
}
@Override
public Response updateConfiguration(String id, ManagedCollection managedCollection) {
RequestParameters requestParameters = new RequestParametersBuilder(REGISTRY_DOMAIN).build();
ResourceId resourceId = new ResourceId(id);
Response response = aclResourcesService.getResource(getResmiGetRem(), adminsCollection, resourceId, requestParameters,
Collections.emptyList());
if (response.getStatus() == HttpStatus.NOT_FOUND.value()) {
return ErrorResponseFactory.getInstance().preconditionFailed("Can't create a acl configuration with PUT method.");
}
JsonObject jsonObject = gson.toJsonTree(managedCollection).getAsJsonObject();
return aclResourcesService.updateResource(getResmiPutRem(), adminsCollection, resourceId, requestParameters, jsonObject,
Collections.emptyList());
}
@Override
public void addAclConfiguration(String collectionName) {
List<RemDescription> remDescriptions = remService.getRegisteredRemDescriptions();
boolean alreadyRegistered = remDescriptions.stream().anyMatch(
description -> description.getUriPattern().equals(collectionName) && description.getRemName().startsWith("Acl"));
if (alreadyRegistered) {
return;
}
remsAndMethods.forEach(remAndMethod -> remService.registerRem(remAndMethod.getLeft(), getRemPattern(collectionName),
remAndMethod.getRight()));
}
@Override
public void removeAclConfiguration(String id, String collectionName) {
remsAndMethods.stream().map(Pair::getLeft)
.forEach(aclRem -> remService.unregisterRem(aclRem.getClass(), getRemPattern(collectionName)));
RequestParameters parameters = new RequestParametersBuilder(REGISTRY_DOMAIN).build();
aclResourcesService.deleteResource(getResmiDeleteRem(), adminsCollection, new ResourceId(id), parameters, Collections.emptyList());
}
private String getRemPattern(String collectionName) {
return collectionName + ALL_URIS_REGEXP;
}
@Override
public void setResourcesWithDefaultPermission(String collectionName, String domain, String defaultPermission) {
JsonObject aclObject = constructAclObjectWithDefaultPermission(defaultPermission);
RequestParameters parameters = new RequestParametersBuilder(domain).build();
getResmiPutRem().collection(collectionName, parameters, null, Optional.of(aclObject));
}
private JsonObject constructAclObjectWithDefaultPermission(String defaultPermission) {
JsonObject aclObject = new JsonObject();
JsonObject allObject = new JsonObject();
JsonObject allContentObject = new JsonObject();
allContentObject.addProperty(DefaultAclResourcesService.PERMISSION,
StringUtils.isEmpty(defaultPermission) ? AclPermission.READ.name() : defaultPermission);
allContentObject.add(DefaultAclResourcesService.PROPERTIES, new JsonObject());
allObject.add(DefaultAclResourcesService.ALL, allContentObject);
aclObject.add(DefaultAclResourcesService._ACL, allObject);
return aclObject;
}
@Override
public void refreshRegistry() {
RequestParameters requestParameters = new RequestParametersBuilder(REGISTRY_DOMAIN).build();
Response response = aclResourcesService.getCollection(getResmiGetRem(), adminsCollection, requestParameters,
Collections.emptyList());
if (response.getStatus() != Response.Status.OK.getStatusCode()) {
LOG.error("Can't access {}", adminsCollection);
return;
}
JsonArray jsonArray;
try {
jsonArray = (JsonArray) response.getEntity();
} catch (ClassCastException e) {
LOG.error("Can't read " + adminsCollection + " properly", e);
return;
}
for (JsonElement jsonElement : jsonArray) {
Optional<JsonObject> collectionName = Optional.of(jsonElement).filter(JsonElement::isJsonObject)
.map(JsonElement::getAsJsonObject).filter(jsonObject -> jsonObject.has(COLLECTION_NAME_FIELD));
if (!collectionName.isPresent()) {
LOG.error("Document in acl configuration collection has no collectionName field: {}", jsonElement.toString());
continue;
}
Optional<String> collectionNameOptional = collectionName.map(jsonObject -> jsonObject.get(COLLECTION_NAME_FIELD))
.filter(JsonElement::isJsonPrimitive).map(JsonElement::getAsJsonPrimitive).filter(JsonPrimitive::isString)
.map(JsonPrimitive::getAsString);
if (!collectionNameOptional.isPresent()) {
LOG.error("Unrecognized collectionName: {}", jsonElement.toString());
continue;
}
addAclConfiguration(collectionNameOptional.get());
}
}
private Rem getResmiGetRem() {
if (resmiGetRem == null) {
resmiGetRem = remService.getRem(RESMI_GET);
}
return resmiGetRem;
}
private Rem getResmiPutRem() {
if (resmiPutRem == null) {
resmiPutRem = remService.getRem(RESMI_PUT);
}
return resmiPutRem;
}
private Rem getResmiPostRem() {
if (resmiPostRem == null) {
resmiPostRem = remService.getRem(RESMI_POST);
}
return resmiPostRem;
}
private Rem getResmiDeleteRem() {
if (resmiDeleteRem == null) {
resmiDeleteRem = remService.getRem(RESMI_DELETE);
}
return resmiDeleteRem;
}
@Override
public void setRemService(RemService remService) {
this.remService = remService;
}
}
| |
/*
* Created by IntelliJ IDEA.
* User: Lennart
* Date: 15-mrt-03
* Time: 11:34:28
*/
package com.compomics.mslims.util.mascot;
import org.apache.log4j.Logger;
/*
* CVS information:
*
* $Revision: 1.6 $
* $Date: 2007/05/01 13:30:44 $
*/
/**
* This class implements a single header for a Mascot identification.
*
* @author Lennart Martens
*/
public class MascotHeader implements Comparable {
// Class specific log4j logger for MascotHeader instances.
private static Logger logger = Logger.getLogger(MascotHeader.class);
private String iAccession = null;
private String iDescription = null;
private int iStart = -1;
private int iEnd = -1;
private String iIsoformAccesions = null;
/**
* This constructor attempts to find a start and end location in the accession, and will find these if they are
* formatted a ' (x-y)' in the end of the accession number. If none are found, they are not set.
*
* @param aAccession String with the accession number, may contain coded start and end location: ' (x-y)' at the
* end.
* @param aDescription String with the description for the protein.
*/
public MascotHeader(String aAccession, String aDescription) {
this.iDescription = aDescription;
// Try to parse the accession number for start and end locations.
String test = aAccession;
int open = aAccession.lastIndexOf(" (") + 1;
int close = aAccession.lastIndexOf(")");
int hyphen = aAccession.lastIndexOf("-");
if ((open >= 0) && (close >= 0) && (open < hyphen) && (hyphen < close)) {
String start = aAccession.substring(open + 1, hyphen).trim();
String end = aAccession.substring(hyphen + 1, close).trim();
try {
int startNr = Integer.parseInt(start);
int endNr = Integer.parseInt(end);
this.iStart = startNr;
this.iEnd = endNr;
// Open-1 because there is a space at -1.
test = test.substring(0, open - 1);
} catch (Exception e) {
// Just let it go, probably not a start-end location after all.
}
}
this.iAccession = test;
}
/**
* This constructor takes the formative elements of the header separately and it takes these 'as is'. No processing
* is performed, so location information is considered removed from the accession number.
*
* @param aAccession String with the accession number.
* @param aDescription String with the description.
* @param aStart int with the start index.
* @param aEnd int with the end index.
*/
public MascotHeader(String aAccession, String aDescription, int aStart, int aEnd) {
this.iAccession = aAccession;
this.iDescription = aDescription;
this.iStart = aStart;
this.iEnd = aEnd;
}
/**
* This method checks the incoming start and end indices and updates the current ones if applicable.
*
* @param aStart int with the start position of the peptide in the parent protein.
* @param aEnd int with the end position of the peptide in the parent protein.
*/
public void updateLocation(int aStart, int aEnd) {
// Two checks, three avenues.
// a. If the current start and stop are both '-1', simply update.
if (iStart < 0 && iEnd < 0) {
iStart = aStart;
iEnd = aEnd;
}
// b. See if the start AND stop are both above 0.
else if (aStart > 0 && aEnd > 0) {
// 1. New delta (end-start) is smaller than the current one.
// In this case: replace.
if ((aEnd - aStart) < (iEnd - iStart)) {
iStart = aStart;
iEnd = aEnd;
}
// 2. Current start and end are both larger than the new start and end
// positions.
// In this case: replace (first occurrence has precedence).
else if (aStart < iStart && aEnd < iEnd) {
iStart = aStart;
iEnd = aEnd;
}
}
// 3. In all other cases, do nothing.
}
/**
* This method returns only the accession number. Localization information is never added here.
*
* @return String with only the accession number (no localization).
*/
public String getAccession() {
return this.iAccession;
}
/**
* This method returns the accession number, completed with ' (start-end)' if these are present. If no localization
* is present, the result will be the same as calling 'getAccession()'.
*
* @return String with the accession number, appended with ' (start-end)' if localization information is present.
*/
public String getCompoundAccession() {
String result = this.getAccession();
if ((iStart > 0) && (iEnd > 0)) {
result += " (" + iStart + "-" + iEnd + ")";
}
return result;
}
/**
* This method reports on the description for this protein.
*
* @return String with the description for this protein.
*/
public String getDescription() {
return iDescription;
}
/**
* Simple setter for the description.
*
* @param aDescription String with the new description.
*/
public void setDescription(String aDescription) {
this.iDescription = aDescription;
}
/**
* This method reports on the start index of the sequence.
*
* @return int with the start index, -1 if not known.
*/
public int getStart() {
return iStart;
}
/**
* This method reports on the end index of the sequence.
*
* @return int with the end index, -1 if not known.
*/
public int getEnd() {
return iEnd;
}
public String getIsoformAccessions() {
return iIsoformAccesions;
}
public void setIsoformAccessions(String aIosformAccesions) {
iIsoformAccesions = aIosformAccesions;
}
/**
* This method gives headers a score. Currently only works for IPI accession numbers.
*
* @return int with the score.
*/
public int getScore() {
int score = 0;
if (this.iAccession.toUpperCase().startsWith("IPI") && this.iDescription != null) {
// IPI header. These we can score in detail.
String temp = this.iDescription.toUpperCase();
if (temp.indexOf("SWISS-PROT") >= 0) {
score = 2;
} else if (temp.indexOf("TREMBL") >= 0 || temp.indexOf("REFSEQ_NP") >= 0) {
score = 1;
}
}
return score;
}
/**
* Compares this object with the specified object for order. Returns a negative integer, zero, or a positive
* integer as this object is less than, equal to, or greater than the specified object.<p>
* <p/>
* The implementor must ensure <tt>sgn(x.compareTo(y)) == -sgn(y.compareTo(x))</tt> for all <tt>x</tt> and
* <tt>y</tt>. (This implies that <tt>x.compareTo(y)</tt> must throw an exception iff <tt>y.compareTo(x)</tt>
* throws an exception.)<p>
* <p/>
* The implementor must also ensure that the relation is transitive: <tt>(x.compareTo(y)>0 &&
* y.compareTo(z)>0)</tt> implies <tt>x.compareTo(z)>0</tt>.<p>
* <p/>
* Finally, the implementer must ensure that <tt>x.compareTo(y)==0</tt> implies that <tt>sgn(x.compareTo(z)) ==
* sgn(y.compareTo(z))</tt>, for all <tt>z</tt>.<p>
* <p/>
* It is strongly recommended, but <i>not</i> strictly required that <tt>(x.compareTo(y)==0) == (x.equals(y))</tt>.
* Generally speaking, any class that implements the <tt>Comparable</tt> interface and violates this condition
* should clearly indicate this fact. The recommended language is "Note: this class has a natural ordering that is
* inconsistent with equals."
*
* @param o the Object to be compared.
* @return a negative integer, zero, or a positive integer as this object is less than, equal to, or greater than
* the specified object.
* @throws ClassCastException if the specified object's type prevents it from being compared to this Object.
*/
public int compareTo(Object o) {
MascotHeader mh = (MascotHeader) o;
String myAccession = this.getAccession();
String otherAccession = mh.getAccession();
int compare = 0;
// Accession starting with numbers should go before letters.
if (Character.isDigit(myAccession.charAt(0)) && !Character.isDigit(otherAccession.charAt(0))) {
compare = -1;
} else if (!Character.isDigit(myAccession.charAt(0)) && Character.isDigit(otherAccession.charAt(0))) {
compare = 1;
} else {
compare = myAccession.compareTo(otherAccession);
}
if (compare == 0) {
int myStart = this.getStart();
int otherStart = mh.getStart();
if (myStart >= 0 && otherStart >= 0) {
compare = myStart - otherStart;
} else if (myStart >= 0 && otherStart < 0) {
compare = -1;
} else if (myStart < 0 && otherStart >= 0) {
compare = 1;
}
}
return compare;
}
}
| |
package dataMapper.diagram.edit.policies;
import org.eclipse.draw2d.ColorConstants;
import org.eclipse.draw2d.Figure;
import org.eclipse.draw2d.FigureListener;
import org.eclipse.draw2d.Graphics;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.RectangleFigure;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.gef.editpolicies.SelectionEditPolicy;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.gmf.tooling.runtime.edit.policies.labels.IRefreshableFeedbackEditPolicy;
/**
 * Selection edit policy for text/label edit parts. When the host figure is a
 * {@link WrappingLabel} it renders its own selection/focus state; for any other
 * figure this policy paints separate feedback figures on the feedback layer and
 * keeps them in sync with the host figure's position.
 *
 * @generated
 */
public class DataMapperTextSelectionEditPolicy extends SelectionEditPolicy
		implements
		IRefreshableFeedbackEditPolicy {
	/**
	 * Figure shown on the feedback layer while the host is selected; null when
	 * no selection feedback is currently displayed.
	 *
	 * @generated
	 */
	private IFigure selectionFeedbackFigure;
	/**
	 * Figure shown on the feedback layer while the host has focus; null when no
	 * focus feedback is currently displayed.
	 *
	 * @generated
	 */
	private IFigure focusFeedbackFigure;
	/**
	 * Listener that refreshes the feedback figures whenever the host figure
	 * moves; lazily created by getHostPositionListener().
	 *
	 * @generated
	 */
	private FigureListener hostPositionListener;
	/**
	 * Shows selection and focus feedback together (primary selection).
	 *
	 * @generated
	 */
	protected void showPrimarySelection() {
		if (getHostFigure() instanceof WrappingLabel) {
			// WrappingLabel renders its own selection/focus decorations.
			((WrappingLabel) getHostFigure()).setSelected(true);
			((WrappingLabel) getHostFigure()).setFocus(true);
		} else {
			showSelection();
			showFocus();
		}
	}
	/**
	 * Shows selection (but not focus) feedback for the host figure.
	 *
	 * @generated
	 */
	protected void showSelection() {
		if (getHostFigure() instanceof WrappingLabel) {
			((WrappingLabel) getHostFigure()).setSelected(true);
			((WrappingLabel) getHostFigure()).setFocus(false);
		} else {
			// Clear any previous feedback before installing a fresh figure.
			hideSelection();
			addFeedback(selectionFeedbackFigure = createSelectionFeedbackFigure());
			// Track host moves so the feedback follows the figure.
			getHostFigure().addFigureListener(getHostPositionListener());
			refreshSelectionFeedback();
			hideFocus();
		}
	}
	/**
	 * Removes selection feedback (and any focus feedback) from the host figure.
	 *
	 * @generated
	 */
	protected void hideSelection() {
		if (getHostFigure() instanceof WrappingLabel) {
			((WrappingLabel) getHostFigure()).setSelected(false);
			((WrappingLabel) getHostFigure()).setFocus(false);
		} else {
			if (selectionFeedbackFigure != null) {
				removeFeedback(selectionFeedbackFigure);
				getHostFigure().removeFigureListener(getHostPositionListener());
				selectionFeedbackFigure = null;
			}
			hideFocus();
		}
	}
	/**
	 * Shows focus feedback for the host figure.
	 *
	 * @generated
	 */
	protected void showFocus() {
		if (getHostFigure() instanceof WrappingLabel) {
			((WrappingLabel) getHostFigure()).setFocus(true);
		} else {
			hideFocus();
			addFeedback(focusFeedbackFigure = createFocusFeedbackFigure());
			refreshFocusFeedback();
		}
	}
	/**
	 * Removes focus feedback from the host figure.
	 *
	 * @generated
	 */
	protected void hideFocus() {
		if (getHostFigure() instanceof WrappingLabel) {
			((WrappingLabel) getHostFigure()).setFocus(false);
		} else {
			if (focusFeedbackFigure != null) {
				removeFeedback(focusFeedbackFigure);
				focusFeedbackFigure = null;
			}
		}
	}
	/**
	 * Computes the host figure's bounds translated into feedback-layer
	 * coordinates. For Label hosts the text bounds are used, clipped to the
	 * figure bounds.
	 *
	 * @generated
	 */
	protected Rectangle getFeedbackBounds() {
		Rectangle bounds;
		if (getHostFigure() instanceof Label) {
			bounds = ((Label) getHostFigure()).getTextBounds();
			bounds.intersect(getHostFigure().getBounds());
		} else {
			bounds = getHostFigure().getBounds().getCopy();
		}
		// Convert from the host's parent coordinates to feedback-layer coordinates.
		getHostFigure().getParent().translateToAbsolute(bounds);
		getFeedbackLayer().translateToRelative(bounds);
		return bounds;
	}
	/**
	 * Creates the selection feedback figure: a highlighted label clone for Label
	 * hosts, an unfilled rectangle otherwise.
	 *
	 * @generated
	 */
	protected IFigure createSelectionFeedbackFigure() {
		if (getHostFigure() instanceof Label) {
			Label feedbackFigure = new Label();
			feedbackFigure.setOpaque(true);
			feedbackFigure
					.setBackgroundColor(ColorConstants.menuBackgroundSelected);
			feedbackFigure
					.setForegroundColor(ColorConstants.menuForegroundSelected);
			return feedbackFigure;
		} else {
			RectangleFigure feedbackFigure = new RectangleFigure();
			feedbackFigure.setFill(false);
			return feedbackFigure;
		}
	}
	/**
	 * Creates the focus feedback figure: a figure that paints a focus rectangle
	 * just inside its bounds.
	 *
	 * @generated
	 */
	protected IFigure createFocusFeedbackFigure() {
		return new Figure() {
			protected void paintFigure(Graphics graphics) {
				graphics.drawFocus(getBounds().getResized(-1, -1));
			}
		};
	}
	/**
	 * Copies text, alignment and font from the host Label onto the feedback
	 * label so it mirrors the host's appearance.
	 *
	 * @generated
	 */
	protected void updateLabel(Label target) {
		Label source = (Label) getHostFigure();
		target.setText(source.getText());
		target.setTextAlignment(source.getTextAlignment());
		target.setFont(source.getFont());
	}
	/**
	 * Re-syncs the selection feedback figure's content and bounds with the host.
	 *
	 * @generated
	 */
	protected void refreshSelectionFeedback() {
		if (selectionFeedbackFigure != null) {
			if (selectionFeedbackFigure instanceof Label) {
				updateLabel((Label) selectionFeedbackFigure);
				selectionFeedbackFigure.setBounds(getFeedbackBounds());
			} else {
				// Rectangle feedback is drawn with a 5-pixel margin around the host.
				selectionFeedbackFigure.setBounds(getFeedbackBounds().expand(5,
						5));
			}
		}
	}
	/**
	 * Re-syncs the focus feedback figure's bounds with the host.
	 *
	 * @generated
	 */
	protected void refreshFocusFeedback() {
		if (focusFeedbackFigure != null) {
			focusFeedbackFigure.setBounds(getFeedbackBounds());
		}
	}
	/**
	 * Refreshes both selection and focus feedback (IRefreshableFeedbackEditPolicy).
	 *
	 * @generated
	 */
	@Override
	public void refreshFeedback() {
		refreshSelectionFeedback();
		refreshFocusFeedback();
	}
	/**
	 * Lazily creates the figure listener that refreshes feedback on host moves.
	 *
	 * @generated
	 */
	private FigureListener getHostPositionListener() {
		if (hostPositionListener == null) {
			hostPositionListener = new FigureListener() {
				public void figureMoved(IFigure source) {
					refreshFeedback();
				}
			};
		}
		return hostPositionListener;
	}
}
| |
/*
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.ros.internal.message;
import org.jboss.netty.buffer.ChannelBuffer;
import org.ros.exception.RosRuntimeException;
import org.ros.internal.message.context.MessageContext;
import org.ros.internal.message.field.Field;
import org.ros.internal.message.field.MessageFieldType;
import org.ros.internal.message.field.MessageFields;
import org.ros.message.Duration;
import org.ros.message.MessageIdentifier;
import org.ros.message.Time;
import java.util.List;
/**
* @author damonkohler@google.com (Damon Kohler)
*/
class MessageImpl implements RawMessage, GetInstance {
private final MessageContext messageContext;
private final MessageFields messageFields;
public MessageImpl(MessageContext messageContext) {
this.messageContext = messageContext;
messageFields = new MessageFields(messageContext);
}
public MessageContext getMessageContext() {
return messageContext;
}
public MessageFields getMessageFields() {
return messageFields;
}
@Override
public RawMessage toRawMessage() {
return (RawMessage) this;
}
@Override
public MessageIdentifier getIdentifier() {
return messageContext.getMessageIdentifer();
}
@Override
public String getType() {
return messageContext.getType();
}
@Override
public String getPackage() {
return messageContext.getPackage();
}
@Override
public String getName() {
return messageContext.getName();
}
@Override
public String getDefinition() {
return messageContext.getDefinition();
}
@Override
public List<Field> getFields() {
return messageFields.getFields();
}
@Override
public boolean getBool(String name) {
return (Boolean) messageFields.getFieldValue(name);
}
@Override
public boolean[] getBoolArray(String name) {
return (boolean[]) messageFields.getFieldValue(name);
}
@Override
public Duration getDuration(String name) {
return (Duration) messageFields.getFieldValue(name);
}
@SuppressWarnings("unchecked")
@Override
public List<Duration> getDurationList(String name) {
return (List<Duration>) messageFields.getFieldValue(name);
}
@Override
public float getFloat32(String name) {
return (Float) messageFields.getFieldValue(name);
}
@Override
public float[] getFloat32Array(String name) {
return (float[]) messageFields.getFieldValue(name);
}
@Override
public double getFloat64(String name) {
return (Double) messageFields.getFieldValue(name);
}
@Override
public double[] getFloat64Array(String name) {
return (double[]) messageFields.getFieldValue(name);
}
@Override
public short getInt16(String name) {
return (Short) messageFields.getFieldValue(name);
}
@Override
public short[] getInt16Array(String name) {
return (short[]) messageFields.getFieldValue(name);
}
@Override
public int getInt32(String name) {
return (Integer) messageFields.getFieldValue(name);
}
@Override
public int[] getInt32Array(String name) {
return (int[]) messageFields.getFieldValue(name);
}
@Override
public long getInt64(String name) {
return (Long) messageFields.getFieldValue(name);
}
@Override
public long[] getInt64Array(String name) {
return (long[]) messageFields.getFieldValue(name);
}
@Override
public byte getInt8(String name) {
return (Byte) messageFields.getFieldValue(name);
}
@Override
public byte[] getInt8Array(String name) {
return (byte[]) messageFields.getFieldValue(name);
}
@Override
public <T extends Message> T getMessage(String name) {
if (messageFields.getField(name).getType() instanceof MessageFieldType) {
return messageFields.getField(name).<T>getValue();
}
throw new RosRuntimeException("Failed to access message field: " + name);
}
@Override
public <T extends Message> List<T> getMessageList(String name) {
if (messageFields.getField(name).getType() instanceof MessageFieldType) {
return messageFields.getField(name).<List<T>>getValue();
}
throw new RosRuntimeException("Failed to access list field: " + name);
}
@Override
public String getString(String name) {
return (String) messageFields.getFieldValue(name);
}
@SuppressWarnings("unchecked")
@Override
public List<String> getStringList(String name) {
return (List<String>) messageFields.getFieldValue(name);
}
@Override
public Time getTime(String name) {
return (Time) messageFields.getFieldValue(name);
}
@SuppressWarnings("unchecked")
@Override
public List<Time> getTimeList(String name) {
return (List<Time>) messageFields.getFieldValue(name);
}
@Override
public short getUInt16(String name) {
return (Short) messageFields.getFieldValue(name);
}
@Override
public short[] getUInt16Array(String name) {
return (short[]) messageFields.getFieldValue(name);
}
@Override
public int getUInt32(String name) {
return (Integer) messageFields.getFieldValue(name);
}
@Override
public int[] getUInt32Array(String name) {
return (int[]) messageFields.getFieldValue(name);
}
@Override
public long getUInt64(String name) {
return (Long) messageFields.getFieldValue(name);
}
@Override
public long[] getUInt64Array(String name) {
return (long[]) messageFields.getFieldValue(name);
}
@Override
public short getUInt8(String name) {
return (Short) messageFields.getFieldValue(name);
}
@Override
public short[] getUInt8Array(String name) {
return (short[]) messageFields.getFieldValue(name);
}
@Override
public void setBool(String name, boolean value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setBoolArray(String name, boolean[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setDurationList(String name, List<Duration> value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setDuration(String name, Duration value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setFloat32(String name, float value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setFloat32Array(String name, float[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setFloat64(String name, double value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setFloat64Array(String name, double[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setInt16(String name, short value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setInt16Array(String name, short[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setInt32(String name, int value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setInt32Array(String name, int[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setInt64(String name, long value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setInt64Array(String name, long[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setInt8(String name, byte value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setInt8Array(String name, byte[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setMessage(String name, Message value) {
// TODO(damonkohler): Verify the type of the provided Message?
messageFields.setFieldValue(name, value);
}
@Override
public void setMessageList(String name, List<Message> value) {
// TODO(damonkohler): Verify the type of all Messages in the provided list?
messageFields.setFieldValue(name, value);
}
@Override
public void setString(String name, String value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setStringList(String name, List<String> value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setTime(String name, Time value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setTimeList(String name, List<Time> value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setUInt16(String name, short value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setUInt16Array(String name, short[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setUInt32(String name, int value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setUInt32Array(String name, int[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setUInt64(String name, long value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setUInt64Array(String name, long[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setUInt8(String name, byte value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setUInt8Array(String name, byte[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public byte getByte(String name) {
return (Byte) messageFields.getFieldValue(name);
}
@Override
public short getChar(String name) {
return (Short) messageFields.getFieldValue(name);
}
@Override
public void setByte(String name, byte value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setChar(String name, short value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setByteArray(String name, byte[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public void setCharArray(String name, short[] value) {
messageFields.setFieldValue(name, value);
}
@Override
public byte[] getByteArray(String name) {
return (byte[]) messageFields.getFieldValue(name);
}
@Override
public short[] getCharArray(String name) {
return (short[]) messageFields.getFieldValue(name);
}
@Override
public ChannelBuffer getChannelBuffer(String name) {
return (ChannelBuffer) messageFields.getFieldValue(name);
}
@Override
public void setChannelBuffer(String name, ChannelBuffer value) {
    // Stores the buffer reference as-is (no defensive copy).
    messageFields.setFieldValue(name, value);
}
@Override
public Object getInstance() {
    // Unwraps to the concrete message object; equals() below uses this to
    // compare through GetInstance wrappers/proxies.
    return this;
}
@Override
public String toString() {
    // Renders as "MessageImpl<TYPE>" — identical output to the former
    // String.format("MessageImpl<%s>", getType()) form.
    return "MessageImpl<" + getType() + ">";
}
@Override
public int hashCode() {
    // Classic 31-based hash over (messageContext, messageFields); null fields
    // contribute 0. Produces exactly the same value as the previous
    // prime/result formulation: 31 * (31 + ctx) + fields.
    int hash = 31 + (messageContext == null ? 0 : messageContext.hashCode());
    hash = hash * 31 + (messageFields == null ? 0 : messageFields.hashCode());
    return hash;
}
@Override
public boolean equals(Object obj) {
    // Equality is defined over (messageContext, messageFields), comparing
    // through the GetInstance indirection so wrapped instances compare equal
    // to their underlying message.
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, covering the former explicit null check.
    if (!(obj instanceof GetInstance)) {
        return false;
    }
    Object resolved = ((GetInstance) obj).getInstance();
    if (getClass() != resolved.getClass()) {
        return false;
    }
    MessageImpl other = (MessageImpl) resolved;
    boolean contextsEqual = messageContext == null
            ? other.messageContext == null
            : messageContext.equals(other.messageContext);
    if (!contextsEqual) {
        return false;
    }
    return messageFields == null
            ? other.messageFields == null
            : messageFields.equals(other.messageFields);
}
}
| |
package net.java.otr4j.session;
import info.guardianproject.bouncycastle.util.encoders.Hex;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;
import net.java.otr4j.OtrEngineHost;
import net.java.otr4j.OtrException;
import net.java.otr4j.OtrKeyManager;
import net.java.otr4j.crypto.OtrTlvHandler;
import net.java.otr4j.crypto.SM;
import net.java.otr4j.crypto.SM.SMException;
import net.java.otr4j.crypto.SM.SMState;
import net.java.otr4j.io.OtrOutputStream;
/**
 * Handler for the OTR Socialist Millionaire Protocol (SMP): drives the SMP
 * state machine over TLV records exchanged inside an encrypted OTR session,
 * and reports verification outcomes to the key manager and the engine host.
 */
public class OtrSm implements OtrTlvHandler {

    /** Engine-host extension that can prompt the local user for the SMP shared secret. */
    public static interface OtrSmEngineHost extends OtrEngineHost {
        /** Ask the user for the secret; {@code question} is null when the peer supplied none. */
        void askForSecret(SessionID sessionID, String question);
    }

    // SMP state machine: expected next message, intermediate values, progress/cheat flags.
    SMState smstate;
    private SessionID sessionID;
    private OtrKeyManager keyManager;
    private OtrSmEngineHost engineHost;
    private Session session;
    // TLVs produced by the most recent processTlv() call; retrieved via getPendingTlvs().
    private List<TLV> pendingTlvs;

    /**
     * Construct an OTR Socialist Millionaire handler object.
     *
     * @param session The encrypted session this SMP exchange runs inside.
     * @param keyManager The long-term key manager.
     * @param sessionId The session ID.
     * @param engineHost The host where we can present messages or ask for the
     *            shared secret.
     */
    public OtrSm(Session session, OtrKeyManager keyManager, SessionID sessionId,
            OtrSmEngineHost engineHost) {
        smstate = new SMState();
        this.session = session;
        this.sessionID = sessionId;
        this.keyManager = keyManager;
        this.engineHost = engineHost;
    }

    /* Compute secret session ID as hash of agreed secret */
    private static byte[] computeSessionId(BigInteger s) throws SMException {
        byte[] sdata;
        try {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            OtrOutputStream oos = new OtrOutputStream(out);
            // 0x00 type byte followed by the MPI-encoded shared secret.
            oos.write(0x00);
            oos.writeBigInt(s);
            sdata = out.toByteArray();
            oos.close();
        } catch (IOException e1) {
            throw new SMException(e1);
        }

        /* Session id = first 8 bytes of SHA-256(sdata). */
        MessageDigest sha256;
        try {
            sha256 = MessageDigest.getInstance("SHA-256");
        } catch (NoSuchAlgorithmException e) {
            throw new SMException("cannot find SHA-256");
        }
        byte[] res = sha256.digest(sdata);
        byte[] secure_session_id = new byte[8];
        System.arraycopy(res, 0, secure_session_id, 0, 8);
        return secure_session_id;
    }

    /**
     * Respond to or initiate an SMP negotiation
     *
     * @param question The question to present to the peer, if initiating. May
     *            be null for no question.
     * @param secret The secret.
     * @param initiating Whether we are initiating or responding to an initial
     *            request.
     *
     * @return TLVs to send to the peer
     */
    public List<TLV> initRespondSmp(String question, String secret, boolean initiating)
            throws OtrException {
        if (question != null && !initiating)
            throw new IllegalArgumentException("Only supply a question if initiating");

        /*
         * Construct the combined secret as a SHA256 hash of:
         * Version byte (0x01), Initiator fingerprint (20 bytes),
         * responder fingerprint (20 bytes), secure session id, input secret
         */
        byte[] our_fp = Hex.decode(keyManager.getLocalFingerprint(sessionID));
        byte[] their_fp = Hex.decode(keyManager.getRemoteFingerprint(sessionID));

        byte[] sessionId;
        try {
            sessionId = computeSessionId(session.getS());
        } catch (SMException ex) {
            throw new OtrException(ex);
        }

        // NOTE(review): secret.getBytes() uses the platform default charset and
        // the buffer is sized by secret.length() (chars, not bytes), so a
        // non-ASCII secret is truncated/mis-hashed. Both peers must currently
        // agree on this behavior — confirm against the OTR spec before changing.
        int combined_buf_len = 41 + sessionId.length + secret.length();
        byte[] combined_buf = new byte[combined_buf_len];
        combined_buf[0] = 1;
        // Fingerprint order is initiator-first, so it depends on our role.
        if (initiating) {
            System.arraycopy(our_fp, 0, combined_buf, 1, 20);
            System.arraycopy(their_fp, 0, combined_buf, 21, 20);
        } else {
            System.arraycopy(their_fp, 0, combined_buf, 1, 20);
            System.arraycopy(our_fp, 0, combined_buf, 21, 20);
        }
        System.arraycopy(sessionId, 0, combined_buf, 41, sessionId.length);
        System.arraycopy(secret.getBytes(), 0, combined_buf, 41 + sessionId.length, secret.length());

        MessageDigest sha256;
        try {
            sha256 = MessageDigest.getInstance("SHA-256");
        } catch (NoSuchAlgorithmException ex) {
            throw new OtrException(ex);
        }
        byte[] combined_secret = sha256.digest(combined_buf);
        byte[] smpmsg;
        try {
            if (initiating) {
                smpmsg = SM.step1(smstate, combined_secret);
            } else {
                smpmsg = SM.step2b(smstate, combined_secret);
            }
        } catch (SMException ex) {
            throw new OtrException(ex);
        }

        // If we've got a question, attach it to the smpmsg:
        // question bytes, a NUL separator, then the SMP payload.
        if (question != null) {
            byte[] qsmpmsg = new byte[question.length() + 1 + smpmsg.length];
            System.arraycopy(question.getBytes(), 0, qsmpmsg, 0, question.length());
            System.arraycopy(smpmsg, 0, qsmpmsg, question.length() + 1, smpmsg.length);
            smpmsg = qsmpmsg;
        }

        TLV sendtlv = new TLV(initiating ? (question != null ? TLV.SMP1Q : TLV.SMP1) : TLV.SMP2,
                smpmsg);
        // Initiator waits for the peer's SMP2; responder has already consumed
        // SMP1 and waits for SMP3.
        smstate.nextExpected = initiating ? SM.EXPECT2 : SM.EXPECT3;
        return makeTlvList(sendtlv);
    }

    /**
     * Create an abort TLV and reset our state.
     *
     * @return TLVs to send to the peer
     */
    public List<TLV> abortSmp() throws OtrException {
        TLV sendtlv = new TLV(TLV.SMP_ABORT, new byte[0]);
        smstate.nextExpected = SM.EXPECT1;
        return makeTlvList(sendtlv);
    }

    /** Returns the TLVs queued by the last {@link #processTlv} call; may be null. */
    public List<TLV> getPendingTlvs() {
        return pendingTlvs;
    }

    /** Process an incoming TLV and optionally send back TLVs to peer. */
    public void processTlv(TLV tlv) throws OtrException {
        try {
            pendingTlvs = doProcessTlv(tlv);
        } catch (SMException ex) {
            throw new OtrException(ex);
        }
    }

    /**
     * Dispatch one incoming SMP TLV against the current state machine state.
     * Out-of-sequence TLVs are reported to the host as errors; a cheat
     * detection resets the state machine.
     *
     * @return TLVs to send in response, or null if nothing must be sent now.
     */
    private List<TLV> doProcessTlv(TLV tlv) throws SMException {
        /* If TLVs contain SMP data, process it */
        int nextMsg = smstate.nextExpected;
        int tlvType = tlv.getType();

        if (tlvType == TLV.SMP1Q && nextMsg == SM.EXPECT1) {
            /* We can only do the verification half now.
             * We must wait for the secret to be entered
             * to continue. */
            // Payload layout: question bytes, NUL terminator, SMP1 payload.
            byte[] question = tlv.getValue();
            int qlen = 0;
            // Scan for the NUL separating the question from the SMP data.
            for (; qlen != question.length && question[qlen] != 0; qlen++) {
            }
            if (qlen == question.length)
                qlen = 0;   // no NUL found: treat the whole value as SMP data
            else
                qlen++;     // skip past the NUL
            byte[] input = new byte[question.length - qlen];
            System.arraycopy(question, qlen, input, 0, question.length - qlen);
            SM.step2a(smstate, input, true);
            if (qlen != 0)
                qlen--;     // back off the NUL to get the plain question length
            byte[] plainq = new byte[qlen];
            System.arraycopy(question, 0, plainq, 0, qlen);
            if (smstate.smProgState != SM.PROG_CHEATED) {
                engineHost.askForSecret(sessionID, new String(plainq));
            } else {
                engineHost.showError(sessionID, "Peer attempted to cheat during verification");
                smstate.nextExpected = SM.EXPECT1;
                smstate.smProgState = SM.PROG_OK;
            }
        } else if (tlvType == TLV.SMP1Q) {
            // SMP1Q arrived while we expected a different message.
            engineHost.showError(sessionID, "Error during verification (step 1q)");
        } else if (tlvType == TLV.SMP1 && nextMsg == SM.EXPECT1) {
            /* We can only do the verification half now.
             * We must wait for the secret to be entered
             * to continue. */
            SM.step2a(smstate, tlv.getValue(), false);
            if (smstate.smProgState != SM.PROG_CHEATED) {
                engineHost.askForSecret(sessionID, null);
            } else {
                engineHost.showError(sessionID, "Peer attempted to cheat during verification");
                smstate.nextExpected = SM.EXPECT1;
                smstate.smProgState = SM.PROG_OK;
            }
        } else if (tlvType == TLV.SMP1) {
            engineHost.showError(sessionID, "Error during verification (step 1)");
        } else if (tlvType == TLV.SMP2 && nextMsg == SM.EXPECT2) {
            byte[] nextmsg = SM.step3(smstate, tlv.getValue());
            if (smstate.smProgState != SM.PROG_CHEATED) {
                /* Send msg with next smp msg content */
                TLV sendtlv = new TLV(TLV.SMP3, nextmsg);
                smstate.nextExpected = SM.EXPECT4;
                return makeTlvList(sendtlv);
            } else {
                engineHost.showError(sessionID, "Peer attempted to cheat during verification");
                smstate.nextExpected = SM.EXPECT1;
                smstate.smProgState = SM.PROG_OK;
            }
        } else if (tlvType == TLV.SMP2) {
            engineHost.showError(sessionID, "Error during verification (step 2)");
        } else if (tlvType == TLV.SMP3 && nextMsg == SM.EXPECT3) {
            byte[] nextmsg = SM.step4(smstate, tlv.getValue());
            // The responder knows the outcome after step 4; inform the key manager.
            notifyKeyManager();
            if (smstate.smProgState != SM.PROG_CHEATED) {
                /* Send msg with next smp msg content */
                TLV sendtlv = new TLV(TLV.SMP4, nextmsg);
                smstate.nextExpected = SM.EXPECT1;
                return makeTlvList(sendtlv);
            } else {
                engineHost.showError(sessionID, "Peer attempted to cheat during verification");
                smstate.nextExpected = SM.EXPECT1;
                smstate.smProgState = SM.PROG_OK;
            }
        } else if (tlvType == TLV.SMP3) {
            engineHost.showError(sessionID, "Error during verification (step 3)");
        } else if (tlvType == TLV.SMP4 && nextMsg == SM.EXPECT4) {
            SM.step5(smstate, tlv.getValue());
            // The initiator knows the outcome after step 5; inform the key manager.
            notifyKeyManager();
            if (smstate.smProgState != SM.PROG_CHEATED) {
                smstate.nextExpected = SM.EXPECT1;
            } else {
                engineHost.showError(sessionID, "Peer attempted to cheat during verification");
                smstate.nextExpected = SM.EXPECT1;
                smstate.smProgState = SM.PROG_OK;
            }
        } else if (tlvType == TLV.SMP4) {
            engineHost.showError(sessionID, "Error during verification (step 4)");
        } else if (tlvType == TLV.SMP_ABORT) {
            smstate.nextExpected = SM.EXPECT1;
        }
        // Nothing to send
        return null;
    }

    /** Push the SMP outcome into the key manager and surface failures to the host. */
    private void notifyKeyManager() {
        if (smstate.smProgState == SM.PROG_SUCCEEDED) {
            // A received question means the peer verified us, not the reverse.
            if (smstate.isReceivedQuestion())
                keyManager.remoteVerifiedUs(sessionID);
            else
                keyManager.verify(sessionID);
        } else {
            engineHost.showError(sessionID, "verification failed - check answer");
            keyManager.unverify(sessionID);
        }
    }

    /** Wrap a single TLV in a one-element list for the caller to transmit. */
    private List<TLV> makeTlvList(TLV sendtlv) {
        List<TLV> tlvs = new ArrayList<TLV>(1);
        tlvs.add(sendtlv);
        return tlvs;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.dbcp.dbcp2.datasources;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import javax.naming.Context;
import javax.naming.Name;
import javax.naming.RefAddr;
import javax.naming.Reference;
import javax.naming.spi.ObjectFactory;
import org.apache.tomcat.dbcp.dbcp2.ListException;
import org.apache.tomcat.dbcp.dbcp2.Utils;
/**
* A JNDI ObjectFactory which creates <code>SharedPoolDataSource</code>s or <code>PerUserPoolDataSource</code>s
*
* @since 2.0
*/
abstract class InstanceKeyDataSourceFactory implements ObjectFactory {

    /** Registry of all live data sources created by this factory, keyed by instance key or JNDI name. */
    private static final Map<String, InstanceKeyDataSource> INSTANCE_MAP = new ConcurrentHashMap<>();

    /**
     * Closes all pools associated with this class.
     *
     * @throws Exception
     *             a {@link ListException} containing all exceptions thrown by {@link InstanceKeyDataSource#close()}
     * @see InstanceKeyDataSource#close()
     * @see ListException
     * @since 2.4.0 throws a {@link ListException} instead of, in 2.3.0 and before, the first exception thrown by
     *        {@link InstanceKeyDataSource#close()}.
     */
    public static void closeAll() throws Exception {
        // Get iterator to loop over all instances of this data source.
        final List<Throwable> exceptionList = new ArrayList<>(INSTANCE_MAP.size());
        for (final Entry<String, InstanceKeyDataSource> next : INSTANCE_MAP.entrySet()) {
            // Bullet-proof to avoid anything else but problems from InstanceKeyDataSource#close().
            if (next != null) {
                final InstanceKeyDataSource value = next.getValue();
                if (value != null) {
                    try {
                        value.close();
                    } catch (final Exception e) {
                        // Collect every failure; all pools get a close attempt.
                        exceptionList.add(e);
                    }
                }
            }
        }
        INSTANCE_MAP.clear();
        if (!exceptionList.isEmpty()) {
            throw new ListException("Could not close all InstanceKeyDataSource instances.", exceptionList);
        }
    }

    /**
     * Deserializes the provided byte array to create an object.
     *
     * @param data
     *            Data to deserialize to create the configuration parameter.
     *
     * @return The Object created by deserializing the data.
     *
     * @throws ClassNotFoundException
     *             If a class cannot be found during the deserialization of a configuration parameter.
     * @throws IOException
     *             If an I/O error occurs during the deserialization of a configuration parameter.
     */
    protected static final Object deserialize(final byte[] data) throws IOException, ClassNotFoundException {
        // try-with-resources replaces the former manual close in a finally block.
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(data))) {
            return in.readObject();
        }
    }

    /**
     * Registers a new data source under the next free numeric key.
     * Synchronized so that the max-scan and the put happen atomically;
     * the map itself being concurrent does not make that sequence atomic.
     *
     * @param ds the data source to register.
     * @return the key the data source was registered under.
     */
    static synchronized String registerNewInstance(final InstanceKeyDataSource ds) {
        int max = 0;
        for (final String s : INSTANCE_MAP.keySet()) {
            if (s != null) {
                try {
                    max = Math.max(max, Integer.parseInt(s));
                } catch (final NumberFormatException e) {
                    // no sweat, ignore those keys: non-numeric keys come from JNDI names.
                }
            }
        }
        final String instanceKey = String.valueOf(max + 1);
        // Put a placeholder here for now, so other instances will not
        // take our key. We will replace with a pool when ready.
        INSTANCE_MAP.put(instanceKey, ds);
        return instanceKey;
    }

    /**
     * Removes a previously registered instance; a null key is a no-op.
     *
     * @param key the instance key to remove.
     */
    static void removeInstance(final String key) {
        if (key != null) {
            INSTANCE_MAP.remove(key);
        }
    }

    /**
     * Creates an instance of the subclass and sets any properties contained in the Reference.
     *
     * @param ref
     *            The properties to be set on the created DataSource
     *
     * @return A configured DataSource of the appropriate type.
     *
     * @throws ClassNotFoundException
     *             If a class cannot be found during the deserialization of a configuration parameter.
     * @throws IOException
     *             If an I/O error occurs during the deserialization of a configuration parameter.
     */
    protected abstract InstanceKeyDataSource getNewInstance(Reference ref) throws IOException, ClassNotFoundException;

    /**
     * Implements ObjectFactory to create an instance of SharedPoolDataSource or PerUserPoolDataSource
     */
    @Override
    public Object getObjectInstance(final Object refObj, final Name name, final Context context,
            final Hashtable<?, ?> env) throws IOException, ClassNotFoundException {
        // The spec says to return null if we can't create an instance
        // of the reference
        Object obj = null;
        if (refObj instanceof Reference) {
            final Reference ref = (Reference) refObj;
            if (isCorrectClass(ref.getClassName())) {
                final RefAddr refAddr = ref.get("instanceKey");
                if (refAddr != null && refAddr.getContent() != null) {
                    // object was bound to JNDI via Referenceable API.
                    obj = INSTANCE_MAP.get(refAddr.getContent());
                } else {
                    // Tomcat JNDI creates a Reference out of server.xml
                    // <ResourceParam> configuration and passes it to an
                    // instance of the factory given in server.xml.
                    String key = null;
                    if (name != null) {
                        key = name.toString();
                        obj = INSTANCE_MAP.get(key);
                    }
                    if (obj == null) {
                        // Not cached yet: build, configure and cache a new data source.
                        final InstanceKeyDataSource ds = getNewInstance(ref);
                        setCommonProperties(ref, ds);
                        obj = ds;
                        if (key != null) {
                            INSTANCE_MAP.put(key, ds);
                        }
                    }
                }
            }
        }
        return obj;
    }

    /**
     * Tests if className is the value returned from getClass().getName().toString().
     *
     * @param className
     *            The class name to test.
     *
     * @return true if and only if className is the value returned from getClass().getName().toString()
     */
    protected abstract boolean isCorrectClass(String className);

    /** Parses a RefAddr's content as a boolean. */
    boolean parseBoolean(final RefAddr refAddr) {
        return Boolean.parseBoolean(toString(refAddr));
    }

    /** Parses a RefAddr's content as an int. */
    int parseInt(final RefAddr refAddr) {
        return Integer.parseInt(toString(refAddr));
    }

    /** Parses a RefAddr's content as a long. */
    long parseLong(final RefAddr refAddr) {
        return Long.parseLong(toString(refAddr));
    }

    /**
     * Copies every recognized RefAddr entry from the Reference onto the data
     * source. Absent or null-content entries leave the data source's defaults
     * untouched.
     */
    private void setCommonProperties(final Reference ref, final InstanceKeyDataSource ikds)
            throws IOException, ClassNotFoundException {
        RefAddr refAddr = ref.get("dataSourceName");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDataSourceName(toString(refAddr));
        }
        refAddr = ref.get("description");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDescription(toString(refAddr));
        }
        refAddr = ref.get("jndiEnvironment");
        if (refAddr != null && refAddr.getContent() != null) {
            // The environment is stored as a serialized Properties object.
            final byte[] serialized = (byte[]) refAddr.getContent();
            ikds.setJndiEnvironment((Properties) deserialize(serialized));
        }
        refAddr = ref.get("loginTimeout");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setLoginTimeout(Duration.ofSeconds(parseInt(refAddr)));
        }
        // Pool properties
        refAddr = ref.get("blockWhenExhausted");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultBlockWhenExhausted(parseBoolean(refAddr));
        }
        refAddr = ref.get("evictionPolicyClassName");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultEvictionPolicyClassName(toString(refAddr));
        }
        refAddr = ref.get("lifo");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultLifo(parseBoolean(refAddr));
        }
        refAddr = ref.get("maxIdlePerKey");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultMaxIdle(parseInt(refAddr));
        }
        refAddr = ref.get("maxTotalPerKey");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultMaxTotal(parseInt(refAddr));
        }
        refAddr = ref.get("maxWaitMillis");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultMaxWait(Duration.ofMillis(parseLong(refAddr)));
        }
        refAddr = ref.get("minEvictableIdleTimeMillis");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultMinEvictableIdle(Duration.ofMillis(parseLong(refAddr)));
        }
        refAddr = ref.get("minIdlePerKey");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultMinIdle(parseInt(refAddr));
        }
        refAddr = ref.get("numTestsPerEvictionRun");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultNumTestsPerEvictionRun(parseInt(refAddr));
        }
        refAddr = ref.get("softMinEvictableIdleTimeMillis");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultSoftMinEvictableIdle(Duration.ofMillis(parseLong(refAddr)));
        }
        refAddr = ref.get("testOnCreate");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultTestOnCreate(parseBoolean(refAddr));
        }
        refAddr = ref.get("testOnBorrow");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultTestOnBorrow(parseBoolean(refAddr));
        }
        refAddr = ref.get("testOnReturn");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultTestOnReturn(parseBoolean(refAddr));
        }
        refAddr = ref.get("testWhileIdle");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultTestWhileIdle(parseBoolean(refAddr));
        }
        refAddr = ref.get("timeBetweenEvictionRunsMillis");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultDurationBetweenEvictionRuns(Duration.ofMillis(parseLong(refAddr)));
        }
        // Connection factory properties
        refAddr = ref.get("validationQuery");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setValidationQuery(toString(refAddr));
        }
        refAddr = ref.get("validationQueryTimeout");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setValidationQueryTimeout(Duration.ofSeconds(parseInt(refAddr)));
        }
        refAddr = ref.get("rollbackAfterValidation");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setRollbackAfterValidation(parseBoolean(refAddr));
        }
        refAddr = ref.get("maxConnLifetimeMillis");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setMaxConnLifetime(Duration.ofMillis(parseLong(refAddr)));
        }
        // Connection properties
        refAddr = ref.get("defaultAutoCommit");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultAutoCommit(Boolean.valueOf(toString(refAddr)));
        }
        refAddr = ref.get("defaultTransactionIsolation");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultTransactionIsolation(parseInt(refAddr));
        }
        refAddr = ref.get("defaultReadOnly");
        if (refAddr != null && refAddr.getContent() != null) {
            ikds.setDefaultReadOnly(Boolean.valueOf(toString(refAddr)));
        }
    }

    /** Returns the string form of a RefAddr's content; NPEs on null content by design. */
    String toString(final RefAddr refAddr) {
        return refAddr.getContent().toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import javax.cache.Cache;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.query.ContinuousQuery;
import org.apache.ignite.cache.query.ScanQuery;
import org.apache.ignite.cache.query.SpiQuery;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.SqlQuery;
import org.apache.ignite.cache.query.TextQuery;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.lang.IgniteCallable;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;
/**
* Test for scan query with transformer.
*/
public class GridCacheQueryTransformerSelfTest extends GridCommonAbstractTest {
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
    IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

    // Clear any marshaller the base configuration may have set so the node
    // falls back to its default — presumably needed by the withKeepBinary()
    // tests below; TODO confirm.
    cfg.setMarshaller(null);

    return cfg;
}
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
    // Topology shared by all tests: three server nodes plus one client node.
    startGridsMultiThreaded(3);

    startClientGrid();
}
/**
 * Scan query with a transformer that maps each entry to its key:
 * all 50 keys must come back (order unspecified, hence the sort).
 *
 * @throws Exception If failed.
 */
@Test
public void testGetKeys() throws Exception {
    IgniteCache<Integer, String> cache = grid().createCache("test-cache");

    try {
        for (int i = 0; i < 50; i++)
            cache.put(i, "val" + i);

        // Transformer runs remotely on each node owning scanned entries.
        IgniteClosure<Cache.Entry<Integer, String>, Integer> transformer =
            new IgniteClosure<Cache.Entry<Integer, String>, Integer>() {
                @Override public Integer apply(Cache.Entry<Integer, String> e) {
                    return e.getKey();
                }
            };

        List<Integer> keys = cache.query(new ScanQuery<Integer, String>(), transformer).getAll();

        assertEquals(50, keys.size());

        Collections.sort(keys);

        for (int i = 0; i < 50; i++)
            assertEquals(i, keys.get(i).intValue());
    }
    finally {
        // Drop the cache so each test starts from a clean slate.
        cache.destroy();
    }
}
/**
 * Scan query combining a remote filter (keys divisible by 10) with a
 * key-extracting transformer: exactly 5 of the 50 keys survive.
 *
 * @throws Exception If failed.
 */
@Test
public void testGetKeysFiltered() throws Exception {
    IgniteCache<Integer, String> cache = grid().createCache("test-cache");

    try {
        for (int i = 0; i < 50; i++)
            cache.put(i, "val" + i);

        // Filter is applied before the transformer, on the server side.
        IgniteBiPredicate<Integer, String> filter = new IgniteBiPredicate<Integer, String>() {
            @Override public boolean apply(Integer k, String v) {
                return k % 10 == 0;
            }
        };

        IgniteClosure<Cache.Entry<Integer, String>, Integer> transformer =
            new IgniteClosure<Cache.Entry<Integer, String>, Integer>() {
                @Override public Integer apply(Cache.Entry<Integer, String> e) {
                    return e.getKey();
                }
            };

        List<Integer> keys = cache.query(new ScanQuery<>(filter), transformer).getAll();

        assertEquals(5, keys.size());

        Collections.sort(keys);

        for (int i = 0; i < 5; i++)
            assertEquals(i * 10, keys.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Transformer extracting a single field ({@code idx}) from a POJO value,
 * avoiding transfer of the whole object to the caller.
 *
 * @throws Exception If failed.
 */
@Test
public void testGetObjectField() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    try {
        for (int i = 0; i < 50; i++)
            cache.put(i, new Value("str" + i, i * 100));

        IgniteClosure<Cache.Entry<Integer, Value>, Integer> transformer =
            new IgniteClosure<Cache.Entry<Integer, Value>, Integer>() {
                @Override public Integer apply(Cache.Entry<Integer, Value> e) {
                    return e.getValue().idx;
                }
            };

        List<Integer> res = cache.query(new ScanQuery<Integer, Value>(), transformer).getAll();

        assertEquals(50, res.size());

        Collections.sort(res);

        for (int i = 0; i < 50; i++)
            assertEquals(i * 100, res.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Scan query restricted to a single partition: only keys mapped to
 * partition 0 are loaded, so the partition-scoped query must return
 * exactly those values.
 *
 * @throws Exception If failed.
 */
@Test
public void testGetObjectFieldPartitioned() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    Affinity<Integer> affinity = affinity(cache);

    try {
        // Collect the first 50 integer keys that hash to partition 0.
        int[] keys = new int[50];

        for (int i = 0, j = 0; i < keys.length; j++) {
            if (affinity.partition(j) == 0)
                keys[i++] = j;
        }

        for (int i : keys)
            cache.put(i, new Value("str" + i, i * 100));

        IgniteClosure<Cache.Entry<Integer, Value>, Integer> transformer =
            new IgniteClosure<Cache.Entry<Integer, Value>, Integer>() {
                @Override public Integer apply(Cache.Entry<Integer, Value> e) {
                    return e.getValue().idx;
                }
            };

        List<Integer> res = cache.query(new ScanQuery<Integer, Value>().setPartition(0), transformer).getAll();

        assertEquals(50, res.size());

        Collections.sort(res);

        // keys[] is ascending by construction, so sorted results line up with it.
        for (int i = 0; i < keys.length; i++)
            assertEquals(keys[i] * 100, res.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Remote filter on a POJO field ({@code idx} divisible by 1000) combined
 * with a field-extracting transformer.
 *
 * @throws Exception If failed.
 */
@Test
public void testGetObjectFieldFiltered() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    try {
        for (int i = 0; i < 50; i++)
            cache.put(i, new Value("str" + i, i * 100));

        IgniteBiPredicate<Integer, Value> filter = new IgniteBiPredicate<Integer, Value>() {
            @Override public boolean apply(Integer k, Value v) {
                return v.idx % 1000 == 0;
            }
        };

        IgniteClosure<Cache.Entry<Integer, Value>, Integer> transformer =
            new IgniteClosure<Cache.Entry<Integer, Value>, Integer>() {
                @Override public Integer apply(Cache.Entry<Integer, Value> e) {
                    return e.getValue().idx;
                }
            };

        List<Integer> res = cache.query(new ScanQuery<>(filter), transformer).getAll();

        assertEquals(5, res.size());

        Collections.sort(res);

        for (int i = 0; i < 5; i++)
            assertEquals(i * 1000, res.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Transformer over a keep-binary view: entries arrive as BinaryObject and
 * the field is read by name without deserializing the full Value.
 *
 * @throws Exception If failed.
 */
@Test
public void testKeepBinary() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    try {
        for (int i = 0; i < 50; i++)
            cache.put(i, new Value("str" + i, i * 100));

        IgniteCache<Integer, BinaryObject> binaryCache = cache.withKeepBinary();

        IgniteClosure<Cache.Entry<Integer, BinaryObject>, Integer> transformer =
            new IgniteClosure<Cache.Entry<Integer, BinaryObject>, Integer>() {
                @Override public Integer apply(Cache.Entry<Integer, BinaryObject> e) {
                    return e.getValue().field("idx");
                }
            };

        List<Integer> res = binaryCache.query(new ScanQuery<Integer, BinaryObject>(), transformer).getAll();

        assertEquals(50, res.size());

        Collections.sort(res);

        for (int i = 0; i < 50; i++)
            assertEquals(i * 100, res.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Keep-binary scan with both a binary-field filter and a binary-field
 * transformer.
 *
 * @throws Exception If failed.
 */
@Test
public void testKeepBinaryFiltered() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    try {
        for (int i = 0; i < 50; i++)
            cache.put(i, new Value("str" + i, i * 100));

        IgniteCache<Integer, BinaryObject> binaryCache = cache.withKeepBinary();

        // Explicit type witness on field() so the modulo works on int.
        IgniteBiPredicate<Integer, BinaryObject> filter = new IgniteBiPredicate<Integer, BinaryObject>() {
            @Override public boolean apply(Integer k, BinaryObject v) {
                return v.<Integer>field("idx") % 1000 == 0;
            }
        };

        IgniteClosure<Cache.Entry<Integer, BinaryObject>, Integer> transformer =
            new IgniteClosure<Cache.Entry<Integer, BinaryObject>, Integer>() {
                @Override public Integer apply(Cache.Entry<Integer, BinaryObject> e) {
                    return e.getValue().field("idx");
                }
            };

        List<Integer> res = binaryCache.query(new ScanQuery<>(filter), transformer).getAll();

        assertEquals(5, res.size());

        Collections.sort(res);

        for (int i = 0; i < 5; i++)
            assertEquals(i * 1000, res.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Local scan query broadcast to every node: each node scans only its own
 * entries, so the union of all per-node results covers all 50 values.
 *
 * @throws Exception If failed.
 */
@Test
public void testLocal() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    try {
        for (int i = 0; i < 50; i++)
            cache.put(i, new Value("str" + i, i * 100));

        Collection<List<Integer>> lists = grid().compute().broadcast(new IgniteCallable<List<Integer>>() {
            // Injected on the executing node, not serialized from the caller.
            @IgniteInstanceResource
            private Ignite ignite;

            @Override public List<Integer> call() throws Exception {
                IgniteClosure<Cache.Entry<Integer, Value>, Integer> transformer =
                    new IgniteClosure<Cache.Entry<Integer, Value>, Integer>() {
                        @Override public Integer apply(Cache.Entry<Integer, Value> e) {
                            return e.getValue().idx;
                        }
                    };

                return ignite.cache("test-cache").query(new ScanQuery<Integer, Value>().setLocal(true),
                    transformer).getAll();
            }
        });

        List<Integer> res = new ArrayList<>(F.flatCollections(lists));

        assertEquals(50, res.size());

        Collections.sort(res);

        for (int i = 0; i < 50; i++)
            assertEquals(i * 100, res.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Local scan query with a remote filter, broadcast to every node; the
 * combined results must contain exactly the 5 filtered values.
 *
 * @throws Exception If failed.
 */
@Test
public void testLocalFiltered() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    try {
        for (int i = 0; i < 50; i++)
            cache.put(i, new Value("str" + i, i * 100));

        Collection<List<Integer>> lists = grid().compute().broadcast(new IgniteCallable<List<Integer>>() {
            // Injected on the executing node, not serialized from the caller.
            @IgniteInstanceResource
            private Ignite ignite;

            @Override public List<Integer> call() throws Exception {
                IgniteBiPredicate<Integer, Value> filter = new IgniteBiPredicate<Integer, Value>() {
                    @Override public boolean apply(Integer k, Value v) {
                        return v.idx % 1000 == 0;
                    }
                };

                IgniteClosure<Cache.Entry<Integer, Value>, Integer> transformer =
                    new IgniteClosure<Cache.Entry<Integer, Value>, Integer>() {
                        @Override public Integer apply(Cache.Entry<Integer, Value> e) {
                            return e.getValue().idx;
                        }
                    };

                return ignite.cache("test-cache").query(new ScanQuery<>(filter).setLocal(true),
                    transformer).getAll();
            }
        });

        List<Integer> res = new ArrayList<>(F.flatCollections(lists));

        assertEquals(5, res.size());

        Collections.sort(res);

        for (int i = 0; i < 5; i++)
            assertEquals(i * 1000, res.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Local keep-binary scan broadcast to every node: field extraction happens
 * on BinaryObject without deserializing Value on the server side.
 *
 * @throws Exception If failed.
 */
@Test
public void testLocalKeepBinary() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    try {
        for (int i = 0; i < 50; i++)
            cache.put(i, new Value("str" + i, i * 100));

        Collection<List<Integer>> lists = grid().compute().broadcast(new IgniteCallable<List<Integer>>() {
            // Injected on the executing node, not serialized from the caller.
            @IgniteInstanceResource
            private Ignite ignite;

            @Override public List<Integer> call() throws Exception {
                IgniteClosure<Cache.Entry<Integer, BinaryObject>, Integer> transformer =
                    new IgniteClosure<Cache.Entry<Integer, BinaryObject>, Integer>() {
                        @Override public Integer apply(Cache.Entry<Integer, BinaryObject> e) {
                            return e.getValue().field("idx");
                        }
                    };

                return ignite.cache("test-cache").withKeepBinary().query(
                    new ScanQuery<Integer, BinaryObject>().setLocal(true), transformer).getAll();
            }
        });

        List<Integer> res = new ArrayList<>(F.flatCollections(lists));

        assertEquals(50, res.size());

        Collections.sort(res);

        for (int i = 0; i < 50; i++)
            assertEquals(i * 100, res.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Checks filter plus transformer against a {@code withKeepBinary()} view: both
 * the predicate and the closure operate on {@link BinaryObject} values.
 *
 * @throws Exception If failed.
 */
@Test
public void testLocalKeepBinaryFiltered() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    try {
        for (int key = 0; key < 50; key++)
            cache.put(key, new Value("str" + key, key * 100));

        Collection<List<Integer>> perNode = grid().compute().broadcast(new IgniteCallable<List<Integer>>() {
            @IgniteInstanceResource
            private Ignite ignite;

            @Override public List<Integer> call() throws Exception {
                // Filter on the binary field directly; only multiples of 1000 survive.
                IgniteBiPredicate<Integer, BinaryObject> idxFilter = new IgniteBiPredicate<Integer, BinaryObject>() {
                    @Override public boolean apply(Integer key, BinaryObject val) {
                        return val.<Integer>field("idx") % 1000 == 0;
                    }
                };

                IgniteClosure<Cache.Entry<Integer, BinaryObject>, Integer> idxTransformer =
                    new IgniteClosure<Cache.Entry<Integer, BinaryObject>, Integer>() {
                        @Override public Integer apply(Cache.Entry<Integer, BinaryObject> entry) {
                            return entry.getValue().field("idx");
                        }
                    };

                return ignite.cache("test-cache").withKeepBinary().query(new ScanQuery<>(idxFilter).setLocal(true),
                    idxTransformer).getAll();
            }
        });

        List<Integer> merged = new ArrayList<>(F.flatCollections(perNode));

        assertEquals(5, merged.size());

        Collections.sort(merged);

        for (int i = 0; i < 5; i++)
            assertEquals(i * 1000, merged.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Verifies that passing a transformer to any non-SCAN query type
 * (SQL, SQL-fields, text, SPI, continuous) is rejected with
 * {@link UnsupportedOperationException}.
 *
 * @throws Exception If failed.
 */
@Test
public void testUnsupported() throws Exception {
    final IgniteCache<Integer, Integer> cache = grid().createCache("test-cache");

    final IgniteClosure<Cache.Entry<Integer, Integer>, Integer> transformer =
        new IgniteClosure<Cache.Entry<Integer, Integer>, Integer>() {
            @Override public Integer apply(Cache.Entry<Integer, Integer> e) {
                return null;
            }
        };

    try {
        assertTransformerUnsupported(new Callable<Object>() {
            @Override public Object call() throws Exception {
                cache.query(new SqlQuery<Integer, Integer>(Integer.class, "clause"), transformer);

                return null;
            }
        });

        assertTransformerUnsupported(new Callable<Object>() {
            @Override public Object call() throws Exception {
                cache.query(new SqlFieldsQuery("clause"), new IgniteClosure<List<?>, Object>() {
                    @Override public Object apply(List<?> objects) {
                        return null;
                    }
                });

                return null;
            }
        });

        assertTransformerUnsupported(new Callable<Object>() {
            @Override public Object call() throws Exception {
                cache.query(new TextQuery<Integer, Integer>(Integer.class, "clause"), transformer);

                return null;
            }
        });

        assertTransformerUnsupported(new Callable<Object>() {
            @Override public Object call() throws Exception {
                cache.query(new SpiQuery<Integer, Integer>(), transformer);

                return null;
            }
        });

        assertTransformerUnsupported(new Callable<Object>() {
            @Override public Object call() throws Exception {
                cache.query(new ContinuousQuery<Integer, Integer>(), transformer);

                return null;
            }
        });
    }
    finally {
        cache.destroy();
    }
}

/**
 * Asserts that the given query invocation fails with
 * {@link UnsupportedOperationException} carrying the standard
 * "SCAN queries only" message. Extracted to avoid repeating the
 * identical {@code assertThrows} boilerplate for every query type.
 *
 * @param qryCall Query invocation expected to be rejected.
 */
private void assertTransformerUnsupported(Callable<Object> qryCall) {
    GridTestUtils.assertThrows(
        log,
        qryCall,
        UnsupportedOperationException.class,
        "Transformers are supported only for SCAN queries."
    );
}
/**
 * Checks that a transformed scan query returns every entry intact when the
 * page size (3) is much smaller than the entry count (10&nbsp;000), forcing
 * many result pages.
 *
 * @throws Exception If failed.
 */
@Test
public void testPageSize() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    int entryCnt = 10_000;
    int pageSize = 3;

    try {
        for (int key = 0; key < entryCnt; key++)
            cache.put(key, new Value("str" + key, key));

        IgniteClosure<Cache.Entry<Integer, Value>, Integer> idxTransformer =
            new IgniteClosure<Cache.Entry<Integer, Value>, Integer>() {
                @Override public Integer apply(Cache.Entry<Integer, Value> entry) {
                    return entry.getValue().idx;
                }
            };

        ScanQuery<Integer, Value> qry = new ScanQuery<>();

        qry.setPageSize(pageSize);

        List<Integer> res = cache.query(qry, idxTransformer).getAll();

        assertEquals(entryCnt, res.size());

        Collections.sort(res);

        for (int i = 0; i < entryCnt; i++)
            assertEquals(i, res.get(i).intValue());
    }
    finally {
        cache.destroy();
    }
}
/**
 * Checks that resource injection ({@code @IgniteInstanceResource}) is
 * performed on the transformer closure itself when it runs locally: every
 * transformed entry must observe a non-null injected {@link Ignite} instance.
 *
 * @throws Exception If failed.
 */
@Test
public void testLocalInjection() throws Exception {
    IgniteCache<Integer, Value> cache = grid().createCache("test-cache");

    try {
        for (int key = 0; key < 50; key++)
            cache.put(key, new Value("str" + key, key * 100));

        Collection<List<Boolean>> perNode = grid().compute().broadcast(new IgniteCallable<List<Boolean>>() {
            @IgniteInstanceResource
            private Ignite ignite;

            @Override public List<Boolean> call() throws Exception {
                // The transformer reports whether its own injected Ignite field was populated.
                IgniteClosure<Cache.Entry<Integer, Value>, Boolean> injectionProbe =
                    new IgniteClosure<Cache.Entry<Integer, Value>, Boolean>() {
                        @IgniteInstanceResource
                        Ignite ignite;

                        @Override public Boolean apply(Cache.Entry<Integer, Value> entry) {
                            return ignite != null;
                        }
                    };

                return ignite.cache("test-cache").query(new ScanQuery<Integer, Value>().setLocal(true),
                    injectionProbe).getAll();
            }
        });

        List<Boolean> merged = new ArrayList<>(F.flatCollections(perNode));

        assertEquals(50, merged.size());

        for (int i = 0; i < 50; i++)
            assertEquals(Boolean.TRUE, merged.get(i));
    }
    finally {
        cache.destroy();
    }
}
/**
 * Test cache value: a string payload plus an integer index that the scan-query
 * transformers in this class extract and assert on.
 */
private static class Value {
    /** String payload; never read back by the tests, kept to give the value a non-trivial shape. */
    @SuppressWarnings("unused")
    private String str;
    /** Index extracted by the transformers (read directly as {@code v.idx} or via binary field "idx"). */
    private int idx;
    /**
     * @param str String payload.
     * @param idx Integer index.
     */
    public Value(String str, int idx) {
        this.str = str;
        this.idx = idx;
    }
}
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.dossiermgt.model;
import com.liferay.portal.kernel.bean.AutoEscapeBeanHandler;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.util.ProxyUtil;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.impl.BaseModelImpl;
import com.liferay.portal.util.PortalUtil;
import org.oep.dossiermgt.service.ClpSerializer;
import org.oep.dossiermgt.service.EbMessageLocalServiceUtil;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * CLP (Class Loader Proxy) model implementation for {@code EbMessage}. Holds a
 * local copy of every attribute and, when a remote service-builder model is
 * attached via {@link #setEbMessageRemoteModel(BaseModel)}, mirrors each setter
 * call onto that remote instance reflectively (the remote class is loaded by a
 * different class loader, so it cannot be referenced directly).
 *
 * @author trungdk
 */
public class EbMessageClp extends BaseModelImpl<EbMessage> implements EbMessage {
    public EbMessageClp() {
    }

    @Override
    public Class<?> getModelClass() {
        return EbMessage.class;
    }

    @Override
    public String getModelClassName() {
        return EbMessage.class.getName();
    }

    /** The primary key of this model is {@code ebMessageId}. */
    @Override
    public long getPrimaryKey() {
        return _ebMessageId;
    }

    @Override
    public void setPrimaryKey(long primaryKey) {
        setEbMessageId(primaryKey);
    }

    @Override
    public Serializable getPrimaryKeyObj() {
        return _ebMessageId;
    }

    @Override
    public void setPrimaryKeyObj(Serializable primaryKeyObj) {
        setPrimaryKey(((Long)primaryKeyObj).longValue());
    }

    /**
     * Returns a snapshot of all model attributes keyed by attribute name.
     */
    @Override
    public Map<String, Object> getModelAttributes() {
        Map<String, Object> attributes = new HashMap<String, Object>();

        attributes.put("ebMessageId", getEbMessageId());
        attributes.put("companyId", getCompanyId());
        attributes.put("userId", getUserId());
        attributes.put("groupId", getGroupId());
        attributes.put("createDate", getCreateDate());
        attributes.put("messageId", getMessageId());
        attributes.put("cpaId", getCpaId());
        attributes.put("service", getService());
        attributes.put("action", getAction());
        attributes.put("conversationId", getConversationId());
        attributes.put("fromPartyId", getFromPartyId());
        attributes.put("fromPartyType", getFromPartyType());
        attributes.put("toPartyId", getToPartyId());
        attributes.put("toPartyType", getToPartyType());
        attributes.put("refToMessageId", getRefToMessageId());
        attributes.put("status", getStatus());
        attributes.put("statusDescription", getStatusDescription());
        attributes.put("ackMessageId", getAckMessageId());
        attributes.put("ackStatus", getAckStatus());
        attributes.put("ackStatusDescription", getAckStatusDescription());
        attributes.put("messageDescription", getMessageDescription());
        attributes.put("inbound", getInbound());

        return attributes;
    }

    /**
     * Applies the given attribute map. Keys that are absent (or mapped to
     * {@code null}) leave the corresponding attribute unchanged.
     */
    @Override
    public void setModelAttributes(Map<String, Object> attributes) {
        Long ebMessageId = (Long)attributes.get("ebMessageId");

        if (ebMessageId != null) {
            setEbMessageId(ebMessageId);
        }

        Long companyId = (Long)attributes.get("companyId");

        if (companyId != null) {
            setCompanyId(companyId);
        }

        Long userId = (Long)attributes.get("userId");

        if (userId != null) {
            setUserId(userId);
        }

        Long groupId = (Long)attributes.get("groupId");

        if (groupId != null) {
            setGroupId(groupId);
        }

        Date createDate = (Date)attributes.get("createDate");

        if (createDate != null) {
            setCreateDate(createDate);
        }

        String messageId = (String)attributes.get("messageId");

        if (messageId != null) {
            setMessageId(messageId);
        }

        String cpaId = (String)attributes.get("cpaId");

        if (cpaId != null) {
            setCpaId(cpaId);
        }

        String service = (String)attributes.get("service");

        if (service != null) {
            setService(service);
        }

        String action = (String)attributes.get("action");

        if (action != null) {
            setAction(action);
        }

        String conversationId = (String)attributes.get("conversationId");

        if (conversationId != null) {
            setConversationId(conversationId);
        }

        String fromPartyId = (String)attributes.get("fromPartyId");

        if (fromPartyId != null) {
            setFromPartyId(fromPartyId);
        }

        String fromPartyType = (String)attributes.get("fromPartyType");

        if (fromPartyType != null) {
            setFromPartyType(fromPartyType);
        }

        String toPartyId = (String)attributes.get("toPartyId");

        if (toPartyId != null) {
            setToPartyId(toPartyId);
        }

        String toPartyType = (String)attributes.get("toPartyType");

        if (toPartyType != null) {
            setToPartyType(toPartyType);
        }

        String refToMessageId = (String)attributes.get("refToMessageId");

        if (refToMessageId != null) {
            setRefToMessageId(refToMessageId);
        }

        String status = (String)attributes.get("status");

        if (status != null) {
            setStatus(status);
        }

        String statusDescription = (String)attributes.get("statusDescription");

        if (statusDescription != null) {
            setStatusDescription(statusDescription);
        }

        String ackMessageId = (String)attributes.get("ackMessageId");

        if (ackMessageId != null) {
            setAckMessageId(ackMessageId);
        }

        String ackStatus = (String)attributes.get("ackStatus");

        if (ackStatus != null) {
            setAckStatus(ackStatus);
        }

        String ackStatusDescription = (String)attributes.get(
                "ackStatusDescription");

        if (ackStatusDescription != null) {
            setAckStatusDescription(ackStatusDescription);
        }

        String messageDescription = (String)attributes.get("messageDescription");

        if (messageDescription != null) {
            setMessageDescription(messageDescription);
        }

        Integer inbound = (Integer)attributes.get("inbound");

        if (inbound != null) {
            setInbound(inbound);
        }
    }

    @Override
    public long getEbMessageId() {
        return _ebMessageId;
    }

    @Override
    public void setEbMessageId(long ebMessageId) {
        _ebMessageId = ebMessageId;

        callRemoteSetter("setEbMessageId", long.class, ebMessageId);
    }

    @Override
    public long getCompanyId() {
        return _companyId;
    }

    @Override
    public void setCompanyId(long companyId) {
        _companyId = companyId;

        callRemoteSetter("setCompanyId", long.class, companyId);
    }

    @Override
    public long getUserId() {
        return _userId;
    }

    @Override
    public void setUserId(long userId) {
        _userId = userId;

        callRemoteSetter("setUserId", long.class, userId);
    }

    @Override
    public String getUserUuid() throws SystemException {
        return PortalUtil.getUserValue(getUserId(), "uuid", _userUuid);
    }

    @Override
    public void setUserUuid(String userUuid) {
        // Cached locally only; not mirrored to the remote model.
        _userUuid = userUuid;
    }

    @Override
    public long getGroupId() {
        return _groupId;
    }

    @Override
    public void setGroupId(long groupId) {
        _groupId = groupId;

        callRemoteSetter("setGroupId", long.class, groupId);
    }

    @Override
    public Date getCreateDate() {
        return _createDate;
    }

    @Override
    public void setCreateDate(Date createDate) {
        _createDate = createDate;

        callRemoteSetter("setCreateDate", Date.class, createDate);
    }

    @Override
    public String getMessageId() {
        return _messageId;
    }

    @Override
    public void setMessageId(String messageId) {
        _messageId = messageId;

        callRemoteSetter("setMessageId", String.class, messageId);
    }

    @Override
    public String getCpaId() {
        return _cpaId;
    }

    @Override
    public void setCpaId(String cpaId) {
        _cpaId = cpaId;

        callRemoteSetter("setCpaId", String.class, cpaId);
    }

    @Override
    public String getService() {
        return _service;
    }

    @Override
    public void setService(String service) {
        _service = service;

        callRemoteSetter("setService", String.class, service);
    }

    @Override
    public String getAction() {
        return _action;
    }

    @Override
    public void setAction(String action) {
        _action = action;

        callRemoteSetter("setAction", String.class, action);
    }

    @Override
    public String getConversationId() {
        return _conversationId;
    }

    @Override
    public void setConversationId(String conversationId) {
        _conversationId = conversationId;

        callRemoteSetter("setConversationId", String.class, conversationId);
    }

    @Override
    public String getFromPartyId() {
        return _fromPartyId;
    }

    @Override
    public void setFromPartyId(String fromPartyId) {
        _fromPartyId = fromPartyId;

        callRemoteSetter("setFromPartyId", String.class, fromPartyId);
    }

    @Override
    public String getFromPartyType() {
        return _fromPartyType;
    }

    @Override
    public void setFromPartyType(String fromPartyType) {
        _fromPartyType = fromPartyType;

        callRemoteSetter("setFromPartyType", String.class, fromPartyType);
    }

    @Override
    public String getToPartyId() {
        return _toPartyId;
    }

    @Override
    public void setToPartyId(String toPartyId) {
        _toPartyId = toPartyId;

        callRemoteSetter("setToPartyId", String.class, toPartyId);
    }

    @Override
    public String getToPartyType() {
        return _toPartyType;
    }

    @Override
    public void setToPartyType(String toPartyType) {
        _toPartyType = toPartyType;

        callRemoteSetter("setToPartyType", String.class, toPartyType);
    }

    @Override
    public String getRefToMessageId() {
        return _refToMessageId;
    }

    @Override
    public void setRefToMessageId(String refToMessageId) {
        _refToMessageId = refToMessageId;

        callRemoteSetter("setRefToMessageId", String.class, refToMessageId);
    }

    @Override
    public String getStatus() {
        return _status;
    }

    @Override
    public void setStatus(String status) {
        _status = status;

        callRemoteSetter("setStatus", String.class, status);
    }

    @Override
    public String getStatusDescription() {
        return _statusDescription;
    }

    @Override
    public void setStatusDescription(String statusDescription) {
        _statusDescription = statusDescription;

        callRemoteSetter("setStatusDescription", String.class, statusDescription);
    }

    @Override
    public String getAckMessageId() {
        return _ackMessageId;
    }

    @Override
    public void setAckMessageId(String ackMessageId) {
        _ackMessageId = ackMessageId;

        callRemoteSetter("setAckMessageId", String.class, ackMessageId);
    }

    @Override
    public String getAckStatus() {
        return _ackStatus;
    }

    @Override
    public void setAckStatus(String ackStatus) {
        _ackStatus = ackStatus;

        callRemoteSetter("setAckStatus", String.class, ackStatus);
    }

    @Override
    public String getAckStatusDescription() {
        return _ackStatusDescription;
    }

    @Override
    public void setAckStatusDescription(String ackStatusDescription) {
        _ackStatusDescription = ackStatusDescription;

        callRemoteSetter("setAckStatusDescription", String.class,
            ackStatusDescription);
    }

    @Override
    public String getMessageDescription() {
        return _messageDescription;
    }

    @Override
    public void setMessageDescription(String messageDescription) {
        _messageDescription = messageDescription;

        callRemoteSetter("setMessageDescription", String.class,
            messageDescription);
    }

    @Override
    public int getInbound() {
        return _inbound;
    }

    @Override
    public void setInbound(int inbound) {
        _inbound = inbound;

        callRemoteSetter("setInbound", int.class, inbound);
    }

    /**
     * Mirrors a setter invocation onto the attached remote model, if any.
     * Extracted helper: every typed setter previously duplicated this exact
     * reflective dispatch block verbatim.
     *
     * @param methodName name of the setter on the remote model
     * @param parameterType declared parameter type of the remote setter
     * @param value value to pass (primitives arrive boxed, as before)
     * @throws UnsupportedOperationException wrapping any reflection failure,
     *         matching the original per-setter behavior
     */
    private void callRemoteSetter(String methodName, Class<?> parameterType,
        Object value) {
        if (_ebMessageRemoteModel != null) {
            try {
                Class<?> clazz = _ebMessageRemoteModel.getClass();

                Method method = clazz.getMethod(methodName, parameterType);

                method.invoke(_ebMessageRemoteModel, value);
            }
            catch (Exception e) {
                throw new UnsupportedOperationException(e);
            }
        }
    }

    public BaseModel<?> getEbMessageRemoteModel() {
        return _ebMessageRemoteModel;
    }

    public void setEbMessageRemoteModel(BaseModel<?> ebMessageRemoteModel) {
        _ebMessageRemoteModel = ebMessageRemoteModel;
    }

    /**
     * Invokes an arbitrary method on the remote model, translating argument
     * and return values across the class-loader boundary via
     * {@link ClpSerializer}. Primitive parameter types are shared between
     * loaders and used as-is; reference types are re-loaded through the remote
     * model's class loader.
     */
    public Object invokeOnRemoteModel(String methodName,
        Class<?>[] parameterTypes, Object[] parameterValues)
        throws Exception {
        Object[] remoteParameterValues = new Object[parameterValues.length];

        for (int i = 0; i < parameterValues.length; i++) {
            if (parameterValues[i] != null) {
                remoteParameterValues[i] = ClpSerializer.translateInput(parameterValues[i]);
            }
        }

        Class<?> remoteModelClass = _ebMessageRemoteModel.getClass();

        ClassLoader remoteModelClassLoader = remoteModelClass.getClassLoader();

        Class<?>[] remoteParameterTypes = new Class[parameterTypes.length];

        for (int i = 0; i < parameterTypes.length; i++) {
            if (parameterTypes[i].isPrimitive()) {
                remoteParameterTypes[i] = parameterTypes[i];
            }
            else {
                String parameterTypeName = parameterTypes[i].getName();

                remoteParameterTypes[i] = remoteModelClassLoader.loadClass(parameterTypeName);
            }
        }

        Method method = remoteModelClass.getMethod(methodName,
                remoteParameterTypes);

        Object returnValue = method.invoke(_ebMessageRemoteModel,
                remoteParameterValues);

        if (returnValue != null) {
            returnValue = ClpSerializer.translateOutput(returnValue);
        }

        return returnValue;
    }

    /** Persists this model: adds it when new, updates it otherwise. */
    @Override
    public void persist() throws SystemException {
        if (this.isNew()) {
            EbMessageLocalServiceUtil.addEbMessage(this);
        }
        else {
            EbMessageLocalServiceUtil.updateEbMessage(this);
        }
    }

    /** Returns an auto-escaping proxy of this model for safe template output. */
    @Override
    public EbMessage toEscapedModel() {
        return (EbMessage)ProxyUtil.newProxyInstance(EbMessage.class.getClassLoader(),
            new Class[] { EbMessage.class }, new AutoEscapeBeanHandler(this));
    }

    @Override
    public Object clone() {
        EbMessageClp clone = new EbMessageClp();

        clone.setEbMessageId(getEbMessageId());
        clone.setCompanyId(getCompanyId());
        clone.setUserId(getUserId());
        clone.setGroupId(getGroupId());
        clone.setCreateDate(getCreateDate());
        clone.setMessageId(getMessageId());
        clone.setCpaId(getCpaId());
        clone.setService(getService());
        clone.setAction(getAction());
        clone.setConversationId(getConversationId());
        clone.setFromPartyId(getFromPartyId());
        clone.setFromPartyType(getFromPartyType());
        clone.setToPartyId(getToPartyId());
        clone.setToPartyType(getToPartyType());
        clone.setRefToMessageId(getRefToMessageId());
        clone.setStatus(getStatus());
        clone.setStatusDescription(getStatusDescription());
        clone.setAckMessageId(getAckMessageId());
        clone.setAckStatus(getAckStatus());
        clone.setAckStatusDescription(getAckStatusDescription());
        clone.setMessageDescription(getMessageDescription());
        clone.setInbound(getInbound());

        return clone;
    }

    /** Orders models by primary key; uses {@link Long#compare} to avoid overflow. */
    @Override
    public int compareTo(EbMessage ebMessage) {
        return Long.compare(getPrimaryKey(), ebMessage.getPrimaryKey());
    }

    /** Equality is primary-key identity among {@code EbMessageClp} instances. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (!(obj instanceof EbMessageClp)) {
            return false;
        }

        EbMessageClp ebMessage = (EbMessageClp)obj;

        return getPrimaryKey() == ebMessage.getPrimaryKey();
    }

    public Class<?> getClpSerializerClass() {
        return _clpSerializerClass;
    }

    @Override
    public int hashCode() {
        return (int)getPrimaryKey();
    }

    @Override
    public String toString() {
        StringBundler sb = new StringBundler(45);

        sb.append("{ebMessageId=");
        sb.append(getEbMessageId());
        sb.append(", companyId=");
        sb.append(getCompanyId());
        sb.append(", userId=");
        sb.append(getUserId());
        sb.append(", groupId=");
        sb.append(getGroupId());
        sb.append(", createDate=");
        sb.append(getCreateDate());
        sb.append(", messageId=");
        sb.append(getMessageId());
        sb.append(", cpaId=");
        sb.append(getCpaId());
        sb.append(", service=");
        sb.append(getService());
        sb.append(", action=");
        sb.append(getAction());
        sb.append(", conversationId=");
        sb.append(getConversationId());
        sb.append(", fromPartyId=");
        sb.append(getFromPartyId());
        sb.append(", fromPartyType=");
        sb.append(getFromPartyType());
        sb.append(", toPartyId=");
        sb.append(getToPartyId());
        sb.append(", toPartyType=");
        sb.append(getToPartyType());
        sb.append(", refToMessageId=");
        sb.append(getRefToMessageId());
        sb.append(", status=");
        sb.append(getStatus());
        sb.append(", statusDescription=");
        sb.append(getStatusDescription());
        sb.append(", ackMessageId=");
        sb.append(getAckMessageId());
        sb.append(", ackStatus=");
        sb.append(getAckStatus());
        sb.append(", ackStatusDescription=");
        sb.append(getAckStatusDescription());
        sb.append(", messageDescription=");
        sb.append(getMessageDescription());
        sb.append(", inbound=");
        sb.append(getInbound());
        sb.append("}");

        return sb.toString();
    }

    @Override
    public String toXmlString() {
        StringBundler sb = new StringBundler(70);

        sb.append("<model><model-name>");
        sb.append("org.oep.dossiermgt.model.EbMessage");
        sb.append("</model-name>");

        sb.append(
            "<column><column-name>ebMessageId</column-name><column-value><![CDATA[");
        sb.append(getEbMessageId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>companyId</column-name><column-value><![CDATA[");
        sb.append(getCompanyId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>userId</column-name><column-value><![CDATA[");
        sb.append(getUserId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>groupId</column-name><column-value><![CDATA[");
        sb.append(getGroupId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>createDate</column-name><column-value><![CDATA[");
        sb.append(getCreateDate());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>messageId</column-name><column-value><![CDATA[");
        sb.append(getMessageId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>cpaId</column-name><column-value><![CDATA[");
        sb.append(getCpaId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>service</column-name><column-value><![CDATA[");
        sb.append(getService());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>action</column-name><column-value><![CDATA[");
        sb.append(getAction());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>conversationId</column-name><column-value><![CDATA[");
        sb.append(getConversationId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>fromPartyId</column-name><column-value><![CDATA[");
        sb.append(getFromPartyId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>fromPartyType</column-name><column-value><![CDATA[");
        sb.append(getFromPartyType());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>toPartyId</column-name><column-value><![CDATA[");
        sb.append(getToPartyId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>toPartyType</column-name><column-value><![CDATA[");
        sb.append(getToPartyType());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>refToMessageId</column-name><column-value><![CDATA[");
        sb.append(getRefToMessageId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>status</column-name><column-value><![CDATA[");
        sb.append(getStatus());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>statusDescription</column-name><column-value><![CDATA[");
        sb.append(getStatusDescription());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>ackMessageId</column-name><column-value><![CDATA[");
        sb.append(getAckMessageId());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>ackStatus</column-name><column-value><![CDATA[");
        sb.append(getAckStatus());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>ackStatusDescription</column-name><column-value><![CDATA[");
        sb.append(getAckStatusDescription());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>messageDescription</column-name><column-value><![CDATA[");
        sb.append(getMessageDescription());
        sb.append("]]></column-value></column>");
        sb.append(
            "<column><column-name>inbound</column-name><column-value><![CDATA[");
        sb.append(getInbound());
        sb.append("]]></column-value></column>");

        sb.append("</model>");

        return sb.toString();
    }

    private long _ebMessageId;
    private long _companyId;
    private long _userId;
    private String _userUuid;
    private long _groupId;
    private Date _createDate;
    private String _messageId;
    private String _cpaId;
    private String _service;
    private String _action;
    private String _conversationId;
    private String _fromPartyId;
    private String _fromPartyType;
    private String _toPartyId;
    private String _toPartyType;
    private String _refToMessageId;
    private String _status;
    private String _statusDescription;
    private String _ackMessageId;
    private String _ackStatus;
    private String _ackStatusDescription;
    private String _messageDescription;
    private int _inbound;
    private BaseModel<?> _ebMessageRemoteModel;
    private Class<?> _clpSerializerClass = org.oep.dossiermgt.service.ClpSerializer.class;
}
| |
/* Copyright (c) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gdata.model.gd;
import com.google.gdata.model.Element;
import com.google.gdata.model.ElementCreator;
import com.google.gdata.model.ElementKey;
import com.google.gdata.model.ElementMetadata;
import com.google.gdata.model.MetadataRegistry;
import com.google.gdata.model.atom.Entry;
import java.util.List;
/**
* Describes an entry used by a recurrence exception entry link.
*
*
*/
public class RecurrenceExceptionEntry extends Entry {
/**
 * The key for this element. Reuses the id of {@code Entry.KEY} but binds it to
 * the {@code RecurrenceExceptionEntry} element type (hence the
 * {@code @SuppressWarnings("hiding")} on the re-declared constant).
 */
@SuppressWarnings("hiding")
public static final ElementKey<Void,
    RecurrenceExceptionEntry> KEY = ElementKey.of(Entry.KEY.getId(),
    Void.class, RecurrenceExceptionEntry.class);
/**
 * Registers the metadata for this element. Idempotent: returns immediately if
 * {@link #KEY} is already registered.
 *
 * @param registry metadata registry to register into
 */
public static void registerMetadata(MetadataRegistry registry) {
    if (registry.isRegistered(KEY)) {
        return;
    }

    // Register superclass metadata.
    Entry.registerMetadata(registry);

    // The builder for this element
    ElementCreator builder = registry.build(KEY);

    // Local properties: single-valued child elements.
    builder.addElement(Comments.KEY);
    builder.addElement(EventStatus.KEY);
    builder.addElement(OriginalEvent.KEY);
    builder.addElement(Transparency.KEY);
    builder.addElement(Visibility.KEY);
    // Repeatable child elements (when/where/who may occur multiple times).
    builder.addElement(When.KEY).setCardinality(
        ElementMetadata.Cardinality.MULTIPLE);
    builder.addElement(Where.KEY).setCardinality(
        ElementMetadata.Cardinality.MULTIPLE);
    builder.addElement(Who.KEY).setCardinality(
        ElementMetadata.Cardinality.MULTIPLE);
}
/**
* Constructs an instance using the default key.
*/
public RecurrenceExceptionEntry() {
super(KEY);
}
/**
* Constructs a new instance by doing a shallow copy of data from an existing
* {@link Entry} instance.
*
* @param sourceEntry source entry
*/
public RecurrenceExceptionEntry(Entry sourceEntry) {
super(KEY, sourceEntry);
}
/**
* Subclass constructor, allows subclasses to supply their own element key.
*/
protected RecurrenceExceptionEntry(ElementKey<?,
? extends RecurrenceExceptionEntry> key) {
super(key);
}
/**
* Constructs a new instance by doing a shallow copy of data from an existing
* {@link Entry} instance. Will use the given {@link ElementKey} as the key
* for the element. This constructor is used when adapting from one element
* key to another. You cannot call this constructor directly, instead use
* {@link Element#createElement(ElementKey, Element)}.
*
* @param key The key to use for this element.
* @param source source element
*/
protected RecurrenceExceptionEntry(ElementKey<?,
? extends RecurrenceExceptionEntry> key, Entry source) {
super(key, source);
}
@Override
public RecurrenceExceptionEntry lock() {
return (RecurrenceExceptionEntry) super.lock();
}
/**
* Returns the Comments class.
*
* @return Comments class
*/
public Comments getComments() {
return super.getElement(Comments.KEY);
}
/**
* Sets the Comments class.
*
* @param comments Comments class or {@code null} to reset
* @return this to enable chaining setters
*/
public RecurrenceExceptionEntry setComments(Comments comments) {
super.setElement(Comments.KEY, comments);
return this;
}
/**
* Returns whether it has the Comments class.
*
* @return whether it has the Comments class
*/
public boolean hasComments() {
return super.hasElement(Comments.KEY);
}
/**
* Returns the event status.
*
* @return event status
*/
public EventStatus getEventStatus() {
return super.getElement(EventStatus.KEY);
}
/**
* Sets the event status.
*
* @param eventStatus event status or {@code null} to reset
* @return this to enable chaining setters
*/
public RecurrenceExceptionEntry setEventStatus(EventStatus eventStatus) {
super.setElement(EventStatus.KEY, eventStatus);
return this;
}
/**
* Returns whether it has the event status.
*
* @return whether it has the event status
*/
public boolean hasEventStatus() {
return super.hasElement(EventStatus.KEY);
}
/**
* Returns the original event.
*
* @return original event
*/
public OriginalEvent getOriginalEvent() {
return super.getElement(OriginalEvent.KEY);
}
/**
* Sets the original event.
*
* @param originalEvent original event or {@code null} to reset
* @return this to enable chaining setters
*/
public RecurrenceExceptionEntry setOriginalEvent(OriginalEvent originalEvent)
{
super.setElement(OriginalEvent.KEY, originalEvent);
return this;
}
/**
* Returns whether it has the original event.
*
* @return whether it has the original event
*/
public boolean hasOriginalEvent() {
return super.hasElement(OriginalEvent.KEY);
}
/**
* Returns the event transparency.
*
* @return event transparency
*/
public Transparency getTransparency() {
return super.getElement(Transparency.KEY);
}
/**
* Sets the event transparency.
*
* @param transparency event transparency or {@code null} to reset
* @return this to enable chaining setters
*/
public RecurrenceExceptionEntry setTransparency(Transparency transparency) {
super.setElement(Transparency.KEY, transparency);
return this;
}
/**
* Returns whether it has the event transparency.
*
* @return whether it has the event transparency
*/
public boolean hasTransparency() {
return super.hasElement(Transparency.KEY);
}
/**
* Returns the event visibility.
*
* @return event visibility
*/
public Visibility getVisibility() {
return super.getElement(Visibility.KEY);
}
/**
* Sets the event visibility.
*
* @param visibility event visibility or {@code null} to reset
* @return this to enable chaining setters
*/
public RecurrenceExceptionEntry setVisibility(Visibility visibility) {
super.setElement(Visibility.KEY, visibility);
return this;
}
/**
* Returns whether it has the event visibility.
*
* @return whether it has the event visibility
*/
public boolean hasVisibility() {
return super.hasElement(Visibility.KEY);
}
/**
* Returns the time period descriptions.
*
* @return time period descriptions
*/
public List<When> getWhen() {
return super.getElements(When.KEY);
}
/**
* Adds a new time period description.
*
* @param when time period description
*/
public RecurrenceExceptionEntry addWhen(When when) {
super.addElement(when);
return this;
}
/**
* Removes an existing time period description.
*
* @param when time period description
* @return true if the when was removed
*/
public boolean removeWhen(When when) {
return super.removeElement(when);
}
/**
* Removes all existing time period description instances.
*/
public void clearWhen() {
super.removeElement(When.KEY);
}
/**
* Returns whether it has the time period descriptions.
*
* @return whether it has the time period descriptions
*/
public boolean hasWhen() {
return super.hasElement(When.KEY);
}
/**
* Returns the place descriptions.
*
* @return place descriptions
*/
public List<Where> getWhere() {
return super.getElements(Where.KEY);
}
/**
* Adds a new place description.
*
* @param where place description
*/
public RecurrenceExceptionEntry addWhere(Where where) {
super.addElement(where);
return this;
}
/**
* Removes an existing place description.
*
* @param where place description
* @return true if the where was removed
*/
public boolean removeWhere(Where where) {
return super.removeElement(where);
}
/**
* Removes all existing place description instances.
*/
public void clearWhere() {
super.removeElement(Where.KEY);
}
/**
* Returns whether it has the place descriptions.
*
* @return whether it has the place descriptions
*/
public boolean hasWhere() {
return super.hasElement(Where.KEY);
}
/**
* Returns the person descriptions.
*
* @return person descriptions
*/
public List<Who> getWho() {
return super.getElements(Who.KEY);
}
/**
* Adds a new person description.
*
* @param who person description
*/
public RecurrenceExceptionEntry addWho(Who who) {
super.addElement(who);
return this;
}
/**
* Removes an existing person description.
*
* @param who person description
* @return true if the who was removed
*/
public boolean removeWho(Who who) {
return super.removeElement(who);
}
/**
* Removes all existing person description instances.
*/
public void clearWho() {
super.removeElement(Who.KEY);
}
/**
* Returns whether it has the person descriptions.
*
* @return whether it has the person descriptions
*/
public boolean hasWho() {
return super.hasElement(Who.KEY);
}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.internal.cache;
import java.io.IOException;
import com.gemstone.gemfire.DataSerializer;
import com.gemstone.gemfire.cache.util.ObjectSizer;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.internal.DSCODE;
import com.gemstone.gemfire.internal.HeapDataOutputStream;
import com.gemstone.gemfire.internal.NullDataOutputStream;
import com.gemstone.gemfire.internal.cache.lru.Sizeable;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.pdx.PdxInstance;
/**
* Produces instances that implement CachedDeserializable.
* @author Darrel
* @since 5.0.2
*
*/
public class CachedDeserializableFactory {
private static final boolean PREFER_DESERIALIZED = !Boolean
.getBoolean("gemfire.PREFER_SERIALIZED");
private static final boolean PREFER_RAW_OBJECT = GemFireCacheImpl
.gfxdSystem() || Boolean.getBoolean("gemfire.PREFER_RAW_OBJECT");
private static final boolean STORE_ALL_VALUE_FORMS = Boolean
.getBoolean("gemfire.STORE_ALL_VALUE_FORMS");
/**
* Returns true when the cache has been configured to always store the raw
* objects and not wrapped in a CachedDeserializable. All calls to
* CachedDeserializable constructors should first consult this setting to
* check if it should be invoked or not.
*
* This is currently always set for GemFireXD to avoid extra wrapping and
* unneeded serialization/deserialization/size-calculation costs associated
* with CachedDeserializable when using the byte[][] as such will be always
* more efficient.
*/
public static boolean preferObject() {
return PREFER_RAW_OBJECT;
}
/**
* Creates and returns an instance of CachedDeserializable that contains the
* specified byte array.
*
* Always check for {@link #preferObject()} before invoking this.
*/
public static CachedDeserializable create(byte[] v) {
Assert.assertTrue(!PREFER_RAW_OBJECT,
"should not be invoked for gemfire.PREFER_RAW_OBJECT");
return createNoCheck(v);
}
/**
* Creates and returns an instance of CachedDeserializable that contains the
* specified byte array.
*/
static CachedDeserializable createNoCheck(final byte[] v) {
if (STORE_ALL_VALUE_FORMS) {
return new StoreAllCachedDeserializable(v);
}
else if (PREFER_DESERIALIZED) {
if (isPdxEncoded(v) && cachePrefersPdx()) {
return new PreferBytesCachedDeserializable(v);
} else {
return new VMCachedDeserializable(v);
}
} else {
return new PreferBytesCachedDeserializable(v);
}
}
private static boolean isPdxEncoded(byte[] v) {
// assert v != null;
if (v.length > 0) {
return v[0] == DSCODE.PDX;
}
return false;
}
/**
* Creates and returns an instance of CachedDeserializable that contains the
* specified object (that is not a byte[]).
*
* Always check for {@link #preferObject()} before invoking this.
*/
public static CachedDeserializable create(Object object, int serializedSize) {
Assert.assertTrue(!PREFER_RAW_OBJECT,
"should not be invoked for gemfire.PREFER_RAW_OBJECT");
if (STORE_ALL_VALUE_FORMS) {
return new StoreAllCachedDeserializable(object);
}
else if (PREFER_DESERIALIZED) {
if (object instanceof PdxInstance && cachePrefersPdx()) {
return new PreferBytesCachedDeserializable(object);
} else {
return new VMCachedDeserializable(object, serializedSize);
}
} else {
return new PreferBytesCachedDeserializable(object);
}
}
private static boolean cachePrefersPdx() {
GemFireCacheImpl gfc = GemFireCacheImpl.getInstance();
if (gfc != null) {
return gfc.getPdxReadSerialized();
}
return false;
}
/**
* Wrap cd in a new CachedDeserializable.
*/
public static CachedDeserializable create(CachedDeserializable cd) {
Assert.assertTrue(!PREFER_RAW_OBJECT,
"should not be invoked for PREFER_RAW_OBJECT");
if (STORE_ALL_VALUE_FORMS) {
// storeAll cds are immutable just return it w/o wrapping
return cd;
}
else if (PREFER_DESERIALIZED) {
if (cd instanceof PreferBytesCachedDeserializable) {
return cd;
} else {
return new VMCachedDeserializable((VMCachedDeserializable) cd);
}
} else {
// preferBytes cds are immutable so just return it w/o wrapping
return cd;
}
}
/**
* Return the heap overhead in bytes for each CachedDeserializable instance.
*/
public static int overhead() {
// TODO: revisit this code. If we move to per-region cds then this can no longer be static.
// TODO: This method also does not work well with the way off heap is determined using the cache.
if (STORE_ALL_VALUE_FORMS) {
return StoreAllCachedDeserializable.MEM_OVERHEAD;
}
else if (PREFER_RAW_OBJECT) {
return 0;
}
else if (PREFER_DESERIALIZED) {
// PDX: this may instead be PreferBytesCachedDeserializable.MEM_OVERHEAD
return VMCachedDeserializable.MEM_OVERHEAD;
}
else {
return PreferBytesCachedDeserializable.MEM_OVERHEAD;
}
}
/**
* Return the number of bytes the specified byte array will consume
* of heap memory.
*/
public static int getByteSize(byte[] serializedValue) {
// add 4 for the length field of the byte[]
return serializedValue.length + Sizeable.PER_OBJECT_OVERHEAD + 4;
}
public static int getArrayOfBytesSize(final byte[][] value,
final boolean addObjectOverhead) {
int result = 4 * (value.length + 1);
if (addObjectOverhead) {
result += Sizeable.PER_OBJECT_OVERHEAD * (value.length + 1);
}
for (byte[] bytes : value) {
if (bytes != null) {
result += bytes.length;
}
}
return result;
}
/**
* Return an estimate of the amount of heap memory used for the object.
* If it is not a byte[] then account for CachedDeserializable overhead.
* when it is wrapped by a CachedDeserializable.
*/
public static int calcMemSize(Object o) {
return calcMemSize(o, null, true);
}
public static int calcMemSize(Object o, ObjectSizer os, boolean addOverhead) {
return calcMemSize(o, os, addOverhead, true);
}
/**
* If not calcSerializedSize then return -1 if we can't figure out the mem size.
*/
public static int calcMemSize(Object o, ObjectSizer os, boolean addOverhead, boolean calcSerializedSize) {
int result;
if (o instanceof byte[]) {
// does not need to be wrapped so overhead never added
result = getByteSize((byte[])o);
addOverhead = false;
} else if (o == null) {
// does not need to be wrapped so overhead never added
result = 0;
addOverhead = false;
} else if (o instanceof String) {
result = (((String)o).length() * 2)
+ 4 // for the length of the char[]
+ (Sizeable.PER_OBJECT_OVERHEAD * 2) // for String obj and Char[] obj
+ 4 // for obj ref to char[] on String; note should be 8 on 64-bit vm
+ 4 // for offset int field on String
+ 4 // for count int field on String
+ 4 // for hash int field on String
;
} else if (o instanceof byte[][]) {
result = getArrayOfBytesSize((byte[][])o, true);
addOverhead = false;
} else if (o instanceof CachedDeserializable) {
// overhead never added
result = ((CachedDeserializable)o).getSizeInBytes();
addOverhead = false;
} else if (o instanceof Sizeable) {
result = ((Sizeable)o).getSizeInBytes();
} else if (os != null) {
result = os.sizeof(o);
} else if (calcSerializedSize) {
result = Sizeable.PER_OBJECT_OVERHEAD + 4;
NullDataOutputStream dos = new NullDataOutputStream();
try {
DataSerializer.writeObject(o, dos);
result += dos.size();
} catch (IOException ex) {
RuntimeException ex2 = new IllegalArgumentException(LocalizedStrings.CachedDeserializableFactory_COULD_NOT_CALCULATE_SIZE_OF_OBJECT.toLocalizedString());
ex2.initCause(ex);
throw ex2;
}
} else {
// return -1 to signal the caller that we did not compute the size
result = -1;
addOverhead = false;
}
if (addOverhead) {
result += overhead();
}
// GemFireCache.getInstance().getLogger().info("DEBUG calcMemSize: o=<" + o + "> o.class=" + (o != null ? o.getClass() : "<null>") + " os=" + os + " result=" + result, new RuntimeException("STACK"));
return result;
}
/**
* Return an estimate of the number of bytes this object will consume
* when serialized. This is the number of bytes that will be written
* on the wire including the 4 bytes needed to encode the length.
*/
public static int calcSerializedSize(Object o) {
int result;
if (o instanceof byte[]) {
result = getByteSize((byte[])o) - Sizeable.PER_OBJECT_OVERHEAD;
} else if (o instanceof byte[][]) {
result = getArrayOfBytesSize((byte[][])o, false);
} else if (o instanceof CachedDeserializable) {
result = ((CachedDeserializable)o).getSizeInBytes() + 4 - overhead();
} else if (o instanceof Sizeable) {
result = ((Sizeable)o).getSizeInBytes() + 4;
} else if (o instanceof HeapDataOutputStream) {
result = ((HeapDataOutputStream)o).size() + 4;
} else {
result = 4;
NullDataOutputStream dos = new NullDataOutputStream();
try {
DataSerializer.writeObject(o, dos);
result += dos.size();
} catch (IOException ex) {
RuntimeException ex2 = new IllegalArgumentException(LocalizedStrings.CachedDeserializableFactory_COULD_NOT_CALCULATE_SIZE_OF_OBJECT.toLocalizedString());
ex2.initCause(ex);
throw ex2;
}
}
// GemFireCache.getInstance().getLogger().info("DEBUG calcSerializedSize: o=<" + o + "> o.class=" + (o != null ? o.getClass() : "<null>") + " result=" + result, new RuntimeException("STACK"));
return result;
}
/**
* Return how much memory this object will consume
* if it is in serialized form
*/
public static int calcSerializedMemSize(Object o) {
int result = calcSerializedSize(o);
result += Sizeable.PER_OBJECT_OVERHEAD;
if (!(o instanceof byte[])) {
result += overhead();
}
return result;
}
}
| |
/**
* Copyright (c) 2012-2019 Netflix, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.msl.io;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.nio.charset.Charset;
import java.util.Random;
import java.util.Set;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.netflix.msl.MslCryptoException;
import com.netflix.msl.MslEncodingException;
import com.netflix.msl.entityauth.EntityAuthenticationScheme;
import com.netflix.msl.util.Base64;
import com.netflix.msl.util.MockMslContext;
import com.netflix.msl.util.MslContext;
/**
* MSL utilities unit tests.
*
* @author Wesley Miaw <wmiaw@netflix.com>
*/
public class MslEncoderUtilsTest {
/** Encoding charset. */
private static final Charset UTF_8 = Charset.forName("UTF-8");
/** URL-safe Base64 examples. */
private static final String[][] B64_URL_EXAMPLES = new String[][] {
{ "The long winded author is going for a walk while the light breeze bellows in his ears.",
"VGhlIGxvbmcgd2luZGVkIGF1dGhvciBpcyBnb2luZyBmb3IgYSB3YWxrIHdoaWxlIHRoZSBsaWdodCBicmVlemUgYmVsbG93cyBpbiBoaXMgZWFycy4" },
{ "Sometimes porcupines need beds to sleep on.",
"U29tZXRpbWVzIHBvcmN1cGluZXMgbmVlZCBiZWRzIHRvIHNsZWVwIG9uLg" },
{ "Even the restless dreamer enjoys home-cooked foods.",
"RXZlbiB0aGUgcmVzdGxlc3MgZHJlYW1lciBlbmpveXMgaG9tZS1jb29rZWQgZm9vZHMu" }
};
private static final String KEY_BOOLEAN = "boolean";
private static final String KEY_NUMBER = "number";
private static final String KEY_STRING = "string";
private static final String KEY_OBJECT = "object";
private static final String KEY_ARRAY = "array";
private static final int MAX_ELEMENTS = 12;
private static final int MAX_DEPTH = 3;
private static final int MAX_STRING_CHARS = 25;
/**
* @param random random source.
* @return a random string of random length.
*/
private static final String randomString(final Random random) {
final byte[] raw = new byte[1 + random.nextInt(MAX_STRING_CHARS - 1)];
return Base64.encode(raw);
}
/**
* @param random random source.
* @return a MSL object containing no MSL objects or MSL arrays.
* @throws MslEncoderException if there is an error building the MSL object.
*/
private static MslObject createFlatMslObject(final Random random) throws MslEncoderException {
final MslObject mo = new MslObject();
for (int i = 1 + random.nextInt(MAX_ELEMENTS - 1); i > 0; --i) {
switch (random.nextInt(3)) {
case 0:
mo.put(KEY_BOOLEAN + i, random.nextBoolean());
break;
case 1:
mo.put(KEY_NUMBER + i, random.nextInt());
break;
case 2:
mo.put(KEY_STRING + i, randomString(random));
break;
}
}
return mo;
}
/**
* @param random random source.
* @param depth maximum depth. A depth of 1 indicates no children may have
* more children.
* @return a MSL object that may contain MSL objects or MSL arrays.
* @throws MslEncoderException if there is an error building the MSL object.
*/
private static MslObject createDeepMslObject(final Random random, final int depth) throws MslEncoderException {
final MslObject mo = new MslObject();
for (int i = 1 + random.nextInt(MAX_ELEMENTS - 1); i > 0; --i) {
switch (random.nextInt(5)) {
case 0:
mo.put(KEY_BOOLEAN + i, random.nextBoolean());
break;
case 1:
mo.put(KEY_NUMBER + i, random.nextInt());
break;
case 2:
mo.put(KEY_STRING + i, randomString(random));
break;
case 3:
mo.put(KEY_OBJECT + i, (depth > 1) ? createDeepMslObject(random, depth - 1) : createFlatMslObject(random));
break;
case 4:
mo.put(KEY_ARRAY + i, (depth > 1) ? createDeepMslArray(random, depth - 1) : createFlatMslArray(random));
break;
}
}
return mo;
}
/**
* @param random random source.
* @return a MSL array containing no MSL objects or MSL arrays.
* @throws MslEncoderException if there is an error building the MSL array.
*/
private static MslArray createFlatMslArray(final Random random) throws MslEncoderException {
final MslArray ma = new MslArray();
for (int i = 1 + random.nextInt(MAX_ELEMENTS - 1); i > 0; --i) {
switch (random.nextInt(4)) {
case 0:
ma.put(-1, random.nextBoolean());
break;
case 1:
ma.put(-1, random.nextInt());
break;
case 2:
ma.put(-1, randomString(random));
break;
case 3:
ma.put(-1, null);
break;
}
}
return ma;
}
/**
* @param random random source.
* @param depth maximum depth. A depth of 1 indicates no children may have
* more children.
* @return a MSL array that may contain MSL objects or MSL arrays.
* @throws MslEncoderException if there is an error building the MSL array.
*/
private static MslArray createDeepMslArray(final Random random, final int depth) throws MslEncoderException {
final MslArray ma = new MslArray();
for (int i = 1 + random.nextInt(MAX_ELEMENTS - 1); i > 0; --i) {
switch (random.nextInt(4)) {
case 0:
ma.put(-1, random.nextBoolean());
break;
case 1:
ma.put(-1, random.nextInt());
break;
case 2:
ma.put(-1, randomString(random));
break;
case 3:
ma.put(-1, null);
break;
case 4:
ma.put(-1, (depth > 1) ? createDeepMslObject(random, depth - 1) : createFlatMslObject(random));
break;
case 5:
ma.put(-1, (depth > 1) ? createDeepMslArray(random, depth - 1) : createFlatMslArray(random));
break;
}
}
return ma;
}
/**
* @param o the object to change.
* @return a new object with a changed value.
* @throws MslEncoderException if the object type is unknown or there is an error
* parsing/building the MSL objects or arrays.
*/
private static Object changeValue(final Object o) throws MslEncoderException {
final Random random = new Random();
if (o instanceof String) {
return (String)o + "x";
} else if (o instanceof Number) {
return ((Number)o).doubleValue() + 1;
} else if (o instanceof Boolean) {
return !((Boolean)o).booleanValue();
} else if (o instanceof MslObject) {
final MslObject childMo = encoder.createObject(((MslObject)o).getMap());
final Set<String> childNames = childMo.getKeys();
if (childNames.size() > 0) {
final String childName = childNames.toArray(new String[0])[random.nextInt(childNames.size())];
return changeValue(childMo, childName);
} else {
childMo.put(KEY_NUMBER + "1", 1);
return childMo;
}
} else if (o instanceof MslArray) {
final MslArray childMa = encoder.createArray(((MslArray)o).getCollection());
childMa.put(-1, random.nextInt());
return childMa;
} else if (o == null) {
return true;
}
throw new MslEncoderException("Unknown object type " + o.getClass());
}
/**
* @param mo MSL object to create a changed version of.
* @param name name of value to change.
* @return a new MSL object with the value associated with the given name
* randomly changed.
* @throws MslEncoderException if the name does not exist or there is an error
* parsing/building the MSL objects.
*/
private static MslObject changeValue(final MslObject mo, final String name) throws MslEncoderException {
final MslObject newMo = encoder.createObject(mo.getMap());
final Object o = newMo.opt(name);
newMo.put(name, changeValue(o));
return newMo;
}
private static MslArray changeValue(final MslArray ma, final int index) throws MslEncoderException {
final MslArray newMa = encoder.createArray(ma.getCollection());
final Object o = newMa.opt(index);
newMa.put(index, changeValue(o));
return newMa;
}
@BeforeClass
public static void setup() throws MslEncoderException, MslEncodingException, MslCryptoException {
final MslContext ctx = new MockMslContext(EntityAuthenticationScheme.PSK, false);
encoder = ctx.getMslEncoderFactory();
random = new Random();
flatMo = createFlatMslObject(random);
deepMo = createDeepMslObject(random, MAX_DEPTH);
nullMo = null;
flatMa = createFlatMslArray(random);
deepMa = createDeepMslArray(random, MAX_DEPTH);
nullMa = null;
}
@AfterClass
public static void teardown() {
flatMo = null;
deepMo = null;
flatMa = null;
deepMa = null;
random = null;
encoder = null;
}
@Test
public void b64url() {
for (final String[] example : B64_URL_EXAMPLES) {
final String text = example[0];
final String base64 = example[1];
// Encode the text as bytes and as a string.
{
final String encoded = MslEncoderUtils.b64urlEncode(text.getBytes(UTF_8));
final String encodedString = MslEncoderUtils.b64urlEncode(text);
assertEquals(base64, encoded);
assertEquals(base64, encodedString);
}
// Decode the base64 to bytes and to a string.
{
final byte[] decoded = MslEncoderUtils.b64urlDecode(base64);
assertArrayEquals(text.getBytes(UTF_8), decoded);
}
}
}
@Test
public void mslObjectEqual() throws MslEncoderException {
assertTrue(MslEncoderUtils.equalObjects(flatMo, flatMo));
assertEquals(MslEncoderUtils.hashObject(flatMo), MslEncoderUtils.hashObject(flatMo));
final MslObject mo = encoder.createObject(flatMo.getMap());
assertTrue(MslEncoderUtils.equalObjects(flatMo, mo));
assertEquals(MslEncoderUtils.hashObject(flatMo), MslEncoderUtils.hashObject(mo));
}
@Test
public void mslObjectInequal() throws MslEncoderException {
final Set<String> names = flatMo.getKeys();
for (final String name : names) {
final MslObject mo = changeValue(flatMo, name);
assertFalse(MslEncoderUtils.equalObjects(flatMo, mo));
assertNotEquals(MslEncoderUtils.hashObject(flatMo), MslEncoderUtils.hashObject(mo));
}
}
@Test
public void mslObjectNull() throws MslEncoderException {
assertFalse(MslEncoderUtils.equalObjects(null, new MslObject()));
assertFalse(MslEncoderUtils.equalObjects(new MslObject(), null));
assertTrue(MslEncoderUtils.equalObjects(nullMo, nullMo));
assertEquals(MslEncoderUtils.hashObject(nullMo), MslEncoderUtils.hashObject(nullMo));
}
@Test
public void mslObjectChildrenEqual() throws MslEncoderException {
assertTrue(MslEncoderUtils.equalObjects(deepMo, deepMo));
final MslObject mo = encoder.createObject(deepMo.getMap());
assertTrue(MslEncoderUtils.equalObjects(deepMo, mo));
assertEquals(MslEncoderUtils.hashObject(deepMo), MslEncoderUtils.hashObject(mo));
}
@Test
public void mslObjectChildrenInequal() throws MslEncoderException {
final Set<String> names = deepMo.getKeys();
for (final String name : names) {
final MslObject mo = changeValue(deepMo, name);
assertFalse(MslEncoderUtils.equalObjects(deepMo, mo));
assertNotEquals(MslEncoderUtils.hashObject(deepMo), MslEncoderUtils.hashObject(mo));
}
}
@Test
public void mslArrayEqual() throws MslEncoderException {
assertTrue(MslEncoderUtils.equalArrays(flatMa, flatMa));
assertEquals(MslEncoderUtils.hashArray(flatMa), MslEncoderUtils.hashArray(flatMa));
final MslArray ma = encoder.createArray(flatMa.getCollection());
assertTrue(MslEncoderUtils.equalArrays(flatMa, ma));
assertEquals(MslEncoderUtils.hashArray(flatMa), MslEncoderUtils.hashArray(ma));
}
@Test
public void mslArrayInequal() throws MslEncoderException {
final Random random = new Random();
final MslArray ma1 = encoder.createArray(flatMa.getCollection());
if (ma1.size() > 0) {
ma1.remove(random.nextInt(ma1.size()));
assertFalse(MslEncoderUtils.equalArrays(flatMa, ma1));
assertNotEquals(MslEncoderUtils.hashArray(flatMa), MslEncoderUtils.hashArray(ma1));
}
final MslArray ma2 = encoder.createArray(flatMa.getCollection());
ma2.put(-1, random.nextInt());
assertFalse(MslEncoderUtils.equalArrays(flatMa, ma2));
assertNotEquals(MslEncoderUtils.hashArray(flatMa), MslEncoderUtils.hashArray(ma2));
if (flatMa.size() > 0) {
final MslArray ma3 = changeValue(flatMa, random.nextInt(flatMa.size()));
assertFalse(MslEncoderUtils.equalArrays(flatMa, ma3));
assertNotEquals(MslEncoderUtils.hashArray(flatMa), MslEncoderUtils.hashArray(ma3));
}
}
@Test
public void mslArrayNull() throws MslEncoderException {
assertFalse(MslEncoderUtils.equalArrays(null, new MslArray()));
assertFalse(MslEncoderUtils.equalArrays(new MslArray(), null));
assertTrue(MslEncoderUtils.equalArrays(nullMa, nullMa));
assertEquals(MslEncoderUtils.hashArray(nullMa), MslEncoderUtils.hashArray(nullMa));
}
@Test
public void mslArrayChildrenEqual() throws MslEncoderException {
assertTrue(MslEncoderUtils.equalArrays(deepMa, deepMa));
assertEquals(MslEncoderUtils.hashArray(deepMa), MslEncoderUtils.hashArray(deepMa));
final MslArray ma = encoder.createArray(deepMa.getCollection());
assertTrue(MslEncoderUtils.equalArrays(deepMa, ma));
assertEquals(MslEncoderUtils.hashArray(deepMa), MslEncoderUtils.hashArray(ma));
}
@Test
public void mslArrayChildrenInequal() throws MslEncoderException {
final Random random = new Random();
final MslArray ma1 = encoder.createArray(deepMa.getCollection());
if (ma1.size() > 0) {
ma1.remove(random.nextInt(ma1.size()));
assertFalse(MslEncoderUtils.equalArrays(deepMa, ma1));
assertNotEquals(MslEncoderUtils.hashArray(deepMa), MslEncoderUtils.hashArray(ma1));
}
final MslArray ma2 = encoder.createArray(deepMa.getCollection());
ma2.put(-1, random.nextInt());
assertFalse(MslEncoderUtils.equalArrays(deepMa, ma2));
assertNotEquals(MslEncoderUtils.hashArray(deepMa), MslEncoderUtils.hashArray(ma2));
if (deepMa.size() > 0) {
final MslArray ma3 = changeValue(deepMa, random.nextInt(deepMa.size()));
assertFalse(MslEncoderUtils.equalArrays(deepMa, ma3));
assertNotEquals(MslEncoderUtils.hashArray(deepMa), MslEncoderUtils.hashArray(ma3));
}
}
@Test
public void mergeNulls() throws MslEncoderException {
final MslObject mo1 = null;
final MslObject mo2 = null;
final MslObject merged = MslEncoderUtils.merge(mo1, mo2);
assertNull(merged);
}
@Test
public void mergeFirstNull() throws MslEncoderException {
final MslObject mo1 = null;
final MslObject mo2 = deepMo;
final MslObject merged = MslEncoderUtils.merge(mo1, mo2);
assertTrue(MslEncoderUtils.equalObjects(merged, mo2));
assertEquals(MslEncoderUtils.hashObject(merged), MslEncoderUtils.hashObject(mo2));
}
@Test
public void mergeSecondNull() throws MslEncoderException {
final MslObject mo1 = deepMo;
final MslObject mo2 = null;
final MslObject merged = MslEncoderUtils.merge(mo1, mo2);
assertTrue(MslEncoderUtils.equalObjects(merged, mo1));
assertEquals(MslEncoderUtils.hashObject(merged), MslEncoderUtils.hashObject(mo1));
}
@Test
public void mergeOverwriting() throws MslEncoderException {
final MslObject mo1 = createFlatMslObject(random);
final MslObject mo2 = createFlatMslObject(random);
// Insert some shared keys.
mo1.put("key1", true);
mo2.put("key1", "value1");
mo1.put("key2", 17);
mo2.put("key2", 34);
// Ensure second overwrites first.
final MslObject merged = MslEncoderUtils.merge(mo1, mo2);
for (final String key : merged.getKeys()) {
final Object value = merged.get(key);
if (key.equals("key1") || key.equals("key2")) {
assertEquals(mo2.get(key), value);
} else if (mo2.has(key)) {
assertEquals(mo2.get(key), value);
} else {
assertEquals(mo1.get(key), value);
}
}
}
/**
 * The object hash must track object equality through a sequence of
 * mutations: equal objects hash equal, and any divergence in keys breaks
 * both equality and the hash. The mutation order below is significant —
 * each step is checked against the untouched copy.
 */
@Test
public void objectHash() throws MslEncoderException {
    final MslObject mo1 = deepMo;
    // Independent copy built from the same underlying map.
    final MslObject mo2 = new MslObject(mo1.getMap());
    assertTrue(MslEncoderUtils.equalObjects(mo1, mo2));
    assertEquals(MslEncoderUtils.hashObject(mo2), MslEncoderUtils.hashObject(mo1));
    // Rename one key (remove it, re-add under key + "x"): objects diverge.
    final String[] keys = mo1.getKeys().toArray(new String[0]);
    final String key = keys[0];
    final Object value = mo1.get(key);
    mo1.remove(key);
    mo1.put(key + "x", value);
    assertFalse(MslEncoderUtils.equalObjects(mo1, mo2));
    assertNotEquals(MslEncoderUtils.hashObject(mo2), MslEncoderUtils.hashObject(mo1));
    // Restore the original key; the extra key + "x" entry still differs.
    mo1.put(key, value);
    assertFalse(MslEncoderUtils.equalObjects(mo1, mo2));
    assertNotEquals(MslEncoderUtils.hashObject(mo2), MslEncoderUtils.hashObject(mo1));
    // Drop the extra key: objects are identical again, hashes must match.
    mo1.remove(key + "x");
    assertTrue(MslEncoderUtils.equalObjects(mo1, mo2));
    assertEquals(MslEncoderUtils.hashObject(mo2), MslEncoderUtils.hashObject(mo1));
}
/** MSL encoder factory. */
private static MslEncoderFactory encoder;
// Shared randomness for fixture construction.
private static Random random;
// Object fixtures: flat (scalar values only), deep (nested), and null-valued.
private static MslObject flatMo, deepMo, nullMo;
// Array fixtures mirroring the object fixtures above.
private static MslArray flatMa, deepMa, nullMa;
}
| |
package ca.uhn.fhir.rest.server;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.Bundle;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.dstu.resource.Patient;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.annotation.History;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Read;
import ca.uhn.fhir.rest.annotation.Since;
import ca.uhn.fhir.util.PortUtil;
/**
* Created by dsotnikov on 2/25/2014.
*/
public class HistoryTest {

    // Shared across tests; created once in beforeClass(), server torn down in afterClass().
    private static CloseableHttpClient ourClient;
    private static int ourPort;
    private static Server ourServer;

    /**
     * We test this here because of bug 3: at one point VRead would "steal"
     * instance history calls and handle them itself, so a vread request must
     * still be routed to the vread method and not to instanceHistory().
     */
    @Test
    public void testVread() throws Exception {
        {
            HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/123/_history/456");
            HttpResponse status = ourClient.execute(httpGet);
            // NOTE(review): IOUtils.toString without a charset uses the platform
            // default, and getEntity().getContent() is invoked a second time just
            // to close it — assumes the entity returns the same stream instance.
            String responseContent = IOUtils.toString(status.getEntity().getContent());
            IOUtils.closeQuietly(status.getEntity().getContent());
            assertEquals(200, status.getStatusLine().getStatusCode());
            Patient bundle = new FhirContext().newXmlParser().parseResource(Patient.class, responseContent);
            // DummyResourceProvider.vread() sets the family name to "vread".
            assertEquals("vread", bundle.getNameFirstRep().getFamilyFirstRep().getValue());
        }
    }

    /**
     * GET /_history (server-level history) must be served by the plain
     * provider's history() method, returning its two h1 entries.
     */
    @Test
    public void testServerHistory() throws Exception {
        {
            HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/_history");
            HttpResponse status = ourClient.execute(httpGet);
            String responseContent = IOUtils.toString(status.getEntity().getContent());
            IOUtils.closeQuietly(status.getEntity().getContent());
            assertEquals(200, status.getStatusLine().getStatusCode());
            Bundle bundle = new FhirContext().newXmlParser().parseBundle(responseContent);
            assertEquals(2, bundle.getEntries().size());
            // Self links are built from the resource IDs set by DummyPlainProvider.
            assertEquals("http://localhost:" + ourPort +"/Patient/h1/_history/1", bundle.getEntries().get(0).getLinkSelf().getValue());
            assertEquals("http://localhost:" + ourPort +"/Patient/h1/_history/2", bundle.getEntries().get(1).getLinkSelf().getValue());
        }
    }

    /**
     * GET /Patient/123/_history (instance-level history) must be served by
     * the resource provider's instanceHistory() method (ih1 entries).
     */
    @Test
    public void testInstanceHistory() throws Exception {
        {
            HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/123/_history");
            HttpResponse status = ourClient.execute(httpGet);
            String responseContent = IOUtils.toString(status.getEntity().getContent());
            IOUtils.closeQuietly(status.getEntity().getContent());
            assertEquals(200, status.getStatusLine().getStatusCode());
            Bundle bundle = new FhirContext().newXmlParser().parseBundle(responseContent);
            assertEquals(2, bundle.getEntries().size());
            assertEquals("http://localhost:" + ourPort +"/Patient/ih1/_history/1", bundle.getEntries().get(0).getLinkSelf().getValue());
            assertEquals("http://localhost:" + ourPort +"/Patient/ih1/_history/2", bundle.getEntries().get(1).getLinkSelf().getValue());
        }
    }

    /**
     * GET /Patient/_history (type-level history) must be served by the
     * resource provider's no-argument typeHistory() method (th1 entries).
     */
    @Test
    public void testTypeHistory() throws Exception {
        {
            HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/_history");
            HttpResponse status = ourClient.execute(httpGet);
            String responseContent = IOUtils.toString(status.getEntity().getContent());
            IOUtils.closeQuietly(status.getEntity().getContent());
            assertEquals(200, status.getStatusLine().getStatusCode());
            Bundle bundle = new FhirContext().newXmlParser().parseBundle(responseContent);
            assertEquals(2, bundle.getEntries().size());
            assertEquals("http://localhost:" + ourPort +"/Patient/th1/_history/1", bundle.getEntries().get(0).getLinkSelf().getValue());
            assertEquals("http://localhost:" + ourPort +"/Patient/th1/_history/2", bundle.getEntries().get(1).getLinkSelf().getValue());
        }
    }

    /** Stops the embedded Jetty server started in beforeClass(). */
    @AfterClass
    public static void afterClass() throws Exception {
        ourServer.stop();
    }

    /**
     * Starts an embedded Jetty server on a free port, wires the RestfulServer
     * servlet with the dummy providers, and builds the shared HTTP client.
     */
    @BeforeClass
    public static void beforeClass() throws Exception {
        ourPort = PortUtil.findFreePort();
        ourServer = new Server(ourPort);

        DummyPlainProvider plainProvider = new DummyPlainProvider();
        DummyResourceProvider patientProvider = new DummyResourceProvider();

        ServletHandler proxyHandler = new ServletHandler();
        RestfulServer servlet = new RestfulServer();
        servlet.setPlainProviders(plainProvider);
        servlet.setResourceProviders(patientProvider);
        ServletHolder servletHolder = new ServletHolder(servlet);
        proxyHandler.addServletWithMapping(servletHolder, "/*");
        ourServer.setHandler(proxyHandler);
        ourServer.start();

        // 5000 ms time-to-live for pooled connections.
        PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
        HttpClientBuilder builder = HttpClientBuilder.create();
        builder.setConnectionManager(connectionManager);
        ourClient = builder.build();
    }

    /** Patient provider handling vread, instance history, and type history. */
    public static class DummyResourceProvider implements IResourceProvider {

        @Override
        public Class<? extends IResource> getResourceType() {
            return Patient.class;
        }

        // Version-aware read; family name "vread" marks that this method handled the call.
        @Read(version=true)
        public Patient vread(@IdParam IdDt theId) {
            Patient retVal = new Patient();
            retVal.addName().addFamily("vread");
            retVal.setId(theId);
            return retVal;
        }

        // Instance-level history: two fixed versions of Patient/ih1.
        @History
        public List<Patient> instanceHistory(@IdParam IdDt theId) {
            ArrayList<Patient> retVal = new ArrayList<Patient>();

            Patient patient = new Patient();
            patient.setId("Patient/ih1/_history/1");
            patient.addName().addFamily("history");
            retVal.add(patient);

            Patient patient2 = new Patient();
            patient2.setId("Patient/ih1/_history/2");
            patient2.addName().addFamily("history");
            retVal.add(patient2);

            return retVal;
        }

        // Type-level history: two fixed versions of Patient/th1.
        @History
        public List<Patient> typeHistory() {
            ArrayList<Patient> retVal = new ArrayList<Patient>();

            Patient patient = new Patient();
            patient.setId("Patient/th1/_history/1");
            patient.addName().addFamily("history");
            retVal.add(patient);

            Patient patient2 = new Patient();
            patient2.setId("Patient/th1/_history/2");
            patient2.addName().addFamily("history");
            retVal.add(patient2);

            return retVal;
        }
    }

    /**
     * Plain (non-resource) provider supplying server-level history.
     * Created by dsotnikov on 2/25/2014.
     */
    public static class DummyPlainProvider {

        // Server-level history: two fixed versions of Patient/h1.
        @History
        public List<Patient> history(@Since InstantDt theSince) {
            ArrayList<Patient> retVal = new ArrayList<Patient>();

            Patient patient = new Patient();
            patient.setId("Patient/h1/_history/1");
            patient.addName().addFamily("history");
            retVal.add(patient);

            Patient patient2 = new Patient();
            patient2.setId("Patient/h1/_history/2");
            patient2.addName().addFamily("history");
            retVal.add(patient2);

            return retVal;
        }
    }
}
| |
package libnoiseforjava.module;
/**
* Michael Nugent
* Date: 3/9/12
* Time: 6:12 PM
* URL: https://github.com/michaelnugent/libnoiseforjava
* Package: libnoiseforjava.module
*/
/*
* A speed-improved simplex noise algorithm for 2D, 3D and 4D in Java.
*
* Based on example code by Stefan Gustavson (stegu@itn.liu.se).
* Optimisations by Peter Eastman (peastman@drizzle.stanford.edu).
* Better rank ordering method by Stefan Gustavson in 2012.
*
* This could be speeded up even further, but it's useful as it is.
*
* Version 2012-03-09
*
* This code was placed in the public domain by its original author,
* Stefan Gustavson. You may use it as you see fit, but
* attribution is appreciated.
*
* Modified by Michael Nugent (michael@michaelnugent.org) for the
* libnoise framework 20120309
* All libnoise expects 3d, but I've left the 2d and 4d functions in for
* reference.
*
*/
public class Simplex extends ModuleBase { // Simplex noise in 2D, 3D and 4D

    // 12 gradient vectors for 3D noise; the (x,y) components double as the
    // 2D gradient set in getValue() below.
    private static Grad grad3[] = {new Grad(1, 1, 0), new Grad(-1, 1, 0), new Grad(1, -1, 0), new Grad(-1, -1, 0),
            new Grad(1, 0, 1), new Grad(-1, 0, 1), new Grad(1, 0, -1), new Grad(-1, 0, -1),
            new Grad(0, 1, 1), new Grad(0, -1, 1), new Grad(0, 1, -1), new Grad(0, -1, -1)};

    // 32 gradient vectors for 4D noise.
    private static Grad grad4[] = {new Grad(0, 1, 1, 1), new Grad(0, 1, 1, -1), new Grad(0, 1, -1, 1), new Grad(0, 1, -1, -1),
            new Grad(0, -1, 1, 1), new Grad(0, -1, 1, -1), new Grad(0, -1, -1, 1), new Grad(0, -1, -1, -1),
            new Grad(1, 0, 1, 1), new Grad(1, 0, 1, -1), new Grad(1, 0, -1, 1), new Grad(1, 0, -1, -1),
            new Grad(-1, 0, 1, 1), new Grad(-1, 0, 1, -1), new Grad(-1, 0, -1, 1), new Grad(-1, 0, -1, -1),
            new Grad(1, 1, 0, 1), new Grad(1, 1, 0, -1), new Grad(1, -1, 0, 1), new Grad(1, -1, 0, -1),
            new Grad(-1, 1, 0, 1), new Grad(-1, 1, 0, -1), new Grad(-1, -1, 0, 1), new Grad(-1, -1, 0, -1),
            new Grad(1, 1, 1, 0), new Grad(1, 1, -1, 0), new Grad(1, -1, 1, 0), new Grad(1, -1, -1, 0),
            new Grad(-1, 1, 1, 0), new Grad(-1, 1, -1, 0), new Grad(-1, -1, 1, 0), new Grad(-1, -1, -1, 0)};

    // Ken Perlin's canonical permutation of 0..255.
    private static short p[] = {151, 160, 137, 91, 90, 15,
            131, 13, 201, 95, 96, 53, 194, 233, 7, 225, 140, 36, 103, 30, 69, 142, 8, 99, 37, 240, 21, 10, 23,
            190, 6, 148, 247, 120, 234, 75, 0, 26, 197, 62, 94, 252, 219, 203, 117, 35, 11, 32, 57, 177, 33,
            88, 237, 149, 56, 87, 174, 20, 125, 136, 171, 168, 68, 175, 74, 165, 71, 134, 139, 48, 27, 166,
            77, 146, 158, 231, 83, 111, 229, 122, 60, 211, 133, 230, 220, 105, 92, 41, 55, 46, 245, 40, 244,
            102, 143, 54, 65, 25, 63, 161, 1, 216, 80, 73, 209, 76, 132, 187, 208, 89, 18, 169, 200, 196,
            135, 130, 116, 188, 159, 86, 164, 100, 109, 198, 173, 186, 3, 64, 52, 217, 226, 250, 124, 123,
            5, 202, 38, 147, 118, 126, 255, 82, 85, 212, 207, 206, 59, 227, 47, 16, 58, 17, 182, 189, 28, 42,
            223, 183, 170, 213, 119, 248, 152, 2, 44, 154, 163, 70, 221, 153, 101, 155, 167, 43, 172, 9,
            129, 22, 39, 253, 19, 98, 108, 110, 79, 113, 224, 232, 178, 185, 112, 104, 218, 246, 97, 228,
            251, 34, 242, 193, 238, 210, 144, 12, 191, 179, 162, 241, 81, 51, 145, 235, 249, 14, 239, 107,
            49, 192, 214, 31, 181, 199, 106, 157, 184, 84, 204, 176, 115, 121, 50, 45, 127, 4, 150, 254,
            138, 236, 205, 93, 222, 114, 67, 29, 24, 72, 243, 141, 128, 195, 78, 66, 215, 61, 156, 180};

    // To remove the need for index wrapping, double the permutation table length
    private short perm[] = new short[512];
    // perm[i] % 12, precomputed to index grad3 directly.
    private short permMod12[] = new short[512];

    // NOTE(review): seed is stored via the setters below but is never read by
    // getValue()/getValue4d(); it is only referenced in the commented-out 3D
    // variant — confirm whether seeding is intended to affect output.
    private double seed = 0;

    /** Builds the doubled permutation tables from the canonical table p. */
    public Simplex() {
        super(0);
        for (int i = 0; i < 512; i++) {
            perm[i] = p[i & 255];
            permMod12[i] = (short) (perm[i] % 12);
        }
    }

    public double getSeed() {
        return seed;
    }

    public void setSeed(double seed) {
        this.seed = seed;
    }

    public void setSeed(int seed) {
        this.seed = (double) seed;
    }

    // Skewing and unskewing factors for 2, 3, and 4 dimensions
    private static final double F2 = 0.5 * (Math.sqrt(3.0) - 1.0);
    private static final double G2 = (3.0 - Math.sqrt(3.0)) / 6.0;
    // F3/G3 are only used by the commented-out 3D implementation below.
    private static final double F3 = 1.0 / 3.0;
    private static final double G3 = 1.0 / 6.0;
    private static final double F4 = (Math.sqrt(5.0) - 1.0) / 4.0;
    private static final double G4 = (5.0 - Math.sqrt(5.0)) / 20.0;

    // This method is a *lot* faster than using (int)Math.floor(x)
    private static int fastfloor(double x) {
        int xi = (int) x;
        return x < xi ? xi - 1 : xi;
    }

    // Dot products between a gradient vector and a 2/3/4-component offset.
    private static double dot(Grad g, double x, double y) {
        return g.x * x + g.y * y;
    }

    private static double dot(Grad g, double x, double y, double z) {
        return g.x * x + g.y * y + g.z * z;
    }

    private static double dot(Grad g, double x, double y, double z, double w) {
        return g.x * x + g.y * y + g.z * z + g.w * w;
    }

    /**
     * 2D simplex noise. The third parameter is ignored (hence "noop"); it
     * exists so the signature fits the 3-argument libnoise module interface.
     * Returns a value scaled to the interval [-1,1].
     */
    public double getValue(double xin, double yin, double noop) {
        double n0, n1, n2; // Noise contributions from the three corners
        // Skew the input space to determine which simplex cell we're in
        double s = (xin + yin) * F2; // Hairy factor for 2D
        int i = fastfloor(xin + s);
        int j = fastfloor(yin + s);
        double t = (i + j) * G2;
        double X0 = i - t; // Unskew the cell origin back to (x,y) space
        double Y0 = j - t;
        double x0 = xin - X0; // The x,y distances from the cell origin
        double y0 = yin - Y0;
        // For the 2D case, the simplex shape is an equilateral triangle.
        // Determine which simplex we are in.
        int i1, j1; // Offsets for second (middle) corner of simplex in (i,j) coords
        if (x0 > y0) {
            i1 = 1;
            j1 = 0;
        } // lower triangle, XY order: (0,0)->(1,0)->(1,1)
        else {
            i1 = 0;
            j1 = 1;
        } // upper triangle, YX order: (0,0)->(0,1)->(1,1)
        // A step of (1,0) in (i,j) means a step of (1-c,-c) in (x,y), and
        // a step of (0,1) in (i,j) means a step of (-c,1-c) in (x,y), where
        // c = (3-sqrt(3))/6
        double x1 = x0 - i1 + G2; // Offsets for middle corner in (x,y) unskewed coords
        double y1 = y0 - j1 + G2;
        double x2 = x0 - 1.0 + 2.0 * G2; // Offsets for last corner in (x,y) unskewed coords
        double y2 = y0 - 1.0 + 2.0 * G2;
        // Work out the hashed gradient indices of the three simplex corners
        int ii = i & 255;
        int jj = j & 255;
        int gi0 = permMod12[ii + perm[jj]];
        int gi1 = permMod12[ii + i1 + perm[jj + j1]];
        int gi2 = permMod12[ii + 1 + perm[jj + 1]];
        // Calculate the contribution from the three corners; each falls off
        // radially as (0.5 - r^2)^4 and vanishes outside its corner's radius.
        double t0 = 0.5 - x0 * x0 - y0 * y0;
        if (t0 < 0) n0 = 0.0;
        else {
            t0 *= t0;
            n0 = t0 * t0 * dot(grad3[gi0], x0, y0); // (x,y) of grad3 used for 2D gradient
        }
        double t1 = 0.5 - x1 * x1 - y1 * y1;
        if (t1 < 0) n1 = 0.0;
        else {
            t1 *= t1;
            n1 = t1 * t1 * dot(grad3[gi1], x1, y1);
        }
        double t2 = 0.5 - x2 * x2 - y2 * y2;
        if (t2 < 0) n2 = 0.0;
        else {
            t2 *= t2;
            n2 = t2 * t2 * dot(grad3[gi2], x2, y2);
        }
        // Add contributions from each corner to get the final noise value.
        // The result is scaled to return values in the interval [-1,1].
        return 70.0 * (n0 + n1 + n2);
    }

    // // 3D simplex noise
    // public double getValue(double xin, double yin, double zin) {
    //     double n0, n1, n2, n3; // Noise contributions from the four corners
    //     // Skew the input space to determine which simplex cell we're in
    //     xin+=(seed + (seed * 7)) % Double.MAX_VALUE;
    //     xin+=(seed + (seed * 13)) % Double.MAX_VALUE;
    //     xin+=(seed + (seed * 17)) % Double.MAX_VALUE;
    //     double s = (xin+yin+zin)*F3; // Very nice and simple skew factor for 3D
    //     int i = fastfloor(xin+s);
    //     int j = fastfloor(yin+s);
    //     int k = fastfloor(zin+s);
    //     double t = (i+j+k)*G3;
    //     double X0 = i-t; // Unskew the cell origin back to (x,y,z) space
    //     double Y0 = j-t;
    //     double Z0 = k-t;
    //     double x0 = xin-X0; // The x,y,z distances from the cell origin
    //     double y0 = yin-Y0;
    //     double z0 = zin-Z0;
    //     // For the 3D case, the simplex shape is a slightly irregular tetrahedron.
    //     // Determine which simplex we are in.
    //     int i1, j1, k1; // Offsets for second corner of simplex in (i,j,k) coords
    //     int i2, j2, k2; // Offsets for third corner of simplex in (i,j,k) coords
    //     if(x0>=y0) {
    //         if(y0>=z0)
    //         { i1=1; j1=0; k1=0; i2=1; j2=1; k2=0; } // X Y Z order
    //         else if(x0>=z0) { i1=1; j1=0; k1=0; i2=1; j2=0; k2=1; } // X Z Y order
    //         else { i1=0; j1=0; k1=1; i2=1; j2=0; k2=1; } // Z X Y order
    //     }
    //     else { // x0<y0
    //         if(y0<z0) { i1=0; j1=0; k1=1; i2=0; j2=1; k2=1; } // Z Y X order
    //         else if(x0<z0) { i1=0; j1=1; k1=0; i2=0; j2=1; k2=1; } // Y Z X order
    //         else { i1=0; j1=1; k1=0; i2=1; j2=1; k2=0; } // Y X Z order
    //     }
    //     // A step of (1,0,0) in (i,j,k) means a step of (1-c,-c,-c) in (x,y,z),
    //     // a step of (0,1,0) in (i,j,k) means a step of (-c,1-c,-c) in (x,y,z), and
    //     // a step of (0,0,1) in (i,j,k) means a step of (-c,-c,1-c) in (x,y,z), where
    //     // c = 1/6.
    //     double x1 = x0 - i1 + G3; // Offsets for second corner in (x,y,z) coords
    //     double y1 = y0 - j1 + G3;
    //     double z1 = z0 - k1 + G3;
    //     double x2 = x0 - i2 + 2.0*G3; // Offsets for third corner in (x,y,z) coords
    //     double y2 = y0 - j2 + 2.0*G3;
    //     double z2 = z0 - k2 + 2.0*G3;
    //     double x3 = x0 - 1.0 + 3.0*G3; // Offsets for last corner in (x,y,z) coords
    //     double y3 = y0 - 1.0 + 3.0*G3;
    //     double z3 = z0 - 1.0 + 3.0*G3;
    //     // Work out the hashed gradient indices of the four simplex corners
    //     int ii = i & 255;
    //     int jj = j & 255;
    //     int kk = k & 255;
    //
    //     int gi0 = permMod12[ii+perm[jj+perm[kk]]];
    //     int gi1 = permMod12[ii+i1+perm[jj+j1+perm[kk+k1]]];
    //     int gi2 = permMod12[ii+i2+perm[jj+j2+perm[kk+k2]]];
    //     int gi3 = permMod12[ii+1+perm[jj+1+perm[kk+1]]];
    //     // Calculate the contribution from the four corners
    //     double t0 = 0.6 - x0*x0 - y0*y0 - z0*z0;
    //     if(t0<0) n0 = 0.0;
    //     else {
    //         t0 *= t0;
    //         n0 = t0 * t0 * dot(grad3[gi0], x0, y0, z0);
    //     }
    //     double t1 = 0.6 - x1*x1 - y1*y1 - z1*z1;
    //     if(t1<0) n1 = 0.0;
    //     else {
    //         t1 *= t1;
    //         n1 = t1 * t1 * dot(grad3[gi1], x1, y1, z1);
    //     }
    //     double t2 = 0.6 - x2*x2 - y2*y2 - z2*z2;
    //     if(t2<0) n2 = 0.0;
    //     else {
    //         t2 *= t2;
    //         n2 = t2 * t2 * dot(grad3[gi2], x2, y2, z2);
    //     }
    //     double t3 = 0.6 - x3*x3 - y3*y3 - z3*z3;
    //     if(t3<0) n3 = 0.0;
    //     else {
    //         t3 *= t3;
    //         n3 = t3 * t3 * dot(grad3[gi3], x3, y3, z3);
    //     }
    //     // Add contributions from each corner to get the final noise value.
    //     // The result is scaled to stay just inside [-1,1]
    //     return 32.0*(n0 + n1 + n2 + n3);
    // }

    /**
     * 4D simplex noise using the improved simplex rank ordering method
     * (2012-03-09). Returns a value scaled to cover the range [-1,1].
     */
    public double getValue4d(double x, double y, double z, double w) {
        double n0, n1, n2, n3, n4; // Noise contributions from the five corners
        // Skew the (x,y,z,w) space to determine which cell of 24 simplices we're in
        double s = (x + y + z + w) * F4; // Factor for 4D skewing
        int i = fastfloor(x + s);
        int j = fastfloor(y + s);
        int k = fastfloor(z + s);
        int l = fastfloor(w + s);
        double t = (i + j + k + l) * G4; // Factor for 4D unskewing
        double X0 = i - t; // Unskew the cell origin back to (x,y,z,w) space
        double Y0 = j - t;
        double Z0 = k - t;
        double W0 = l - t;
        double x0 = x - X0; // The x,y,z,w distances from the cell origin
        double y0 = y - Y0;
        double z0 = z - Z0;
        double w0 = w - W0;
        // For the 4D case, the simplex is a 4D shape I won't even try to describe.
        // To find out which of the 24 possible simplices we're in, we need to
        // determine the magnitude ordering of x0, y0, z0 and w0.
        // Six pair-wise comparisons are performed between each possible pair
        // of the four coordinates, and the results are used to rank the numbers.
        int rankx = 0;
        int ranky = 0;
        int rankz = 0;
        int rankw = 0;
        if (x0 > y0) rankx++;
        else ranky++;
        if (x0 > z0) rankx++;
        else rankz++;
        if (x0 > w0) rankx++;
        else rankw++;
        if (y0 > z0) ranky++;
        else rankz++;
        if (y0 > w0) ranky++;
        else rankw++;
        if (z0 > w0) rankz++;
        else rankw++;
        int i1, j1, k1, l1; // The integer offsets for the second simplex corner
        int i2, j2, k2, l2; // The integer offsets for the third simplex corner
        int i3, j3, k3, l3; // The integer offsets for the fourth simplex corner
        // simplex[c] is a 4-vector with the numbers 0, 1, 2 and 3 in some order.
        // Many values of c will never occur, since e.g. x>y>z>w makes x<z, y<w and x<w
        // impossible. Only the 24 indices which have non-zero entries make any sense.
        // We use a thresholding to set the coordinates in turn from the largest magnitude.
        // Rank 3 denotes the largest coordinate.
        i1 = rankx >= 3 ? 1 : 0;
        j1 = ranky >= 3 ? 1 : 0;
        k1 = rankz >= 3 ? 1 : 0;
        l1 = rankw >= 3 ? 1 : 0;
        // Rank 2 denotes the second largest coordinate.
        i2 = rankx >= 2 ? 1 : 0;
        j2 = ranky >= 2 ? 1 : 0;
        k2 = rankz >= 2 ? 1 : 0;
        l2 = rankw >= 2 ? 1 : 0;
        // Rank 1 denotes the second smallest coordinate.
        i3 = rankx >= 1 ? 1 : 0;
        j3 = ranky >= 1 ? 1 : 0;
        k3 = rankz >= 1 ? 1 : 0;
        l3 = rankw >= 1 ? 1 : 0;
        // The fifth corner has all coordinate offsets = 1, so no need to compute that.
        double x1 = x0 - i1 + G4; // Offsets for second corner in (x,y,z,w) coords
        double y1 = y0 - j1 + G4;
        double z1 = z0 - k1 + G4;
        double w1 = w0 - l1 + G4;
        double x2 = x0 - i2 + 2.0 * G4; // Offsets for third corner in (x,y,z,w) coords
        double y2 = y0 - j2 + 2.0 * G4;
        double z2 = z0 - k2 + 2.0 * G4;
        double w2 = w0 - l2 + 2.0 * G4;
        double x3 = x0 - i3 + 3.0 * G4; // Offsets for fourth corner in (x,y,z,w) coords
        double y3 = y0 - j3 + 3.0 * G4;
        double z3 = z0 - k3 + 3.0 * G4;
        double w3 = w0 - l3 + 3.0 * G4;
        double x4 = x0 - 1.0 + 4.0 * G4; // Offsets for last corner in (x,y,z,w) coords
        double y4 = y0 - 1.0 + 4.0 * G4;
        double z4 = z0 - 1.0 + 4.0 * G4;
        double w4 = w0 - 1.0 + 4.0 * G4;
        // Work out the hashed gradient indices of the five simplex corners
        int ii = i & 255;
        int jj = j & 255;
        int kk = k & 255;
        int ll = l & 255;
        int gi0 = perm[ii + perm[jj + perm[kk + perm[ll]]]] % 32;
        int gi1 = perm[ii + i1 + perm[jj + j1 + perm[kk + k1 + perm[ll + l1]]]] % 32;
        int gi2 = perm[ii + i2 + perm[jj + j2 + perm[kk + k2 + perm[ll + l2]]]] % 32;
        int gi3 = perm[ii + i3 + perm[jj + j3 + perm[kk + k3 + perm[ll + l3]]]] % 32;
        int gi4 = perm[ii + 1 + perm[jj + 1 + perm[kk + 1 + perm[ll + 1]]]] % 32;
        // Calculate the contribution from the five corners
        double t0 = 0.6 - x0 * x0 - y0 * y0 - z0 * z0 - w0 * w0;
        if (t0 < 0) n0 = 0.0;
        else {
            t0 *= t0;
            n0 = t0 * t0 * dot(grad4[gi0], x0, y0, z0, w0);
        }
        double t1 = 0.6 - x1 * x1 - y1 * y1 - z1 * z1 - w1 * w1;
        if (t1 < 0) n1 = 0.0;
        else {
            t1 *= t1;
            n1 = t1 * t1 * dot(grad4[gi1], x1, y1, z1, w1);
        }
        double t2 = 0.6 - x2 * x2 - y2 * y2 - z2 * z2 - w2 * w2;
        if (t2 < 0) n2 = 0.0;
        else {
            t2 *= t2;
            n2 = t2 * t2 * dot(grad4[gi2], x2, y2, z2, w2);
        }
        double t3 = 0.6 - x3 * x3 - y3 * y3 - z3 * z3 - w3 * w3;
        if (t3 < 0) n3 = 0.0;
        else {
            t3 *= t3;
            n3 = t3 * t3 * dot(grad4[gi3], x3, y3, z3, w3);
        }
        double t4 = 0.6 - x4 * x4 - y4 * y4 - z4 * z4 - w4 * w4;
        if (t4 < 0) n4 = 0.0;
        else {
            t4 *= t4;
            n4 = t4 * t4 * dot(grad4[gi4], x4, y4, z4, w4);
        }
        // Sum up and scale the result to cover the range [-1,1]
        return 27.0 * (n0 + n1 + n2 + n3 + n4);
    }

    // Inner class to speed upp gradient computations
    // (array access is a lot slower than member access)
    private static class Grad {
        double x, y, z, w;

        Grad(double x, double y, double z) {
            this.x = x;
            this.y = y;
            this.z = z;
        }

        Grad(double x, double y, double z, double w) {
            this.x = x;
            this.y = y;
            this.z = z;
            this.w = w;
        }
    }
}
| |
/**
* Java Web Archive Toolkit - Software to read and validate ARC, WARC
* and GZip files. (http://jwat.org/)
* Copyright 2011-2012 Netarkivet.dk (http://netarkivet.dk/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jwat.common;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class TestUtf8 {

    /** Classpath resource containing UTF-8 encoded sample data. */
    String utf8File = "test-utf8.warc";

    /**
     * Reads the UTF-8 test resource, decodes it one code point at a time and
     * re-encodes the decoded characters, asserting that the round trip
     * reproduces the original byte stream exactly.
     */
    @Test
    public void test_utf8_decode_encode() {
        InputStream in;
        byte[] org;
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] tmpBytes = new byte[256];
        ByteArrayInputStream bin;
        StringBuffer sb = new StringBuffer();
        byte[] copy;
        int read;
        int c;
        UTF8 utf8 = new UTF8();
        try {
            // Slurp the raw resource bytes; fail fast if the resource is absent
            // instead of dying later with an NPE.
            in = this.getClass().getClassLoader().getResourceAsStream(utf8File);
            Assert.assertNotNull("Missing test resource: " + utf8File, in);
            while ((read = in.read(tmpBytes, 0, tmpBytes.length)) != -1) {
                out.write(tmpBytes, 0, read);
            }
            in.close();
            org = out.toByteArray();
            out.close();
            out.reset();
            // Decode: every byte sequence in the file must form a valid character.
            bin = new ByteArrayInputStream(org);
            while ((c = bin.read()) != -1) {
                c = utf8.readUtf8(c, bin);
                if (c == -1) {
                    Assert.fail("Unexpected EOF!");
                }
                if (!utf8.bValidChar) {
                    Assert.fail("Unexpected invalid utf8 char!");
                }
                sb.append((char)c);
            }
            // Re-encode the decoded characters and compare byte-for-byte.
            String str = sb.toString();
            for (int i=0; i<str.length(); ++i) {
                c = str.charAt(i);
                utf8.writeUtf8(c, out);
            }
            copy = out.toByteArray();
            out.close();
            out.reset();
            Assert.assertArrayEquals(org, copy);
        } catch (IOException e) {
            // Previously swallowed silently; an I/O error must fail the test,
            // consistent with the other tests in this class.
            Assert.fail("Unexpected exception!");
        }
    }

    /**
     * Encodes every value in [0, 0x110000) and decodes the result, asserting
     * both the char-based and the int-based round trips are lossless.
     */
    @Test
    public void test_utf8_encode_decode() {
        StringBuffer sb = new StringBuffer();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ByteArrayInputStream bin;
        byte[] org;
        byte[] org2;
        int c;
        UTF8 utf8 = new UTF8();
        try {
            // NOTE(review): (char)i truncates values above 0xFFFF, so this
            // section exercises only 16-bit char values (repeated); the
            // int[]-based section below covers the full code point range.
            for (int i=0; i<0x00110000; ++i) {
                sb.append((char)i);
            }
            String str = sb.toString();
            for (int i=0; i<str.length(); ++i) {
                c = str.charAt(i);
                utf8.writeUtf8(c, out);
            }
            org = out.toByteArray();
            out.close();
            out.reset();
            // Decode the encoded bytes and compare against the source string.
            bin = new ByteArrayInputStream(org);
            sb.setLength(0);
            while ((c = bin.read()) != -1) {
                c = utf8.readUtf8(c, bin);
                if (c == -1) {
                    Assert.fail("Unexpected EOF!");
                }
                if (!utf8.bValidChar) {
                    Assert.fail("Unexpected invalid utf8 char!");
                }
                sb.append((char)c);
            }
            String copy = sb.toString();
            Assert.assertEquals(str, copy);
            // Same round trip driven by raw int code points 0 .. 0x10FFFF.
            int[] ints = new int[0x00110000];
            for (int i=0; i<0x00110000; ++i) {
                ints[i] = i;
            }
            for (int i=0; i<ints.length; ++i) {
                c = ints[i];
                utf8.writeUtf8(c, out);
            }
            org2 = out.toByteArray();
            out.close();
            out.reset();
            bin = new ByteArrayInputStream(org2);
            int[] ints2 = new int[0x00110000];
            int idx = 0;
            while ((c = bin.read()) != -1) {
                c = utf8.readUtf8(c, bin);
                if (c == -1) {
                    Assert.fail("Unexpected EOF!");
                }
                if (!utf8.bValidChar) {
                    Assert.fail("Unexpected invalid utf8 char!");
                }
                ints2[idx++] = c;
            }
            Assert.assertEquals(ints2.length, idx);
            Assert.assertArrayEquals(ints, ints2);
        } catch (IOException e) {
            Assert.fail("Unexpected exception!");
        }
    }

    /**
     * Exercises rejection of invalid input: out-of-range code points, stray
     * continuation bytes, corrupted/truncated sequences, and overlong
     * 2/3/4-byte encodings of an ASCII value.
     */
    @Test
    public void test_utf8_invalid() {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] arr;
        ByteArrayInputStream bin;
        int c;
        UTF8 utf8 = new UTF8();
        try {
            // Code points above 0x10FFFF must be refused by the encoder.
            try {
                utf8.writeUtf8(0x00110001, out);
                // Message fixed: reaching here means the expected exception
                // was NOT thrown.
                Assert.fail("Expected exception!");
            } catch (IOException e) {
                // Expected: code point out of range.
            }
            out.close();
            out.reset();
            // Single-byte (ASCII) round trip.
            utf8.writeUtf8(0x1F, out);
            arr = out.toByteArray();
            Assert.assertEquals(1, arr.length);
            bin = new ByteArrayInputStream(arr);
            c = bin.read();
            Assert.assertThat(-1, is(not(equalTo(c))));
            c = utf8.readUtf8(c, bin);
            Assert.assertTrue(utf8.bValidChar);
            Assert.assertEquals(0x1F, c);
            c = bin.read();
            Assert.assertEquals(-1, c);
            out.close();
            out.reset();
            // A lone continuation byte (high bit set) is invalid.
            arr[0] |= 0x80;
            bin = new ByteArrayInputStream(arr);
            c = bin.read();
            Assert.assertThat(-1, is(not(equalTo(c))));
            c = utf8.readUtf8(c, bin);
            Assert.assertFalse(utf8.bValidChar);
            Assert.assertEquals(0, c);
            c = bin.read();
            Assert.assertEquals(-1, c);
            // Four-byte round trip.
            utf8.writeUtf8(0x00012345, out);
            arr = out.toByteArray();
            Assert.assertEquals(4, arr.length);
            bin = new ByteArrayInputStream(arr);
            c = bin.read();
            Assert.assertThat(-1, is(not(equalTo(c))));
            c = utf8.readUtf8(c, bin);
            Assert.assertTrue(utf8.bValidChar);
            Assert.assertEquals(0x00012345, c);
            c = bin.read();
            Assert.assertEquals(-1, c);
            out.close();
            out.reset();
            // Corrupt the final continuation byte of the 4-byte sequence.
            arr[3] |= 0xC0;
            bin = new ByteArrayInputStream(arr);
            c = bin.read();
            Assert.assertThat(-1, is(not(equalTo(c))));
            c = utf8.readUtf8(c, bin);
            Assert.assertFalse(utf8.bValidChar);
            //Assert.assertEquals(0, c);
            c = bin.read();
            Assert.assertEquals(-1, c);
            // Truncated 4-byte sequence: EOF mid-character yields -1.
            byte[] arr2 = new byte[3];
            System.arraycopy(arr, 0, arr2, 0, 3);
            bin = new ByteArrayInputStream(arr2);
            c = bin.read();
            Assert.assertThat(-1, is(not(equalTo(c))));
            c = utf8.readUtf8(c, bin);
            Assert.assertFalse(utf8.bValidChar);
            Assert.assertEquals(-1, c);
            // Overlong 2-byte encoding of 42: decodes but is flagged invalid.
            arr = new byte[2];
            arr[0] = (byte)0xC0;
            arr[1] = (byte)(42 | 0x80);
            bin = new ByteArrayInputStream(arr);
            c = bin.read();
            Assert.assertThat(-1, is(not(equalTo(c))));
            c = utf8.readUtf8(c, bin);
            Assert.assertFalse(utf8.bValidChar);
            Assert.assertEquals(42, c);
            c = bin.read();
            Assert.assertEquals(-1, c);
            // Overlong 3-byte encoding of 42.
            arr = new byte[3];
            arr[0] = (byte)0xE0;
            arr[1] = (byte)0x80;
            arr[2] = (byte)(42 | 0x80);
            bin = new ByteArrayInputStream(arr);
            c = bin.read();
            Assert.assertThat(-1, is(not(equalTo(c))));
            c = utf8.readUtf8(c, bin);
            Assert.assertFalse(utf8.bValidChar);
            Assert.assertEquals(42, c);
            c = bin.read();
            Assert.assertEquals(-1, c);
            // Overlong 4-byte encoding of 42.
            arr = new byte[4];
            arr[0] = (byte)0xF0;
            arr[1] = (byte)0x80;
            arr[2] = (byte)0x80;
            arr[3] = (byte)(42 | 0x80);
            bin = new ByteArrayInputStream(arr);
            c = bin.read();
            Assert.assertThat(-1, is(not(equalTo(c))));
            c = utf8.readUtf8(c, bin);
            Assert.assertFalse(utf8.bValidChar);
            Assert.assertEquals(42, c);
            c = bin.read();
            Assert.assertEquals(-1, c);
        } catch (IOException e) {
            Assert.fail("Unexpected exception!");
        }
    }
}
| |
/*
Copyright 2016, 2017 Institut National de la Recherche Agronomique
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fr.inra.maiage.bibliome.alvisnlp.bibliomefactory.modules.projectors;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
import fr.inra.maiage.bibliome.alvisnlp.bibliomefactory.modules.SectionModule.SectionResolvedObjects;
import fr.inra.maiage.bibliome.alvisnlp.bibliomefactory.modules.trie.TyDIExportProjector;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.Annotation;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.Corpus;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.DefaultNames;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.NameType;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.expressions.ResolverException;
import fr.inra.maiage.bibliome.alvisnlp.core.module.ProcessingContext;
import fr.inra.maiage.bibliome.alvisnlp.core.module.lib.AlvisNLPModule;
import fr.inra.maiage.bibliome.alvisnlp.core.module.lib.Param;
import fr.inra.maiage.bibliome.util.CartesianProduct;
import fr.inra.maiage.bibliome.util.EquivalenceHashSets;
import fr.inra.maiage.bibliome.util.EquivalenceSets;
import fr.inra.maiage.bibliome.util.filelines.EquivFileLines;
import fr.inra.maiage.bibliome.util.filelines.FileLines;
import fr.inra.maiage.bibliome.util.filelines.InvalidFileLineEntry;
import fr.inra.maiage.bibliome.util.filelines.TabularFormat;
import fr.inra.maiage.bibliome.util.newprojector.CharFilter;
import fr.inra.maiage.bibliome.util.newprojector.CharMapper;
import fr.inra.maiage.bibliome.util.newprojector.Dictionary;
import fr.inra.maiage.bibliome.util.newprojector.Match;
import fr.inra.maiage.bibliome.util.newprojector.Matcher;
import fr.inra.maiage.bibliome.util.newprojector.State;
import fr.inra.maiage.bibliome.util.newprojector.chars.Filters;
import fr.inra.maiage.bibliome.util.newprojector.chars.Mappers;
import fr.inra.maiage.bibliome.util.newprojector.states.AllValuesState;
import fr.inra.maiage.bibliome.util.streams.SourceStream;
import fr.inra.maiage.bibliome.util.streams.TargetStream;
@AlvisNLPModule(obsoleteUseInstead=TyDIExportProjector.class)
public abstract class TyDIProjector extends Projector<SectionResolvedObjects,String,Dictionary<String>> {
private String canonicalFormFeature = DefaultNames.getCanonicalFormFeature();
private SourceStream lemmaFile = null;
private SourceStream synonymsFile = null;
private SourceStream quasiSynonymsFile = null;
private SourceStream acronymsFile = null;
private SourceStream mergeFile = null;
private SourceStream typographicVariationsFile = null;
private TargetStream saveDictFile = null;
@Override
protected SectionResolvedObjects createResolvedObjects(ProcessingContext<Corpus> ctx) throws ResolverException {
return new SectionResolvedObjects(ctx, this);
}
@Override
protected Dictionary<String> newDictionary(State<String> root, CharFilter charFilter, CharMapper charMapper) {
return new Dictionary<String>(new AllValuesState<String>(), charFilter, charMapper);
}
@Override
protected void fillDictionary(ProcessingContext<Corpus> ctx, Corpus corpus, Dictionary<String> dict) throws Exception {
Logger logger = getLogger(ctx);
logger.info("reading lemma");
LemmaLines lemmaLines = new LemmaLines(ctx);
BufferedReader r = lemmaFile.getBufferedReader();
lemmaLines.process(r, 0);
r.close();
logger.fine(Integer.toString(lemmaLines.lemmaToTerm.size()) + " entries");
synonymLines.setLogger(logger);
logger.info("reading synonyms");
r = synonymsFile.getBufferedReader();
synonymLines.process(r, lemmaLines.lemmaToTerm);
r.close();
logger.fine(Integer.toString(lemmaLines.lemmaToTerm.size()) + " entries");
logger.info("reading quasi-synonyms");
r = quasiSynonymsFile.getBufferedReader();
synonymLines.process(r, lemmaLines.lemmaToTerm);
r.close();
logger.fine(Integer.toString(lemmaLines.lemmaToTerm.size()) + " entries");
logger.info("saturating merged");
saturateMerged(ctx, lemmaLines.lemmaToTerm);
logger.fine(Integer.toString(lemmaLines.lemmaToTerm.size()) + " entries");
logger.info("reading typographic variants");
EquivalenceSets<String> variants = new EquivalenceHashSets<String>();
loadEquivalenceFile(ctx, typographicVariationsFile, variants);
if (acronymsFile != null) {
logger.info("reading acronyms");
loadEquivalenceFile(ctx, acronymsFile, variants);
}
Dictionary<Set<String>> variantsDict = new Dictionary<Set<String>>(new AllValuesState<Set<String>>(), Filters.ACCEPT_ALL, Mappers.IDENTITY);
variantsDict.addEntries(variants.getMap());
Matcher<Set<String>> variantMatcher = new Matcher<Set<String>>(variantsDict, Filters.START_WORD, Filters.END_WORD);
logger.info("saturating with typographic variants");
Map<String,String> map = new LinkedHashMap<String,String>();
for (Map.Entry<String,String> e : lemmaLines.lemmaToTerm.entrySet()) {
String lemma = e.getKey();
List<Match<Set<String>>> matches = searchVariants(variantMatcher, lemma);
if (matches.isEmpty()) {
map.put(lemma, e.getValue());
continue;
}
StringBuilder variant = new StringBuilder();
List<String> suffixes = new ArrayList<String>(matches.size());
List<Collection<String>> variations = new ArrayList<Collection<String>>(matches.size());
int lastPos = 0;
String lastVariation = null;
for (int i = 0; i < matches.size(); ++i) {
Match<Set<String>> w = matches.get(i);
String currentVariation = lemma.substring(w.getStart(), w.getEnd());
if (i == 0) {
variant.append(lemma.substring(0, w.getStart()));
lastPos = w.getEnd();
}
else {
if (w.getStart() < lastPos) {
logger.warning(String.format("overlapping variations: '%s' / '%s'", lastVariation, currentVariation));
continue;
}
suffixes.add(lemma.substring(lastPos, w.getStart()));
lastPos = w.getEnd();
}
lastVariation = currentVariation;
variations.add(w.getState().getValues().iterator().next());
}
suffixes.add(lemma.substring(lastPos));
int prefixLength = variant.length();
CartesianProduct<String> cp = new CartesianProduct<String>(variations);
List<String> v = cp.getElements();
String canonical = e.getValue();
while (cp.next()) {
variant.setLength(prefixLength);
for (int i = 0; i < v.size(); ++i) {
variant.append(v.get(i));
variant.append(suffixes.get(i));
}
String sv = variant.toString();
if (lemmaLines.lemmaToTerm.containsKey(sv)) {
String cv = lemmaLines.lemmaToTerm.get(sv);
if (!cv.equals(canonical)) {
logger.warning(String.format("%s has canonical %s, but variant %s has canonical %s", lemma, canonical, sv, cv));
}
}
map.put(sv, canonical);
}
}
logger.info(String.format("%d entries", map.size()));
/*if (commaKludge) {
logger.info("kludging commas");
Map<String,String[]> toAdd = new LinkedHashMap<String,String[]>();
for (Map.Entry<String,String[]> e : map.entrySet()) {
String form = e.getKey();
if (form.indexOf(',') == -1)
continue;
String commaFreeForm = form.replace(",", "");
String[] entry = e.getValue();
String[] commaFreeEntry = Arrays.copyOf(entry, entry.length);
commaFreeEntry[0] = commaFreeForm;
toAdd.put(commaFreeForm, commaFreeEntry);
}
map.putAll(toAdd);
}*/
logger.info(String.format("%d entries", map.size()));
for (Map.Entry<String,String> e : map.entrySet())
dict.addEntry(e.getKey(), e.getValue());
if (saveDictFile != null) {
logger.info("saving developped terminology into " + saveDictFile.getName());
PrintStream ps = saveDictFile.getPrintStream();
for (Map.Entry<String,String> e : map.entrySet()) {
ps.print(e.getKey());
ps.print('\t');
ps.println(e.getValue());
}
ps.close();
}
}
@Override
protected void handleEntryValues(ProcessingContext<Corpus> ctx, Dictionary<String> dict, Annotation a, String entry) {
a.addFeature(canonicalFormFeature, entry);
}
private void saturateMerged(ProcessingContext<Corpus> ctx, Map<String,String> map) throws FileNotFoundException, UnsupportedEncodingException, IOException, InvalidFileLineEntry {
EquivalenceSets<String> merged = loadEquivalenceFile(ctx, mergeFile, null);
Map<String,String> toAdd = new LinkedHashMap<String,String>();
for (Map.Entry<String,String> e : map.entrySet()) {
String lemma = e.getKey();
if (merged.getMap().containsKey(lemma)) {
String term = e.getValue();
for (String m : merged.getMap().get(lemma))
toAdd.put(m, term);
}
}
map.putAll(toAdd);
}
private static final TabularFormat equivalenceSetsTabularFormat = new TabularFormat();
static {
equivalenceSetsTabularFormat.setMinColumns(2);
equivalenceSetsTabularFormat.setMaxColumns(2);
equivalenceSetsTabularFormat.setSkipBlank(true);
equivalenceSetsTabularFormat.setSkipEmpty(true);
}
private EquivalenceSets<String> loadEquivalenceFile(ProcessingContext<Corpus> ctx, SourceStream source, EquivalenceSets<String> eqSets) throws FileNotFoundException, UnsupportedEncodingException, IOException, InvalidFileLineEntry {
if (eqSets == null)
eqSets = new EquivalenceHashSets<String>();
EquivFileLines efl = new EquivFileLines(equivalenceSetsTabularFormat, getLogger(ctx));
BufferedReader r = source.getBufferedReader();
efl.process(r, eqSets);
r.close();
return eqSets;
}
private static final List<Match<Set<String>>> searchVariants(Matcher<Set<String>> variantMatcher, String lemma) {
variantMatcher.reset();
// variantMatcher.getDictionary().match(variantMatcher, lemma);
variantMatcher.match(lemma);
variantMatcher.endMatches();
return variantMatcher.getMatches();
}
private class LemmaLines extends FileLines<Integer> {
/** The lemma to term. */
private final Map<String,String> lemmaToTerm = new LinkedHashMap<String,String>();
/** The term to lemma. */
private final Map<String,String> termToLemma = new LinkedHashMap<String,String>();
/**
* Instantiates a new lemma lines.
* @param ctx
*
* @throws FileNotFoundException
* the file not found exception
* @throws UnsupportedEncodingException
* the unsupported encoding exception
* @throws IOException
* Signals that an I/O exception has occurred.
*/
private LemmaLines(ProcessingContext<Corpus> ctx) {
super(TyDIProjector.this.getLogger(ctx));
getFormat().setNumColumns(2);
}
/*
* (non-Javadoc)
*
* @see org.bibliome.filelines.FileLines#processArray(int,
* java.lang.String[])
*/
@Override
public void processEntry(Integer foo, int lineno, List<String> entry) {
String lemma = entry.get(0);
String term = entry.get(1);
if (lemmaToTerm.containsKey(lemma)) {
String prevTerm = lemmaToTerm.get(lemma);
if (!prevTerm.equals(term)) {
getLogger().warning(String.format("ambiguous lemma %s: %s / %s", lemma, prevTerm, term));
}
}
lemmaToTerm.put(lemma, term);
if (termToLemma.containsKey(term)) {
String prevLemma = termToLemma.get(term);
if (!prevLemma.equals(lemma)) {
getLogger().warning(String.format("several lemmas for %s: %s / %s", term, prevLemma, lemma));
}
}
termToLemma.put(term, lemma);
}
}
private final FileLines<Map<String,String>> synonymLines = new FileLines<Map<String,String>>() {
@Override
public void processEntry(Map<String,String> lemmaToTerm, int lineno, List<String> entry) {
String lemma = entry.get(0);
String canonical = entry.get(1);
lemmaToTerm.put(lemma, canonical);
}
};
@Param(nameType=NameType.FEATURE)
public String getCanonicalFormFeature() {
return canonicalFormFeature;
}
@Param
public SourceStream getLemmaFile() {
return lemmaFile;
}
@Param
public SourceStream getSynonymsFile() {
return synonymsFile;
}
@Param
public SourceStream getQuasiSynonymsFile() {
return quasiSynonymsFile;
}
@Param(mandatory = false)
public SourceStream getAcronymsFile() {
return acronymsFile;
}
@Param
public SourceStream getMergeFile() {
return mergeFile;
}
@Param(mandatory = false)
public SourceStream getTypographicVariationsFile() {
return typographicVariationsFile;
}
@Param(mandatory = false)
public TargetStream getSaveDictFile() {
return saveDictFile;
}
public void setSaveDictFile(TargetStream saveDictFile) {
this.saveDictFile = saveDictFile;
}
public void setCanonicalFormFeature(String canonicalFormFeature) {
this.canonicalFormFeature = canonicalFormFeature;
}
public void setLemmaFile(SourceStream lemmaFile) {
this.lemmaFile = lemmaFile;
}
public void setSynonymsFile(SourceStream synonymsFile) {
this.synonymsFile = synonymsFile;
}
public void setQuasiSynonymsFile(SourceStream quasiSynonymsFile) {
this.quasiSynonymsFile = quasiSynonymsFile;
}
public void setAcronymsFile(SourceStream acronymsFile) {
this.acronymsFile = acronymsFile;
}
public void setMergeFile(SourceStream mergeFile) {
this.mergeFile = mergeFile;
}
public void setTypographicVariationsFile(SourceStream typographicVariationsFile) {
this.typographicVariationsFile = typographicVariationsFile;
}
@Override
protected String[] addFeaturesToSectionFilter() {
return null;
}
@Override
protected String[] addLayersToSectionFilter() {
return null;
}
}
| |
/* Copyright 2017 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openkilda.wfm.topology;
import org.openkilda.messaging.Topic;
import org.openkilda.wfm.ConfigurationException;
import org.openkilda.wfm.CtrlBoltRef;
import org.openkilda.wfm.LaunchEnvironment;
import org.openkilda.wfm.NameCollisionException;
import org.openkilda.wfm.PropertiesReader;
import org.openkilda.wfm.StreamNameCollisionException;
import org.openkilda.wfm.ctrl.RouteBolt;
import org.openkilda.wfm.kafka.CustomNamedSubscription;
import org.openkilda.wfm.topology.utils.HealthCheckBolt;
import org.openkilda.wfm.topology.utils.KafkaRecordTranslator;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.kafka.bolt.KafkaBolt;
import org.apache.storm.kafka.bolt.mapper.FieldNameBasedTupleToKafkaMapper;
import org.apache.storm.kafka.bolt.selector.DefaultTopicSelector;
import org.apache.storm.kafka.spout.KafkaSpout;
import org.apache.storm.kafka.spout.KafkaSpoutConfig;
import org.apache.storm.thrift.TException;
import org.apache.storm.topology.BoltDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.tuple.Fields;
import org.kohsuke.args4j.CmdLineException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Properties;
/**
 * Base class for storm topologies: wires Kafka spouts/bolts, the ctrl branch,
 * health-check components, and local/remote submission.
 */
public abstract class AbstractTopology implements Topology {
    private static final Logger logger = LoggerFactory.getLogger(AbstractTopology.class);

    protected final LaunchEnvironment env;
    protected final PropertiesReader propertiesReader;
    protected TopologyConfig config;
    // Effective topology name: environment override, falling back to the built-in name.
    protected final String topologyName;
    private final Properties kafkaProperties;

    public static final String SPOUT_ID_CTRL = "ctrl.in";
    public static final String BOLT_ID_CTRL_ROUTE = "ctrl.route";
    public static final String BOLT_ID_CTRL_OUTPUT = "ctrl.out";
    public static final String MESSAGE_FIELD = "message";
    public static final Fields fieldMessage = new Fields(MESSAGE_FIELD);

    protected AbstractTopology(LaunchEnvironment env) throws ConfigurationException {
        this.env = env;

        String builtinName = makeTopologyName();
        String name = env.getTopologyName();
        if (name == null) {
            name = builtinName;
        }

        topologyName = name;
        propertiesReader = env.makePropertiesReader(name, builtinName);
        config = new TopologyConfig(propertiesReader);
        kafkaProperties = makeKafkaProperties();
    }

    /**
     * Submits the topology either to a local in-process cluster or to a remote
     * cluster, depending on configuration.
     */
    protected void setup() throws TException, NameCollisionException {
        if (config.getLocal()) {
            setupLocal();
        } else {
            setupRemote();
        }
    }

    private void setupRemote() throws TException, NameCollisionException {
        Config config = makeStormConfig();
        config.setDebug(false);
        logger.info("Submit Topology: {}", getTopologyName());
        StormSubmitter.submitTopology(getTopologyName(), config, createTopology());
    }

    private void setupLocal() throws NameCollisionException {
        Config config = makeStormConfig();
        config.setDebug(true);

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(getTopologyName(), config, createTopology());
        logger.info("Start Topology: {} (local)", getTopologyName());

        localExecutionMainLoop();

        cluster.shutdown();
    }

    /**
     * Maps a launch-time exception to a process exit code.
     *
     * <p>The throw/catch construct is a deliberate type-dispatch trick: the
     * caught type determines the exit code, exactly like an instanceof chain.
     *
     * @return exit code: 2 = CLI usage, 3 = configuration, 4 = storm, 1 = other
     */
    protected static int handleLaunchException(Exception error) {
        int errorCode;

        try {
            throw error;
        } catch (CmdLineException e) {
            System.err.println(e.getMessage());
            System.err.println();
            System.err.println("Allowed options and arguments:");
            e.getParser().printUsage(System.err);
            errorCode = 2;
        } catch (ConfigurationException e) {
            System.err.println(e.getMessage());
            errorCode = 3;
        } catch (TException e) {
            logger.error("Unable to complete topology setup: {}", e.getMessage());
            errorCode = 4;
        } catch (Exception e) {
            logger.error("Unhandled exception", e);
            errorCode = 1;
        }

        return errorCode;
    }

    /** Builds the shared Kafka producer/consumer properties. */
    private Properties makeKafkaProperties() {
        Properties kafka = new Properties();

        kafka.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        kafka.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        kafka.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getKafkaHosts());
        kafka.setProperty(ConsumerConfig.GROUP_ID_CONFIG, getTopologyName());
        kafka.setProperty("request.required.acks", "1");

        return kafka;
    }

    protected Config makeStormConfig() {
        Config stormConfig = new Config();

        stormConfig.setNumWorkers(config.getWorkers(topologyName));
        if (config.getLocal()) {
            stormConfig.setMaxTaskParallelism(config.getParallelism());
        }

        return stormConfig;
    }

    /**
     * Blocks the main thread while a local topology runs, for the configured
     * execution time.
     */
    protected void localExecutionMainLoop() {
        logger.info("Sleep while local topology is executing");
        try {
            Thread.sleep(config.getLocalExecutionTime());
        } catch (InterruptedException e) {
            logger.warn("Execution process have been interrupted.");
            // Restore the interrupt flag so callers up the stack can observe the
            // interruption (previously it was silently swallowed).
            Thread.currentThread().interrupt();
        }
    }

    @Override
    public final String getTopologyName() {
        return topologyName;
    }

    public TopologyConfig getConfig() {
        return config;
    }

    /**
     * Creates Kafka topic if it does not exist.
     *
     * @param topic Kafka topic
     */
    protected void checkAndCreateTopic(final String topic) {
        // FIXME(nmarchenko): do we need this? need check
    }

    /**
     * Creates Kafka spout.
     *
     * @param topic Kafka topic
     * @param spoutId spout id, used to derive the consumer group name
     * @return {@link KafkaSpout}
     */
    protected KafkaSpout<String, String> createKafkaSpout(String topic, String spoutId) {
        KafkaSpoutConfig<String, String> config = makeKafkaSpoutConfigBuilder(spoutId, topic)
                .build();
        return new KafkaSpout<>(config);
    }

    /**
     * Creates Kafka bolt.
     *
     * @param topic Kafka topic
     * @return {@link KafkaBolt}
     */
    protected KafkaBolt createKafkaBolt(final String topic) {
        return new KafkaBolt<String, String>()
                .withProducerProperties(kafkaProperties)
                .withTopicSelector(new DefaultTopicSelector(topic))
                .withTupleToKafkaMapper(new FieldNameBasedTupleToKafkaMapper<>());
    }

    /**
     * Wires the ctrl branch: ctrl spout -> route bolt -> (target bolts, error output).
     *
     * @param builder topology builder
     * @param targets bolts to receive routed ctrl messages
     */
    protected void createCtrlBranch(TopologyBuilder builder, List<CtrlBoltRef> targets)
            throws StreamNameCollisionException {
        checkAndCreateTopic(config.getKafkaCtrlTopic());

        KafkaSpout kafkaSpout;
        kafkaSpout = createKafkaSpout(config.getKafkaCtrlTopic(), SPOUT_ID_CTRL);
        builder.setSpout(SPOUT_ID_CTRL, kafkaSpout);

        RouteBolt route = new RouteBolt(getTopologyName());
        builder.setBolt(BOLT_ID_CTRL_ROUTE, route)
                .shuffleGrouping(SPOUT_ID_CTRL);

        KafkaBolt kafkaBolt = createKafkaBolt(config.getKafkaCtrlTopic());
        // NOTE(review): STREAM_ID_ERROR looks like a constant accessed via an
        // instance — if static, prefer RouteBolt.STREAM_ID_ERROR; confirm.
        BoltDeclarer outputSetup = builder.setBolt(BOLT_ID_CTRL_OUTPUT, kafkaBolt)
                .shuffleGrouping(BOLT_ID_CTRL_ROUTE, route.STREAM_ID_ERROR);

        for (CtrlBoltRef ref : targets) {
            String boltId = ref.getBoltId();
            ref.getDeclarer().allGrouping(BOLT_ID_CTRL_ROUTE, route.registerEndpoint(boltId));
            outputSetup.shuffleGrouping(boltId, ref.getBolt().getCtrlStreamId());
        }
    }

    /**
     * Creates health-check handler spout and bolts.
     *
     * @param builder topology builder
     * @param prefix component id
     */
    protected void createHealthCheckHandler(TopologyBuilder builder, String prefix) {
        checkAndCreateTopic(Topic.HEALTH_CHECK);

        KafkaSpout healthCheckKafkaSpout = createKafkaSpout(Topic.HEALTH_CHECK, prefix);
        builder.setSpout(prefix + "HealthCheckKafkaSpout", healthCheckKafkaSpout, 1);

        HealthCheckBolt healthCheckBolt = new HealthCheckBolt(prefix);
        builder.setBolt(prefix + "HealthCheckBolt", healthCheckBolt, 1)
                .shuffleGrouping(prefix + "HealthCheckKafkaSpout");

        KafkaBolt healthCheckKafkaBolt = createKafkaBolt(Topic.HEALTH_CHECK);
        builder.setBolt(prefix + "HealthCheckKafkaBolt", healthCheckKafkaBolt, 1)
                .shuffleGrouping(prefix + "HealthCheckBolt", Topic.HEALTH_CHECK);
    }

    /**
     * Builds the spout config for a topic with a per-spout consumer group.
     */
    protected KafkaSpoutConfig.Builder<String, String> makeKafkaSpoutConfigBuilder(String spoutId, String topic) {
        return new KafkaSpoutConfig.Builder<>(
                config.getKafkaHosts(), StringDeserializer.class, StringDeserializer.class,
                new CustomNamedSubscription(topic))
                .setGroupId(makeKafkaGroupName(spoutId))
                .setRecordTranslator(new KafkaRecordTranslator<>())
                // NB: There is an issue with using the default of "earliest uncommitted message" -
                // if we erase the topics, then the committed will be > the latest .. and so
                // we won't process any messages.
                // NOW: we'll miss any messages generated while the topology is down.
                .setFirstPollOffsetStrategy(KafkaSpoutConfig.FirstPollOffsetStrategy.LATEST);
    }

    /** Consumer group name: "{topologyName}__{spoutId}". */
    protected String makeKafkaGroupName(String spoutId) {
        return String.format("%s__%s", getTopologyName(), spoutId);
    }
}
| |
package com.instaclustr.cassandra.backup;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.TimeZone;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMultimap;
import com.instaclustr.cassandra.backup.guice.BackuperFactory;
import com.instaclustr.cassandra.backup.guice.RestorerFactory;
import com.instaclustr.cassandra.backup.impl.ManifestEntry;
import com.instaclustr.cassandra.backup.impl.SSTableUtils;
import com.instaclustr.cassandra.backup.impl.StorageLocation;
import com.instaclustr.cassandra.backup.impl.backup.BackupCommitLogsOperationRequest;
import com.instaclustr.cassandra.backup.impl.backup.BackupOperation;
import com.instaclustr.cassandra.backup.impl.backup.BackupOperationRequest;
import com.instaclustr.cassandra.backup.impl.backup.Backuper;
import com.instaclustr.cassandra.backup.impl.restore.RestoreCommitLogsOperationRequest;
import com.instaclustr.cassandra.backup.impl.restore.RestoreOperation;
import com.instaclustr.cassandra.backup.impl.restore.RestoreOperationRequest;
import com.instaclustr.cassandra.backup.impl.restore.Restorer;
import com.instaclustr.cassandra.backup.local.LocalFileBackuper;
import com.instaclustr.cassandra.backup.local.LocalFileRestorer;
import com.instaclustr.threading.Executors.FixedTasksExecutor;
import jmx.org.apache.cassandra.CassandraVersion;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
public class BackupRestoreTest {
public static final CassandraVersion THREE = CassandraVersion.parse("3.0.0");
private final String sha1Hash = "3a1bd6900872256303b1ed036881cd35f5b670ce";
private String testSnapshotName = "testSnapshot";
// Adler32 computed by python
// zlib.adler32("dnvbjaksdbhr7239iofhusdkjfhgkauyg83uhdjshkusdhoryhjzdgfk8ei") & 0xffffffff -> 2973505342
private static final String nodeId = "DUMMY_NODE_ID";
private static final String clusterId = "DUMMY_CLUSTER_ID";
private static final String backupBucket = Optional.ofNullable(System.getenv("TEST_BUCKET")).orElse("fooo");
private final Long independentChecksum = 2973505342L;
private List<String> tokens = ImmutableList.of("1", "2", "3", "4", "5");
private String confDir = "config";
private final List<TestFileConfig> versionsToTest = ImmutableList.of(
new TestFileConfig(sha1Hash, THREE)
);
private static final Map<String, Path> tempDirs = new LinkedHashMap<>();
@BeforeClass(alwaysRun = true)
public void setup() throws IOException, URISyntaxException {
for (TestFileConfig testFileConfig : versionsToTest) {
Path containerTempRoot = Files.createTempDirectory(testFileConfig.cassandraVersion.toString());
Path containerBackupRoot = Files.createTempDirectory(testFileConfig.cassandraVersion.toString());
BackupRestoreTestUtils.createTempDirectories(containerTempRoot, BackupRestoreTestUtils.cleanableDirs);
BackupRestoreTestUtils.createTempDirectories(containerTempRoot, BackupRestoreTestUtils.uncleanableDirs);
tempDirs.put(testFileConfig.cassandraVersion.toString(), containerTempRoot);
tempDirs.put(testFileConfig.cassandraVersion.toString() + "-backup-location", containerBackupRoot);
}
BackupRestoreTestUtils.resetDirectories(versionsToTest, tempDirs, testSnapshotName);
}
private List<Path> resolveSSTableComponentPaths(final String keyspace, final String table, final Path cassandraRoot, final int sequence, final TestFileConfig testFileConfig) {
return BackupRestoreTestUtils.SSTABLE_FILES.stream()
.map(name -> cassandraRoot.resolve("data")
.resolve(keyspace)
.resolve(table)
.resolve(String.format("%s-%s-big-%s", testFileConfig.getSstablePrefix(keyspace, table), sequence, name)))
.collect(Collectors.toList());
}
private void testBackupAndRestore(final BackupOperationRequest backupRequest,
final RestoreOperationRequest restoreRequest,
final TestFileConfig testFileConfig) throws Exception {
final Path sharedContainerRoot = backupRequest.sharedContainerPath;
final File manifestFile = new File(sharedContainerRoot.resolve("manifests/" + testSnapshotName).toString());
final Calendar calendar = Calendar.getInstance();
calendar.setTimeZone(TimeZone.getTimeZone("GMT"));
calendar.set(2017, Calendar.MAY, 2, 2, 6, 0);
// restoreRequest.timestampStart = calendar.getTimeInMillis();
// restoreRequest.timestampEnd = System.currentTimeMillis();
new BackupOperation(null,
new HashMap<String, BackuperFactory>() {{
put("file", new BackuperFactory() {
@Override
public Backuper createBackuper(final BackupOperationRequest backupOperationRequest) {
return new LocalFileBackuper(new FixedTasksExecutor(), backupRequest);
}
@Override
public Backuper createCommitLogBackuper(final BackupCommitLogsOperationRequest backupCommitLogsOperationRequest) {
return null;
}
});
}},
backupRequest).run();
BackupRestoreTestUtils.clearDirs(backupRequest.sharedContainerPath, BackupRestoreTestUtils.cleanableDirs);
BackupRestoreTestUtils.createConfigFiles(sharedContainerRoot.resolve(confDir));
//Make sure we deleted the files
restoreRequest.keyspaceTables.entries().forEach(x -> {
resolveSSTableComponentPaths(x.getKey(), x.getValue(), sharedContainerRoot, 1, testFileConfig).stream()
.map(Path::toFile)
.map(File::exists)
.forEach(Assert::assertFalse);
});
new RestoreOperation(new HashMap<String, RestorerFactory>() {{
put("file", new RestorerFactory() {
@Override
public Restorer createRestorer(final RestoreOperationRequest restoreOperationRequest) {
return new LocalFileRestorer(new FixedTasksExecutor(), restoreRequest);
}
@Override
public Restorer createCommitLogRestorer(final RestoreCommitLogsOperationRequest restoreCommitLogsOperationRequest) {
return null;
}
});
}}, restoreRequest).run();
// Confirm manifest downloaded
assertTrue(manifestFile.exists());
restoreRequest.keyspaceTables.entries().forEach(x -> {
Stream.of(1, 2, 3).forEach(sequence ->
resolveSSTableComponentPaths(x.getKey(), x.getValue(), sharedContainerRoot, sequence, testFileConfig).stream()
.map(Path::toFile)
.map(File::exists)
.forEach(Assert::assertTrue));
});
// Confirm cassandra.yaml present and includes tokens
final Path cassandraYaml = sharedContainerRoot.resolve(confDir).resolve("cassandra.yaml");
assertTrue(cassandraYaml.toFile().exists());
String cassandraYamlText = new String(Files.readAllBytes(cassandraYaml));
// Assert.assertTrue(cassandraYamlText.contains("initial_token: ")); //this is not really testing that we have configured tokens properly
// Assert.assertTrue(cassandraYamlText.contains("auto_bootstrap: false"));
// Assert.assertFalse(cassandraYamlText.contains("auto_bootstrap: true"));
}
@Test(description = "Full backup and restore to an existing cluster", groups = {"basic"})
public void basicRestore() throws Exception {
basicProviderBackupRestore(backupBucket);
}
// @Test(description = "Full backup and restore to an existing cluster", groups = {"gcp"})
// public void basicGCPRestore() throws Exception {
// //TODO: make it easier to test multiple different buckets (from diff providers in one test run)
// basicProviderBackupRestore(StorageProviders.GCP_BLOB, bucket);
// }
//
// @Test(description = "Full backup and restore to an existing cluster", groups = {"aws"})
// public void basicAWSRestore() throws Exception {
// basicProviderBackupRestore(StorageProviders.AWS_S3, bucket);
// }
//
// @Test(description = "Full backup and restore to an existing cluster", groups = {"azure"})
// public void basicAzureRestore() throws Exception {
// basicProviderBackupRestore(StorageProviders.AZURE_BLOB, bucket);
// }
public void basicProviderBackupRestore(final String bucket) throws Exception {
final String keyspace = "keyspace1";
final String table = "table1";
for (TestFileConfig testFileConfig : versionsToTest) {
final Path sharedContainerRoot = tempDirs.get(testFileConfig.cassandraVersion.toString());
final Path backupPath = tempDirs.get(testFileConfig.cassandraVersion.toString() + "-backup-location");
final StorageLocation storageLocation = new StorageLocation(String.format("file://%s/%s/%s/%s",
backupPath.toString(),
bucket,
clusterId,
nodeId));
final BackupOperationRequest backupRequest = new BackupOperationRequest(
storageLocation,
null,
null,
10,
true,
sharedContainerRoot,
sharedContainerRoot,
ImmutableList.of(),
testSnapshotName,
true,
null
);
final RestoreOperationRequest restoreRequest = new RestoreOperationRequest(
storageLocation,
10,
true,
sharedContainerRoot,
sharedContainerRoot,
sharedContainerRoot,
true,
testSnapshotName,
ImmutableMultimap.of()
);
testBackupAndRestore(backupRequest, restoreRequest, testFileConfig);
}
}
@Test(description = "Check that we are checksumming properly")
public void testCalculateDigest() throws Exception {
for (TestFileConfig testFileConfig : versionsToTest) {
final String keyspace = "keyspace1";
final String table1 = "table1";
final Path table1Path = tempDirs.get(testFileConfig.cassandraVersion.toString()).resolve("data/" + keyspace + "/" + table1);
final Path path = table1Path.resolve(String.format("%s-1-big-Data.db", testFileConfig.getSstablePrefix(keyspace, table1)));
final String checksum = SSTableUtils.calculateChecksum(path);
assertEquals(checksum, String.valueOf(independentChecksum));
}
}
@BeforeTest
private void hardResetTestDirs() throws IOException, URISyntaxException {
cleanUp();
setup();
}
@Test(description = "Test that the manifest is correctly constructed, includes expected files and generates checksum if necessary")
public void testSSTableLister() throws Exception {
hardResetTestDirs(); //TODO not sure why this doesn't recreate things fully given its called before each test
for (TestFileConfig testFileConfig : versionsToTest) {
Path backupRoot = Paths.get("/backupRoot/keyspace1");
final String keyspace = "keyspace1";
final String table1 = "table1";
final Path table1Path = tempDirs.get(testFileConfig.cassandraVersion.toString()).resolve("data/" + keyspace + "/" + table1);
Collection<ManifestEntry> manifest = SSTableUtils.ssTableManifest(table1Path, backupRoot.resolve(table1Path.getFileName())).collect(Collectors.toList());
final String table2 = "table2";
final Path table2Path = tempDirs.get(testFileConfig.cassandraVersion.toString()).resolve("data/" + keyspace + "/" + table2);
manifest.addAll(SSTableUtils.ssTableManifest(table2Path, backupRoot.resolve(table2Path.getFileName())).collect(Collectors.toList()));
Map<Path, Path> manifestMap = new HashMap<>();
for (ManifestEntry e : manifest) {
manifestMap.put(e.localFile, e.objectKey);
}
if (CassandraVersion.isTwoZero(testFileConfig.cassandraVersion)) {
// table1 is un-compressed so should have written out a sha1 digest
final Path localPath1 = table1Path.resolve(String.format("%s-1-big-Data.db", testFileConfig.getSstablePrefix(keyspace, table1)));
assertEquals(manifestMap.get(localPath1),
backupRoot.resolve(String.format("%s/1-%s/%s-1-big-Data.db", table1, sha1Hash, testFileConfig.getSstablePrefix(keyspace, table1))));
final Path localPath2 = table1Path.resolve(String.format("%s-3-big-Index.db", testFileConfig.getSstablePrefix(keyspace, table1)));
final String checksum2 = SSTableUtils.calculateChecksum(localPath2);
assertEquals(manifestMap.get(localPath2),
backupRoot.resolve(String.format("%s/3-%s/%s-3-big-Index.db", table1, checksum2, testFileConfig.getSstablePrefix(keyspace, table1))));
final Path localPath3 = table2Path.resolve(String.format("%s-1-big-Data.db", testFileConfig.getSstablePrefix(keyspace, table2)));
final String checksum3 = SSTableUtils.calculateChecksum(localPath3);
assertEquals(manifestMap.get(localPath3),
backupRoot.resolve(String.format("%s/1-%s/%s-1-big-Data.db", table2, checksum3, testFileConfig.getSstablePrefix(keyspace, table2))));
assertNull(manifestMap.get(table2Path.resolve(String.format("%s-3-big-Index.db", testFileConfig.getSstablePrefix(keyspace, table2)))));
} else {
assertEquals(manifestMap.get(table1Path.resolve(String.format("%s-1-big-Data.db", testFileConfig.getSstablePrefix(keyspace, table1)))),
backupRoot.resolve(String.format("%s/1-1000000000/%s-1-big-Data.db", table1, testFileConfig.getSstablePrefix(keyspace, table1))));
// Cassandra doesn't create CRC32 file for 2.0.x
assertEquals(manifestMap.get(table1Path.resolve(String.format("%s-2-big-Digest.crc32", testFileConfig.getSstablePrefix(keyspace, table1)))),
backupRoot.resolve(String.format("%s/2-1000000000/%s-2-big-Digest.crc32", table1, testFileConfig.getSstablePrefix(keyspace, table1))));
assertEquals(manifestMap.get(table1Path.resolve(String.format("%s-3-big-Index.db", testFileConfig.getSstablePrefix(keyspace, table1)))),
backupRoot.resolve(String.format("%s/3-1000000000/%s-3-big-Index.db", table1, testFileConfig.getSstablePrefix(keyspace, table1))));
assertEquals(manifestMap.get(table2Path.resolve(String.format("%s-1-big-Data.db", testFileConfig.getSstablePrefix(keyspace, table2)))),
backupRoot.resolve(String.format("%s/1-1000000000/%s-1-big-Data.db", table2, testFileConfig.getSstablePrefix(keyspace, table2))));
assertEquals(manifestMap.get(table2Path.resolve(String.format("%s-2-big-Digest.crc32", testFileConfig.getSstablePrefix(keyspace, table2)))),
backupRoot.resolve(String.format("%s/2-1000000000/%s-2-big-Digest.crc32", table2, testFileConfig.getSstablePrefix(keyspace, table2))));
assertNull(manifestMap.get(table2Path.resolve(String.format("%s-3-big-Index.db", testFileConfig.getSstablePrefix(keyspace, table2)))));
}
assertNull(manifestMap.get(table1Path.resolve("manifest.json")));
assertNull(manifestMap.get(table1Path.resolve("backups")));
assertNull(manifestMap.get(table1Path.resolve("snapshots")));
}
}
    @AfterClass(alwaysRun = true)
    public void cleanUp() throws IOException {
        // Remove the per-Cassandra-version temp data directories created for the
        // backup/restore fixtures; alwaysRun ensures cleanup even after failures.
        BackupRestoreTestUtils.deleteTempDirectories(tempDirs);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.CacheRebalanceMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.Event;
import org.apache.ignite.events.EventType;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionFullMap;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionMap;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPreloader;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearCacheAdapter;
import org.apache.ignite.internal.util.typedef.CAX;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheRebalanceMode.ASYNC;
import static org.apache.ignite.cache.CacheRebalanceMode.SYNC;
/**
* Test cases for partitioned cache {@link GridDhtPreloader preloader}.
*/
public class GridCacheDhtPreloadDelayedSelfTest extends GridCommonAbstractTest {
    /** Number of keys put into the cache before additional nodes join. */
    private static final int KEY_CNT = 100;
    /** Rebalance delay (ms) used by the delayed-preload scenario. */
    private static final int PRELOAD_DELAY = 5000;
    /** Rebalance mode applied to the cache configuration of started grids. */
    private CacheRebalanceMode preloadMode = ASYNC;
    /** Rebalance delay: -1 = manual only, 0 = immediate, &gt;0 = delay in ms. */
    private long delay = -1;
    /** Shared IP finder so all test grids discover each other. */
    private TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);
        assert preloadMode != null;
        // Partitioned transactional cache with one backup; rebalance behavior is
        // driven by the mutable preloadMode/delay fields each test sets BEFORE
        // starting its grids.
        CacheConfiguration cc = defaultCacheConfiguration();
        cc.setCacheMode(PARTITIONED);
        cc.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        cc.setRebalanceMode(preloadMode);
        cc.setRebalanceDelay(delay);
        cc.setAffinity(new RendezvousAffinityFunction(false, 128));
        cc.setBackups(1);
        cc.setAtomicityMode(TRANSACTIONAL);
        TcpDiscoverySpi disco = new TcpDiscoverySpi();
        disco.setIpFinder(ipFinder);
        c.setDiscoverySpi(disco);
        c.setCacheConfiguration(cc);
        return c;
    }
    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();
    }
    /**
     * Delay of -1 disables automatic rebalancing: new nodes must stay empty
     * until {@code rebalance()} is explicitly invoked on each of them.
     *
     * @throws Exception If failed.
     */
    public void testManualPreload() throws Exception {
        delay = -1;
        Ignite g0 = startGrid(0);
        int cnt = KEY_CNT;
        IgniteCache<String, Integer> c0 = g0.cache(DEFAULT_CACHE_NAME);
        for (int i = 0; i < cnt; i++)
            c0.put(Integer.toString(i), i);
        Ignite g1 = startGrid(1);
        Ignite g2 = startGrid(2);
        IgniteCache<String, Integer> c1 = g1.cache(DEFAULT_CACHE_NAME);
        IgniteCache<String, Integer> c2 = g2.cache(DEFAULT_CACHE_NAME);
        // No rebalance has happened yet, so new nodes must hold no on-heap entries.
        for (int i = 0; i < cnt; i++)
            assertNull(c1.localPeek(Integer.toString(i), CachePeekMode.ONHEAP));
        for (int i = 0; i < cnt; i++)
            assertNull(c2.localPeek(Integer.toString(i), CachePeekMode.ONHEAP));
        final CountDownLatch l1 = new CountDownLatch(1);
        final CountDownLatch l2 = new CountDownLatch(1);
        // Each latch opens when its node reports that rebalancing finished.
        g1.events().localListen(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                l1.countDown();
                return true;
            }
        }, EventType.EVT_CACHE_REBALANCE_STOPPED);
        g2.events().localListen(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                l2.countDown();
                return true;
            }
        }, EventType.EVT_CACHE_REBALANCE_STOPPED);
        info("Beginning to wait for cache1 repartition.");
        GridDhtCacheAdapter<String, Integer> d0 = dht(0);
        GridDhtCacheAdapter<String, Integer> d1 = dht(1);
        GridDhtCacheAdapter<String, Integer> d2 = dht(2);
        // Non-strict: node 2 has not rebalanced yet, partitions need not all be OWNING.
        checkMaps(false, d0, d1, d2);
        // Force preload.
        c1.rebalance();
        l1.await();
        info("Cache1 is repartitioned.");
        checkMaps(false, d0, d1, d2);
        info("Beginning to wait for cache2 repartition.");
        // Force preload.
        c2.rebalance();
        l2.await();
        info("Cache2 is repartitioned.");
        // Strict: all nodes rebalanced, every partition must be OWNING.
        checkMaps(true, d0, d1, d2);
        checkCache(c0, cnt);
        checkCache(c1, cnt);
        checkCache(c2, cnt);
    }
    /**
     * Positive delay: rebalancing must NOT have happened shortly after the new
     * nodes join, but must complete on its own within ~1.5x the delay.
     *
     * @throws Exception If failed.
     */
    public void testDelayedPreload() throws Exception {
        delay = PRELOAD_DELAY;
        Ignite g0 = startGrid(0);
        int cnt = KEY_CNT;
        IgniteCache<String, Integer> c0 = g0.cache(DEFAULT_CACHE_NAME);
        for (int i = 0; i < cnt; i++)
            c0.put(Integer.toString(i), i);
        Ignite g1 = startGrid(1);
        Ignite g2 = startGrid(2);
        IgniteCache<String, Integer> c1 = g1.cache(DEFAULT_CACHE_NAME);
        IgniteCache<String, Integer> c2 = g2.cache(DEFAULT_CACHE_NAME);
        for (int i = 0; i < cnt; i++)
            assertNull(c1.localPeek(Integer.toString(i), CachePeekMode.ONHEAP));
        for (int i = 0; i < cnt; i++)
            assertNull(c2.localPeek(Integer.toString(i), CachePeekMode.ONHEAP));
        final CountDownLatch l1 = new CountDownLatch(1);
        final CountDownLatch l2 = new CountDownLatch(1);
        g1.events().localListen(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                l1.countDown();
                return true;
            }
        }, EventType.EVT_CACHE_REBALANCE_STOPPED);
        g2.events().localListen(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                l2.countDown();
                return true;
            }
        }, EventType.EVT_CACHE_REBALANCE_STOPPED);
        // Wait less than the configured delay: topology must still be unbalanced.
        U.sleep(1000);
        GridDhtCacheAdapter<String, Integer> d0 = dht(0);
        GridDhtCacheAdapter<String, Integer> d1 = dht(1);
        GridDhtCacheAdapter<String, Integer> d2 = dht(2);
        info("Beginning to wait for caches repartition.");
        checkMaps(false, d0, d1, d2);
        // Rebalance must fire automatically once the delay elapses.
        assert l1.await(PRELOAD_DELAY * 3 / 2, MILLISECONDS);
        assert l2.await(PRELOAD_DELAY * 3 / 2, MILLISECONDS);
        U.sleep(1000);
        info("Caches are repartitioned.");
        checkMaps(true, d0, d1, d2);
        checkCache(c0, cnt);
        checkCache(c1, cnt);
        checkCache(c2, cnt);
    }
    /**
     * Delay 0 with SYNC mode: nodes rebalance as part of startup, so maps must
     * already be consistent right after the grids come up.
     *
     * @throws Exception If failed.
     */
    public void testAutomaticPreload() throws Exception {
        delay = 0;
        preloadMode = CacheRebalanceMode.SYNC;
        Ignite g0 = startGrid(0);
        int cnt = KEY_CNT;
        IgniteCache<String, Integer> c0 = g0.cache(DEFAULT_CACHE_NAME);
        for (int i = 0; i < cnt; i++)
            c0.put(Integer.toString(i), i);
        Ignite g1 = startGrid(1);
        Ignite g2 = startGrid(2);
        IgniteCache<String, Integer> c1 = g1.cache(DEFAULT_CACHE_NAME);
        IgniteCache<String, Integer> c2 = g2.cache(DEFAULT_CACHE_NAME);
        GridDhtCacheAdapter<String, Integer> d0 = dht(0);
        GridDhtCacheAdapter<String, Integer> d1 = dht(1);
        GridDhtCacheAdapter<String, Integer> d2 = dht(2);
        checkMaps(true, d0, d1, d2);
        checkCache(c0, cnt);
        checkCache(c1, cnt);
        checkCache(c2, cnt);
    }
    /**
     * Starts five empty nodes one by one and, after each partition map exchange,
     * verifies every partition in the full map is owned by an affinity node.
     *
     * @throws Exception If failed.
     */
    public void testAutomaticPreloadWithEmptyCache() throws Exception {
        preloadMode = SYNC;
        delay = 0;
        Collection<Ignite> ignites = new ArrayList<>();
        try {
            for (int i = 0; i < 5; i++) {
                ignites.add(startGrid(i));
                awaitPartitionMapExchange();
                for (Ignite g : ignites) {
                    info(">>> Checking affinity for grid: " + g.name());
                    GridDhtPartitionTopology top = topology(g);
                    GridDhtPartitionFullMap fullMap = top.partitionMap(true);
                    // Every (node, partition) pair in the full map must match affinity.
                    for (Map.Entry<UUID, GridDhtPartitionMap> fe : fullMap.entrySet()) {
                        UUID nodeId = fe.getKey();
                        GridDhtPartitionMap m = fe.getValue();
                        for (Map.Entry<Integer, GridDhtPartitionState> e : m.entrySet()) {
                            int p = e.getKey();
                            GridDhtPartitionState state = e.getValue();
                            Collection<ClusterNode> nodes = affinityNodes(g, p);
                            Collection<UUID> nodeIds = U.nodeIds(nodes);
                            assert nodeIds.contains(nodeId) : "Invalid affinity mapping [nodeId=" + nodeId +
                                ", part=" + p + ", state=" + state + ", igniteInstanceName=" +
                                G.ignite(nodeId).name() + ", affNames=" + U.nodes2names(nodes) +
                                ", affIds=" + nodeIds + ']';
                        }
                    }
                }
            }
        }
        finally {
            stopAllGrids();
        }
    }
    /**
     * Smoke test: SYNC mode with manual-only delay must still allow a single
     * node to start without hanging on rebalance.
     *
     * @throws Exception If failed.
     */
    public void testManualPreloadSyncMode() throws Exception {
        preloadMode = CacheRebalanceMode.SYNC;
        delay = -1;
        try {
            startGrid(0);
        }
        finally {
            stopAllGrids();
        }
    }
    /**
     * Starts nine nodes with immediate rebalance, then a tenth with manual-only
     * rebalance and measures an explicitly forced preload of the empty cache.
     *
     * @throws Exception If failed.
     */
    public void testPreloadManyNodes() throws Exception {
        delay = 0;
        preloadMode = ASYNC;
        startGridsMultiThreaded(9);
        U.sleep(2000);
        try {
            // Tenth node joins with rebalancing disabled until forced below.
            delay = -1;
            preloadMode = ASYNC;
            Ignite g = startGrid(9);
            info(">>> Starting manual preload");
            long start = System.currentTimeMillis();
            g.cache(DEFAULT_CACHE_NAME).rebalance().get();
            info(">>> Finished preloading of empty cache in " + (System.currentTimeMillis() - start) + "ms.");
        }
        finally {
            stopAllGrids();
        }
    }
    /**
     * @param g Grid.
     * @return DHT partition topology of the default cache on the given grid.
     */
    private GridDhtPartitionTopology topology(Ignite g) {
        return ((GridNearCacheAdapter<Integer, String>)((IgniteKernal)g).<Integer, String>internalCache(DEFAULT_CACHE_NAME)).dht().topology();
    }
    /**
     * @param g Grid.
     * @return Affinity for the default cache.
     */
    private Affinity<Object> affinity(Ignite g) {
        return g.affinity(DEFAULT_CACHE_NAME);
    }
    /**
     * @param g Grid.
     * @param p Partition.
     * @return Primary and backup nodes for the given partition.
     */
    private Collection<ClusterNode> affinityNodes(Ignite g, int p) {
        return affinity(g).mapPartitionToPrimaryAndBackups(p);
    }
    /**
     * Checks that every key mapped to the local node (as primary or backup) is
     * locally present with the expected value.
     *
     * @param c Cache.
     * @param keyCnt Key count.
     */
    private void checkCache(IgniteCache<String, Integer> c, int keyCnt) {
        Ignite g = c.unwrap(Ignite.class);
        for (int i = 0; i < keyCnt; i++) {
            String key = Integer.toString(i);
            // NOTE(review): affinity(c) resolves to an inherited affinity(IgniteCache)
            // overload from the base test class, not the local affinity(Ignite)
            // helper above — confirm against GridCommonAbstractTest.
            if (affinity(c).isPrimaryOrBackup(g.cluster().localNode(), key))
                assertEquals(Integer.valueOf(i), c.localPeek(key));
        }
    }
    /**
     * Retries (up to 50 x 500ms) until the partition maps of all given caches
     * are identical; in strict mode every partition must also be OWNING.
     *
     * @param strict Strict check flag.
     * @param caches Maps to compare.
     * @throws Exception If failed.
     */
    @SafeVarargs
    private final void checkMaps(final boolean strict, final GridDhtCacheAdapter<String, Integer>... caches)
        throws Exception {
        if (caches.length < 2)
            return;
        GridTestUtils.retryAssert(log, 50, 500, new CAX() {
            @Override public void applyx() {
                info("Checking partition maps.");
                for (int i = 0; i < caches.length; i++)
                    info("Partition map for node " + i + ": " + caches[i].topology().partitionMap(false).toFullString());
                // Compare every node's full map against the first cache's view.
                GridDhtPartitionFullMap orig = caches[0].topology().partitionMap(true);
                for (int i = 1; i < caches.length; i++) {
                    GridDhtPartitionFullMap cmp = caches[i].topology().partitionMap(true);
                    assert orig.keySet().equals(cmp.keySet());
                    for (Map.Entry<UUID, GridDhtPartitionMap> entry : orig.entrySet()) {
                        UUID nodeId = entry.getKey();
                        GridDhtPartitionMap nodeMap = entry.getValue();
                        GridDhtPartitionMap cmpMap = cmp.get(nodeId);
                        assert cmpMap != null;
                        assert nodeMap.keySet().equals(cmpMap.keySet());
                        for (Map.Entry<Integer, GridDhtPartitionState> nodeEntry : nodeMap.entrySet()) {
                            GridDhtPartitionState state = cmpMap.get(nodeEntry.getKey());
                            assert state != null;
                            assert state != GridDhtPartitionState.EVICTED;
                            assert !strict || state == GridDhtPartitionState.OWNING : "Invalid partition state: " + state;
                            assert state == nodeEntry.getValue();
                        }
                    }
                }
            }
        });
    }
}
| |
package uk.ac.soton.ecs.comp6237.utils;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Insets;
import javax.swing.JScrollPane;
import javax.swing.SwingUtilities;
/**
* FlowLayout subclass that fully supports wrapping of components.
*/
public class WrapLayout extends FlowLayout
{
	private static final long serialVersionUID = 1L;

	/**
	 * Creates a wrapping flow layout with a left alignment and the default
	 * 5-unit horizontal and vertical gaps.
	 */
	public WrapLayout()
	{
		super();
	}

	/**
	 * Creates a wrapping flow layout with the given alignment and the default
	 * 5-unit horizontal and vertical gaps.
	 *
	 * @param align
	 *            one of {@link FlowLayout#LEFT}, {@link FlowLayout#CENTER} or
	 *            {@link FlowLayout#RIGHT}
	 */
	public WrapLayout(int align)
	{
		super(align);
	}

	/**
	 * Creates a wrapping flow layout with the given alignment and gaps.
	 *
	 * @param align
	 *            one of {@link FlowLayout#LEFT}, {@link FlowLayout#CENTER} or
	 *            {@link FlowLayout#RIGHT}
	 * @param hgap
	 *            the horizontal gap between components
	 * @param vgap
	 *            the vertical gap between components
	 */
	public WrapLayout(int align, int hgap, int vgap)
	{
		super(align, hgap, vgap);
	}

	/**
	 * Returns the preferred size needed to lay out the <i>visible</i> children
	 * of the target container, wrapping onto additional rows as needed.
	 *
	 * @param target
	 *            the container being laid out
	 * @return the preferred dimensions for the container's subcomponents
	 */
	@Override
	public Dimension preferredLayoutSize(Container target)
	{
		return layoutSize(target, true);
	}

	/**
	 * Returns the minimum size needed to lay out the <i>visible</i> children
	 * of the target container.
	 *
	 * @param target
	 *            the container being laid out
	 * @return the minimum dimensions for the container's subcomponents
	 */
	@Override
	public Dimension minimumLayoutSize(Container target)
	{
		final Dimension size = layoutSize(target, false);
		// Drop one gap so the container can shrink slightly below the
		// computed size without immediately forcing a re-wrap.
		size.width -= (getHgap() + 1);
		return size;
	}

	/**
	 * Shared implementation for the preferred/minimum size calculations.
	 *
	 * @param target
	 *            container whose visible children are measured
	 * @param preferred
	 *            true to use preferred sizes, false to use minimum sizes
	 * @return the computed dimension, including insets and gaps
	 */
	private Dimension layoutSize(Container target, boolean preferred)
	{
		synchronized (target.getTreeLock())
		{
			// Each row must fit within the width currently allocated to the
			// container. A width of zero means the container has not been
			// sized yet, so lay everything out on a single row.
			int availableWidth = target.getSize().width;
			if (availableWidth == 0)
				availableWidth = Integer.MAX_VALUE;

			final int hgap = getHgap();
			final int vgap = getVgap();
			final Insets insets = target.getInsets();
			final int extraWidth = insets.left + insets.right + (hgap * 2);
			final int maxRowWidth = availableWidth - extraWidth;

			// Accumulate rows of components that fit in the allowed width.
			final Dimension result = new Dimension(0, 0);
			int lineWidth = 0;
			int lineHeight = 0;

			for (final Component child : target.getComponents())
			{
				if (!child.isVisible())
					continue;

				final Dimension childSize = preferred ? child.getPreferredSize() : child.getMinimumSize();

				// The child no longer fits on this row: close it and start fresh.
				if (lineWidth + childSize.width > maxRowWidth)
				{
					addRow(result, lineWidth, lineHeight);
					lineWidth = 0;
					lineHeight = 0;
				}

				// A horizontal gap precedes every child except the row's first.
				if (lineWidth != 0)
				{
					lineWidth += hgap;
				}

				lineWidth += childSize.width;
				lineHeight = Math.max(lineHeight, childSize.height);
			}

			addRow(result, lineWidth, lineHeight);

			result.width += extraWidth;
			result.height += insets.top + insets.bottom + vgap * 2;

			// When inside a scroll pane (or the DecoratedLookAndFeel) the
			// preferred width must stay below the viewport width or shrinking
			// the container would never re-wrap; dropping one gap is an easy
			// way to guarantee that.
			final Container scrollPane = SwingUtilities.getAncestorOfClass(JScrollPane.class, target);
			if (scrollPane != null && target.isValid())
			{
				result.width -= (hgap + 1);
			}

			return result;
		}
	}

	/**
	 * Folds a completed row into the running size calculation.
	 *
	 * @param dim
	 *            accumulator updated with the row's width and height
	 * @param rowWidth
	 *            the width of the row to add
	 * @param rowHeight
	 *            the height of the row to add
	 */
	private void addRow(Dimension dim, int rowWidth, int rowHeight)
	{
		dim.width = Math.max(dim.width, rowWidth);

		if (dim.height > 0)
		{
			dim.height += getVgap();
		}

		dim.height += rowHeight;
	}
}
| |
package net.glowstone.scoreboard;
import lombok.Getter;
import net.glowstone.net.message.play.scoreboard.ScoreboardObjectiveMessage;
import net.glowstone.util.TextMessage;
import net.kyori.adventure.text.Component;
import org.bukkit.OfflinePlayer;
import org.bukkit.scoreboard.Criterias;
import org.bukkit.scoreboard.DisplaySlot;
import org.bukkit.scoreboard.Objective;
import org.bukkit.scoreboard.RenderType;
import org.bukkit.scoreboard.Score;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map.Entry;
import java.util.Set;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Scoreboard objective and associated data.
*/
public final class GlowObjective implements Objective {

    private final String name;
    private final String criteria;
    /** Scores keyed by entry (e.g. player name or team name). */
    private final HashMap<String, GlowScore> scores = new HashMap<>();
    /** Slot this objective is currently displayed in, or null if hidden. */
    DisplaySlot displaySlot;
    @Getter
    private GlowScoreboard scoreboard;
    private String displayName;
    private RenderType renderType;

    /**
     * Creates a scoreboard objective.
     *
     * @param scoreboard the scoreboard to add to
     * @param name the name of the objective
     * @param criteria one of the constants from {@link Criterias}, or anything else if this score
     *                 is only modified by commands and/or plugins.
     */
    public GlowObjective(GlowScoreboard scoreboard, String name, String criteria) {
        this.scoreboard = scoreboard;
        this.name = name;
        this.criteria = criteria;
        renderType = RenderType.INTEGER;
        displayName = name;
    }

    /**
     * Removes this objective from the scoreboard.
     *
     * @throws IllegalStateException if this objective already isn't registered with a scoreboard
     */
    @Override
    public void unregister() throws IllegalStateException {
        checkValid();
        // Detach every score from the scoreboard's per-entry index before
        // dropping the objective itself.
        for (Entry<String, GlowScore> entry : scores.entrySet()) {
            scoreboard.getScoresForName(entry.getKey()).remove(entry.getValue());
        }
        scoreboard.removeObjective(this);
        // A null scoreboard marks this objective as unregistered (see checkValid).
        scoreboard = null;
    }

    /**
     * Throws if this objective is no longer registered with a scoreboard.
     */
    void checkValid() {
        if (scoreboard == null) {
            throw new IllegalStateException("Cannot manipulate unregistered objective");
        }
    }

    ////////////////////////////////////////////////////////////////////////////
    // Properties

    @Override
    public String getName() throws IllegalStateException {
        checkValid();
        return name;
    }

    @Override
    public @NotNull Component displayName() throws IllegalStateException {
        throw new UnsupportedOperationException("Adventure API is not yet supported.");
    }

    @Override
    public void displayName(@Nullable Component component) throws IllegalStateException, IllegalArgumentException {
        throw new UnsupportedOperationException("Adventure API is not yet supported.");
    }

    @Override
    public String getCriteria() throws IllegalStateException {
        checkValid();
        return criteria;
    }

    @Override
    public String getDisplayName() throws IllegalStateException {
        checkValid();
        return displayName;
    }

    /**
     * Sets the display name.
     *
     * @param displayName the new display name, up to 32 characters long
     * @throws IllegalArgumentException if {@code displayName} is null or longer than 32 characters
     * @throws IllegalStateException if this objective isn't registered with a scoreboard
     */
    @Override
    public void setDisplayName(String displayName)
            throws IllegalStateException, IllegalArgumentException {
        checkValid();
        checkNotNull(displayName, "displayName cannot be null");
        checkArgument(displayName.length() <= 32,
            "displayName cannot be longer than 32 characters");
        this.displayName = displayName;
        // Push the change to all players viewing this scoreboard.
        scoreboard.broadcast(
            ScoreboardObjectiveMessage.update(name, new TextMessage(displayName), renderType));
    }

    @Override
    public DisplaySlot getDisplaySlot() throws IllegalStateException {
        checkValid();
        return displaySlot;
    }

    @Override
    public org.bukkit.scoreboard.@NotNull RenderType getRenderType() throws IllegalStateException {
        // Map the internal protocol render type onto the Bukkit API enum.
        return renderType == RenderType.HEARTS
            ? org.bukkit.scoreboard.RenderType.HEARTS
            : org.bukkit.scoreboard.RenderType.INTEGER;
    }

    @Override
    public void setRenderType(org.bukkit.scoreboard.@NotNull RenderType renderType)
            throws IllegalStateException {
        this.renderType = (renderType == org.bukkit.scoreboard.RenderType.HEARTS
            ? RenderType.HEARTS : RenderType.INTEGER);
    }

    /**
     * Sets the render type from its case-insensitive name.
     *
     * @param renderType the Bukkit {@link org.bukkit.scoreboard.RenderType} constant name
     * @throws IllegalArgumentException if the name matches no render type
     */
    public void setRenderType(String renderType) {
        setRenderType(org.bukkit.scoreboard.RenderType.valueOf(renderType.toUpperCase(Locale.ROOT)));
    }

    /**
     * Sets the {@link DisplaySlot} where this objective displays.
     *
     * @param slot the DisplaySlot, or null to hide the objective
     * @throws IllegalStateException if this objective isn't registered with a scoreboard
     */
    @Override
    public void setDisplaySlot(DisplaySlot slot) throws IllegalStateException {
        checkValid();
        if (slot != displaySlot) {
            // Vacate the old slot before occupying the new one.
            if (displaySlot != null) {
                scoreboard.setDisplaySlot(displaySlot, null);
            }
            if (slot != null) {
                scoreboard.setDisplaySlot(slot, this);
            }
        }
    }

    public RenderType getType() throws IllegalStateException {
        checkValid();
        return renderType;
    }

    /**
     * Sets the {@link RenderType} for this objective.
     *
     * @param renderType the new render type
     * @throws IllegalArgumentException if {@code renderType} is null
     * @throws IllegalStateException if this objective isn't registered with a scoreboard
     */
    public void setType(RenderType renderType) throws IllegalStateException {
        checkValid();
        checkNotNull(renderType, "RenderType cannot be null");
        this.renderType = renderType;
        scoreboard.broadcast(
            ScoreboardObjectiveMessage.update(name, new TextMessage(displayName), renderType));
    }

    @Override
    public boolean isModifiable() throws IllegalStateException {
        checkValid();
        // Health-criteria objectives are managed by the server, not plugins.
        return !criteria.equalsIgnoreCase(Criterias.HEALTH);
    }

    ////////////////////////////////////////////////////////////////////////////
    // Score management

    /**
     * Returns a score, creating it if necessary.
     *
     * @param entry the key (e.g. player name or team name)
     * @return the score for {@code entry}
     * @throws IllegalArgumentException if {@code entry} is null
     * @throws IllegalStateException if this objective isn't registered with a scoreboard
     */
    @Override
    public Score getScore(String entry) throws IllegalArgumentException, IllegalStateException {
        checkNotNull(entry, "Entry cannot be null");
        checkValid();
        // computeIfAbsent installs the returned value itself; the mapping
        // function must NOT modify 'scores' (a nested put() here violated the
        // Map contract and throws ConcurrentModificationException on Java 9+).
        return scores.computeIfAbsent(entry, entryCopy -> {
            GlowScore score = new GlowScore(this, entryCopy);
            scoreboard.getScoresForName(entryCopy).add(score);
            return score;
        });
    }

    @Override
    @Deprecated
    public Score getScore(OfflinePlayer player)
            throws IllegalArgumentException, IllegalStateException {
        checkNotNull(player, "Player cannot be null");
        return getScore(player.getName());
    }

    /**
     * Deletes a score directly.
     *
     * @param entry The key to delete.
     */
    void deleteScore(String entry) {
        scores.remove(entry);
    }

    /**
     * Returns whether a score is defined.
     *
     * @param entry the key (e.g. player name or team name)
     * @return true if the score exists; false otherwise
     * @throws IllegalArgumentException if {@code entry} is null
     * @throws IllegalStateException if this objective isn't registered with a scoreboard
     */
    public boolean hasScore(String entry) throws IllegalArgumentException, IllegalStateException {
        checkNotNull(entry, "Entry cannot be null");
        checkValid();
        return scores.containsKey(entry);
    }

    /**
     * Returns the live set of entries with scores on this objective.
     * Note: this is a view backed by the internal map, not a copy.
     *
     * @return the entry keys
     */
    public Set<String> getEntries() throws IllegalStateException {
        return scores.keySet();
    }
}
| |
/*
* Copyright 2020 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.contribs.queue.nats;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.core.events.queue.ObservableQueue;
import io.nats.client.NUID;
import rx.Observable;
import rx.Scheduler;
/** @author Oleksiy Lysak */
public abstract class NATSAbstractQueue implements ObservableQueue {
private static final Logger LOGGER = LoggerFactory.getLogger(NATSAbstractQueue.class);
protected LinkedBlockingQueue<Message> messages = new LinkedBlockingQueue<>();
protected final Lock mu = new ReentrantLock();
private final String queueType;
private ScheduledExecutorService execs;
private final Scheduler scheduler;
protected final String queueURI;
protected final String subject;
protected String queue;
// Indicates that observe was called (Event Handler) and we must to re-initiate subscription
// upon reconnection
private boolean observable;
private boolean isOpened;
private volatile boolean running;
NATSAbstractQueue(String queueURI, String queueType, Scheduler scheduler) {
this.queueURI = queueURI;
this.queueType = queueType;
this.scheduler = scheduler;
// If queue specified (e.g. subject:queue) - split to subject & queue
if (queueURI.contains(":")) {
this.subject = queueURI.substring(0, queueURI.indexOf(':'));
queue = queueURI.substring(queueURI.indexOf(':') + 1);
} else {
this.subject = queueURI;
queue = null;
}
LOGGER.info(
String.format(
"Initialized with queueURI=%s, subject=%s, queue=%s",
queueURI, subject, queue));
}
void onMessage(String subject, byte[] data) {
String payload = new String(data);
LOGGER.info(String.format("Received message for %s: %s", subject, payload));
Message dstMsg = new Message();
dstMsg.setId(NUID.nextGlobal());
dstMsg.setPayload(payload);
messages.add(dstMsg);
}
@Override
public Observable<Message> observe() {
LOGGER.info("Observe invoked for queueURI " + queueURI);
observable = true;
mu.lock();
try {
subscribe();
} finally {
mu.unlock();
}
Observable.OnSubscribe<Message> onSubscribe =
subscriber -> {
Observable<Long> interval =
Observable.interval(100, TimeUnit.MILLISECONDS, scheduler);
interval.flatMap(
(Long x) -> {
if (!isRunning()) {
LOGGER.debug(
"Component stopped, skip listening for messages from NATS Queue");
return Observable.from(Collections.emptyList());
} else {
List<Message> available = new LinkedList<>();
messages.drainTo(available);
if (!available.isEmpty()) {
AtomicInteger count = new AtomicInteger(0);
StringBuilder buffer = new StringBuilder();
available.forEach(
msg -> {
buffer.append(msg.getId())
.append("=")
.append(msg.getPayload());
count.incrementAndGet();
if (count.get() < available.size()) {
buffer.append(",");
}
});
LOGGER.info(
String.format(
"Batch from %s to conductor is %s",
subject, buffer.toString()));
}
return Observable.from(available);
}
})
.subscribe(subscriber::onNext, subscriber::onError);
};
return Observable.create(onSubscribe);
}
@Override
public String getType() {
return queueType;
}
@Override
public String getName() {
return queueURI;
}
@Override
public String getURI() {
return queueURI;
}
@Override
public List<String> ack(List<Message> messages) {
return Collections.emptyList();
}
@Override
public void setUnackTimeout(Message message, long unackTimeout) {}
@Override
public long size() {
return messages.size();
}
@Override
public void publish(List<Message> messages) {
messages.forEach(
message -> {
try {
String payload = message.getPayload();
publish(subject, payload.getBytes());
LOGGER.info(String.format("Published message to %s: %s", subject, payload));
} catch (Exception ex) {
LOGGER.error(
"Failed to publish message "
+ message.getPayload()
+ " to "
+ subject,
ex);
throw new RuntimeException(ex);
}
});
}
@Override
public boolean rePublishIfNoAck() {
return true;
}
@Override
public void close() {
LOGGER.info("Closing connection for " + queueURI);
mu.lock();
try {
if (execs != null) {
execs.shutdownNow();
execs = null;
}
closeSubs();
closeConn();
isOpened = false;
} finally {
mu.unlock();
}
}
public void open() {
// do nothing if not closed
if (isOpened) {
return;
}
mu.lock();
try {
try {
connect();
// Re-initiated subscription if existed
if (observable) {
subscribe();
}
} catch (Exception ignore) {
}
execs = Executors.newScheduledThreadPool(1);
execs.scheduleAtFixedRate(this::monitor, 0, 500, TimeUnit.MILLISECONDS);
isOpened = true;
} finally {
mu.unlock();
}
}
private void monitor() {
if (isConnected()) {
return;
}
LOGGER.error("Monitor invoked for " + queueURI);
mu.lock();
try {
closeSubs();
closeConn();
// Connect
connect();
// Re-initiated subscription if existed
if (observable) {
subscribe();
}
} catch (Exception ex) {
LOGGER.error("Monitor failed with " + ex.getMessage() + " for " + queueURI, ex);
} finally {
mu.unlock();
}
}
public boolean isClosed() {
return !isOpened;
}
void ensureConnected() {
if (!isConnected()) {
throw new RuntimeException("No nats connection");
}
}
// Marks this queue as actively listening. NOTE(review): this only flips the
// "running" flag -- it does not call open(); presumably the lifecycle owner
// opens the connection separately. Confirm against callers.
@Override
public void start() {
LOGGER.info("Started listening to {}:{}", getClass().getSimpleName(), queueURI);
running = true;
}
// Marks this queue as no longer listening; the connection stays open.
@Override
public void stop() {
LOGGER.info("Stopped listening to {}:{}", getClass().getSimpleName(), queueURI);
running = false;
}
@Override
public boolean isRunning() {
return running;
}
// Establishes the underlying broker connection.
abstract void connect();
// Reports whether the underlying connection is currently live.
abstract boolean isConnected();
// Sends raw bytes to the given subject over the underlying connection.
abstract void publish(String subject, byte[] data) throws Exception;
// Creates the subscription(s) used when this queue is observable.
abstract void subscribe();
// Tears down any active subscriptions.
abstract void closeSubs();
// Tears down the underlying connection.
abstract void closeConn();
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.queries.TermsFilter;
import org.apache.lucene.search.*;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.RegexpFilter;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatService;
import org.elasticsearch.index.codec.postingsformat.PostingFormats;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatService;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.search.FieldDataTermsFilter;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
*
*/
public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
// Shared defaults for all field mappers.
public static class Defaults {
// Base Lucene field type: indexed, analyzed, not stored, no term vectors,
// norms enabled, postings carry docs + freqs + positions.
public static final FieldType FIELD_TYPE = new FieldType();
// Doc values are off unless explicitly enabled or implied by the
// field data format (see the mapper constructor).
public static final boolean DOC_VALUES = false;
static {
FIELD_TYPE.setIndexed(true);
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setStoreTermVectors(false);
FIELD_TYPE.setOmitNorms(false);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
// Freeze so the shared default instance cannot be mutated by accident.
FIELD_TYPE.freeze();
}
public static final float BOOST = 1.0f;
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
}
/**
 * Base builder for field mappers. Collects field-type flags, analyzers,
 * formats, multi-field definitions and copy_to targets before the concrete
 * subclass builds the mapper. Every setter returns {@code builder} (the
 * concrete subtype) so calls can be chained.
 */
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends Mapper.Builder<T, Y> {
protected final FieldType fieldType;
// Tri-state: null means "not set explicitly"; resolved by the mapper ctor.
protected Boolean docValues;
protected float boost = Defaults.BOOST;
// Records whether omitNorms was set explicitly (vs. left at its default).
protected boolean omitNormsSet = false;
protected String indexName;
protected NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected Boolean includeInAll;
// Records whether indexOptions was set explicitly.
protected boolean indexOptionsSet = false;
protected PostingsFormatProvider postingsProvider;
protected DocValuesFormatProvider docValuesProvider;
protected SimilarityProvider similarity;
protected Loading normsLoading;
@Nullable
protected Settings fieldDataSettings;
protected final MultiFields.Builder multiFieldsBuilder;
protected CopyTo copyTo;
protected Builder(String name, FieldType fieldType) {
super(name);
this.fieldType = fieldType;
multiFieldsBuilder = new MultiFields.Builder();
}
public T index(boolean index) {
this.fieldType.setIndexed(index);
return builder;
}
public T store(boolean store) {
this.fieldType.setStored(store);
return builder;
}
public T docValues(boolean docValues) {
this.docValues = docValues;
return builder;
}
public T storeTermVectors(boolean termVectors) {
if (termVectors) {
this.fieldType.setStoreTermVectors(termVectors);
} // don't set it to false, it is default and might be flipped by a more specific option
return builder;
}
// Enabling offsets implies term vectors; disabling offsets never disables
// term vectors (another option may have turned them on).
public T storeTermVectorOffsets(boolean termVectorOffsets) {
if (termVectorOffsets) {
this.fieldType.setStoreTermVectors(termVectorOffsets);
}
this.fieldType.setStoreTermVectorOffsets(termVectorOffsets);
return builder;
}
// Same implication rule as offsets: positions imply term vectors.
public T storeTermVectorPositions(boolean termVectorPositions) {
if (termVectorPositions) {
this.fieldType.setStoreTermVectors(termVectorPositions);
}
this.fieldType.setStoreTermVectorPositions(termVectorPositions);
return builder;
}
// Same implication rule as offsets: payloads imply term vectors.
public T storeTermVectorPayloads(boolean termVectorPayloads) {
if (termVectorPayloads) {
this.fieldType.setStoreTermVectors(termVectorPayloads);
}
this.fieldType.setStoreTermVectorPayloads(termVectorPayloads);
return builder;
}
public T tokenized(boolean tokenized) {
this.fieldType.setTokenized(tokenized);
return builder;
}
public T boost(float boost) {
this.boost = boost;
return builder;
}
public T omitNorms(boolean omitNorms) {
this.fieldType.setOmitNorms(omitNorms);
this.omitNormsSet = true;
return builder;
}
public T indexOptions(IndexOptions indexOptions) {
this.fieldType.setIndexOptions(indexOptions);
this.indexOptionsSet = true;
return builder;
}
public T indexName(String indexName) {
this.indexName = indexName;
return builder;
}
public T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
this.indexAnalyzer = indexAnalyzer;
return builder;
}
public T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
this.searchAnalyzer = searchAnalyzer;
return builder;
}
public T includeInAll(Boolean includeInAll) {
this.includeInAll = includeInAll;
return builder;
}
public T postingsFormat(PostingsFormatProvider postingsFormat) {
this.postingsProvider = postingsFormat;
return builder;
}
public T docValuesFormat(DocValuesFormatProvider docValuesFormat) {
this.docValuesProvider = docValuesFormat;
return builder;
}
public T similarity(SimilarityProvider similarity) {
this.similarity = similarity;
return builder;
}
public T normsLoading(Loading normsLoading) {
this.normsLoading = normsLoading;
return builder;
}
public T fieldDataSettings(Settings settings) {
this.fieldDataSettings = settings;
return builder;
}
public T multiFieldPathType(ContentPath.Type pathType) {
multiFieldsBuilder.pathType(pathType);
return builder;
}
public T addMultiField(Mapper.Builder mapperBuilder) {
multiFieldsBuilder.add(mapperBuilder);
return builder;
}
public T copyTo(CopyTo copyTo) {
this.copyTo = copyTo;
return builder;
}
// Resolves the full set of names (name, index name, full path, source
// path) for the mapper being built; indexName falls back to the name.
public Names buildNames(BuilderContext context) {
return new Names(name, buildIndexName(context), indexName == null ? name : indexName, buildFullName(context), context.path().sourcePath());
}
public String buildIndexName(BuilderContext context) {
String actualIndexName = indexName == null ? name : indexName;
return context.path().pathAsText(actualIndexName);
}
public String buildFullName(BuilderContext context) {
return context.path().fullPathAsText(name);
}
}
// Per-thread scratch list reused by parse(): parseCreateField() fills it
// and parse() clears it in a finally block, so nothing leaks between
// documents parsed on the same thread.
private static final ThreadLocal<List<Field>> FIELD_LIST = new ThreadLocal<List<Field>>() {
protected List<Field> initialValue() {
return new ArrayList<>(2);
}
};
protected final Names names;
protected float boost;
// Mutable: merge() replaces it to apply changeable flags (e.g. omitNorms).
protected FieldType fieldType;
private final boolean docValues;
protected final NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected PostingsFormatProvider postingsFormat;
protected DocValuesFormatProvider docValuesFormat;
protected final SimilarityProvider similarity;
protected Loading normsLoading;
// Field data settings explicitly supplied by the user; null means defaults.
protected Settings customFieldDataSettings;
protected FieldDataType fieldDataType;
protected final MultiFields multiFields;
protected CopyTo copyTo;
// Convenience constructor: no multi-fields and no copy_to targets;
// delegates to the full constructor with MultiFields.empty() and null.
protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, PostingsFormatProvider postingsFormat,
DocValuesFormatProvider docValuesFormat, SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings) {
this(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, postingsFormat, docValuesFormat, similarity,
normsLoading, fieldDataSettings, indexSettings, MultiFields.empty(), null);
}
// Full constructor. Freezes the field type, resolves analyzers (falling
// back to the keyword analyzer for indexed-but-not-tokenized fields),
// resolves the postings format default, merges custom field data settings
// over the defaults, and decides whether doc values are enabled.
protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, PostingsFormatProvider postingsFormat,
DocValuesFormatProvider docValuesFormat, SimilarityProvider similarity,
Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
this.names = names;
this.boost = boost;
this.fieldType = fieldType;
this.fieldType.freeze();
// automatically set to keyword analyzer if its indexed and not analyzed
if (indexAnalyzer == null && !this.fieldType.tokenized() && this.fieldType.indexed()) {
this.indexAnalyzer = Lucene.KEYWORD_ANALYZER;
} else {
this.indexAnalyzer = indexAnalyzer;
}
// automatically set to keyword analyzer if its indexed and not analyzed
if (searchAnalyzer == null && !this.fieldType.tokenized() && this.fieldType.indexed()) {
this.searchAnalyzer = Lucene.KEYWORD_ANALYZER;
} else {
this.searchAnalyzer = searchAnalyzer;
}
// No explicit postings format: fall back to the mapper's default, if any.
if (postingsFormat == null) {
if (defaultPostingFormat() != null) {
postingsFormat = PostingFormats.getAsProvider(defaultPostingFormat());
}
}
this.postingsFormat = postingsFormat;
this.docValuesFormat = docValuesFormat;
this.similarity = similarity;
this.normsLoading = normsLoading;
this.customFieldDataSettings = fieldDataSettings;
if (fieldDataSettings == null) {
this.fieldDataType = defaultFieldDataType();
} else {
// create a new field data type, with the default settings as well as the "new ones"
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
ImmutableSettings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings)
);
}
// Explicit doc_values wins; otherwise doc values are implied when the
// field data format resolves to the "doc_values" format.
if (docValues != null) {
this.docValues = docValues;
} else if (fieldDataType == null) {
this.docValues = false;
} else {
this.docValues = FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings));
}
this.multiFields = multiFields;
this.copyTo = copyTo;
}
// Default postings format name for this mapper type, or null to use the
// node-level default.
@Nullable
protected String defaultPostingFormat() {
return null;
}
// Default doc-values format name for this mapper type, or null for the
// node-level default.
@Nullable
protected String defaultDocValuesFormat() {
return null;
}
@Override
public String name() {
return names.name();
}
@Override
public Names names() {
return this.names;
}
// The Lucene field type used when none is configured for this mapper type.
public abstract FieldType defaultFieldType();
// The field data type used when no custom field data settings are given.
public abstract FieldDataType defaultFieldDataType();
@Override
public final FieldDataType fieldDataType() {
return fieldDataType;
}
@Override
public FieldType fieldType() {
return fieldType;
}
@Override
public float boost() {
return this.boost;
}
@Override
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
@Override
public Analyzer searchAnalyzer() {
return this.searchAnalyzer;
}
// Quoted text uses the regular search analyzer by default.
@Override
public Analyzer searchQuoteAnalyzer() {
return this.searchAnalyzer;
}
@Override
public SimilarityProvider similarity() {
return similarity;
}
@Override
public CopyTo copyTo() {
return copyTo;
}
// Parses this field from the document: collects Lucene fields into the
// thread-local scratch list, applies the boost unless the subclass manages
// boosting itself, offers each field to the listener, then parses
// multi-fields and copy_to targets. Any failure is wrapped in a
// MapperParsingException carrying the field's full name.
@Override
public void parse(ParseContext context) throws IOException {
final List<Field> fields = FIELD_LIST.get();
assert fields.isEmpty();
try {
parseCreateField(context, fields);
for (Field field : fields) {
if (!customBoost()) {
field.setBoost(boost);
}
if (context.listener().beforeFieldAdded(this, field, context)) {
context.doc().add(field);
}
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
} finally {
// Always clear the shared thread-local list so the next parse starts empty.
fields.clear();
}
multiFields.parse(this, context);
if (copyTo != null) {
copyTo.parse(context);
}
}
/**
 * Parse the field value and populate <code>fields</code>.
 */
protected abstract void parseCreateField(ParseContext context, List<Field> fields) throws IOException;
/**
 * Derived classes can override it to specify that boost value is set by derived classes.
 */
protected boolean customBoost() {
return false;
}
// Notifies the listener of this mapper and of every multi-field sub-mapper.
@Override
public void traverse(FieldMapperListener fieldMapperListener) {
fieldMapperListener.fieldMapper(this);
multiFields.traverse(fieldMapperListener);
}
// Field mappers contain no object mappers, so there is nothing to visit.
@Override
public void traverse(ObjectMapperListener objectMapperListener) {
// nothing to do here...
}
// By default the stored value is returned to searches unchanged.
@Override
public Object valueForSearch(Object value) {
return value;
}
// Converts an arbitrary value into the BytesRef form used in the index.
@Override
public BytesRef indexedValueForSearch(Object value) {
return BytesRefs.toBytesRef(value);
}
// Returning null lets the query parser build the regular term query instead.
@Override
public Query queryStringTermQuery(Term term) {
return null;
}
@Override
public boolean useTermQueryWithQueryString() {
return false;
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
return new TermQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
}
@Override
public Filter termFilter(Object value, @Nullable QueryParseContext context) {
return new TermFilter(names().createIndexNameTerm(indexedValueForSearch(value)));
}
/**
 * Builds a {@link TermsFilter} over this field's index name that matches
 * any of the given values, each converted through
 * {@link #indexedValueForSearch(Object)}.
 */
@Override
public Filter termsFilter(List values, @Nullable QueryParseContext context) {
    final int count = values.size();
    BytesRef[] terms = new BytesRef[count];
    for (int idx = 0; idx < count; idx++) {
        terms[idx] = indexedValueForSearch(values.get(idx));
    }
    return new TermsFilter(names.indexName(), terms);
}
/**
 * A terms filter based on the field data cache
 */
// NOTE(review): context is dereferenced below, so despite the @Nullable
// annotation a null context will NPE -- confirm callers always pass one.
@Override
public Filter fieldDataTermsFilter(List values, @Nullable QueryParseContext context) {
// create with initial size large enough to avoid rehashing
ObjectOpenHashSet<BytesRef> terms =
new ObjectOpenHashSet<>((int) (values.size() * (1 + ObjectOpenHashSet.DEFAULT_LOAD_FACTOR)));
for (int i = 0, len = values.size(); i < len; i++) {
terms.add(indexedValueForSearch(values.get(i)));
}
return FieldDataTermsFilter.newBytes(context.getForField(this), terms);
}
// Range query over the raw indexed terms; a null bound means open-ended.
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeQuery(names.indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
}
// Filter counterpart of rangeQuery; same open-ended-bound convention.
@Override
public Filter rangeFilter(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeFilter(names.indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
}
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
return new FuzzyQuery(names.createIndexNameTerm(indexedValueForSearch(value)), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
}
// Prefix query; an explicit rewrite method overrides the query's default.
@Override
public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
PrefixQuery query = new PrefixQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
@Override
public Filter prefixFilter(Object value, @Nullable QueryParseContext context) {
return new PrefixFilter(names().createIndexNameTerm(indexedValueForSearch(value)));
}
// Regexp query; an explicit rewrite method overrides the query's default.
@Override
public Query regexpQuery(Object value, int flags, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
RegexpQuery query = new RegexpQuery(names().createIndexNameTerm(indexedValueForSearch(value)), flags);
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
@Override
public Filter regexpFilter(Object value, int flags, @Nullable QueryParseContext parseContext) {
return new RegexpFilter(names().createIndexNameTerm(indexedValueForSearch(value)), flags);
}
// No special handling of null values by default; subclasses may override.
@Override
public Filter nullValueFilter() {
return null;
}
// Merges another mapper of the same concrete type into this one. Every
// incompatible, non-changeable setting is recorded as a conflict on the
// merge context; when the merge is not a simulation, the changeable values
// (norms flag, boost, copy_to, formats, search analyzer, field data
// settings) are applied to this mapper.
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof AbstractFieldMapper) {
mergedType = ((AbstractFieldMapper) mergeWith).contentType();
}
mergeContext.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
// different types, return
return;
}
AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
if (this.fieldType().indexed() != fieldMergeWith.fieldType().indexed() || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index values");
}
if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store values");
}
if (!this.hasDocValues() && fieldMergeWith.hasDocValues()) {
// don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitely set
// when the doc_values field data format is configured
mergeContext.addConflict("mapper [" + names.fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
}
// Norms can be disabled but never re-enabled on an existing field.
if (this.fieldType().omitNorms() && !fieldMergeWith.fieldType.omitNorms()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] cannot enable norms (`norms.enabled`)");
}
// NOTE(review): tokenized() was already compared above together with
// indexed(), so a tokenize change reports two conflicts -- confirm the
// duplication is intended.
if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different tokenize values");
}
if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector values");
}
if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_offsets values");
}
if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_positions values");
}
if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_payloads values");
}
// Index analyzers must match by name (null counts as its own value).
if (this.indexAnalyzer == null) {
if (fieldMergeWith.indexAnalyzer != null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
}
} else if (fieldMergeWith.indexAnalyzer == null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
} else if (!this.indexAnalyzer.name().equals(fieldMergeWith.indexAnalyzer.name())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
}
if (!this.names().equals(fieldMergeWith.names())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_name");
}
if (this.similarity == null) {
if (fieldMergeWith.similarity() != null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
} else if (fieldMergeWith.similarity() == null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
} else if (!this.similarity().equals(fieldMergeWith.similarity())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
multiFields.merge(mergeWith, mergeContext);
if (!mergeContext.mergeFlags().simulate()) {
// apply changeable values
// fieldType is frozen, so copy it before mutating and re-freeze after.
this.fieldType = new FieldType(this.fieldType);
this.fieldType.setOmitNorms(fieldMergeWith.fieldType.omitNorms());
this.fieldType.freeze();
this.boost = fieldMergeWith.boost;
this.normsLoading = fieldMergeWith.normsLoading;
this.copyTo = fieldMergeWith.copyTo;
if (fieldMergeWith.postingsFormat != null) {
this.postingsFormat = fieldMergeWith.postingsFormat;
}
if (fieldMergeWith.docValuesFormat != null) {
this.docValuesFormat = fieldMergeWith.docValuesFormat;
}
if (fieldMergeWith.searchAnalyzer != null) {
this.searchAnalyzer = fieldMergeWith.searchAnalyzer;
}
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
ImmutableSettings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
);
}
}
}
}
@Override
public PostingsFormatProvider postingsFormatProvider() {
return postingsFormat;
}
@Override
public DocValuesFormatProvider docValuesFormatProvider() {
return docValuesFormat;
}
// Serializes this mapper as { "<field name>": { ...body... } }; the body
// is produced by doXContentBody().
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(names.name());
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
doXContentBody(builder, includeDefaults, params);
return builder.endObject();
}
/**
 * Writes the body of this field's mapping (everything inside the field's
 * object). When {@code includeDefaults} is true, settings equal to their
 * defaults are emitted as well.
 *
 * Fix: the default-similarity key was misspelled "similariry", so
 * serializing with include_defaults produced an invalid mapping key.
 */
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
    builder.field("type", contentType());
    if (includeDefaults || !names.name().equals(names.indexNameClean())) {
        builder.field("index_name", names.indexNameClean());
    }
    if (includeDefaults || boost != 1.0f) {
        builder.field("boost", boost);
    }
    FieldType defaultFieldType = defaultFieldType();
    if (includeDefaults || fieldType.indexed() != defaultFieldType.indexed() ||
            fieldType.tokenized() != defaultFieldType.tokenized()) {
        builder.field("index", indexTokenizeOptionToString(fieldType.indexed(), fieldType.tokenized()));
    }
    if (includeDefaults || fieldType.stored() != defaultFieldType.stored()) {
        builder.field("store", fieldType.stored());
    }
    if (includeDefaults || hasDocValues() != Defaults.DOC_VALUES) {
        builder.field(TypeParsers.DOC_VALUES, docValues);
    }
    if (includeDefaults || fieldType.storeTermVectors() != defaultFieldType.storeTermVectors()) {
        builder.field("term_vector", termVectorOptionsToString(fieldType));
    }
    if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || normsLoading != null) {
        builder.startObject("norms");
        if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms()) {
            builder.field("enabled", !fieldType.omitNorms());
        }
        if (normsLoading != null) {
            builder.field(Loading.KEY, normsLoading);
        }
        builder.endObject();
    }
    if (includeDefaults || fieldType.indexOptions() != defaultFieldType.indexOptions()) {
        builder.field("index_options", indexOptionToString(fieldType.indexOptions()));
    }
    // Analyzers: collapse to a single "analyzer" key when index and search
    // analyzers have the same name; internal analyzers (name starts with
    // "_") and the "default" analyzer are omitted unless defaults are asked.
    if (indexAnalyzer == null && searchAnalyzer == null) {
        if (includeDefaults) {
            builder.field("analyzer", "default");
        }
    } else if (indexAnalyzer == null) {
        // searchAnalyzer != null
        if (includeDefaults || (!searchAnalyzer.name().startsWith("_") && !searchAnalyzer.name().equals("default"))) {
            builder.field("search_analyzer", searchAnalyzer.name());
        }
    } else if (searchAnalyzer == null) {
        // indexAnalyzer != null
        if (includeDefaults || (!indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default"))) {
            builder.field("index_analyzer", indexAnalyzer.name());
        }
    } else if (indexAnalyzer.name().equals(searchAnalyzer.name())) {
        // indexAnalyzer == searchAnalyzer
        if (includeDefaults || (!indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default"))) {
            builder.field("analyzer", indexAnalyzer.name());
        }
    } else {
        // both are there but different
        if (includeDefaults || (!indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default"))) {
            builder.field("index_analyzer", indexAnalyzer.name());
        }
        if (includeDefaults || (!searchAnalyzer.name().startsWith("_") && !searchAnalyzer.name().equals("default"))) {
            builder.field("search_analyzer", searchAnalyzer.name());
        }
    }
    if (postingsFormat != null) {
        if (includeDefaults || !postingsFormat.name().equals(defaultPostingFormat())) {
            builder.field("postings_format", postingsFormat.name());
        }
    } else if (includeDefaults) {
        String format = defaultPostingFormat();
        if (format == null) {
            format = PostingsFormatService.DEFAULT_FORMAT;
        }
        builder.field("postings_format", format);
    }
    if (docValuesFormat != null) {
        if (includeDefaults || !docValuesFormat.name().equals(defaultDocValuesFormat())) {
            builder.field(DOC_VALUES_FORMAT, docValuesFormat.name());
        }
    } else if (includeDefaults) {
        String format = defaultDocValuesFormat();
        if (format == null) {
            format = DocValuesFormatService.DEFAULT_FORMAT;
        }
        builder.field(DOC_VALUES_FORMAT, format);
    }
    if (similarity() != null) {
        builder.field("similarity", similarity().name());
    } else if (includeDefaults) {
        // fixed: key was previously emitted as the misspelled "similariry"
        builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
    }
    if (customFieldDataSettings != null) {
        builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
    } else if (includeDefaults) {
        builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
    }
    multiFields.toXContent(builder, params);
    if (copyTo != null) {
        copyTo.toXContent(builder, params);
    }
}
/**
 * Maps a Lucene {@code IndexOptions} value to its mapping-string form
 * ("docs", "freqs", "positions" or "offsets").
 *
 * @throws ElasticsearchIllegalArgumentException for an unrecognized option
 */
protected static String indexOptionToString(IndexOptions indexOption) {
    switch (indexOption) {
        case DOCS_ONLY:
            return TypeParsers.INDEX_OPTIONS_DOCS;
        case DOCS_AND_FREQS:
            return TypeParsers.INDEX_OPTIONS_FREQS;
        case DOCS_AND_FREQS_AND_POSITIONS:
            return TypeParsers.INDEX_OPTIONS_POSITIONS;
        case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS:
            return TypeParsers.INDEX_OPTIONS_OFFSETS;
        default:
            throw new ElasticsearchIllegalArgumentException("Unknown IndexOptions [" + indexOption + "]");
    }
}
/**
 * Renders a Lucene {@code FieldType}'s term-vector flags as the mapping
 * string: "no", "yes", "with_offsets", or
 * "with[_positions][_offsets][_payloads]".
 */
public static String termVectorOptionsToString(FieldType fieldType) {
    if (!fieldType.storeTermVectors()) {
        return "no";
    }
    boolean offsets = fieldType.storeTermVectorOffsets();
    boolean positions = fieldType.storeTermVectorPositions();
    if (!offsets && !positions) {
        return "yes";
    }
    if (offsets && !positions) {
        return "with_offsets";
    }
    StringBuilder option = new StringBuilder("with");
    if (positions) {
        option.append("_positions");
    }
    if (offsets) {
        option.append("_offsets");
    }
    if (fieldType.storeTermVectorPayloads()) {
        option.append("_payloads");
    }
    return option.toString();
}
/**
 * Renders the index/tokenize flags as the mapping "index" value:
 * "no" when not indexed, "analyzed" when indexed and tokenized,
 * "not_analyzed" when indexed but not tokenized.
 */
protected static String indexTokenizeOptionToString(boolean indexed, boolean tokenized) {
    if (indexed) {
        return tokenized ? "analyzed" : "not_analyzed";
    }
    return "no";
}
// The mapping "type" name of the concrete mapper (e.g. "string", "long").
protected abstract String contentType();
@Override
public void close() {
multiFields.close();
}
@Override
public boolean isNumeric() {
return false;
}
@Override
public boolean isSortable() {
return true;
}
// True when doc values are enabled, whether explicitly or implied by the
// field data format (decided in the constructor).
public boolean hasDocValues() {
return docValues;
}
// An explicitly configured norms loading wins; otherwise use the default.
@Override
public Loading normsLoading(Loading defaultLoading) {
return normsLoading == null ? defaultLoading : normsLoading;
}
public static class MultiFields {
public static MultiFields empty() {
return new MultiFields(Defaults.PATH_TYPE, ImmutableOpenMap.<String, Mapper>of());
}
public static class Builder {
private final ImmutableOpenMap.Builder<String, Mapper.Builder> mapperBuilders = ImmutableOpenMap.builder();
private ContentPath.Type pathType = Defaults.PATH_TYPE;
public Builder pathType(ContentPath.Type pathType) {
this.pathType = pathType;
return this;
}
public Builder add(Mapper.Builder builder) {
mapperBuilders.put(builder.name(), builder);
return this;
}
@SuppressWarnings("unchecked")
public MultiFields build(AbstractFieldMapper.Builder mainFieldBuilder, BuilderContext context) {
if (pathType == Defaults.PATH_TYPE && mapperBuilders.isEmpty()) {
return empty();
} else if (mapperBuilders.isEmpty()) {
return new MultiFields(pathType, ImmutableOpenMap.<String, Mapper>of());
} else {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainFieldBuilder.name());
ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders;
for (ObjectObjectCursor<String, Mapper.Builder> cursor : this.mapperBuilders) {
String key = cursor.key;
Mapper.Builder value = cursor.value;
mapperBuilders.put(key, value.build(context));
}
context.path().remove();
context.path().pathType(origPathType);
ImmutableOpenMap.Builder<String, Mapper> mappers = mapperBuilders.cast();
return new MultiFields(pathType, mappers.build());
}
}
}
private final ContentPath.Type pathType;
private volatile ImmutableOpenMap<String, Mapper> mappers;
public MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, Mapper> mappers) {
this.pathType = pathType;
this.mappers = mappers;
// we disable the all in multi-field mappers
for (ObjectCursor<Mapper> cursor : mappers.values()) {
Mapper mapper = cursor.value;
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
}
}
}
public void parse(AbstractFieldMapper mainField, ParseContext context) throws IOException {
if (mappers.isEmpty()) {
return;
}
context.setWithinMultiFields();
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainField.name());
for (ObjectCursor<Mapper> cursor : mappers.values()) {
cursor.value.parse(context);
}
context.path().remove();
context.path().pathType(origPathType);
context.clearWithinMultiFields();
}
// No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
AbstractFieldMapper mergeWithMultiField = (AbstractFieldMapper) mergeWith;
List<FieldMapper> newFieldMappers = null;
ImmutableOpenMap.Builder<String, Mapper> newMappersBuilder = null;
for (ObjectCursor<Mapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
Mapper mergeWithMapper = cursor.value;
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeContext.mergeFlags().simulate()) {
// we disable the all in multi-field mappers
if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
}
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(mergeWithMapper.name(), mergeWithMapper);
if (mergeWithMapper instanceof AbstractFieldMapper) {
if (newFieldMappers == null) {
newFieldMappers = new ArrayList<>(2);
}
newFieldMappers.add((FieldMapper) mergeWithMapper);
}
}
} else {
mergeIntoMapper.merge(mergeWithMapper, mergeContext);
}
}
// first add all field mappers
if (newFieldMappers != null) {
mergeContext.docMapper().addFieldMappers(newFieldMappers);
}
// now publish mappers
if (newMappersBuilder != null) {
mappers = newMappersBuilder.build();
}
}
/** Visits every contained sub-field mapper with the given listener. */
public void traverse(FieldMapperListener fieldMapperListener) {
    for (ObjectCursor<Mapper> entry : mappers.values()) {
        entry.value.traverse(fieldMapperListener);
    }
}
/** Closes every contained sub-field mapper. */
public void close() {
    for (ObjectCursor<Mapper> entry : mappers.values()) {
        entry.value.close();
    }
}
/**
 * Serializes the non-default path type and, when present, the nested
 * {@code fields} object containing every sub-field mapping.
 */
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    if (pathType != Defaults.PATH_TYPE) {
        // Only emit "path" when it differs from the default.
        builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
    }
    if (mappers.isEmpty()) {
        return builder;
    }
    builder.startObject("fields");
    for (ObjectCursor<Mapper> entry : mappers.values()) {
        entry.value.toXContent(builder, params);
    }
    builder.endObject();
    return builder;
}
}
/**
 * Represents a list of fields with optional boost factor where the current field should be copied to
 */
public static class CopyTo {

    // Target field names the current value is copied into. Names may be
    // dot-separated paths (handled in parse(String, ParseContext)).
    private final ImmutableList<String> copyToFields;

    private CopyTo(ImmutableList<String> copyToFields) {
        this.copyToFields = copyToFields;
    }

    /**
     * Creates instances of the fields that the current field should be copied to
     */
    public void parse(ParseContext context) throws IOException {
        // Guard against recursion: never start a copy while already parsing
        // inside a copy_to target.
        if (!context.isWithinCopyTo()) {
            for (String field : copyToFields) {
                parse(field, context);
            }
        }
    }

    /** Serializes the "copy_to" array when at least one target is configured. */
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (!copyToFields.isEmpty()) {
            builder.startArray("copy_to");
            for (String field : copyToFields) {
                builder.value(field);
            }
            builder.endArray();
        }
        return builder;
    }

    /** Accumulates copy_to target field names and builds an immutable CopyTo. */
    public static class Builder {
        private final ImmutableList.Builder<String> copyToBuilders = ImmutableList.builder();

        public Builder add(String field) {
            copyToBuilders.add(field);
            return this;
        }

        public CopyTo build() {
            return new CopyTo(copyToBuilders.build());
        }
    }

    /** Returns the configured copy_to target field names (possibly empty). */
    public ImmutableList<String> copyToFields() {
        return copyToFields;
    }

    /**
     * Creates an copy of the current field with given field name and boost
     */
    // NOTE(review): clearWithinCopyTo() at the bottom is NOT in a finally
    // block, so an exception thrown while copying leaves the flag set on the
    // context — confirm whether the context is discarded on parse failure.
    public void parse(String field, ParseContext context) throws IOException {
        context.setWithinCopyTo();
        FieldMappers mappers = context.docMapper().mappers().indexName(field);
        if (mappers != null && !mappers.isEmpty()) {
            // The target field already has a mapping: parse directly into it.
            mappers.mapper().parse(context);
        } else {
            int posDot = field.lastIndexOf('.');
            if (posDot > 0) {
                // Compound name
                String objectPath = field.substring(0, posDot);
                String fieldPath = field.substring(posDot + 1);
                ObjectMapper mapper = context.docMapper().objectMappers().get(objectPath);
                if (mapper == null) {
                    //TODO: Create an object dynamically?
                    throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]");
                }
                // Temporarily switch to FULL path type and descend into the
                // target object so the dynamic field is created in the right place.
                ContentPath.Type origPathType = context.path().pathType();
                context.path().pathType(ContentPath.Type.FULL);
                context.path().add(objectPath);
                // We might be in dynamically created field already, so need to clean withinNewMapper flag
                // and then restore it, so we wouldn't miss new mappers created from copy_to fields
                boolean origWithinNewMapper = context.isWithinNewMapper();
                context.clearWithinNewMapper();
                try {
                    mapper.parseDynamicValue(context, fieldPath, context.parser().currentToken());
                } finally {
                    if (origWithinNewMapper) {
                        context.setWithinNewMapper();
                    } else {
                        context.clearWithinNewMapper();
                    }
                    // Restore path state even if dynamic parsing failed.
                    context.path().remove();
                    context.path().pathType(origPathType);
                }
            } else {
                // Simple (non-compound) name: create the dynamic field at the root.
                // We might be in dynamically created field already, so need to clean withinNewMapper flag
                // and then restore it, so we wouldn't miss new mappers created from copy_to fields
                boolean origWithinNewMapper = context.isWithinNewMapper();
                context.clearWithinNewMapper();
                try {
                    context.docMapper().root().parseDynamicValue(context, field, context.parser().currentToken());
                } finally {
                    if (origWithinNewMapper) {
                        context.setWithinNewMapper();
                    } else {
                        context.clearWithinNewMapper();
                    }
                }
            }
        }
        context.clearWithinCopyTo();
    }
}
}
| |
/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.network;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.CloudProvider;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.OperationNotSupportedException;
import org.dasein.cloud.ProviderContext;
import org.dasein.cloud.Requirement;
import org.dasein.cloud.ResourceStatus;
import org.dasein.cloud.identity.ServiceAction;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
/**
* Provides a basic implementation of load balancer support that you can extend and customize to support your cloud.
* <p>Created by George Reese: 3/7/13 9:48 PM</p>
* @author George Reese
* @version 2013.04 initial version
* @since 2013.04
*/
public abstract class AbstractLoadBalancerSupport<T extends CloudProvider> implements LoadBalancerSupport {
    // Set once in the constructor and never reassigned, so it can be final.
    private final T provider;

    public AbstractLoadBalancerSupport(@Nonnull T provider) {
        this.provider = provider;
    }

    @Override
    public void addDataCenters(@Nonnull String toLoadBalancerId, @Nonnull String ... dataCenterIdsToAdd) throws CloudException, InternalException {
        // Either way this default implementation is unsupported; the message
        // just distinguishes "not implemented" from "not applicable".
        if( getCapabilities().isDataCenterLimited() ) {
            throw new OperationNotSupportedException("Adding data centers has not been implemented for " + getContext().getRegionId() + " of " + getProvider().getCloudName());
        }
        else {
            throw new OperationNotSupportedException("Load balancers are not data-center constrained in " + getContext().getRegionId() + " of " + getProvider().getCloudName());
        }
    }

    @Override
    public void addIPEndpoints(@Nonnull String toLoadBalancerId, @Nonnull String ... ipAddresses) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Adding IP endpoints to an existing load balancer is not currently implemented for " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Override
    public void addServers(@Nonnull String toLoadBalancerId, @Nonnull String ... serverIdsToAdd) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Adding VM endpoints to an existing load balancer is not currently implemented for " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Override
    @Deprecated
    public @Nonnull String create(@Nonnull String name, @Nonnull String description, @Nullable String addressId, @Nullable String[] dataCenterIds, @Nullable LbListener[] listeners, @Nullable String[] serverIds, @Nullable String[] subnetIds, @Nullable LbType type) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Load balancer creation is not implemented in " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    /**
     * Default bridge from the options-based creation API to the deprecated
     * positional {@link #create} method. Only VM endpoints are forwarded as
     * server IDs; IP endpoints are ignored by the legacy signature.
     */
    @Override
    public @Nonnull String createLoadBalancer(@Nonnull LoadBalancerCreateOptions options) throws CloudException, InternalException {
        ArrayList<String> serverIds = new ArrayList<String>();
        for( LoadBalancerEndpoint endpoint : options.getEndpoints() ) {
            if( endpoint.getEndpointType().equals(LbEndpointType.VM) ) {
                serverIds.add(endpoint.getEndpointValue());
            }
        }
        //noinspection deprecation
        return create(options.getName(), options.getDescription(), options.getProviderIpAddressId(), options.getProviderDataCenterIds(), options.getListeners(), serverIds.toArray(new String[serverIds.size()]), options.getProviderSubnetIds(), options.getType());
    }

    @Override
    public SSLCertificate createSSLCertificate(@Nonnull SSLCertificateCreateOptions options) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Creating a server certificate is not implemented in " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Override
    public @Nonnull LoadBalancerAddressType getAddressType() throws CloudException, InternalException {
        return LoadBalancerAddressType.DNS;
    }

    /**
     * @return the current authentication context for any calls through this support object
     * @throws CloudException no context was set
     */
    protected @Nonnull ProviderContext getContext() throws CloudException {
        ProviderContext ctx = getProvider().getContext();
        if( ctx == null ) {
            throw new CloudException("No context was specified for this request");
        }
        return ctx;
    }

    /**
     * Naive default lookup: scans {@link #listLoadBalancers()} for a matching ID.
     * @return the matching load balancer, or {@code null} if none is found
     */
    @Override
    public LoadBalancer getLoadBalancer(@Nonnull String loadBalancerId) throws CloudException, InternalException {
        for( LoadBalancer lb : listLoadBalancers() ) {
            if( loadBalancerId.equals(lb.getProviderLoadBalancerId()) ) {
                return lb;
            }
        }
        return null;
    }

    /** Converts a VM endpoint into the deprecated LoadBalancerServer representation. */
    private @Nonnull LoadBalancerServer toLoadBalancerServer(@Nonnull LoadBalancerEndpoint endpoint) {
        LoadBalancerServer server = new LoadBalancerServer();

        server.setCurrentState(LoadBalancerServerState.valueOf(endpoint.getCurrentState().name()));
        server.setCurrentStateDescription(endpoint.getStateDescription());
        server.setCurrentStateReason(endpoint.getStateReason());
        return server;
    }

    @Override
    @Deprecated
    public @Nonnull Iterable<LoadBalancerServer> getLoadBalancerServerHealth(@Nonnull String loadBalancerId) throws CloudException, InternalException {
        ArrayList<LoadBalancerServer> servers = new ArrayList<LoadBalancerServer>();

        for( LoadBalancerEndpoint endpoint : listEndpoints(loadBalancerId) ) {
            if( endpoint.getEndpointType().equals(LbEndpointType.VM) ) {
                servers.add(toLoadBalancerServer(endpoint));
            }
        }
        return servers;
    }

    @Override
    @Deprecated
    public @Nonnull Iterable<LoadBalancerServer> getLoadBalancerServerHealth(@Nonnull String loadBalancerId, @Nonnull String... serverIdsToCheck) throws CloudException, InternalException {
        ArrayList<LoadBalancerServer> servers = new ArrayList<LoadBalancerServer>();

        for( LoadBalancerEndpoint endpoint : listEndpoints(loadBalancerId) ) {
            if( endpoint.getEndpointType().equals(LbEndpointType.VM) ) {
                boolean included = false;

                for( String id : serverIdsToCheck ) {
                    if( id.equals(endpoint.getEndpointValue()) ) {
                        included = true;
                        break;
                    }
                }
                if( included ) {
                    servers.add(toLoadBalancerServer(endpoint));
                }
            }
        }
        return servers;
    }

    @Override
    public @Nonnegative int getMaxPublicPorts() throws CloudException, InternalException {
        return 1;
    }

    /**
     * @return the provider object associated with any calls through this support object
     */
    protected final @Nonnull T getProvider() {
        return provider;
    }

    @Override
    public @Nullable SSLCertificate getSSLCertificate(@Nonnull String certificateName) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Getting server certificates is not implemented in " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Deprecated
    public @Nonnull Requirement identifyEndpointsOnCreateRequirement() throws CloudException, InternalException {
        return getCapabilities().identifyEndpointsOnCreateRequirement();
    }

    @Override
    @Deprecated
    public @Nonnull Requirement identifyListenersOnCreateRequirement() throws CloudException, InternalException {
        return getCapabilities().identifyListenersOnCreateRequirement();
    }

    @Override
    @Deprecated
    public boolean isAddressAssignedByProvider() throws CloudException, InternalException {
        return getCapabilities().isAddressAssignedByProvider();
    }

    @Override
    @Deprecated
    public boolean isDataCenterLimited() throws CloudException, InternalException {
        return getCapabilities().isDataCenterLimited();
    }

    @Override
    public @Nonnull Iterable<LoadBalancer> listLoadBalancers() throws CloudException, InternalException {
        return Collections.emptyList();
    }

    @Override
    public @Nonnull Iterable<ResourceStatus> listLoadBalancerStatus() throws CloudException, InternalException {
        ArrayList<ResourceStatus> status = new ArrayList<ResourceStatus>();

        for( LoadBalancer lb : listLoadBalancers() ) {
            status.add(new ResourceStatus(lb.getProviderLoadBalancerId(), lb.getCurrentState()));
        }
        return status;
    }

    /**
     * Default implementation derived from the deprecated provider server IDs of the
     * load balancer; every ID is reported as an ACTIVE VM endpoint.
     * @throws CloudException no load balancer exists with the given ID
     */
    @Override
    public @Nonnull Iterable<LoadBalancerEndpoint> listEndpoints(@Nonnull String forLoadBalancerId) throws CloudException, InternalException {
        ArrayList<LoadBalancerEndpoint> endpoints = new ArrayList<LoadBalancerEndpoint>();
        LoadBalancer lb = getLoadBalancer(forLoadBalancerId);

        if( lb == null ) {
            throw new CloudException("No such load balancer: " + forLoadBalancerId);
        }
        @SuppressWarnings("deprecation") String[] ids = lb.getProviderServerIds();

        //noinspection ConstantConditions
        if( ids != null ) {
            for( String id : ids ) {
                endpoints.add(LoadBalancerEndpoint.getInstance(LbEndpointType.VM, id, LbEndpointState.ACTIVE));
            }
        }
        return endpoints;
    }

    @Override
    public @Nonnull Iterable<LoadBalancerEndpoint> listEndpoints(@Nonnull String forLoadBalancerId, @Nonnull LbEndpointType type, @Nonnull String ... endpoints) throws CloudException, InternalException {
        ArrayList<LoadBalancerEndpoint> matches = new ArrayList<LoadBalancerEndpoint>();

        for( LoadBalancerEndpoint endpoint : listEndpoints(forLoadBalancerId) ) {
            if( endpoint.getEndpointType().equals(type) ) {
                boolean included = false;

                for( String value : endpoints ) {
                    if( value.equals(endpoint.getEndpointValue()) ) {
                        included = true;
                        break;
                    }
                }
                if( included ) {
                    matches.add(endpoint);
                }
            }
        }
        return matches;
    }

    @Override
    public @Nonnull Iterable<SSLCertificate> listSSLCertificates() throws CloudException, InternalException {
        throw new OperationNotSupportedException("Listing server certificates is not implemented in " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Override
    @Deprecated
    public @Nonnull Iterable<LbAlgorithm> listSupportedAlgorithms() throws CloudException, InternalException {
        return getCapabilities().listSupportedAlgorithms();
    }

    @Override
    @Deprecated
    public @Nonnull Iterable<LbEndpointType> listSupportedEndpointTypes() throws CloudException, InternalException {
        return getCapabilities().listSupportedEndpointTypes();
    }

    @Override
    @Deprecated
    public @Nonnull Iterable<LbPersistence> listSupportedPersistenceOptions() throws CloudException, InternalException {
        return getCapabilities().listSupportedPersistenceOptions();
    }

    @Override
    @Deprecated
    public @Nonnull Iterable<LbProtocol> listSupportedProtocols() throws CloudException, InternalException {
        return getCapabilities().listSupportedProtocols();
    }

    @Override
    @Deprecated
    public @Nonnull Iterable<IPVersion> listSupportedIPVersions() throws CloudException, InternalException {
        return getCapabilities().listSupportedIPVersions();
    }

    @Override
    public @Nonnull String[] mapServiceAction(@Nonnull ServiceAction action) {
        return new String[0];
    }

    @Override
    @Deprecated
    public void remove(@Nonnull String loadBalancerId) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Load balancer removal is not implemented in " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Override
    public void removeDataCenters(@Nonnull String fromLoadBalancerId, @Nonnull String... dataCenterIdsToRemove) throws CloudException, InternalException {
        if( getCapabilities().isDataCenterLimited() ) {
            throw new OperationNotSupportedException("Removing data centers has not been implemented for " + getContext().getRegionId() + " of " + getProvider().getCloudName());
        }
        else {
            throw new OperationNotSupportedException("Load balancers are not data-center constrained in " + getContext().getRegionId() + " of " + getProvider().getCloudName());
        }
    }

    @Override
    public void removeIPEndpoints(@Nonnull String fromLoadBalancerId, @Nonnull String ... addresses) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Removing IP endpoints from an existing load balancer is not currently implemented for " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Override
    public void removeLoadBalancer(@Nonnull String loadBalancerId) throws CloudException, InternalException {
        // Delegates to the deprecated removal API so legacy implementations keep working.
        //noinspection deprecation
        remove(loadBalancerId);
    }

    @Override
    public void removeSSLCertificate(@Nonnull String certificateName) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Removing server certificate is not implemented in " +
                getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Override
    public void removeServers(@Nonnull String fromLoadBalancerId, @Nonnull String... serverIdsToRemove) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Removing VM endpoints from an existing load balancer is not currently implemented for " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Override
    @Deprecated
    public final boolean requiresListenerOnCreate() throws CloudException, InternalException {
        return getCapabilities().identifyListenersOnCreateRequirement().equals(Requirement.REQUIRED);
    }

    @Override
    @Deprecated
    public final boolean requiresServerOnCreate() throws CloudException, InternalException {
        return getCapabilities().identifyEndpointsOnCreateRequirement().equals(Requirement.REQUIRED);
    }

    @Override
    public void setSSLCertificate( @Nonnull SetLoadBalancerSSLCertificateOptions options ) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Setting SSL certificate is not implemented in " + getContext().getRegionId() + " of " + getProvider().getCloudName());
    }

    @Override
    @Deprecated
    public boolean supportsAddingEndpoints() throws CloudException, InternalException {
        return getCapabilities().supportsAddingEndpoints();
    }

    @Override
    @Deprecated
    public boolean supportsMonitoring() throws CloudException, InternalException {
        return getCapabilities().supportsMonitoring();
    }

    @Override
    @Deprecated
    public boolean supportsMultipleTrafficTypes() throws CloudException, InternalException {
        return getCapabilities().supportsMultipleTrafficTypes();
    }

    @Override
    public LoadBalancerHealthCheck createLoadBalancerHealthCheck(@Nullable String name, @Nullable String description, @Nullable String host, @Nullable LoadBalancerHealthCheck.HCProtocol protocol, int port, @Nullable String path, int interval, int timeout, int healthyCount, int unhealthyCount) throws CloudException, InternalException{
        throw new OperationNotSupportedException("Health Checks have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    public LoadBalancerHealthCheck createLoadBalancerHealthCheck(@Nonnull HealthCheckOptions options) throws CloudException, InternalException{
        throw new OperationNotSupportedException("Health Checks have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    public void attachHealthCheckToLoadBalancer(@Nonnull String providerLoadBalancerId, @Nonnull String providerLBHealthCheckId)throws CloudException, InternalException{
        throw new OperationNotSupportedException("Health Checks have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    public Iterable<LoadBalancerHealthCheck> listLBHealthChecks(@Nullable HealthCheckFilterOptions opts) throws CloudException, InternalException{
        throw new OperationNotSupportedException("Health Checks have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    public LoadBalancerHealthCheck getLoadBalancerHealthCheck(@Nonnull String providerLBHealthCheckId, @Nullable String providerLoadBalancerId)throws CloudException, InternalException{
        throw new OperationNotSupportedException("Health Checks have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    @Deprecated
    public HashMap<String, String> getInstanceHealth(@Nonnull String providerLoadBalancerId, @Nullable String providerVirtualMachineId) throws CloudException, InternalException{
        throw new OperationNotSupportedException("Health Checks have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    public LoadBalancerHealthCheck modifyHealthCheck(@Nonnull String providerLBHealthCheckId, @Nonnull HealthCheckOptions options) throws InternalException, CloudException{
        throw new OperationNotSupportedException("Health Checks have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    public void removeLoadBalancerHealthCheck(@Nonnull String providerLoadBalancerId) throws CloudException, InternalException{
        throw new OperationNotSupportedException("Health Checks have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    public void detatchHealthCheck(String loadBalancerId, String heathcheckId) throws CloudException, InternalException{
        throw new OperationNotSupportedException("Health Checks have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    @Deprecated
    public boolean healthCheckRequiresLoadBalancer() throws CloudException, InternalException{
        return getCapabilities().healthCheckRequiresLoadBalancer();
    }

    @Override
    @Deprecated
    public @Nonnull String getProviderTermForLoadBalancer( @Nonnull Locale locale ) {
        // FIX: the original caught and silently swallowed both exceptions,
        // discarding the cause; chain it so diagnostics are preserved.
        try {
            return getCapabilities().getProviderTermForLoadBalancer(locale);
        } catch( CloudException e ) {
            throw new RuntimeException("Unable to get a provider term for load balancer.", e);
        } catch( InternalException e ) {
            throw new RuntimeException("Unable to get a provider term for load balancer.", e);
        }
    }

    @Override
    public void setFirewalls(@Nonnull String providerLoadBalancerId, @Nonnull String... firewallIds) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Setting firewalls have not been implemented for " + getProvider().getCloudName());
    }

    @Override
    public void attachLoadBalancerToSubnets(@Nonnull String toLoadBalancerId, @Nonnull String... subnetIdsToAdd) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Attaching load balancer to subnets has not been implemented for " + getProvider().getCloudName());
    }

    @Override
    public void detachLoadBalancerFromSubnets(@Nonnull String fromLoadBalancerId, @Nonnull String... subnetIdsToDelete) throws CloudException, InternalException {
        throw new OperationNotSupportedException("Detaching load balancer to subnets has not been implemented for " + getProvider().getCloudName());
    }
}
| |
/*
* Copyright 2011 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.j2objc.gen;
import com.google.devtools.j2objc.GenerationTest;
import com.google.devtools.j2objc.Options;
import java.io.IOException;
/**
* Tests for {@link ObjectiveCHeaderGenerator}.
*
* @author Tom Ball
*/
public class ObjectiveCHeaderGeneratorTest extends GenerationTest {
@Override
protected void tearDown() throws Exception {
    // Undo any generator options that individual test cases may have changed.
    Options.setDocCommentsEnabled(false);
    Options.resetDeprecatedDeclarations();
    super.tearDown();
}
// A unit with a public class, a package-private enum and a package-private
// class must produce package-prefixed Objective-C declarations for all three.
public void testInnerEnumWithPackage() throws IOException {
    String source = "package mypackage;"
        + "public class Example { MyClass myclass = new MyClass(); }"
        + "enum Abcd { A, B, C; }"
        + "class MyClass {}";
    String header = translateSourceFile(source, "Example", "mypackage/Example.h");
    assertTranslation(header, "@interface MypackageExample");
    assertTranslation(header, "} MypackageAbcd;"); // enum declaration
    assertTranslation(header, "@interface MypackageAbcdEnum");
    assertTranslation(header, "@interface MypackageMyClass");
    assertTranslation(header, "MypackageMyClass *myclass_;");
}
// A top-level class with no package keeps its simple name in the header.
public void testTypeNameTranslation() throws IOException {
    String header = translateSourceFile("public class Example {}", "Example", "Example.h");
    assertTranslation(header, "@interface Example ");
}
// With the option enabled, @Deprecated maps to __attribute__((deprecated)).
public void testDeprecatedTypeNameTranslation() throws IOException {
    Options.enableDeprecatedDeclarations();
    String header =
        translateSourceFile("public @Deprecated class Example {}", "Example", "Example.h");
    assertTranslation(header, "__attribute__((deprecated))\n@interface Example ");
}
// By default (option off), @Deprecated must not emit a deprecated attribute.
public void testDeprecatedTypeNameTranslationIsTurnedOff() throws IOException {
    String header =
        translateSourceFile("public @Deprecated class Example {}", "Example", "Example.h");
    assertFalse(header.contains("__attribute__((deprecated))"));
}
// The fully qualified annotation name behaves the same as the simple one.
public void testFullyQualifiedDeprecatedTypeNameTranslation() throws IOException {
    Options.enableDeprecatedDeclarations();
    String header = translateSourceFile(
        "public @java.lang.Deprecated class Example {}", "Example", "Example.h");
    assertTranslation(header, "__attribute__((deprecated))\n@interface Example ");
}
// Package components are camel-cased into a prefix on the type name.
public void testPackageTypeNameTranslation() throws IOException {
    String header = translateSourceFile(
        "package unit.test; public class Example {}", "Example", "unit/test/Example.h");
    assertTranslation(header, "@interface UnitTestExample ");
}
// Inner classes get the package prefix plus an underscore-joined name.
public void testPackageTypeNameTranslationWithInnerClass() throws IOException {
    String header = translateSourceFile(
        "package unit.test; public class Example { class Inner {}}",
        "Example", "unit/test/Example.h");
    assertTranslation(header, "@interface UnitTestExample ");
    assertTranslation(header, "Example_Inner");
    assertTranslation(header, "@interface UnitTestExample_Inner ");
}
// A Java superclass becomes the Objective-C superclass in the @interface line.
public void testSuperclassTypeTranslation() throws IOException {
    String header = translateSourceFile(
        "public class MyException extends Exception {}", "MyException", "MyException.h");
    assertTranslation(header, "@interface MyException : JavaLangException");
}
// Implemented interfaces appear as adopted Objective-C protocols.
public void testImplementsTypeTranslation() throws IOException {
    String header = translateSourceFile(
        "import java.io.Serializable; public class Example implements Serializable {}",
        "Example", "Example.h");
    assertTranslation(header, "@interface Example : NSObject < JavaIoSerializable >");
}
// The superclass produces an #include while a parameter type only needs @class.
public void testImportTranslation() throws IOException {
    String header = translateSourceFile(
        "public class MyException extends Exception { MyException(Throwable t) {super(t);}}",
        "MyException", "MyException.h");
    assertTranslation(header, "@class JavaLangThrowable;");
    assertTranslation(header, "#include \"java/lang/Exception.h\"");
}
// Types only used by reference get a @class forward declaration.
public void testForwardDeclarationTranslation() throws IOException {
    String header = translateSourceFile(
        "public class MyException extends Exception { MyException(Throwable t) {super(t);}}",
        "MyException", "MyException.h");
    assertTranslation(header, "@class JavaLangThrowable;");
}
// Instance fields are declared as ivars with a trailing underscore.
public void testInstanceVariableTranslation() throws IOException {
    String header = translateSourceFile(
        "public class Example { Exception testException; }",
        "Example", "Example.h");
    assertTranslation(header, "JavaLangException *testException_;");
}
// A Java interface becomes an Objective-C protocol.
public void testInterfaceTranslation() throws IOException {
    String header = translateSourceFile(
        "package unit.test; public interface Example {}",
        "Example", "unit/test/Example.h");
    assertTranslation(header, "@protocol UnitTestExample");
}
// Deprecated interfaces also carry the deprecated attribute when enabled.
public void testDeprecatedInterfaceTranslation() throws IOException {
    Options.enableDeprecatedDeclarations();
    String header = translateSourceFile(
        "package unit.test; public @Deprecated interface Example {}",
        "Example", "unit/test/Example.h");
    assertTranslation(header, "__attribute__((deprecated))\n@protocol UnitTestExample");
}
// Interface methods use the id<Protocol> form for self-referential returns.
public void testInterfaceWithMethodTranslation() throws IOException {
    String header = translateSourceFile(
        "package unit.test; public interface Example { Example getExample(); }",
        "Example", "unit/test/Example.h");
    assertTranslation(header, "(id<UnitTestExample>)getExample;");
}
// A deprecated interface method gets the attribute on the method declaration.
public void testInterfaceWithDeprecatedMethodTranslation() throws IOException {
    Options.enableDeprecatedDeclarations();
    String header = translateSourceFile(
        "package unit.test; public interface Example { @Deprecated Example getExample(); }",
        "Example", "unit/test/Example.h");
    assertTranslation(header,
        "- (id<UnitTestExample>)getExample __attribute__((deprecated));");
}
// An extended interface shows up in the protocol conformance list.
public void testSuperInterfaceTranslation() throws IOException {
    String header = translateSourceFile(
        "package unit.test; public interface Example extends Bar {} interface Bar {}",
        "Example", "unit/test/Example.h");
    assertTranslation(header,
        "@protocol UnitTestExample < UnitTestBar, NSObject, JavaObject >");
}
// Compile-time constants become #define macros, without any class initializer.
public void testConstTranslation() throws IOException {
    String header = translateSourceFile(
        "package unit.test; public class Example { public static final int FOO=1; }",
        "Example", "unit/test/Example.h");
    assertTranslation(header, "#define UnitTestExample_FOO 1");
    assertFalse(header.contains("initialize"));
}
// An uninitialized static field exports a global plus getter/setter macros,
// with no class initializer or dealloc generated.
public void testStaticVariableTranslation() throws IOException {
    String header = translateSourceFile(
        "public class Example { public static java.util.Date today; }",
        "Example", "Example.h");
    assertTranslatedLines(header,
        "FOUNDATION_EXPORT JavaUtilDate *Example_today_;",
        "J2OBJC_STATIC_FIELD_GETTER(Example, today_, JavaUtilDate *)",
        "J2OBJC_STATIC_FIELD_SETTER(Example, today_, JavaUtilDate *)");
    assertFalse(header.contains("initialize"));
    assertFalse(header.contains("dealloc"));
}
// An initialized static field exports the same declarations; the class
// initializer must not leak into the header.
public void testStaticVariableWithInitTranslation() throws IOException {
    String header = translateSourceFile(
        "public class Example { public static java.util.Date today = new java.util.Date(); }",
        "Example", "Example.h");
    assertTranslatedLines(header,
        "FOUNDATION_EXPORT JavaUtilDate *Example_today_;",
        "J2OBJC_STATIC_FIELD_GETTER(Example, today_, JavaUtilDate *)",
        "J2OBJC_STATIC_FIELD_SETTER(Example, today_, JavaUtilDate *)");
    assertFalse(header.contains("+ (void)initialize;"));
    assertFalse(header.contains("dealloc"));
}
// "init" collides with the Objective-C init family and must be renamed.
public void testInitMessageTranslation() throws IOException {
    String header = translateSourceFile(
        "public class Example { void init() {} }", "Example", "Example.h");
    assertTranslation(header, "- (void)init__ OBJC_METHOD_FAMILY_NONE;");
}
// "initialize" collides with NSObject's class initializer and must be renamed.
public void testInitializeMessageTranslation() throws IOException {
    String header = translateSourceFile(
        "public class Example { void initialize() {} }", "Example", "Example.h");
    assertTranslation(header, "- (void)initialize__ OBJC_METHOD_FAMILY_NONE;");
}
// Java's toString() maps onto NSObject's description method.
public void testToStringRenaming() throws IOException {
    String header = translateSourceFile(
        "public class Example { public String toString() { return super.toString(); } }",
        "Example", "Example.h");
    assertTranslation(header, "- (NSString *)description;");
}
// A multi-variable object declaration keeps one pointer star per variable.
public void testMultipleObjectDeclaration() throws IOException {
    String header = translateSourceFile(
        "public class Example { String one, two, three; }",
        "Example", "Example.h");
    assertTranslation(header, "NSString *one_, *two_, *three_;");
}
// Multi-variable primitive declarations stay on one line with no stars.
public void testMultiplePrimitiveDeclaration() throws IOException {
    String header = translateSourceFile(
        "public class Example { int one, two, three; }",
        "Example", "Example.h");
    assertTranslation(header, "int one_, two_, three_;");
}
// Interface-typed fields use the id<Protocol> form, shared across the list.
public void testMultipleInterfaceDeclaration() throws IOException {
    String header = translateSourceFile(
        "public class Example { Comparable one, two, three; }",
        "Example", "Example.h");
    assertTranslation(header, "id<JavaLangComparable> one_, two_, three_;");
}
// java.lang.Class fields translate to IOSClass pointers.
public void testMultipleClassDeclaration() throws IOException {
    String header = translateSourceFile(
        "public class Example { Class<?> one, two, three; }",
        "Example", "Example.h");
    assertTranslation(header, "IOSClass *one_, *two_, *three_;");
}
// An inner class becomes a top-level Outer_Inner ObjC class; the outer
// instance is passed through a generated initWithExample: constructor.
public void testInnerClassDeclaration() throws IOException {
    String translation = translateSourceFile(
        "public class Example { class Inner {} }",
        "Example", "Example.h");
    assertTranslation(translation, "@interface Example_Inner : NSObject");
    assertNotInTranslation(translation, "Example *this");
    assertTranslation(translation, "- (instancetype)initWithExample:(Example *)outer$;");
}

// When the inner class reads outer state, the implementation keeps a this$0_
// field referencing the enclosing instance.
public void testInnerClassDeclarationWithOuterReference() throws IOException {
    String translation = translateSourceFile(
        "public class Example { int i; class Inner { void test() { int j = i; } } }",
        "Example", "Example.h");
    assertTranslation(translation, "@interface Example_Inner : NSObject");
    assertTranslation(translation, "- (instancetype)initWithExample:(Example *)outer$;");
    translation = getTranslatedFile("Example.m");
    assertTranslation(translation, "Example *this$0_;");
}

// Anonymous classes are numbered (Example_$1) and implement their interface
// as an ObjC protocol.
public void testAnonymousClassDeclaration() throws IOException {
    String translation = translateSourceFile(
        "public class Example { Runnable run = new Runnable() { public void run() {} }; }",
        "Example", "Example.h");
    assertTranslation(translation, "@interface Example_$1 : NSObject < JavaLangRunnable >");
    assertTranslation(translation, "- (void)run;");
    // Outer reference is not required.
    assertNotInTranslation(translation, "Example *this");
    assertNotInTranslation(translation, "- (id)initWithExample:");
}
// A Java enum generates both a plain C enum of ordinals and a JavaLangEnum
// subclass, plus per-constant accessor macros.
public void testEnum() throws IOException {
    String translation = translateSourceFile(
        "public enum Color { RED, WHITE, BLUE }",
        "Color", "Color.h");
    assertTranslation(translation, "typedef enum {");
    assertTranslation(translation, "Color_RED = 0,");
    assertTranslation(translation, "Color_WHITE = 1,");
    assertTranslation(translation, "Color_BLUE = 2,");
    assertTranslation(translation, "} Color;");
    assertTranslation(translation, "@interface ColorEnum : JavaLangEnum < NSCopying > {");
    assertTranslation(translation, "+ (IOSObjectArray *)values;");
    assertTranslation(translation, "+ (ColorEnum *)valueOfWithNSString:(NSString *)name;");
    assertTranslation(translation, "FOUNDATION_EXPORT ColorEnum *ColorEnum_values_[];");
    assertTranslatedLines(translation,
        "#define ColorEnum_RED ColorEnum_values_[Color_RED]",
        "J2OBJC_STATIC_FIELD_GETTER(ColorEnum, RED, ColorEnum *)");
    assertTranslatedLines(translation,
        "#define ColorEnum_WHITE ColorEnum_values_[Color_WHITE]",
        "J2OBJC_STATIC_FIELD_GETTER(ColorEnum, WHITE, ColorEnum *)");
    assertTranslatedLines(translation,
        "#define ColorEnum_BLUE ColorEnum_values_[Color_BLUE]",
        "J2OBJC_STATIC_FIELD_GETTER(ColorEnum, BLUE, ColorEnum *)");
}

// Enum constructors gain trailing __name/__ordinal parameters supplied by the
// generated constant initialization.
public void testEnumWithParameters() throws IOException {
    String translation = translateSourceFile(
        "public enum Color { RED(0xff0000), WHITE(0xffffff), BLUE(0x0000ff); "
        + "private int rgb; private Color(int rgb) { this.rgb = rgb; } "
        + "public int getRgb() { return rgb; }}",
        "Color", "Color.h");
    assertTranslation(translation, "@interface ColorEnum : JavaLangEnum");
    translation = getTranslatedFile("Color.m");
    assertTranslation(translation, "int rgb_;");
    assertTranslatedLines(translation,
        "- (instancetype)initWithInt:(jint)rgb",
        "withNSString:(NSString *)__name",
        "withInt:(jint)__ordinal;");
}

// Each overloaded enum constructor gets its own initializer; this(...)
// delegation translates to the designated initColorEnum... call.
public void testEnumWithMultipleConstructors() throws IOException {
    String translation = translateSourceFile(
        "public enum Color { RED(0xff0000), WHITE(0xffffff, false), BLUE(0x0000ff); "
        + "private int rgb; private boolean primary;"
        + "private Color(int rgb, boolean primary) { this.rgb = rgb; this.primary = primary; } "
        + "private Color(int rgb) { this(rgb, true); } "
        + "public int getRgb() { return rgb; }"
        + "public boolean isPrimaryColor() { return primary; }}",
        "Color", "Color.h");
    assertTranslation(translation, "@interface ColorEnum : JavaLangEnum");
    translation = getTranslatedFile("Color.m");
    assertTranslation(translation, "jboolean primary_;");
    assertTranslatedLines(translation,
        "- (instancetype)initWithInt:(jint)rgb",
        "withNSString:(NSString *)__name",
        "withInt:(jint)__ordinal;");
    assertTranslatedLines(translation,
        "- (instancetype)initWithInt:(jint)rgb",
        "withBoolean:(jboolean)primary",
        "withNSString:(NSString *)__name",
        "withInt:(jint)__ordinal;");
    assertTranslation(translation,
        "[self initColorEnumWithInt:rgb withBoolean:YES withNSString:__name withInt:__ordinal]");
    assertTranslatedLines(translation,
        "if (self = [super initWithNSString:__name withInt:__ordinal]) {",
        "self->rgb_ = rgb;",
        "self->primary_ = primary;");
}
// Both Java array-declaration forms ("char[] x" and "char x[]") translate to
// the same IOSCharArray field type.
public void testArrayFieldDeclaration() throws IOException {
    String translation = translateSourceFile(
        "public class Example { char[] before; char after[]; }",
        "Example", "Example.h");
    assertTranslation(translation, "IOSCharArray *before_;");
    assertTranslation(translation, "IOSCharArray *after_;");
}

public void testForwardDeclarationOfInnerType() throws IOException {
    String translation = translateSourceFile(
        "public class Example { Foo foo; class Foo {} }", "Example", "Example.h");
    // Test that Foo is forward declared because Example contains a field of
    // type Foo and Foo is declared after Example.
    assertTranslation(translation, "@class Example_Foo;");
}
// A runtime-retained annotation becomes a protocol (the annotation type) plus
// a concrete value class implementing it, with property-based members.
public void testAnnotationGeneration() throws IOException {
    String translation = translateSourceFile(
        "package foo; import java.lang.annotation.*; @Retention(RetentionPolicy.RUNTIME) "
        + "public @interface Compatible { boolean fooable() default false; }",
        "Compatible", "foo/Compatible.h");
    // Test that the annotation was declared as a protocol and a value class.
    assertTranslation(translation, "@protocol FooCompatible < JavaLangAnnotationAnnotation >");
    assertTranslation(translation, "@interface FooCompatible : NSObject < FooCompatible >");
    // Verify that the value is defined as a property instead of a method.
    assertTranslation(translation, "@private\n jboolean fooable;");
    assertTranslation(translation, "@property (readonly) jboolean fooable;");
    // Verify default value accessor is generated for property.
    assertTranslation(translation, "+ (jboolean)fooableDefault;");
    // Check that constructor was created with the property as parameter.
    assertTranslation(translation, "- (instancetype)initWithFooable:(jboolean)fooable_;");
}

// Boundary char constants must be emitted as escape/hex forms valid in C.
public void testCharacterEdgeValues() throws IOException {
    String translation = translateSourceFile(
        "public class Test { "
        + " public static final char MIN = 0; "
        + " public static final char MAX = '\uffff'; "
        + "}", "Test", "Test.h");
    assertTranslation(translation, "x00");
    assertTranslation(translation, "0xffff");
}
// Verifies the naming convention split between the C ordinal enum (MyEnum)
// and the class type (MyEnumEnum).
public void testEnumNaming() throws IOException {
    String translation = translateSourceFile(
        "public enum MyEnum { ONE, TWO, THREE }",
        "MyEnum", "MyEnum.h");
    assertTranslation(translation, "} MyEnum;");
    assertTranslation(translation, "@interface MyEnumEnum : JavaLangEnum");
    assertTranslation(translation, "FOUNDATION_EXPORT MyEnumEnum *MyEnumEnum_values_[];");
    assertTranslation(translation, "#define MyEnumEnum_ONE MyEnumEnum_values_[MyEnum_ONE]");
}

// Types mapped onto Foundation equivalents must not pull in any #include,
// neither for the Java name nor the mapped ObjC name.
public void testNoImportForMappedTypes() throws IOException {
    String translation = translateSourceFile(
        "public class Test extends Object implements Cloneable { "
        + " public String toString() { return \"\"; }"
        + " public Class<?> myClass() { return getClass(); }}",
        "Test", "Test.h");
    assertFalse(translation.contains("#include \"java/lang/Class.h\""));
    assertFalse(translation.contains("#include \"java/lang/Cloneable.h\""));
    assertFalse(translation.contains("#include \"java/lang/Object.h\""));
    assertFalse(translation.contains("#include \"java/lang/String.h\""));
    assertFalse(translation.contains("#include \"Class.h\""));
    assertFalse(translation.contains("#include \"NSCopying.h\""));
    assertFalse(translation.contains("#include \"NSObject.h\""));
    assertFalse(translation.contains("#include \"NSString.h\""));
    assertTranslation(translation, "NSCopying");
}

// The anonymous subclass must implement the generic interface method with the
// erased/bridged parameter type.
public void testAnonymousConcreteSubclassOfGenericAbstractType() throws IOException {
    String translation = translateSourceFile(
        "public class Test {"
        + " interface FooInterface<T> { public void foo1(T t); public void foo2(); }"
        + " abstract static class Foo<T> implements FooInterface<T> { public void foo2() { } }"
        + " Foo<Integer> foo = new Foo<Integer>() {"
        + " public void foo1(Integer i) { } }; }",
        "Test", "Test.h");
    assertTranslation(translation, "foo1WithId:(JavaLangInteger *)i");
}

// Verify that an empty Java enum doesn't define an empty C enum,
// which is illegal.
public void testEmptyEnum() throws IOException {
    String header = translateSourceFile("public class A { enum Foo {} }", "A", "A.h");
    String impl = getTranslatedFile("A.m");
    // Verify there's no C enum.
    assertFalse(header.contains("typedef enum {\n} A_Foo;"));
    // Verify there's still a Java enum type.
    assertTranslation(header, "@interface A_FooEnum : JavaLangEnum");
    assertTranslation(impl, "@implementation A_FooEnum");
}

// Implemented interfaces become protocols on the enum class; Cloneable maps
// to NSCopying, and translated interfaces keep their #include.
public void testEnumWithInterfaces() throws IOException {
    String translation = translateSourceFile(
        "public class A { interface I {} "
        + "enum Foo implements I, Runnable, Cloneable { "
        + "A, B, C; public void run() {}}}", "A", "A.h");
    assertTranslation(translation,
        "@interface A_FooEnum : JavaLangEnum < NSCopying, A_I, JavaLangRunnable >");
    assertTranslation(translation, "#include \"java/lang/Runnable.h\"");
}
// A native method is declared in a separate (NativeMethods) category that the
// user implements by hand; the generated .m only invokes it.
public void testExternalNativeMethod() throws IOException {
    String translation = translateSourceFile(
        "package foo; class Example { native void external(String s); "
        + " void test(String str) { external(str); }}", "Example", "foo/Example.h");
    // Verify test() is in main interface.
    assertTranslation(translation,
        "@interface FooExample : NSObject {\n}\n\n- (void)testWithNSString:(NSString *)str;");
    // Verify external() is in native methods interface.
    assertTranslation(translation,
        "@interface FooExample (NativeMethods)\n- (void)externalWithNSString:(NSString *)s;");
    // Verify category method isn't implemented, but is invoked.
    translation = getTranslatedFile("foo/Example.m");
    assertTranslation(translation, "@implementation FooExample\n");
    assertFalse(translation.contains("- (void)externalWithNSString:(NSString *)s"));
    assertTranslation(translation, "[self externalWithNSString:str];");
}

// @Weak fields get the __weak qualifier; unannotated fields stay strong.
public void testPropertiesOfTypeWeakOuter() throws IOException {
    String sourceContent =
        " import com.google.j2objc.annotations.Weak;"
        + "import com.google.j2objc.annotations.WeakOuter;"
        + "public class FooBar {"
        + " @Weak private Internal fieldBar;"
        + " private Internal fieldFoo;"
        + " @WeakOuter"
        + " private class Internal {"
        + " }"
        + "}";
    String translation = translateSourceFile(sourceContent, "FooBar", "FooBar.m");
    assertTranslatedLines(translation,
        "__weak FooBar_Internal *fieldBar_;",
        "FooBar_Internal *fieldFoo_;");
}

// With --deprecated-declarations the header suppresses clang's
// -Wdeprecated-declarations via push/ignore/pop pragmas.
public void testAddIgnoreDeprecationWarningsPragmaIfDeprecatedDeclarationsIsEnabled()
    throws IOException {
    Options.enableDeprecatedDeclarations();
    String sourceContent = "class Test {}";
    String translation = translateSourceFile(sourceContent, "FooBar", "FooBar.h");
    assertTranslation(translation, "#pragma clang diagnostic push");
    assertTranslation(translation, "#pragma GCC diagnostic ignored \"-Wdeprecated-declarations\"");
    assertTranslation(translation, "#pragma clang diagnostic pop");
}

// Without the option, no diagnostic pragmas are emitted at all.
public void testDoNotAddIgnoreDeprecationWarningsPragmaIfDeprecatedDeclarationsIsDisabled()
    throws IOException {
    String sourceContent = "class Test {}";
    String translation = translateSourceFile(sourceContent, "FooBar", "FooBar.h");
    assertNotInTranslation(translation, "#pragma clang diagnostic push");
    assertNotInTranslation(translation,
        "#pragma GCC diagnostic ignored \"-Wdeprecated-declarations\"");
    assertNotInTranslation(translation, "#pragma clang diagnostic pop");
}
// Nested annotations follow the same protocol + value-class scheme as
// top-level ones, with Outer_Inner naming.
public void testInnerAnnotationGeneration() throws IOException {
    String source = "import java.lang.annotation.*; public abstract class Test { "
        + "@Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) "
        + "public @interface Initialize {}}";
    String translation = translateSourceFile(source, "Test", "Test.h");
    assertTranslation(translation, "@protocol Test_Initialize < JavaLangAnnotationAnnotation >");
    assertTranslation(translation, "@interface Test_Initialize : NSObject < Test_Initialize >");
}

// Only strong instance object fields get a J2OBJC_FIELD_SETTER macro.
public void testFieldSetterGeneration() throws IOException {
    String translation = translateSourceFile(
        "import com.google.j2objc.annotations.Weak;"
        + "class Test { Object o; @Weak String s; static Integer i; }", "Test", "Test.h");
    assertTranslation(translation, "J2OBJC_FIELD_SETTER(Test, o_, id)");
    // Make sure the @Weak and static fields don't generate setters.
    assertOccurrences(translation, "J2OBJC_FIELD_SETTER", 1);
}

// User fields named "name"/"ordinal" clash with JavaLangEnum's own state, so
// the translator suffixes them with the enum's type name.
public void testEnumWithNameAndOrdinalParameters() throws IOException {
    String translation = translateSourceFile(
        "public enum Test { FOO(\"foo\", 3), BAR(\"bar\", 5); "
        + "private String name; private int ordinal; "
        + "private Test(String name, int ordinal) { this.name = name; this.ordinal = ordinal; }"
        + "public String getName() { return name; }}",
        "Test", "Test.h");
    assertTranslation(translation, "@interface TestEnum : JavaLangEnum");
    translation = getTranslatedFile("Test.m");
    assertTranslation(translation, "NSString *name_Test_;");
    assertTranslation(translation, "int ordinal_Test_;");
    assertTranslatedLines(translation,
        "- (instancetype)initWithNSString:(NSString *)name",
        "withInt:(jint)ordinal",
        "withNSString:(NSString *)__name",
        "withInt:(jint)__ordinal;");
}

// @Deprecated on the enum type yields a deprecated attribute on the class.
public void testDeprecatedEnumType() throws IOException {
    Options.enableDeprecatedDeclarations();
    String translation = translateSourceFile(
        "@Deprecated public enum Test { A, B }", "Test", "Test.h");
    assertTranslation(translation, "__attribute__((deprecated))\n@interface TestEnum");
}

// long constants need the LL suffix to stay 64-bit in C.
public void testLongConstants() throws IOException {
    String translation = translateSourceFile(
        "class Test { static final long FOO = 123; }", "Test", "Test.h");
    assertTranslation(translation, "123LL");
}

// With doc-comments enabled, javadoc is converted to Doxygen-style
// @brief/@param/@return comments in the header.
public void testDocComments() throws IOException {
    Options.setDocCommentsEnabled(true);
    String translation = translateSourceFile(
        "/** Class javadoc for {@link Test}. */ class Test { \n"
        + "/** Field javadoc. */\n"
        + "int i;"
        + "/** Method javadoc.\n"
        + " * @param foo Unused.\n"
        + " * @return always false.\n"
        + " */\n"
        + "boolean test(int foo) { return false; } }", "Test", "Test.h");
    assertTranslation(translation, "@brief Class javadoc for Test .");
    assertTranslation(translation, "@brief Field javadoc.");
    assertTranslatedLines(translation,
        "@brief Method javadoc.",
        "@param foo Unused.",
        "@return always false.");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.schema;
import java.nio.ByteBuffer;
import java.util.Locale;
import java.util.Map;

import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableMap;

import org.apache.cassandra.cql3.Attributes;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.service.reads.PercentileSpeculativeRetryPolicy;
import org.apache.cassandra.service.reads.SpeculativeRetryPolicy;
import org.apache.cassandra.service.reads.repair.ReadRepairStrategy;
import org.apache.cassandra.utils.BloomCalculations;

import static java.lang.String.format;
/**
 * Immutable value object holding every per-table schema option.
 *
 * Instances are built via {@link Builder}; {@link #unbuild()} returns a
 * builder pre-populated with this instance's values.
 *
 * Fix: {@code equals}, {@code hashCode} and {@code toString} previously
 * omitted {@code additionalWritePolicy}, so two params differing only in
 * that option compared equal and rendered identically.
 */
public final class TableParams
{
    public static final TableParams DEFAULT = TableParams.builder().build();

    /**
     * Every table option understood by this class. {@link #toString()} yields
     * the option's CQL name (the lower-cased constant name).
     */
    public enum Option
    {
        BLOOM_FILTER_FP_CHANCE,
        CACHING,
        COMMENT,
        COMPACTION,
        COMPRESSION,
        DEFAULT_TIME_TO_LIVE,
        EXTENSIONS,
        GC_GRACE_SECONDS,
        MAX_INDEX_INTERVAL,
        MEMTABLE_FLUSH_PERIOD_IN_MS,
        MIN_INDEX_INTERVAL,
        SPECULATIVE_RETRY,
        ADDITIONAL_WRITE_POLICY,
        CRC_CHECK_CHANCE,
        CDC,
        READ_REPAIR;

        @Override
        public String toString()
        {
            // Locale.ROOT keeps option names locale-independent (e.g. avoids
            // Turkish dotless-i lower-casing of 'I').
            return name().toLowerCase(Locale.ROOT);
        }
    }

    public final String comment;
    public final double bloomFilterFpChance;
    public final double crcCheckChance;
    public final int gcGraceSeconds;
    public final int defaultTimeToLive;
    public final int memtableFlushPeriodInMs;
    public final int minIndexInterval;
    public final int maxIndexInterval;
    public final SpeculativeRetryPolicy speculativeRetry;
    public final SpeculativeRetryPolicy additionalWritePolicy;
    public final CachingParams caching;
    public final CompactionParams compaction;
    public final CompressionParams compression;
    public final ImmutableMap<String, ByteBuffer> extensions;
    public final boolean cdc;
    public final ReadRepairStrategy readRepair;

    private TableParams(Builder builder)
    {
        comment = builder.comment;
        // When no explicit FP chance was supplied, fall back to the default
        // appropriate for the configured compaction strategy.
        bloomFilterFpChance = builder.bloomFilterFpChance == null
                            ? builder.compaction.defaultBloomFilterFbChance()
                            : builder.bloomFilterFpChance;
        crcCheckChance = builder.crcCheckChance;
        gcGraceSeconds = builder.gcGraceSeconds;
        defaultTimeToLive = builder.defaultTimeToLive;
        memtableFlushPeriodInMs = builder.memtableFlushPeriodInMs;
        minIndexInterval = builder.minIndexInterval;
        maxIndexInterval = builder.maxIndexInterval;
        speculativeRetry = builder.speculativeRetry;
        additionalWritePolicy = builder.additionalWritePolicy;
        caching = builder.caching;
        compaction = builder.compaction;
        compression = builder.compression;
        extensions = builder.extensions;
        cdc = builder.cdc;
        readRepair = builder.readRepair;
    }

    /** Returns a builder initialized with the default option values. */
    public static Builder builder()
    {
        return new Builder();
    }

    /** Returns a builder initialized with {@code params}' values. */
    public static Builder builder(TableParams params)
    {
        return new Builder().bloomFilterFpChance(params.bloomFilterFpChance)
                            .caching(params.caching)
                            .comment(params.comment)
                            .compaction(params.compaction)
                            .compression(params.compression)
                            .crcCheckChance(params.crcCheckChance)
                            .defaultTimeToLive(params.defaultTimeToLive)
                            .gcGraceSeconds(params.gcGraceSeconds)
                            .maxIndexInterval(params.maxIndexInterval)
                            .memtableFlushPeriodInMs(params.memtableFlushPeriodInMs)
                            .minIndexInterval(params.minIndexInterval)
                            .speculativeRetry(params.speculativeRetry)
                            .additionalWritePolicy(params.additionalWritePolicy)
                            .extensions(params.extensions)
                            .cdc(params.cdc)
                            .readRepair(params.readRepair);
    }

    /** Shorthand for {@code builder(this)}. */
    public Builder unbuild()
    {
        return builder(this);
    }

    /**
     * Validates all option values, including nested compaction/compression
     * params.
     *
     * @throws ConfigurationException if any option value is out of range
     */
    public void validate()
    {
        compaction.validate();
        compression.validate();

        double minBloomFilterFpChanceValue = BloomCalculations.minSupportedBloomFilterFpChance();
        if (bloomFilterFpChance <= minBloomFilterFpChanceValue || bloomFilterFpChance > 1)
        {
            fail("%s must be larger than %s and less than or equal to 1.0 (got %s)",
                 Option.BLOOM_FILTER_FP_CHANCE,
                 minBloomFilterFpChanceValue,
                 bloomFilterFpChance);
        }

        if (crcCheckChance < 0 || crcCheckChance > 1.0)
        {
            fail("%s must be larger than or equal to 0 and smaller than or equal to 1.0 (got %s)",
                 Option.CRC_CHECK_CHANCE,
                 crcCheckChance);
        }

        if (defaultTimeToLive < 0)
            fail("%s must be greater than or equal to 0 (got %s)", Option.DEFAULT_TIME_TO_LIVE, defaultTimeToLive);

        if (defaultTimeToLive > Attributes.MAX_TTL)
            fail("%s must be less than or equal to %d (got %s)", Option.DEFAULT_TIME_TO_LIVE, Attributes.MAX_TTL, defaultTimeToLive);

        if (gcGraceSeconds < 0)
            fail("%s must be greater than or equal to 0 (got %s)", Option.GC_GRACE_SECONDS, gcGraceSeconds);

        if (minIndexInterval < 1)
            fail("%s must be greater than or equal to 1 (got %s)", Option.MIN_INDEX_INTERVAL, minIndexInterval);

        if (maxIndexInterval < minIndexInterval)
        {
            fail("%s must be greater than or equal to %s (%s) (got %s)",
                 Option.MAX_INDEX_INTERVAL,
                 Option.MIN_INDEX_INTERVAL,
                 minIndexInterval,
                 maxIndexInterval);
        }

        if (memtableFlushPeriodInMs < 0)
            fail("%s must be greater than or equal to 0 (got %s)", Option.MEMTABLE_FLUSH_PERIOD_IN_MS, memtableFlushPeriodInMs);
    }

    // Renamed the parameter from 'format' to avoid shadowing the statically
    // imported String.format.
    private static void fail(String messageFormat, Object... args)
    {
        throw new ConfigurationException(format(messageFormat, args));
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o)
            return true;

        if (!(o instanceof TableParams))
            return false;

        TableParams p = (TableParams) o;

        // additionalWritePolicy is part of the value and must participate here
        // (it was previously missing, making distinct params compare equal).
        return comment.equals(p.comment)
            && bloomFilterFpChance == p.bloomFilterFpChance
            && crcCheckChance == p.crcCheckChance
            && gcGraceSeconds == p.gcGraceSeconds
            && defaultTimeToLive == p.defaultTimeToLive
            && memtableFlushPeriodInMs == p.memtableFlushPeriodInMs
            && minIndexInterval == p.minIndexInterval
            && maxIndexInterval == p.maxIndexInterval
            && speculativeRetry.equals(p.speculativeRetry)
            && additionalWritePolicy.equals(p.additionalWritePolicy)
            && caching.equals(p.caching)
            && compaction.equals(p.compaction)
            && compression.equals(p.compression)
            && extensions.equals(p.extensions)
            && cdc == p.cdc
            && readRepair == p.readRepair;
    }

    @Override
    public int hashCode()
    {
        // Must stay consistent with equals(): includes additionalWritePolicy.
        return Objects.hashCode(comment,
                                bloomFilterFpChance,
                                crcCheckChance,
                                gcGraceSeconds,
                                defaultTimeToLive,
                                memtableFlushPeriodInMs,
                                minIndexInterval,
                                maxIndexInterval,
                                speculativeRetry,
                                additionalWritePolicy,
                                caching,
                                compaction,
                                compression,
                                extensions,
                                cdc,
                                readRepair);
    }

    @Override
    public String toString()
    {
        return MoreObjects.toStringHelper(this)
                          .add(Option.COMMENT.toString(), comment)
                          .add(Option.BLOOM_FILTER_FP_CHANCE.toString(), bloomFilterFpChance)
                          .add(Option.CRC_CHECK_CHANCE.toString(), crcCheckChance)
                          .add(Option.GC_GRACE_SECONDS.toString(), gcGraceSeconds)
                          .add(Option.DEFAULT_TIME_TO_LIVE.toString(), defaultTimeToLive)
                          .add(Option.MEMTABLE_FLUSH_PERIOD_IN_MS.toString(), memtableFlushPeriodInMs)
                          .add(Option.MIN_INDEX_INTERVAL.toString(), minIndexInterval)
                          .add(Option.MAX_INDEX_INTERVAL.toString(), maxIndexInterval)
                          .add(Option.SPECULATIVE_RETRY.toString(), speculativeRetry)
                          .add(Option.ADDITIONAL_WRITE_POLICY.toString(), additionalWritePolicy)
                          .add(Option.CACHING.toString(), caching)
                          .add(Option.COMPACTION.toString(), compaction)
                          .add(Option.COMPRESSION.toString(), compression)
                          .add(Option.EXTENSIONS.toString(), extensions)
                          .add(Option.CDC.toString(), cdc)
                          .add(Option.READ_REPAIR.toString(), readRepair)
                          .toString();
    }

    /** Mutable builder for {@link TableParams}; all setters return {@code this}. */
    public static final class Builder
    {
        private String comment = "";
        private Double bloomFilterFpChance; // null => derive from compaction strategy
        private double crcCheckChance = 1.0;
        private int gcGraceSeconds = 864000; // 10 days
        private int defaultTimeToLive = 0;
        private int memtableFlushPeriodInMs = 0;
        private int minIndexInterval = 128;
        private int maxIndexInterval = 2048;
        private SpeculativeRetryPolicy speculativeRetry = PercentileSpeculativeRetryPolicy.NINETY_NINE_P;
        private SpeculativeRetryPolicy additionalWritePolicy = PercentileSpeculativeRetryPolicy.NINETY_NINE_P;
        private CachingParams caching = CachingParams.DEFAULT;
        private CompactionParams compaction = CompactionParams.DEFAULT;
        private CompressionParams compression = CompressionParams.DEFAULT;
        private ImmutableMap<String, ByteBuffer> extensions = ImmutableMap.of();
        private boolean cdc;
        private ReadRepairStrategy readRepair = ReadRepairStrategy.BLOCKING;

        public Builder()
        {
        }

        public TableParams build()
        {
            return new TableParams(this);
        }

        public Builder comment(String val)
        {
            comment = val;
            return this;
        }

        public Builder bloomFilterFpChance(double val)
        {
            bloomFilterFpChance = val;
            return this;
        }

        public Builder crcCheckChance(double val)
        {
            crcCheckChance = val;
            return this;
        }

        public Builder gcGraceSeconds(int val)
        {
            gcGraceSeconds = val;
            return this;
        }

        public Builder defaultTimeToLive(int val)
        {
            defaultTimeToLive = val;
            return this;
        }

        public Builder memtableFlushPeriodInMs(int val)
        {
            memtableFlushPeriodInMs = val;
            return this;
        }

        public Builder minIndexInterval(int val)
        {
            minIndexInterval = val;
            return this;
        }

        public Builder maxIndexInterval(int val)
        {
            maxIndexInterval = val;
            return this;
        }

        public Builder speculativeRetry(SpeculativeRetryPolicy val)
        {
            speculativeRetry = val;
            return this;
        }

        public Builder additionalWritePolicy(SpeculativeRetryPolicy val)
        {
            additionalWritePolicy = val;
            return this;
        }

        public Builder caching(CachingParams val)
        {
            caching = val;
            return this;
        }

        public Builder compaction(CompactionParams val)
        {
            compaction = val;
            return this;
        }

        public Builder compression(CompressionParams val)
        {
            compression = val;
            return this;
        }

        public Builder cdc(boolean val)
        {
            cdc = val;
            return this;
        }

        public Builder readRepair(ReadRepairStrategy val)
        {
            readRepair = val;
            return this;
        }

        public Builder extensions(Map<String, ByteBuffer> val)
        {
            // Defensive copy; keeps the built TableParams immutable.
            extensions = ImmutableMap.copyOf(val);
            return this;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.migration.cli.commonscli;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Serializable;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
* A formatter of help messages for command line options.
*
* <p>
* Example:
* </p>
*
* <pre>
* Options options = new Options();
* options.addOption(OptionBuilder.withLongOpt("file").withDescription("The file to be processed").hasArg().withArgName("FILE")
* .isRequired().create('f'));
* options.addOption(OptionBuilder.withLongOpt("version").withDescription("Print the version of the application").create('v'));
* options.addOption(OptionBuilder.withLongOpt("help").create('h'));
*
* String header = "Do something useful with an input file\n\n";
* String footer = "\nPlease report issues at http://example.com/issues";
*
* HelpFormatter formatter = new HelpFormatter();
* formatter.printHelp("myapp", header, options, footer, true);
* </pre>
*
* This produces the following output:
*
* <pre>
* usage: myapp -f <FILE> [-h] [-v]
* Do something useful with an input file
*
* -f,--file <FILE> The file to be processed
* -h,--help
* -v,--version Print the version of the application
*
* Please report issues at http://example.com/issues
* </pre>
*
* @version $Id$
*/
public class HelpFormatter {
    // --------------------------------------------------------------- Constants

    /** Default number of characters per line. */
    public static final int DEFAULT_WIDTH = 74;

    /** Default padding to the left of each line. */
    public static final int DEFAULT_LEFT_PAD = 1;

    /** Default number of space characters to be prefixed to each description line. */
    public static final int DEFAULT_DESC_PAD = 3;

    /** The string to display at the beginning of the usage statement. */
    public static final String DEFAULT_SYNTAX_PREFIX = "usage: ";

    /** Default prefix for short options (e.g. "-f"). */
    public static final String DEFAULT_OPT_PREFIX = "-";

    /** Default prefix for long options (e.g. "--file"). */
    public static final String DEFAULT_LONG_OPT_PREFIX = "--";

    /**
     * Default separator displayed between a long option and its value.
     *
     * @since 1.3
     **/
    public static final String DEFAULT_LONG_OPT_SEPARATOR = " ";

    /** Default display name for an option argument. */
    public static final String DEFAULT_ARG_NAME = "arg";

    // -------------------------------------------------------------- Attributes

    /**
     * Number of characters per line.
     *
     * @deprecated Scope will be made private for next major version - use get/setWidth methods instead.
     */
    @Deprecated
    public int defaultWidth = DEFAULT_WIDTH;

    /**
     * Amount of padding to the left of each line.
     *
     * @deprecated Scope will be made private for next major version - use get/setLeftPadding methods instead.
     */
    @Deprecated
    public int defaultLeftPad = DEFAULT_LEFT_PAD;

    /**
     * The number of characters of padding to be prefixed to each description line.
     *
     * @deprecated Scope will be made private for next major version - use get/setDescPadding methods instead.
     */
    @Deprecated
    public int defaultDescPad = DEFAULT_DESC_PAD;

    /**
     * The string to display at the beginning of the usage statement.
     *
     * @deprecated Scope will be made private for next major version - use get/setSyntaxPrefix methods instead.
     */
    @Deprecated
    public String defaultSyntaxPrefix = DEFAULT_SYNTAX_PREFIX;

    /**
     * The new line string (initialized to the platform line separator).
     *
     * @deprecated Scope will be made private for next major version - use get/setNewLine methods instead.
     */
    @Deprecated
    public String defaultNewLine = System.getProperty("line.separator");

    /**
     * The short option prefix.
     *
     * @deprecated Scope will be made private for next major version - use get/setOptPrefix methods instead.
     */
    @Deprecated
    public String defaultOptPrefix = DEFAULT_OPT_PREFIX;

    /**
     * The long option prefix.
     *
     * @deprecated Scope will be made private for next major version - use get/setLongOptPrefix methods instead.
     */
    @Deprecated
    public String defaultLongOptPrefix = DEFAULT_LONG_OPT_PREFIX;

    /**
     * The display name of an option argument.
     *
     * @deprecated Scope will be made private for next major version - use get/setArgName methods instead.
     */
    @Deprecated
    public String defaultArgName = DEFAULT_ARG_NAME;

    /**
     * Comparator used to sort the options when they are output in help text.
     *
     * Defaults to case-insensitive alphabetical sorting by option key.
     */
    protected Comparator<Option> optionComparator = new OptionComparator();

    /** The separator displayed between the long option and its value. */
    private String longOptSeparator = DEFAULT_LONG_OPT_SEPARATOR;
/**
* Sets the 'width'.
*
* @param width the new value of 'width'
*/
public void setWidth(int width) {
    // Stored in the deprecated public field for backwards compatibility.
    defaultWidth = width;
}
/**
* Returns the 'width'.
*
* @return the 'width'
*/
public int getWidth() {
    return this.defaultWidth;
}
/**
* Sets the 'leftPadding'.
*
* @param padding the new value of 'leftPadding'
*/
public void setLeftPadding(int padding) {
    // Stored in the deprecated public field for backwards compatibility.
    defaultLeftPad = padding;
}
/**
* Returns the 'leftPadding'.
*
* @return the 'leftPadding'
*/
public int getLeftPadding() {
    return this.defaultLeftPad;
}
/**
* Sets the 'descPadding'.
*
* @param padding the new value of 'descPadding'
*/
public void setDescPadding(int padding) {
    // Stored in the deprecated public field for backwards compatibility.
    defaultDescPad = padding;
}
/**
* Returns the 'descPadding'.
*
* @return the 'descPadding'
*/
public int getDescPadding() {
    return this.defaultDescPad;
}
/**
* Sets the 'syntaxPrefix'.
*
* @param prefix the new value of 'syntaxPrefix'
*/
public void setSyntaxPrefix(String prefix) {
    // Stored in the deprecated public field for backwards compatibility.
    defaultSyntaxPrefix = prefix;
}
/**
* Returns the 'syntaxPrefix'.
*
* @return the 'syntaxPrefix'
*/
public String getSyntaxPrefix() {
    return this.defaultSyntaxPrefix;
}
/**
* Sets the 'newLine'.
*
* @param newline the new value of 'newLine'
*/
public void setNewLine(String newline) {
    // Stored in the deprecated public field for backwards compatibility.
    defaultNewLine = newline;
}
/**
* Returns the 'newLine'.
*
* @return the 'newLine'
*/
public String getNewLine() {
    return this.defaultNewLine;
}
/**
* Sets the 'optPrefix'.
*
* @param prefix the new value of 'optPrefix'
*/
public void setOptPrefix(String prefix) {
    // Stored in the deprecated public field for backwards compatibility.
    defaultOptPrefix = prefix;
}
/**
* Returns the 'optPrefix'.
*
* @return the 'optPrefix'
*/
public String getOptPrefix() {
    return this.defaultOptPrefix;
}
/**
* Sets the 'longOptPrefix'.
*
* @param prefix the new value of 'longOptPrefix'
*/
public void setLongOptPrefix(String prefix) {
    // Stored in the deprecated public field for backwards compatibility.
    defaultLongOptPrefix = prefix;
}
/**
* Returns the 'longOptPrefix'.
*
* @return the 'longOptPrefix'
*/
public String getLongOptPrefix() {
    return this.defaultLongOptPrefix;
}
/**
* Set the separator displayed between a long option and its value. Ensure that the separator specified is supported by the
* parser used, typically ' ' or '='.
*
* @param longOptSeparator the separator, typically ' ' or '='.
* @since 1.3
*/
public void setLongOptSeparator(String longOptSeparator) {
    // Parameter intentionally shadows the field, so the 'this.' qualifier
    // is required here.
    this.longOptSeparator = longOptSeparator;
}
/**
* Returns the separator displayed between a long option and its value.
*
* @return the separator
* @since 1.3
*/
public String getLongOptSeparator() {
return longOptSeparator;
}
/**
* Sets the 'argName'.
*
* @param name the new value of 'argName'
*/
public void setArgName(String name) {
this.defaultArgName = name;
}
/**
* Returns the 'argName'.
*
* @return the 'argName'
*/
public String getArgName() {
return defaultArgName;
}
/**
* Comparator used to sort the options when they output in help text. Defaults to case-insensitive alphabetical sorting by
* option key.
*
* @return the {@link Comparator} currently in use to sort the options
* @since 1.2
*/
public Comparator<Option> getOptionComparator() {
return optionComparator;
}
/**
* Set the comparator used to sort the options when they output in help text. Passing in a null comparator will keep the
* options in the order they were declared.
*
* @param comparator the {@link Comparator} to use for sorting the options
* @since 1.2
*/
public void setOptionComparator(Comparator<Option> comparator) {
this.optionComparator = comparator;
}
/**
 * Print the help for <code>options</code> with the specified command line syntax. This method prints help information to
 * System.out. Convenience overload: uses the configured width, no header/footer, no auto-usage.
 *
 * @param cmdLineSyntax the syntax for this application
 * @param options the Options instance
 */
public void printHelp(String cmdLineSyntax, Options options) {
    printHelp(getWidth(), cmdLineSyntax, null, options, null, false);
}

/**
 * Print the help for <code>options</code> with the specified command line syntax. This method prints help information to
 * System.out. Convenience overload: uses the configured width and no header/footer.
 *
 * @param cmdLineSyntax the syntax for this application
 * @param options the Options instance
 * @param autoUsage whether to print an automatically generated usage statement
 */
public void printHelp(String cmdLineSyntax, Options options, boolean autoUsage) {
    printHelp(getWidth(), cmdLineSyntax, null, options, null, autoUsage);
}

/**
 * Print the help for <code>options</code> with the specified command line syntax. This method prints help information to
 * System.out. Convenience overload: no auto-usage.
 *
 * @param cmdLineSyntax the syntax for this application
 * @param header the banner to display at the beginning of the help
 * @param options the Options instance
 * @param footer the banner to display at the end of the help
 */
public void printHelp(String cmdLineSyntax, String header, Options options, String footer) {
    printHelp(cmdLineSyntax, header, options, footer, false);
}

/**
 * Print the help for <code>options</code> with the specified command line syntax. This method prints help information to
 * System.out. Convenience overload: uses the configured width.
 *
 * @param cmdLineSyntax the syntax for this application
 * @param header the banner to display at the beginning of the help
 * @param options the Options instance
 * @param footer the banner to display at the end of the help
 * @param autoUsage whether to print an automatically generated usage statement
 */
public void printHelp(String cmdLineSyntax, String header, Options options, String footer, boolean autoUsage) {
    printHelp(getWidth(), cmdLineSyntax, header, options, footer, autoUsage);
}

/**
 * Print the help for <code>options</code> with the specified command line syntax. This method prints help information to
 * System.out. Convenience overload: no auto-usage.
 *
 * @param width the number of characters to be displayed on each line
 * @param cmdLineSyntax the syntax for this application
 * @param header the banner to display at the beginning of the help
 * @param options the Options instance
 * @param footer the banner to display at the end of the help
 */
public void printHelp(int width, String cmdLineSyntax, String header, Options options, String footer) {
    printHelp(width, cmdLineSyntax, header, options, footer, false);
}

/**
 * Print the help for <code>options</code> with the specified command line syntax. This method prints help information to
 * System.out, using the configured left and description padding.
 *
 * @param width the number of characters to be displayed on each line
 * @param cmdLineSyntax the syntax for this application
 * @param header the banner to display at the beginning of the help
 * @param options the Options instance
 * @param footer the banner to display at the end of the help
 * @param autoUsage whether to print an automatically generated usage statement
 */
public void printHelp(int width, String cmdLineSyntax, String header, Options options, String footer, boolean autoUsage) {
    // wrap System.out; flush (not close) so the caller's stdout stays usable
    PrintWriter pw = new PrintWriter(System.out);
    printHelp(pw, width, cmdLineSyntax, header, options, getLeftPadding(), getDescPadding(), footer, autoUsage);
    pw.flush();
}

/**
 * Print the help for <code>options</code> with the specified command line syntax. Convenience overload: no auto-usage.
 *
 * @param pw the writer to which the help will be written
 * @param width the number of characters to be displayed on each line
 * @param cmdLineSyntax the syntax for this application
 * @param header the banner to display at the beginning of the help
 * @param options the Options instance
 * @param leftPad the number of characters of padding to be prefixed to each line
 * @param descPad the number of characters of padding to be prefixed to each description line
 * @param footer the banner to display at the end of the help
 *
 * @throws IllegalStateException if there is no room to print a line
 */
public void printHelp(PrintWriter pw, int width, String cmdLineSyntax, String header, Options options, int leftPad,
        int descPad, String footer) {
    printHelp(pw, width, cmdLineSyntax, header, options, leftPad, descPad, footer, false);
}
/**
 * Print the help for <code>options</code> with the specified command line syntax.
 * Output order is: usage line, optional header, the option table, optional footer.
 *
 * @param pw the writer to which the help will be written
 * @param width the number of characters to be displayed on each line
 * @param cmdLineSyntax the syntax for this application
 * @param header the banner to display at the beginning of the help
 * @param options the Options instance
 * @param leftPad the number of characters of padding to be prefixed to each line
 * @param descPad the number of characters of padding to be prefixed to each description line
 * @param footer the banner to display at the end of the help
 * @param autoUsage whether to print an automatically generated usage statement
 *
 * @throws IllegalArgumentException if cmdLineSyntax is null or empty
 * @throws IllegalStateException if there is no room to print a line
 */
public void printHelp(PrintWriter pw, int width, String cmdLineSyntax, String header, Options options, int leftPad,
        int descPad, String footer, boolean autoUsage) {
    boolean syntaxMissing = cmdLineSyntax == null || cmdLineSyntax.length() == 0;
    if (syntaxMissing) {
        throw new IllegalArgumentException("cmdLineSyntax not provided");
    }
    // usage line, either generated from the Options or taken verbatim
    if (autoUsage) {
        printUsage(pw, width, cmdLineSyntax, options);
    } else {
        printUsage(pw, width, cmdLineSyntax);
    }
    boolean hasHeader = header != null && header.trim().length() > 0;
    if (hasHeader) {
        printWrapped(pw, width, header);
    }
    printOptions(pw, width, options, leftPad, descPad);
    boolean hasFooter = footer != null && footer.trim().length() > 0;
    if (hasFooter) {
        printWrapped(pw, width, footer);
    }
}
/**
 * Prints an automatically generated usage statement for the specified
 * application: the syntax prefix, the application name, and a clause for
 * every option (grouped options are emitted once, as a group clause).
 *
 * @param pw The PrintWriter to print the usage statement
 * @param width The number of characters to display per line
 * @param app The application name
 * @param options The command line Options
 */
public void printUsage(PrintWriter pw, int width, String app, Options options) {
    StringBuffer usage = new StringBuffer(getSyntaxPrefix()).append(app).append(" ");
    // groups whose clause has already been emitted
    Collection<OptionGroup> renderedGroups = new ArrayList<OptionGroup>();
    List<Option> sortedOptions = new ArrayList<Option>(options.getOptions());
    if (getOptionComparator() != null) {
        Collections.sort(sortedOptions, getOptionComparator());
    }
    for (Iterator<Option> it = sortedOptions.iterator(); it.hasNext();) {
        Option option = it.next();
        OptionGroup group = options.getOptionGroup(option);
        if (group == null) {
            // stand-alone option: bracketed iff it is optional
            appendOption(usage, option, option.isRequired());
        } else if (!renderedGroups.contains(group)) {
            // first member of this group seen: emit the whole group clause
            renderedGroups.add(group);
            appendOptionGroup(usage, group);
        }
        // (later members of an already-rendered group contribute nothing,
        // matching the original behavior, including its trailing separator)
        if (it.hasNext()) {
            usage.append(" ");
        }
    }
    // hanging indent starts just after the first space (i.e. under the app name)
    printWrapped(pw, width, usage.toString().indexOf(' ') + 1, usage.toString());
}
/**
 * Appends the usage clause for an OptionGroup to a StringBuffer. The clause is wrapped in square brackets if the group is
 * not required; members are separated by " | ". The display of the options is handled by appendOption.
 *
 * @param buff the StringBuffer to append to
 * @param group the group to append
 * @see #appendOption(StringBuffer,Option,boolean)
 */
private void appendOptionGroup(StringBuffer buff, OptionGroup group) {
    boolean optional = !group.isRequired();
    if (optional) {
        buff.append("[");
    }
    List<Option> members = new ArrayList<Option>(group.getOptions());
    if (getOptionComparator() != null) {
        Collections.sort(members, getOptionComparator());
    }
    for (Iterator<Option> it = members.iterator(); it.hasNext();) {
        // required/optional is decided at group level, so each member is
        // rendered without its own brackets
        appendOption(buff, it.next(), true);
        if (it.hasNext()) {
            buff.append(" | ");
        }
    }
    if (optional) {
        buff.append("]");
    }
}
/**
 * Appends the usage clause for a single Option to a StringBuffer: the short
 * opt (or long opt if no short one exists), followed by an argument
 * placeholder when the option takes a value, bracketed when optional.
 *
 * @param buff the StringBuffer to append to
 * @param option the Option to append
 * @param required whether the Option is required or not
 */
private void appendOption(StringBuffer buff, Option option, boolean required) {
    if (!required) {
        buff.append("[");
    }
    String shortOpt = option.getOpt();
    if (shortOpt == null) {
        buff.append("--").append(option.getLongOpt());
    } else {
        buff.append("-").append(shortOpt);
    }
    String argName = option.getArgName();
    // the placeholder is suppressed only when the arg name was explicitly
    // set to the empty string (null falls back to the formatter default)
    if (option.hasArg() && (argName == null || argName.length() != 0)) {
        buff.append(shortOpt == null ? longOptSeparator : " ");
        buff.append("<").append(argName != null ? argName : getArgName()).append(">");
    }
    if (!required) {
        buff.append("]");
    }
}
/**
 * Print the cmdLineSyntax to the specified writer, using the specified width.
 * Wrapped lines are indented to align with the first argument after the
 * application name.
 *
 * @param pw The printWriter to write the help to
 * @param width The number of characters per line for the usage statement.
 * @param cmdLineSyntax The usage statement.
 */
public void printUsage(PrintWriter pw, int width, String cmdLineSyntax) {
    String prefix = getSyntaxPrefix();
    int argPos = cmdLineSyntax.indexOf(' ') + 1;
    printWrapped(pw, width, prefix.length() + argPos, prefix + cmdLineSyntax);
}
/**
 * Print the help for the specified Options to the specified writer, using the specified width, left padding and description
 * padding.
 *
 * @param pw The printWriter to write the help to
 * @param width The number of characters to display per line
 * @param options The command line Options
 * @param leftPad the number of characters of padding to be prefixed to each line
 * @param descPad the number of characters of padding to be prefixed to each description line
 */
public void printOptions(PrintWriter pw, int width, Options options, int leftPad, int descPad) {
    // render into a buffer first, then emit as one println
    StringBuffer rendered = renderOptions(new StringBuffer(), width, options, leftPad, descPad);
    pw.println(rendered.toString());
}
/**
 * Print the specified text to the specified PrintWriter, wrapped at
 * <code>width</code> characters with no hanging indent.
 *
 * @param pw The printWriter to write the help to
 * @param width The number of characters to display per line
 * @param text The text to be written to the PrintWriter
 */
public void printWrapped(PrintWriter pw, int width, String text) {
    printWrapped(pw, width, 0, text);
}

/**
 * Print the specified text to the specified PrintWriter, wrapped at
 * <code>width</code> characters; continuation lines are indented to
 * <code>nextLineTabStop</code>.
 *
 * @param pw The printWriter to write the help to
 * @param width The number of characters to display per line
 * @param nextLineTabStop The position on the next line for the first tab.
 * @param text The text to be written to the PrintWriter
 */
public void printWrapped(PrintWriter pw, int width, int nextLineTabStop, String text) {
    StringBuffer sb = new StringBuffer(text.length());
    renderWrappedTextBlock(sb, width, nextLineTabStop, text);
    pw.println(sb.toString());
}
// --------------------------------------------------------------- Protected

/**
 * Render the specified Options and return the rendered Options in a StringBuffer.
 * Two passes: the first builds the "-a,--aaa <arg>" prefix for every option and
 * finds the widest one; the second pads each prefix to that common column and
 * appends the wrapped description.
 *
 * @param sb The StringBuffer to place the rendered Options into.
 * @param width The number of characters to display per line
 * @param options The command line Options
 * @param leftPad the number of characters of padding to be prefixed to each line
 * @param descPad the number of characters of padding to be prefixed to each description line
 *
 * @return the StringBuffer with the rendered Options contents.
 */
protected StringBuffer renderOptions(StringBuffer sb, int width, Options options, int leftPad, int descPad) {
    final String lpad = createPadding(leftPad);
    final String dpad = createPadding(descPad);
    // first pass: build prefixes and track the widest one
    int max = 0;
    List<StringBuffer> prefixList = new ArrayList<StringBuffer>();
    List<Option> optList = options.helpOptions();
    if (getOptionComparator() != null) {
        Collections.sort(optList, getOptionComparator());
    }
    for (Option option : optList) {
        StringBuffer optBuf = new StringBuffer();
        if (option.getOpt() == null) {
            // long-only option
            optBuf.append(lpad).append(" ").append(getLongOptPrefix()).append(option.getLongOpt());
        } else {
            optBuf.append(lpad).append(getOptPrefix()).append(option.getOpt());
            if (option.hasLongOpt()) {
                optBuf.append(',').append(getLongOptPrefix()).append(option.getLongOpt());
            }
        }
        if (option.hasArg()) {
            String argName = option.getArgName();
            if (argName != null && argName.length() == 0) {
                // arg name explicitly blank: no placeholder, just a space
                optBuf.append(' ');
            } else {
                optBuf.append(option.hasLongOpt() ? longOptSeparator : " ");
                // use the local argName (was redundantly re-fetched via
                // option.getArgName() before); null falls back to the default
                optBuf.append("<").append(argName != null ? argName : getArgName()).append(">");
            }
        }
        prefixList.add(optBuf);
        max = Math.max(max, optBuf.length());
    }
    // second pass: align descriptions under a single column and wrap them
    int x = 0;
    for (Iterator<Option> it = optList.iterator(); it.hasNext();) {
        Option option = it.next();
        StringBuilder optBuf = new StringBuilder(prefixList.get(x++).toString());
        if (optBuf.length() < max) {
            optBuf.append(createPadding(max - optBuf.length()));
        }
        optBuf.append(dpad);
        // continuation lines of the description start at the description column
        int nextLineTabStop = max + descPad;
        if (option.getDescription() != null) {
            optBuf.append(option.getDescription());
        }
        renderWrappedText(sb, width, nextLineTabStop, optBuf.toString());
        if (it.hasNext()) {
            sb.append(getNewLine());
        }
    }
    return sb;
}
/**
 * Render the specified text and return the rendered Options in a StringBuffer.
 * The first line is emitted as-is (right-trimmed); every subsequent line is
 * prefixed with <code>nextLineTabStop</code> spaces before being re-wrapped.
 *
 * @param sb The StringBuffer to place the rendered text into.
 * @param width The number of characters to display per line
 * @param nextLineTabStop The position on the next line for the first tab.
 * @param text The text to be rendered.
 *
 * @return the StringBuffer with the rendered Options contents.
 */
protected StringBuffer renderWrappedText(StringBuffer sb, int width, int nextLineTabStop, String text) {
    int pos = findWrapPos(text, width, 0);
    if (pos == -1) {
        // the whole text fits on a single line
        sb.append(rtrim(text));
        return sb;
    }
    sb.append(rtrim(text.substring(0, pos))).append(getNewLine());
    if (nextLineTabStop >= width) {
        // stops infinite loop happening: a tab stop at or past the width
        // would leave no room for content on continuation lines
        nextLineTabStop = 1;
    }
    // all following lines must be padded with nextLineTabStop space characters
    final String padding = createPadding(nextLineTabStop);
    while (true) {
        // drop leading/trailing whitespace of the remainder, then re-indent
        text = padding + text.substring(pos).trim();
        pos = findWrapPos(text, width, 0);
        if (pos == -1) {
            sb.append(text);
            return sb;
        }
        if (text.length() > width && pos == nextLineTabStop - 1) {
            // wrap position fell inside the padding itself; force a hard
            // break at the full width to guarantee forward progress
            pos = width;
        }
        sb.append(rtrim(text.substring(0, pos))).append(getNewLine());
    }
}
/**
 * Render the specified text width a maximum width. This method differs from renderWrappedText by not removing leading
 * spaces after a new line. Each input line (split on line terminators) is
 * wrapped independently via renderWrappedText.
 *
 * @param sb The StringBuffer to place the rendered text into.
 * @param width The number of characters to display per line
 * @param nextLineTabStop The position on the next line for the first tab.
 * @param text The text to be rendered.
 * @return the StringBuffer passed in as <code>sb</code>
 */
private Appendable renderWrappedTextBlock(StringBuffer sb, int width, int nextLineTabStop, String text) {
    try {
        BufferedReader reader = new BufferedReader(new StringReader(text));
        boolean first = true;
        for (String line = reader.readLine(); line != null; line = reader.readLine()) {
            if (first) {
                first = false;
            } else {
                sb.append(getNewLine());
            }
            renderWrappedText(sb, width, nextLineTabStop, line);
        }
    } catch (IOException e) // NOPMD
    {
        // cannot happen: StringReader never throws IOException
    }
    return sb;
}
/**
 * Finds the next text wrap position after <code>startPos</code> for the text in <code>text</code> with the column width
 * <code>width</code>. The wrap point is the last position before startPos+width having a whitespace character (space, \n,
 * \r). If there is no whitespace character before startPos+width, it will return startPos+width.
 *
 * @param text The text being searched for the wrap position
 * @param width width of the wrapped text
 * @param startPos position from which to start the lookup whitespace character
 * @return position on which the text must be wrapped or -1 if the wrap position is at the end of the text
 */
protected int findWrapPos(String text, int width, int startPos) {
    // An explicit newline or tab inside the window [startPos, startPos+width]
    // forces a break there. FIX: the bound was previously "pos <= width",
    // which ignored startPos and contradicted the documented contract; all
    // callers in this file pass startPos == 0, so their behavior is unchanged.
    int pos = text.indexOf('\n', startPos);
    if (pos != -1 && pos <= startPos + width) {
        return pos + 1;
    }
    pos = text.indexOf('\t', startPos);
    if (pos != -1 && pos <= startPos + width) {
        return pos + 1;
    }
    if (startPos + width >= text.length()) {
        // the remainder of the text fits on one line
        return -1;
    }
    // look for the last whitespace character before startPos+width
    for (pos = startPos + width; pos >= startPos; --pos) {
        final char c = text.charAt(pos);
        if (c == ' ' || c == '\n' || c == '\r') {
            break;
        }
    }
    // if we found it - just return
    if (pos > startPos) {
        return pos;
    }
    // if we didn't find one, simply chop at startPos+width
    pos = startPos + width;
    return pos == text.length() ? -1 : pos;
}
/**
 * Return a String of padding of length <code>len</code>.
 *
 * @param len The length of the String of padding to create.
 *
 * @return The String of padding (all space characters)
 */
protected String createPadding(int len) {
    char[] padding = new char[len];
    Arrays.fill(padding, ' ');
    return new String(padding);
}
/**
 * Remove the trailing whitespace from the specified String. Null and empty
 * inputs are returned unchanged.
 *
 * @param s The String to remove the trailing padding from.
 *
 * @return The String without the trailing padding
 */
protected String rtrim(String s) {
    if (s == null || s.length() == 0) {
        return s;
    }
    // walk backwards past any trailing whitespace
    int end;
    for (end = s.length(); end > 0; end--) {
        if (!Character.isWhitespace(s.charAt(end - 1))) {
            break;
        }
    }
    return s.substring(0, end);
}
// ------------------------------------------------------ Package protected
// ---------------------------------------------------------------- Private
// ---------------------------------------------------------- Inner classes
/**
 * This class implements the <code>Comparator</code> interface for comparing Options.
 */
private static class OptionComparator implements Comparator<Option>, Serializable {
    /** The serial version UID. */
    private static final long serialVersionUID = 5305467873966684014L;

    /**
     * Orders two Options case-insensitively by their key.
     * {@link String#CASE_INSENSITIVE_ORDER} is specified to order exactly as
     * {@code compareToIgnoreCase}, so this is behaviorally identical to
     * comparing the keys directly.
     *
     * @param opt1 The first Option to be compared.
     * @param opt2 The second Option to be compared.
     * @return a negative integer, zero, or a positive integer as the first argument is less than, equal to, or greater than
     *         the second.
     */
    public int compare(Option opt1, Option opt2) {
        return String.CASE_INSENSITIVE_ORDER.compare(opt1.getKey(), opt2.getKey());
    }
}
}
| |
/*
* Copyright (c) 2016, EMC Corporation.
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* + Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* + Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* + The name of EMC Corporation may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.emc.ecs.metadata.client;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.emc.ecs.metadata.bo.BillingBO;
import com.emc.ecs.metadata.bo.NamespaceBO;
import com.emc.ecs.metadata.bo.ObjectBO;
import com.emc.ecs.metadata.bo.VdcBO;
import com.emc.ecs.metadata.dao.BillingDAO;
import com.emc.ecs.metadata.dao.EcsCollectionType;
import com.emc.ecs.metadata.dao.NamespaceDAO;
import com.emc.ecs.metadata.dao.ObjectDAO;
import com.emc.ecs.metadata.dao.VdcDAO;
import com.emc.ecs.metadata.dao.elasticsearch.ElasticBillingDAO;
import com.emc.ecs.metadata.dao.elasticsearch.ElasticBucketOwnerDAO;
import com.emc.ecs.metadata.dao.elasticsearch.ElasticDAOConfig;
import com.emc.ecs.metadata.dao.elasticsearch.ElasticNamespaceDetailDAO;
import com.emc.ecs.metadata.dao.elasticsearch.ElasticNamespaceQuotaDAO;
import com.emc.ecs.metadata.dao.elasticsearch.ElasticS3ObjectDAO;
import com.emc.ecs.metadata.dao.elasticsearch.ElasticVdcDetailDAO;
import com.emc.ecs.metadata.dao.file.FileBillingDAO;
import com.emc.ecs.metadata.dao.file.FileNamespaceDAO;
import com.emc.ecs.metadata.dao.file.FileObjectDAO;
import com.emc.ecs.metadata.dao.file.FileVdcDAO;
/**
* ECS S3 client to collect Metadata from various ECS systems
*
*/
public class MetadataCollectorClient {
// Default ECS management API ports.
private static final Integer DEFAULT_ECS_MGMT_PORT = 4443;
private static final Integer DEFAULT_ECS_ALTERNATIVE_MGMT_PORT = 9101;
// Collection actions accepted as the value of --collect-data.
private static final String ECS_COLLECT_BILLING_DATA = "billing";
private static final String ECS_COLLECT_OBJECT_DATA = "object";
private static final String ECS_COLLECT_OBJECT_DATA_NAMESPACE = "object-namespace";
private static final String ECS_COLLECT_OBJECT_DATA_BUCKET = "object-bucket";
private static final String ECS_COLLECT_OBJECT_VERSION_DATA = "object-version";
private static final String ECS_COLLECT_NAMESPACE_DETAIL = "namespace-detail";
private static final String ECS_COLLECT_NAMESPACE_QUOTA = "namespace-quota";
private static final String ECS_COLLECT_ALL_VDC = "vdc";
private static final String ECS_COLLECT_BUCKET_OWNER = "bucket-owner";
private static final String ECS_COLLECT_ALL_DATA = "all";
// Command-line flag names.
private static final String ECS_HOSTS_CONFIG_ARGUMENT = "--ecs-hosts";
private static final String ECS_MGMT_ACCESS_KEY_CONFIG_ARGUMENT = "--ecs-mgmt-access-key";
private static final String ECS_MGMT_SECRET_KEY_CONFIG_ARGUMENT = "--ecs-mgmt-secret-key";
private static final String ECS_MGMT_PORT_CONFIG_ARGUMENT = "--ecs-mgmt-port";
private static final String ECS_ALT_MGMT_PORT_CONFIG_ARGUMENT = "--ecs-alt-mgmt-port";
private static final String ECS_COLLECT_DATA_CONFIG_ARGUMENT = "--collect-data";
private static final String ECS_COLLECT_MODIFIED_OBJECT_CONFIG_ARGUMENT = "--collect-only-modified-objects";
private static final String ECS_INIT_INDEXES_ONLY_CONFIG_ARGUMENT = "--init-indexes-only";
private static final String ECS_COLLECT_OBJECT_DATA_NAME_ARGUMENT = "--bucket-name";
private static final String ECS_COLLECT_OBJECT_DATA_NAMESPACE_ARGUMENT = "--namespace-name";
private static final String ELASTIC_HOSTS_CONFIG_ARGUMENT = "--elastic-hosts";
private static final String ELASTIC_PORT_CONFIG_ARGUMENT = "--elastic-port";
private static final String ELASTIC_CLUSTER_CONFIG_ARGUMENT = "--elastic-cluster";
// xpack security arguments
public final static String XPACK_SECURITY_USER_ARG = "--xpack-user";
public final static String XPACK_SECURITY_USER_PASSWORD_ARG = "--xpack-pwd";
public final static String XPACK_SSL_KEY_ARG = "--xpack-key";
public final static String XPACK_SSL_CERTIFICATE_ARG = "--xpack-cert";
// NOTE(review): the double dash in "--xpack--cert-ca" looks like a typo, but
// it is the published flag value; changing it would break existing scripts.
public final static String XPACK_SSL_CERTIFICATE_AUTH_ARG = "--xpack--cert-ca";
// Metadata key under which ECS reports an object's last-modified timestamp.
private static final String ECS_OBJECT_LAST_MODIFIED_MD_KEY = "LastModified";
// secret argument to test various collection time
// specific x number of days before current day
private static final String ECS_COLLECTION_DAY_SHIFT_ARGUMENT = "--collection-day-shift";
// Timestamp format used to label collected data.
// NOTE(review): SimpleDateFormat is not thread-safe; this shared static
// instance must not be used concurrently from the worker threads — verify.
private static final String DATA_DATE_PATTERN = "yyyy-MM-dd'T'HH:mm:ss'Z'";
private static final SimpleDateFormat DATA_DATE_FORMAT = new SimpleDateFormat(DATA_DATE_PATTERN);
// Usage text printed on argument errors. NOTE(review): spacing around the
// placeholders is inconsistent (some flags have a space before "<...>", some
// do not) — cosmetic only, left as-is since it is user-visible output.
private static final String menuString = "Usage: MetadataCollector [" + ECS_HOSTS_CONFIG_ARGUMENT + " <host1,host2>] " +
        "[" + ECS_MGMT_ACCESS_KEY_CONFIG_ARGUMENT + " <admin-username>]" +
        "[" + ECS_MGMT_SECRET_KEY_CONFIG_ARGUMENT + "<admin-password>]" +
        "[" + ECS_MGMT_PORT_CONFIG_ARGUMENT + "<mgmt-port {default: 4443}>]" +
        "[" + ELASTIC_HOSTS_CONFIG_ARGUMENT + " <host1,host2>] " +
        "[" + ELASTIC_PORT_CONFIG_ARGUMENT + "<elastic-port {default: 9300}>]" +
        "[" + ELASTIC_CLUSTER_CONFIG_ARGUMENT + "<elastic-cluster>]" +
        "[" + ECS_INIT_INDEXES_ONLY_CONFIG_ARGUMENT + "]" +
        "[" + ECS_COLLECT_MODIFIED_OBJECT_CONFIG_ARGUMENT + "<number of days>" + " | " +
        ECS_COLLECT_DATA_CONFIG_ARGUMENT + " <" +
        ECS_COLLECT_BILLING_DATA + "|" +
        ECS_COLLECT_OBJECT_DATA + "|" +
        ECS_COLLECT_OBJECT_VERSION_DATA + "|" +
        ECS_COLLECT_NAMESPACE_DETAIL + "|" +
        ECS_COLLECT_NAMESPACE_QUOTA + "|" +
        ECS_COLLECT_ALL_VDC + "|" +
        ECS_COLLECT_BUCKET_OWNER + "|" +
        ECS_COLLECT_OBJECT_DATA_NAMESPACE + "|" +
        ECS_COLLECT_OBJECT_DATA_BUCKET + "|" +
        ECS_COLLECT_ALL_DATA +">] " +
        "[" + XPACK_SECURITY_USER_ARG + "<xpack-username> " +
        XPACK_SECURITY_USER_PASSWORD_ARG + "<xpack-password> " +
        XPACK_SSL_KEY_ARG + "<ssl-key> " +
        XPACK_SSL_CERTIFICATE_ARG + "<ssl-certificate> " +
        XPACK_SSL_CERTIFICATE_AUTH_ARG + "<ssl-certificate-authorities> ]";
// Runtime configuration, populated from command-line arguments by
// handleArguments(); defaults below apply when a flag is omitted.
private static String ecsHosts = "";
private static String ecsMgmtAccessKey = "";
private static String ecsMgmtSecretKey = "";
private static String elasticHosts = "";
private static Integer elasticPort = 9300;
private static String elasticCluster = "ecs-analytics";
private static Integer ecsMgmtPort = DEFAULT_ECS_MGMT_PORT;
private static String collectData = ECS_COLLECT_ALL_DATA;
// Test-only: shift the collection timestamp back by N days.
private static Integer relativeDayShift = 0;
// Only collect objects modified in the last N days (with the flag below).
private static Integer objectModifiedSinceNoOfDays = 0;
private static boolean relativeObjectModifiedSinceOption = false;
private static boolean initIndexesOnlyOption = false;
private static Integer ecsAlternativeMgmtPort = DEFAULT_ECS_ALTERNATIVE_MGMT_PORT;
// X-Pack security settings (null when not supplied).
private static String xpackUser;
private static String xpackPassword;
private static String xpackSslKey;
private static String xpackSslCertificate;
// NOTE(review): "Sssl" is a typo in this field name; it is referenced
// elsewhere in the file, so renaming must be done in one coordinated change.
private static String xpackSsslCertificateAuth;
// Scope selectors for object-bucket / object-namespace collection.
private static String bucketName;
private static String objectNamespace;
private final static Logger logger = LoggerFactory.getLogger(MetadataCollectorClient.class);
// Thread pool sized to the number of available processors; collection work
// is submitted here and tracked through the `futures` queue.
private static ThreadPoolExecutor threadPoolExecutor =
        (ThreadPoolExecutor) Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
private static Queue<Future<?>> futures = new ConcurrentLinkedQueue<Future<?>>();
// Running total of objects collected across all worker threads.
private static AtomicLong objectCount = new AtomicLong(0L);
/**
 * Entry point: parses arguments, dispatches the requested collection
 * action(s), waits for all submitted worker futures, logs totals, and
 * shuts the thread pool down.
 *
 * @param args command line arguments, see {@code menuString}
 * @throws Exception propagated from argument handling / collection setup
 */
public static void main(String[] args) throws Exception {
    // handle passed in arguments
    handleArguments(args);
    // grab current timestamp in order to label collected data with time
    Date collectionTime = new Date(System.currentTimeMillis());
    if(initIndexesOnlyOption) {
        initIndexesOnly(collectionTime);
        // no need to go further
        return;
    }
    if(relativeObjectModifiedSinceOption) {
        // collect only objects modified in the last N days
        collectObjectDataModifiedSinceDate(collectionTime, objectModifiedSinceNoOfDays);
    } else {
        // check if secret day shifting testing option was specified
        if( relativeDayShift != 0 ) {
            Long epochTime = collectionTime.getTime();
            Long daysShift = TimeUnit.DAYS.toMillis(relativeDayShift);
            collectionTime = new Date(epochTime - daysShift);
        }
        if(collectData.equals(ECS_COLLECT_BILLING_DATA) ){
            // collect billing data
            collectBillingData(collectionTime);
        }
        else if(collectData.equals(ECS_COLLECT_OBJECT_DATA) ) {
            // NOTE(review): this guard is always true here — we are in the
            // else-branch of relativeObjectModifiedSinceOption, so it is false
            if(!relativeObjectModifiedSinceOption){
                // collect object data
                collectObjectData(collectionTime);
            }
        } else if(collectData.equals(ECS_COLLECT_OBJECT_VERSION_DATA)) {
            // collect object version data
            collectObjectVersionData(collectionTime);
        } else if(collectData.equals(ECS_COLLECT_ALL_DATA)) {
            // collect billing data
            collectBillingData(collectionTime);
            // only collect all objects if the modified-since option has NOT
            // been specified (always true in this branch, see note above)
            if(!relativeObjectModifiedSinceOption) {
                // collect object data
                collectObjectData(collectionTime);
            }
            collectNamespaceDetails(collectionTime);
            collectNamespaceQuota(collectionTime);
        } else if(collectData.equals(ECS_COLLECT_NAMESPACE_DETAIL)) {
            // collect namespace details
            collectNamespaceDetails(collectionTime);
        } else if(collectData.equals(ECS_COLLECT_NAMESPACE_QUOTA)) {
            // collect namespace quota
            collectNamespaceQuota(collectionTime);
        } else if(collectData.equals(ECS_COLLECT_BUCKET_OWNER)) {
            // collect bucket owner
            collectBucketOwnership(collectionTime);
        } else if(collectData.equals(ECS_COLLECT_ALL_VDC)) {
            // collect vdc list
            collectVdcList(collectionTime);
        } else if(collectData.equals(ECS_COLLECT_OBJECT_DATA_NAMESPACE)) {
            // collect objects of a single namespace
            collectObjectDataByNamespace(collectionTime);
        } else if(collectData.equals(ECS_COLLECT_OBJECT_DATA_BUCKET)) {
            // collect objects of a single bucket
            collectObjectDataByBucket(collectionTime);
        } else {
            // NOTE(review): exits with status 0 even though this is an error
            // path — callers cannot distinguish failure; verify intent.
            System.err.println("Unsupported data collection action: " + collectData );
            System.err.println(menuString);
            System.exit(0);
        }
    }
    // wait for all threads to complete their work
    while ( !futures.isEmpty() ) {
        try {
            Future<?> future = futures.poll();
            if(future != null){
                future.get();
            }
        } catch (InterruptedException e) {
            logger.error(e.getLocalizedMessage());
        } catch (ExecutionException e) {
            e.printStackTrace();
            logger.error(e.getLocalizedMessage());
        }
    }
    // NOTE(review): when relativeDayShift is set, collectionTime was shifted
    // back, so this "total collection time" includes the shift — verify.
    Long objectCollectionFinish = System.currentTimeMillis();
    Double deltaTime = Double.valueOf((objectCollectionFinish - collectionTime.getTime())) / 1000 ;
    logger.info("Collected " + objectCount.get() + " objects");
    logger.info("Total collection time: " + deltaTime + " seconds");
    // take everything down once all threads have completed their work
    threadPoolExecutor.shutdown();
    // wait for all threads to terminate
    boolean termination = false;
    do {
        try {
            termination = threadPoolExecutor.awaitTermination(2, TimeUnit.MINUTES);
        } catch (InterruptedException e) {
            logger.error(e.getLocalizedMessage());
            termination = true;
        }
    } while(!termination);
}
/**
 * Collects object data restricted to a single bucket within a namespace.
 *
 * @param collectionTime timestamp associated with this collection run
 */
private static void collectObjectDataByBucket(Date collectionTime) {
    List<String> ecsHostList = Arrays.asList(ecsHosts.split(","));
    // The billing BO is used only as a metadata source here, so no DAO is passed.
    BillingBO billingBO =
            new BillingBO(ecsMgmtAccessKey, ecsMgmtSecretKey, ecsHostList, ecsMgmtPort, null, objectCount);
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    ObjectDAO objectDAO;
    if (elasticHosts.isEmpty()) {
        objectDAO = new FileObjectDAO();
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        daoConfig.setCollectionType(EcsCollectionType.object);
        initXPackConfig(daoConfig);
        objectDAO = new ElasticS3ObjectDAO(daoConfig);
        objectDAO.initIndexes(collectionTime);
    }
    ObjectBO objectBO =
            new ObjectBO(billingBO, ecsHostList, objectDAO, threadPoolExecutor, futures, objectCount);
    // Kick off collection for the single configured namespace/bucket pair.
    objectBO.collectObjectDataByBucket(collectionTime, objectNamespace, bucketName);
    objectBO.shutdown();
}
/**
 * Collects object data for every bucket in a single namespace.
 *
 * @param collectionTime timestamp associated with this collection run
 */
private static void collectObjectDataByNamespace(Date collectionTime) {
    List<String> ecsHostList = Arrays.asList(ecsHosts.split(","));
    // The billing BO is used only as a metadata source here, so no DAO is passed.
    BillingBO billingBO =
            new BillingBO(ecsMgmtAccessKey, ecsMgmtSecretKey, ecsHostList, ecsMgmtPort, null, objectCount);
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    ObjectDAO objectDAO;
    if (elasticHosts.isEmpty()) {
        objectDAO = new FileObjectDAO();
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        daoConfig.setCollectionType(EcsCollectionType.object);
        initXPackConfig(daoConfig);
        objectDAO = new ElasticS3ObjectDAO(daoConfig);
        objectDAO.initIndexes(collectionTime);
    }
    ObjectBO objectBO =
            new ObjectBO(billingBO, ecsHostList, objectDAO, threadPoolExecutor, futures, objectCount);
    // Kick off collection for the configured namespace.
    objectBO.collectObjectDataByNamespace(collectionTime, objectNamespace);
    objectBO.shutdown();
}
/**
 * Parses command-line arguments into this collector's static configuration
 * fields, then validates that the required settings for the chosen mode
 * (index-init-only vs. data collection) are present.
 *
 * Fixes over the previous version: every option is now matched with
 * {@code equals()} — the old code mixed {@code contains()} and
 * {@code equals()}, so an option whose name is a substring of another could
 * be misrouted. Two user-facing messages were also corrected (garbled
 * "day shift value port value" text and a "managment" typo).
 *
 * NOTE(review): all error paths print a message and call
 * {@code System.exit(0)} — a zero exit status on error is preserved from the
 * original behavior, but a non-zero status would be more conventional.
 *
 * @param args raw command-line arguments
 */
private static void handleArguments(String[] args) {
    if (args.length > 0 && args[0].equals("--help")) {
        System.err.println(menuString);
        System.err.println("Example queue name are: *");
        System.exit(0);
    } else {
        int i = 0;
        String arg;
        // Options are "--name value" pairs consumed left to right; parsing
        // stops at the first token that does not start with "--".
        while (i < args.length && args[i].startsWith("--")) {
            arg = args[i++];
            if (arg.equals(ECS_HOSTS_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    ecsHosts = args[i++];
                } else {
                    System.err.println(ECS_HOSTS_CONFIG_ARGUMENT + " requires hosts value(s)");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_MGMT_ACCESS_KEY_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    ecsMgmtAccessKey = args[i++];
                } else {
                    System.err.println(ECS_MGMT_ACCESS_KEY_CONFIG_ARGUMENT + " requires an access-key value");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_MGMT_SECRET_KEY_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    ecsMgmtSecretKey = args[i++];
                } else {
                    System.err.println(ECS_MGMT_SECRET_KEY_CONFIG_ARGUMENT + " requires a secret-key value");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_MGMT_PORT_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    ecsMgmtPort = Integer.valueOf(args[i++]);
                } else {
                    System.err.println(ECS_MGMT_PORT_CONFIG_ARGUMENT + " requires a mgmt port value");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_ALT_MGMT_PORT_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    ecsAlternativeMgmtPort = Integer.valueOf(args[i++]);
                } else {
                    System.err.println(ECS_ALT_MGMT_PORT_CONFIG_ARGUMENT + " requires an alternative mgmt port value");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_COLLECT_MODIFIED_OBJECT_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    relativeObjectModifiedSinceOption = true;
                    objectModifiedSinceNoOfDays = Integer.valueOf(args[i++]);
                } else {
                    System.err.println(ECS_COLLECT_MODIFIED_OBJECT_CONFIG_ARGUMENT + " requires a specified number of days value");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_COLLECT_DATA_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    collectData = args[i++];
                } else {
                    System.err.println(ECS_COLLECT_DATA_CONFIG_ARGUMENT + " requires a collect data value");
                    System.exit(0);
                }
            } else if (arg.equals(ELASTIC_HOSTS_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    elasticHosts = args[i++];
                } else {
                    System.err.println(ELASTIC_HOSTS_CONFIG_ARGUMENT + " requires hosts value(s)");
                    System.exit(0);
                }
            } else if (arg.equals(ELASTIC_PORT_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    elasticPort = Integer.valueOf(args[i++]);
                } else {
                    System.err.println(ELASTIC_PORT_CONFIG_ARGUMENT + " requires a port value");
                    System.exit(0);
                }
            } else if (arg.equals(ELASTIC_CLUSTER_CONFIG_ARGUMENT)) {
                if (i < args.length) {
                    elasticCluster = args[i++];
                } else {
                    System.err.println(ELASTIC_CLUSTER_CONFIG_ARGUMENT + " requires a cluster value");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_COLLECTION_DAY_SHIFT_ARGUMENT)) {
                if (i < args.length) {
                    relativeDayShift = Integer.valueOf(args[i++]);
                } else {
                    System.err.println(ECS_COLLECTION_DAY_SHIFT_ARGUMENT + " requires a day shift value");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_INIT_INDEXES_ONLY_CONFIG_ARGUMENT)) {
                // Flag option: takes no value.
                initIndexesOnlyOption = true;
            } else if (arg.equals(XPACK_SECURITY_USER_ARG)) {
                if (i < args.length) {
                    xpackUser = args[i++];
                } else {
                    System.err.println(XPACK_SECURITY_USER_ARG + " requires a value");
                    System.exit(0);
                }
            } else if (arg.equals(XPACK_SECURITY_USER_PASSWORD_ARG)) {
                if (i < args.length) {
                    xpackPassword = args[i++];
                } else {
                    System.err.println(XPACK_SECURITY_USER_PASSWORD_ARG + " requires a value");
                    System.exit(0);
                }
            } else if (arg.equals(XPACK_SSL_KEY_ARG)) {
                if (i < args.length) {
                    xpackSslKey = args[i++];
                } else {
                    System.err.println(XPACK_SSL_KEY_ARG + " requires a value");
                    System.exit(0);
                }
            } else if (arg.equals(XPACK_SSL_CERTIFICATE_ARG)) {
                if (i < args.length) {
                    xpackSslCertificate = args[i++];
                } else {
                    System.err.println(XPACK_SSL_CERTIFICATE_ARG + " requires a value");
                    System.exit(0);
                }
            } else if (arg.equals(XPACK_SSL_CERTIFICATE_AUTH_ARG)) {
                if (i < args.length) {
                    xpackSsslCertificateAuth = args[i++];
                } else {
                    System.err.println(XPACK_SSL_CERTIFICATE_AUTH_ARG + " requires a value");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_COLLECT_OBJECT_DATA_NAMESPACE_ARGUMENT)) {
                if (i < args.length) {
                    objectNamespace = args[i++];
                } else {
                    System.err.println(ECS_COLLECT_OBJECT_DATA_NAMESPACE_ARGUMENT + " requires namespace");
                    System.exit(0);
                }
            } else if (arg.equals(ECS_COLLECT_OBJECT_DATA_NAME_ARGUMENT)) {
                if (i < args.length) {
                    bucketName = args[i++];
                } else {
                    System.err.println(ECS_COLLECT_OBJECT_DATA_NAME_ARGUMENT + " requires bucket");
                    System.exit(0);
                }
            } else {
                System.err.println("Unrecognized option: " + arg);
                System.err.println(menuString);
                System.exit(0);
            }
        }
        // A bucket restriction is only meaningful inside a namespace.
        if (bucketName != null) {
            if (objectNamespace == null || "".equals(objectNamespace)) {
                System.err.println(ECS_COLLECT_OBJECT_DATA_NAMESPACE_ARGUMENT + " requires namespace, " + ECS_COLLECT_OBJECT_DATA_NAME_ARGUMENT + " requires bucket");
                System.exit(0);
            }
        }
    }
    if (initIndexesOnlyOption) {
        // Index-init mode only needs an Elastic cluster.
        if (elasticHosts.isEmpty()) {
            System.err.println("Missing Elastic hostname use " + ELASTIC_HOSTS_CONFIG_ARGUMENT +
                    "<host1, host2> to specify a value");
            return;
        }
    } else {
        // Data-collection mode needs ECS hosts plus management credentials.
        if (ecsHosts.isEmpty()) {
            System.err.println("Missing ECS hostname use " + ECS_HOSTS_CONFIG_ARGUMENT +
                    "<host1, host2> to specify a value");
            return;
        }
        if (ecsMgmtAccessKey.isEmpty()) {
            System.err.println("Missing management access key use " + ECS_MGMT_ACCESS_KEY_CONFIG_ARGUMENT +
                    "<admin-username> to specify a value");
            return;
        }
        if (ecsMgmtSecretKey.isEmpty()) {
            System.err.println("Missing management secret key use " + ECS_MGMT_SECRET_KEY_CONFIG_ARGUMENT +
                    "<admin-password> to specify a value");
            return;
        }
    }
}
/**
 * Collects billing/metering data.
 *
 * @param collectionTime timestamp associated with this collection run
 */
private static void collectBillingData(Date collectionTime) {
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    BillingDAO billingDAO;
    if (elasticHosts.isEmpty()) {
        billingDAO = new FileBillingDAO(null);
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        initXPackConfig(daoConfig);
        billingDAO = new ElasticBillingDAO(daoConfig);
        billingDAO.initIndexes(collectionTime);
    }
    // Build the billing business object and run the collection.
    BillingBO billingBO = new BillingBO(ecsMgmtAccessKey,
            ecsMgmtSecretKey,
            Arrays.asList(ecsHosts.split(",")),
            ecsMgmtPort,
            billingDAO,
            objectCount);
    billingBO.collectBillingData(collectionTime);
    billingBO.shutdown();
}
/**
 * Collects object data across all namespaces and buckets.
 *
 * @param collectionTime timestamp associated with this collection run
 */
private static void collectObjectData(Date collectionTime) {
    List<String> ecsHostList = Arrays.asList(ecsHosts.split(","));
    // The billing BO is used only as a metadata source here, so no DAO is passed.
    BillingBO billingBO =
            new BillingBO(ecsMgmtAccessKey, ecsMgmtSecretKey, ecsHostList, ecsMgmtPort, null, objectCount);
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    ObjectDAO objectDAO;
    if (elasticHosts.isEmpty()) {
        objectDAO = new FileObjectDAO();
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        daoConfig.setCollectionType(EcsCollectionType.object);
        initXPackConfig(daoConfig);
        objectDAO = new ElasticS3ObjectDAO(daoConfig);
        objectDAO.initIndexes(collectionTime);
    }
    ObjectBO objectBO =
            new ObjectBO(billingBO, ecsHostList, objectDAO, threadPoolExecutor, futures, objectCount);
    objectBO.collectObjectData(collectionTime);
    objectBO.shutdown();
}
/**
 * Collects only objects whose last-modified time falls within the last
 * {@code numberOfDays} days relative to {@code collectionTime}.
 *
 * @param collectionTime timestamp associated with this collection run
 * @param numberOfDays   look-back window, in days
 */
private static void collectObjectDataModifiedSinceDate(Date collectionTime, Integer numberOfDays) {
    List<String> ecsHostList = Arrays.asList(ecsHosts.split(","));
    // The billing BO is used only as a metadata source here, so no DAO is passed.
    BillingBO billingBO =
            new BillingBO(ecsMgmtAccessKey, ecsMgmtSecretKey, ecsHostList, ecsMgmtPort, null, objectCount);
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    ObjectDAO objectDAO;
    if (elasticHosts.isEmpty()) {
        objectDAO = new FileObjectDAO();
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        daoConfig.setCollectionType(EcsCollectionType.object);
        initXPackConfig(daoConfig);
        objectDAO = new ElasticS3ObjectDAO(daoConfig);
        objectDAO.initIndexes(collectionTime);
    }
    ObjectBO objectBO =
            new ObjectBO(billingBO, ecsHostList, objectDAO, threadPoolExecutor, futures, objectCount);
    // Build the metadata query: ( LastModified >= '<since date>' ).
    Date sinceDate =
            new Date(collectionTime.getTime() - TimeUnit.MILLISECONDS.convert(numberOfDays, TimeUnit.DAYS));
    String sinceDateTime = DATA_DATE_FORMAT.format(sinceDate);
    String queryCriteria = "( " + ECS_OBJECT_LAST_MODIFIED_MD_KEY + " >= '" + sinceDateTime + "' )";
    objectBO.collectObjectData(collectionTime, queryCriteria);
    objectBO.shutdown();
}
/**
 * Collects object version data.
 *
 * @param collectionTime timestamp associated with this collection run
 */
private static void collectObjectVersionData(Date collectionTime) {
    List<String> ecsHostList = Arrays.asList(ecsHosts.split(","));
    // The billing BO is used only as a metadata source here, so no DAO is passed.
    BillingBO billingBO =
            new BillingBO(ecsMgmtAccessKey, ecsMgmtSecretKey, ecsHostList, ecsMgmtPort, null, objectCount);
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    ObjectDAO objectDAO;
    if (elasticHosts.isEmpty()) {
        objectDAO = new FileObjectDAO();
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        daoConfig.setCollectionType(EcsCollectionType.object_version);
        initXPackConfig(daoConfig);
        objectDAO = new ElasticS3ObjectDAO(daoConfig);
        objectDAO.initIndexes(collectionTime);
    }
    ObjectBO objectBO =
            new ObjectBO(billingBO, ecsHostList, objectDAO, threadPoolExecutor, futures, objectCount);
    objectBO.collectObjectVersionData(collectionTime);
    objectBO.shutdown();
}
/**
 * Creates every Elastic index used by the collectors, without collecting any
 * data. No-op (all DAO locals stay null) unless Elastic hosts were
 * configured; there is no file-DAO fallback here since file DAOs have no
 * indexes to create.
 *
 * @param collectionTime timestamp the indexes are created for
 */
private static void initIndexesOnly(Date collectionTime) {
    // Instantiate Object DAO
    ObjectDAO objectDAO = null;
    BillingDAO billingDAO = null;
    VdcDAO vdcDAO = null;
    NamespaceDAO namespaceDAO = null;
    if(!elasticHosts.isEmpty()) {
        // Instantiate ElasticSearch DAO
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        daoConfig.setCollectionType(EcsCollectionType.object);
        initXPackConfig(daoConfig);
        objectDAO = new ElasticS3ObjectDAO(daoConfig);
        // The single shared daoConfig is mutated between initIndexes() calls
        // so one DAO instance creates both the object_version and object
        // indexes.
        // NOTE(review): this assumes ElasticS3ObjectDAO reads the collection
        // type from the shared config at initIndexes() time rather than
        // capturing it at construction -- confirm against the DAO impl.
        daoConfig.setCollectionType(EcsCollectionType.object_version);
        objectDAO.initIndexes(collectionTime);
        daoConfig.setCollectionType(EcsCollectionType.object);
        objectDAO.initIndexes(collectionTime);
        billingDAO = new ElasticBillingDAO(daoConfig);
        // billing indexes
        billingDAO.initIndexes(collectionTime);
        // vdcDAO is reused for both VDC detail and bucket-owner indexes
        vdcDAO = new ElasticVdcDetailDAO(daoConfig);
        vdcDAO.initIndexes(collectionTime);
        vdcDAO = new ElasticBucketOwnerDAO(daoConfig);
        vdcDAO.initIndexes(collectionTime);
        // namespaceDAO is reused for both namespace detail and quota indexes
        namespaceDAO = new ElasticNamespaceDetailDAO(daoConfig);
        namespaceDAO.initIndexes(collectionTime);
        namespaceDAO = new ElasticNamespaceQuotaDAO(daoConfig);
        namespaceDAO.initIndexes(collectionTime);
    }
}
/**
 * Collects per-namespace detail data.
 *
 * @param collectionTime timestamp associated with this collection run
 */
private static void collectNamespaceDetails(Date collectionTime) {
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    NamespaceDAO namespaceDAO;
    if (elasticHosts.isEmpty()) {
        namespaceDAO = new FileNamespaceDAO(null);
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        initXPackConfig(daoConfig);
        namespaceDAO = new ElasticNamespaceDetailDAO(daoConfig);
        namespaceDAO.initIndexes(collectionTime);
    }
    // Build the namespace business object and run the collection.
    NamespaceBO namespaceBO = new NamespaceBO(ecsMgmtAccessKey,
            ecsMgmtSecretKey,
            Arrays.asList(ecsHosts.split(",")),
            ecsMgmtPort,
            namespaceDAO,
            objectCount);
    namespaceBO.collectNamespaceDetails(collectionTime);
    namespaceBO.shutdown();
}
/**
 * Collects per-namespace quota data.
 *
 * @param collectionTime timestamp associated with this collection run
 */
private static void collectNamespaceQuota(Date collectionTime) {
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    NamespaceDAO namespaceDAO;
    if (elasticHosts.isEmpty()) {
        namespaceDAO = new FileNamespaceDAO(null);
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        initXPackConfig(daoConfig);
        namespaceDAO = new ElasticNamespaceQuotaDAO(daoConfig);
        namespaceDAO.initIndexes(collectionTime);
    }
    // Build the namespace business object and run the collection.
    NamespaceBO namespaceBO = new NamespaceBO(ecsMgmtAccessKey,
            ecsMgmtSecretKey,
            Arrays.asList(ecsHosts.split(",")),
            ecsMgmtPort,
            namespaceDAO,
            objectCount);
    namespaceBO.collectNamespaceQuota(collectionTime);
    namespaceBO.shutdown();
}
/**
 * Collects bucket ownership data across VDCs.
 *
 * @param collectionTime timestamp associated with this collection run
 */
private static void collectBucketOwnership(Date collectionTime) {
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    VdcDAO vdcDAO;
    if (elasticHosts.isEmpty()) {
        vdcDAO = new FileVdcDAO(null);
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        initXPackConfig(daoConfig);
        vdcDAO = new ElasticBucketOwnerDAO(daoConfig);
        vdcDAO.initIndexes(collectionTime);
    }
    // Build the VDC business object (this variant also takes the alternative
    // mgmt port) and run the collection.
    VdcBO vdcBO = new VdcBO(ecsMgmtAccessKey,
            ecsMgmtSecretKey,
            Arrays.asList(ecsHosts.split(",")),
            ecsMgmtPort,
            ecsAlternativeMgmtPort,
            vdcDAO,
            objectCount);
    vdcBO.collectBucketOwner(collectionTime);
    vdcBO.shutdown();
}
/**
 * Collects the list of VDCs and their details.
 *
 * @param collectionTime timestamp associated with this collection run
 */
private static void collectVdcList(Date collectionTime) {
    // Choose the output DAO: ElasticSearch when hosts were configured, file output otherwise.
    VdcDAO vdcDAO;
    if (elasticHosts.isEmpty()) {
        vdcDAO = new FileVdcDAO(null);
    } else {
        ElasticDAOConfig daoConfig = new ElasticDAOConfig();
        daoConfig.setHosts(Arrays.asList(elasticHosts.split(",")));
        daoConfig.setPort(elasticPort);
        daoConfig.setClusterName(elasticCluster);
        daoConfig.setCollectionTime(collectionTime);
        initXPackConfig(daoConfig);
        vdcDAO = new ElasticVdcDetailDAO(daoConfig);
        vdcDAO.initIndexes(collectionTime);
    }
    // Build the VDC business object and run the collection.
    VdcBO vdcBO = new VdcBO(ecsMgmtAccessKey,
            ecsMgmtSecretKey,
            Arrays.asList(ecsHosts.split(",")),
            ecsMgmtPort,
            vdcDAO,
            objectCount);
    vdcBO.collectVdcDetails(collectionTime);
    vdcBO.shutdown();
}
/**
 * Copies the optional X-Pack security settings onto the given DAO config.
 * An unset/empty x-pack user disables the feature entirely.
 *
 * @param daoConfig config to receive the X-Pack settings
 */
private static void initXPackConfig(ElasticDAOConfig daoConfig) {
    // Guard clause: nothing to configure when no x-pack user was supplied.
    if (xpackUser == null || xpackUser.isEmpty()) {
        return;
    }
    daoConfig.setXpackUser(xpackUser);
    daoConfig.setXpackPassword(xpackPassword);
    daoConfig.setXpackSslKey(xpackSslKey);
    daoConfig.setXpackSslCertificate(xpackSslCertificate);
    daoConfig.setXpackSslCertificateAuthorities(xpackSsslCertificateAuth);
}
}
| |
package io.cattle.platform.iaas.api.auth.integration.ldap;
import static javax.naming.directory.SearchControls.*;
import io.cattle.platform.api.auth.Identity;
import io.cattle.platform.core.constants.IdentityConstants;
import io.cattle.platform.iaas.api.auth.AbstractTokenUtil;
import io.cattle.platform.iaas.api.auth.integration.interfaces.IdentityProvider;
import io.cattle.platform.iaas.api.auth.integration.ldap.ad.LdapServiceContextPoolFactory;
import io.cattle.platform.iaas.api.auth.integration.ldap.interfaces.LDAPConstants;
import io.cattle.platform.pool.PoolConfig;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attributes;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.naming.ldap.InitialLdapContext;
import javax.naming.ldap.LdapContext;
import javax.naming.ldap.LdapName;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.pool2.impl.AbandonedConfig;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for LDAP-backed identity providers (Active Directory / OpenLDAP).
 * Concrete subclasses supply the configuration constants, the logger and the
 * pooled {@link LdapContext} used for all directory operations.
 *
 * Fixes over the previous version: {@link #hasPermission(Attributes)} no
 * longer throws an uncaught NPE when the configured enabled-attribute is
 * absent, nor an uncaught {@link NumberFormatException} when its value is not
 * numeric; the user branch of {@link #attributesToIdentity(LdapName)} guards
 * a missing login attribute the same way the group branch already did; and a
 * redundant pre-check in {@link #resultsToIdentities} was removed.
 */
public abstract class LDAPIdentityProvider implements IdentityProvider {
    private static final Logger log = LoggerFactory.getLogger(LDAPIdentityProvider.class);

    /** Searches every supported scope (users and groups) for {@code name}. */
    @Override
    public List<Identity> searchIdentities(String name, boolean exactMatch) {
        if (!isConfigured()){
            notConfigured();
        }
        List<Identity> identities = new ArrayList<>();
        for (String scope : scopes()) {
            identities.addAll(searchIdentities(name, scope, exactMatch));
        }
        return identities;
    }

    /**
     * Searches a single scope. The name is LDAP-filter-escaped here, so
     * callers must pass the raw (unescaped) value.
     */
    @Override
    public List<Identity> searchIdentities(String name, String scope, boolean exactMatch) {
        if (!isConfigured()){
            notConfigured();
        }
        name = escapeLDAPSearchFilter(name);
        if (getConstantsConfig().getUserScope().equalsIgnoreCase(scope)) {
            return searchUser(name, exactMatch);
        } else if (getConstantsConfig().getGroupScope().equalsIgnoreCase(scope)) {
            return searchGroup(name, exactMatch);
        } else {
            throw new ClientVisibleException(ResponseCodes.BAD_REQUEST, "invalidScope", "Identity type is not valid for Ldap", null);
        }
    }

    /**
     * Looks up a single identity by distinguished name.
     *
     * @throws ClientVisibleException when the scope is unknown or the LDAP
     *         service context cannot be created/retrieved
     */
    @Override
    public Identity getIdentity(String distinguishedName, String scope) {
        if (!isConfigured()){
            notConfigured();
        }
        if (!getConstantsConfig().scopes().contains(scope)) {
            throw new ClientVisibleException(ResponseCodes.BAD_REQUEST, "invalidScope", "Identity type is not valid for Ldap", null);
        }
        try {
            return getObject(distinguishedName, scope);
        } catch (ServiceContextCreationException e) {
            throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR, "LdapDown", "Could not create service context.", null);
        } catch (ServiceContextRetrievalException e) {
            throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR, "LdapDown", "Could not retrieve service context.", null);
        }
    }

    /** Re-resolves an identity from the directory by its external id. */
    @Override
    public Identity transform(Identity identity) {
        if (getConstantsConfig().scopes().contains(identity.getExternalIdType())) {
            return getIdentity(identity.getExternalId(), identity.getExternalIdType());
        } else {
            throw new ClientVisibleException(ResponseCodes.BAD_REQUEST,
                    IdentityConstants.INVALID_TYPE, "Ldap does not provide: " + identity.getExternalIdType(), null);
        }
    }

    /** Validates the identity's scope; returns it unchanged when valid. */
    @Override
    public Identity untransform(Identity identity) {
        if (!getConstantsConfig().scopes().contains(identity.getExternalIdType())){
            throw new ClientVisibleException(ResponseCodes.BAD_REQUEST,
                    IdentityConstants.INVALID_TYPE, "Ldap does not provide: " + identity.getExternalIdType(), null);
        }
        return identity;
    }

    /** Searches group entries; {@code name} must already be filter-escaped. */
    protected List<Identity> searchGroup(String name, boolean exactMatch) {
        String query;
        if (exactMatch) {
            query = "(&(" + getConstantsConfig().getGroupSearchField() + '=' + name + ")(" + getConstantsConfig().objectClass() + '='
                    + getConstantsConfig().getGroupObjectClass() + "))";
        } else {
            query = "(&(" + getConstantsConfig().getGroupSearchField() + "=*" + name + "*)(" + getConstantsConfig().objectClass() + '='
                    + getConstantsConfig().getGroupObjectClass() + "))";
        }
        log.trace("LDAPIdentityProvider searchGroup query: " + query);
        return resultsToIdentities(searchLdap(query));
    }

    /** Searches user entries; {@code name} must already be filter-escaped. */
    protected List<Identity> searchUser(String name, boolean exactMatch) {
        String query;
        if (exactMatch)
        {
            query = "(&(" + getConstantsConfig().getUserSearchField() + '=' + name + ")(" + getConstantsConfig().objectClass() + '='
                    + getConstantsConfig().getUserObjectClass() + "))";
        } else {
            query = "(&(" + getConstantsConfig().getUserSearchField() + "=*" + name + "*)(" + getConstantsConfig().objectClass() + '='
                    + getConstantsConfig().getUserObjectClass() + "))";
        }
        log.trace("LDAPIdentityProvider searchUser query: " + query);
        return resultsToIdentities(searchLdap(query));
    }

    /**
     * Converts search results into identities. Returns what was collected so
     * far when iteration fails (LDAP referrals surface as NamingExceptions).
     */
    protected List<Identity> resultsToIdentities(NamingEnumeration<SearchResult> results) {
        List<Identity> identities = new ArrayList<>();
        try {
            while (results.hasMore()){
                SearchResult result = results.next();
                log.trace("LDAPIdentityProvider SearchResult: " + result);
                LdapName dn = new LdapName(result.getNameInNamespace());
                identities.add(attributesToIdentity(dn));
            }
        } catch (NamingException e) {
            //Ldap Referrals are causing this.
            getLogger().debug("While iterating results from an ldap search.", e);
        }
        return identities;
    }

    /**
     * Fetches a single entry by DN and converts it to an Identity, or null
     * when the entry is missing or fails the type/permission check.
     */
    protected Identity getObject(String distinguishedName, String scope) {
        LdapContext context = null;
        try {
            LdapName object = new LdapName(distinguishedName);
            context = getServiceContext();
            Attributes search;
            search = context.getAttributes(object);
            // NOTE(review): '&&' means an entry is only rejected when it
            // fails BOTH the type check and the permission check; '||' looks
            // intended -- confirm before changing, original behavior kept.
            if (!isType(search, scope) && !hasPermission(search)){
                return null;
            }
            return attributesToIdentity(object);
        }
        catch (NamingException e) {
            getLogger().info("Failed to get object: {} : {}", distinguishedName, e.getExplanation());
            return null;
        }
        finally {
            if (context != null) {
                getContextPool().returnObject(context);
            }
        }
    }

    /**
     * Reads an entry's attributes and maps it to a user or group Identity;
     * returns null for entries of any other object class or on lookup errors.
     */
    protected Identity attributesToIdentity(LdapName dn){
        LdapContext context = getServiceContext();
        try {
            Attributes search = context.getAttributes(dn);
            log.trace("Attributes for dn: " + dn + " to translate: " + search);
            String externalIdType;
            String accountName;
            String externalId = dn.toString();
            String login;
            if (isType(search, getConstantsConfig().getUserObjectClass())){
                externalIdType = getConstantsConfig().getUserScope();
                if (search.get(getConstantsConfig().getUserNameField()) != null) {
                    accountName = (String) search.get(getConstantsConfig().getUserNameField()).get();
                } else {
                    accountName = externalId;
                }
                // Guard a missing login attribute (mirrors the group branch);
                // previously this dereferenced null and threw an uncaught NPE.
                if (search.get(getConstantsConfig().getUserLoginField()) != null) {
                    login = (String) search.get(getConstantsConfig().getUserLoginField()).get();
                } else {
                    login = accountName;
                }
            } else if (isType(search, getConstantsConfig().getGroupObjectClass())) {
                externalIdType = getConstantsConfig().getGroupScope();
                if (search.get(getConstantsConfig().getGroupNameField()) != null) {
                    accountName = (String) search.get(getConstantsConfig().getGroupNameField()).get();
                } else {
                    accountName = externalId;
                }
                if (search.get(getConstantsConfig().getUserLoginField()) != null) {
                    login = (String) search.get(getConstantsConfig().getUserLoginField()).get();
                } else {
                    login = accountName;
                }
            } else {
                return null;
            }
            return new Identity(externalIdType, externalId, accountName, null, null, login);
        } catch (NamingException e) {
            return null;
        } finally {
            if (context != null) {
                getContextPool().returnObject(context);
            }
        }
    }

    /** Returns true when the entry's objectClass values contain {@code type}. */
    protected boolean isType(Attributes search, String type) {
        NamingEnumeration<?> objectClass;
        try {
            objectClass = search.get(getConstantsConfig().objectClass()).getAll();
            while (objectClass.hasMoreElements()) {
                Object object = objectClass.next();
                if ((object.toString()).equalsIgnoreCase(type)){
                    return true;
                }
            }
            return false;
        } catch (NamingException e) {
            getLogger().info("Failed to determine if object is type:" + type, e);
            return false;
        }
    }

    /** Runs a subtree search under the configured domain. */
    protected NamingEnumeration<SearchResult> searchLdap(String query) {
        SearchControls controls = new SearchControls();
        LdapContext context = null;
        controls.setSearchScope(SUBTREE_SCOPE);
        NamingEnumeration<SearchResult> results;
        try {
            context = getServiceContext();
            results = context.search(getConstantsConfig().getDomain(), query, controls);
        } catch (NamingException e) {
            getLogger().error("When searching ldap from /v1/identity Failed to search: " + query + " scope:" + getConstantsConfig().getDomain(), e);
            throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR, getConstantsConfig().getConfig(),
                    "Organizational Unit not found.", null);
        } finally {
            if (context != null){
                getContextPool().returnObject(context);
            }
        }
        return results;
    }

    /**
     * Returns true when the entry is not a user, or is a user whose enabled
     * attribute does not carry the configured "disabled" bit. Returns false
     * (instead of throwing) when the attribute is missing or non-numeric.
     */
    protected boolean hasPermission(Attributes attributes){
        int permission;
        try {
            if (!isType(attributes, getConstantsConfig().getUserObjectClass())){
                return true;
            }
            if (StringUtils.isNotBlank(getConstantsConfig().getUserEnabledAttribute())) {
                // Attributes.get returns null when the attribute is absent;
                // previously that null was dereferenced (uncaught NPE).
                if (attributes.get(getConstantsConfig().getUserEnabledAttribute()) == null) {
                    return false;
                }
                permission = Integer.parseInt(attributes.get(getConstantsConfig().getUserEnabledAttribute()).get()
                        .toString());
            } else {
                return true;
            }
        } catch (NamingException | NumberFormatException e) {
            // NumberFormatException was previously uncaught and escaped.
            getLogger().error("Failed to get USER_ENABLED_ATTRIBUTE.", e);
            return false;
        }
        permission = permission & getConstantsConfig().getUserDisabledBitMask();
        return permission != getConstantsConfig().getUserDisabledBitMask();
    }

    /**
     * Authenticates against the configured server with a simple bind and
     * returns the resulting context.
     *
     * @throws UserLoginFailureException on blank password or bind failure
     */
    protected LdapContext login(String username, String password) {
        if (StringUtils.isEmpty(password)) {
            throw new UserLoginFailureException("Failed to login ldap User : Invalid Credentials");
        }
        Hashtable<String, String> props = new Hashtable<>();
        props.put(Context.SECURITY_AUTHENTICATION, "simple");
        props.put(Context.SECURITY_PRINCIPAL, username);
        props.put(Context.SECURITY_CREDENTIALS, password);
        props.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
        LdapContext userContext;
        try {
            String url = "ldap://" + getConstantsConfig().getServer() + ':' + getConstantsConfig().getPort() + '/';
            props.put(Context.PROVIDER_URL, url);
            if (getConstantsConfig().getTls()) {
                props.put(Context.SECURITY_PROTOCOL, "ssl");
            }
            userContext = new InitialLdapContext(props, null);
            return userContext;
        } catch (NamingException e) {
            throw new UserLoginFailureException("Failed to login ldap User: " + LDAPUtils.errorCodeToDescription(e), e, username);
        }
    }

    /** Escapes RFC 4515 LDAP filter metacharacters in {@code filter}. */
    protected String escapeLDAPSearchFilter(String filter) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < filter.length(); i++) {
            char curChar = filter.charAt(i);
            switch (curChar) {
                case '\\':
                    sb.append("\\5c");
                    break;
                case '*':
                    sb.append("\\2a");
                    break;
                case '(':
                    sb.append("\\28");
                    break;
                case ')':
                    sb.append("\\29");
                    break;
                case '\u0000':
                    sb.append("\\00");
                    break;
                default:
                    sb.append(curChar);
            }
        }
        return sb.toString();
    }

    /** Borrows a service context from the pool, wrapping pool failures. */
    protected LdapContext getServiceContext() {
        try {
            return getContextPool().borrowObject();
        } catch (ServiceContextCreationException e) {
            throw e;
        } catch (Exception e) {
            getLogger().error("Failed to get service context for ldap.", e);
            throw new ServiceContextRetrievalException("Unable to borrow a service context from context pool.", e);
        }
    }

    /** Lazily creates the context pool with abandoned-object reclamation. */
    @PostConstruct
    public void init() {
        if (getContextPool() == null) {
            GenericObjectPoolConfig config = new GenericObjectPoolConfig();
            config.setTestOnBorrow(true);
            PoolConfig.setConfig(config, "ldap.context.pool", "ldap.context.pool.", "global.pool.");
            LdapServiceContextPoolFactory serviceContextPoolFactory = new LdapServiceContextPoolFactory(getConstantsConfig());
            setContextPool(new GenericObjectPool<>(serviceContextPoolFactory, config));
            AbandonedConfig abandonedConfig = new AbandonedConfig();
            abandonedConfig.setUseUsageTracking(true);
            abandonedConfig.setRemoveAbandonedOnMaintenance(true);
            abandonedConfig.setRemoveAbandonedOnBorrow(true);
            abandonedConfig.setRemoveAbandonedTimeout(60);
            getContextPool().setAbandonedConfig(abandonedConfig);
        }
    }

    /** Closes and recreates the context pool (e.g. after a config change). */
    public void reset() {
        if (getContextPool() != null) {
            getContextPool().close();
            setContextPool(null);
        }
        init();
    }

    protected abstract void setContextPool(GenericObjectPool<LdapContext> ldapContextGenericObjectPool);

    protected abstract AbstractTokenUtil getTokenUtils();

    protected abstract GenericObjectPool<LdapContext> getContextPool();

    protected abstract LDAPConstants getConstantsConfig();

    protected abstract Logger getLogger();

    /** Signals (as a client-visible 503) that LDAP auth is not configured. */
    protected void notConfigured() {
        throw new ClientVisibleException(ResponseCodes.SERVICE_UNAVAILABLE,
                "NotConfigured", "Ldap is not configured", null);
    }
}
| |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.api.revision;
import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.acceptance.PushOneCommit.FILE_CONTENT;
import static com.google.gerrit.acceptance.PushOneCommit.FILE_NAME;
import static com.google.gerrit.acceptance.PushOneCommit.PATCH;
import static org.eclipse.jgit.lib.Constants.HEAD;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.PushOneCommit;
import com.google.gerrit.acceptance.TestAccount;
import com.google.gerrit.extensions.api.changes.ChangeApi;
import com.google.gerrit.extensions.api.changes.CherryPickInput;
import com.google.gerrit.extensions.api.changes.DraftApi;
import com.google.gerrit.extensions.api.changes.DraftInput;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.extensions.api.changes.ReviewInput.CommentInput;
import com.google.gerrit.extensions.api.changes.RevisionApi;
import com.google.gerrit.extensions.api.changes.SubmitInput;
import com.google.gerrit.extensions.api.projects.BranchInput;
import com.google.gerrit.extensions.client.SubmitType;
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.common.ChangeMessageInfo;
import com.google.gerrit.extensions.common.CommentInfo;
import com.google.gerrit.extensions.common.DiffInfo;
import com.google.gerrit.extensions.common.MergeableInfo;
import com.google.gerrit.extensions.common.RevisionInfo;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.BinaryResult;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.reviewdb.client.Patch;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.RefUpdate;
import org.junit.Before;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * Acceptance tests for the per-revision REST API, driven through the Java API
 * ({@code gApi}): reviewing/submitting, cherry-picking, rebase eligibility,
 * reviewed-file flags, mergeability, file/diff/content access, draft and
 * published comments, and the formatted patch download.
 */
@NoHttpd
public class RevisionIT extends AbstractDaemonTest {
  // Second administrator account, used by the on-behalf-of submit test.
  private TestAccount admin2;

  @Before
  public void setUp() throws Exception {
    admin2 = accounts.admin2();
  }

  @Test
  public void reviewTriplet() throws Exception {
    // Address the change by its full project~branch~Change-Id triplet.
    PushOneCommit.Result r = createChange();
    gApi.changes()
        .id(project.get() + "~master~" + r.getChangeId())
        .revision(r.getCommit().name())
        .review(ReviewInput.approve());
  }

  @Test
  public void reviewCurrent() throws Exception {
    PushOneCommit.Result r = createChange();
    gApi.changes()
        .id(r.getChangeId())
        .current()
        .review(ReviewInput.approve());
  }

  @Test
  public void reviewNumber() throws Exception {
    // Revisions can also be addressed by patch set number.
    PushOneCommit.Result r = createChange();
    gApi.changes()
        .id(r.getChangeId())
        .revision(1)
        .review(ReviewInput.approve());
    r = updateChange(r, "new content");
    gApi.changes()
        .id(r.getChangeId())
        .revision(2)
        .review(ReviewInput.approve());
  }

  @Test
  public void submit() throws Exception {
    PushOneCommit.Result r = createChange();
    gApi.changes()
        .id(project.get() + "~master~" + r.getChangeId())
        .current()
        .review(ReviewInput.approve());
    gApi.changes()
        .id(project.get() + "~master~" + r.getChangeId())
        .current()
        .submit();
  }

  // Submitting on behalf of another user is expected to be rejected here.
  @Test(expected = AuthException.class)
  public void submitOnBehalfOf() throws Exception {
    PushOneCommit.Result r = createChange();
    gApi.changes()
        .id(project.get() + "~master~" + r.getChangeId())
        .current()
        .review(ReviewInput.approve());
    SubmitInput in = new SubmitInput();
    in.onBehalfOf = admin2.email;
    gApi.changes()
        .id(project.get() + "~master~" + r.getChangeId())
        .current()
        .submit(in);
  }

  @Test
  public void deleteDraft() throws Exception {
    PushOneCommit.Result r = createDraft();
    gApi.changes()
        .id(r.getChangeId())
        .revision(r.getCommit().name())
        .delete();
  }

  @Test
  public void cherryPick() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master%topic=someTopic");
    CherryPickInput in = new CherryPickInput();
    in.destination = "foo";
    in.message = "it goes to stable branch";
    gApi.projects()
        .name(project.get())
        .branch(in.destination)
        .create(new BranchInput());
    ChangeApi orig = gApi.changes()
        .id(project.get() + "~master~" + r.getChangeId());
    assertThat(orig.get().messages).hasSize(1);
    ChangeApi cherry = orig.revision(r.getCommit().name())
        .cherryPick(in);
    // Cherry-picking adds a message on the original change...
    assertThat(orig.get().messages).hasSize(2);
    String cherryPickedRevision = cherry.get().currentRevision;
    String expectedMessage = String.format(
        "Patch Set 1: Cherry Picked\n\n" +
        "This patchset was cherry picked to branch %s as commit %s",
        in.destination, cherryPickedRevision);
    Iterator<ChangeMessageInfo> origIt = orig.get().messages.iterator();
    origIt.next();
    assertThat(origIt.next().message).isEqualTo(expectedMessage);
    // ...and a single message on the newly created change.
    assertThat(cherry.get().messages).hasSize(1);
    Iterator<ChangeMessageInfo> cherryIt = cherry.get().messages.iterator();
    expectedMessage = "Patch Set 1: Cherry Picked from branch master.";
    assertThat(cherryIt.next().message).isEqualTo(expectedMessage);
    assertThat(cherry.get().subject).contains(in.message);
    // The topic is carried over with the destination branch appended.
    assertThat(cherry.get().topic).isEqualTo("someTopic-foo");
    cherry.current().review(ReviewInput.approve());
    cherry.current().submit();
  }

  @Test
  public void cherryPickwithNoTopic() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master");
    CherryPickInput in = new CherryPickInput();
    in.destination = "foo";
    in.message = "it goes to stable branch";
    gApi.projects()
        .name(project.get())
        .branch(in.destination)
        .create(new BranchInput());
    ChangeApi orig = gApi.changes()
        .id(project.get() + "~master~" + r.getChangeId());
    ChangeApi cherry = orig.revision(r.getCommit().name())
        .cherryPick(in);
    assertThat(cherry.get().topic).isNull();
    cherry.current().review(ReviewInput.approve());
    cherry.current().submit();
  }

  @Test
  public void cherryPickToSameBranch() throws Exception {
    // Cherry-picking onto the change's own branch creates a new patch set.
    PushOneCommit.Result r = createChange();
    CherryPickInput in = new CherryPickInput();
    in.destination = "master";
    in.message = "it generates a new patch set\n\nChange-Id: " + r.getChangeId();
    ChangeInfo cherryInfo = gApi.changes()
        .id(project.get() + "~master~" + r.getChangeId())
        .revision(r.getCommit().name())
        .cherryPick(in)
        .get();
    assertThat(cherryInfo.messages).hasSize(2);
    Iterator<ChangeMessageInfo> cherryIt = cherryInfo.messages.iterator();
    assertThat(cherryIt.next().message).isEqualTo("Uploaded patch set 1.");
    assertThat(cherryIt.next().message).isEqualTo("Uploaded patch set 2.");
  }

  @Test
  public void cherryPickToSameBranchWithRebase() throws Exception {
    // Push a new change, then merge it
    PushOneCommit.Result baseChange = createChange();
    String triplet = project.get() + "~master~" + baseChange.getChangeId();
    RevisionApi baseRevision = gApi.changes().id(triplet).current();
    baseRevision.review(ReviewInput.approve());
    baseRevision.submit();
    // Push a new change (change 1)
    PushOneCommit.Result r1 = createChange();
    // Push another new change (change 2)
    String subject = "Test change\n\n" +
        "Change-Id: Ideadbeefdeadbeefdeadbeefdeadbeefdeadbeef";
    PushOneCommit push =
        pushFactory.create(db, admin.getIdent(), testRepo, subject,
            "another_file.txt", "another content");
    PushOneCommit.Result r2 = push.to("refs/for/master");
    // Change 2's parent should be change 1
    assertThat(r2.getCommit().getParents()[0].name())
        .isEqualTo(r1.getCommit().name());
    // Cherry pick change 2 onto the same branch
    triplet = project.get() + "~master~" + r2.getChangeId();
    ChangeApi orig = gApi.changes().id(triplet);
    CherryPickInput in = new CherryPickInput();
    in.destination = "master";
    in.message = subject;
    ChangeApi cherry = orig.revision(r2.getCommit().name()).cherryPick(in);
    ChangeInfo cherryInfo = cherry.get();
    assertThat(cherryInfo.messages).hasSize(2);
    Iterator<ChangeMessageInfo> cherryIt = cherryInfo.messages.iterator();
    assertThat(cherryIt.next().message).isEqualTo("Uploaded patch set 1.");
    assertThat(cherryIt.next().message).isEqualTo("Uploaded patch set 2.");
    // Parent of change 2 should now be the change that was merged, i.e.
    // change 2 is rebased onto the head of the master branch.
    String newParent = cherryInfo.revisions.get(cherryInfo.currentRevision)
        .commit.parents.get(0).commit;
    assertThat(newParent).isEqualTo(baseChange.getCommit().name());
  }

  @Test
  public void cherryPickIdenticalTree() throws Exception {
    PushOneCommit.Result r = createChange();
    CherryPickInput in = new CherryPickInput();
    in.destination = "foo";
    in.message = "it goes to stable branch";
    gApi.projects()
        .name(project.get())
        .branch(in.destination)
        .create(new BranchInput());
    ChangeApi orig = gApi.changes()
        .id(project.get() + "~master~" + r.getChangeId());
    assertThat(orig.get().messages).hasSize(1);
    ChangeApi cherry = orig.revision(r.getCommit().name())
        .cherryPick(in);
    assertThat(orig.get().messages).hasSize(2);
    assertThat(cherry.get().subject).contains(in.message);
    cherry.current().review(ReviewInput.approve());
    cherry.current().submit();
    // Second cherry-pick of the same content must be rejected.
    exception.expect(ResourceConflictException.class);
    exception.expectMessage("Cherry pick failed: identical tree");
    orig.revision(r.getCommit().name()).cherryPick(in);
  }

  @Test
  public void cherryPickConflict() throws Exception {
    PushOneCommit.Result r = createChange();
    CherryPickInput in = new CherryPickInput();
    in.destination = "foo";
    in.message = "it goes to stable branch";
    gApi.projects()
        .name(project.get())
        .branch(in.destination)
        .create(new BranchInput());
    // Seed the destination branch with conflicting content for FILE_NAME.
    PushOneCommit push =
        pushFactory.create(db, admin.getIdent(), testRepo, PushOneCommit.SUBJECT,
            PushOneCommit.FILE_NAME, "another content");
    push.to("refs/heads/foo");
    String triplet = project.get() + "~master~" + r.getChangeId();
    ChangeApi orig = gApi.changes().id(triplet);
    assertThat(orig.get().messages).hasSize(1);
    exception.expect(ResourceConflictException.class);
    exception.expectMessage("Cherry pick failed: merge conflict");
    orig.revision(r.getCommit().name()).cherryPick(in);
  }

  @Test
  public void canRebase() throws Exception {
    PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
    PushOneCommit.Result r1 = push.to("refs/for/master");
    merge(r1);
    push = pushFactory.create(db, admin.getIdent(), testRepo);
    PushOneCommit.Result r2 = push.to("refs/for/master");
    // r2 is already based on the branch tip, so there is nothing to rebase onto.
    boolean canRebase = gApi.changes()
        .id(r2.getChangeId())
        .revision(r2.getCommit().name())
        .canRebase();
    assertThat(canRebase).isFalse();
    merge(r2);
    testRepo.reset(r1.getCommit());
    push = pushFactory.create(db, admin.getIdent(), testRepo);
    PushOneCommit.Result r3 = push.to("refs/for/master");
    // r3 is based on an older commit, so it can be rebased onto the new tip.
    canRebase = gApi.changes()
        .id(r3.getChangeId())
        .revision(r3.getCommit().name())
        .canRebase();
    assertThat(canRebase).isTrue();
  }

  @Test
  public void setUnsetReviewedFlag() throws Exception {
    PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
    PushOneCommit.Result r = push.to("refs/for/master");
    gApi.changes()
        .id(r.getChangeId())
        .current()
        .setReviewed(PushOneCommit.FILE_NAME, true);
    assertThat(Iterables.getOnlyElement(
            gApi.changes()
                .id(r.getChangeId())
                .current()
                .reviewed())).isEqualTo(PushOneCommit.FILE_NAME);
    gApi.changes()
        .id(r.getChangeId())
        .current()
        .setReviewed(PushOneCommit.FILE_NAME, false);
    assertThat(gApi.changes().id(r.getChangeId()).current().reviewed())
        .isEmpty();
  }

  @Test
  public void mergeable() throws Exception {
    ObjectId initial = repo().getRef(HEAD).getLeaf().getObjectId();
    PushOneCommit push1 =
        pushFactory.create(db, admin.getIdent(), testRepo, PushOneCommit.SUBJECT,
            PushOneCommit.FILE_NAME, "push 1 content");
    PushOneCommit.Result r1 = push1.to("refs/for/master");
    assertMergeable(r1.getChangeId(), true);
    merge(r1);
    // Reset HEAD to initial so the new change is a merge conflict.
    RefUpdate ru = repo().updateRef(HEAD);
    ru.setNewObjectId(initial);
    assertThat(ru.forceUpdate()).isEqualTo(RefUpdate.Result.FORCED);
    PushOneCommit push2 =
        pushFactory.create(db, admin.getIdent(), testRepo, PushOneCommit.SUBJECT,
            PushOneCommit.FILE_NAME, "push 2 content");
    PushOneCommit.Result r2 = push2.to("refs/for/master");
    assertMergeable(r2.getChangeId(), false);
    // TODO(dborowitz): Test for other-branches.
  }

  @Test
  public void files() throws Exception {
    PushOneCommit.Result r = createChange();
    // Every listed file is either the pushed file or the commit-message pseudo file.
    assertThat(Iterables.all(gApi.changes()
        .id(r.getChangeId())
        .revision(r.getCommit().name())
        .files()
        .keySet(), new Predicate<String>() {
          @Override
          public boolean apply(String file) {
            return file.matches(FILE_NAME + '|' + Patch.COMMIT_MSG);
          }
        }))
        .isTrue();
  }

  @Test
  public void diff() throws Exception {
    PushOneCommit.Result r = createChange();
    DiffInfo diff = gApi.changes()
        .id(r.getChangeId())
        .revision(r.getCommit().name())
        .file(FILE_NAME)
        .diff();
    // New file: no "A" side, one line on the "B" side.
    assertThat(diff.metaA).isNull();
    assertThat(diff.metaB.lines).isEqualTo(1);
  }

  @Test
  public void content() throws Exception {
    PushOneCommit.Result r = createChange();
    BinaryResult bin = gApi.changes()
        .id(r.getChangeId())
        .revision(r.getCommit().name())
        .file(FILE_NAME)
        .content();
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    bin.writeTo(os);
    String res = new String(os.toByteArray(), StandardCharsets.UTF_8);
    assertThat(res).isEqualTo(FILE_CONTENT);
  }

  /** Asserts both the mergeable endpoint and the ChangeInfo mergeable field. */
  private void assertMergeable(String id, boolean expected) throws Exception {
    MergeableInfo m = gApi.changes().id(id).current().mergeable();
    assertThat(m.mergeable).isEqualTo(expected);
    assertThat(m.submitType).isEqualTo(SubmitType.MERGE_IF_NECESSARY);
    assertThat(m.mergeableInto).isNull();
    ChangeInfo c = gApi.changes().id(id).info();
    assertThat(c.mergeable).isEqualTo(expected);
  }

  @Test
  public void drafts() throws Exception {
    PushOneCommit.Result r = createChange();
    DraftInput in = new DraftInput();
    in.line = 1;
    in.message = "nit: trailing whitespace";
    in.path = FILE_NAME;
    DraftApi draftApi = gApi.changes()
        .id(r.getChangeId())
        .revision(r.getCommit().name())
        .createDraft(in);
    assertThat(draftApi
            .get()
            .message)
        .isEqualTo(in.message);
    assertThat(gApi.changes()
            .id(r.getChangeId())
            .revision(r.getCommit().name())
            .draft(draftApi.get().id)
            .get()
            .message)
        .isEqualTo(in.message);
    assertThat(gApi.changes()
            .id(r.getChangeId())
            .revision(r.getCommit().name())
            .drafts())
        .hasSize(1);
    // Updating the draft in place replaces its message.
    in.message = "good catch!";
    assertThat(gApi.changes()
            .id(r.getChangeId())
            .revision(r.getCommit().name())
            .draft(draftApi.get().id)
            .update(in)
            .message)
        .isEqualTo(in.message);
    assertThat(gApi.changes()
            .id(r.getChangeId())
            .revision(r.getCommit().name())
            .draft(draftApi.get().id)
            .get()
            .author
            .email)
        .isEqualTo(admin.email);
    draftApi.delete();
    assertThat(gApi.changes()
            .id(r.getChangeId())
            .revision(r.getCommit().name())
            .drafts())
        .isEmpty();
  }

  @Test
  public void comments() throws Exception {
    PushOneCommit.Result r = createChange();
    CommentInput in = new CommentInput();
    in.line = 1;
    in.message = "nit: trailing whitespace";
    in.path = FILE_NAME;
    ReviewInput reviewInput = new ReviewInput();
    Map<String, List<CommentInput>> comments = new HashMap<>();
    comments.put(FILE_NAME, Collections.singletonList(in));
    reviewInput.comments = comments;
    reviewInput.message = "comment test";
    gApi.changes()
        .id(r.getChangeId())
        .current()
        .review(reviewInput);
    Map<String, List<CommentInfo>> out = gApi.changes()
        .id(r.getChangeId())
        .revision(r.getCommit().name())
        .comments();
    assertThat(out).hasSize(1);
    CommentInfo comment = Iterables.getOnlyElement(out.get(FILE_NAME));
    assertThat(comment.message).isEqualTo(in.message);
    assertThat(comment.author.email).isEqualTo(admin.email);
    // In the map view the path is implied by the key, so it is not populated.
    assertThat(comment.path).isNull();
    List<CommentInfo> list = gApi.changes()
        .id(r.getChangeId())
        .revision(r.getCommit().name())
        .commentsAsList();
    assertThat(list).hasSize(1);
    CommentInfo comment2 = list.get(0);
    // In the list view the path must be populated on each entry.
    assertThat(comment2.path).isEqualTo(FILE_NAME);
    assertThat(comment2.line).isEqualTo(comment.line);
    assertThat(comment2.message).isEqualTo(comment.message);
    assertThat(comment2.author.email).isEqualTo(comment.author.email);
    assertThat(gApi.changes()
            .id(r.getChangeId())
            .revision(r.getCommit().name())
            .comment(comment.id)
            .get()
            .message)
        .isEqualTo(in.message);
  }

  @Test
  public void patch() throws Exception {
    PushOneCommit.Result r = createChange();
    ChangeApi changeApi = gApi.changes()
        .id(r.getChangeId());
    BinaryResult bin = changeApi
        .revision(r.getCommit().name())
        .patch();
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    bin.writeTo(os);
    String res = new String(os.toByteArray(), StandardCharsets.UTF_8);
    // Format must match git's author-date format used in the PATCH template.
    DateFormat dateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss Z");
    ChangeInfo change = changeApi.get();
    RevisionInfo rev = change.revisions.get(change.currentRevision);
    String date = dateFormat.format(rev.commit.author.date);
    assertThat(res).isEqualTo(
        String.format(PATCH, r.getCommitId().name(), date, r.getChangeId()));
  }

  /** Approves and submits the change created by the given push result. */
  private void merge(PushOneCommit.Result r) throws Exception {
    revision(r).review(ReviewInput.approve());
    revision(r).submit();
  }

  /** Pushes a new patch set with the given content to the same change. */
  private PushOneCommit.Result updateChange(PushOneCommit.Result r,
      String content) throws Exception {
    PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo,
        "test commit", "a.txt", content, r.getChangeId());
    return push.to("refs/for/master");
  }

  /** Creates a draft change by pushing to the refs/drafts namespace. */
  private PushOneCommit.Result createDraft() throws Exception {
    PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
    return push.to("refs/drafts/master");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.twill.internal.utils;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.ByteStreams;
import org.objectweb.asm.AnnotationVisitor;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.signature.SignatureReader;
import org.objectweb.asm.signature.SignatureVisitor;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.Queue;
import java.util.Set;
/**
* Utility class to help find out class dependencies.
*/
public final class Dependencies {

  /**
   * Represents a callback for accepting a class during dependency traversal.
   */
  public interface ClassAcceptor {
    /**
     * Invoked when a class is being found as a dependency.
     *
     * @param className Name of the class.
     * @param classUrl URL for the class resource.
     * @param classPathUrl URL for the class path resource that contains the class resource.
     *                     If the URL protocol is {@code file}, it would be the path to root package.
     *                     If the URL protocol is {@code jar}, it would be the jar file.
     * @return {@code true} to keep finding dependencies of the given class,
     *         {@code false} to prune the traversal at it.
     */
    boolean accept(String className, URL classUrl, URL classPathUrl);
  }

  /**
   * Varargs convenience overload of
   * {@link #findClassDependencies(ClassLoader, ClassAcceptor, Iterable)}.
   */
  public static void findClassDependencies(ClassLoader classLoader,
                                           ClassAcceptor acceptor,
                                           String... classesToResolve) throws IOException {
    findClassDependencies(classLoader, acceptor, ImmutableList.copyOf(classesToResolve));
  }

  /**
   * Finds the class dependencies of the given class.
   * @param classLoader ClassLoader for finding class bytecode.
   * @param acceptor Predicate to accept a found class and its bytecode.
   * @param classesToResolve Classes for looking for dependencies.
   * @throws IOException Thrown where there is error when loading in class bytecode.
   */
  public static void findClassDependencies(ClassLoader classLoader,
                                           ClassAcceptor acceptor,
                                           Iterable<String> classesToResolve) throws IOException {
    // seenClasses doubles as the visited set; classes is the BFS work queue.
    final Set<String> seenClasses = Sets.newHashSet(classesToResolve);
    final Queue<String> classes = Lists.newLinkedList(classesToResolve);

    // Breadth-first-search classes dependencies.
    while (!classes.isEmpty()) {
      String className = classes.remove();
      URL classUrl = getClassURL(className, classLoader);
      if (classUrl == null) {
        continue;
      }

      // Ask the acceptor whether to traverse into the current class.
      if (!acceptor.accept(className, classUrl, getClassPathURL(className, classUrl))) {
        continue;
      }

      InputStream is = classUrl.openStream();
      try {
        // Visit the bytecode to lookup classes that the visiting class is depended on.
        new ClassReader(ByteStreams.toByteArray(is)).accept(new DependencyClassVisitor(new DependencyAcceptor() {
          @Override
          public void accept(String className) {
            // Enqueue the class for traversal if it hasn't been seen yet.
            if (seenClasses.add(className)) {
              classes.add(className);
            }
          }
          // Parsing options are bit flags; combine them with bitwise OR, not '+'.
        }), ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
      } finally {
        is.close();
      }
    }
  }

  /**
   * Returns the URL for loading the class bytecode of the given class, or
   * {@code null} if the class loader cannot locate the class resource.
   */
  private static URL getClassURL(String className, ClassLoader classLoader) {
    String resourceName = className.replace('.', '/') + ".class";
    return classLoader.getResource(resourceName);
  }

  /**
   * Derives the classpath root (directory or jar file) that contains the
   * given class resource URL.
   *
   * @throws IllegalStateException if the URL protocol is neither {@code file} nor {@code jar}
   */
  private static URL getClassPathURL(String className, URL classUrl) {
    try {
      if ("file".equals(classUrl.getProtocol())) {
        String path = classUrl.getFile();
        // Compute the directory containing the class.
        int endIdx = path.length() - className.length() - ".class".length();
        if (endIdx > 1) {
          // If it is not the root directory, return the end index to remove the trailing '/'.
          endIdx--;
        }
        return new URL("file", "", -1, path.substring(0, endIdx));
      }
      if ("jar".equals(classUrl.getProtocol())) {
        // A jar URL looks like "jar:file:/path/to.jar!/pkg/Cls.class"; keep the jar part.
        String path = classUrl.getFile();
        return URI.create(path.substring(0, path.indexOf("!/"))).toURL();
      }
    } catch (MalformedURLException e) {
      throw Throwables.propagate(e);
    }
    throw new IllegalStateException("Unsupported class URL: " + classUrl);
  }

  /**
   * A private interface for accepting a dependent class that is found during bytecode inspection.
   */
  private interface DependencyAcceptor {
    void accept(String className);
  }

  /**
   * ASM ClassVisitor for extracting classes dependencies.
   */
  private static final class DependencyClassVisitor extends ClassVisitor {

    private final SignatureVisitor signatureVisitor;
    private final DependencyAcceptor acceptor;

    public DependencyClassVisitor(DependencyAcceptor acceptor) {
      super(Opcodes.ASM4);
      this.acceptor = acceptor;
      this.signatureVisitor = new SignatureVisitor(Opcodes.ASM4) {
        // Remember the outer class so inner-class types can be qualified.
        private String currentClass;

        @Override
        public void visitClassType(String name) {
          currentClass = name;
          addClass(name);
        }

        @Override
        public void visitInnerClassType(String name) {
          addClass(currentClass + "$" + name);
        }
      };
    }

    @Override
    public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
      addClass(name);
      // A generic signature already encodes superclass and interfaces.
      if (signature != null) {
        new SignatureReader(signature).accept(signatureVisitor);
      } else {
        addClass(superName);
        addClasses(interfaces);
      }
    }

    @Override
    public void visitOuterClass(String owner, String name, String desc) {
      addClass(owner);
    }

    @Override
    public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
      addType(Type.getType(desc));
      return null;
    }

    @Override
    public void visitInnerClass(String name, String outerName, String innerName, int access) {
      addClass(name);
    }

    @Override
    public FieldVisitor visitField(int access, String name, String desc, String signature, Object value) {
      if (signature != null) {
        new SignatureReader(signature).acceptType(signatureVisitor);
      } else {
        addType(Type.getType(desc));
      }

      return new FieldVisitor(Opcodes.ASM4) {
        @Override
        public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
          addType(Type.getType(desc));
          return null;
        }
      };
    }

    @Override
    public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
      if (signature != null) {
        new SignatureReader(signature).accept(signatureVisitor);
      } else {
        addMethod(desc);
      }
      addClasses(exceptions);

      return new MethodVisitor(Opcodes.ASM4) {
        @Override
        public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
          addType(Type.getType(desc));
          return null;
        }

        @Override
        public AnnotationVisitor visitParameterAnnotation(int parameter, String desc, boolean visible) {
          addType(Type.getType(desc));
          return null;
        }

        @Override
        public void visitTypeInsn(int opcode, String type) {
          addType(Type.getObjectType(type));
        }

        @Override
        public void visitFieldInsn(int opcode, String owner, String name, String desc) {
          addType(Type.getObjectType(owner));
          addType(Type.getType(desc));
        }

        @Override
        public void visitMethodInsn(int opcode, String owner, String name, String desc) {
          addType(Type.getObjectType(owner));
          addMethod(desc);
        }

        @Override
        public void visitLdcInsn(Object cst) {
          // Class literals (Foo.class) appear as Type constants in the pool.
          if (cst instanceof Type) {
            addType((Type) cst);
          }
        }

        @Override
        public void visitMultiANewArrayInsn(String desc, int dims) {
          addType(Type.getType(desc));
        }

        @Override
        public void visitLocalVariable(String name, String desc, String signature, Label start, Label end, int index) {
          if (signature != null) {
            new SignatureReader(signature).acceptType(signatureVisitor);
          } else {
            addType(Type.getType(desc));
          }
        }
      };
    }

    // Reports a class to the acceptor, skipping null and java.* system classes.
    private void addClass(String internalName) {
      if (internalName == null || internalName.startsWith("java/")) {
        return;
      }
      acceptor.accept(Type.getObjectType(internalName).getClassName());
    }

    private void addClasses(String[] classes) {
      if (classes != null) {
        for (String clz : classes) {
          addClass(clz);
        }
      }
    }

    // Unwraps array types and reports only object types.
    private void addType(Type type) {
      if (type.getSort() == Type.ARRAY) {
        type = type.getElementType();
      }
      if (type.getSort() == Type.OBJECT) {
        addClass(type.getInternalName());
      }
    }

    // Reports the return and argument types of a method descriptor.
    private void addMethod(String desc) {
      addType(Type.getReturnType(desc));
      for (Type type : Type.getArgumentTypes(desc)) {
        addType(type);
      }
    }
  }

  private Dependencies() {
  }
}
| |
package edu.ncsu.csc.nl.model.relation;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore;
import edu.ncsu.csc.nl.model.Sentence;
import edu.ncsu.csc.nl.model.WordEdge;
import edu.ncsu.csc.nl.model.WordVertex;
import edu.ncsu.csc.nl.model.type.AccessControlElement;
import edu.ncsu.csc.nl.model.type.RelationSource;
import edu.ncsu.csc.nl.model.type.PartOfSpeech;
import edu.ncsu.csc.nl.model.type.Relationship;
import edu.ncsu.csc.nl.util.Utility;
/**
* AccessControlPattern represents a particular graph pattern that
* an access control marking generates.
*
* There are three types of wildcards that can be used with patterns
* (currently on the nodes themselves). Primarily used within the nodeEquals()
* Types:
* '*' matches anything
* '+' matches any existing object of the same element (subject,action,resource/object)
* '%' matches anything of the same part of speech.
* The wildcards are stored in the lemma value for a WordVertex
*
* @author Adminuser
*/
public class AccessControlPattern {
public static final String WILDCARD_ANYTHING = "*";
public static final String WILDCARD_SAME_ELEMENT = "+"; //ie, existing subject/action/resource
public static final String WILDCARD_SAME_PART_OF_SPEECH = "%";
private WordVertex _rootVertex;
private AccessControlRelation _accessControl;
/** In what sentences, does this access control pattern appear correctly? (ie, valid) */
private ArrayList<Sentence> _validSentenceAppearances = new ArrayList<Sentence>();
private ArrayList<Sentence> _invalidSentenceAppearances = new ArrayList<Sentence>();
private ArrayList<Sentence> _negativeSentenceAppearances = new ArrayList<Sentence>();
private RelationSource _source;
/** "temporary" flag to to save work by not constantly checking for transformations */
private boolean _hasBeenCheckedForTransformations = false;
/** how has this pattern been classified? */
private boolean _classification = true;
/** what is the base pattern of this string. this is the collapsed version minus node IDs. Lazily-created */
private String _stringPattern = null;
/** Creates a pattern rooted at the given vertex for the given relation and provenance. */
public AccessControlPattern(WordVertex root, AccessControlRelation ac, RelationSource source) {
    _rootVertex = root;
    _accessControl = ac;
    _source = source;
}
/**
* Makes a deep copy of the passed in object with the exception of
* the passed in sentences
*
* @param acp
*/
public AccessControlPattern(AccessControlPattern acp) {
    // Deep-copy the vertex graph and re-bind the relation to the copied root;
    // sentence appearance lists are deliberately left empty in the copy.
    _rootVertex = (WordVertex) Utility.copy(acp.getRoot());
    _accessControl = new AccessControlRelation(acp.getAccessControl(),_rootVertex);
    _source = acp.getAccessControlSource();
    _classification = acp.getClassification();
}
/**
* Default constructor for JSON mapping
*
* @return
*/
/*
public AccessControlPattern() {
}
*/
/** @return the root vertex of this pattern's word graph. */
public WordVertex getRoot() {
    return _rootVertex;
}
/** @return the access control relation (subject/action/object) bound to this pattern. */
public AccessControlRelation getAccessControl() {
    return _accessControl;
}
/** Extracts the values of the given factors from this pattern (delegates to AccessControlFactor). */
public String[] getFactors(ArrayList<AccessControlFactor> factors) {
    return AccessControlFactor.getFactorValues(factors, this);
}
/** @return how this pattern has been classified (defaults to true). */
public boolean getClassification() {
    return _classification;
}
/** Sets the classification flag for this pattern. */
public void setClassification(boolean newValue) {
    _classification = newValue;
}
/** @return true if the given sentence is recorded as a valid appearance of this pattern. */
@JsonIgnore
public boolean hasValidSentence(Sentence s) {
    return _validSentenceAppearances.contains(s);
}
/** @return the number of sentences where this pattern appears validly. */
@JsonIgnore
public int getNumberOfValidSentences() {
    return _validSentenceAppearances.size();
}
/** Removes the sentence from the valid-appearance list (no-op if absent). */
@JsonIgnore
public void removeValidSentence(Sentence s) {
    _validSentenceAppearances.remove(s);
}
/** Records a sentence in which this pattern appears validly (duplicates are not checked). */
@JsonIgnore
public void addValidSentence(Sentence s) {
    _validSentenceAppearances.add(s);
}
/** @return the valid-appearance sentences as a new array (internal list is not exposed). */
@JsonIgnore
public Sentence[] getValidSentences() {
    return _validSentenceAppearances.toArray(new Sentence[0]);
}
/** Replaces the valid-appearance list with the contents of the given array. */
@JsonIgnore
public void setValidSentences(Sentence[] s) {
    ArrayList<Sentence> replacement = new ArrayList<Sentence>();
    for (Sentence sentence : s) {
        replacement.add(sentence);
    }
    _validSentenceAppearances = replacement;
}
/** @return true if the given sentence is recorded as an invalid appearance of this pattern. */
@JsonIgnore
public boolean hasInvalidSentenceByOriginalPosition(Sentence s) {
    return _invalidSentenceAppearances.contains(s);
}
/** @return the number of sentences where this pattern appears invalidly. */
@JsonIgnore
public int getNumberOfInvalidSentences() {
    return _invalidSentenceAppearances.size();
}
/** Removes the sentence from the invalid-appearance list (no-op if absent). */
@JsonIgnore
public void removeInvalidSentence(Sentence s) {
    _invalidSentenceAppearances.remove(s);
}
/** Records a sentence in which this pattern appears invalidly. */
@JsonIgnore
public void addInvalidSentence(Sentence s) {
    _invalidSentenceAppearances.add(s);
}
// NOTE(review): unlike the valid/negative counterparts, this getter lacks
// @JsonIgnore, so invalid sentences ARE included in JSON serialization —
// confirm whether that asymmetry is intentional.
public Sentence[] getInvalidSentences() {
    return _invalidSentenceAppearances.toArray(new Sentence[0]);
}
// NOTE(review): not annotated @JsonIgnore, unlike setValidSentences /
// setNegativeSentences — confirm whether that asymmetry is intentional.
public void setInvalidSentences(Sentence[] s) {
    _invalidSentenceAppearances = new ArrayList<Sentence>();
    Collections.addAll(_invalidSentenceAppearances, s);
}
/** @return true if any vertex reachable from the root is a wildcard ('*', '+' or '%'). */
@JsonIgnore
public boolean hasWildcardElement() {
    return hasWildcardElement(this.getRoot(), new HashSet<Integer>());
}
/**
 * Depth-first search for a wildcard vertex, tracking visited vertex IDs so
 * cycles in the word graph do not cause infinite recursion.
 */
@JsonIgnore
private boolean hasWildcardElement(WordVertex node, HashSet<Integer> visitedNodes) {
    // Set.add returns false when the ID was already present — i.e. already visited.
    if (!visitedNodes.add(node.getID())) {
        return false;
    }
    if (isWildcard(node)) {
        return true;
    }
    for (Iterator<WordEdge> it = node.getChildren(); it.hasNext(); ) {
        if (hasWildcardElement(it.next().getChildNode(), visitedNodes)) {
            return true;
        }
    }
    return false;
}
/** @return true if the given sentence is recorded as a negative appearance of this pattern. */
@JsonIgnore
public boolean hasNegativeSentence(Sentence s) {
    return _negativeSentenceAppearances.contains(s);
}
/** @return the number of sentences where this pattern appears negatively. */
@JsonIgnore
public int getNumberOfNegativeSentences() {
    return _negativeSentenceAppearances.size();
}
/** Removes the sentence from the negative-appearance list (no-op if absent). */
@JsonIgnore
public void removeNegativeSentence(Sentence s) {
    _negativeSentenceAppearances.remove(s);
}
/** Records a sentence in which this pattern appears negatively. */
@JsonIgnore
public void addNegativeSentence(Sentence s) {
    _negativeSentenceAppearances.add(s);
}
/** @return the negative-appearance sentences as a new array. */
@JsonIgnore
public Sentence[] getNegativeSentences() {
    return _negativeSentenceAppearances.toArray(new Sentence[0]);
}
/** Replaces the negative-appearance list with the contents of the given array. */
@JsonIgnore
public void setNegativeSentences(Sentence[] s) {
    _negativeSentenceAppearances = new ArrayList<Sentence>();
    Collections.addAll(_negativeSentenceAppearances, s);
}
/** Removes the sentence from all three appearance lists (valid, invalid, negative). */
public void removeAnyOccuranceOfSentence(Sentence s) {
    this.removeValidSentence(s);
    this.removeInvalidSentence(s);
    this.removeNegativeSentence(s);
}
/** Clears all three sentence appearance lists by replacing them with fresh empty lists. */
public void resetSentenceOccurances() {
    _validSentenceAppearances = new ArrayList<Sentence>();
    _invalidSentenceAppearances = new ArrayList<Sentence>();
    _negativeSentenceAppearances = new ArrayList<Sentence>();
}
/** @return the combined count of valid, invalid, and negative sentence appearances. */
@JsonIgnore
public int getTotalNumberOfSentences() {
    return _validSentenceAppearances.size() + _invalidSentenceAppearances.size() + _negativeSentenceAppearances.size();
}
/** @return the provenance of this pattern (how the relation was derived). */
public RelationSource getAccessControlSource() {
    return _source;
}
/** Sets the provenance of this pattern. */
public void setAccessControlSource(RelationSource newSource) {
    _source = newSource;
}
/** Renders the relation, total sentence count, and the full string form of the graph. */
public String toString() {
    StringBuilder text = new StringBuilder();
    text.append(this._accessControl);
    text.append(": ");
    text.append(this.getTotalNumberOfSentences());
    text.append("|");
    text.append(this.getRoot().getStringRepresentation());
    return text.toString();
}
/** Like {@link #toString()} but uses the part-of-speech-collapsed graph representation. */
public String toStringCollapsed() {
    return this._accessControl +": "+ this.getTotalNumberOfSentences()+"|"+this.getRoot().getStringRepresentationPOSCollapsed();
}
/**
 * Produces a basic pattern of the representation, but with all elements
 * (subjects/actions/objects/prepositions) wildcarded, so that only parts
 * of speech and relationships remain.
 *
 * <p>The result is computed lazily and cached in {@code _stringPattern}.
 * NOTE(review): the lazy initialization is unsynchronized — assumes
 * single-threaded use; confirm if instances are shared across threads.
 *
 * @return the collapsed, digit-free string form of the fully wildcarded pattern
 */
public String toStringPattern() {
    if (_stringPattern == null) {
        // Wildcard each element category in turn; each call returns a new
        // pattern and leaves its input unmodified.
        AccessControlPattern newPattern = new AccessControlPattern(this);
        newPattern = AccessControlPattern.createMatchingElementOnPartofSpeechPattern(newPattern, AccessControlElement.SUBJECT);
        newPattern = AccessControlPattern.createMatchingElementOnPartofSpeechPattern(newPattern, AccessControlElement.ACTION);
        newPattern = AccessControlPattern.createMatchingElementOnPartofSpeechPattern(newPattern, AccessControlElement.OBJECT);
        newPattern = AccessControlPattern.createMatchingElementOnPartofSpeechPattern(newPattern, AccessControlElement.PREPOSITION);
        String pattern = newPattern.toStringCollapsed(); // This looks like "%;%;%: 0|(1 % VB root (2 % NNPS nsubj )(4 % NNS dobj ))". Need to remove numbers
        _stringPattern = pattern.replaceAll("[0-9]","");
    }
    return _stringPattern;
}
/** @return true if {@code wv} is one of this pattern's subject vertices. */
public boolean isSubjectNode(WordVertex wv) {
    return this.getAccessControl().getSubjectVertexList().contains(wv);
}

/** @return true if {@code wv} is one of this pattern's action vertices. */
public boolean isActionNode(WordVertex wv) {
    return this.getAccessControl().getActionVertexList().contains(wv);
}

/** @return true if {@code wv} is one of this pattern's object vertices. */
public boolean isObjectNode(WordVertex wv) {
    return this.getAccessControl().getObjectVertexList().contains(wv);
}
/**
 * Tests whether concrete vertex {@code v1} matches pattern vertex
 * {@code vPattern}, applying the wildcard rules in order: the
 * anything-wildcard matches outright; the collapsed parts of speech must
 * then agree; the same-POS wildcard needs nothing further; the
 * same-element wildcard on subject/object pattern nodes matches against
 * the corresponding unique-lemma set; otherwise lemmas are compared
 * case-insensitively.
 *
 * @param v1 the vertex from the graph being examined
 * @param vPattern should belong to this access control object (ie, a child of the pattern's root)
 * @param uniqueSubjects lemmas accepted for same-element subject wildcards
 * @param uniqueActions not consulted by the current matching logic
 * @param uniqueObjects lemmas accepted for same-element object wildcards
 * @return true if {@code v1} is an acceptable match for {@code vPattern}
 */
public boolean nodeEquals(WordVertex v1, WordVertex vPattern, HashSet<String> uniqueSubjects, HashSet<String> uniqueActions, HashSet<String> uniqueObjects) {
    if (vPattern.getLemma().equals(WILDCARD_ANYTHING)) { // The '*' matches everything
        return true;
    }
    // Every remaining rule requires the collapsed parts of speech to agree.
    if (!v1.getPartOfSpeech().equalsCollapsed(vPattern.getPartOfSpeech())) { return false; }
    if (vPattern.getLemma().equals(WILDCARD_SAME_PART_OF_SPEECH)) { // The '%' just checks that the part of speech is the same, which we did right above
        return true;
    }
    if (this.isSubjectNode(vPattern)) { // just need to have the lemma in WordVertex as one of the keys in uniqueSubjects
        if (vPattern.getLemma().equals(WILDCARD_SAME_ELEMENT)) {
            return uniqueSubjects.contains(v1.getLemma());
        }
    }
    else if (this.isObjectNode(vPattern)) { // same idea, but the lemma must be one of the keys in uniqueObjects
        if (vPattern.getLemma().equals(WILDCARD_SAME_ELEMENT)) {
            return uniqueObjects.contains(v1.getLemma());
        }
    }
    // No wildcard applied: fall back to a case-insensitive lemma comparison.
    return v1.getLemma().equalsIgnoreCase(vPattern.getLemma());
}
/**
 * Reports whether the given vertex carries one of the wildcard lemmas
 * ({@code WILDCARD_ANYTHING}, {@code WILDCARD_SAME_ELEMENT} or
 * {@code WILDCARD_SAME_PART_OF_SPEECH}).
 *
 * @param wv the vertex to inspect
 * @return true if the vertex's lemma is a wildcard marker
 */
public static boolean isWildcard(WordVertex wv) {
    final String lemma = wv.getLemma();
    return lemma.equals(WILDCARD_ANYTHING)
            || lemma.equals(WILDCARD_SAME_ELEMENT)
            || lemma.equals(WILDCARD_SAME_PART_OF_SPEECH);
}
/**
 * This tests whether or not two graph patterns are equivalent to each other.
 *
 * <p>First the two nodes themselves are compared (collapsed part of speech,
 * subject/object/action role membership, wildcard status and lemma), then
 * their children are compared recursively. Children are matched by
 * relationship and do not need to come in the same order.
 *
 * @param acpA pattern that owns subtree {@code a} (consulted for role membership)
 * @param acpB pattern that owns subtree {@code b}
 * @param a root (or subgraph) of the first graph
 * @param b root (or subgraph) of the second graph to examine
 * @param exactMatch when true, element nodes must also agree on lemma
 * @return true if the two subtrees are structurally equivalent
 */
public static boolean graphPatternsEquals(AccessControlPattern acpA, AccessControlPattern acpB, WordVertex a, WordVertex b, boolean exactMatch) {
    if (!a.getPartOfSpeech().equalsCollapsed(b.getPartOfSpeech())) { return false; }
    // The two nodes must play the same roles in their respective patterns.
    if (acpA.isSubjectNode(a) != acpB.isSubjectNode(b)) { return false; }
    if (acpA.isObjectNode(a) != acpB.isObjectNode(b)) { return false; }
    if (acpA.isActionNode(a) != acpB.isActionNode(b)) { return false; }
    // Nodes outside the subject/object elements must agree on lemma outright.
    if ( acpA.isSubjectNode(a) == false && acpA.isObjectNode(a) == false && !a.getLemma().equalsIgnoreCase(b.getLemma())) { return false; }
    if ( acpA.isSubjectNode(a) || acpA.isObjectNode(a) || acpA.isActionNode(a)) {
        //if one of the nodes is defined as a wildcard, and the other not, then the patterns differ.
        if (isWildcard(a) != isWildcard(b)) {return false;}
        if (exactMatch && !a.getLemma().equalsIgnoreCase(b.getLemma())) { return false; }
    }
    if (a.getNumberOfChildren() != b.getNumberOfChildren()) { return false; }
    // Pair each child edge of 'a' with a not-yet-visited child of 'b' that
    // has the same relationship, then recurse into the paired subtrees.
    ArrayList<Integer> visitedNodesInB = new ArrayList<Integer>();
    for (int i=0; i< a.getNumberOfChildren(); i++) {
        WordEdge edgeA = a.getChildAt(i);
        int childPositionB = b.getChildRelationship(edgeA.getRelationship(), visitedNodesInB);
        if (childPositionB == -1) { return false; }
        if (edgeA.isWildcardRelationship() != b.getChildAt(childPositionB).isWildcardRelationship()) { return false; }
        visitedNodesInB.add(childPositionB);
        if (!AccessControlPattern.graphPatternsEquals(acpA, acpB, edgeA.getChildNode(), b.getChildAt(childPositionB).getChildNode(),exactMatch)) {
            return false;
        }
    }
    return true;
}
/**
 * Creates a new pattern from an existing pattern, but replaces any node of the specified element
 * with a wildcard such that nodes would just match by part of speech for those nodes.
 *
 * The existing pattern is not modified during this process (a copy is made
 * first and only the copy is rewritten).
 *
 * @param existingPattern the pattern to copy
 * @param elementToWildcard which element category to replace with the same-POS wildcard
 * @return the new, partially wildcarded pattern
 */
public static AccessControlPattern createMatchingElementOnPartofSpeechPattern(AccessControlPattern existingPattern, AccessControlElement elementToWildcard) {
    AccessControlPattern newPattern = new AccessControlPattern(existingPattern);
    replaceNodeWithWildcardPOS(newPattern,newPattern.getRoot(),elementToWildcard,new HashSet<Integer>());
    return newPattern;
}
/**
 * Recursively walks the pattern graph from {@code node}, replacing the lemma
 * of every node belonging to the requested element category with the
 * same-part-of-speech wildcard. For PREPOSITION, no lemma is rewritten;
 * instead, prepositional edges leading to object nodes are flagged as
 * wildcard relationships.
 *
 * @param pattern consulted for subject/action/object membership
 * @param node current node of the traversal
 * @param elementToWildcard which element category to wildcard
 * @param visitedNodes IDs already processed; guards against revisiting/cycles
 */
private static void replaceNodeWithWildcardPOS(AccessControlPattern pattern, WordVertex node, AccessControlElement elementToWildcard, HashSet<Integer> visitedNodes) {
    if (visitedNodes.contains(node.getID())) { return; }
    visitedNodes.add(node.getID());
    switch(elementToWildcard) {
    case SUBJECT: if (pattern.isSubjectNode(node)) { node.setLemma(WILDCARD_SAME_PART_OF_SPEECH); } break;
    case ACTION: if (pattern.isActionNode(node)) { node.setLemma(WILDCARD_SAME_PART_OF_SPEECH); } break;
    case OBJECT: if (pattern.isObjectNode(node)) { node.setLemma(WILDCARD_SAME_PART_OF_SPEECH); } break;
    // Prepositions live on edges, so the edge (not a lemma) is wildcarded.
    case PREPOSITION: Iterator<WordEdge> iChild = node.getChildren();
        while (iChild.hasNext()) {
            WordEdge we = iChild.next();
            if (we.getRelationship().isPreposition() && pattern.isObjectNode(we.getChildNode())) {
                we.setWildcardRelationship(true);
            }
        }
        break;
    }
    // Recurse into all children regardless of which case applied above.
    Iterator<WordEdge> iChild = node.getChildren();
    while (iChild.hasNext()) {
        replaceNodeWithWildcardPOS(pattern, iChild.next().getChildNode(), elementToWildcard, visitedNodes);
    }
}
/**
 * Creates the basic pattern used to search for the initial set of resources and subjects:
 * a verb root for {@code accessWord} with a same-POS-wildcarded noun subject
 * (NSUBJ edge) and a same-POS-wildcarded noun resource (DOBJ edge).
 *
 * @param accessWord the verb the pattern's root represents
 * @param permissions the permission string attached to the resulting relation
 * @return the seed pattern
 */
public static AccessControlPattern createBasicPattern(String accessWord, String permissions) {
    // Three vertices: concrete verb root, wildcarded subject, wildcarded resource.
    WordVertex actionWV = new WordVertex(1, accessWord, accessWord, PartOfSpeech.VB, "", 1, 1, 2);
    WordVertex subjectWV = new WordVertex(2, WILDCARD_SAME_PART_OF_SPEECH, WILDCARD_SAME_PART_OF_SPEECH, PartOfSpeech.NN, "", 1, 1, 1);
    WordVertex resourceWV = new WordVertex(3, WILDCARD_SAME_PART_OF_SPEECH, WILDCARD_SAME_PART_OF_SPEECH, PartOfSpeech.NN, "", 1, 1, 3);
    // Wire both children to the root (and the matching parent back-links).
    WordEdge weActionToSubject = new WordEdge(Relationship.NSUBJ, actionWV, subjectWV);
    WordEdge weActionToResource = new WordEdge(Relationship.DOBJ, actionWV, resourceWV);
    actionWV.addChild(weActionToSubject);
    actionWV.addChild(weActionToResource);
    subjectWV.addParent(weActionToSubject);
    resourceWV.addParent(weActionToResource);
    ArrayList<WordVertex> actionList = new ArrayList<WordVertex>(); actionList.add(actionWV);
    ArrayList<WordVertex> subjectList = new ArrayList<WordVertex>(); subjectList.add(subjectWV);
    ArrayList<WordVertex> resourceList = new ArrayList<WordVertex>(); resourceList.add(resourceWV);
    AccessControlRelation ac = new AccessControlRelation(subjectList, actionList, resourceList, permissions, null, null, true, RelationSource.SEED);
    AccessControlPattern acp = new AccessControlPattern(actionWV, ac, RelationSource.SEED);
    return acp;
}
/**
 * Creates the basic pattern, but using a pronoun for the subject: the
 * subject node is a same-POS-wildcarded pronoun (PRP), while the action and
 * object nodes carry the given concrete words.
 *
 * @param actionWord the verb the pattern's root represents
 * @param permissions the permission string attached to the resulting relation
 * @param object the concrete noun used for the resource node
 * @return the seed pattern
 */
public static AccessControlPattern createBasicPatternWithPronominalSubject(String actionWord, String permissions, String object) {
    // Concrete verb root, wildcarded pronoun subject, concrete noun resource.
    WordVertex actionWV = new WordVertex(1, actionWord, actionWord, PartOfSpeech.VB, "", 1, 1, 2);
    WordVertex subjectWV = new WordVertex(2, WILDCARD_SAME_PART_OF_SPEECH, WILDCARD_SAME_PART_OF_SPEECH, PartOfSpeech.PRP, "", 1, 1, 1);
    WordVertex resourceWV = new WordVertex(3, object, object, PartOfSpeech.NN, "", 1, 1, 3);
    WordEdge weActionToSubject = new WordEdge(Relationship.NSUBJ, actionWV, subjectWV);
    WordEdge weActionToResource = new WordEdge(Relationship.DOBJ, actionWV, resourceWV);
    actionWV.addChild(weActionToSubject);
    actionWV.addChild(weActionToResource);
    subjectWV.addParent(weActionToSubject);
    resourceWV.addParent(weActionToResource);
    ArrayList<WordVertex> actionList = new ArrayList<WordVertex>(); actionList.add(actionWV);
    ArrayList<WordVertex> subjectList = new ArrayList<WordVertex>(); subjectList.add(subjectWV);
    ArrayList<WordVertex> resourceList = new ArrayList<WordVertex>(); resourceList.add(resourceWV);
    AccessControlRelation ac = new AccessControlRelation(subjectList, actionList, resourceList, permissions, null, null, true, RelationSource.SEED);
    AccessControlPattern acp = new AccessControlPattern(actionWV, ac, RelationSource.SEED);
    return acp;
}
/**
 * Creates a two-node passive-voice pattern with no subject: a concrete verb
 * root whose single child is a same-POS-wildcarded noun resource attached
 * via an NSUBJPASS edge.
 *
 * @param actionWord the verb the pattern's root represents
 * @param permissions the permission string attached to the resulting relation
 * @return the seed pattern (its subject list is empty)
 */
public static AccessControlPattern createDoubleNodeWithMissingSubjectPassive(String actionWord, String permissions) {
    WordVertex actionWV = new WordVertex(1, actionWord, actionWord, PartOfSpeech.VB, "", 1, 1, 2);
    WordVertex resourceWV = new WordVertex(2, WILDCARD_SAME_PART_OF_SPEECH, WILDCARD_SAME_PART_OF_SPEECH, PartOfSpeech.NN, "", 1, 1, 3);
    WordEdge weActionToResource = new WordEdge(Relationship.NSUBJPASS, actionWV, resourceWV);
    actionWV.addChild(weActionToResource);
    resourceWV.addParent(weActionToResource);
    ArrayList<WordVertex> actionList = new ArrayList<WordVertex>(); actionList.add(actionWV);
    ArrayList<WordVertex> subjectList = new ArrayList<WordVertex>();  // intentionally empty: no subject in this pattern
    ArrayList<WordVertex> resourceList = new ArrayList<WordVertex>(); resourceList.add(resourceWV);
    AccessControlRelation ac = new AccessControlRelation(subjectList, actionList, resourceList, permissions, null, null, true, RelationSource.SEED);
    AccessControlPattern acp = new AccessControlPattern(actionWV, ac, RelationSource.SEED);
    return acp;
}
/**
 * Creates a two-node active-voice pattern with no subject: a concrete verb
 * root whose single child is a same-POS-wildcarded noun resource attached
 * via a DOBJ edge.
 *
 * @param actionWord the verb the pattern's root represents
 * @param permissions the permission string attached to the resulting relation
 * @return the seed pattern (its subject list is empty)
 */
public static AccessControlPattern createDoubleNodeWithMissingSubjectActive(String actionWord, String permissions) {
    WordVertex actionWV = new WordVertex(1, actionWord, actionWord, PartOfSpeech.VB, "", 1, 1, 2);
    WordVertex resourceWV = new WordVertex(2, WILDCARD_SAME_PART_OF_SPEECH, WILDCARD_SAME_PART_OF_SPEECH, PartOfSpeech.NN, "", 1, 1, 3);
    WordEdge weActionToResource = new WordEdge(Relationship.DOBJ, actionWV, resourceWV);
    actionWV.addChild(weActionToResource);
    resourceWV.addParent(weActionToResource);
    ArrayList<WordVertex> actionList = new ArrayList<WordVertex>(); actionList.add(actionWV);
    ArrayList<WordVertex> subjectList = new ArrayList<WordVertex>();  // intentionally empty: no subject in this pattern
    ArrayList<WordVertex> resourceList = new ArrayList<WordVertex>(); resourceList.add(resourceWV);
    AccessControlRelation ac = new AccessControlRelation(subjectList, actionList, resourceList, permissions, null, null, true, RelationSource.SEED);
    AccessControlPattern acp = new AccessControlPattern(actionWV, ac, RelationSource.SEED);
    return acp;
}
/**
 * Rewrites this pattern's grammatical relations from active to passive
 * voice for every subject and object vertex: NSUBJ parent edges become
 * AGENT, and DOBJ parent edges become NSUBJPASS.
 */
public void changePatternFromActiveToPassive() {
    changeActiveElementsToPassive(getAccessControl().getSubjectVertexList());
    changeActiveElementsToPassive(getAccessControl().getObjectVertexList());
}

/**
 * Converts the parent edges of each listed vertex from active-voice
 * relationships to their passive-voice equivalents.
 *
 * @param list subject or object vertices whose parent edges are rewritten
 */
private void changeActiveElementsToPassive(List<WordVertex> list) {
    for (WordVertex vertex : list) {
        final int parentCount = vertex.getNumberOfParents();
        for (int parentIndex = 0; parentIndex < parentCount; parentIndex++) {
            WordEdge edge = vertex.getParentAt(parentIndex);
            if (edge.getRelationship() == Relationship.NSUBJ) {
                edge.changeRelationship(Relationship.AGENT);
            } else if (edge.getRelationship() == Relationship.DOBJ) {
                edge.changeRelationship(Relationship.NSUBJPASS);
            }
        }
    }
}
/**
 * Rewrites this pattern's grammatical relations from passive back to active
 * voice for every subject and object vertex: AGENT parent edges become
 * NSUBJ, and NSUBJPASS parent edges become DOBJ.
 */
public void changePatternFromPassiveToActive() {
    changePassiveElementsToActive(getAccessControl().getSubjectVertexList());
    changePassiveElementsToActive(getAccessControl().getObjectVertexList());
}

/**
 * Converts the parent edges of each listed vertex from passive-voice
 * relationships to their active-voice equivalents.
 *
 * @param list subject or object vertices whose parent edges are rewritten
 */
private void changePassiveElementsToActive(List<WordVertex> list) {
    for (WordVertex vertex : list) {
        final int parentCount = vertex.getNumberOfParents();
        for (int parentIndex = 0; parentIndex < parentCount; parentIndex++) {
            WordEdge edge = vertex.getParentAt(parentIndex);
            if (edge.getRelationship() == Relationship.AGENT) {
                edge.changeRelationship(Relationship.NSUBJ);
            } else if (edge.getRelationship() == Relationship.NSUBJPASS) {
                edge.changeRelationship(Relationship.DOBJ);
            }
        }
    }
}
/** Marks this pattern as having been checked for transformations (one-way flag). */
@JsonIgnore
public void setCheckedForTransformations() {
    _hasBeenCheckedForTransformations = true;
}

/** @return true once {@link #setCheckedForTransformations()} has been called. */
@JsonIgnore
public boolean hasBeenCheckedForTransformations() {
    return _hasBeenCheckedForTransformations;
}
}
| |
/*
* Copyright 2012-2013 inBloom, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2012.04.20 at 03:09:04 PM EDT
//
package org.slc.sli.sample.entities;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* This entity represents subtests that assess specific learning objectives.
*
*
* <p>Java class for ObjectiveAssessment complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ObjectiveAssessment">
* <complexContent>
* <extension base="{http://ed-fi.org/0100}ComplexObjectType">
* <sequence>
* <element name="IdentificationCode" type="{http://ed-fi.org/0100}IdentificationCode"/>
* <element name="MaxRawScore" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="AssessmentPerformanceLevel" type="{http://ed-fi.org/0100}AssessmentPerformanceLevel" maxOccurs="unbounded" minOccurs="0"/>
* <element name="PercentOfAssessment" type="{http://ed-fi.org/0100}percent" minOccurs="0"/>
* <element name="Nomenclature" type="{http://ed-fi.org/0100}Nomenclature" minOccurs="0"/>
* <element name="AssessmentItemReference" type="{http://ed-fi.org/0100}ReferenceType" maxOccurs="unbounded" minOccurs="0"/>
* <element name="LearningObjectiveReference" type="{http://ed-fi.org/0100}LearningObjectiveReferenceType" maxOccurs="unbounded" minOccurs="0"/>
* <element name="LearningStandardReference" type="{http://ed-fi.org/0100}LearningStandardReferenceType" maxOccurs="unbounded" minOccurs="0"/>
* <element name="ObjectiveAssessmentReference" type="{http://ed-fi.org/0100}ReferenceType" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ObjectiveAssessment", propOrder = {
    "identificationCode",
    "maxRawScore",
    "assessmentPerformanceLevel",
    "percentOfAssessment",
    "nomenclature",
    "assessmentItemReference",
    "learningObjectiveReference",
    "learningStandardReference",
    "objectiveAssessmentReference"
})
public class ObjectiveAssessment
    extends ComplexObjectType
{

    // NOTE: JAXB-generated class. The list-valued properties below are
    // lazily created by their getters and exposed as live lists, so they
    // deliberately have no setters.

    @XmlElement(name = "IdentificationCode", required = true)
    protected String identificationCode;
    @XmlElement(name = "MaxRawScore")
    protected Integer maxRawScore;
    @XmlElement(name = "AssessmentPerformanceLevel")
    protected List<AssessmentPerformanceLevel> assessmentPerformanceLevel;
    @XmlElement(name = "PercentOfAssessment")
    protected Integer percentOfAssessment;
    @XmlElement(name = "Nomenclature")
    protected String nomenclature;
    @XmlElement(name = "AssessmentItemReference")
    protected List<ReferenceType> assessmentItemReference;
    @XmlElement(name = "LearningObjectiveReference")
    protected List<LearningObjectiveReferenceType> learningObjectiveReference;
    @XmlElement(name = "LearningStandardReference")
    protected List<LearningStandardReferenceType> learningStandardReference;
    @XmlElement(name = "ObjectiveAssessmentReference")
    protected List<ReferenceType> objectiveAssessmentReference;

    /**
     * Gets the value of the identificationCode property.
     *
     * @return possible object is {@link String }
     */
    public String getIdentificationCode() {
        return identificationCode;
    }

    /**
     * Sets the value of the identificationCode property.
     *
     * @param value allowed object is {@link String }
     */
    public void setIdentificationCode(String value) {
        this.identificationCode = value;
    }

    /**
     * Gets the value of the maxRawScore property.
     *
     * @return possible object is {@link Integer }
     */
    public Integer getMaxRawScore() {
        return maxRawScore;
    }

    /**
     * Sets the value of the maxRawScore property.
     *
     * @param value allowed object is {@link Integer }
     */
    public void setMaxRawScore(Integer value) {
        this.maxRawScore = value;
    }

    /**
     * Gets the value of the assessmentPerformanceLevel property.
     *
     * <p>Returns the live, lazily-created list (not a snapshot); mutations
     * to the returned list are reflected in this object, which is why no
     * {@code set} method exists. Allowed element type:
     * {@link AssessmentPerformanceLevel}.
     */
    public List<AssessmentPerformanceLevel> getAssessmentPerformanceLevel() {
        if (assessmentPerformanceLevel == null) {
            assessmentPerformanceLevel = new ArrayList<AssessmentPerformanceLevel>();
        }
        return this.assessmentPerformanceLevel;
    }

    /**
     * Gets the value of the percentOfAssessment property.
     *
     * @return possible object is {@link Integer }
     */
    public Integer getPercentOfAssessment() {
        return percentOfAssessment;
    }

    /**
     * Sets the value of the percentOfAssessment property.
     *
     * @param value allowed object is {@link Integer }
     */
    public void setPercentOfAssessment(Integer value) {
        this.percentOfAssessment = value;
    }

    /**
     * Gets the value of the nomenclature property.
     *
     * @return possible object is {@link String }
     */
    public String getNomenclature() {
        return nomenclature;
    }

    /**
     * Sets the value of the nomenclature property.
     *
     * @param value allowed object is {@link String }
     */
    public void setNomenclature(String value) {
        this.nomenclature = value;
    }

    /**
     * Gets the value of the assessmentItemReference property.
     *
     * <p>Returns the live, lazily-created list (not a snapshot); mutations
     * to the returned list are reflected in this object, which is why no
     * {@code set} method exists. Allowed element type: {@link ReferenceType}.
     */
    public List<ReferenceType> getAssessmentItemReference() {
        if (assessmentItemReference == null) {
            assessmentItemReference = new ArrayList<ReferenceType>();
        }
        return this.assessmentItemReference;
    }

    /**
     * Gets the value of the learningObjectiveReference property.
     *
     * <p>Returns the live, lazily-created list (not a snapshot); mutations
     * to the returned list are reflected in this object, which is why no
     * {@code set} method exists. Allowed element type:
     * {@link LearningObjectiveReferenceType}.
     */
    public List<LearningObjectiveReferenceType> getLearningObjectiveReference() {
        if (learningObjectiveReference == null) {
            learningObjectiveReference = new ArrayList<LearningObjectiveReferenceType>();
        }
        return this.learningObjectiveReference;
    }

    /**
     * Gets the value of the learningStandardReference property.
     *
     * <p>Returns the live, lazily-created list (not a snapshot); mutations
     * to the returned list are reflected in this object, which is why no
     * {@code set} method exists. Allowed element type:
     * {@link LearningStandardReferenceType}.
     */
    public List<LearningStandardReferenceType> getLearningStandardReference() {
        if (learningStandardReference == null) {
            learningStandardReference = new ArrayList<LearningStandardReferenceType>();
        }
        return this.learningStandardReference;
    }

    /**
     * Gets the value of the objectiveAssessmentReference property.
     *
     * <p>Returns the live, lazily-created list (not a snapshot); mutations
     * to the returned list are reflected in this object, which is why no
     * {@code set} method exists. Allowed element type: {@link ReferenceType}.
     */
    public List<ReferenceType> getObjectiveAssessmentReference() {
        if (objectiveAssessmentReference == null) {
            objectiveAssessmentReference = new ArrayList<ReferenceType>();
        }
        return this.objectiveAssessmentReference;
    }

}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.toolbar;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.Pair;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.ThreadUtils;
import org.chromium.base.VisibleForTesting;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.WindowDelegate;
import org.chromium.chrome.browser.appmenu.AppMenuButtonHelper;
import org.chromium.chrome.browser.dom_distiller.DomDistillerServiceFactory;
import org.chromium.chrome.browser.dom_distiller.DomDistillerTabUtils;
import org.chromium.chrome.browser.ntp.NativePageFactory;
import org.chromium.chrome.browser.ntp.NewTabPage;
import org.chromium.chrome.browser.omnibox.LocationBar;
import org.chromium.chrome.browser.omnibox.LocationBarLayout;
import org.chromium.chrome.browser.omnibox.UrlBar;
import org.chromium.chrome.browser.omnibox.UrlFocusChangeListener;
import org.chromium.chrome.browser.pageinfo.WebsiteSettingsPopup;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.toolbar.ActionModeController.ActionBarDelegate;
import org.chromium.chrome.browser.util.ColorUtils;
import org.chromium.chrome.browser.widget.TintedDrawable;
import org.chromium.components.dom_distiller.core.DomDistillerService;
import org.chromium.components.dom_distiller.core.DomDistillerUrlUtils;
import org.chromium.components.security_state.ConnectionSecurityLevel;
import org.chromium.ui.base.WindowAndroid;
/**
* The Toolbar layout to be used for a custom tab. This is used for both phone and tablet UIs.
*/
public class CustomTabToolbar extends ToolbarLayout implements LocationBar,
View.OnLongClickListener {
    // Minimum time (ms) after initialize() before the title animation may start.
    private static final int TITLE_ANIM_DELAY_MS = 200;

    private View mLocationBarFrameLayout;
    private View mTitleUrlContainer;
    private UrlBar mUrlBar;
    private TextView mTitleBar;
    private ImageView mSecurityButton;
    private ImageButton mCustomActionButton;
    private int mSecurityIconType;
    private boolean mShouldShowTitle;
    private ImageButton mCloseButton;

    // Whether dark tint should be applied to icons and text.
    private boolean mUseDarkColors;
    private CustomTabToolbarAnimationDelegate mAnimDelegate;
    private boolean mBackgroundColorSet;
    // Wall-clock time at which initialize() ran; used to delay the title animation.
    private long mInitializeTimeStamp;

    // Starts the title animation; either run immediately or posted with a delay.
    private Runnable mTitleAnimationStarter = new Runnable() {
        @Override
        public void run() {
            mAnimDelegate.startTitleAnimation(getContext());
        }
    };
    /**
     * Constructor for getting this class inflated from an xml layout file.
     */
    public CustomTabToolbar(Context context, AttributeSet attrs) {
        super(context, attrs);
    }
    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        setBackground(new ColorDrawable(
                ApiCompatibilityUtils.getColor(getResources(), R.color.default_primary_color)));
        // The URL bar in a custom tab is display-only: no hint, no focus, no editing.
        mUrlBar = (UrlBar) findViewById(R.id.url_bar);
        mUrlBar.setHint("");
        mUrlBar.setDelegate(this);
        mUrlBar.setEnabled(false);
        mUrlBar.setAllowFocus(false);
        mTitleBar = (TextView) findViewById(R.id.title_bar);
        mLocationBarFrameLayout = findViewById(R.id.location_bar_frame_layout);
        mTitleUrlContainer = findViewById(R.id.title_url_container);
        mSecurityButton = (ImageButton) findViewById(R.id.security_button);
        mSecurityIconType = ConnectionSecurityLevel.NONE;
        mCustomActionButton = (ImageButton) findViewById(R.id.action_button);
        mCustomActionButton.setOnLongClickListener(this);
        mCloseButton = (ImageButton) findViewById(R.id.close_button);
        mCloseButton.setOnLongClickListener(this);
        mAnimDelegate = new CustomTabToolbarAnimationDelegate(mSecurityButton, mTitleUrlContainer);
    }
    @Override
    protected int getToolbarHeightWithoutShadowResId() {
        return R.dimen.custom_tabs_control_container_height;
    }

    @Override
    public void initialize(ToolbarDataProvider toolbarDataProvider,
            ToolbarTabController tabController, AppMenuButtonHelper appMenuButtonHelper) {
        super.initialize(toolbarDataProvider, tabController, appMenuButtonHelper);
        updateVisualsForState();
        // Remember when initialization happened so setTitleToPageTitle() can
        // delay the title animation relative to this point.
        mInitializeTimeStamp = System.currentTimeMillis();
    }
    @Override
    public void onNativeLibraryReady() {
        super.onNativeLibraryReady();
        // Tapping the security (lock) icon opens the page-info popup for the
        // current tab, if a tab with live web contents is attached.
        mSecurityButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                Tab currentTab = getToolbarDataProvider().getTab();
                if (currentTab == null || currentTab.getWebContents() == null) return;
                // The window's Activity may already be gone (weak reference).
                Activity activity = currentTab.getWindowAndroid().getActivity().get();
                if (activity == null) return;
                WebsiteSettingsPopup.show(activity, currentTab.getProfile(),
                        currentTab.getWebContents());
            }
        });
    }
    /** Sets the drawable shown on the custom tab's close button. */
    @Override
    public void setCloseButtonImageResource(Drawable drawable) {
        mCloseButton.setImageDrawable(drawable);
    }

    /** Sets the click handler invoked when the close button is tapped. */
    @Override
    public void setCustomTabCloseClickHandler(OnClickListener listener) {
        mCloseButton.setOnClickListener(listener);
    }
    /**
     * Installs the embedder-supplied action button: sizes its padding so the
     * scaled icon is centered with at least the minimum side padding, then
     * sets the drawable, description and click handler and makes it visible.
     */
    @Override
    public void setCustomActionButton(Drawable drawable, String description,
            OnClickListener listener) {
        Resources resources = getResources();
        // The height will be scaled to match spec while keeping the aspect ratio, so get the scaled
        // width through that.
        // NOTE(review): assumes the drawable reports positive intrinsic
        // dimensions; getIntrinsicHeight() can be <= 0 for some drawables,
        // which would make this division misbehave — confirm callers.
        int sourceHeight = drawable.getIntrinsicHeight();
        int sourceScaledHeight = resources.getDimensionPixelSize(R.dimen.toolbar_icon_height);
        int sourceWidth = drawable.getIntrinsicWidth();
        int sourceScaledWidth = sourceWidth * sourceScaledHeight / sourceHeight;
        int minPadding = resources.getDimensionPixelSize(R.dimen.min_toolbar_icon_side_padding);
        // Center the scaled icon within a square of side 2*scaledHeight,
        // but never drop below the minimum side padding.
        int sidePadding = Math.max((2 * sourceScaledHeight - sourceScaledWidth) / 2, minPadding);
        int topPadding = mCustomActionButton.getPaddingTop();
        int bottomPadding = mCustomActionButton.getPaddingBottom();
        mCustomActionButton.setPadding(sidePadding, topPadding, sidePadding, bottomPadding);
        mCustomActionButton.setImageDrawable(drawable);
        mCustomActionButton.setContentDescription(description);
        mCustomActionButton.setOnClickListener(listener);
        mCustomActionButton.setVisibility(VISIBLE);
        updateButtonsTint();
    }
    /**
     * @return The custom action button. For test purpose only.
     */
    @VisibleForTesting
    public ImageButton getCustomActionButtonForTest() {
        return mCustomActionButton;
    }

    /** Custom tabs have no tab strip, so its height is always zero. */
    @Override
    public int getTabStripHeight() {
        return 0;
    }

    /** @return the tab currently supplied by the toolbar data provider. */
    @Override
    public Tab getCurrentTab() {
        return getToolbarDataProvider().getTab();
    }
@Override
public boolean shouldEmphasizeHttpsScheme() {
int securityLevel = getSecurityLevel();
if (securityLevel == ConnectionSecurityLevel.SECURITY_ERROR
|| securityLevel == ConnectionSecurityLevel.SECURITY_POLICY_WARNING) {
return true;
}
return false;
}
    /**
     * Enables or disables showing the page title; when enabled, prepares the
     * URL-bar/title-bar animation so the title can be revealed later.
     */
    @Override
    public void setShowTitle(boolean showTitle) {
        mShouldShowTitle = showTitle;
        if (mShouldShowTitle) mAnimDelegate.prepareTitleAnim(mUrlBar, mTitleBar);
    }
    /**
     * Copies the current tab's title into the title bar, starting (or
     * scheduling) the title animation once the title looks authentic.
     */
    @Override
    public void setTitleToPageTitle() {
        Tab currentTab = getToolbarDataProvider().getTab();
        if (currentTab == null || TextUtils.isEmpty(currentTab.getTitle())) {
            mTitleBar.setText("");
            return;
        }
        // It takes some time to parse the title of the webcontent, and before that Tab#getTitle
        // always return the url. We postpone the title animation until the title is authentic.
        // TODO(yusufo): Clear the explicit references to about:blank here and for domain.
        if (mShouldShowTitle
                && !TextUtils.equals(currentTab.getTitle(), currentTab.getUrl())
                && !TextUtils.equals(currentTab.getTitle(), "about:blank")) {
            // Run the animation immediately if the toolbar has existed long
            // enough; otherwise post it for the remainder of the delay.
            long duration = System.currentTimeMillis() - mInitializeTimeStamp;
            if (duration >= TITLE_ANIM_DELAY_MS) {
                mTitleAnimationStarter.run();
            } else {
                ThreadUtils.postOnUiThreadDelayed(mTitleAnimationStarter,
                        TITLE_ANIM_DELAY_MS - duration);
            }
        }
        mTitleBar.setText(currentTab.getTitle());
    }
    /** Refreshes the displayed title whenever navigation lands on a new page. */
    @Override
    protected void onNavigatedToDifferentPage() {
        super.onNavigatedToDifferentPage();
        setTitleToPageTitle();
    }
    /**
     * Updates the URL bar from the current tab's URL. Native/Chrome pages and
     * "about:blank" display nothing; distilled (reader-mode) pages display
     * the formatted original article URL instead of the distiller URL.
     */
    @Override
    public void setUrlToPageUrl() {
        if (getCurrentTab() == null) {
            mUrlBar.setUrl("", null);
            return;
        }
        String url = getCurrentTab().getUrl().trim();
        // Don't show anything for Chrome URLs and "about:blank".
        // If we have taken a pre-initialized WebContents, then the starting URL
        // is "about:blank". We should not display it.
        if (NativePageFactory.isNativePageUrl(url, getCurrentTab().isIncognito())
                || "about:blank".equals(url)) {
            mUrlBar.setUrl("", null);
            return;
        }
        String displayText = getToolbarDataProvider().getText();
        Pair<String, String> urlText = LocationBarLayout.splitPathFromUrlDisplayText(displayText);
        displayText = urlText.first;
        if (DomDistillerUrlUtils.isDistilledPage(url)) {
            if (isStoredArticle(url)) {
                // Article saved by the distiller: resolve its original URL via
                // the DOM distiller service's entry id.
                Profile profile = getCurrentTab().getProfile();
                DomDistillerService domDistillerService =
                        DomDistillerServiceFactory.getForProfile(profile);
                String originalUrl = domDistillerService.getUrlForEntry(
                        DomDistillerUrlUtils.getValueForKeyInUrl(url, "entry_id"));
                displayText =
                        DomDistillerTabUtils.getFormattedUrlFromOriginalDistillerUrl(originalUrl);
            } else if (DomDistillerUrlUtils.getOriginalUrlFromDistillerUrl(url) != null) {
                // Otherwise the original URL is embedded in the distiller URL.
                String originalUrl = DomDistillerUrlUtils.getOriginalUrlFromDistillerUrl(url);
                displayText =
                        DomDistillerTabUtils.getFormattedUrlFromOriginalDistillerUrl(originalUrl);
            }
        }
        // setUrl() returns true when the displayed text actually changed;
        // re-run emphasis only in that case.
        if (mUrlBar.setUrl(url, displayText)) {
            mUrlBar.deEmphasizeUrl();
            mUrlBar.emphasizeUrl();
        }
    }
/**
 * Returns whether the given distilled-page URL has a stored entry in the
 * DomDistiller service of the last used profile.
 */
private boolean isStoredArticle(String url) {
    // Look the service up first (matching the original call order), then
    // check that the URL actually carries an entry id before querying it.
    DomDistillerService service =
            DomDistillerServiceFactory.getForProfile(Profile.getLastUsedProfile());
    String entryId = DomDistillerUrlUtils.getValueForKeyInUrl(url, "entry_id");
    return !TextUtils.isEmpty(entryId) && service.hasEntry(entryId);
}
@Override
public void updateLoadingState(boolean updateUrl) {
    // Optionally refresh the displayed URL, then always re-sync the security
    // icon with the current tab's connection security level.
    if (updateUrl) setUrlToPageUrl();
    updateSecurityIcon(getSecurityLevel());
}
@Override
public void updateVisualsForState() {
    Resources resources = getResources();
    updateSecurityIcon(getSecurityLevel());
    updateButtonsTint();
    mUrlBar.setUseDarkTextColors(mUseDarkColors);
    // Title color follows the same dark/light choice as the rest of the
    // toolbar foreground.
    int titleTextColor = mUseDarkColors
            ? ApiCompatibilityUtils.getColor(resources, R.color.url_emphasis_default_text)
            : ApiCompatibilityUtils.getColor(resources,
                    R.color.url_emphasis_light_default_text);
    mTitleBar.setTextColor(titleTextColor);
    if (getProgressBar() != null) {
        if (mBackgroundColorSet && !mUseDarkColors) {
            // Custom background with light foreground: derive the progress
            // bar background from the toolbar's primary color.
            getProgressBar().setBackgroundColor(ColorUtils
                    .getLightProgressbarBackground(getToolbarDataProvider().getPrimaryColor()));
            getProgressBar().setForegroundColor(ApiCompatibilityUtils.getColor(resources,
                    R.color.progress_bar_foreground_white));
        } else {
            int progressBarBackgroundColorResource = mUseDarkColors
                    ? R.color.progress_bar_background : R.color.progress_bar_background_white;
            getProgressBar().setBackgroundColor(ApiCompatibilityUtils.getColor(resources,
                    progressBarBackgroundColorResource));
        }
    }
}
// Applies the dark/light tint to the menu button and, where the current
// drawables support tinting, to the close and custom action buttons.
private void updateButtonsTint() {
    mMenuButton.setTint(mUseDarkColors ? mDarkModeTint : mLightModeTint);
    if (mCloseButton.getDrawable() instanceof TintedDrawable) {
        ((TintedDrawable) mCloseButton.getDrawable()).setTint(
                mUseDarkColors ? mDarkModeTint : mLightModeTint);
    }
    if (mCustomActionButton.getDrawable() instanceof TintedDrawable) {
        ((TintedDrawable) mCustomActionButton.getDrawable()).setTint(
                mUseDarkColors ? mDarkModeTint : mLightModeTint);
    }
}
@Override
public void setMenuButtonHelper(final AppMenuButtonHelper helper) {
    // Route raw touch events on the menu button to the helper.
    mMenuButton.setOnTouchListener(new OnTouchListener() {
        @SuppressLint("ClickableViewAccessibility")
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return helper.onTouch(v, event);
        }
    });
    // Keyboard support: ENTER key-up on the focused menu button opens the
    // menu via the helper as well.
    mMenuButton.setOnKeyListener(new OnKeyListener() {
        @Override
        public boolean onKey(View view, int keyCode, KeyEvent event) {
            if (keyCode == KeyEvent.KEYCODE_ENTER && event.getAction() == KeyEvent.ACTION_UP) {
                return helper.onEnterKeyPress(view);
            }
            return false;
        }
    });
}
@Override
public View getMenuAnchor() {
    // The app menu is anchored directly on the menu button in this toolbar.
    return mMenuButton;
}
@Override
protected void onConfigurationChanged(Configuration newConfig) {
    super.onConfigurationChanged(newConfig);
    // Re-sync title and URL text after a configuration change.
    setTitleToPageTitle();
    setUrlToPageUrl();
}
@Override
public ColorDrawable getBackground() {
    // The background is always a ColorDrawable here; narrowing the return
    // type lets callers read/modify its color directly.
    return (ColorDrawable) super.getBackground();
}
@Override
public void initializeControls(WindowDelegate windowDelegate, ActionBarDelegate delegate,
        WindowAndroid windowAndroid) {
    // Intentionally a no-op: this toolbar does not use these controls.
}
/**
 * Returns the connection security level of the current tab, or
 * {@code ConnectionSecurityLevel.NONE} when no tab is active.
 */
private int getSecurityLevel() {
    return getCurrentTab() == null
            ? ConnectionSecurityLevel.NONE
            : getCurrentTab().getSecurityLevel();
}
@Override
public void updateSecurityIcon(int securityLevel) {
    // Skip redundant updates for the level that is already displayed.
    if (mSecurityIconType == securityLevel) return;
    mSecurityIconType = securityLevel;
    if (securityLevel == ConnectionSecurityLevel.NONE) {
        mAnimDelegate.hideSecurityButton();
    } else {
        int id = LocationBarLayout.getSecurityIconResource(
                securityLevel, !shouldEmphasizeHttpsScheme());
        // ImageView#setImageResource is no-op if given resource is the current one.
        if (id == 0) {
            mSecurityButton.setImageDrawable(null);
        } else {
            mSecurityButton.setImageResource(id);
        }
        mAnimDelegate.showSecurityButton();
    }
    // The security level influences URL emphasis coloring, so re-emphasize
    // and redraw the URL bar.
    mUrlBar.emphasizeUrl();
    mUrlBar.invalidate();
}
/**
 * For extending classes to override and carry out the changes related with the primary color
 * for the current tab changing.
 */
@Override
protected void onPrimaryColorChanged(boolean shouldAnimate) {
    // The custom tab toolbar color is applied exactly once.
    if (mBackgroundColorSet) return;
    mBackgroundColorSet = true;
    int primaryColor = getToolbarDataProvider().getPrimaryColor();
    getBackground().setColor(primaryColor);
    // Use dark foreground elements unless the background requires light ones.
    mUseDarkColors = !ColorUtils.shoudUseLightForegroundOnBackground(primaryColor);
    updateVisualsForState();
}
@Override
public View getContainerView() {
    // This view serves as its own location bar container.
    return this;
}
@Override
public void setDefaultTextEditActionModeCallback(ToolbarActionModeCallback callback) {
    // Install the toolbar's text-selection action mode callback on the URL bar.
    mUrlBar.setCustomSelectionActionModeCallback(callback);
}
// Positions the toolbar children manually: each visible child before the
// location bar gets a start margin equal to the accumulated width of its
// preceding siblings, and the location bar gets an end margin equal to the
// total width of the children after it.
private void updateLayoutParams() {
    int startMargin = 0;
    int locationBarLayoutChildIndex = -1;
    // First pass: assign start margins and measure each child up to (but not
    // including) the location bar, which consumes the remaining space.
    for (int i = 0; i < getChildCount(); i++) {
        View childView = getChildAt(i);
        if (childView.getVisibility() != GONE) {
            LayoutParams childLayoutParams = (LayoutParams) childView.getLayoutParams();
            if (ApiCompatibilityUtils.getMarginStart(childLayoutParams) != startMargin) {
                ApiCompatibilityUtils.setMarginStart(childLayoutParams, startMargin);
                childView.setLayoutParams(childLayoutParams);
            }
            if (childView == mLocationBarFrameLayout) {
                locationBarLayoutChildIndex = i;
                break;
            }
            // Build measure specs mirroring the child's requested layout
            // dimensions so getMeasuredWidth() below is meaningful.
            int widthMeasureSpec;
            int heightMeasureSpec;
            if (childLayoutParams.width == LayoutParams.WRAP_CONTENT) {
                widthMeasureSpec = MeasureSpec.makeMeasureSpec(
                        getMeasuredWidth(), MeasureSpec.AT_MOST);
            } else if (childLayoutParams.width == LayoutParams.MATCH_PARENT) {
                widthMeasureSpec = MeasureSpec.makeMeasureSpec(
                        getMeasuredWidth(), MeasureSpec.EXACTLY);
            } else {
                widthMeasureSpec = MeasureSpec.makeMeasureSpec(
                        childLayoutParams.width, MeasureSpec.EXACTLY);
            }
            if (childLayoutParams.height == LayoutParams.WRAP_CONTENT) {
                heightMeasureSpec = MeasureSpec.makeMeasureSpec(
                        getMeasuredHeight(), MeasureSpec.AT_MOST);
            } else if (childLayoutParams.height == LayoutParams.MATCH_PARENT) {
                heightMeasureSpec = MeasureSpec.makeMeasureSpec(
                        getMeasuredHeight(), MeasureSpec.EXACTLY);
            } else {
                heightMeasureSpec = MeasureSpec.makeMeasureSpec(
                        childLayoutParams.height, MeasureSpec.EXACTLY);
            }
            childView.measure(widthMeasureSpec, heightMeasureSpec);
            startMargin += childView.getMeasuredWidth();
        }
    }
    assert locationBarLayoutChildIndex != -1;
    // Second pass: sum the widths of the visible children that follow the
    // location bar to compute its end margin.
    int locationBarLayoutEndMargin = 0;
    for (int i = locationBarLayoutChildIndex + 1; i < getChildCount(); i++) {
        View childView = getChildAt(i);
        if (childView.getVisibility() != GONE) {
            locationBarLayoutEndMargin += childView.getMeasuredWidth();
        }
    }
    LayoutParams urlLayoutParams = (LayoutParams) mLocationBarFrameLayout.getLayoutParams();
    if (ApiCompatibilityUtils.getMarginEnd(urlLayoutParams) != locationBarLayoutEndMargin) {
        ApiCompatibilityUtils.setMarginEnd(urlLayoutParams, locationBarLayoutEndMargin);
        mLocationBarFrameLayout.setLayoutParams(urlLayoutParams);
    }
    // Update left margin of mTitleUrlContainer here to make sure the security icon is always
    // placed left of the urlbar.
    LayoutParams lp = (LayoutParams) mTitleUrlContainer.getLayoutParams();
    if (mSecurityButton.getVisibility() == View.GONE) {
        lp.leftMargin = 0;
    } else {
        lp.leftMargin = mSecurityButton.getMeasuredWidth();
    }
    mTitleUrlContainer.setLayoutParams(lp);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    // Margins depend on sibling widths, so recompute them before the
    // standard measurement pass.
    updateLayoutParams();
    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
@Override
public LocationBar getLocationBar() {
    // This toolbar doubles as its own location bar.
    return this;
}
/**
 * Shows the content description of a long-pressed toolbar button as an
 * accessibility toast. Returns false for views without a known description.
 */
@Override
public boolean onLongClick(View v) {
    if (v == mCloseButton) {
        return showAccessibilityToast(v, getResources().getString(R.string.close_tab));
    }
    if (v == mCustomActionButton) {
        return showAccessibilityToast(v, mCustomActionButton.getContentDescription());
    }
    return false;
}
// Toolbar and LocationBar calls that are not relevant here.
// The overrides below are intentional no-ops (or fixed values): this toolbar
// has no focusable omnibox, suggestions, NTP hooks, or app menu update badge.
@Override
public void setToolbarDataProvider(ToolbarDataProvider model) { }
@Override
public void onUrlPreFocusChanged(boolean gainFocus) {
}
@Override
public void setUrlFocusChangeListener(UrlFocusChangeListener listener) { }
@Override
public void setUrlBarFocus(boolean shouldBeFocused) { }
@Override
public void revertChanges() { }
@Override
public long getFirstUrlBarFocusTime() {
    // Never focused; report 0.
    return 0;
}
@Override
public void setIgnoreURLBarModification(boolean ignore) {
}
@Override
public void hideSuggestions() {
}
@Override
public void updateMicButtonState() {
}
@Override
public void onTabLoadingNTP(NewTabPage ntp) {
}
@Override
public void setAutocompleteProfile(Profile profile) {
}
@Override
public void backKeyPressed() { }
@Override
public void showAppMenuUpdateBadge() {
}
@Override
public boolean isShowingAppMenuUpdateBadge() {
    return false;
}
@Override
public void removeAppMenuUpdateBadge(boolean animate) {
}
@Override
protected void setAppMenuUpdateBadgeToVisible(boolean animate) {
}
@Override
public View getMenuButtonWrapper() {
    // This class has no menu button wrapper, so return the menu button instead.
    return mMenuButton;
}
}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.yangutils.linker.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.onosproject.yangutils.datamodel.YangAtomicPath;
import org.onosproject.yangutils.datamodel.YangAugment;
import org.onosproject.yangutils.datamodel.YangCase;
import org.onosproject.yangutils.datamodel.YangChoice;
import org.onosproject.yangutils.datamodel.YangGrouping;
import org.onosproject.yangutils.datamodel.YangImport;
import org.onosproject.yangutils.datamodel.YangInclude;
import org.onosproject.yangutils.datamodel.YangInput;
import org.onosproject.yangutils.datamodel.YangLeaf;
import org.onosproject.yangutils.datamodel.YangLeafList;
import org.onosproject.yangutils.datamodel.YangLeafRef;
import org.onosproject.yangutils.datamodel.YangLeavesHolder;
import org.onosproject.yangutils.datamodel.YangModule;
import org.onosproject.yangutils.datamodel.YangNode;
import org.onosproject.yangutils.datamodel.YangNodeIdentifier;
import org.onosproject.yangutils.datamodel.YangOutput;
import org.onosproject.yangutils.datamodel.YangSubModule;
import org.onosproject.yangutils.datamodel.YangTypeDef;
import org.onosproject.yangutils.datamodel.YangUses;
import org.onosproject.yangutils.linker.exceptions.LinkerException;
import static org.onosproject.yangutils.linker.impl.PrefixResolverType.INTER_TO_INTER;
import static org.onosproject.yangutils.linker.impl.PrefixResolverType.INTER_TO_INTRA;
import static org.onosproject.yangutils.linker.impl.PrefixResolverType.INTRA_TO_INTER;
import static org.onosproject.yangutils.linker.impl.PrefixResolverType.NO_PREFIX_CHANGE_FOR_INTER;
import static org.onosproject.yangutils.linker.impl.PrefixResolverType.NO_PREFIX_CHANGE_FOR_INTRA;
import static org.onosproject.yangutils.utils.UtilConstants.INPUT;
import static org.onosproject.yangutils.utils.UtilConstants.OUTPUT;
/**
* Represents x-path linking.
*
* @param <T> x-path linking can be done for target node or for target leaf/leaf-list
*/
public class YangXpathLinker<T> {

    // Absolute paths of the x-path currently being linked.
    private List<YangAtomicPath> absPaths;
    // Module/submodule node the linking starts from.
    private YangNode rootNode;
    // Classification, per path element, of how the prefix changes.
    private Map<YangAtomicPath, PrefixResolverType> prefixResolverTypes;
    // Prefix of the path element currently being resolved.
    private String curPrefix;

    /**
     * Creates an instance of x-path linker.
     */
    public YangXpathLinker() {
        absPaths = new ArrayList<>();
    }

    /**
     * Returns prefix resolver list.
     *
     * @return prefix resolver list
     */
    private Map<YangAtomicPath, PrefixResolverType> getPrefixResolverTypes() {
        return prefixResolverTypes;
    }

    /**
     * Sets prefix resolver list.
     *
     * @param prefixResolverTypes prefix resolver list.
     */
    private void setPrefixResolverTypes(Map<YangAtomicPath, PrefixResolverType> prefixResolverTypes) {
        this.prefixResolverTypes = prefixResolverTypes;
    }

    /**
     * Adds to the prefix resolver type map.
     *
     * @param type resolver type
     * @param path absolute path
     */
    private void addToPrefixResolverList(PrefixResolverType type, YangAtomicPath path) {
        getPrefixResolverTypes().put(path, type);
    }

    /**
     * Returns list of target nodes paths.
     *
     * @return target nodes paths
     */
    private List<YangAtomicPath> getAbsPaths() {
        return absPaths;
    }

    /**
     * Sets target nodes paths.
     *
     * @param absPaths target nodes paths
     */
    private void setAbsPaths(List<YangAtomicPath> absPaths) {
        this.absPaths = absPaths;
    }

    /**
     * Returns current prefix.
     *
     * @return current prefix
     */
    private String getCurPrefix() {
        return curPrefix;
    }

    /**
     * Sets current prefix.
     *
     * @param curPrefix current prefix
     */
    private void setCurPrefix(String curPrefix) {
        this.curPrefix = curPrefix;
    }

    /**
     * Return root node.
     *
     * @return root Node
     */
    private YangNode getRootNode() {
        return rootNode;
    }

    /**
     * Sets root node.
     *
     * @param rootNode root node
     */
    private void setRootNode(YangNode rootNode) {
        this.rootNode = rootNode;
    }

    /**
     * Adds node to resolved nodes.
     *
     * @param path absolute path
     * @param node resolved node
     */
    private void addToResolvedNodes(YangAtomicPath path, YangNode node) {
        path.setResolvedNode(node);
    }

    /**
     * Returns list of augment nodes.
     *
     * @param node root node
     * @return list of augment nodes
     */
    public List<YangAugment> getListOfYangAugment(YangNode node) {
        node = node.getChild();
        List<YangAugment> augments = new ArrayList<>();
        while (node != null) {
            if (node instanceof YangAugment) {
                augments.add((YangAugment) node);
            }
            node = node.getNextSibling();
        }
        return augments;
    }

    /**
     * Process absolute node path for target leaf.
     *
     * @param atomicPaths atomic path node list
     * @param root root node
     * @param leafref instance of YANG leafref
     * @return linked target node
     */
    T processLeafRefXpathLinking(List<YangAtomicPath> atomicPaths, YangNode root, YangLeafRef leafref) {
        YangNode targetNode;
        setRootNode(root);
        setPrefixResolverTypes(new HashMap<>());
        parsePrefixResolverList(atomicPaths);
        // The last path element names the leaf/leaf-list itself; the elements
        // before it locate the holder node.
        YangAtomicPath leafRefPath = atomicPaths.get(atomicPaths.size() - 1);
        // When leaf-ref path contains only one absolute path.
        if (atomicPaths.size() == 1) {
            targetNode = getTargetNodeWhenSizeIsOne(atomicPaths);
        } else {
            atomicPaths.remove(atomicPaths.size() - 1);
            setAbsPaths(atomicPaths);
            targetNode = parseData(root);
        }
        if (targetNode == null) {
            targetNode = searchInSubModule(root);
        }
        // Invalid path presence in the node list is checked.
        validateInvalidNodesInThePath(leafref);
        if (targetNode != null) {
            YangLeaf targetLeaf = searchReferredLeaf(targetNode, leafRefPath.getNodeIdentifier().getName());
            if (targetLeaf == null) {
                YangLeafList targetLeafList = searchReferredLeafList(targetNode,
                        leafRefPath.getNodeIdentifier().getName());
                if (targetLeafList != null) {
                    return (T) targetLeafList;
                } else {
                    LinkerException linkerException = new LinkerException("YANG file error: Unable to find base " +
                            "leaf/leaf-list for given leafref path "
                            + leafref.getPath());
                    linkerException.setCharPosition(leafref.getCharPosition());
                    linkerException.setLine(leafref.getLineNumber());
                    throw linkerException;
                }
            }
            return (T) targetLeaf;
        }
        return null;
    }

    /**
     * Validates the nodes in the path for any invalid node.
     *
     * @param leafref instance of YANG leafref
     * @throws LinkerException when a path element resolved to a schema-only
     *                         node (grouping/uses/typedef/case/choice)
     */
    private void validateInvalidNodesInThePath(YangLeafRef leafref) {
        for (YangAtomicPath absolutePath : (Iterable<YangAtomicPath>) leafref.getAtomicPath()) {
            YangNode nodeInPath = absolutePath.getResolvedNode();
            if (nodeInPath instanceof YangGrouping || nodeInPath instanceof YangUses
                    || nodeInPath instanceof YangTypeDef || nodeInPath instanceof YangCase
                    || nodeInPath instanceof YangChoice) {
                LinkerException linkerException = new LinkerException("YANG file error: The target node, in the " +
                        "leafref path " + leafref.getPath() + ", is invalid.");
                linkerException.setCharPosition(leafref.getCharPosition());
                linkerException.setLine(leafref.getLineNumber());
                throw linkerException;
            }
        }
    }

    /**
     * Returns target node when leaf-ref has only one absolute path in list.
     *
     * @param absPaths absolute paths
     * @return target node
     */
    private YangNode getTargetNodeWhenSizeIsOne(List<YangAtomicPath> absPaths) {
        // A foreign prefix means the holder is the imported module's root.
        if (absPaths.get(0).getNodeIdentifier().getPrefix() != null
                && !absPaths.get(0).getNodeIdentifier().getPrefix().equals(getRootsPrefix(getRootNode()))) {
            return getImportedNode(getRootNode(), absPaths.get(0).getNodeIdentifier());
        }
        return getRootNode();
    }

    /**
     * Process absolute node path linking for augment.
     *
     * @param absPaths absolute path node list
     * @param root root node
     * @return linked target node
     */
    public YangNode processAugmentXpathLinking(List<YangAtomicPath> absPaths, YangNode root) {
        setAbsPaths(absPaths);
        setRootNode(root);
        setPrefixResolverTypes(new HashMap<>());
        parsePrefixResolverList(absPaths);
        YangNode targetNode = parseData(root);
        if (targetNode == null) {
            targetNode = searchInSubModule(root);
        }
        return targetNode;
    }

    /**
     * Searches for the referred leaf in target node.
     *
     * @param targetNode target node
     * @param leafName leaf name
     * @return target leaf, or null when no leaf of that name exists
     * @throws LinkerException when the target node cannot hold leaves
     */
    private YangLeaf searchReferredLeaf(YangNode targetNode, String leafName) {
        if (!(targetNode instanceof YangLeavesHolder)) {
            // Message fixed: was "Refered node X" + "should be ..." which ran
            // the node name into the next word.
            throw new LinkerException("Referred node " + targetNode.getName() +
                    " should be of type leaves holder");
        }
        YangLeavesHolder holder = (YangLeavesHolder) targetNode;
        List<YangLeaf> leaves = holder.getListOfLeaf();
        if (leaves != null && !leaves.isEmpty()) {
            for (YangLeaf leaf : leaves) {
                if (leaf.getName().equals(leafName)) {
                    return leaf;
                }
            }
        }
        return null;
    }

    /**
     * Searches for the referred leaf-list in target node.
     *
     * @param targetNode target node
     * @param leafListName leaf-list name
     * @return target leaf-list, or null when no leaf-list of that name exists
     * @throws LinkerException when the target node cannot hold leaves
     */
    private YangLeafList searchReferredLeafList(YangNode targetNode, String leafListName) {
        if (!(targetNode instanceof YangLeavesHolder)) {
            // Message fixed: see searchReferredLeaf.
            throw new LinkerException("Referred node " + targetNode.getName() +
                    " should be of type leaves holder");
        }
        YangLeavesHolder holder = (YangLeavesHolder) targetNode;
        List<YangLeafList> leavesList = holder.getListOfLeafList();
        if (leavesList != null && !leavesList.isEmpty()) {
            for (YangLeafList leafList : leavesList) {
                if (leafList.getName().equals(leafListName)) {
                    return leafList;
                }
            }
        }
        return null;
    }

    /**
     * Process linking using for node identifier for inter/intra file.
     *
     * @param root root node
     * @return linked target node
     */
    private YangNode parseData(YangNode root) {
        String rootPrefix = getRootsPrefix(root);
        Iterator<YangAtomicPath> pathIterator = getAbsPaths().iterator();
        YangAtomicPath path = pathIterator.next();
        // A foreign prefix on the first element redirects parsing to the
        // imported module; otherwise parse from this root.
        if (path.getNodeIdentifier().getPrefix() != null
                && !path.getNodeIdentifier().getPrefix().equals(rootPrefix)) {
            return parsePath(getImportedNode(root, path.getNodeIdentifier()));
        } else {
            return parsePath(root);
        }
    }

    /**
     * Process linking of target node in root node.
     *
     * @param root root node
     * @return linked target node
     */
    private YangNode parsePath(YangNode root) {
        YangNode tempNode = root;
        // Stack of nodes to fall back to when an augment detour fails.
        Stack<YangNode> linkerStack = new Stack<>();
        Iterator<YangAtomicPath> pathIterator = getAbsPaths().iterator();
        YangAtomicPath tempPath = pathIterator.next();
        setCurPrefix(tempPath.getNodeIdentifier().getPrefix());
        int index = 0;
        YangNode tempAugment;
        do {
            if (tempPath.getNodeIdentifier().getPrefix() == null) {
                tempAugment = resolveIntraFileAugment(tempPath, root);
            } else {
                tempAugment = resolveInterFileAugment(tempPath, root);
            }
            if (tempAugment != null) {
                linkerStack.push(tempNode);
                tempNode = tempAugment;
            }
            tempNode = searchTargetNode(tempNode, tempPath.getNodeIdentifier());
            // If the augment detour did not contain the element, retry from
            // the node we were at before the detour.
            if (tempNode == null && linkerStack.size() != 0) {
                tempNode = linkerStack.peek();
                linkerStack.pop();
                tempNode = searchTargetNode(tempNode, tempPath.getNodeIdentifier());
            }
            if (tempNode != null) {
                addToResolvedNodes(tempPath, tempNode);
            }
            if (index == getAbsPaths().size() - 1) {
                break;
            }
            tempPath = pathIterator.next();
            index++;
        } while (validate(tempNode, index));
        return tempNode;
    }

    /**
     * Resolves intra file augment linking.
     *
     * @param tempPath temporary absolute path
     * @param root root node
     * @return linked target node
     */
    private YangNode resolveIntraFileAugment(YangAtomicPath tempPath, YangNode root) {
        YangNode tempAugment;
        // Compare prefixes by value, null-safe. The previous code used
        // reference comparison (!=), which only behaved correctly because
        // intra-file path elements always carry a null prefix.
        String newPrefix = tempPath.getNodeIdentifier().getPrefix();
        boolean prefixUnchanged = getCurPrefix() == null ? newPrefix == null
                : getCurPrefix().equals(newPrefix);
        if (!prefixUnchanged) {
            // Prefix changed: the target may live in an included submodule.
            root = getIncludedNode(getRootNode(), tempPath.getNodeIdentifier().getName());
            if (root == null) {
                root = getIncludedNode(getRootNode(),
                        getAugmentNodeIdentifier(tempPath.getNodeIdentifier(), getAbsPaths(),
                                getRootNode()));
                if (root == null) {
                    root = getRootNode();
                }
            }
        } else {
            if (getCurPrefix() != null) {
                root = getImportedNode(root, tempPath.getNodeIdentifier());
            }
        }
        setCurPrefix(newPrefix);
        tempAugment = getAugment(tempPath.getNodeIdentifier(), root, getAbsPaths());
        if (tempAugment == null) {
            tempAugment = getAugment(tempPath.getNodeIdentifier(), getRootNode(), getAbsPaths());
        }
        return tempAugment;
    }

    /**
     * Resolves inter file augment linking.
     *
     * @param tempPath temporary absolute path
     * @param root root node
     * @return linked target node
     */
    private YangNode resolveInterFileAugment(YangAtomicPath tempPath, YangNode root) {
        YangNode tempAugment;
        if (!tempPath.getNodeIdentifier().getPrefix().equals(getCurPrefix())) {
            setCurPrefix(tempPath.getNodeIdentifier().getPrefix());
            root = getImportedNode(getRootNode(), tempPath.getNodeIdentifier());
        }
        tempAugment = getAugment(tempPath.getNodeIdentifier(), root, getAbsPaths());
        if (tempAugment == null) {
            return resolveInterToInterFileAugment(root);
        }
        return tempAugment;
    }

    /**
     * Resolves augment when prefix changed from inter file to inter file.
     * it may be possible that the prefix used in imported module is different the
     * given list of node identifiers.
     *
     * @param root root node
     * @return target node
     */
    private YangNode resolveInterToInterFileAugment(YangNode root) {
        List<YangAugment> augments = getListOfYangAugment(root);
        int index;
        // Renamed from "absPaths" so it no longer shadows the instance field.
        List<YangAtomicPath> matchedPaths = new ArrayList<>();
        for (YangAugment augment : augments) {
            index = 0;
            for (YangAtomicPath path : augment.getTargetNode()) {
                if (!searchForAugmentInImportedNode(path.getNodeIdentifier(), index)) {
                    matchedPaths.clear();
                    break;
                }
                matchedPaths.add(path);
                index++;
            }
            // The augment matches when every element except the final target
            // leaf element lines up with our path list.
            if (!matchedPaths.isEmpty() && matchedPaths.size() == getAbsPaths().size() - 1) {
                return augment;
            } else {
                matchedPaths.clear();
            }
        }
        return null;
    }

    /**
     * Searches for the augment node in imported module when prefix has changed from
     * inter file to inter file.
     *
     * @param nodeId node id
     * @param index index
     * @return true if found
     */
    private boolean searchForAugmentInImportedNode(YangNodeIdentifier nodeId, int index) {
        YangNodeIdentifier tempNodeId = getAbsPaths().get(index).getNodeIdentifier();
        return nodeId.getName().equals(tempNodeId.getName());
    }

    /**
     * Returns augment node.
     *
     * @param tempNodeId temporary absolute path id
     * @param root root node
     * @param absPaths absolute paths used to build the augment's name
     * @return linked target node
     */
    private YangNode getAugment(YangNodeIdentifier tempNodeId, YangNode root, List<YangAtomicPath> absPaths) {
        String augmentName = getAugmentNodeIdentifier(tempNodeId, absPaths, root);
        if (augmentName != null) {
            return searchAugmentNode(root, augmentName);
        }
        return null;
    }

    /**
     * Process linking using import list.
     *
     * @param root root node
     * @param nodeId node identifier
     * @return linked target node, or root itself when no import matches
     */
    private YangNode getImportedNode(YangNode root, YangNodeIdentifier nodeId) {
        List<YangImport> importList;
        if (root instanceof YangModule) {
            importList = ((YangModule) root).getImportList();
        } else {
            importList = ((YangSubModule) root).getImportList();
        }
        for (YangImport imported : importList) {
            if (imported.getPrefixId().equals(nodeId.getPrefix())) {
                return imported.getImportedNode();
            }
        }
        return root;
    }

    /**
     * Searches in sub-module node.
     *
     * @param root root node
     * @return target linked node, or null when no include resolves the path
     */
    private YangNode searchInSubModule(YangNode root) {
        List<YangInclude> includeList;
        YangNode tempNode;
        if (root instanceof YangModule) {
            includeList = ((YangModule) root).getIncludeList();
        } else {
            includeList = ((YangSubModule) root).getIncludeList();
        }
        for (YangInclude included : includeList) {
            tempNode = parseData(included.getIncludedNode());
            if (tempNode != null) {
                return tempNode;
            }
        }
        return null;
    }

    /**
     * Process linking using include list.
     *
     * @param root root node
     * @param tempPathName temporary path node name
     * @return linked target node, or null when no included submodule has it
     */
    private YangNode getIncludedNode(YangNode root, String tempPathName) {
        List<YangInclude> includeList;
        if (root instanceof YangModule) {
            includeList = ((YangModule) root).getIncludeList();
        } else {
            includeList = ((YangSubModule) root).getIncludeList();
        }
        for (YangInclude included : includeList) {
            if (verifyChildNode(included.getIncludedNode(), tempPathName)) {
                return included.getIncludedNode();
            }
        }
        return null;
    }

    /**
     * Verifies for child nodes in sub module.
     *
     * @param node submodule node
     * @param name name of child node
     * @return true if child node found
     */
    private boolean verifyChildNode(YangNode node, String name) {
        node = node.getChild();
        while (node != null) {
            if (node.getName().equals(name)) {
                return true;
            }
            node = node.getNextSibling();
        }
        return false;
    }

    /**
     * Returns augment's node id.
     *
     * @param nodeId node identifier
     * @param absPaths absolute paths
     * @param root root node
     * @return augment's node id, or null when nodeId is not on the path
     */
    private String getAugmentNodeIdentifier(YangNodeIdentifier nodeId, List<YangAtomicPath> absPaths, YangNode root) {
        Iterator<YangAtomicPath> nodeIdIterator = absPaths.iterator();
        YangAtomicPath tempNodeId;
        StringBuilder builder = new StringBuilder();
        String id;
        PrefixResolverType type;
        // Rebuild the augment's name ("/a/prefix:b/...") element by element,
        // prefixing each element only when its resolver type requires it.
        while (nodeIdIterator.hasNext()) {
            tempNodeId = nodeIdIterator.next();
            if (!tempNodeId.getNodeIdentifier().equals(nodeId)) {
                type = getPrefixResolverTypes().get(tempNodeId);
                switch (type) {
                    case INTER_TO_INTRA:
                        id = "/" + tempNodeId.getNodeIdentifier().getName();
                        break;
                    case INTRA_TO_INTER:
                        if (!getRootsPrefix(root).equals(tempNodeId.getNodeIdentifier().getPrefix())) {
                            id = "/" + tempNodeId.getNodeIdentifier().getPrefix() + ":" + tempNodeId.getNodeIdentifier()
                                    .getName();
                        } else {
                            id = "/" + tempNodeId.getNodeIdentifier().getName();
                        }
                        break;
                    case INTER_TO_INTER:
                        id = "/" + tempNodeId.getNodeIdentifier().getPrefix() + ":" + tempNodeId.getNodeIdentifier()
                                .getName();
                        break;
                    case NO_PREFIX_CHANGE_FOR_INTRA:
                        id = "/" + tempNodeId.getNodeIdentifier().getName();
                        break;
                    case NO_PREFIX_CHANGE_FOR_INTER:
                        if (!getRootsPrefix(root).equals(tempNodeId.getNodeIdentifier().getPrefix())) {
                            id = "/" + tempNodeId.getNodeIdentifier().getPrefix() + ":" + tempNodeId.getNodeIdentifier()
                                    .getName();
                        } else {
                            id = "/" + tempNodeId.getNodeIdentifier().getName();
                        }
                        break;
                    default:
                        id = "/" + tempNodeId.getNodeIdentifier().getName();
                        break;
                }
                builder.append(id);
            } else {
                return builder.toString();
            }
        }
        return null;
    }

    /**
     * Searches augment node in root node.
     *
     * @param node root node
     * @param tempNodeId node identifier
     * @return target augment node, or null when not found
     */
    private YangNode searchAugmentNode(YangNode node, String tempNodeId) {
        node = node.getChild();
        while (node != null) {
            if (node instanceof YangAugment) {
                if (node.getName().equals(tempNodeId)) {
                    return node;
                }
            }
            node = node.getNextSibling();
        }
        return null;
    }

    /**
     * Validates for target node if target node found or not.
     *
     * @param tempNode temporary node
     * @param index current index of list
     * @return true when parsing should continue, false when it must stop
     */
    private boolean validate(YangNode tempNode, int index) {
        int size = getAbsPaths().size();
        if (tempNode != null && index != size) {
            return true;
        } else if (tempNode != null) {
            return false;
            // this is your target node.
        } else if (index != size) {
            return true;
            // this could be in submodule as well.
        }
        return false;
    }

    /**
     * Searches target node in root node.
     *
     * @param node root node
     * @param curNodeId YANG node identifier
     * @return linked target node, or null when no child matches
     */
    private YangNode searchTargetNode(YangNode node, YangNodeIdentifier curNodeId) {
        if (node != null) {
            node = node.getChild();
        }
        while (node != null) {
            // "input"/"output" elements match the implicit rpc child nodes
            // case-insensitively.
            if (node instanceof YangInput) {
                if (curNodeId.getName().equalsIgnoreCase(INPUT)) {
                    return node;
                }
            } else if (node instanceof YangOutput) {
                if (curNodeId.getName().equalsIgnoreCase(OUTPUT)) {
                    return node;
                }
            }
            if (node.getName().equals(curNodeId.getName())) {
                return node;
            }
            node = node.getNextSibling();
        }
        return null;
    }

    /**
     * Returns root prefix.
     *
     * @param root root node
     * @return root prefix
     */
    private String getRootsPrefix(YangNode root) {
        if (root instanceof YangModule) {
            return ((YangModule) root).getPrefix();
        } else {
            return ((YangSubModule) root).getPrefix();
        }
    }

    /**
     * Resolves prefix and provides prefix resolver list.
     *
     * @param absolutePaths absolute paths
     */
    private void parsePrefixResolverList(List<YangAtomicPath> absolutePaths) {
        Iterator<YangAtomicPath> pathIterator = absolutePaths.iterator();
        YangAtomicPath absPath;
        String prePrefix;
        String curPrefix = null;
        // Classify each element by comparing its prefix with the previous
        // element's prefix (null prefix = intra-file).
        while (pathIterator.hasNext()) {
            prePrefix = curPrefix;
            absPath = pathIterator.next();
            curPrefix = absPath.getNodeIdentifier().getPrefix();
            if (curPrefix != null) {
                if (!curPrefix.equals(prePrefix)) {
                    if (prePrefix != null) {
                        addToPrefixResolverList(INTER_TO_INTER, absPath);
                    } else {
                        addToPrefixResolverList(INTRA_TO_INTER, absPath);
                    }
                } else {
                    addToPrefixResolverList(NO_PREFIX_CHANGE_FOR_INTER, absPath);
                }
            } else {
                if (prePrefix != null) {
                    addToPrefixResolverList(INTER_TO_INTRA, absPath);
                } else {
                    addToPrefixResolverList(NO_PREFIX_CHANGE_FOR_INTRA, absPath);
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.connectors.kafka;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.CatalogTableImpl;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.types.Row;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
/**
* Basic Tests for Kafka connector for Table API & SQL.
*/
public abstract class KafkaTableTestBase extends KafkaTestBase {
/**
 * Returns the Kafka connector version string that concrete subclasses
 * supply for the "connector.version" table property (the commented DDL
 * below uses "universal").
 *
 * @return Kafka connector version identifier
 */
public abstract String kafkaVersion();
@Test
public void testKafkaSourceSink() throws Exception {
final String topic = "tstopic";
createTestTopic(topic, 1, 1);
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tEnv = StreamTableEnvironment.create(
env,
EnvironmentSettings.newInstance()
// watermark is only supported in blink planner
.useBlinkPlanner()
.inStreamingMode()
.build()
);
env.getConfig().setRestartStrategy(RestartStrategies.noRestart());
env.setParallelism(1);
// ---------- Produce an event time stream into Kafka -------------------
String groupId = standardProps.getProperty("group.id");
String zk = standardProps.getProperty("zookeeper.connect");
String bootstraps = standardProps.getProperty("bootstrap.servers");
// TODO: use DDL to register Kafka once FLINK-15282 is fixed.
// we have to register into Catalog manually because it will use Calcite's ParameterScope
TableSchema schema = TableSchema.builder()
.field("computed-price", DataTypes.DECIMAL(38, 18), "price + 1.0")
.field("price", DataTypes.DECIMAL(38, 18))
.field("currency", DataTypes.STRING())
.field("log_ts", DataTypes.TIMESTAMP(3))
.field("ts", DataTypes.TIMESTAMP(3), "log_ts + INTERVAL '1' SECOND")
.watermark("ts", "ts", DataTypes.TIMESTAMP(3))
.build();
Map<String, String> properties = new HashMap<>();
properties.put("connector.type", "kafka");
properties.put("connector.topic", topic);
properties.put("connector.version", kafkaVersion());
properties.put("connector.properties.zookeeper.connect", zk);
properties.put("connector.properties.bootstrap.servers", bootstraps);
properties.put("connector.properties.group.id", groupId);
properties.put("connector.startup-mode", "earliest-offset");
properties.put("format.type", "json");
properties.put("update-mode", "append");
CatalogTableImpl catalogTable = new CatalogTableImpl(
schema,
properties,
"comment"
);
tEnv.getCatalog(tEnv.getCurrentCatalog()).get().createTable(
ObjectPath.fromString(tEnv.getCurrentDatabase() + "." + "kafka"),
catalogTable,
true);
// TODO: use the following DDL instead of the preceding code to register Kafka
// String ddl = "CREATE TABLE kafka (\n" +
// " computed-price as price + 1.0,\n" +
// " price DECIMAL(38, 18),\n" +
// " currency STRING,\n" +
// " log_ts TIMESTAMP(3),\n" +
// " ts AS log_ts + INTERVAL '1' SECOND,\n" +
// " WATERMARK FOR ts AS ts\n" +
// ") with (\n" +
// " 'connector.type' = 'kafka',\n" +
// " 'connector.topic' = '" + topic + "',\n" +
// " 'connector.version' = 'universal',\n" +
// " 'connector.properties.zookeeper.connect' = '" + zk + "',\n" +
// " 'connector.properties.bootstrap.servers' = '" + bootstraps + "',\n" +
// " 'connector.properties.group.id' = '" + groupId + "', \n" +
// " 'connector.startup-mode' = 'earliest-offset', \n" +
// " 'format.type' = 'json',\n" +
// " 'update-mode' = 'append'\n" +
// ")";
// tEnv.sqlUpdate(ddl);
String initialValues = "INSERT INTO kafka\n" +
"SELECT CAST(price AS DECIMAL(10, 2)), currency, CAST(ts AS TIMESTAMP(3))\n" +
"FROM (VALUES (2.02,'Euro','2019-12-12 00:00:00.001001'), \n" +
" (1.11,'US Dollar','2019-12-12 00:00:01.002001'), \n" +
" (50,'Yen','2019-12-12 00:00:03.004001'), \n" +
" (3.1,'Euro','2019-12-12 00:00:04.005001'), \n" +
" (5.33,'US Dollar','2019-12-12 00:00:05.006001'), \n" +
" (0,'DUMMY','2019-12-12 00:00:10'))\n" +
" AS orders (price, currency, ts)";
tEnv.sqlUpdate(initialValues);
tEnv.execute("Job_1");
// ---------- Consume stream from Kafka -------------------
String query = "SELECT\n" +
" CAST(TUMBLE_END(ts, INTERVAL '5' SECOND) AS VARCHAR),\n" +
" CAST(MAX(ts) AS VARCHAR),\n" +
" COUNT(*),\n" +
" CAST(MAX(price) AS DECIMAL(10, 2))\n" +
"FROM kafka\n" +
"GROUP BY TUMBLE(ts, INTERVAL '5' SECOND)";
DataStream<Row> result = tEnv.toAppendStream(tEnv.sqlQuery(query), Row.class);
TestingSinkFunction sink = new TestingSinkFunction(2);
result.addSink(sink).setParallelism(1);
try {
tEnv.execute("Job_2");
} catch (Throwable e) {
// we have to use a specific exception to indicate the job is finished,
// because the registered Kafka source is infinite.
if (!isCausedByJobFinished(e)) {
// re-throw
throw e;
}
}
List<String> expected = Arrays.asList(
"2019-12-12 00:00:05.000,2019-12-12 00:00:04.004,3,50.00",
"2019-12-12 00:00:10.000,2019-12-12 00:00:06.006,2,5.33");
assertEquals(expected, TestingSinkFunction.rows);
// ------------- cleanup -------------------
deleteTestTopic(topic);
}
private static final class TestingSinkFunction implements SinkFunction<Row> {
private static final long serialVersionUID = 455430015321124493L;
private static List<String> rows = new ArrayList<>();
private final int expectedSize;
private TestingSinkFunction(int expectedSize) {
this.expectedSize = expectedSize;
rows.clear();
}
@Override
public void invoke(Row value, Context context) throws Exception {
rows.add(value.toString());
if (rows.size() >= expectedSize) {
// job finish
throw new JobFinishedException("All records are received, job is finished.");
}
}
}
private static final class JobFinishedException extends RuntimeException {
private static final long serialVersionUID = -4684689851069516182L;
private JobFinishedException(String message) {
super(message);
}
}
private static boolean isCausedByJobFinished(Throwable e) {
if (e instanceof JobFinishedException) {
return true;
} else if (e.getCause() != null) {
return isCausedByJobFinished(e.getCause());
} else {
return false;
}
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kendra.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kendra-2019-02-03/DescribeFaq" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeFaqResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* The identifier of the FAQ.
* </p>
*/
private String id;
/**
* <p>
* The identifier of the index that contains the FAQ.
* </p>
*/
private String indexId;
/**
* <p>
* The name that you gave the FAQ when it was created.
* </p>
*/
private String name;
/**
* <p>
* The description of the FAQ that you provided when it was created.
* </p>
*/
private String description;
/**
* <p>
* The date and time that the FAQ was created.
* </p>
*/
private java.util.Date createdAt;
/**
* <p>
* The date and time that the FAQ was last updated.
* </p>
*/
private java.util.Date updatedAt;
private S3Path s3Path;
/**
* <p>
* The status of the FAQ. It is ready to use when the status is <code>ACTIVE</code>.
* </p>
*/
private String status;
/**
* <p>
* The Amazon Resource Name (ARN) of the role that provides access to the S3 bucket containing the input files for
* the FAQ.
* </p>
*/
private String roleArn;
/**
* <p>
* If the <code>Status</code> field is <code>FAILED</code>, the <code>ErrorMessage</code> field contains the reason
* why the FAQ failed.
* </p>
*/
private String errorMessage;
/**
* <p>
* The file format used by the input files for the FAQ.
* </p>
*/
private String fileFormat;
/**
* <p>
* The code for a language. This shows a supported language for the FAQ document. English is supported by default.
* For more information on supported languages, including their codes, see <a
* href="https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html">Adding documents in languages other
* than English</a>.
* </p>
*/
private String languageCode;
/**
* <p>
* The identifier of the FAQ.
* </p>
*
* @param id
* The identifier of the FAQ.
*/
public void setId(String id) {
this.id = id;
}
/**
* <p>
* The identifier of the FAQ.
* </p>
*
* @return The identifier of the FAQ.
*/
public String getId() {
return this.id;
}
/**
* <p>
* The identifier of the FAQ.
* </p>
*
* @param id
* The identifier of the FAQ.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withId(String id) {
setId(id);
return this;
}
/**
* <p>
* The identifier of the index that contains the FAQ.
* </p>
*
* @param indexId
* The identifier of the index that contains the FAQ.
*/
public void setIndexId(String indexId) {
this.indexId = indexId;
}
/**
* <p>
* The identifier of the index that contains the FAQ.
* </p>
*
* @return The identifier of the index that contains the FAQ.
*/
public String getIndexId() {
return this.indexId;
}
/**
* <p>
* The identifier of the index that contains the FAQ.
* </p>
*
* @param indexId
* The identifier of the index that contains the FAQ.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withIndexId(String indexId) {
setIndexId(indexId);
return this;
}
/**
* <p>
* The name that you gave the FAQ when it was created.
* </p>
*
* @param name
* The name that you gave the FAQ when it was created.
*/
public void setName(String name) {
this.name = name;
}
/**
* <p>
* The name that you gave the FAQ when it was created.
* </p>
*
* @return The name that you gave the FAQ when it was created.
*/
public String getName() {
return this.name;
}
/**
* <p>
* The name that you gave the FAQ when it was created.
* </p>
*
* @param name
* The name that you gave the FAQ when it was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withName(String name) {
setName(name);
return this;
}
/**
* <p>
* The description of the FAQ that you provided when it was created.
* </p>
*
* @param description
* The description of the FAQ that you provided when it was created.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* The description of the FAQ that you provided when it was created.
* </p>
*
* @return The description of the FAQ that you provided when it was created.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* The description of the FAQ that you provided when it was created.
* </p>
*
* @param description
* The description of the FAQ that you provided when it was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
* The date and time that the FAQ was created.
* </p>
*
* @param createdAt
* The date and time that the FAQ was created.
*/
public void setCreatedAt(java.util.Date createdAt) {
this.createdAt = createdAt;
}
/**
* <p>
* The date and time that the FAQ was created.
* </p>
*
* @return The date and time that the FAQ was created.
*/
public java.util.Date getCreatedAt() {
return this.createdAt;
}
/**
* <p>
* The date and time that the FAQ was created.
* </p>
*
* @param createdAt
* The date and time that the FAQ was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withCreatedAt(java.util.Date createdAt) {
setCreatedAt(createdAt);
return this;
}
/**
* <p>
* The date and time that the FAQ was last updated.
* </p>
*
* @param updatedAt
* The date and time that the FAQ was last updated.
*/
public void setUpdatedAt(java.util.Date updatedAt) {
this.updatedAt = updatedAt;
}
/**
* <p>
* The date and time that the FAQ was last updated.
* </p>
*
* @return The date and time that the FAQ was last updated.
*/
public java.util.Date getUpdatedAt() {
return this.updatedAt;
}
/**
* <p>
* The date and time that the FAQ was last updated.
* </p>
*
* @param updatedAt
* The date and time that the FAQ was last updated.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withUpdatedAt(java.util.Date updatedAt) {
setUpdatedAt(updatedAt);
return this;
}
/**
* @param s3Path
*/
public void setS3Path(S3Path s3Path) {
this.s3Path = s3Path;
}
/**
* @return
*/
public S3Path getS3Path() {
return this.s3Path;
}
/**
* @param s3Path
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withS3Path(S3Path s3Path) {
setS3Path(s3Path);
return this;
}
/**
* <p>
* The status of the FAQ. It is ready to use when the status is <code>ACTIVE</code>.
* </p>
*
* @param status
* The status of the FAQ. It is ready to use when the status is <code>ACTIVE</code>.
* @see FaqStatus
*/
public void setStatus(String status) {
this.status = status;
}
/**
* <p>
* The status of the FAQ. It is ready to use when the status is <code>ACTIVE</code>.
* </p>
*
* @return The status of the FAQ. It is ready to use when the status is <code>ACTIVE</code>.
* @see FaqStatus
*/
public String getStatus() {
return this.status;
}
/**
* <p>
* The status of the FAQ. It is ready to use when the status is <code>ACTIVE</code>.
* </p>
*
* @param status
* The status of the FAQ. It is ready to use when the status is <code>ACTIVE</code>.
* @return Returns a reference to this object so that method calls can be chained together.
* @see FaqStatus
*/
public DescribeFaqResult withStatus(String status) {
setStatus(status);
return this;
}
/**
* <p>
* The status of the FAQ. It is ready to use when the status is <code>ACTIVE</code>.
* </p>
*
* @param status
* The status of the FAQ. It is ready to use when the status is <code>ACTIVE</code>.
* @return Returns a reference to this object so that method calls can be chained together.
* @see FaqStatus
*/
public DescribeFaqResult withStatus(FaqStatus status) {
this.status = status.toString();
return this;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the role that provides access to the S3 bucket containing the input files for
* the FAQ.
* </p>
*
* @param roleArn
* The Amazon Resource Name (ARN) of the role that provides access to the S3 bucket containing the input
* files for the FAQ.
*/
public void setRoleArn(String roleArn) {
this.roleArn = roleArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the role that provides access to the S3 bucket containing the input files for
* the FAQ.
* </p>
*
* @return The Amazon Resource Name (ARN) of the role that provides access to the S3 bucket containing the input
* files for the FAQ.
*/
public String getRoleArn() {
return this.roleArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the role that provides access to the S3 bucket containing the input files for
* the FAQ.
* </p>
*
* @param roleArn
* The Amazon Resource Name (ARN) of the role that provides access to the S3 bucket containing the input
* files for the FAQ.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withRoleArn(String roleArn) {
setRoleArn(roleArn);
return this;
}
/**
* <p>
* If the <code>Status</code> field is <code>FAILED</code>, the <code>ErrorMessage</code> field contains the reason
* why the FAQ failed.
* </p>
*
* @param errorMessage
* If the <code>Status</code> field is <code>FAILED</code>, the <code>ErrorMessage</code> field contains the
* reason why the FAQ failed.
*/
public void setErrorMessage(String errorMessage) {
this.errorMessage = errorMessage;
}
/**
* <p>
* If the <code>Status</code> field is <code>FAILED</code>, the <code>ErrorMessage</code> field contains the reason
* why the FAQ failed.
* </p>
*
* @return If the <code>Status</code> field is <code>FAILED</code>, the <code>ErrorMessage</code> field contains the
* reason why the FAQ failed.
*/
public String getErrorMessage() {
return this.errorMessage;
}
/**
* <p>
* If the <code>Status</code> field is <code>FAILED</code>, the <code>ErrorMessage</code> field contains the reason
* why the FAQ failed.
* </p>
*
* @param errorMessage
* If the <code>Status</code> field is <code>FAILED</code>, the <code>ErrorMessage</code> field contains the
* reason why the FAQ failed.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withErrorMessage(String errorMessage) {
setErrorMessage(errorMessage);
return this;
}
/**
* <p>
* The file format used by the input files for the FAQ.
* </p>
*
* @param fileFormat
* The file format used by the input files for the FAQ.
* @see FaqFileFormat
*/
public void setFileFormat(String fileFormat) {
this.fileFormat = fileFormat;
}
/**
* <p>
* The file format used by the input files for the FAQ.
* </p>
*
* @return The file format used by the input files for the FAQ.
* @see FaqFileFormat
*/
public String getFileFormat() {
return this.fileFormat;
}
/**
* <p>
* The file format used by the input files for the FAQ.
* </p>
*
* @param fileFormat
* The file format used by the input files for the FAQ.
* @return Returns a reference to this object so that method calls can be chained together.
* @see FaqFileFormat
*/
public DescribeFaqResult withFileFormat(String fileFormat) {
setFileFormat(fileFormat);
return this;
}
/**
* <p>
* The file format used by the input files for the FAQ.
* </p>
*
* @param fileFormat
* The file format used by the input files for the FAQ.
* @return Returns a reference to this object so that method calls can be chained together.
* @see FaqFileFormat
*/
public DescribeFaqResult withFileFormat(FaqFileFormat fileFormat) {
this.fileFormat = fileFormat.toString();
return this;
}
/**
* <p>
* The code for a language. This shows a supported language for the FAQ document. English is supported by default.
* For more information on supported languages, including their codes, see <a
* href="https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html">Adding documents in languages other
* than English</a>.
* </p>
*
* @param languageCode
* The code for a language. This shows a supported language for the FAQ document. English is supported by
* default. For more information on supported languages, including their codes, see <a
* href="https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html">Adding documents in languages
* other than English</a>.
*/
public void setLanguageCode(String languageCode) {
this.languageCode = languageCode;
}
/**
* <p>
* The code for a language. This shows a supported language for the FAQ document. English is supported by default.
* For more information on supported languages, including their codes, see <a
* href="https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html">Adding documents in languages other
* than English</a>.
* </p>
*
* @return The code for a language. This shows a supported language for the FAQ document. English is supported by
* default. For more information on supported languages, including their codes, see <a
* href="https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html">Adding documents in
* languages other than English</a>.
*/
public String getLanguageCode() {
return this.languageCode;
}
/**
* <p>
* The code for a language. This shows a supported language for the FAQ document. English is supported by default.
* For more information on supported languages, including their codes, see <a
* href="https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html">Adding documents in languages other
* than English</a>.
* </p>
*
* @param languageCode
* The code for a language. This shows a supported language for the FAQ document. English is supported by
* default. For more information on supported languages, including their codes, see <a
* href="https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html">Adding documents in languages
* other than English</a>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeFaqResult withLanguageCode(String languageCode) {
setLanguageCode(languageCode);
return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Every field except the final one (LanguageCode) is rendered with a trailing comma,
    // exactly as the hand-unrolled appends did; null fields are omitted entirely.
    String[] labels = { "Id: ", "IndexId: ", "Name: ", "Description: ", "CreatedAt: ", "UpdatedAt: ",
            "S3Path: ", "Status: ", "RoleArn: ", "ErrorMessage: ", "FileFormat: " };
    Object[] values = { getId(), getIndexId(), getName(), getDescription(), getCreatedAt(), getUpdatedAt(),
            getS3Path(), getStatus(), getRoleArn(), getErrorMessage(), getFileFormat() };
    StringBuilder sb = new StringBuilder("{");
    for (int i = 0; i < values.length; i++) {
        if (values[i] != null) {
            sb.append(labels[i]).append(values[i]).append(",");
        }
    }
    if (getLanguageCode() != null) {
        sb.append("LanguageCode: ").append(getLanguageCode());
    }
    return sb.append("}").toString();
}
/**
 * Two results are equal when all twelve attributes are pairwise equal (null matching null).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof DescribeFaqResult)) {
        return false;
    }
    DescribeFaqResult other = (DescribeFaqResult) obj;
    // Objects.equals collapses the original per-field null-XOR + equals pair.
    return java.util.Objects.equals(getId(), other.getId())
            && java.util.Objects.equals(getIndexId(), other.getIndexId())
            && java.util.Objects.equals(getName(), other.getName())
            && java.util.Objects.equals(getDescription(), other.getDescription())
            && java.util.Objects.equals(getCreatedAt(), other.getCreatedAt())
            && java.util.Objects.equals(getUpdatedAt(), other.getUpdatedAt())
            && java.util.Objects.equals(getS3Path(), other.getS3Path())
            && java.util.Objects.equals(getStatus(), other.getStatus())
            && java.util.Objects.equals(getRoleArn(), other.getRoleArn())
            && java.util.Objects.equals(getErrorMessage(), other.getErrorMessage())
            && java.util.Objects.equals(getFileFormat(), other.getFileFormat())
            && java.util.Objects.equals(getLanguageCode(), other.getLanguageCode());
}
/**
 * Hash over all twelve attributes, consistent with {@link #equals(Object)}.
 */
@Override
public int hashCode() {
    // Objects.hash applies the identical 31-based accumulation (seed 1, null -> 0)
    // that the manual prime-multiply loop used, so hash values are unchanged.
    return java.util.Objects.hash(getId(), getIndexId(), getName(), getDescription(), getCreatedAt(),
            getUpdatedAt(), getS3Path(), getStatus(), getRoleArn(), getErrorMessage(), getFileFormat(),
            getLanguageCode());
}
/**
 * Returns a shallow copy of this result via {@link Object#clone()}.
 * The class implements {@link Cloneable}, so the checked exception can never actually
 * occur; it is rewrapped as an unchecked {@link IllegalStateException} to keep the
 * method signature clean.
 */
@Override
public DescribeFaqResult clone() {
    try {
        return (DescribeFaqResult) super.clone();
    } catch (CloneNotSupportedException e) {
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.ec.jap.entiti.saap;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.ec.jap.utilitario.Utilitario;
/**
*
* @author Freddy G Castillo C
*/
@Entity
@Table(name = "cabecera_planilla")
@NamedQueries({
@NamedQuery(name = "CabeceraPlanilla.findConsulta", query = "SELECT c FROM CabeceraPlanilla c INNER JOIN c.idLlave ll inner join ll.idUsuario u INNER JOIN c.idPeriodoPago per WHERE per.estado='CERR' AND ( ll.numero = :filtro OR u.cedula = :filtro) ORDER BY cast(ll.numero,int),c.fechaRegistro,c.observacion DESC"),
@NamedQuery(name = "CabeceraPlanilla.findConAbono", query = "SELECT c FROM CabeceraPlanilla c WHERE c.estado in (:estado,:estado2) AND c.idPeriodoPago.idPeriodoPago=:idPeriodoPago AND c.valorPagadoAbono!=0.0"),
@NamedQuery(name = "CabeceraPlanilla.findSinPagar", query = "SELECT c FROM CabeceraPlanilla c WHERE c.estado in (:estado,:estado2) AND c.idPeriodoPago.idPeriodoPago=:idPeriodoPago "),
@NamedQuery(name = "CabeceraPlanilla.findAbono", query = "SELECT c FROM CabeceraPlanilla c INNER JOIN c.idLlave ll WHERE ll=:llave AND c IN (SELECT MAX(cp) from CabeceraPlanilla cp INNER JOIN cp.idLlave ll WHERE ll=:llave AND cp!=:cp )"),
@NamedQuery(name = "CabeceraPlanilla.findByPerAbiertActFilters", query = "SELECT c FROM CabeceraPlanilla c INNER JOIN c.idLlave ll inner join ll.idUsuario u WHERE c IN (SELECT dp.idCabeceraPlanilla FROM DetallePlanilla dp INNER JOIN dp.idCabeceraPlanilla cabp WHERE cabp.idPeriodoPago.estado=:estado) AND c.idPeriodoPago.estado=:estado AND ( UPPER(c.observacion) like UPPER(CONCAT('%',:filtro,'%')) OR ll.numero like CONCAT('%',:filtro,'%') OR u.cedula like CONCAT('%',:filtro,'%') OR UPPER(u.nombres) like UPPER(CONCAT('%',:filtro,'%')) OR UPPER(u.apellidos) like UPPER(CONCAT('%',:filtro,'%')) ) ORDER BY cast(ll.numero,int), c.fechaRegistro,c.observacion DESC"),
@NamedQuery(name = "CabeceraPlanilla.findNoPag", query = "SELECT c FROM CabeceraPlanilla c WHERE c.estado in (:estado,:estado2) AND c.idPeriodoPago.idPeriodoPago=:idPeriodoPago"),
@NamedQuery(name = "CabeceraPlanilla.findAllNoPag", query = "SELECT c FROM CabeceraPlanilla c WHERE c.estado=:estado AND c.idLlave=:idLlave "),
@NamedQuery(name = "CabeceraPlanilla.findNewUser", query = "SELECT COUNT(c.idCabeceraPlanilla) FROM CabeceraPlanilla c WHERE c.idLlave=:idLlave AND c IN ( SELECT dp.idCabeceraPlanilla FROM DetallePlanilla dp INNER JOIN dp.idLectura l WHERE l.idLlave=:idLlave )"),
@NamedQuery(name = "CabeceraPlanilla.findByFilters", query = "SELECT c FROM CabeceraPlanilla c INNER JOIN c.idLlave ll inner join ll.idUsuario u WHERE ll.numero like CONCAT('%',:filtro,'%') OR u.cedula like CONCAT('%',:filtro,'%') OR u.cedula like CONCAT('%',:filtro,'%') OR UPPER(u.nombres) like UPPER(CONCAT('%',:filtro,'%')) OR UPPER(u.apellidos) like UPPER(CONCAT('%',:filtro,'%')) ORDER BY cast(ll.numero,int),c.fechaRegistro,c.observacion DESC"),
@NamedQuery(name = "CabeceraPlanilla.findAllIngresado", query = "SELECT c FROM CabeceraPlanilla c WHERE c.estado='ING'"),
@NamedQuery(name = "CabeceraPlanilla.findByUsuarioAndEstado", query = "SELECT c FROM CabeceraPlanilla c inner join c.idLlave ll where ll.idLlave=:idLlave AND c.estado=:estado") })
public class CabeceraPlanilla implements Serializable {
private static final long serialVersionUID = 1L;

// Primary key, auto-generated by the database (IDENTITY strategy).
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "id_cabecera_planilla")
private Integer idCabeceraPlanilla;

// Free-form remarks attached to the bill header.
@Size(max = 2147483647)
@Column(name = "observacion")
private String observacion;
@Column(name = "observacion1")
private String observacion1;

// Monetary breakdown of the bill: subtotal, discount, taxable base and final total.
@Basic(optional = false)
@NotNull
@Column(name = "subtotal")
private Double subtotal;
@Basic(optional = false)
@NotNull
@Column(name = "descuento")
private Double descuento;

// Timestamp at which the header was registered.
@NotNull
@Column(name = "fecha_registro")
private Date fechaRegistro;
@Basic(optional = false)
@NotNull
@Column(name = "base")
private Double base;
@Basic(optional = false)
@NotNull
@Column(name = "total")
private Double total;

// Payment tracking amounts; nullable in the schema, read back as 0.0 by their getters.
@Column(name = "valor_pagado")
private Double valorPagado;
@Column(name = "cambio_usd")
private Double cambioUsd;
@Column(name = "abono_usd")
private Double abonoUsd;
@Column(name = "valor_cancelado")
private Double valorCancelado;

// Header state flag (e.g. 'ING' appears in the named queries above).
@Basic(optional = false)
@NotNull
@Size(min = 1, max = 2147483647)
@Column(name = "cp_estado")
private String estado;

// Detail lines belonging to this header (inverse side of DetallePlanilla.idCabeceraPlanilla).
@OneToMany(mappedBy = "idCabeceraPlanilla")
private List<DetallePlanilla> detallePlanillaList;
@JoinColumn(name = "id_llave", referencedColumnName = "id_llave")
@ManyToOne
private Llave idLlave;
@JoinColumn(name = "id_periodo_pago", referencedColumnName = "id_periodo_pago")
@ManyToOne
private PeriodoPago idPeriodoPago;
@Column(name = "fecha_pago")
private Date fechaPago;
@Column(name = "valor_pagado_abono")
private Double valorPagadoAbono;

// Not persisted: fallback value returned by getValorPendiente when the balance
// cannot be computed from total/valorPagado.
@Transient
private Double valorPendiente = 0.0;
/** No-argument constructor required by JPA. */
public CabeceraPlanilla() {
}

/** Creates an instance carrying only its primary key. */
public CabeceraPlanilla(Integer idCabeceraPlanilla) {
    this.idCabeceraPlanilla = idCabeceraPlanilla;
}

/**
 * Creates a fully valued header: primary key plus the monetary breakdown
 * (subtotal, descuento, base, total) and the state flag.
 */
public CabeceraPlanilla(Integer idCabeceraPlanilla, Double subtotal, Double descuento, Double base, Double total, String estado) {
    this.idCabeceraPlanilla = idCabeceraPlanilla;
    this.subtotal = subtotal;
    this.descuento = descuento;
    this.base = base;
    this.total = total;
    this.estado = estado;
}
public Integer getIdCabeceraPlanilla() {
return idCabeceraPlanilla;
}
public void setIdCabeceraPlanilla(Integer idCabeceraPlanilla) {
this.idCabeceraPlanilla = idCabeceraPlanilla;
}
public String getObservacion() {
return observacion;
}
public void setObservacion(String observacion) {
this.observacion = observacion;
}
public Double getSubtotal() {
return subtotal;
}
public void setSubtotal(Double subtotal) {
this.subtotal = subtotal;
}
public Double getDescuento() {
return descuento;
}
public void setDescuento(Double descuento) {
this.descuento = descuento;
}
public Double getBase() {
return base;
}
public void setBase(Double base) {
this.base = base;
}
public Double getTotal() {
return total;
}
public void setTotal(Double total) {
this.total = total;
}
public String getEstado() {
return estado;
}
public void setEstado(String estado) {
this.estado = estado;
}
public List<DetallePlanilla> getDetallePlanillaList() {
return detallePlanillaList;
}
public void setDetallePlanillaList(List<DetallePlanilla> detallePlanillaList) {
this.detallePlanillaList = detallePlanillaList;
}
public Llave getIdLlave() {
return idLlave;
}
public void setIdLlave(Llave idLlave) {
this.idLlave = idLlave;
}
public PeriodoPago getIdPeriodoPago() {
return idPeriodoPago;
}
public void setIdPeriodoPago(PeriodoPago idPeriodoPago) {
this.idPeriodoPago = idPeriodoPago;
}
public Date getFechaRegistro() {
return fechaRegistro;
}
public void setFechaRegistro(Date fechaRegistro) {
this.fechaRegistro = fechaRegistro;
}
/** @return the amount already paid, or 0.0 when no payment is recorded. */
public Double getValorPagado() {
    return valorPagado != null ? valorPagado : 0.0;
}

public void setValorPagado(Double valorPagado) {
    this.valorPagado = valorPagado;
}
public Date getFechaPago() {
    return fechaPago;
}

public void setFechaPago(Date fechaPago) {
    this.fechaPago = fechaPago;
}

/** @return cambioUsd (presumably a USD exchange value — confirm), or 0.0 when unset. */
public Double getCambioUsd() {
    return cambioUsd != null ? cambioUsd : 0.0;
}

public void setCambioUsd(Double cambioUsd) {
    this.cambioUsd = cambioUsd;
}

/** @return abonoUsd (presumably a USD deposit — confirm), or 0.0 when unset. */
public Double getAbonoUsd() {
    return abonoUsd != null ? abonoUsd : 0.0;
}

public void setAbonoUsd(Double abonoUsd) {
    this.abonoUsd = abonoUsd;
}

/** @return the settled amount, or 0.0 when unset. */
public Double getValorCancelado() {
    return valorCancelado != null ? valorCancelado : 0.0;
}

public void setValorCancelado(Double valorCancelado) {
    this.valorCancelado = valorCancelado;
}

/**
 * @return the valorPagadoAbono amount, or 0.0 when unset (null-safe,
 *         matching the other monetary getters of this entity)
 */
public Double getValorPagadoAbono() {
    return valorPagadoAbono != null ? valorPagadoAbono : 0.0;
}

/** Sets valorPagadoAbono. */
public void setValorPagadoAbono(Double valorPagadoAbono) {
    this.valorPagadoAbono = valorPagadoAbono;
}
/**
 * Outstanding balance: {@code total - valorPagado}, rounded via
 * {@code Utilitario.redondear}. Falls back to the stored
 * {@code valorPendiente} field when the computation is impossible.
 *
 * @return the rounded pending amount, or the stored fallback value
 */
public Double getValorPendiente() {
    // The previous implementation relied on a NullPointerException
    // (null total/valorPagado) being swallowed by a blanket catch.
    // Guard explicitly instead; the fallback value is unchanged.
    if (total == null || valorPagado == null) {
        return valorPendiente;
    }
    try {
        return Utilitario.redondear(total - valorPagado);
    } catch (Exception e) {
        // Preserve legacy behavior: any rounding failure returns the
        // stored value rather than propagating.
        return valorPendiente;
    }
}

/** Sets the stored pending amount used as a fallback by the getter. */
public void setValorPendiente(Double valorPendiente) {
    this.valorPendiente = valorPendiente;
}
/** Secondary free-text remark field. */
public String getObservacion1() {
    return observacion1;
}

public void setObservacion1(String observacion1) {
    this.observacion1 = observacion1;
}
/** Hash derived solely from the primary key, mirroring equals(). */
@Override
public int hashCode() {
    return idCabeceraPlanilla == null ? 0 : idCabeceraPlanilla.hashCode();
}
/**
 * Entities are equal when their primary keys are equal. Note: two
 * transient instances (both ids null) compare equal — longstanding
 * caveat of id-based equality for unsaved entities.
 */
@Override
public boolean equals(Object object) {
    if (!(object instanceof CabeceraPlanilla)) {
        return false;
    }
    CabeceraPlanilla other = (CabeceraPlanilla) object;
    if (this.idCabeceraPlanilla == null) {
        return other.idCabeceraPlanilla == null;
    }
    return this.idCabeceraPlanilla.equals(other.idCabeceraPlanilla);
}
/** Short diagnostic representation containing only the primary key. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("org.ec.jap.entiti.CabeceraPlanilla[ idCabeceraPlanilla=");
    sb.append(idCabeceraPlanilla).append(" ]");
    return sb.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper.runtime;
import org.apache.jasper.Constants;
import org.apache.jasper.JasperException;
import org.apache.jasper.compiler.Localizer;
import org.apache.jasper.util.ExceptionUtils;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.jsp.JspWriter;
import javax.servlet.jsp.PageContext;
import javax.servlet.jsp.tagext.BodyContent;
import java.beans.PropertyEditor;
import java.beans.PropertyEditorManager;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Enumeration;
/**
* Bunch of util methods that are used by code generated for useBean,
* getProperty and setProperty.
*
* The __begin, __end stuff is there so that the JSP engine can
* actually parse this file and inline them if people don't want
* runtime dependencies on this class. However, I'm not sure if that
* works so well right now. It got forgotten at some point. -akv
*
* @author Mandar Raje
* @author Shawn Bayern
*/
public class JspRuntimeLibrary {
/**
 * PrivilegedExceptionAction wrapper that lets introspecthelper() run the
 * bean-property assignment inside AccessController.doPrivileged() when a
 * SecurityManager is active.
 */
protected static class PrivilegedIntrospectHelper
        implements PrivilegedExceptionAction<Void> {

    private final Object bean;            // target bean instance
    private final String prop;            // property name to set
    private final String value;           // raw string value to convert
    private final ServletRequest request; // source of indexed parameter values
    private final String param;           // backing request-parameter name
    private final boolean ignoreMethodNF; // suppress "no setter" errors

    PrivilegedIntrospectHelper(Object bean, String prop,
            String value, ServletRequest request,
            String param, boolean ignoreMethodNF)
    {
        this.bean = bean;
        this.prop = prop;
        this.value = value;
        this.request = request;
        this.param = param;
        this.ignoreMethodNF = ignoreMethodNF;
    }

    /** Performs the actual introspection-based assignment. */
    @Override
    public Void run() throws JasperException {
        internalIntrospecthelper(
            bean,prop,value,request,param,ignoreMethodNF);
        return null;
    }
}
/**
 * Returns the value of the javax.servlet.error.exception request
 * attribute value, if present, otherwise the value of the
 * javax.servlet.jsp.jspException request attribute value.
 *
 * This method is called at the beginning of the generated servlet code
 * for a JSP error page, when the "exception" implicit scripting language
 * variable is initialized.
 *
 * @param request the request carrying the error attributes
 * @return the propagated Throwable, or null if neither attribute is set
 */
public static Throwable getThrowable(ServletRequest request) {
    Throwable error = (Throwable) request.getAttribute(
            RequestDispatcher.ERROR_EXCEPTION);
    if (error == null) {
        error = (Throwable) request.getAttribute(PageContext.EXCEPTION);
        if (error != null) {
            /*
             * The only place that sets JSP_EXCEPTION is
             * PageContextImpl.handlePageException(). It really should set
             * SERVLET_EXCEPTION, but that would interfere with the
             * ErrorReportValve. Therefore, if JSP_EXCEPTION is set, we
             * need to set SERVLET_EXCEPTION.
             */
            request.setAttribute(RequestDispatcher.ERROR_EXCEPTION, error);
        }
    }
    return error;
}
// Null/empty-tolerant String-to-primitive coercions used by generated
// setProperty code: a null or empty string coerces to the type's default
// value (false / 0 / '\u0000'); anything else is parsed normally.

public static boolean coerceToBoolean(String s) {
    return (s == null || s.isEmpty()) ? false : Boolean.parseBoolean(s);
}

public static byte coerceToByte(String s) {
    return (s == null || s.isEmpty()) ? (byte) 0 : Byte.parseByte(s);
}

public static char coerceToChar(String s) {
    // Only the first character is significant.
    return (s == null || s.isEmpty()) ? (char) 0 : s.charAt(0);
}

public static double coerceToDouble(String s) {
    return (s == null || s.isEmpty()) ? 0 : Double.parseDouble(s);
}

public static float coerceToFloat(String s) {
    return (s == null || s.isEmpty()) ? 0 : Float.parseFloat(s);
}

public static int coerceToInt(String s) {
    return (s == null || s.isEmpty()) ? 0 : Integer.parseInt(s);
}

public static short coerceToShort(String s) {
    return (s == null || s.isEmpty()) ? (short) 0 : Short.parseShort(s);
}

public static long coerceToLong(String s) {
    return (s == null || s.isEmpty()) ? 0 : Long.parseLong(s);
}
/**
 * Coerces a string to the requested wrapper type, mapping null/empty
 * input to that type's default value (false, 0, '\u0000'). Supports
 * only the primitive wrapper classes; any other target yields null.
 *
 * @param s      the raw string value (may be null)
 * @param target the wrapper class to coerce to
 * @return the coerced wrapper, or null for unsupported target types
 */
public static Object coerce(String s, Class<?> target) {
    boolean isNullOrEmpty = (s == null || s.length() == 0);
    if (target == Boolean.class) {
        if (isNullOrEmpty) {
            s = "false";
        }
        return Boolean.valueOf(s);
    } else if (target == Byte.class) {
        if (isNullOrEmpty)
            return Byte.valueOf((byte) 0);
        else
            return Byte.valueOf(s);
    } else if (target == Character.class) {
        if (isNullOrEmpty)
            return Character.valueOf((char) 0);
        else {
            // s is provably non-null on this branch
            @SuppressWarnings("null")
            Character result = Character.valueOf(s.charAt(0));
            return result;
        }
    } else if (target == Double.class) {
        if (isNullOrEmpty)
            return Double.valueOf(0);
        else
            return Double.valueOf(s);
    } else if (target == Float.class) {
        if (isNullOrEmpty)
            return Float.valueOf(0);
        else
            return Float.valueOf(s);
    } else if (target == Integer.class) {
        if (isNullOrEmpty)
            return Integer.valueOf(0);
        else
            return Integer.valueOf(s);
    } else if (target == Short.class) {
        if (isNullOrEmpty)
            return Short.valueOf((short) 0);
        else
            return Short.valueOf(s);
    } else if (target == Long.class) {
        if (isNullOrEmpty)
            return Long.valueOf(0);
        else
            return Long.valueOf(s);
    } else {
        return null;
    }
}
// __begin convertMethod
/**
 * Converts a request-parameter string to the given property type,
 * either through the supplied PropertyEditor class or via built-in
 * conversions for primitives/wrappers, String, File and Object.
 * Used by generated setProperty code.
 *
 * @param propertyName        property being set (for error reporting)
 * @param s                   raw value; null maps to "false" for boolean
 *                            targets and to a null result otherwise
 * @param t                   target property type
 * @param propertyEditorClass optional editor overriding the built-ins
 * @return the converted value
 * @throws JasperException if the underlying conversion fails
 */
public static Object convert(String propertyName, String s, Class<?> t,
                             Class<?> propertyEditorClass)
   throws JasperException
{
    try {
        if (s == null) {
            if (t.equals(Boolean.class) || t.equals(Boolean.TYPE))
                s = "false";
            else
                return null;
        }
        if (propertyEditorClass != null) {
            return getValueFromBeanInfoPropertyEditor(
                                t, propertyName, s, propertyEditorClass);
        } else if ( t.equals(Boolean.class) || t.equals(Boolean.TYPE) ) {
            // "on" (HTML checkbox convention) and "true" are truthy
            if (s.equalsIgnoreCase("on") || s.equalsIgnoreCase("true"))
                s = "true";
            else
                s = "false";
            return Boolean.valueOf(s);
        } else if ( t.equals(Byte.class) || t.equals(Byte.TYPE) ) {
            return Byte.valueOf(s);
        } else if (t.equals(Character.class) || t.equals(Character.TYPE)) {
            return s.length() > 0 ? Character.valueOf(s.charAt(0)) : null;
        } else if ( t.equals(Short.class) || t.equals(Short.TYPE) ) {
            return Short.valueOf(s);
        } else if ( t.equals(Integer.class) || t.equals(Integer.TYPE) ) {
            return Integer.valueOf(s);
        } else if ( t.equals(Float.class) || t.equals(Float.TYPE) ) {
            return Float.valueOf(s);
        } else if ( t.equals(Long.class) || t.equals(Long.TYPE) ) {
            return Long.valueOf(s);
        } else if ( t.equals(Double.class) || t.equals(Double.TYPE) ) {
            return Double.valueOf(s);
        } else if ( t.equals(String.class) ) {
            return s;
        } else if ( t.equals(java.io.File.class) ) {
            return new java.io.File(s);
        } else if (t.getName().equals("java.lang.Object")) {
            // Longstanding quirk: the value is wrapped in a one-element
            // Object[] for Object-typed properties. Preserved as-is.
            return new Object[] {s};
        } else {
            return getValueFromPropertyEditorManager(
                                        t, propertyName, s);
        }
    } catch (Exception ex) {
        throw new JasperException(ex);
    }
}
// __end convertMethod
// __begin introspectMethod
/**
 * Copies every request parameter into the bean property of the same
 * name (missing setters are silently ignored).
 *
 * @param bean    target bean
 * @param request request whose parameters are introspected
 * @throws JasperException on conversion or invocation failure
 */
public static void introspect(Object bean, ServletRequest request)
    throws JasperException
{
    for (Enumeration<String> names = request.getParameterNames();
            names.hasMoreElements(); ) {
        String paramName = names.nextElement();
        String paramValue = request.getParameter(paramName);
        introspecthelper(bean, paramName, paramValue, request, paramName, true);
    }
}
// __end introspectMethod
// __begin introspecthelperMethod
/**
 * Converts and assigns a single request-parameter value to a bean
 * property, routing through AccessController.doPrivileged() when a
 * SecurityManager is installed.
 *
 * @param bean           target bean
 * @param prop           property name
 * @param value          raw string value to convert and set
 * @param request        source request (needed for array properties)
 * @param param          backing request-parameter name
 * @param ignoreMethodNF if true, a missing setter is silently ignored
 * @throws JasperException on conversion or invocation failure
 */
public static void introspecthelper(Object bean, String prop,
                                    String value, ServletRequest request,
                                    String param, boolean ignoreMethodNF)
    throws JasperException
{
    if( Constants.IS_SECURITY_ENABLED ) {
        try {
            PrivilegedIntrospectHelper dp =
                new PrivilegedIntrospectHelper(
                    bean,prop,value,request,param,ignoreMethodNF);
            AccessController.doPrivileged(dp);
        } catch( PrivilegedActionException pe) {
            // run() only declares JasperException, so this cast is safe
            Exception e = pe.getException();
            throw (JasperException)e;
        }
    } else {
        internalIntrospecthelper(
            bean,prop,value,request,param,ignoreMethodNF);
    }
}

/**
 * Looks up the property's write method via JavaBeans introspection and
 * invokes it with the converted value. Array-typed properties are
 * populated from all values of the backing request parameter.
 */
private static void internalIntrospecthelper(Object bean, String prop,
                                    String value, ServletRequest request,
                                    String param, boolean ignoreMethodNF)
    throws JasperException
{
    Method method = null;
    Class<?> type = null;
    Class<?> propertyEditorClass = null;
    try {
        java.beans.BeanInfo info
            = java.beans.Introspector.getBeanInfo(bean.getClass());
        if ( info != null ) {
            // Locate the descriptor matching the requested property name
            java.beans.PropertyDescriptor pd[]
                = info.getPropertyDescriptors();
            for (int i = 0 ; i < pd.length ; i++) {
                if ( pd[i].getName().equals(prop) ) {
                    method = pd[i].getWriteMethod();
                    type = pd[i].getPropertyType();
                    propertyEditorClass = pd[i].getPropertyEditorClass();
                    break;
                }
            }
        }
        if (method != null && type != null) {
            if (type.isArray()) {
                if (request == null) {
                    throw new JasperException(
                        Localizer.getMessage("jsp.error.beans.setproperty.noindexset"));
                }
                Class<?> t = type.getComponentType();
                String[] values = request.getParameterValues(param);
                //XXX Please check.
                if(values == null) return;
                if(t.equals(String.class)) {
                    method.invoke(bean, new Object[] { values });
                } else {
                    createTypedArray (prop, bean, method, values, t,
                                      propertyEditorClass);
                }
            } else {
                // Empty parameter-backed values are treated as "not set"
                if(value == null || (param != null && value.equals(""))) return;
                Object oval = convert(prop, value, type, propertyEditorClass);
                if ( oval != null )
                    method.invoke(bean, new Object[] { oval });
            }
        }
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
    if (!ignoreMethodNF && (method == null)) {
        if (type == null) {
            throw new JasperException(
                Localizer.getMessage("jsp.error.beans.noproperty",
                                     prop,
                                     bean.getClass().getName()));
        } else {
            throw new JasperException(
                Localizer.getMessage("jsp.error.beans.nomethod.setproperty",
                                     prop,
                                     type.getName(),
                                     bean.getClass().getName()));
        }
    }
}
// __end introspecthelperMethod
//-------------------------------------------------------------------
// functions to convert builtin Java data types to string.
//-------------------------------------------------------------------
// __begin toStringMethod
// Conversions of Java builtins to String for generated JSP code. All
// overloads delegate to String.valueOf, whose output is identical to the
// corresponding wrapper toString methods.

public static String toString(Object o) {
    return String.valueOf(o);
}

public static String toString(byte b) {
    return String.valueOf(b);
}

public static String toString(boolean b) {
    return String.valueOf(b);
}

public static String toString(short s) {
    return String.valueOf(s);
}

public static String toString(int i) {
    return String.valueOf(i);
}

public static String toString(float f) {
    return String.valueOf(f);
}

public static String toString(long l) {
    return String.valueOf(l);
}

public static String toString(double d) {
    return String.valueOf(d);
}

public static String toString(char c) {
    return String.valueOf(c);
}
// __end toStringMethod
/**
 * Create a typed array.
 * This is a special case where params are passed through
 * the request and the property is indexed.
 *
 * @param propertyName name of the indexed property (for error reporting)
 * @param bean         target bean instance
 * @param method       setter taking a single array argument of component type t
 * @param values       raw request parameter values to convert
 * @param t            component type of the setter's array parameter
 * @param propertyEditorClass optional PropertyEditor used for conversion
 * @throws JasperException if conversion or the setter invocation fails
 */
public static void createTypedArray(String propertyName,
                                    Object bean,
                                    Method method,
                                    String[] values,
                                    Class<?> t,
                                    Class<?> propertyEditorClass)
            throws JasperException {
    try {
        if (propertyEditorClass != null) {
            // Allocate an array of the real component type reflectively.
            // The previous "new Integer[values.length]" raised an
            // ArrayStoreException for any editor producing non-Integer
            // values, and handed the setter the wrong array type.
            Object tmpval = Array.newInstance(t, values.length);
            for (int i = 0; i < values.length; i++) {
                Array.set(tmpval, i, getValueFromBeanInfoPropertyEditor(
                        t, propertyName, values[i], propertyEditorClass));
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(Integer.class)) {
            Integer[] tmpval = new Integer[values.length];
            for (int i = 0; i < values.length; i++) {
                // valueOf instead of the deprecated wrapper constructor
                tmpval[i] = Integer.valueOf(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(Byte.class)) {
            Byte[] tmpval = new Byte[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Byte.valueOf(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(Boolean.class)) {
            Boolean[] tmpval = new Boolean[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Boolean.valueOf(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(Short.class)) {
            Short[] tmpval = new Short[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Short.valueOf(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(Long.class)) {
            Long[] tmpval = new Long[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Long.valueOf(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(Double.class)) {
            Double[] tmpval = new Double[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Double.valueOf(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(Float.class)) {
            Float[] tmpval = new Float[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Float.valueOf(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(Character.class)) {
            Character[] tmpval = new Character[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Character.valueOf(values[i].charAt(0));
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(int.class)) {
            int[] tmpval = new int[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Integer.parseInt(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(byte.class)) {
            byte[] tmpval = new byte[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Byte.parseByte(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(boolean.class)) {
            boolean[] tmpval = new boolean[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Boolean.parseBoolean(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(short.class)) {
            short[] tmpval = new short[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Short.parseShort(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(long.class)) {
            long[] tmpval = new long[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Long.parseLong(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(double.class)) {
            double[] tmpval = new double[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Double.parseDouble(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(float.class)) {
            float[] tmpval = new float[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = Float.parseFloat(values[i]);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else if (t.equals(char.class)) {
            char[] tmpval = new char[values.length];
            for (int i = 0; i < values.length; i++) {
                tmpval[i] = values[i].charAt(0);
            }
            method.invoke(bean, new Object[] { tmpval });
        } else {
            // Fallback: use a PropertyEditor registered with the global
            // PropertyEditorManager; build the array reflectively so the
            // setter receives exactly the array type it declares.
            Object tmpval = Array.newInstance(t, values.length);
            for (int i = 0; i < values.length; i++) {
                Array.set(tmpval, i, getValueFromPropertyEditorManager(
                        t, propertyName, values[i]));
            }
            method.invoke(bean, new Object[] { tmpval });
        }
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException ("error in invoking method", ex);
    }
}
/**
 * Escape special shell characters by prefixing each with a backslash.
 * @param unescString The string to shell-escape; may be null
 * @return The escaped shell string, or null if the input was null.
 */
public static String escapeQueryString(String unescString) {
    if ( unescString == null )
        return null;
    // Characters that must be escaped before reaching a shell.
    final String shellSpChars = "&;`'\"|*?~<>^()[]{}$\\\n";
    // StringBuilder replaces the previous repeated String concatenation,
    // which was O(n^2) in the input length.
    StringBuilder escString = new StringBuilder(unescString.length());
    for(int index=0; index<unescString.length(); index++) {
        char nextChar = unescString.charAt(index);
        if( shellSpChars.indexOf(nextChar) != -1 )
            escString.append('\\');
        escString.append(nextChar);
    }
    return escString.toString();
}
// __begin lookupReadMethodMethod
/**
 * Invokes the getter of the named bean property.
 *
 * @param o    the bean (must not be null)
 * @param prop property name
 * @return the property value
 * @throws JasperException if the bean is null or the getter fails
 */
public static Object handleGetProperty(Object o, String prop)
    throws JasperException {
    if (o == null) {
        throw new JasperException(
            Localizer.getMessage("jsp.error.beans.nullbean"));
    }
    Object value = null;
    try {
        Method method = getReadMethod(o.getClass(), prop);
        value = method.invoke(o, (Object[]) null);
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException (ex);
    }
    return value;
}
// __end lookupReadMethodMethod

// handles <jsp:setProperty> with EL expression for 'value' attribute
/**
 * Evaluates the EL expression and assigns the result through the
 * property's setter, coercing to the setter's parameter type.
 */
public static void handleSetPropertyExpression(Object bean,
    String prop, String expression, PageContext pageContext,
    ProtectedFunctionMapper functionMapper )
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] {
            PageContextImpl.proprietaryEvaluate(
                expression,
                method.getParameterTypes()[0],
                pageContext,
                functionMapper)
        });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}

// The overloads below are emitted by the code generator for each value
// type; each boxes the value (where needed) and invokes the setter.

public static void handleSetProperty(Object bean, String prop,
                                     Object value)
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] { value });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}

public static void handleSetProperty(Object bean, String prop,
                                     int value)
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] { Integer.valueOf(value) });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}

public static void handleSetProperty(Object bean, String prop,
                                     short value)
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] { Short.valueOf(value) });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}

public static void handleSetProperty(Object bean, String prop,
                                     long value)
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] { Long.valueOf(value) });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}

public static void handleSetProperty(Object bean, String prop,
                                     double value)
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] { Double.valueOf(value) });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}

public static void handleSetProperty(Object bean, String prop,
                                     float value)
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] { Float.valueOf(value) });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}

public static void handleSetProperty(Object bean, String prop,
                                     char value)
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] { Character.valueOf(value) });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}

public static void handleSetProperty(Object bean, String prop,
                                     byte value)
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] { Byte.valueOf(value) });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}

public static void handleSetProperty(Object bean, String prop,
                                     boolean value)
    throws JasperException
{
    try {
        Method method = getWriteMethod(bean.getClass(), prop);
        method.invoke(bean, new Object[] { Boolean.valueOf(value) });
    } catch (Exception ex) {
        Throwable thr = ExceptionUtils.unwrapInvocationTargetException(ex);
        ExceptionUtils.handleThrowable(thr);
        throw new JasperException(ex);
    }
}
/**
 * Returns the setter for the named property via JavaBeans
 * introspection.
 *
 * @throws JasperException if the bean info, the property, or its
 *         setter cannot be found
 */
public static Method getWriteMethod(Class<?> beanClass, String prop)
    throws JasperException {
    Method method = null;
    Class<?> type = null;
    try {
        java.beans.BeanInfo info
            = java.beans.Introspector.getBeanInfo(beanClass);
        if ( info != null ) {
            // Locate the descriptor matching the requested property name
            java.beans.PropertyDescriptor pd[]
                = info.getPropertyDescriptors();
            for (int i = 0 ; i < pd.length ; i++) {
                if ( pd[i].getName().equals(prop) ) {
                    method = pd[i].getWriteMethod();
                    type = pd[i].getPropertyType();
                    break;
                }
            }
        } else {
            // just in case introspection silently fails.
            throw new JasperException(
                Localizer.getMessage("jsp.error.beans.nobeaninfo",
                                     beanClass.getName()));
        }
    } catch (Exception ex) {
        throw new JasperException (ex);
    }
    if (method == null) {
        // Distinguish "no such property" from "property has no setter"
        if (type == null) {
            throw new JasperException(
                Localizer.getMessage("jsp.error.beans.noproperty",
                                     prop,
                                     beanClass.getName()));
        } else {
            throw new JasperException(
                Localizer.getMessage("jsp.error.beans.nomethod.setproperty",
                                     prop,
                                     type.getName(),
                                     beanClass.getName()));
        }
    }
    return method;
}

/**
 * Returns the getter for the named property via JavaBeans
 * introspection.
 *
 * @throws JasperException if the bean info, the property, or its
 *         getter cannot be found
 */
public static Method getReadMethod(Class<?> beanClass, String prop)
    throws JasperException {
    Method method = null;
    Class<?> type = null;
    try {
        java.beans.BeanInfo info
            = java.beans.Introspector.getBeanInfo(beanClass);
        if ( info != null ) {
            java.beans.PropertyDescriptor pd[]
                = info.getPropertyDescriptors();
            for (int i = 0 ; i < pd.length ; i++) {
                if ( pd[i].getName().equals(prop) ) {
                    method = pd[i].getReadMethod();
                    type = pd[i].getPropertyType();
                    break;
                }
            }
        } else {
            // just in case introspection silently fails.
            throw new JasperException(
                Localizer.getMessage("jsp.error.beans.nobeaninfo",
                                     beanClass.getName()));
        }
    } catch (Exception ex) {
        throw new JasperException (ex);
    }
    if (method == null) {
        if (type == null) {
            throw new JasperException(
                Localizer.getMessage("jsp.error.beans.noproperty", prop,
                                     beanClass.getName()));
        } else {
            throw new JasperException(
                Localizer.getMessage("jsp.error.beans.nomethod", prop,
                                     beanClass.getName()));
        }
    }
    return method;
}
//*********************************************************************
// PropertyEditor Support
/**
 * Converts attrValue by instantiating the PropertyEditor class declared
 * in the property's BeanInfo and feeding it the text.
 *
 * @param attrClass           target property type (for error reporting)
 * @param attrName            property name (for error reporting)
 * @param attrValue           raw string value to convert
 * @param propertyEditorClass editor class with a no-arg constructor
 * @return the converted value produced by the editor
 * @throws JasperException if instantiation or conversion fails
 */
public static Object getValueFromBeanInfoPropertyEditor(
                       Class<?> attrClass, String attrName, String attrValue,
                       Class<?> propertyEditorClass)
    throws JasperException
{
    try {
        // Class.newInstance() is deprecated (since Java 9) and rethrows
        // checked constructor exceptions undeclared; the Constructor-based
        // form is the documented replacement. All failures are still
        // funneled through the catch below.
        PropertyEditor pe = (PropertyEditor)
            propertyEditorClass.getDeclaredConstructor().newInstance();
        pe.setAsText(attrValue);
        return pe.getValue();
    } catch (Exception ex) {
        throw new JasperException(
            Localizer.getMessage("jsp.error.beans.property.conversion",
                                 attrValue, attrClass.getName(), attrName,
                                 ex.getMessage()));
    }
}
/**
 * Converts attrValue using the PropertyEditor registered for attrClass
 * with the global PropertyEditorManager.
 *
 * @throws JasperException if no editor is registered or conversion fails
 */
public static Object getValueFromPropertyEditorManager(
                 Class<?> attrClass, String attrName, String attrValue)
    throws JasperException
{
    try {
        PropertyEditor propEditor =
            PropertyEditorManager.findEditor(attrClass);
        if (propEditor != null) {
            propEditor.setAsText(attrValue);
            return propEditor.getValue();
        } else {
            throw new IllegalArgumentException(
                Localizer.getMessage("jsp.error.beans.propertyeditor.notregistered"));
        }
    } catch (IllegalArgumentException ex) {
        throw new JasperException(
            Localizer.getMessage("jsp.error.beans.property.conversion",
                                 attrValue, attrClass.getName(), attrName,
                                 ex.getMessage()));
    }
}

// ************************************************************************
// General Purpose Runtime Methods
// ************************************************************************

/**
 * Convert a possibly relative resource path into a context-relative
 * resource path that starts with a '/'.
 *
 * @param request The servlet request we are processing
 * @param relativePath The possibly relative resource path
 * @return the resolved path; already-absolute paths and non-HTTP
 *         requests are returned unchanged
 */
public static String getContextRelativePath(ServletRequest request,
                                            String relativePath) {
    if (relativePath.startsWith("/"))
        return (relativePath);
    if (!(request instanceof HttpServletRequest))
        return (relativePath);
    HttpServletRequest hrequest = (HttpServletRequest) request;
    // Inside a RequestDispatcher.include(), resolve against the included
    // servlet path rather than the top-level request's path.
    String uri = (String) request.getAttribute(
            RequestDispatcher.INCLUDE_SERVLET_PATH);
    if (uri != null) {
        String pathInfo = (String)
            request.getAttribute(RequestDispatcher.INCLUDE_PATH_INFO);
        if (pathInfo == null) {
            // Strip the file component to get the servlet's directory
            if (uri.lastIndexOf('/') >= 0)
                uri = uri.substring(0, uri.lastIndexOf('/'));
        }
    }
    else {
        uri = hrequest.getServletPath();
        if (uri.lastIndexOf('/') >= 0)
            uri = uri.substring(0, uri.lastIndexOf('/'));
    }
    return uri + '/' + relativePath;
}

/**
 * Perform a RequestDispatcher.include() operation, with optional flushing
 * of the response beforehand.
 *
 * @param request The servlet request we are processing
 * @param response The servlet response we are processing
 * @param relativePath The relative path of the resource to be included
 * @param out The Writer to whom we are currently writing
 * @param flush Should we flush before the include is processed?
 *
 * @exception IOException if thrown by the included servlet
 * @exception ServletException if thrown by the included servlet
 */
public static void include(ServletRequest request,
                           ServletResponse response,
                           String relativePath,
                           JspWriter out,
                           boolean flush)
    throws IOException, ServletException {
    // BodyContent buffers are never flushed to the client, so skip them
    if (flush && !(out instanceof BodyContent))
        out.flush();
    // FIXME - It is tempting to use request.getRequestDispatcher() to
    // resolve a relative path directly, but Catalina currently does not
    // take into account whether the caller is inside a RequestDispatcher
    // include or not. Whether Catalina *should* take that into account
    // is a spec issue currently under review. In the mean time,
    // replicate Jasper's previous behavior
    String resourcePath = getContextRelativePath(request, relativePath);
    RequestDispatcher rd = request.getRequestDispatcher(resourcePath);
    rd.include(request,
               new ServletResponseWrapperInclude(response, out));
}
/**
 * URL encodes a string, based on the supplied character encoding.
 * This performs the same function as java.net.URLEncoder.encode
 * in J2SDK1.4, and should be removed if the only platform supported
 * is 1.4 or higher.
 * @param s The String to be URL encoded; null yields the literal "null"
 * @param enc The character encoding; null falls back to ISO-8859-1
 * @return The URL encoded String
 */
public static String URLEncode(String s, String enc) {
    if (s == null) {
        return "null";
    }
    if (enc == null) {
        enc = "ISO-8859-1"; // The default request encoding
    }
    StringBuilder out = new StringBuilder(s.length());
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    OutputStreamWriter writer = null;
    try {
        writer = new OutputStreamWriter(buf, enc);
    } catch (java.io.UnsupportedEncodingException ex) {
        // Use the default encoding?
        writer = new OutputStreamWriter(buf);
    }
    for (int i = 0; i < s.length(); i++) {
        int c = s.charAt(i);
        if (c == ' ') {
            out.append('+');
        } else if (isSafeChar(c)) {
            out.append((char)c);
        } else {
            // convert to external encoding before hex conversion
            // NOTE(review): chars are written to the encoder one at a
            // time, so supplementary characters are converted per
            // surrogate half — confirm this matches the intended
            // encoder behavior for the encodings in use.
            try {
                writer.write(c);
                writer.flush();
            } catch(IOException e) {
                // Unencodable character: silently dropped, loop continues
                buf.reset();
                continue;
            }
            byte[] ba = buf.toByteArray();
            for (int j = 0; j < ba.length; j++) {
                out.append('%');
                // Converting each byte in the buffer
                out.append(Character.forDigit((ba[j]>>4) & 0xf, 16));
                out.append(Character.forDigit(ba[j] & 0xf, 16));
            }
            buf.reset();
        }
    }
    return out.toString();
}
// True for characters that need no percent-encoding: ASCII letters,
// digits, and the unreserved punctuation set used by the JDK's URL
// encoder.
private static boolean isSafeChar(int c) {
    return (c >= 'a' && c <= 'z')
            || (c >= 'A' && c <= 'Z')
            || (c >= '0' && c <= '9')
            || c == '-' || c == '_' || c == '.' || c == '!'
            || c == '~' || c == '*' || c == '\'' || c == '(' || c == ')';
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2020_04_01.implementation;
import com.microsoft.azure.arm.collection.InnerSupportsGet;
import com.microsoft.azure.arm.collection.InnerSupportsDelete;
import com.microsoft.azure.arm.collection.InnerSupportsListing;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.management.network.v2020_04_01.TagsObject;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.PATCH;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in ExpressRoutePorts.
*/
public class ExpressRoutePortsInner implements InnerSupportsGet<ExpressRoutePortInner>, InnerSupportsDelete<Void>, InnerSupportsListing<ExpressRoutePortInner> {
/** The Retrofit service to perform REST calls. */
private ExpressRoutePortsService service;
/** The service client containing this operation class. */
private NetworkManagementClientImpl client;
/**
* Initializes an instance of ExpressRoutePortsInner.
*
* @param retrofit the Retrofit instance built from a Retrofit Builder.
* @param client the instance of the service client containing this operation class.
*/
public ExpressRoutePortsInner(Retrofit retrofit, NetworkManagementClientImpl client) {
this.service = retrofit.create(ExpressRoutePortsService.class);
this.client = client;
}
    /**
     * The interface defining all the services for ExpressRoutePorts to be
     * used by Retrofit to perform the actual REST calls.
     */
    interface ExpressRoutePortsService {
        // Long-running delete: the *begin* variant issues a single request, the
        // non-begin variant is polled to completion by the Azure client.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts delete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> delete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("expressRoutePortName") String expressRoutePortName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts beginDelete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> beginDelete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("expressRoutePortName") String expressRoutePortName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        // Point read of a single ExpressRoutePort resource.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts getByResourceGroup" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}")
        Observable<Response<ResponseBody>> getByResourceGroup(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("expressRoutePortName") String expressRoutePortName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        // Long-running create-or-update (PUT); begin variant is the single request.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts createOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}")
        Observable<Response<ResponseBody>> createOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("expressRoutePortName") String expressRoutePortName, @Query("api-version") String apiVersion, @Body ExpressRoutePortInner parameters, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts beginCreateOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}")
        Observable<Response<ResponseBody>> beginCreateOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("expressRoutePortName") String expressRoutePortName, @Query("api-version") String apiVersion, @Body ExpressRoutePortInner parameters, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        // PATCH of the resource's tags only.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts updateTags" })
        @PATCH("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}")
        Observable<Response<ResponseBody>> updateTags(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("expressRoutePortName") String expressRoutePortName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Body TagsObject parameters, @Header("User-Agent") String userAgent);
        // Listing endpoints; the *Next overloads follow server-provided paging links.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts listByResourceGroup" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts")
        Observable<Response<ResponseBody>> listByResourceGroup(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts list" })
        @GET("subscriptions/{subscriptionId}/providers/Microsoft.Network/ExpressRoutePorts")
        Observable<Response<ResponseBody>> list(@Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts listByResourceGroupNext" })
        @GET
        Observable<Response<ResponseBody>> listByResourceGroupNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2020_04_01.ExpressRoutePorts listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }
/**
* Deletes the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
*/
public void delete(String resourceGroupName, String expressRoutePortName) {
deleteWithServiceResponseAsync(resourceGroupName, expressRoutePortName).toBlocking().last().body();
}
/**
* Deletes the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<Void> deleteAsync(String resourceGroupName, String expressRoutePortName, final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(resourceGroupName, expressRoutePortName), serviceCallback);
}
/**
* Deletes the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
public Observable<Void> deleteAsync(String resourceGroupName, String expressRoutePortName) {
return deleteWithServiceResponseAsync(resourceGroupName, expressRoutePortName).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
}
/**
* Deletes the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String expressRoutePortName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (expressRoutePortName == null) {
throw new IllegalArgumentException("Parameter expressRoutePortName is required and cannot be null.");
}
final String apiVersion = "2020-04-01";
Observable<Response<ResponseBody>> observable = service.delete(this.client.subscriptionId(), resourceGroupName, expressRoutePortName, apiVersion, this.client.acceptLanguage(), this.client.userAgent());
return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType());
}
/**
* Deletes the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
*/
public void beginDelete(String resourceGroupName, String expressRoutePortName) {
beginDeleteWithServiceResponseAsync(resourceGroupName, expressRoutePortName).toBlocking().single().body();
}
/**
* Deletes the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String expressRoutePortName, final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(beginDeleteWithServiceResponseAsync(resourceGroupName, expressRoutePortName), serviceCallback);
}
/**
* Deletes the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceResponse} object if successful.
*/
public Observable<Void> beginDeleteAsync(String resourceGroupName, String expressRoutePortName) {
return beginDeleteWithServiceResponseAsync(resourceGroupName, expressRoutePortName).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
}
/**
* Deletes the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceResponse} object if successful.
*/
public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String expressRoutePortName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (expressRoutePortName == null) {
throw new IllegalArgumentException("Parameter expressRoutePortName is required and cannot be null.");
}
final String apiVersion = "2020-04-01";
return service.beginDelete(this.client.subscriptionId(), resourceGroupName, expressRoutePortName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = beginDeleteDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<Void> beginDeleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<Void, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<Void>() { }.getType())
.register(202, new TypeToken<Void>() { }.getType())
.register(204, new TypeToken<Void>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
* Retrieves the requested ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of ExpressRoutePort.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the ExpressRoutePortInner object if successful.
*/
public ExpressRoutePortInner getByResourceGroup(String resourceGroupName, String expressRoutePortName) {
return getByResourceGroupWithServiceResponseAsync(resourceGroupName, expressRoutePortName).toBlocking().single().body();
}
/**
* Retrieves the requested ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of ExpressRoutePort.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<ExpressRoutePortInner> getByResourceGroupAsync(String resourceGroupName, String expressRoutePortName, final ServiceCallback<ExpressRoutePortInner> serviceCallback) {
return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, expressRoutePortName), serviceCallback);
}
/**
* Retrieves the requested ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of ExpressRoutePort.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the ExpressRoutePortInner object
*/
public Observable<ExpressRoutePortInner> getByResourceGroupAsync(String resourceGroupName, String expressRoutePortName) {
return getByResourceGroupWithServiceResponseAsync(resourceGroupName, expressRoutePortName).map(new Func1<ServiceResponse<ExpressRoutePortInner>, ExpressRoutePortInner>() {
@Override
public ExpressRoutePortInner call(ServiceResponse<ExpressRoutePortInner> response) {
return response.body();
}
});
}
/**
* Retrieves the requested ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of ExpressRoutePort.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the ExpressRoutePortInner object
*/
public Observable<ServiceResponse<ExpressRoutePortInner>> getByResourceGroupWithServiceResponseAsync(String resourceGroupName, String expressRoutePortName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (expressRoutePortName == null) {
throw new IllegalArgumentException("Parameter expressRoutePortName is required and cannot be null.");
}
final String apiVersion = "2020-04-01";
return service.getByResourceGroup(this.client.subscriptionId(), resourceGroupName, expressRoutePortName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ExpressRoutePortInner>>>() {
@Override
public Observable<ServiceResponse<ExpressRoutePortInner>> call(Response<ResponseBody> response) {
try {
ServiceResponse<ExpressRoutePortInner> clientResponse = getByResourceGroupDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<ExpressRoutePortInner> getByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<ExpressRoutePortInner, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<ExpressRoutePortInner>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
* Creates or updates the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param parameters Parameters supplied to the create ExpressRoutePort operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the ExpressRoutePortInner object if successful.
*/
public ExpressRoutePortInner createOrUpdate(String resourceGroupName, String expressRoutePortName, ExpressRoutePortInner parameters) {
return createOrUpdateWithServiceResponseAsync(resourceGroupName, expressRoutePortName, parameters).toBlocking().last().body();
}
/**
* Creates or updates the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param parameters Parameters supplied to the create ExpressRoutePort operation.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<ExpressRoutePortInner> createOrUpdateAsync(String resourceGroupName, String expressRoutePortName, ExpressRoutePortInner parameters, final ServiceCallback<ExpressRoutePortInner> serviceCallback) {
return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, expressRoutePortName, parameters), serviceCallback);
}
/**
* Creates or updates the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param parameters Parameters supplied to the create ExpressRoutePort operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
public Observable<ExpressRoutePortInner> createOrUpdateAsync(String resourceGroupName, String expressRoutePortName, ExpressRoutePortInner parameters) {
return createOrUpdateWithServiceResponseAsync(resourceGroupName, expressRoutePortName, parameters).map(new Func1<ServiceResponse<ExpressRoutePortInner>, ExpressRoutePortInner>() {
@Override
public ExpressRoutePortInner call(ServiceResponse<ExpressRoutePortInner> response) {
return response.body();
}
});
}
/**
* Creates or updates the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param parameters Parameters supplied to the create ExpressRoutePort operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
public Observable<ServiceResponse<ExpressRoutePortInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String expressRoutePortName, ExpressRoutePortInner parameters) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (expressRoutePortName == null) {
throw new IllegalArgumentException("Parameter expressRoutePortName is required and cannot be null.");
}
if (parameters == null) {
throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
}
Validator.validate(parameters);
final String apiVersion = "2020-04-01";
Observable<Response<ResponseBody>> observable = service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, expressRoutePortName, apiVersion, parameters, this.client.acceptLanguage(), this.client.userAgent());
return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<ExpressRoutePortInner>() { }.getType());
}
/**
* Creates or updates the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param parameters Parameters supplied to the create ExpressRoutePort operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the ExpressRoutePortInner object if successful.
*/
public ExpressRoutePortInner beginCreateOrUpdate(String resourceGroupName, String expressRoutePortName, ExpressRoutePortInner parameters) {
return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, expressRoutePortName, parameters).toBlocking().single().body();
}
/**
* Creates or updates the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param parameters Parameters supplied to the create ExpressRoutePort operation.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<ExpressRoutePortInner> beginCreateOrUpdateAsync(String resourceGroupName, String expressRoutePortName, ExpressRoutePortInner parameters, final ServiceCallback<ExpressRoutePortInner> serviceCallback) {
return ServiceFuture.fromResponse(beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, expressRoutePortName, parameters), serviceCallback);
}
/**
* Creates or updates the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param parameters Parameters supplied to the create ExpressRoutePort operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the ExpressRoutePortInner object
*/
public Observable<ExpressRoutePortInner> beginCreateOrUpdateAsync(String resourceGroupName, String expressRoutePortName, ExpressRoutePortInner parameters) {
return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, expressRoutePortName, parameters).map(new Func1<ServiceResponse<ExpressRoutePortInner>, ExpressRoutePortInner>() {
@Override
public ExpressRoutePortInner call(ServiceResponse<ExpressRoutePortInner> response) {
return response.body();
}
});
}
/**
* Creates or updates the specified ExpressRoutePort resource.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param parameters Parameters supplied to the create ExpressRoutePort operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the ExpressRoutePortInner object
*/
public Observable<ServiceResponse<ExpressRoutePortInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String expressRoutePortName, ExpressRoutePortInner parameters) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (expressRoutePortName == null) {
throw new IllegalArgumentException("Parameter expressRoutePortName is required and cannot be null.");
}
if (parameters == null) {
throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
}
Validator.validate(parameters);
final String apiVersion = "2020-04-01";
return service.beginCreateOrUpdate(this.client.subscriptionId(), resourceGroupName, expressRoutePortName, apiVersion, parameters, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ExpressRoutePortInner>>>() {
@Override
public Observable<ServiceResponse<ExpressRoutePortInner>> call(Response<ResponseBody> response) {
try {
ServiceResponse<ExpressRoutePortInner> clientResponse = beginCreateOrUpdateDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<ExpressRoutePortInner> beginCreateOrUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<ExpressRoutePortInner, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<ExpressRoutePortInner>() { }.getType())
.register(201, new TypeToken<ExpressRoutePortInner>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
* Update ExpressRoutePort tags.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the ExpressRoutePortInner object if successful.
*/
public ExpressRoutePortInner updateTags(String resourceGroupName, String expressRoutePortName) {
return updateTagsWithServiceResponseAsync(resourceGroupName, expressRoutePortName).toBlocking().single().body();
}
/**
* Update ExpressRoutePort tags.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<ExpressRoutePortInner> updateTagsAsync(String resourceGroupName, String expressRoutePortName, final ServiceCallback<ExpressRoutePortInner> serviceCallback) {
return ServiceFuture.fromResponse(updateTagsWithServiceResponseAsync(resourceGroupName, expressRoutePortName), serviceCallback);
}
/**
* Update ExpressRoutePort tags.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the ExpressRoutePortInner object
*/
public Observable<ExpressRoutePortInner> updateTagsAsync(String resourceGroupName, String expressRoutePortName) {
return updateTagsWithServiceResponseAsync(resourceGroupName, expressRoutePortName).map(new Func1<ServiceResponse<ExpressRoutePortInner>, ExpressRoutePortInner>() {
@Override
public ExpressRoutePortInner call(ServiceResponse<ExpressRoutePortInner> response) {
return response.body();
}
});
}
/**
* Update ExpressRoutePort tags.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the ExpressRoutePortInner object
*/
public Observable<ServiceResponse<ExpressRoutePortInner>> updateTagsWithServiceResponseAsync(String resourceGroupName, String expressRoutePortName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (expressRoutePortName == null) {
throw new IllegalArgumentException("Parameter expressRoutePortName is required and cannot be null.");
}
final String apiVersion = "2020-04-01";
final Map<String, String> tags = null;
TagsObject parameters = new TagsObject();
parameters.withTags(null);
return service.updateTags(this.client.subscriptionId(), resourceGroupName, expressRoutePortName, apiVersion, this.client.acceptLanguage(), parameters, this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ExpressRoutePortInner>>>() {
@Override
public Observable<ServiceResponse<ExpressRoutePortInner>> call(Response<ResponseBody> response) {
try {
ServiceResponse<ExpressRoutePortInner> clientResponse = updateTagsDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
/**
* Update ExpressRoutePort tags.
*
* @param resourceGroupName The name of the resource group.
* @param expressRoutePortName The name of the ExpressRoutePort resource.
* @param tags Resource tags.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the ExpressRoutePortInner object if successful.
*/
public ExpressRoutePortInner updateTags(String resourceGroupName, String expressRoutePortName, Map<String, String> tags) {
return updateTagsWithServiceResponseAsync(resourceGroupName, expressRoutePortName, tags).toBlocking().single().body();
}
/**
 * Update ExpressRoutePort tags, reporting the outcome through a callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param expressRoutePortName The name of the ExpressRoutePort resource.
 * @param tags Resource tags.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ExpressRoutePortInner> updateTagsAsync(String resourceGroupName, String expressRoutePortName, Map<String, String> tags, final ServiceCallback<ExpressRoutePortInner> serviceCallback) {
    // Bridge the observable-based implementation onto the callback-style API.
    Observable<ServiceResponse<ExpressRoutePortInner>> observable =
        updateTagsWithServiceResponseAsync(resourceGroupName, expressRoutePortName, tags);
    return ServiceFuture.fromResponse(observable, serviceCallback);
}
/**
 * Update ExpressRoutePort tags, emitting only the response body.
 *
 * @param resourceGroupName The name of the resource group.
 * @param expressRoutePortName The name of the ExpressRoutePort resource.
 * @param tags Resource tags.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ExpressRoutePortInner object
 */
public Observable<ExpressRoutePortInner> updateTagsAsync(String resourceGroupName, String expressRoutePortName, Map<String, String> tags) {
    // Strip the ServiceResponse envelope and surface just the deserialized resource.
    Func1<ServiceResponse<ExpressRoutePortInner>, ExpressRoutePortInner> extractBody =
        new Func1<ServiceResponse<ExpressRoutePortInner>, ExpressRoutePortInner>() {
            @Override
            public ExpressRoutePortInner call(ServiceResponse<ExpressRoutePortInner> serviceResponse) {
                return serviceResponse.body();
            }
        };
    return updateTagsWithServiceResponseAsync(resourceGroupName, expressRoutePortName, tags).map(extractBody);
}
/**
 * Update ExpressRoutePort tags.
 *
 * @param resourceGroupName The name of the resource group.
 * @param expressRoutePortName The name of the ExpressRoutePort resource.
 * @param tags Resource tags.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ExpressRoutePortInner object
 */
public Observable<ServiceResponse<ExpressRoutePortInner>> updateTagsWithServiceResponseAsync(String resourceGroupName, String expressRoutePortName, Map<String, String> tags) {
    // Fail fast on required parameters before any network work is done.
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (expressRoutePortName == null) {
        throw new IllegalArgumentException("Parameter expressRoutePortName is required and cannot be null.");
    }
    // tags is optional (may be null); Validator only checks constraints on non-null values.
    Validator.validate(tags);
    // API version is pinned by the service contract this client was generated against.
    final String apiVersion = "2020-04-01";
    // Wrap the tags map in the request-body object the service expects.
    TagsObject parameters = new TagsObject();
    parameters.withTags(tags);
    return service.updateTags(this.client.subscriptionId(), resourceGroupName, expressRoutePortName, apiVersion, this.client.acceptLanguage(), parameters, this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ExpressRoutePortInner>>>() {
            @Override
            public Observable<ServiceResponse<ExpressRoutePortInner>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the raw HTTP response; propagate any failure through the observable.
                    ServiceResponse<ExpressRoutePortInner> clientResponse = updateTagsDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
/**
 * Deserializes the raw updateTags HTTP response into a typed {@link ServiceResponse}.
 * Only HTTP 200 is registered as success; other statuses are raised as {@link CloudException}.
 *
 * @param response the raw Retrofit response
 * @return the deserialized ExpressRoutePortInner wrapped in a ServiceResponse
 * @throws CloudException if the server rejected the request
 * @throws IOException if the response body could not be read or parsed
 * @throws IllegalArgumentException if the response is malformed
 */
private ServiceResponse<ExpressRoutePortInner> updateTagsDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<ExpressRoutePortInner, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<ExpressRoutePortInner>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
/**
 * List all the ExpressRoutePort resources in the specified resource group,
 * blocking for the first page and lazily fetching subsequent pages.
 *
 * @param resourceGroupName The name of the resource group.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;ExpressRoutePortInner&gt; object if successful.
 */
public PagedList<ExpressRoutePortInner> listByResourceGroup(final String resourceGroupName) {
    // Fetch the first page synchronously; later pages are pulled on demand by the PagedList.
    final ServiceResponse<Page<ExpressRoutePortInner>> firstPage =
        listByResourceGroupSinglePageAsync(resourceGroupName).toBlocking().single();
    return new PagedList<ExpressRoutePortInner>(firstPage.body()) {
        @Override
        public Page<ExpressRoutePortInner> nextPage(String nextPageLink) {
            ServiceResponse<Page<ExpressRoutePortInner>> next =
                listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single();
            return next.body();
        }
    };
}
/**
 * List all the ExpressRoutePort resources in the specified resource group,
 * delivering each page to the given callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<ExpressRoutePortInner>> listByResourceGroupAsync(final String resourceGroupName, final ListOperationCallback<ExpressRoutePortInner> serviceCallback) {
    // Pager used by the future to resolve every nextPageLink the service hands back.
    Func1<String, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>> nextPageFetcher =
        new Func1<String, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(String nextPageLink) {
                return listByResourceGroupNextSinglePageAsync(nextPageLink);
            }
        };
    return AzureServiceFuture.fromPageResponse(
        listByResourceGroupSinglePageAsync(resourceGroupName), nextPageFetcher, serviceCallback);
}
/**
 * List all the ExpressRoutePort resources in the specified resource group,
 * emitting one {@link Page} per service response.
 *
 * @param resourceGroupName The name of the resource group.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;ExpressRoutePortInner&gt; object
 */
public Observable<Page<ExpressRoutePortInner>> listByResourceGroupAsync(final String resourceGroupName) {
    // Strip the ServiceResponse envelope from every emitted page.
    Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Page<ExpressRoutePortInner>> extractPage =
        new Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Page<ExpressRoutePortInner>>() {
            @Override
            public Page<ExpressRoutePortInner> call(ServiceResponse<Page<ExpressRoutePortInner>> serviceResponse) {
                return serviceResponse.body();
            }
        };
    return listByResourceGroupWithServiceResponseAsync(resourceGroupName).map(extractPage);
}
/**
 * List all the ExpressRoutePort resources in the specified resource group.
 *
 * @param resourceGroupName The name of the resource group.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<ExpressRoutePortInner> object
 */
public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> listByResourceGroupWithServiceResponseAsync(final String resourceGroupName) {
    return listByResourceGroupSinglePageAsync(resourceGroupName)
        .concatMap(new Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(ServiceResponse<Page<ExpressRoutePortInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                // A null link marks the last page; stop recursing.
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                // Emit this page, then recursively chain the remaining pages after it.
                return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink));
            }
        });
}
/**
 * List all the ExpressRoutePort resources in the specified resource group.
 * Fetches exactly one page; pagination is handled by the callers above.
 *
 * @param resourceGroupName The name of the resource group.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList<ExpressRoutePortInner> object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> listByResourceGroupSinglePageAsync(final String resourceGroupName) {
    // Fail fast on required parameters before issuing the request.
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    final String apiVersion = "2020-04-01";
    return service.listByResourceGroup(this.client.subscriptionId(), resourceGroupName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize to the concrete PageImpl, then re-wrap as the Page interface.
                    ServiceResponse<PageImpl<ExpressRoutePortInner>> result = listByResourceGroupDelegate(response);
                    return Observable.just(new ServiceResponse<Page<ExpressRoutePortInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
/**
 * Deserializes a raw listByResourceGroup HTTP response into a typed page response.
 * Only HTTP 200 is registered as success; other statuses raise a {@link CloudException}.
 *
 * @param response the raw Retrofit response
 * @return one page of ExpressRoutePortInner results wrapped in a ServiceResponse
 * @throws CloudException if the server rejected the request
 * @throws IOException if the response body could not be read or parsed
 * @throws IllegalArgumentException if the response is malformed
 */
private ServiceResponse<PageImpl<ExpressRoutePortInner>> listByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<ExpressRoutePortInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<ExpressRoutePortInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
/**
 * List all the ExpressRoutePort resources in the specified subscription,
 * blocking for the first page and lazily fetching subsequent pages.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;ExpressRoutePortInner&gt; object if successful.
 */
public PagedList<ExpressRoutePortInner> list() {
    // Fetch the first page synchronously; the PagedList pulls later pages on demand.
    final ServiceResponse<Page<ExpressRoutePortInner>> firstPage = listSinglePageAsync().toBlocking().single();
    return new PagedList<ExpressRoutePortInner>(firstPage.body()) {
        @Override
        public Page<ExpressRoutePortInner> nextPage(String nextPageLink) {
            ServiceResponse<Page<ExpressRoutePortInner>> next =
                listNextSinglePageAsync(nextPageLink).toBlocking().single();
            return next.body();
        }
    };
}
/**
 * List all the ExpressRoutePort resources in the specified subscription.
 *
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<ExpressRoutePortInner>> listAsync(final ListOperationCallback<ExpressRoutePortInner> serviceCallback) {
    return AzureServiceFuture.fromPageResponse(
        listSinglePageAsync(),
        // Pager invoked for every nextPageLink returned by the service.
        new Func1<String, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}
/**
 * List all the ExpressRoutePort resources in the specified subscription,
 * emitting one {@link Page} per service response.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;ExpressRoutePortInner&gt; object
 */
public Observable<Page<ExpressRoutePortInner>> listAsync() {
    // Strip the ServiceResponse envelope from every emitted page.
    Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Page<ExpressRoutePortInner>> extractPage =
        new Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Page<ExpressRoutePortInner>>() {
            @Override
            public Page<ExpressRoutePortInner> call(ServiceResponse<Page<ExpressRoutePortInner>> serviceResponse) {
                return serviceResponse.body();
            }
        };
    return listWithServiceResponseAsync().map(extractPage);
}
/**
 * List all the ExpressRoutePort resources in the specified subscription.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<ExpressRoutePortInner> object
 */
public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> listWithServiceResponseAsync() {
    return listSinglePageAsync()
        .concatMap(new Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(ServiceResponse<Page<ExpressRoutePortInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                // A null link marks the last page; stop recursing.
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                // Emit this page, then recursively chain the remaining pages after it.
                return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
            }
        });
}
/**
 * List all the ExpressRoutePort resources in the specified subscription.
 * Fetches exactly one page; pagination is handled by the callers above.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList<ExpressRoutePortInner> object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> listSinglePageAsync() {
    // Fail fast on the required subscription id before issuing the request.
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    final String apiVersion = "2020-04-01";
    return service.list(this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize to the concrete PageImpl, then re-wrap as the Page interface.
                    ServiceResponse<PageImpl<ExpressRoutePortInner>> result = listDelegate(response);
                    return Observable.just(new ServiceResponse<Page<ExpressRoutePortInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
/**
 * Deserializes a raw list HTTP response into a typed page response.
 * Only HTTP 200 is registered as success; other statuses raise a {@link CloudException}.
 *
 * @param response the raw Retrofit response
 * @return one page of ExpressRoutePortInner results wrapped in a ServiceResponse
 * @throws CloudException if the server rejected the request
 * @throws IOException if the response body could not be read or parsed
 * @throws IllegalArgumentException if the response is malformed
 */
private ServiceResponse<PageImpl<ExpressRoutePortInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<ExpressRoutePortInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<ExpressRoutePortInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
/**
 * Continue listing ExpressRoutePort resources in a resource group from a next-page link,
 * blocking for the requested page and lazily fetching subsequent pages.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;ExpressRoutePortInner&gt; object if successful.
 */
public PagedList<ExpressRoutePortInner> listByResourceGroupNext(final String nextPageLink) {
    // Fetch the requested page synchronously; the PagedList pulls later pages on demand.
    final ServiceResponse<Page<ExpressRoutePortInner>> firstPage =
        listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single();
    return new PagedList<ExpressRoutePortInner>(firstPage.body()) {
        @Override
        public Page<ExpressRoutePortInner> nextPage(String link) {
            ServiceResponse<Page<ExpressRoutePortInner>> next =
                listByResourceGroupNextSinglePageAsync(link).toBlocking().single();
            return next.body();
        }
    };
}
/**
 * List all the ExpressRoutePort resources in the specified resource group.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<ExpressRoutePortInner>> listByResourceGroupNextAsync(final String nextPageLink, final ServiceFuture<List<ExpressRoutePortInner>> serviceFuture, final ListOperationCallback<ExpressRoutePortInner> serviceCallback) {
    // NOTE(review): the serviceFuture parameter is never referenced in this body;
    // it appears to be kept only for generated-API signature compatibility — confirm.
    return AzureServiceFuture.fromPageResponse(
        listByResourceGroupNextSinglePageAsync(nextPageLink),
        // Pager invoked for every subsequent nextPageLink returned by the service.
        new Func1<String, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(String nextPageLink) {
                return listByResourceGroupNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}
/**
 * Continue listing ExpressRoutePort resources in a resource group from a next-page link,
 * emitting one {@link Page} per service response.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;ExpressRoutePortInner&gt; object
 */
public Observable<Page<ExpressRoutePortInner>> listByResourceGroupNextAsync(final String nextPageLink) {
    // Strip the ServiceResponse envelope from every emitted page.
    Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Page<ExpressRoutePortInner>> extractPage =
        new Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Page<ExpressRoutePortInner>>() {
            @Override
            public Page<ExpressRoutePortInner> call(ServiceResponse<Page<ExpressRoutePortInner>> serviceResponse) {
                return serviceResponse.body();
            }
        };
    return listByResourceGroupNextWithServiceResponseAsync(nextPageLink).map(extractPage);
}
/**
 * List all the ExpressRoutePort resources in the specified resource group.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<ExpressRoutePortInner> object
 */
public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> listByResourceGroupNextWithServiceResponseAsync(final String nextPageLink) {
    return listByResourceGroupNextSinglePageAsync(nextPageLink)
        .concatMap(new Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(ServiceResponse<Page<ExpressRoutePortInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                // A null link marks the last page; stop recursing.
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                // Emit this page, then recursively chain the remaining pages after it.
                return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink));
            }
        });
}
/**
 * List all the ExpressRoutePort resources in the specified resource group.
 * Fetches exactly one page identified by the given next-page link.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList<ExpressRoutePortInner> object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> listByResourceGroupNextSinglePageAsync(final String nextPageLink) {
    if (nextPageLink == null) {
        throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
    }
    // The next-page link is already a complete URL; use it directly
    // (the previous String.format("%s", nextPageLink) was a no-op).
    String nextUrl = nextPageLink;
    return service.listByResourceGroupNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize to the concrete PageImpl, then re-wrap as the Page interface.
                    ServiceResponse<PageImpl<ExpressRoutePortInner>> result = listByResourceGroupNextDelegate(response);
                    return Observable.just(new ServiceResponse<Page<ExpressRoutePortInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
/**
 * Deserializes a raw listByResourceGroupNext HTTP response into a typed page response.
 * Only HTTP 200 is registered as success; other statuses raise a {@link CloudException}.
 *
 * @param response the raw Retrofit response
 * @return one page of ExpressRoutePortInner results wrapped in a ServiceResponse
 * @throws CloudException if the server rejected the request
 * @throws IOException if the response body could not be read or parsed
 * @throws IllegalArgumentException if the response is malformed
 */
private ServiceResponse<PageImpl<ExpressRoutePortInner>> listByResourceGroupNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<ExpressRoutePortInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<ExpressRoutePortInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
/**
 * Continue listing ExpressRoutePort resources in the subscription from a next-page link,
 * blocking for the requested page and lazily fetching subsequent pages.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;ExpressRoutePortInner&gt; object if successful.
 */
public PagedList<ExpressRoutePortInner> listNext(final String nextPageLink) {
    // Fetch the requested page synchronously; the PagedList pulls later pages on demand.
    final ServiceResponse<Page<ExpressRoutePortInner>> firstPage =
        listNextSinglePageAsync(nextPageLink).toBlocking().single();
    return new PagedList<ExpressRoutePortInner>(firstPage.body()) {
        @Override
        public Page<ExpressRoutePortInner> nextPage(String link) {
            ServiceResponse<Page<ExpressRoutePortInner>> next =
                listNextSinglePageAsync(link).toBlocking().single();
            return next.body();
        }
    };
}
/**
 * List all the ExpressRoutePort resources in the specified subscription.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<ExpressRoutePortInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<ExpressRoutePortInner>> serviceFuture, final ListOperationCallback<ExpressRoutePortInner> serviceCallback) {
    // NOTE(review): the serviceFuture parameter is never referenced in this body;
    // it appears to be kept only for generated-API signature compatibility — confirm.
    return AzureServiceFuture.fromPageResponse(
        listNextSinglePageAsync(nextPageLink),
        // Pager invoked for every subsequent nextPageLink returned by the service.
        new Func1<String, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}
/**
 * Continue listing ExpressRoutePort resources in the subscription from a next-page link,
 * emitting one {@link Page} per service response.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;ExpressRoutePortInner&gt; object
 */
public Observable<Page<ExpressRoutePortInner>> listNextAsync(final String nextPageLink) {
    // Strip the ServiceResponse envelope from every emitted page.
    Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Page<ExpressRoutePortInner>> extractPage =
        new Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Page<ExpressRoutePortInner>>() {
            @Override
            public Page<ExpressRoutePortInner> call(ServiceResponse<Page<ExpressRoutePortInner>> serviceResponse) {
                return serviceResponse.body();
            }
        };
    return listNextWithServiceResponseAsync(nextPageLink).map(extractPage);
}
/**
 * List all the ExpressRoutePort resources in the specified subscription.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<ExpressRoutePortInner> object
 */
public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
    return listNextSinglePageAsync(nextPageLink)
        .concatMap(new Func1<ServiceResponse<Page<ExpressRoutePortInner>>, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(ServiceResponse<Page<ExpressRoutePortInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                // A null link marks the last page; stop recursing.
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                // Emit this page, then recursively chain the remaining pages after it.
                return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
            }
        });
}
/**
 * List all the ExpressRoutePort resources in the specified subscription.
 * Fetches exactly one page identified by the given next-page link.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList<ExpressRoutePortInner> object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> listNextSinglePageAsync(final String nextPageLink) {
    if (nextPageLink == null) {
        throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
    }
    // The next-page link is already a complete URL; use it directly
    // (the previous String.format("%s", nextPageLink) was a no-op).
    String nextUrl = nextPageLink;
    return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ExpressRoutePortInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ExpressRoutePortInner>>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize to the concrete PageImpl, then re-wrap as the Page interface.
                    ServiceResponse<PageImpl<ExpressRoutePortInner>> result = listNextDelegate(response);
                    return Observable.just(new ServiceResponse<Page<ExpressRoutePortInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
/**
 * Deserializes a raw listNext HTTP response into a typed page response.
 * Only HTTP 200 is registered as success; other statuses raise a {@link CloudException}.
 *
 * @param response the raw Retrofit response
 * @return one page of ExpressRoutePortInner results wrapped in a ServiceResponse
 * @throws CloudException if the server rejected the request
 * @throws IOException if the response body could not be read or parsed
 * @throws IllegalArgumentException if the response is malformed
 */
private ServiceResponse<PageImpl<ExpressRoutePortInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<ExpressRoutePortInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<ExpressRoutePortInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.ml.utils.persistence;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.ClearScrollAction;
import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollAction;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.OriginSettingClient;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.ml.test.MockOriginSettingClient;
import org.elasticsearch.xpack.ml.test.SearchHitBuilder;
import org.junit.Before;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Deque;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicInteger;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class BatchedDocumentsIteratorTests extends ESTestCase {
private static final String INDEX_NAME = ".ml-anomalies-foo";
private static final String SCROLL_ID = "someScrollId";
private Client client;
private boolean wasScrollCleared;
private TestIterator testIterator;
@Before
public void setUpMocks() {
client = Mockito.mock(Client.class);
OriginSettingClient originSettingClient = MockOriginSettingClient.mockOriginSettingClient(client, ClientHelper.ML_ORIGIN);
wasScrollCleared = false;
testIterator = new TestIterator(originSettingClient, INDEX_NAME);
givenClearScrollRequest();
}
public void testQueryReturnsNoResults() {
ResponsesMocker scrollResponsesMocker = new ScrollResponsesMocker(client).finishMock();
assertTrue(testIterator.hasNext());
assertTrue(testIterator.next().isEmpty());
assertFalse(testIterator.hasNext());
assertTrue(wasScrollCleared);
scrollResponsesMocker.assertSearchRequest(INDEX_NAME);
scrollResponsesMocker.assertSearchScrollRequests(0, SCROLL_ID);
}
public void testCallingNextWhenHasNextIsFalseThrows() {
new ScrollResponsesMocker(client).addBatch(createJsonDoc("a"), createJsonDoc("b"), createJsonDoc("c")).finishMock();
testIterator.next();
assertFalse(testIterator.hasNext());
ESTestCase.expectThrows(NoSuchElementException.class, () -> testIterator.next());
}
public void testQueryReturnsSingleBatch() {
ResponsesMocker scrollResponsesMocker = new ScrollResponsesMocker(client).addBatch(
createJsonDoc("a"),
createJsonDoc("b"),
createJsonDoc("c")
).finishMock();
assertTrue(testIterator.hasNext());
Deque<String> batch = testIterator.next();
assertEquals(3, batch.size());
assertTrue(batch.containsAll(Arrays.asList(createJsonDoc("a"), createJsonDoc("b"), createJsonDoc("c"))));
assertFalse(testIterator.hasNext());
assertTrue(wasScrollCleared);
scrollResponsesMocker.assertSearchRequest(INDEX_NAME);
scrollResponsesMocker.assertSearchScrollRequests(0, SCROLL_ID);
}
public void testQueryReturnsThreeBatches() {
ResponsesMocker responsesMocker = new ScrollResponsesMocker(client).addBatch(
createJsonDoc("a"),
createJsonDoc("b"),
createJsonDoc("c")
).addBatch(createJsonDoc("d"), createJsonDoc("e")).addBatch(createJsonDoc("f")).finishMock();
assertTrue(testIterator.hasNext());
Deque<String> batch = testIterator.next();
assertEquals(3, batch.size());
assertTrue(batch.containsAll(Arrays.asList(createJsonDoc("a"), createJsonDoc("b"), createJsonDoc("c"))));
batch = testIterator.next();
assertEquals(2, batch.size());
assertTrue(batch.containsAll(Arrays.asList(createJsonDoc("d"), createJsonDoc("e"))));
batch = testIterator.next();
assertEquals(1, batch.size());
assertTrue(batch.containsAll(Collections.singletonList(createJsonDoc("f"))));
assertFalse(testIterator.hasNext());
assertTrue(wasScrollCleared);
responsesMocker.assertSearchRequest(INDEX_NAME);
responsesMocker.assertSearchScrollRequests(2, SCROLL_ID);
}
private String createJsonDoc(String value) {
return "{\"foo\":\"" + value + "\"}";
}
@SuppressWarnings("unchecked")
private void givenClearScrollRequest() {
doAnswer(invocationOnMock -> {
ActionListener<ClearScrollResponse> listener = (ActionListener<ClearScrollResponse>) invocationOnMock.getArguments()[2];
wasScrollCleared = true;
listener.onResponse(mock(ClearScrollResponse.class));
return null;
}).when(client).execute(eq(ClearScrollAction.INSTANCE), any(), any());
}
abstract static class ResponsesMocker {
protected Client client;
protected List<String[]> batches = new ArrayList<>();
protected long totalHits = 0;
protected List<SearchResponse> responses = new ArrayList<>();
protected AtomicInteger responseIndex = new AtomicInteger(0);
protected ArgumentCaptor<SearchRequest> searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class);
protected ArgumentCaptor<SearchScrollRequest> searchScrollRequestCaptor = ArgumentCaptor.forClass(SearchScrollRequest.class);
ResponsesMocker(Client client) {
this.client = client;
}
ResponsesMocker addBatch(String... hits) {
totalHits += hits.length;
batches.add(hits);
return this;
}
abstract ResponsesMocker finishMock();
protected SearchResponse createSearchResponseWithHits(String... hits) {
SearchHits searchHits = createHits(hits);
SearchResponse searchResponse = mock(SearchResponse.class);
when(searchResponse.getScrollId()).thenReturn(SCROLL_ID);
when(searchResponse.getHits()).thenReturn(searchHits);
return searchResponse;
}
protected SearchHits createHits(String... values) {
List<SearchHit> hits = new ArrayList<>();
for (String value : values) {
hits.add(new SearchHitBuilder(randomInt()).setSource(value).build());
}
return new SearchHits(hits.toArray(new SearchHit[hits.size()]), new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1.0f);
}
void assertSearchRequest(String indexName) {
List<SearchRequest> searchRequests = searchRequestCaptor.getAllValues();
assertThat(searchRequests.size(), equalTo(1));
SearchRequest searchRequest = searchRequests.get(0);
assertThat(searchRequest.indices(), equalTo(new String[] { indexName }));
assertThat(searchRequest.scroll().keepAlive(), equalTo(TimeValue.timeValueMinutes(5)));
assertThat(searchRequest.source().query(), equalTo(QueryBuilders.matchAllQuery()));
assertThat(searchRequest.source().trackTotalHitsUpTo(), is(SearchContext.TRACK_TOTAL_HITS_ACCURATE));
}
void assertSearchScrollRequests(int expectedCount, String scrollId) {
List<SearchScrollRequest> searchScrollRequests = searchScrollRequestCaptor.getAllValues();
assertThat(searchScrollRequests.size(), equalTo(expectedCount));
for (SearchScrollRequest request : searchScrollRequests) {
assertThat(request.scrollId(), equalTo(scrollId));
assertThat(request.scroll().keepAlive(), equalTo(TimeValue.timeValueMinutes(5)));
}
}
}
/**
 * Mocks a scroll-style flow: the first batch is served by the initial search
 * and every subsequent batch by one scroll round-trip.
 * Inherited state ({@code batches}, {@code responses}, {@code responseIndex},
 * captors) comes from {@code ResponsesMocker}.
 */
static class ScrollResponsesMocker extends ResponsesMocker {
    ScrollResponsesMocker(Client client) {
        super(client);
    }

    @Override
    @SuppressWarnings("unchecked")
    ResponsesMocker finishMock() {
        // No batches configured: stub only the initial search, with zero hits.
        if (batches.isEmpty()) {
            givenInitialResponse();
            return this;
        }
        // First batch comes back from the initial search ...
        givenInitialResponse(batches.get(0));
        // ... and every remaining batch from a scroll request, queued in order.
        for (int i = 1; i < batches.size(); ++i) {
            responses.add(createSearchResponseWithHits(batches.get(i)));
        }
        doAnswer(invocationOnMock -> {
            // Argument 2 of Client#execute(action, request, listener) is the listener.
            ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocationOnMock.getArguments()[2];
            // Serve the queued scroll responses one per invocation, in order.
            listener.onResponse(responses.get(responseIndex.getAndIncrement()));
            return null;
        }).when(client).execute(eq(SearchScrollAction.INSTANCE), searchScrollRequestCaptor.capture(), any());
        return this;
    }

    /** Stubs the initial (non-scroll) search to answer with the given hits. */
    @SuppressWarnings("unchecked")
    private void givenInitialResponse(String... hits) {
        SearchResponse searchResponse = createSearchResponseWithHits(hits);
        doAnswer(invocationOnMock -> {
            ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocationOnMock.getArguments()[2];
            listener.onResponse(searchResponse);
            return null;
        }).when(client).execute(eq(SearchAction.INSTANCE), searchRequestCaptor.capture(), any());
    }
}
/**
 * Mocks a search-after-style flow: every batch (including the first) is served
 * by a plain search request; no scroll requests are stubbed.
 */
static class SearchResponsesMocker extends ResponsesMocker {
    SearchResponsesMocker(Client client) {
        super(client);
    }

    @Override
    @SuppressWarnings("unchecked")
    ResponsesMocker finishMock() {
        // No batches configured: every search invocation answers with a fresh,
        // empty-hit response.
        if (batches.isEmpty()) {
            doAnswer(invocationOnMock -> {
                ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocationOnMock.getArguments()[2];
                listener.onResponse(createSearchResponseWithHits());
                return null;
            }).when(client).execute(eq(SearchAction.INSTANCE), searchRequestCaptor.capture(), any());
            return this;
        }
        // Queue one response per batch, then serve them in order, one per search.
        for (String[] batch : batches) {
            responses.add(createSearchResponseWithHits(batch));
        }
        doAnswer(invocationOnMock -> {
            // Argument 2 of Client#execute(action, request, listener) is the listener.
            ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocationOnMock.getArguments()[2];
            listener.onResponse(responses.get(responseIndex.getAndIncrement()));
            return null;
        }).when(client).execute(eq(SearchAction.INSTANCE), searchRequestCaptor.capture(), any());
        return this;
    }
}
/**
 * Minimal concrete {@link BatchedDocumentsIterator} used by the tests: matches
 * every document and maps each hit to its raw source string.
 */
private static class TestIterator extends BatchedDocumentsIterator<String> {

    TestIterator(OriginSettingClient client, String jobId) {
        super(client, jobId);
    }

    /** Maps a hit to its JSON source as returned by the search response. */
    @Override
    protected String map(SearchHit hit) {
        return hit.getSourceAsString();
    }

    /** All documents are in scope for this iterator. */
    @Override
    protected QueryBuilder getQuery() {
        return QueryBuilders.matchAllQuery();
    }
}
}
| |
package com.github.ningg.flume.source;
import java.io.File;
import java.io.FileFilter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.charset.Charset;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.FlumeException;
import org.apache.flume.client.avro.ReliableEventReader;
import org.apache.flume.client.avro.ReliableSpoolingFileEventReader;
import org.apache.flume.serialization.DecodeErrorPolicy;
import org.apache.flume.serialization.DurablePositionTracker;
import org.apache.flume.serialization.EventDeserializer;
import org.apache.flume.serialization.EventDeserializerFactory;
import org.apache.flume.serialization.PositionTracker;
import org.apache.flume.serialization.ResettableFileInputStream;
import org.apache.flume.serialization.ResettableInputStream;
import org.apache.flume.tools.PlatformDetect;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.ningg.flume.source.SpoolDirectoryTailFileSourceConfigurationConstants.ConsumeOrder;
import static com.github.ningg.flume.source.SpoolDirectoryTailFileSourceConfigurationConstants.*;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.io.Files;
/**
 * A {@link ReliableEventReader} that watches a spooling directory and — unlike
 * the stock spooling reader — keeps tailing the current "target" file (whose
 * name embeds a date pattern) while it is still being written. Once a file is
 * no longer the target and a successor exists, it is retired: renamed with the
 * completed suffix or deleted, per the configured delete policy.
 *
 * Refer to {@link ReliableSpoolingFileEventReader}
 *
 * @author Ning Guo
 */
public class ReliableSpoolDirectoryTailFileEventReader implements ReliableEventReader{

  private static final Logger logger = LoggerFactory.getLogger(ReliableSpoolDirectoryTailFileEventReader.class);

  // Name of the durable position-tracker meta file kept in the tracker directory.
  static final String metaFileName = ".flumespooltailfile-main.meta";

  /* Immutable configuration, all supplied via the Builder. */
  private final File spoolDirectory;
  private final String completedSuffix;      // suffix appended to fully-consumed files
  private final String deserializerType;
  private final Context deserializerContext;
  private final Pattern ignorePattern;       // files matching this are never consumed
  private final Pattern targetPattern;       // candidate files must match this
  private final String targetFilename;       // SimpleDateFormat pattern identifying "today's" file
  private final File metaFile;
  private final boolean annotateFileName;
  private final boolean annotateBaseName;
  private final String fileNameHeader;
  private final String baseNameHeader;
  private final String deletePolicy;
  private final Charset inputCharset;
  private final DecodeErrorPolicy decodeErrorPolicy;
  private final ConsumeOrder consumeOrder;

  // File currently being consumed, if any.
  private Optional<FileInfo> currentFile = Optional.absent();
  /** Always contains the last file from which lines have been read. **/
  private Optional<FileInfo> lastFileRead = Optional.absent();
  // False while a batch of events has been handed out but not yet committed.
  private boolean committed = true;

  /**
   * Create a ReliableSpoolingFileEventReader to watch the given directory.
   * Validates all arguments, runs a read/write canary test against the
   * spooling directory, and prepares the tracker (meta) directory.
   */
  private ReliableSpoolDirectoryTailFileEventReader(File spoolDirectory,
      String completedSuffix, String ignorePattern, String targetPattern, String targetFilename, String trackerDirPath,
      boolean annotateFileName, String fileNameHeader,
      boolean annotateBaseName, String baseNameHeader,
      String deserializerType, Context deserializerContext,
      String deletePolicy, String inputCharset,
      DecodeErrorPolicy decodeErrorPolicy,
      ConsumeOrder consumeOrder) throws IOException {

    // Sanity checks
    Preconditions.checkNotNull(spoolDirectory);
    Preconditions.checkNotNull(completedSuffix);
    Preconditions.checkNotNull(ignorePattern);
    Preconditions.checkNotNull(targetPattern);
    Preconditions.checkNotNull(targetFilename);
    Preconditions.checkNotNull(trackerDirPath);
    Preconditions.checkNotNull(deserializerType);
    Preconditions.checkNotNull(deserializerContext);
    Preconditions.checkNotNull(deletePolicy);
    Preconditions.checkNotNull(inputCharset);

    // validate delete policy: DELAY exists in the enum but is not implemented yet
    if (!deletePolicy.equalsIgnoreCase(DeletePolicy.NEVER.name()) &&
        !deletePolicy.equalsIgnoreCase(DeletePolicy.IMMEDIATE.name())) {
      throw new IllegalArgumentException("Delete policies other than " +
          "NEVER and IMMEDIATE are not yet supported");
    }

    if (logger.isDebugEnabled()) {
      logger.debug("Initializing {} with directory={}, metaDir={}, " +
          "deserializer={}",
          new Object[] { ReliableSpoolDirectoryTailFileEventReader.class.getSimpleName(),
          spoolDirectory, trackerDirPath, deserializerType });
    }

    // Verify directory exists and is readable/writable
    Preconditions.checkState(spoolDirectory.exists(),
        "Directory does not exist: " + spoolDirectory.getAbsolutePath());
    Preconditions.checkState(spoolDirectory.isDirectory(),
        "Path is not a directory: " + spoolDirectory.getAbsolutePath());

    // Do a canary test to make sure we have access to spooling directory:
    // create, write, read back, and delete a temp file; fail fast otherwise.
    try {
      File canary = File.createTempFile("flume-spooldir-perm-check-", ".canary",
          spoolDirectory);
      Files.write("testing flume file permissions\n", canary, Charsets.UTF_8);
      List<String> lines = Files.readLines(canary, Charsets.UTF_8);
      Preconditions.checkState(!lines.isEmpty(), "Empty canary file %s", canary);
      if (!canary.delete()) {
        throw new IOException("Unable to delete canary file " + canary);
      }
      logger.debug("Successfully created and deleted canary file: {}", canary);
    } catch (IOException e) {
      throw new FlumeException("Unable to read and modify files" +
          " in the spooling directory: " + spoolDirectory, e);
    }

    this.spoolDirectory = spoolDirectory;
    this.completedSuffix = completedSuffix;
    this.deserializerType = deserializerType;
    this.deserializerContext = deserializerContext;
    this.annotateFileName = annotateFileName;
    this.fileNameHeader = fileNameHeader;
    this.annotateBaseName = annotateBaseName;
    this.baseNameHeader = baseNameHeader;
    this.ignorePattern = Pattern.compile(ignorePattern);
    this.targetPattern = Pattern.compile(targetPattern);
    this.targetFilename = targetFilename;
    this.deletePolicy = deletePolicy;
    this.inputCharset = Charset.forName(inputCharset);
    this.decodeErrorPolicy = Preconditions.checkNotNull(decodeErrorPolicy);
    this.consumeOrder = Preconditions.checkNotNull(consumeOrder);

    File trackerDirectory = new File(trackerDirPath);

    // if relative path, treat as relative to spool directory
    if (!trackerDirectory.isAbsolute()) {
      trackerDirectory = new File(spoolDirectory, trackerDirPath);
    }

    // ensure that meta directory exists
    if (!trackerDirectory.exists()) {
      if (!trackerDirectory.mkdir()) {
        throw new IOException("Unable to mkdir nonexistent meta directory " +
            trackerDirectory);
      }
    }

    // ensure that the meta directory is a directory
    if (!trackerDirectory.isDirectory()) {
      throw new IOException("Specified meta directory is not a directory" +
          trackerDirectory);
    }

    this.metaFile = new File(trackerDirectory, metaFileName);
  }

  /** Return the filename which generated the data from the last successful
   * {@link #readEvents(int)} call. Returns null if called before any file
   * contents are read. */
  public String getLastFileRead() {
    if (!lastFileRead.isPresent()) {
      return null;
    }
    return lastFileRead.get().getFile().getAbsolutePath();
  }

  // Convenience wrapper: read a single event, or null when none is available.
  @Override
  public Event readEvent() throws IOException {
    List<Event> events = readEvents(1);
    if (!events.isEmpty()) {
      return events.get(0);
    } else {
      return null;
    }
  }

  /**
   * Reads up to {@code numEvents} events from the current file. If the
   * previous batch was not committed, the deserializer is reset so those
   * events are re-delivered. When the current file is exhausted AND it is no
   * longer the target file AND a successor exists, the current file is
   * retired and reading continues from the next file.
   */
  @Override
  public List<Event> readEvents(int numEvents) throws IOException {
    if (!committed) {
      if (!currentFile.isPresent()) {
        throw new IllegalStateException("File should not roll when " +
            "commit is outstanding.");
      }
      /**
       * Function: Monitor current file.
       * Author: Ning Guo
       * Time: 2015-02-27
       */
      // Last read was never committed - reset to the last marked position so
      // the uncommitted events are re-read.
      currentFile.get().getDeserializer().reset();
    } else {
      // Check if new files have arrived since last call
      if (!currentFile.isPresent()) {
        currentFile = getNextFile();
      }
      // Return empty list if no new files
      if (!currentFile.isPresent()) {
        return Collections.emptyList();
      }
    }

    EventDeserializer des = currentFile.get().getDeserializer();
    List<Event> events = des.readEvents(numEvents);

    /* It's possible that the last read took us just up to a file boundary.
     * If so, try to roll to the next file, if there is one. */
    if (events.isEmpty()) {
      /*
       * Function: monitor current file
       * Author: Ning Guo
       * Time: 2015-02-25
       *
       * deal with two kinds of files:
       * 1. old File: delete or rename;
       * 2. current fresh File: keep monitoring it (do NOT retire it).
       */
      if(!isTargetFile(currentFile) // Only when currentFile is no longer the target AND a next file exists
          && (isExistNextFile()) ){ // do we retire the history file (the former target).
        logger.info("File:{} is no longer a TARGET File, which will no longer be monitored.", currentFile.get().getFile().getName());
        retireCurrentFile();
        currentFile = getNextFile();
      }
      if (!currentFile.isPresent()) {
        return Collections.emptyList();
      }
      events = currentFile.get().getDeserializer().readEvents(numEvents);
    }

    // Optionally annotate each event with the absolute path / base name of
    // the file it came from.
    if (annotateFileName) {
      String filename = currentFile.get().getFile().getAbsolutePath();
      for (Event event : events) {
        event.getHeaders().put(fileNameHeader, filename);
      }
    }

    if (annotateBaseName) {
      String basename = currentFile.get().getFile().getName();
      for (Event event : events) {
        event.getHeaders().put(baseNameHeader, basename);
      }
    }

    committed = false;
    lastFileRead = currentFile;
    return events;
  }

  /**
   * Returns true when, besides the current file, at least one other candidate
   * file is present in the spooling directory.
   * refer to {@link #getNextFile()}
   * @return
   */
  private boolean isExistNextFile() {
    /* Filter to exclude finished or hidden files */
    // NOTE(review): duplicates the filter in getNextFile(); consider sharing.
    FileFilter filter = new FileFilter() {
      @Override
      public boolean accept(File candidate) {
        String fileName = candidate.getName();
        if( (candidate.isDirectory()) ||
            (fileName.endsWith(completedSuffix)) ||
            (fileName.startsWith(".")) ||
            (ignorePattern.matcher(fileName).matches()) ){
          return false;
        }
        if( targetPattern.matcher(fileName).matches() ){
          return true;
        }
        return false;
      }
    };
    // NOTE(review): File.listFiles can return null on an I/O error, which
    // would make Arrays.asList throw NPE here - TODO confirm/handle.
    List<File> candidateFiles = Arrays.asList(spoolDirectory.listFiles(filter));
    if (candidateFiles.isEmpty()){ // No matching file in spooling directory.
      return false;
    }
    if (candidateFiles.size() >= 2) { // Only when two files exist. (Since the current file is one of them.)
      return true;
    }
    return false;
  }

  /**
   * Test if currentFile2 is the targetFile, i.e. whether its name contains
   * today's date formatted with the {@link #targetFilename} pattern
   * (case-insensitive substring match).
   * @param currentFile2
   * @return
   */
  private boolean isTargetFile(Optional<FileInfo> currentFile2) {
    String inputFilename = currentFile2.get().getFile().getName();
    // targetFilename is used as a SimpleDateFormat pattern, e.g. "yyyy-MM-dd".
    SimpleDateFormat dateFormat = new SimpleDateFormat(targetFilename);
    String substringOfTargetFile = dateFormat.format(new Date());
    if(inputFilename.toLowerCase().contains(substringOfTargetFile.toLowerCase())){
      return true;
    }
    return false;
  }

  /**
   * Closes currentFile and attempt to rename it.
   *
   * If these operations fail in a way that may cause duplicate log entries,
   * an error is logged but no exceptions are thrown. If these operations fail
   * in a way that indicates potential misuse of the spooling directory, a
   * FlumeException will be thrown.
   * @throws FlumeException if files do not conform to spooling assumptions
   */
  private void retireCurrentFile() throws IOException {
    Preconditions.checkState(currentFile.isPresent());

    File fileToRoll = new File(currentFile.get().getFile().getAbsolutePath());

    currentFile.get().getDeserializer().close();

    // Verify that spooling assumptions hold.
    // NOTE(review): these checks only log informationally; the upstream Flume
    // reader treats modification during read as an error - intentional change?
    if (fileToRoll.lastModified() == currentFile.get().getLastModified()) {
      logger.info("File:{} has not been modified since being read.", fileToRoll.getName());
    }
    if (fileToRoll.length() == currentFile.get().getLength()) {
      logger.info("File:{} has not changed size since being read.", fileToRoll.getName());
    }

    if (deletePolicy.equalsIgnoreCase(DeletePolicy.NEVER.name())) {
      rollCurrentFile(fileToRoll);
    } else if (deletePolicy.equalsIgnoreCase(DeletePolicy.IMMEDIATE.name())) {
      deleteCurrentFile(fileToRoll);
    } else {
      // TODO: implement delay in the future
      throw new IllegalArgumentException("Unsupported delete policy: " +
          deletePolicy);
    }
  }

  /**
   * Rename the given spooled file by appending the completed suffix.
   * @param fileToRoll
   * @throws IOException
   */
  private void rollCurrentFile(File fileToRoll) throws IOException {

    File dest = new File(fileToRoll.getPath() + completedSuffix);
    logger.info("Preparing to move file {} to {}", fileToRoll, dest);

    // Before renaming, check whether destination file name exists
    if (dest.exists() && PlatformDetect.isWindows()) {
      /*
       * If we are here, it means the completed file already exists. In almost
       * every case this means the user is violating an assumption of Flume
       * (that log files are placed in the spooling directory with unique
       * names). However, there is a corner case on Windows systems where the
       * file was already rolled but the rename was not atomic. If that seems
       * likely, we let it pass with only a warning.
       */
      if (Files.equal(currentFile.get().getFile(), dest)) {
        logger.warn("Completed file " + dest +
            " already exists, but files match, so continuing.");
        boolean deleted = fileToRoll.delete();
        if (!deleted) {
          logger.error("Unable to delete file " + fileToRoll.getAbsolutePath() +
              ". It will likely be ingested another time.");
        }
      } else {
        String message = "File name has been re-used with different" +
            " files. Spooling assumptions violated for " + dest;
        throw new IllegalStateException(message);
      }

    // Dest file exists and not on windows
    } else if (dest.exists()) {
      String message = "File name has been re-used with different" +
          " files. Spooling assumptions violated for " + dest;
      throw new IllegalStateException(message);

    // Destination file does not already exist. We are good to go!
    } else {
      boolean renamed = fileToRoll.renameTo(dest);
      if (renamed) {
        logger.debug("Successfully rolled file {} to {}", fileToRoll, dest);

        // now we no longer need the meta file
        deleteMetaFile();
      } else {
        /* If we are here then the file cannot be renamed for a reason other
         * than that the destination file exists (actually, that remains
         * possible w/ small probability due to TOC-TOU conditions).*/
        String message = "Unable to move " + fileToRoll + " to " + dest +
            ". This will likely cause duplicate events. Please verify that " +
            "flume has sufficient permissions to perform these operations.";
        throw new FlumeException(message);
      }
    }
  }

  /**
   * Delete the given spooled file
   * @param fileToDelete
   * @throws IOException if the file cannot be deleted
   */
  private void deleteCurrentFile(File fileToDelete) throws IOException {
    logger.info("Preparing to delete file {}", fileToDelete);
    if (!fileToDelete.exists()) {
      logger.warn("Unable to delete nonexistent file: {}", fileToDelete);
      return;
    }
    if (!fileToDelete.delete()) {
      throw new IOException("Unable to delete spool file: " + fileToDelete);
    }
    // now we no longer need the meta file
    deleteMetaFile();
  }

  /**
   * Returns the next file to be consumed from the chosen directory.
   * If the directory is empty or the chosen file is not readable,
   * this will return an absent option.
   * If the {@link #consumeOrder} variable is {@link ConsumeOrder#OLDEST}
   * then returns the oldest file. If the {@link #consumeOrder} variable
   * is {@link ConsumeOrder#YOUNGEST} then returns the youngest file.
   * If two or more files are equally old/young, then the file name with
   * lower lexicographical value is returned.
   * If the {@link #consumeOrder} variable is {@link ConsumeOrder#RANDOM}
   * then returns any arbitrary file in the directory.
   */
  private Optional<FileInfo> getNextFile(){
    /* Filter to exclude finished or hidden files */
    FileFilter filter = new FileFilter() {
      @Override
      public boolean accept(File candidate) {
        String fileName = candidate.getName();
        if( (candidate.isDirectory()) ||
            (fileName.endsWith(completedSuffix)) ||
            (fileName.startsWith(".")) ||
            (ignorePattern.matcher(fileName).matches()) ){
          return false;
        }
        if( targetPattern.matcher(fileName).matches() ){
          return true;
        }
        return false;
      }
    };
    // NOTE(review): File.listFiles can return null on an I/O error, which
    // would make Arrays.asList throw NPE here - TODO confirm/handle.
    List<File> candidateFiles = Arrays.asList(spoolDirectory.listFiles(filter));
    if (candidateFiles.isEmpty()){ // No matching file in spooling directory.
      return Optional.absent();
    }

    File selectedFile = candidateFiles.get(0); // Start with the first listed file.
    if (consumeOrder == ConsumeOrder.RANDOM) { // Any file is acceptable; take it.
      return openFile(selectedFile);
    } else if (consumeOrder == ConsumeOrder.YOUNGEST) {
      for (File candidateFile: candidateFiles) {
        long compare = selectedFile.lastModified() - candidateFile.lastModified();
        if (compare == 0) { // timestamp is same, pick smallest lexicographically.
          selectedFile = smallerLexicographical(selectedFile, candidateFile);
        } else if (compare < 0) { // candidate is younger (cand-ts > selec-ts)
          selectedFile = candidateFile;
        }
      }
    } else { // default order is OLDEST
      for (File candidateFile: candidateFiles) {
        long compare = selectedFile.lastModified() - candidateFile.lastModified();
        if (compare == 0) { // timestamp is same, pick smallest lexicographically.
          selectedFile = smallerLexicographical(selectedFile, candidateFile);
        } else if (compare > 0) { // candidate is older (cand-ts < selec-ts)
          selectedFile = candidateFile;
        }
      }
    }

    return openFile(selectedFile);
  }

  // Returns whichever file has the lexicographically smaller name.
  private File smallerLexicographical(File f1, File f2){
    if (f1.getName().compareTo(f2.getName()) < 0){
      return f1;
    }
    return f2;
  }

  /**
   * Opens a file for consuming, resuming from the position recorded in the
   * meta file when it refers to this same file, otherwise starting fresh.
   * @param file
   * @return {@link #FileInfo} for the file to consume or absent option if the
   * file does not exist or is not readable.
   */
  private Optional<FileInfo> openFile(File file) {
    try {
      // roll the meta file, if needed: if the tracker targets a different
      // file, discard it and create a new one for this file.
      String nextPath = file.getPath();
      PositionTracker tracker =
          DurablePositionTracker.getInstance(metaFile, nextPath);
      if (!tracker.getTarget().equals(nextPath)) {
        tracker.close();
        deleteMetaFile();
        tracker = DurablePositionTracker.getInstance(metaFile, nextPath);
      }

      // sanity check
      Preconditions.checkState(tracker.getTarget().equals(nextPath),
          "Tracker target %s does not equal expected filename %s",
          tracker.getTarget(), nextPath);

      ResettableInputStream in =
          new ResettableFileInputStream(file, tracker,
              ResettableFileInputStream.DEFAULT_BUF_SIZE, inputCharset,
              decodeErrorPolicy);
      EventDeserializer deserializer = EventDeserializerFactory.getInstance
          (deserializerType, deserializerContext, in);

      return Optional.of(new FileInfo(file, deserializer));
    } catch (FileNotFoundException e) {
      // File could have been deleted in the interim
      logger.warn("Could not find file: " + file, e);
      return Optional.absent();
    } catch (IOException e) {
      logger.error("Exception opening file: " + file, e);
      return Optional.absent();
    }
  }

  // Removes the position-tracker meta file; failing to delete is an error
  // because a stale tracker would corrupt the next file's read position.
  private void deleteMetaFile() throws IOException {
    if (metaFile.exists() && !metaFile.delete()) {
      throw new IOException("Unable to delete old meta file " + metaFile);
    }
  }

  // Closes the deserializer of the file currently being read, if any.
  @Override
  public void close() throws IOException {
    if (currentFile.isPresent()) {
      currentFile.get().getDeserializer().close();
      currentFile = Optional.absent();
    }
  }

  /** Commit the last lines which were read. */
  @Override
  public void commit() throws IOException {
    if (!committed && currentFile.isPresent()) {
      currentFile.get().getDeserializer().mark();
      committed = true;
    }
  }

  /** An immutable class with information about a file being processed. **/
  private static class FileInfo {
    private final File file;
    private final long length;        // file size captured at open time
    private final long lastModified;  // mtime captured at open time
    private final EventDeserializer deserializer;

    public FileInfo(File file, EventDeserializer deserializer){
      this.file = file;
      this.length = file.length();
      this.lastModified = file.lastModified();
      this.deserializer = deserializer;
    }

    public File getFile() { return file; }
    public long getLength() { return length; }
    public long getLastModified() { return lastModified; }
    public EventDeserializer getDeserializer() { return deserializer; }
  }

  // What to do with a file once it has been fully consumed.
  static enum DeletePolicy{
    NEVER,
    IMMEDIATE,
    DELAY   // not supported yet; rejected by the constructor
  }

  /**
   * Special builder class for {@link ReliableSpoolDirectoryTailFileEventReader}.
   * All settings default to the configuration constants.
   */
  public static class Builder {
    private File spoolDirectory;
    private String completedSuffix = SPOOLED_FILE_SUFFIX;
    private String ignorePattern = DEFAULT_IGNORE_PAT;
    private String targetPattern = DEFAULT_TARGET_PAT;
    private String targetFilename = DEFAULT_TARGET_FILENAME;
    private String trackerDirPath = DEFAULT_TRACKER_DIR;
    private boolean annotateFileName = DEFAULT_FILENAME_HEADER;
    private String fileNameHeader = DEFAULT_FILENAME_HEADER_KEY;
    private boolean annotateBaseName = DEFAULT_BASENAME_HEADER;
    private String baseNameHeader = DEFAULT_BASENAME_HEADER_KEY;
    private String deserializerType = DEFAULT_DESERIALIZER;
    private Context deserializerContext = new Context();
    private String deletePolicy = DEFAULT_DELETE_POLICY;
    private String inputCharset = DEFAULT_INPUT_CHARSET;
    private DecodeErrorPolicy decodeErrorPolicy = DecodeErrorPolicy.valueOf(DEFAULT_DECODE_ERROR_POLICY.toUpperCase());
    private ConsumeOrder consumeOrder = DEFAULT_CONSUME_ORDER;

    public Builder spoolDirectory(File directory) {
      this.spoolDirectory = directory;
      return this;
    }

    public Builder completedSuffix(String completedSuffix) {
      this.completedSuffix = completedSuffix;
      return this;
    }

    public Builder ignorePattern(String ignorePattern) {
      this.ignorePattern = ignorePattern;
      return this;
    }

    public Builder targetPattern(String targetPattern) {
      this.targetPattern = targetPattern;
      return this;
    }

    public Builder targetFilename(String targetFilename) {
      this.targetFilename = targetFilename;
      return this;
    }

    public Builder trackerDirPath(String trackerDirPath) {
      this.trackerDirPath = trackerDirPath;
      return this;
    }

    public Builder annotateFileName(Boolean annotateFileName) {
      this.annotateFileName = annotateFileName;
      return this;
    }

    public Builder fileNameHeader(String fileNameHeader) {
      this.fileNameHeader = fileNameHeader;
      return this;
    }

    public Builder annotateBaseName(Boolean annotateBaseName) {
      this.annotateBaseName = annotateBaseName;
      return this;
    }

    public Builder baseNameHeader(String baseNameHeader) {
      this.baseNameHeader = baseNameHeader;
      return this;
    }

    public Builder deserializerType(String deserializerType) {
      this.deserializerType = deserializerType;
      return this;
    }

    public Builder deserializerContext(Context deserializerContext) {
      this.deserializerContext = deserializerContext;
      return this;
    }

    public Builder deletePolicy(String deletePolicy) {
      this.deletePolicy = deletePolicy;
      return this;
    }

    public Builder inputCharset(String inputCharset) {
      this.inputCharset = inputCharset;
      return this;
    }

    public Builder decodeErrorPolicy(DecodeErrorPolicy decodeErrorPolicy) {
      this.decodeErrorPolicy = decodeErrorPolicy;
      return this;
    }

    public Builder consumeOrder(ConsumeOrder consumeOrder) {
      this.consumeOrder = consumeOrder;
      return this;
    }

    // Builds the reader; throws IOException if the directories cannot be
    // prepared or the permission canary test fails.
    public ReliableSpoolDirectoryTailFileEventReader build() throws IOException {
      return new ReliableSpoolDirectoryTailFileEventReader(spoolDirectory, completedSuffix,
          ignorePattern, targetPattern, targetFilename, trackerDirPath, annotateFileName, fileNameHeader,
          annotateBaseName, baseNameHeader, deserializerType,
          deserializerContext, deletePolicy, inputCharset, decodeErrorPolicy,
          consumeOrder);
    }
  }
}
| |
package org.apache.lucene.index;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.index.MultiTermsEnum.TermsEnumIndex;
import org.apache.lucene.index.MultiTermsEnum.TermsEnumWithSlice;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;
/**
* A wrapper for CompositeIndexReader providing access to DocValues.
*
* <p><b>NOTE</b>: for multi readers, you'll get better
* performance by gathering the sub readers using
* {@link IndexReader#getContext()} to get the
* atomic leaves and then operate per-AtomicReader,
* instead of using this class.
*
* <p><b>NOTE</b>: This is very costly.
*
* @lucene.experimental
* @lucene.internal
*/
public class MultiDocValues {
/** No instantiation — utility class exposing only static helpers. */
private MultiDocValues() {}
/** Returns a NumericDocValues for a reader's norms (potentially merging on-the-fly).
 * <p>
 * This is a slow way to access normalization values. Instead, access them per-segment
 * with {@link AtomicReader#getNormValues(String)}
 * </p>
 */
public static NumericDocValues getNormValues(final IndexReader r, final String field) throws IOException {
  final List<AtomicReaderContext> leaves = r.leaves();
  final int size = leaves.size();
  if (size == 0) {
    return null;
  } else if (size == 1) {
    // Single segment: no merging needed, delegate directly.
    return leaves.get(0).reader().getNormValues(field);
  }
  // Field must exist and be normed, otherwise there is nothing to return.
  FieldInfo fi = MultiFields.getMergedFieldInfos(r).fieldInfo(field);
  if (fi == null || fi.hasNorms() == false) {
    return null;
  }

  boolean anyReal = false;
  final NumericDocValues[] values = new NumericDocValues[size];
  // starts[i] is the docBase of leaf i; starts[size] = maxDoc sentinel for subIndex().
  final int[] starts = new int[size+1];
  for (int i = 0; i < size; i++) {
    AtomicReaderContext context = leaves.get(i);
    NumericDocValues v = context.reader().getNormValues(field);
    if (v == null) {
      // Leaf has no norms for this field: substitute zeros.
      v = DocValues.emptyNumeric();
    } else {
      anyReal = true;
    }
    values[i] = v;
    starts[i] = context.docBase;
  }
  starts[size] = r.maxDoc();

  // hasNorms() was true above, so at least one leaf must have real norms.
  assert anyReal;

  // View that routes each global docID to its leaf's norms.
  return new NumericDocValues() {
    @Override
    public long get(int docID) {
      int subIndex = ReaderUtil.subIndex(docID, starts);
      return values[subIndex].get(docID - starts[subIndex]);
    }
  };
}
/** Returns a NumericDocValues for a reader's docvalues (potentially merging on-the-fly)
 * <p>
 * This is a slow way to access numeric values. Instead, access them per-segment
 * with {@link AtomicReader#getNumericDocValues(String)}
 * </p>
 * */
public static NumericDocValues getNumericValues(final IndexReader r, final String field) throws IOException {
  final List<AtomicReaderContext> leaves = r.leaves();
  final int leafCount = leaves.size();
  if (leafCount == 0) {
    return null;
  }
  if (leafCount == 1) {
    // Single segment: delegate directly, no merging required.
    return leaves.get(0).reader().getNumericDocValues(field);
  }

  final NumericDocValues[] perLeaf = new NumericDocValues[leafCount];
  // docStarts[i] is leaf i's docBase; the final slot holds maxDoc as a sentinel.
  final int[] docStarts = new int[leafCount + 1];
  boolean foundAny = false;
  int leafOrd = 0;
  for (AtomicReaderContext context : leaves) {
    NumericDocValues leafValues = context.reader().getNumericDocValues(field);
    if (leafValues == null) {
      leafValues = DocValues.emptyNumeric();
    } else {
      foundAny = true;
    }
    perLeaf[leafOrd] = leafValues;
    docStarts[leafOrd] = context.docBase;
    leafOrd++;
  }
  docStarts[leafCount] = r.maxDoc();

  // No leaf had real values for the field: report absence.
  if (!foundAny) {
    return null;
  }
  // View that routes each global docID to the owning leaf's values.
  return new NumericDocValues() {
    @Override
    public long get(int docID) {
      final int leaf = ReaderUtil.subIndex(docID, docStarts);
      return perLeaf[leaf].get(docID - docStarts[leaf]);
    }
  };
}
/** Returns a Bits for a reader's docsWithField (potentially merging on-the-fly)
 * <p>
 * This is a slow way to access this bitset. Instead, access them per-segment
 * with {@link AtomicReader#getDocsWithField(String)}
 * </p>
 * */
public static Bits getDocsWithField(final IndexReader r, final String field) throws IOException {
  final List<AtomicReaderContext> leaves = r.leaves();
  final int size = leaves.size();
  if (size == 0) {
    return null;
  } else if (size == 1) {
    // Single segment: delegate directly.
    return leaves.get(0).reader().getDocsWithField(field);
  }

  // anyReal: at least one leaf has the field; anyMissing: at least one doc lacks it.
  boolean anyReal = false;
  boolean anyMissing = false;
  final Bits[] values = new Bits[size];
  // starts[i] is leaf i's docBase; starts[size] = maxDoc sentinel for subIndex().
  final int[] starts = new int[size+1];
  for (int i = 0; i < size; i++) {
    AtomicReaderContext context = leaves.get(i);
    Bits v = context.reader().getDocsWithField(field);
    if (v == null) {
      // Leaf has no such field: every doc there is missing it.
      v = new Bits.MatchNoBits(context.reader().maxDoc());
      anyMissing = true;
    } else {
      anyReal = true;
      if (v instanceof Bits.MatchAllBits == false) {
        // Leaf has the field, but not necessarily on every doc.
        anyMissing = true;
      }
    }
    values[i] = v;
    starts[i] = context.docBase;
  }
  starts[size] = r.maxDoc();

  if (!anyReal) {
    // Field absent everywhere.
    return null;
  } else if (!anyMissing) {
    // Every doc in every leaf has the field: cheap match-all.
    return new Bits.MatchAllBits(r.maxDoc());
  } else {
    // Mixed case: per-leaf lookup through a composite view.
    return new MultiBits(values, starts, false);
  }
}
/** Returns a BinaryDocValues for a reader's docvalues (potentially merging on-the-fly)
 * <p>
 * This is a slow way to access binary values. Instead, access them per-segment
 * with {@link AtomicReader#getBinaryDocValues(String)}
 * </p>
 */
public static BinaryDocValues getBinaryValues(final IndexReader r, final String field) throws IOException {
  final List<AtomicReaderContext> leaves = r.leaves();
  final int leafCount = leaves.size();
  if (leafCount == 0) {
    return null;
  }
  if (leafCount == 1) {
    // Single segment: delegate directly, no merging required.
    return leaves.get(0).reader().getBinaryDocValues(field);
  }

  final BinaryDocValues[] perLeaf = new BinaryDocValues[leafCount];
  // docStarts[i] is leaf i's docBase; the final slot holds maxDoc as a sentinel.
  final int[] docStarts = new int[leafCount + 1];
  boolean foundAny = false;
  int leafOrd = 0;
  for (AtomicReaderContext context : leaves) {
    BinaryDocValues leafValues = context.reader().getBinaryDocValues(field);
    if (leafValues == null) {
      leafValues = DocValues.emptyBinary();
    } else {
      foundAny = true;
    }
    perLeaf[leafOrd] = leafValues;
    docStarts[leafOrd] = context.docBase;
    leafOrd++;
  }
  docStarts[leafCount] = r.maxDoc();

  // No leaf had real values for the field: report absence.
  if (!foundAny) {
    return null;
  }
  // View that routes each global docID to the owning leaf's values.
  return new BinaryDocValues() {
    @Override
    public BytesRef get(int docID) {
      final int leaf = ReaderUtil.subIndex(docID, docStarts);
      return perLeaf[leaf].get(docID - docStarts[leaf]);
    }
  };
}
/** Returns a SortedNumericDocValues for a reader's docvalues (potentially merging on-the-fly)
 * <p>
 * This is a slow way to access sorted numeric values. Instead, access them per-segment
 * with {@link AtomicReader#getSortedNumericDocValues(String)}
 * </p>
 * */
public static SortedNumericDocValues getSortedNumericValues(final IndexReader r, final String field) throws IOException {
  final List<AtomicReaderContext> leaves = r.leaves();
  final int size = leaves.size();
  if (size == 0) {
    return null;
  } else if (size == 1) {
    // Single segment: delegate directly.
    return leaves.get(0).reader().getSortedNumericDocValues(field);
  }

  boolean anyReal = false;
  final SortedNumericDocValues[] values = new SortedNumericDocValues[size];
  // starts[i] is leaf i's docBase; starts[size] = maxDoc sentinel for subIndex().
  final int[] starts = new int[size+1];
  for (int i = 0; i < size; i++) {
    AtomicReaderContext context = leaves.get(i);
    SortedNumericDocValues v = context.reader().getSortedNumericDocValues(field);
    if (v == null) {
      // Leaf lacks the field: substitute an empty instance sized to the leaf.
      v = DocValues.emptySortedNumeric(context.reader().maxDoc());
    } else {
      anyReal = true;
    }
    values[i] = v;
    starts[i] = context.docBase;
  }
  starts[size] = r.maxDoc();

  if (!anyReal) {
    return null;
  } else {
    // NOTE: the returned instance is stateful — setDocument() selects the
    // current leaf, which valueAt()/count() then read. Not safe for
    // concurrent use by multiple threads.
    return new SortedNumericDocValues() {
      SortedNumericDocValues current;

      @Override
      public void setDocument(int doc) {
        int subIndex = ReaderUtil.subIndex(doc, starts);
        current = values[subIndex];
        current.setDocument(doc - starts[subIndex]);
      }

      @Override
      public long valueAt(int index) {
        return current.valueAt(index);
      }

      @Override
      public int count() {
        return current.count();
      }
    };
  }
}
/** Returns a SortedDocValues for a reader's docvalues (potentially doing extremely slow things).
* <p>
* This is an extremely slow way to access sorted values. Instead, access them per-segment
* with {@link AtomicReader#getSortedDocValues(String)}
* </p>
*/
public static SortedDocValues getSortedValues(final IndexReader r, final String field) throws IOException {
final List<AtomicReaderContext> leaves = r.leaves();
final int size = leaves.size();
if (size == 0) {
return null;
} else if (size == 1) {
return leaves.get(0).reader().getSortedDocValues(field);
}
boolean anyReal = false;
final SortedDocValues[] values = new SortedDocValues[size];
final int[] starts = new int[size+1];
for (int i = 0; i < size; i++) {
AtomicReaderContext context = leaves.get(i);
SortedDocValues v = context.reader().getSortedDocValues(field);
if (v == null) {
v = DocValues.emptySorted();
} else {
anyReal = true;
}
values[i] = v;
starts[i] = context.docBase;
}
starts[size] = r.maxDoc();
if (!anyReal) {
return null;
} else {
OrdinalMap mapping = OrdinalMap.build(r.getCoreCacheKey(), values, PackedInts.DEFAULT);
return new MultiSortedDocValues(values, starts, mapping);
}
}
/** Returns a SortedSetDocValues for a reader's docvalues (potentially doing extremely slow things).
* <p>
* This is an extremely slow way to access sorted values. Instead, access them per-segment
* with {@link AtomicReader#getSortedSetDocValues(String)}
* </p>
*/
public static SortedSetDocValues getSortedSetValues(final IndexReader r, final String field) throws IOException {
final List<AtomicReaderContext> leaves = r.leaves();
final int size = leaves.size();
if (size == 0) {
return null;
} else if (size == 1) {
return leaves.get(0).reader().getSortedSetDocValues(field);
}
boolean anyReal = false;
final SortedSetDocValues[] values = new SortedSetDocValues[size];
final int[] starts = new int[size+1];
for (int i = 0; i < size; i++) {
AtomicReaderContext context = leaves.get(i);
SortedSetDocValues v = context.reader().getSortedSetDocValues(field);
if (v == null) {
v = DocValues.emptySortedSet();
} else {
anyReal = true;
}
values[i] = v;
starts[i] = context.docBase;
}
starts[size] = r.maxDoc();
if (!anyReal) {
return null;
} else {
OrdinalMap mapping = OrdinalMap.build(r.getCoreCacheKey(), values, PackedInts.DEFAULT);
return new MultiSortedSetDocValues(values, starts, mapping);
}
}
  /** maps per-segment ordinals to/from global ordinal space */
  // TODO: we could also have a utility method to merge Terms[] and use size() as a weight when we need it
  // TODO: use more efficient packed ints structures?
  // TODO: pull this out? its pretty generic (maps between N ord()-enabled TermsEnums)
  public static class OrdinalMap implements Accountable {
    /**
     * Maps between the "old" segment numbering (the order subs were passed in)
     * and a "new" numbering sorted by descending weight, so the heaviest segment
     * is visited first when building the ordinal mappings.
     */
    private static class SegmentMap implements Accountable {
      private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SegmentMap.class);
      /** Build a map from an index into a sorted view of `weights` to an index into `weights`. */
      private static int[] map(final long[] weights) {
        final int[] newToOld = new int[weights.length];
        for (int i = 0; i < weights.length; ++i) {
          newToOld[i] = i;
        }
        // Stable in-place merge sort of the index array by descending weight.
        new InPlaceMergeSorter() {
          @Override
          protected void swap(int i, int j) {
            final int tmp = newToOld[i];
            newToOld[i] = newToOld[j];
            newToOld[j] = tmp;
          }
          @Override
          protected int compare(int i, int j) {
            // j first since we actually want higher weights first
            return Long.compare(weights[newToOld[j]], weights[newToOld[i]]);
          }
        }.sort(0, weights.length);
        return newToOld;
      }
      /** Inverse the map. */
      private static int[] inverse(int[] map) {
        final int[] inverse = new int[map.length];
        for (int i = 0; i < map.length; ++i) {
          inverse[map[i]] = i;
        }
        return inverse;
      }
      private final int[] newToOld, oldToNew;
      SegmentMap(long[] weights) {
        newToOld = map(weights);
        oldToNew = inverse(newToOld);
        assert Arrays.equals(newToOld, inverse(oldToNew));
      }
      int newToOld(int segment) {
        return newToOld[segment];
      }
      int oldToNew(int segment) {
        return oldToNew[segment];
      }
      @Override
      public long ramBytesUsed() {
        return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(newToOld) + RamUsageEstimator.sizeOf(oldToNew);
      }
    }
    /**
     * Create an ordinal map that uses the number of unique values of each
     * {@link SortedDocValues} instance as a weight.
     * @see #build(Object, TermsEnum[], long[], float)
     */
    public static OrdinalMap build(Object owner, SortedDocValues[] values, float acceptableOverheadRatio) throws IOException {
      final TermsEnum[] subs = new TermsEnum[values.length];
      final long[] weights = new long[values.length];
      for (int i = 0; i < values.length; ++i) {
        subs[i] = values[i].termsEnum();
        weights[i] = values[i].getValueCount();
      }
      return build(owner, subs, weights, acceptableOverheadRatio);
    }
    /**
     * Create an ordinal map that uses the number of unique values of each
     * {@link SortedSetDocValues} instance as a weight.
     * @see #build(Object, TermsEnum[], long[], float)
     */
    public static OrdinalMap build(Object owner, SortedSetDocValues[] values, float acceptableOverheadRatio) throws IOException {
      final TermsEnum[] subs = new TermsEnum[values.length];
      final long[] weights = new long[values.length];
      for (int i = 0; i < values.length; ++i) {
        subs[i] = values[i].termsEnum();
        weights[i] = values[i].getValueCount();
      }
      return build(owner, subs, weights, acceptableOverheadRatio);
    }
    /**
     * Creates an ordinal map that allows mapping ords to/from a merged
     * space from <code>subs</code>.
     * @param owner a cache key
     * @param subs TermsEnums that support {@link TermsEnum#ord()}. They need
     *             not be dense (e.g. can be FilteredTermsEnums).
     * @param weights a weight for each sub. This is ideally correlated with
     *             the number of unique terms that each sub introduces compared
     *             to the other subs
     * @throws IOException if an I/O error occurred.
     */
    public static OrdinalMap build(Object owner, TermsEnum subs[], long[] weights, float acceptableOverheadRatio) throws IOException {
      if (subs.length != weights.length) {
        throw new IllegalArgumentException("subs and weights must have the same length");
      }
      // enums are not sorted, so let's sort to save memory
      final SegmentMap segmentMap = new SegmentMap(weights);
      return new OrdinalMap(owner, subs, segmentMap, acceptableOverheadRatio);
    }
    private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(OrdinalMap.class);
    // cache key of whoever asked for this awful thing
    final Object owner;
    // globalOrd -> (globalOrd - segmentOrd) where segmentOrd is the ordinal in the first segment that contains this term
    final PackedLongValues globalOrdDeltas;
    // globalOrd -> first segment container
    final PackedLongValues firstSegments;
    // for every segment, segmentOrd -> globalOrd
    final LongValues segmentToGlobalOrds[];
    // the map from/to segment ids
    final SegmentMap segmentMap;
    // ram usage
    final long ramBytesUsed;
    OrdinalMap(Object owner, TermsEnum subs[], SegmentMap segmentMap, float acceptableOverheadRatio) throws IOException {
      // create the ordinal mappings by pulling a termsenum over each sub's
      // unique terms, and walking a multitermsenum over those
      this.owner = owner;
      this.segmentMap = segmentMap;
      // even though we accept an overhead ratio, we keep these ones with COMPACT
      // since they are only used to resolve values given a global ord, which is
      // slow anyway
      PackedLongValues.Builder globalOrdDeltas = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
      PackedLongValues.Builder firstSegments = PackedLongValues.packedBuilder(PackedInts.COMPACT);
      final PackedLongValues.Builder[] ordDeltas = new PackedLongValues.Builder[subs.length];
      for (int i = 0; i < ordDeltas.length; i++) {
        ordDeltas[i] = PackedLongValues.monotonicBuilder(acceptableOverheadRatio);
      }
      // ordDeltaBits[i] accumulates (via OR) every delta written for segment i, so we
      // can later tell how many bits a plain packed representation would need.
      long[] ordDeltaBits = new long[subs.length];
      // segmentOrds[i]: next per-segment ordinal of segment i still awaiting a global mapping.
      long segmentOrds[] = new long[subs.length];
      ReaderSlice slices[] = new ReaderSlice[subs.length];
      TermsEnumIndex indexes[] = new TermsEnumIndex[slices.length];
      for (int i = 0; i < slices.length; i++) {
        slices[i] = new ReaderSlice(0, 0, i);
        indexes[i] = new TermsEnumIndex(subs[segmentMap.newToOld(i)], i);
      }
      // Walk the union of all terms in sorted order; each step yields one global ordinal.
      MultiTermsEnum mte = new MultiTermsEnum(slices);
      mte.reset(indexes);
      long globalOrd = 0;
      while (mte.next() != null) {
        TermsEnumWithSlice matches[] = mte.getMatchArray();
        int firstSegmentIndex = Integer.MAX_VALUE;
        long globalOrdDelta = Long.MAX_VALUE;
        for (int i = 0; i < mte.getMatchCount(); i++) {
          int segmentIndex = matches[i].index;
          long segmentOrd = matches[i].terms.ord();
          long delta = globalOrd - segmentOrd;
          // We compute the least segment where the term occurs. In case the
          // first segment contains most (or better all) values, this will
          // help save significant memory
          if (segmentIndex < firstSegmentIndex) {
            firstSegmentIndex = segmentIndex;
            globalOrdDelta = delta;
          }
          // for each per-segment ord, map it back to the global term.
          while (segmentOrds[segmentIndex] <= segmentOrd) {
            ordDeltaBits[segmentIndex] |= delta;
            ordDeltas[segmentIndex].add(delta);
            segmentOrds[segmentIndex]++;
          }
        }
        // for each unique term, just mark the first segment index/delta where it occurs
        assert firstSegmentIndex < segmentOrds.length;
        firstSegments.add(firstSegmentIndex);
        globalOrdDeltas.add(globalOrdDelta);
        globalOrd++;
      }
      this.firstSegments = firstSegments.build();
      this.globalOrdDeltas = globalOrdDeltas.build();
      // ordDeltas is typically the bottleneck, so let's see what we can do to make it faster
      segmentToGlobalOrds = new LongValues[subs.length];
      long ramBytesUsed = BASE_RAM_BYTES_USED + this.globalOrdDeltas.ramBytesUsed()
          + this.firstSegments.ramBytesUsed() + RamUsageEstimator.shallowSizeOf(segmentToGlobalOrds)
          + segmentMap.ramBytesUsed();
      for (int i = 0; i < ordDeltas.length; ++i) {
        final PackedLongValues deltas = ordDeltas[i].build();
        if (ordDeltaBits[i] == 0L) {
          // segment ords perfectly match global ordinals
          // likely in case of low cardinalities and large segments
          segmentToGlobalOrds[i] = LongValues.IDENTITY;
        } else {
          // Choose between plain packed ints and the monotonic encoding, whichever
          // is smaller within the acceptable overhead ratio.
          final int bitsRequired = ordDeltaBits[i] < 0 ? 64 : PackedInts.bitsRequired(ordDeltaBits[i]);
          final long monotonicBits = deltas.ramBytesUsed() * 8;
          final long packedBits = bitsRequired * deltas.size();
          if (deltas.size() <= Integer.MAX_VALUE
              && packedBits <= monotonicBits * (1 + acceptableOverheadRatio)) {
            // monotonic compression mostly adds overhead, let's keep the mapping in plain packed ints
            final int size = (int) deltas.size();
            final PackedInts.Mutable newDeltas = PackedInts.getMutable(size, bitsRequired, acceptableOverheadRatio);
            final PackedLongValues.Iterator it = deltas.iterator();
            for (int ord = 0; ord < size; ++ord) {
              newDeltas.set(ord, it.next());
            }
            assert !it.hasNext();
            segmentToGlobalOrds[i] = new LongValues() {
              @Override
              public long get(long ord) {
                return ord + newDeltas.get((int) ord);
              }
            };
            ramBytesUsed += newDeltas.ramBytesUsed();
          } else {
            segmentToGlobalOrds[i] = new LongValues() {
              @Override
              public long get(long ord) {
                return ord + deltas.get(ord);
              }
            };
            ramBytesUsed += deltas.ramBytesUsed();
          }
          ramBytesUsed += RamUsageEstimator.shallowSizeOf(segmentToGlobalOrds[i]);
        }
      }
      this.ramBytesUsed = ramBytesUsed;
    }
    /**
     * Given a segment number, return a {@link LongValues} instance that maps
     * segment ordinals to global ordinals.
     */
    public LongValues getGlobalOrds(int segmentIndex) {
      return segmentToGlobalOrds[segmentMap.oldToNew(segmentIndex)];
    }
    /**
     * Given a global ordinal, returns the corresponding ordinal within the first
     * segment that contains the term (the segment returned by
     * {@link #getFirstSegmentNumber}).
     */
    public long getFirstSegmentOrd(long globalOrd) {
      return globalOrd - globalOrdDeltas.get(globalOrd);
    }
    /**
     * Given a global ordinal, returns the index of the first
     * segment that contains this term.
     */
    public int getFirstSegmentNumber(long globalOrd) {
      return segmentMap.newToOld((int) firstSegments.get(globalOrd));
    }
    /**
     * Returns the total number of unique terms in global ord space.
     */
    public long getValueCount() {
      return globalOrdDeltas.size();
    }
    @Override
    public long ramBytesUsed() {
      return ramBytesUsed;
    }
  }
/**
* Implements SortedDocValues over n subs, using an OrdinalMap
* @lucene.internal
*/
public static class MultiSortedDocValues extends SortedDocValues {
/** docbase for each leaf: parallel with {@link #values} */
public final int docStarts[];
/** leaf values */
public final SortedDocValues values[];
/** ordinal map mapping ords from <code>values</code> to global ord space */
public final OrdinalMap mapping;
/** Creates a new MultiSortedDocValues over <code>values</code> */
MultiSortedDocValues(SortedDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException {
assert docStarts.length == values.length + 1;
this.values = values;
this.docStarts = docStarts;
this.mapping = mapping;
}
@Override
public int getOrd(int docID) {
int subIndex = ReaderUtil.subIndex(docID, docStarts);
int segmentOrd = values[subIndex].getOrd(docID - docStarts[subIndex]);
return segmentOrd == -1 ? segmentOrd : (int) mapping.getGlobalOrds(subIndex).get(segmentOrd);
}
@Override
public BytesRef lookupOrd(int ord) {
int subIndex = mapping.getFirstSegmentNumber(ord);
int segmentOrd = (int) mapping.getFirstSegmentOrd(ord);
return values[subIndex].lookupOrd(segmentOrd);
}
@Override
public int getValueCount() {
return (int) mapping.getValueCount();
}
}
/**
* Implements MultiSortedSetDocValues over n subs, using an OrdinalMap
* @lucene.internal
*/
public static class MultiSortedSetDocValues extends SortedSetDocValues {
/** docbase for each leaf: parallel with {@link #values} */
public final int docStarts[];
/** leaf values */
public final SortedSetDocValues values[];
/** ordinal map mapping ords from <code>values</code> to global ord space */
public final OrdinalMap mapping;
int currentSubIndex;
LongValues currentGlobalOrds;
/** Creates a new MultiSortedSetDocValues over <code>values</code> */
MultiSortedSetDocValues(SortedSetDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException {
assert docStarts.length == values.length + 1;
this.values = values;
this.docStarts = docStarts;
this.mapping = mapping;
}
@Override
public long nextOrd() {
long segmentOrd = values[currentSubIndex].nextOrd();
if (segmentOrd == NO_MORE_ORDS) {
return segmentOrd;
} else {
return currentGlobalOrds.get(segmentOrd);
}
}
@Override
public void setDocument(int docID) {
currentSubIndex = ReaderUtil.subIndex(docID, docStarts);
currentGlobalOrds = mapping.getGlobalOrds(currentSubIndex);
values[currentSubIndex].setDocument(docID - docStarts[currentSubIndex]);
}
@Override
public BytesRef lookupOrd(long ord) {
int subIndex = mapping.getFirstSegmentNumber(ord);
long segmentOrd = mapping.getFirstSegmentOrd(ord);
return values[subIndex].lookupOrd(segmentOrd);
}
@Override
public long getValueCount() {
return mapping.getValueCount();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.stream.socket;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.CacheEvent;
import org.apache.ignite.internal.util.GridConcurrentHashSet;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.marshaller.Marshaller;
import org.apache.ignite.marshaller.jdk.JdkMarshaller;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.stream.StreamTupleExtractor;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.events.EventType.EVT_CACHE_OBJECT_PUT;
/**
 * Tests {@link SocketStreamer}.
 */
public class SocketStreamerSelfTest extends GridCommonAbstractTest {
    /** IP finder shared by all grids so they discover each other locally. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Grid count. */
    private final static int GRID_CNT = 3;

    /** Number of tuples streamed by each test. */
    private static final int CNT = 500;

    /** Delimiter separating messages in the delimiter-based tests. */
    private static final byte[] DELIM = new byte[] {0, 1, 2, 3, 4, 5, 4, 3, 2, 1, 0};

    /** Port the socket streamer listens on; picked in {@link #beforeTestsStarted()}. */
    private static int port;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        CacheConfiguration ccfg = defaultCacheConfiguration();

        cfg.setCacheConfiguration(ccfg);

        TcpDiscoverySpi discoSpi = new TcpDiscoverySpi();

        discoSpi.setIpFinder(IP_FINDER);

        cfg.setDiscoverySpi(discoSpi);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        startGridsMultiThreaded(GRID_CNT);

        // Grab a free ephemeral port by binding and immediately closing a server socket.
        // NOTE(review): another process could take the port before the streamer re-binds
        // it in test(); a small race window exists here.
        try (ServerSocket sock = new ServerSocket(0)) {
            port = sock.getLocalPort();
        }
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }

    /**
     * Streams size-prefixed, JDK-marshalled tuples through the default converter.
     *
     * @throws Exception If failed.
     */
    public void testSizeBasedDefaultConverter() throws Exception {
        test(null, null, new Runnable() {
            @Override public void run() {
                try (Socket sock = new Socket(InetAddress.getLocalHost(), port);
                    OutputStream os = new BufferedOutputStream(sock.getOutputStream())) {
                    Marshaller marsh = new JdkMarshaller();

                    for (int i = 0; i < CNT; i++) {
                        byte[] msg = marsh.marshal(new Tuple(i));

                        // 4-byte big-endian length prefix followed by the marshalled payload.
                        os.write(msg.length >>> 24);
                        os.write(msg.length >>> 16);
                        os.write(msg.length >>> 8);
                        os.write(msg.length);

                        os.write(msg);
                    }
                }
                catch (IOException | IgniteCheckedException e) {
                    throw new IgniteException(e);
                }
            }
        });
    }

    /**
     * Streams size-prefixed raw big-endian ints decoded by a custom converter.
     *
     * @throws Exception If failed.
     */
    public void testSizeBasedCustomConverter() throws Exception {
        // Decodes a 4-byte big-endian int into a Tuple.
        SocketMessageConverter<Tuple> converter = new SocketMessageConverter<Tuple>() {
            @Override public Tuple convert(byte[] msg) {
                int i = (msg[0] & 0xFF) << 24;
                i |= (msg[1] & 0xFF) << 16;
                i |= (msg[2] & 0xFF) << 8;
                i |= msg[3] & 0xFF;

                return new Tuple(i);
            }
        };

        test(converter, null, new Runnable() {
            @Override public void run() {
                try(Socket sock = new Socket(InetAddress.getLocalHost(), port);
                    OutputStream os = new BufferedOutputStream(sock.getOutputStream())) {
                    for (int i = 0; i < CNT; i++) {
                        // Length prefix: each message is exactly 4 bytes.
                        os.write(0);
                        os.write(0);
                        os.write(0);
                        os.write(4);

                        // Big-endian payload.
                        os.write(i >>> 24);
                        os.write(i >>> 16);
                        os.write(i >>> 8);
                        os.write(i);
                    }
                }
                catch (IOException e) {
                    throw new IgniteException(e);
                }
            }
        });
    }

    /**
     * Streams delimiter-separated, JDK-marshalled tuples through the default converter.
     *
     * @throws Exception If failed.
     */
    public void testDelimiterBasedDefaultConverter() throws Exception {
        test(null, DELIM, new Runnable() {
            @Override public void run() {
                try(Socket sock = new Socket(InetAddress.getLocalHost(), port);
                    OutputStream os = new BufferedOutputStream(sock.getOutputStream())) {
                    Marshaller marsh = new JdkMarshaller();

                    for (int i = 0; i < CNT; i++) {
                        byte[] msg = marsh.marshal(new Tuple(i));

                        // Payload followed by the delimiter.
                        os.write(msg);
                        os.write(DELIM);
                    }
                }
                catch (IOException | IgniteCheckedException e) {
                    throw new IgniteException(e);
                }
            }
        });
    }

    /**
     * Streams delimiter-separated raw big-endian ints decoded by a custom converter.
     *
     * @throws Exception If failed.
     */
    public void testDelimiterBasedCustomConverter() throws Exception {
        // Decodes a 4-byte big-endian int into a Tuple.
        SocketMessageConverter<Tuple> converter = new SocketMessageConverter<Tuple>() {
            @Override public Tuple convert(byte[] msg) {
                int i = (msg[0] & 0xFF) << 24;
                i |= (msg[1] & 0xFF) << 16;
                i |= (msg[2] & 0xFF) << 8;
                i |= msg[3] & 0xFF;

                return new Tuple(i);
            }
        };

        test(converter, DELIM, new Runnable() {
            @Override public void run() {
                try(Socket sock = new Socket(InetAddress.getLocalHost(), port);
                    OutputStream os = new BufferedOutputStream(sock.getOutputStream())) {
                    for (int i = 0; i < CNT; i++) {
                        // Big-endian payload followed by the delimiter.
                        os.write(i >>> 24);
                        os.write(i >>> 16);
                        os.write(i >>> 8);
                        os.write(i);

                        os.write(DELIM);
                    }
                }
                catch (IOException e) {
                    throw new IgniteException(e);
                }
            }
        });
    }

    /**
     * Starts a socket streamer on the shared port, runs the supplied client and
     * verifies that exactly {@code CNT} entries with the expected values landed
     * in the cache.
     *
     * @param converter Converter; {@code null} to use the streamer's default one.
     * @param delim Message delimiter; {@code null} to use size-based framing.
     * @param r Runnable producing the client-side byte stream.
     * @throws Exception If failed.
     */
    private void test(@Nullable SocketMessageConverter<Tuple> converter,
        @Nullable byte[] delim,
        Runnable r) throws Exception {
        SocketStreamer<Tuple, Integer, String> sockStmr = null;

        Ignite ignite = grid(0);

        IgniteCache<Integer, String> cache = ignite.cache(null);

        cache.clear();

        try (IgniteDataStreamer<Integer, String> stmr = ignite.dataStreamer(null)) {
            stmr.allowOverwrite(true);
            stmr.autoFlushFrequency(10);

            sockStmr = new SocketStreamer<>();

            sockStmr.setIgnite(ignite);

            sockStmr.setStreamer(stmr);

            sockStmr.setPort(port);

            sockStmr.setDelimiter(delim);

            sockStmr.setTupleExtractor(new StreamTupleExtractor<Tuple, Integer, String>() {
                @Override public Map.Entry<Integer, String> extract(Tuple msg) {
                    return new IgniteBiTuple<>(msg.key, msg.val);
                }
            });

            if (converter != null)
                sockStmr.setConverter(converter);

            final CountDownLatch latch = new CountDownLatch(CNT);

            final GridConcurrentHashSet<CacheEvent> evts = new GridConcurrentHashSet<>();

            // Count one put event per streamed tuple across the whole cluster.
            IgniteBiPredicate<UUID, CacheEvent> locLsnr = new IgniteBiPredicate<UUID, CacheEvent>() {
                @Override public boolean apply(UUID uuid, CacheEvent evt) {
                    evts.add(evt);

                    latch.countDown();

                    return true;
                }
            };

            ignite.events(ignite.cluster().forCacheNodes(null)).remoteListen(locLsnr, null, EVT_CACHE_OBJECT_PUT);

            sockStmr.start();

            r.run();

            // NOTE(review): no timeout — if fewer than CNT put events arrive the test
            // hangs indefinitely; consider latch.await(timeout, unit) with a failure check.
            latch.await();

            for (int i = 0; i < CNT; i++) {
                Object val = cache.get(i);
                String exp = Integer.toString(i);

                if (!exp.equals(val))
                    log.error("Unexpected cache value [key=" + i +
                        ", exp=" + exp +
                        ", val=" + val +
                        ", evts=" + evts + ']');

                assertEquals(exp, val);
            }

            assertEquals(CNT, cache.size(CachePeekMode.PRIMARY));
        }
        finally {
            if (sockStmr != null)
                sockStmr.stop();
        }
    }

    /**
     * Tuple.
     */
    private static class Tuple implements Serializable {
        /** Serial version uid. */
        private static final long serialVersionUID = 0L;

        /** Key. */
        private final int key;

        /** Value. */
        private final String val;

        /**
         * @param key Key; the value is derived as its decimal string form.
         */
        Tuple(int key) {
            this.key = key;
            this.val = Integer.toString(key);
        }
    }
}
| |
package com.github.dockerjava.core.dockerfile;
import com.github.dockerjava.api.DockerClientException;
import com.github.dockerjava.core.CompressArchiveUtil;
import com.github.dockerjava.core.GoLangFileMatch;
import com.github.dockerjava.core.GoLangFileMatchException;
import com.github.dockerjava.core.GoLangMatchFileFilter;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.filefilter.TrueFileFilter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
/**
* Parse a Dockerfile.
*/
public class Dockerfile {
public final File dockerFile;
public Dockerfile(File dockerFile) {
if (!dockerFile.exists()) {
throw new IllegalStateException(
String.format("Dockerfile %s does not exist", dockerFile.getAbsolutePath()));
}
if (!dockerFile.isFile()) {
throw new IllegalStateException(
String.format("Dockerfile %s is not a file", dockerFile.getAbsolutePath()));
}
this.dockerFile = dockerFile;
}
private static class LineTransformer
implements Function<String, Optional<? extends DockerfileStatement>> {
private int line = 0;
@Override
public Optional<? extends DockerfileStatement> apply(String input) {
try {
line++;
return DockerfileStatement.createFromLine(input);
} catch (Exception ex) {
throw new DockerClientException("Error on dockerfile line " + line);
}
}
}
public Iterable<DockerfileStatement> getStatements() throws IOException {
Collection<String> dockerFileContent = FileUtils.readLines(dockerFile);
if (dockerFileContent.size() <= 0) {
throw new DockerClientException(String.format(
"Dockerfile %s is empty", dockerFile));
}
Collection<Optional<? extends DockerfileStatement>> optionals = Collections2
.transform(dockerFileContent, new LineTransformer());
return Optional.presentInstances(optionals);
}
public List<String> getIgnores() throws IOException {
List<String> ignores = new ArrayList<String>();
File dockerIgnoreFile = new File(getDockerFolder(), ".dockerignore");
if (dockerIgnoreFile.exists()) {
int lineNumber = 0;
List<String> dockerIgnoreFileContent = FileUtils.readLines(dockerIgnoreFile);
for (String pattern : dockerIgnoreFileContent) {
lineNumber++;
pattern = pattern.trim();
if (pattern.isEmpty()) {
continue; // skip empty lines
}
pattern = FilenameUtils.normalize(pattern);
try {
// validate pattern and make sure we aren't excluding Dockerfile
if (GoLangFileMatch.match(pattern, "Dockerfile")) {
throw new DockerClientException(
String.format(
"Dockerfile is excluded by pattern '%s' on line %s in .dockerignore file",
pattern, lineNumber));
}
ignores.add(pattern);
} catch (GoLangFileMatchException e) {
throw new DockerClientException(String.format(
"Invalid pattern '%s' on line %s in .dockerignore file", pattern, lineNumber));
}
}
}
return ignores;
}
public ScannedResult parse() throws IOException {
return new ScannedResult();
}
public File getDockerFolder() {
return dockerFile.getParentFile();
}
/**
* Result of scanning / parsing a docker file.
*/
public class ScannedResult {
final List<String> ignores;
final Map<String, String> environmentMap = new HashMap<String, String>();
final List<File> filesToAdd = new ArrayList<File>();
public InputStream buildDockerFolderTar() {
return buildDockerFolderTar(getDockerFolder());
}
public InputStream buildDockerFolderTar(File directory) {
// ARCHIVE TAR
File dockerFolderTar = null;
try {
String archiveNameWithOutExtension = UUID.randomUUID().toString();
dockerFolderTar = CompressArchiveUtil.archiveTARFiles(directory,
filesToAdd,
archiveNameWithOutExtension);
return FileUtils.openInputStream(dockerFolderTar);
} catch (IOException ex) {
FileUtils.deleteQuietly(dockerFolderTar);
throw new DockerClientException(
"Error occurred while preparing Docker context folder.", ex);
}
}
@Override
public String toString() {
return Objects.toStringHelper(this)
.add("ignores", ignores)
.add("environmentMap", environmentMap)
.add("filesToAdd", filesToAdd)
.toString();
}
public ScannedResult() throws IOException {
ignores = getIgnores();
filesToAdd.add(dockerFile);
for (DockerfileStatement statement : getStatements()) {
if (statement instanceof DockerfileStatement.Env) {
processEnvStatement((DockerfileStatement.Env) statement);
} else if (statement instanceof DockerfileStatement.Add) {
processAddStatement((DockerfileStatement.Add) statement);
}
}
}
private void processAddStatement(DockerfileStatement.Add add) throws IOException {
add = add.transform(environmentMap);
if (add.isFileResource()) {
File dockerFolder = getDockerFolder();
String resource = add.source;
File src = new File(resource);
if (!src.isAbsolute()) {
src = new File(dockerFolder, resource)
.getCanonicalFile();
} else {
throw new DockerClientException(String.format(
"Source file %s must be relative to %s",
src, dockerFolder));
}
// if (!src.exists()) {
// throw new DockerClientException(String.format(
// "Source file %s doesn't exist", src));
// }
if (src.isDirectory()) {
Collection<File> files = FileUtils.listFiles(src,
new GoLangMatchFileFilter(src, ignores),
TrueFileFilter.INSTANCE);
filesToAdd.addAll(files);
} else if (!src.exists()) {
filesToAdd.addAll(resolveWildcards(src, ignores));
} else if (!GoLangFileMatch.match(ignores,
CompressArchiveUtil.relativize(dockerFolder,
src))) {
filesToAdd.add(src);
} else {
throw new DockerClientException(
String.format(
"Source file %s is excluded by .dockerignore file",
src));
}
}
}
private Collection<File> resolveWildcards(File file, List<String> ignores) {
List<File> filesToAdd = new ArrayList<File>();
File parent = file.getParentFile();
if (parent != null) {
if (parent.isDirectory()) {
Collection<File> files = FileUtils.listFiles(parent,
new GoLangMatchFileFilter(parent, ignores),
TrueFileFilter.INSTANCE);
filesToAdd.addAll(files);
} else {
filesToAdd.addAll(resolveWildcards(parent, ignores));
}
} else {
throw new DockerClientException(String.format(
"Source file %s doesn't exist", file));
}
return filesToAdd;
}
private void processEnvStatement(DockerfileStatement.Env env) {
environmentMap.put(env.variable, env.value);
}
}
}
| |
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.recyclerview.selection;
import static androidx.core.util.Preconditions.checkArgument;
import static androidx.core.util.Preconditions.checkState;
import static androidx.recyclerview.selection.Shared.DEBUG;
import static androidx.recyclerview.selection.Shared.VERBOSE;
import android.graphics.Point;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import androidx.core.view.ViewCompat;
import androidx.recyclerview.widget.RecyclerView;
/**
* Provides auto-scrolling upon request when user's interaction with the application
* introduces a natural intent to scroll. Used by BandSelectionHelper and GestureSelectionHelper,
* to provide auto scrolling when user is performing selection operations.
*/
final class ViewAutoScroller extends AutoScroller {
private static final String TAG = "ViewAutoScroller";
// ratio used to calculate the top/bottom hotspot region; used with view height
private static final float DEFAULT_SCROLL_THRESHOLD_RATIO = 0.125f;
private static final int MAX_SCROLL_STEP = 70;
private final float mScrollThresholdRatio;
private final ScrollHost mHost;
private final Runnable mRunner;
private @Nullable Point mOrigin;
private @Nullable Point mLastLocation;
private boolean mPassedInitialMotionThreshold;
    /** Creates a scroller with the default top/bottom hotspot threshold ratio. */
    ViewAutoScroller(@NonNull ScrollHost scrollHost) {
        this(scrollHost, DEFAULT_SCROLL_THRESHOLD_RATIO);
    }
@VisibleForTesting
ViewAutoScroller(@NonNull ScrollHost scrollHost, float scrollThresholdRatio) {
checkArgument(scrollHost != null);
mHost = scrollHost;
mScrollThresholdRatio = scrollThresholdRatio;
mRunner = new Runnable() {
@Override
public void run() {
runScroll();
}
};
}
@Override
public void reset() {
mHost.removeCallback(mRunner);
mOrigin = null;
mLastLocation = null;
mPassedInitialMotionThreshold = false;
}
@Override
public void scroll(@NonNull Point location) {
mLastLocation = location;
// See #aboveMotionThreshold for details on how we track initial location.
if (mOrigin == null) {
mOrigin = location;
if (VERBOSE) Log.v(TAG, "Origin @ " + mOrigin);
}
if (VERBOSE) Log.v(TAG, "Current location @ " + mLastLocation);
mHost.runAtNextFrame(mRunner);
}
/**
* Attempts to smooth-scroll the view at the given UI frame. Application should be
* responsible to do any clean up (such as unsubscribing scrollListeners) after the run has
* finished, and re-run this method on the next UI frame if applicable.
*/
private void runScroll() {
if (DEBUG) checkState(mLastLocation != null);
if (VERBOSE) Log.v(TAG, "Running in background using event location @ " + mLastLocation);
// Compute the number of pixels the pointer's y-coordinate is past the view.
// Negative values mean the pointer is at or before the top of the view, and
// positive values mean that the pointer is at or after the bottom of the view. Note
// that top/bottom threshold is added here so that the view still scrolls when the
// pointer are in these buffer pixels.
int pixelsPastView = 0;
final int verticalThreshold = (int) (mHost.getViewHeight()
* mScrollThresholdRatio);
if (mLastLocation.y <= verticalThreshold) {
pixelsPastView = mLastLocation.y - verticalThreshold;
} else if (mLastLocation.y >= mHost.getViewHeight()
- verticalThreshold) {
pixelsPastView = mLastLocation.y - mHost.getViewHeight()
+ verticalThreshold;
}
if (pixelsPastView == 0) {
// If the operation that started the scrolling is no longer inactive, or if it is active
// but not at the edge of the view, no scrolling is necessary.
return;
}
// We're in one of the endzones. Now determine if there's enough of a difference
// from the orgin to take any action. Basically if a user has somehow initiated
// selection, but is hovering at or near their initial contact point, we don't
// scroll. This avoids a situation where the user initiates selection in an "endzone"
// only to have scrolling start automatically.
if (!mPassedInitialMotionThreshold && !aboveMotionThreshold(mLastLocation)) {
if (VERBOSE) Log.v(TAG, "Ignoring event below motion threshold.");
return;
}
mPassedInitialMotionThreshold = true;
if (pixelsPastView > verticalThreshold) {
pixelsPastView = verticalThreshold;
}
// Compute the number of pixels to scroll, and scroll that many pixels.
final int numPixels = computeScrollDistance(pixelsPastView);
mHost.scrollBy(numPixels);
// Replace any existing scheduled jobs with the latest and greatest..
mHost.removeCallback(mRunner);
mHost.runAtNextFrame(mRunner);
}
private boolean aboveMotionThreshold(@NonNull Point location) {
// We reuse the scroll threshold to calculate a much smaller area
// in which we ignore motion initially.
int motionThreshold =
(int) ((mHost.getViewHeight() * mScrollThresholdRatio)
* (mScrollThresholdRatio * 2));
return Math.abs(mOrigin.y - location.y) >= motionThreshold;
}
/**
* Computes the number of pixels to scroll based on how far the pointer is past the end
* of the region. Roughly based on ItemTouchHelper's algorithm for computing the number of
* pixels to scroll when an item is dragged to the end of a view.
* @return
*/
@VisibleForTesting
int computeScrollDistance(int pixelsPastView) {
final int topBottomThreshold =
(int) (mHost.getViewHeight() * mScrollThresholdRatio);
final int direction = (int) Math.signum(pixelsPastView);
final int absPastView = Math.abs(pixelsPastView);
// Calculate the ratio of how far out of the view the pointer currently resides to
// the top/bottom scrolling hotspot of the view.
final float outOfBoundsRatio = Math.min(
1.0f, (float) absPastView / topBottomThreshold);
// Interpolate this ratio and use it to compute the maximum scroll that should be
// possible for this step.
final int cappedScrollStep =
(int) (direction * MAX_SCROLL_STEP * smoothOutOfBoundsRatio(outOfBoundsRatio));
// If the final number of pixels to scroll ends up being 0, the view should still
// scroll at least one pixel.
return cappedScrollStep != 0 ? cappedScrollStep : direction;
}
/**
* Interpolates the given out of bounds ratio on a curve which starts at (0,0) and ends
* at (1,1) and quickly approaches 1 near the start of that interval. This ensures that
* drags that are at the edge or barely past the edge of the threshold does little to no
* scrolling, while drags that are near the edge of the view does a lot of
* scrolling. The equation y=x^10 is used, but this could also be tweaked if
* needed.
* @param ratio A ratio which is in the range [0, 1].
* @return A "smoothed" value, also in the range [0, 1].
*/
private float smoothOutOfBoundsRatio(float ratio) {
return (float) Math.pow(ratio, 10);
}
/**
* Used by to calculate the proper amount of pixels to scroll given time passed
* since scroll started, and to properly scroll / proper listener clean up if necessary.
*
* Callback used by scroller to perform UI tasks, such as scrolling and rerunning at next UI
* cycle.
*/
abstract static class ScrollHost {
/**
* @return height of the view.
*/
abstract int getViewHeight();
/**
* @param dy distance to scroll.
*/
abstract void scrollBy(int dy);
/**
* @param r schedule runnable to be run at next convenient time.
*/
abstract void runAtNextFrame(@NonNull Runnable r);
/**
* @param r remove runnable from being run.
*/
abstract void removeCallback(@NonNull Runnable r);
}
static ScrollHost createScrollHost(final RecyclerView recyclerView) {
return new RuntimeHost(recyclerView);
}
/**
* Tracks location of last surface contact as reported by RecyclerView.
*/
private static final class RuntimeHost extends ScrollHost {
private final RecyclerView mRecyclerView;
RuntimeHost(@NonNull RecyclerView recyclerView) {
mRecyclerView = recyclerView;
}
@Override
void runAtNextFrame(@NonNull Runnable r) {
ViewCompat.postOnAnimation(mRecyclerView, r);
}
@Override
void removeCallback(@NonNull Runnable r) {
mRecyclerView.removeCallbacks(r);
}
@Override
void scrollBy(int dy) {
if (VERBOSE) Log.v(TAG, "Scrolling view by: " + dy);
mRecyclerView.scrollBy(0, dy);
}
@Override
int getViewHeight() {
return mRecyclerView.getHeight();
}
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.find;
import com.intellij.execution.impl.ConsoleViewUtil;
import com.intellij.find.editorHeaderActions.*;
import com.intellij.find.impl.RegExHelpPopup;
import com.intellij.find.impl.livePreview.LivePreviewController;
import com.intellij.find.impl.livePreview.SearchResults;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.CustomComponentAction;
import com.intellij.openapi.actionSystem.ex.DefaultCustomComponentAction;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.event.EditorFactoryEvent;
import com.intellij.openapi.editor.event.EditorFactoryListener;
import com.intellij.openapi.editor.event.SelectionEvent;
import com.intellij.openapi.editor.event.SelectionListener;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.components.labels.LinkLabel;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.update.Activatable;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.text.JTextComponent;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.regex.Pattern;
/**
* @author max, andrey.zaytsev
*/
public class EditorSearchSession implements SearchSession,
                                            DataProvider,
                                            SelectionListener,
                                            SearchResults.SearchResultsListener,
                                            SearchReplaceComponent.Listener {
  public static final DataKey<EditorSearchSession> SESSION_KEY = DataKey.create("EditorSearchSession");
  private final Editor myEditor;
  private final LivePreviewController myLivePreviewController;
  private final SearchResults mySearchResults;
  @NotNull
  private final FindModel myFindModel;
  // The search/replace header UI hosted in the editor's header component.
  private final SearchReplaceComponent myComponent;
  // Snapshots of the selection and caret taken when the session started, used to
  // restore the editor state when the search field is cleared.
  private final RangeMarker myStartSessionSelectionMarker;
  private final RangeMarker myStartSessionCaretMarker;
  // Shown when there are more matches than the current limit; clicking lifts the limit.
  private final LinkLabel<Object> myClickToHighlightLabel = new LinkLabel<>("Click to highlight", null, (__, ___) -> {
    setMatchesLimit(Integer.MAX_VALUE);
    updateResults(true);
  });
  private final Disposable myDisposable = Disposer.newDisposable(EditorSearchSession.class.getName());
  public EditorSearchSession(@NotNull Editor editor, Project project) {
    this(editor, project, createDefaultFindModel(project, editor));
  }
  /**
   * Builds the search header UI, wires live preview, and subscribes to find-model and
   * editor lifecycle events. Cleanup happens when the editor is released (see the
   * EditorFactoryListener registered at the end of this constructor).
   */
  public EditorSearchSession(@NotNull final Editor editor, Project project, @NotNull FindModel findModel) {
    assert !editor.isDisposed();
    myClickToHighlightLabel.setVisible(false);
    myFindModel = findModel;
    myEditor = editor;
    // Capture the pre-session selection and caret so they can be restored later
    // (markers track document edits, unlike raw offsets).
    myStartSessionSelectionMarker = myEditor.getDocument().createRangeMarker(
      myEditor.getSelectionModel().getSelectionStart(),
      myEditor.getSelectionModel().getSelectionEnd()
    );
    myStartSessionCaretMarker = myEditor.getDocument().createRangeMarker(
      myEditor.getCaretModel().getOffset(),
      myEditor.getCaretModel().getOffset()
    );
    mySearchResults = new SearchResults(myEditor, project);
    myLivePreviewController = new LivePreviewController(mySearchResults, this, myDisposable);
    myComponent = SearchReplaceComponent
      .buildFor(project, myEditor.getContentComponent())
      .addPrimarySearchActions(new PrevOccurrenceAction(),
                               new NextOccurrenceAction(),
                               new FindAllAction(),
                               new Separator(),
                               new AddOccurrenceAction(),
                               new RemoveOccurrenceAction(),
                               new SelectAllAction(),
                               new Separator())
      .addSecondarySearchActions(new ToggleAnywhereAction(),
                                 new ToggleInCommentsAction(),
                                 new ToggleInLiteralsOnlyAction(),
                                 new ToggleExceptCommentsAction(),
                                 new ToggleExceptLiteralsAction(),
                                 new ToggleExceptCommentsAndLiteralsAction())
      .addExtraSearchActions(new ToggleMatchCase(),
                             new ToggleWholeWordsOnlyAction(),
                             new ToggleRegex(),
                             new DefaultCustomComponentAction(
                               () -> RegExHelpPopup.createRegExLink("<html><body><b>?</b></body></html>", null, null)),
                             new StatusTextAction(),
                             new DefaultCustomComponentAction(() -> myClickToHighlightLabel))
      .addSearchFieldActions(new RestorePreviousSettingsAction())
      .addPrimaryReplaceActions(new ReplaceAction(),
                                new ReplaceAllAction(),
                                new ExcludeAction())
      .addExtraReplaceAction(new TogglePreserveCaseAction(),
                             new ToggleSelectionOnlyAction())
      .addReplaceFieldActions(new PrevOccurrenceAction(false),
                              new NextOccurrenceAction(false))
      .withDataProvider(this)
      .withCloseAction(this::close)
      .withReplaceAction(this::replaceCurrent)
      .withSecondarySearchActionsIsModifiedGetter(() -> myFindModel.getSearchContext() != FindModel.SearchContext.ANY)
      .build();
    myComponent.addListener(this);
    // Live preview is only active while the header component is actually showing.
    new UiNotifyConnector(myComponent, new Activatable() {
      @Override
      public void showNotify() {
        initLivePreview();
      }
      @Override
      public void hideNotify() {
        myLivePreviewController.off();
        mySearchResults.removeListener(EditorSearchSession.this);
      }
    });
    new SwitchToFind(getComponent());
    new SwitchToReplace(getComponent());
    myFindModel.addObserver(new FindModel.FindModelObserver() {
      // Guards against re-entrancy: this handler itself mutates myFindModel,
      // which would otherwise re-trigger findModelChanged recursively.
      boolean myReentrantLock = false;
      boolean myIsGlobal = myFindModel.isGlobal();
      boolean myIsReplace = myFindModel.isReplaceState();
      @Override
      public void findModelChanged(FindModel findModel1) {
        if (myReentrantLock) return;
        try {
          myReentrantLock = true;
          String stringToFind = myFindModel.getStringToFind();
          if (!wholeWordsApplicable(stringToFind)) {
            myFindModel.setWholeWordsOnly(false);
          }
          // When toggling global/selection scope or find/replace mode, seed or clear
          // the search string from the editor selection accordingly.
          if (myIsGlobal != myFindModel.isGlobal() || myIsReplace != myFindModel.isReplaceState()) {
            if (myFindModel.getStringToFind().isEmpty() && myFindModel.isGlobal()) {
              myFindModel.setStringToFind(StringUtil.notNullize(myEditor.getSelectionModel().getSelectedText()));
            }
            if (!myFindModel.isGlobal() && myFindModel.getStringToFind().equals(myEditor.getSelectionModel().getSelectedText())) {
              myFindModel.setStringToFind("");
            }
            myIsGlobal = myFindModel.isGlobal();
            myIsReplace = myFindModel.isReplaceState();
          }
          EditorSearchSession.this.updateUIWithFindModel();
          mySearchResults.clear();
          EditorSearchSession.this.updateResults(true);
          FindUtil.updateFindInFileModel(EditorSearchSession.this.getProject(), myFindModel, !ConsoleViewUtil.isConsoleViewEditor(editor));
        } finally {
          myReentrantLock = false;
        }
      }
    });
    updateUIWithFindModel();
    // In unit tests the component never becomes visible, so showNotify never fires;
    // initialize live preview eagerly instead.
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      initLivePreview();
    }
    updateMultiLineStateIfNeed();
    // Tear the session down together with its editor.
    EditorFactory.getInstance().addEditorFactoryListener(new EditorFactoryListener() {
      @Override
      public void editorReleased(@NotNull EditorFactoryEvent event) {
        if (event.getEditor() == myEditor) {
          Disposer.dispose(myDisposable);
          myLivePreviewController.dispose();
          myStartSessionSelectionMarker.dispose();
          myStartSessionCaretMarker.dispose();
        }
      }
    }, myDisposable);
  }
  /** Returns the session attached to the editor's header component, if any. */
  @Nullable
  public static EditorSearchSession get(@Nullable Editor editor) {
    JComponent headerComponent = editor != null ? editor.getHeaderComponent() : null;
    SearchReplaceComponent searchReplaceComponent = ObjectUtils.tryCast(headerComponent, SearchReplaceComponent.class);
    return searchReplaceComponent != null ? SESSION_KEY.getData(searchReplaceComponent) : null;
  }
  // NOTE(review): project is @Nullable here but flows into createDefaultFindModel ->
  // FindManager.getInstance(project); confirm callers never pass null in practice.
  @NotNull
  public static EditorSearchSession start(@NotNull Editor editor, @Nullable Project project) {
    EditorSearchSession session = new EditorSearchSession(editor, project);
    editor.setHeaderComponent(session.getComponent());
    return session;
  }
  @NotNull
  public static EditorSearchSession start(@NotNull Editor editor, @NotNull FindModel findModel, @Nullable Project project) {
    EditorSearchSession session = new EditorSearchSession(editor, project, findModel);
    editor.setHeaderComponent(session.getComponent());
    return session;
  }
  @NotNull
  @Override
  public SearchReplaceComponent getComponent() {
    return myComponent;
  }
  public Project getProject() {
    return myComponent.getProject();
  }
  /** Builds a find model seeded from the shared find-in-file settings and current selection. */
  @NotNull
  private static FindModel createDefaultFindModel(Project project, Editor editor) {
    FindModel findModel = new FindModel();
    findModel.copyFrom(FindManager.getInstance(project).getFindInFileModel());
    if (editor.getSelectionModel().hasSelection()) {
      String selectedText = editor.getSelectionModel().getSelectedText();
      if (selectedText != null) {
        findModel.setStringToFind(selectedText);
      }
    }
    findModel.setPromptOnReplace(false);
    return findModel;
  }
  @Override
  @Nullable
  public Object getData(@NotNull @NonNls final String dataId) {
    if (SearchSession.KEY.is(dataId)) {
      return this;
    }
    if (SESSION_KEY.is(dataId)) {
      return this;
    }
    if (CommonDataKeys.EDITOR_EVEN_IF_INACTIVE.is(dataId)) {
      return myEditor;
    }
    return null;
  }
  /** Updates the status text, background color, and "too many matches" hint after a search pass. */
  @Override
  public void searchResultsUpdated(@NotNull SearchResults sr) {
    if (sr.getFindModel() == null) return;
    if (myComponent.getSearchTextComponent().getText().isEmpty()) {
      updateUIWithEmptyResults();
    } else {
      int matches = sr.getMatchesCount();
      boolean tooManyMatches = matches > mySearchResults.getMatchesLimit();
      myComponent.setStatusText(tooManyMatches
                                ? ApplicationBundle.message("editorsearch.toomuch", mySearchResults.getMatchesLimit())
                                : ApplicationBundle.message("editorsearch.matches", matches));
      myClickToHighlightLabel.setVisible(tooManyMatches);
      if (!tooManyMatches && matches <= 0) {
        myComponent.setNotFoundBackground();
      }
      else {
        myComponent.setRegularBackground();
      }
    }
    myComponent.updateActions();
  }
  @Override
  public void cursorMoved() {
    myComponent.updateActions();
  }
  @Override
  public void updateFinished() {
  }
  /** Pushes the search-field text into the model and re-runs the search. */
  @Override
  public void searchFieldDocumentChanged() {
    setMatchesLimit(LivePreviewController.MATCHES_LIMIT);
    String text = myComponent.getSearchTextComponent().getText();
    myFindModel.setStringToFind(text);
    if (!StringUtil.isEmpty(text)) {
      updateResults(true);
    }
    else {
      nothingToSearchFor(true);
    }
    updateMultiLineStateIfNeed();
  }
  private void updateMultiLineStateIfNeed() {
    myFindModel.setMultiline(myComponent.getSearchTextComponent().getText().contains("\n") ||
                             myComponent.getReplaceTextComponent().getText().contains("\n"));
  }
  @Override
  public void replaceFieldDocumentChanged() {
    setMatchesLimit(LivePreviewController.MATCHES_LIMIT);
    myFindModel.setStringToReplace(myComponent.getReplaceTextComponent().getText());
    updateMultiLineStateIfNeed();
  }
  @Override
  public void multilineStateChanged() {
    myFindModel.setMultiline(myComponent.isMultiline());
  }
  @NotNull
  @Override
  public FindModel getFindModel() {
    return myFindModel;
  }
  @Override
  public boolean hasMatches() {
    return mySearchResults != null && mySearchResults.hasMatches();
  }
  @Override
  public void searchForward() {
    moveCursor(SearchResults.Direction.DOWN);
    addTextToRecent(myComponent.getSearchTextComponent());
  }
  @Override
  public void searchBackward() {
    moveCursor(SearchResults.Direction.UP);
    addTextToRecent(myComponent.getSearchTextComponent());
  }
  /** Syncs the header UI with the current find model state. */
  private void updateUIWithFindModel() {
    myComponent.update(myFindModel.getStringToFind(),
                       myFindModel.getStringToReplace(),
                       myFindModel.isReplaceState(),
                       myFindModel.isMultiline());
    myLivePreviewController.setTrackingSelection(!myFindModel.isGlobal());
  }
  // "Whole words" makes no sense when the pattern starts or ends with whitespace.
  private static boolean wholeWordsApplicable(String stringToFind) {
    return !stringToFind.startsWith(" ") &&
           !stringToFind.startsWith("\t") &&
           !stringToFind.endsWith(" ") &&
           !stringToFind.endsWith("\t");
  }
  private void setMatchesLimit(int value) {
    mySearchResults.setMatchesLimit(value);
  }
  /** Replaces the match under the cursor, reporting malformed replacement strings to the user. */
  private void replaceCurrent() {
    if (mySearchResults.getCursor() != null) {
      try {
        myLivePreviewController.performReplace();
      }
      catch (FindManager.MalformedReplacementStringException e) {
        Messages.showErrorDialog(myComponent, e.getMessage(), FindBundle.message("find.replace.invalid.replacement.string.title"));
      }
    }
  }
  public void addTextToRecent(JTextComponent textField) {
    myComponent.addTextToRecent(textField);
  }
  @Override
  public void selectionChanged(@NotNull SelectionEvent e) {
    updateResults(false);
  }
  private void moveCursor(SearchResults.Direction direction) {
    myLivePreviewController.moveCursor(direction);
  }
  /** Closes the session: returns focus to the editor and removes the header component. */
  @Override
  public void close() {
    IdeFocusManager.getInstance(getProject()).requestFocus(myEditor.getContentComponent(), false);
    myLivePreviewController.dispose();
    myEditor.setHeaderComponent(null);
  }
  private void initLivePreview() {
    if (myEditor.isDisposed()) return;
    myLivePreviewController.on();
    // Run the first update immediately, then restore the normal activity delay.
    myLivePreviewController.setUserActivityDelay(0);
    updateResults(false);
    myLivePreviewController.setUserActivityDelay(LivePreviewController.USER_ACTIVITY_TRIGGERING_DELAY);
    mySearchResults.addListener(this);
  }
  /**
   * Re-runs the search for the current model. When the model is a regular expression,
   * the pattern is validated first so a malformed regex shows an error instead of searching.
   */
  private void updateResults(final boolean allowedToChangedEditorSelection) {
    final String text = myFindModel.getStringToFind();
    if (text.isEmpty()) {
      nothingToSearchFor(allowedToChangedEditorSelection);
    }
    else {
      if (myFindModel.isRegularExpressions()) {
        try {
          Pattern.compile(text);
        }
        catch (Exception e) {
          myComponent.setNotFoundBackground();
          myClickToHighlightLabel.setVisible(false);
          mySearchResults.clear();
          myComponent.setStatusText(INCORRECT_REGEX_MESSAGE);
          return;
        }
      }
      final FindManager findManager = FindManager.getInstance(getProject());
      if (allowedToChangedEditorSelection) {
        findManager.setFindWasPerformed();
        // Publish a non-replace copy so "Find Next" outside this session uses it.
        FindModel copy = new FindModel();
        copy.copyFrom(myFindModel);
        copy.setReplaceState(false);
        findManager.setFindNextModel(copy);
      }
      if (myLivePreviewController != null) {
        myLivePreviewController.updateInBackground(myFindModel, allowedToChangedEditorSelection);
      }
    }
  }
  private void nothingToSearchFor(boolean allowedToChangedEditorSelection) {
    updateUIWithEmptyResults();
    if (mySearchResults != null) {
      mySearchResults.clear();
    }
    // Restore the original caret/selection unless the field was just cleared by the user.
    if (allowedToChangedEditorSelection
        && !UIUtil.isClientPropertyTrue(myComponent.getSearchTextComponent(), SearchTextArea.JUST_CLEARED_KEY)) {
      restoreInitialCaretPositionAndSelection();
    }
  }
  private void restoreInitialCaretPositionAndSelection() {
    // Clamp to the current document length: the document may have shrunk since the markers were created.
    int originalSelectionStart = Math.min(myStartSessionSelectionMarker.getStartOffset(), myEditor.getDocument().getTextLength());
    int originalSelectionEnd = Math.min(myStartSessionSelectionMarker.getEndOffset(), myEditor.getDocument().getTextLength());
    myEditor.getSelectionModel().setSelection(originalSelectionStart, originalSelectionEnd);
    myEditor.getCaretModel().moveToOffset(Math.min(myStartSessionCaretMarker.getEndOffset(), myEditor.getDocument().getTextLength()));
    myEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
  }
  private void updateUIWithEmptyResults() {
    myComponent.setRegularBackground();
    myComponent.setStatusText("");
    myClickToHighlightLabel.setVisible(false);
  }
  public String getTextInField() {
    return myComponent.getSearchTextComponent().getText();
  }
  public void setTextInField(final String text) {
    myComponent.getSearchTextComponent().setText(text);
    myFindModel.setStringToFind(text);
  }
  public void selectAllOccurrences() {
    FindUtil.selectSearchResultsInEditor(myEditor, mySearchResults.getOccurrences().iterator(), -1);
  }
  public void removeOccurrence() {
    mySearchResults.prevOccurrence(true);
  }
  public void addNextOccurrence() {
    mySearchResults.nextOccurrence(true);
  }
  public void clearUndoInTextFields() {
    myComponent.resetUndoRedoActions();
  }
  /**
   * Base class for the push-button actions in the replace toolbar (Replace / Replace all /
   * Exclude). Renders as a JButton and routes both toolbar and button events to onClick.
   */
  private abstract static class ButtonAction extends DumbAwareAction implements CustomComponentAction, ActionListener {
    private final String myTitle;
    private final char myMnemonic;
    ButtonAction(@NotNull String title, char mnemonic) {
      myTitle = title;
      myMnemonic = mnemonic;
    }
    @NotNull
    @Override
    public JComponent createCustomComponent(@NotNull Presentation presentation) {
      JButton button = new JButton(myTitle);
      button.setFocusable(false);
      if (!UISettings.getInstance().getDisableMnemonicsInControls()) {
        button.setMnemonic(myMnemonic);
      }
      button.addActionListener(this);
      return button;
    }
    @Override
    public final void update(@NotNull AnActionEvent e) {
      JButton button = (JButton)e.getPresentation().getClientProperty(COMPONENT_KEY);
      if (button != null) {
        update(button);
      }
    }
    @Override
    public final void actionPerformed(@NotNull AnActionEvent e) {
      onClick();
    }
    @Override
    public final void actionPerformed(ActionEvent e) {
      onClick();
    }
    protected abstract void update(@NotNull JButton button);
    protected abstract void onClick();
  }
  private class ReplaceAction extends ButtonAction {
    ReplaceAction() {
      super("Replace", 'p');
    }
    @Override
    protected void update(@NotNull JButton button) {
      button.setEnabled(mySearchResults.hasMatches());
    }
    @Override
    protected void onClick() {
      replaceCurrent();
    }
  }
  private class ReplaceAllAction extends ButtonAction {
    ReplaceAllAction() {
      super("Replace all", 'a');
    }
    @Override
    protected void update(@NotNull JButton button) {
      button.setEnabled(mySearchResults.hasMatches());
    }
    @Override
    protected void onClick() {
      myLivePreviewController.performReplaceAll();
    }
  }
  private class ExcludeAction extends ButtonAction {
    ExcludeAction() {
      // Title is set dynamically in update() ("Include"/"Exclude"), so start empty.
      super("", 'l');
    }
    @Override
    protected void update(@NotNull JButton button) {
      FindResult cursor = mySearchResults.getCursor();
      button.setEnabled(cursor != null);
      button.setText(cursor != null && mySearchResults.isExcluded(cursor) ? "Include" : "Exclude");
    }
    @Override
    protected void onClick() {
      myLivePreviewController.exclude();
      moveCursor(SearchResults.Direction.DOWN);
    }
  }
}
| |
/**
* Copyright 2016 Yahoo Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yahoo.pulsar.common.util.collections;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.LongFunction;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.yahoo.pulsar.common.util.collections.ConcurrentLongHashMap;
public class ConcurrentLongHashMapTest {
@Test
public void testConstructor() {
try {
new ConcurrentLongHashMap<String>(0);
fail("should have thrown exception");
} catch (IllegalArgumentException e) {
// ok
}
try {
new ConcurrentLongHashMap<String>(16, 0);
fail("should have thrown exception");
} catch (IllegalArgumentException e) {
// ok
}
try {
new ConcurrentLongHashMap<String>(4, 8);
fail("should have thrown exception");
} catch (IllegalArgumentException e) {
// ok
}
}
@Test
public void simpleInsertions() {
ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>(16);
assertTrue(map.isEmpty());
assertNull(map.put(1, "one"));
assertFalse(map.isEmpty());
assertNull(map.put(2, "two"));
assertNull(map.put(3, "three"));
assertEquals(map.size(), 3);
assertEquals(map.get(1), "one");
assertEquals(map.size(), 3);
assertEquals(map.remove(1), "one");
assertEquals(map.size(), 2);
assertEquals(map.get(1), null);
assertEquals(map.get(5), null);
assertEquals(map.size(), 2);
assertNull(map.put(1, "one"));
assertEquals(map.size(), 3);
assertEquals(map.put(1, "uno"), "one");
assertEquals(map.size(), 3);
}
@Test
public void testRemove() {
ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>();
assertTrue(map.isEmpty());
assertNull(map.put(1, "one"));
assertFalse(map.isEmpty());
assertFalse(map.remove(0, "zero"));
assertFalse(map.remove(1, "uno"));
assertFalse(map.isEmpty());
assertTrue(map.remove(1, "one"));
assertTrue(map.isEmpty());
}
@Test
public void testNegativeUsedBucketCount() {
ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>(16, 1);
map.put(0, "zero");
assertEquals(1, map.getUsedBucketCount());
map.put(0, "zero1");
assertEquals(1, map.getUsedBucketCount());
map.remove(0);
assertEquals(0, map.getUsedBucketCount());
map.remove(0);
assertEquals(0, map.getUsedBucketCount());
}
@Test
public void testRehashing() {
int n = 16;
ConcurrentLongHashMap<Integer> map = new ConcurrentLongHashMap<>(n / 2, 1);
assertEquals(map.capacity(), n);
assertEquals(map.size(), 0);
for (int i = 0; i < n; i++) {
map.put(i, i);
}
assertEquals(map.capacity(), 2 * n);
assertEquals(map.size(), n);
}
@Test
public void testRehashingWithDeletes() {
int n = 16;
ConcurrentLongHashMap<Integer> map = new ConcurrentLongHashMap<>(n / 2, 1);
assertEquals(map.capacity(), n);
assertEquals(map.size(), 0);
for (int i = 0; i < n / 2; i++) {
map.put(i, i);
}
for (int i = 0; i < n / 2; i++) {
map.remove(i);
}
for (int i = n; i < (2 * n); i++) {
map.put(i, i);
}
assertEquals(map.capacity(), 2 * n);
assertEquals(map.size(), n);
}
@Test
public void concurrentInsertions() throws Throwable {
ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>();
ExecutorService executor = Executors.newCachedThreadPool();
final int nThreads = 16;
final int N = 100_000;
String value = "value";
List<Future<?>> futures = new ArrayList<>();
for (int i = 0; i < nThreads; i++) {
final int threadIdx = i;
futures.add(executor.submit(() -> {
Random random = new Random();
for (int j = 0; j < N; j++) {
long key = random.nextLong();
// Ensure keys are uniques
key -= key % (threadIdx + 1);
map.put(key, value);
}
}));
}
for (Future<?> future : futures) {
future.get();
}
assertEquals(map.size(), N * nThreads);
executor.shutdown();
}
@Test
public void concurrentInsertionsAndReads() throws Throwable {
ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>();
ExecutorService executor = Executors.newCachedThreadPool();
final int nThreads = 16;
final int N = 100_000;
String value = "value";
List<Future<?>> futures = new ArrayList<>();
for (int i = 0; i < nThreads; i++) {
final int threadIdx = i;
futures.add(executor.submit(() -> {
Random random = new Random();
for (int j = 0; j < N; j++) {
long key = random.nextLong();
// Ensure keys are uniques
key -= key % (threadIdx + 1);
map.put(key, value);
}
}));
}
for (Future<?> future : futures) {
future.get();
}
assertEquals(map.size(), N * nThreads);
executor.shutdown();
}
@Test
public void testIteration() {
ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>();
assertEquals(map.keys(), Collections.emptyList());
assertEquals(map.values(), Collections.emptyList());
map.put(0, "zero");
assertEquals(map.keys(), Lists.newArrayList(0l));
assertEquals(map.values(), Lists.newArrayList("zero"));
map.remove(0);
assertEquals(map.keys(), Collections.emptyList());
assertEquals(map.values(), Collections.emptyList());
map.put(0, "zero");
map.put(1, "one");
map.put(2, "two");
List<Long> keys = map.keys();
keys.sort(null);
assertEquals(keys, Lists.newArrayList(0l, 1l, 2l));
List<String> values = map.values();
values.sort(null);
assertEquals(values, Lists.newArrayList("one", "two", "zero"));
map.put(1, "uno");
keys = map.keys();
keys.sort(null);
assertEquals(keys, Lists.newArrayList(0l, 1l, 2l));
values = map.values();
values.sort(null);
assertEquals(values, Lists.newArrayList("two", "uno", "zero"));
map.clear();
assertTrue(map.isEmpty());
}
@Test
public void testHashConflictWithDeletion() {
final int Buckets = 16;
ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>(Buckets, 1);
// Pick 2 keys that fall into the same bucket
long key1 = 1;
long key2 = 27;
int bucket1 = ConcurrentLongHashMap.signSafeMod(ConcurrentLongHashMap.hash(key1), Buckets);
int bucket2 = ConcurrentLongHashMap.signSafeMod(ConcurrentLongHashMap.hash(key2), Buckets);
assertEquals(bucket1, bucket2);
assertEquals(map.put(key1, "value-1"), null);
assertEquals(map.put(key2, "value-2"), null);
assertEquals(map.size(), 2);
assertEquals(map.remove(key1), "value-1");
assertEquals(map.size(), 1);
assertEquals(map.put(key1, "value-1-overwrite"), null);
assertEquals(map.size(), 2);
assertEquals(map.remove(key1), "value-1-overwrite");
assertEquals(map.size(), 1);
assertEquals(map.put(key2, "value-2-overwrite"), "value-2");
assertEquals(map.get(key2), "value-2-overwrite");
assertEquals(map.size(), 1);
assertEquals(map.remove(key2), "value-2-overwrite");
assertTrue(map.isEmpty());
}
@Test
public void testPutIfAbsent() {
ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>();
assertEquals(map.putIfAbsent(1, "one"), null);
assertEquals(map.get(1), "one");
assertEquals(map.putIfAbsent(1, "uno"), "one");
assertEquals(map.get(1), "one");
}
@Test
public void testComputeIfAbsent() {
ConcurrentLongHashMap<Integer> map = new ConcurrentLongHashMap<>(16, 1);
AtomicInteger counter = new AtomicInteger();
LongFunction<Integer> provider = new LongFunction<Integer>() {
public Integer apply(long key) {
return counter.getAndIncrement();
}
};
assertEquals(map.computeIfAbsent(0, provider).intValue(), 0);
assertEquals(map.get(0).intValue(), 0);
assertEquals(map.computeIfAbsent(1, provider).intValue(), 1);
assertEquals(map.get(1).intValue(), 1);
assertEquals(map.computeIfAbsent(1, provider).intValue(), 1);
assertEquals(map.get(1).intValue(), 1);
assertEquals(map.computeIfAbsent(2, provider).intValue(), 2);
assertEquals(map.get(2).intValue(), 2);
}
final static int Iterations = 1;
final static int ReadIterations = 10000;
final static int N = 100_000;
/**
 * Micro-benchmark for {@code ConcurrentLongHashMap}: insert N distinct keys,
 * perform {@code ReadIterations} read passes over all of them, then remove them.
 *
 * <p>Fix: the inner loop variable {@code j} was previously unused — every
 * put/get/remove hit the single key {@code i}, so the benchmark never exercised
 * the N-entry capacity the map is pre-sized for. Operations are now keyed on
 * {@code j} so all N slots are actually touched.
 */
public void benchConcurrentLongHashMap() throws Exception {
    ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>(N, 1);
    // Outer loop repeats the whole cycle Iterations times.
    for (long i = 0; i < Iterations; i++) {
        for (long j = 0; j < N; j++) {
            map.put(j, "value");
        }
        for (long h = 0; h < ReadIterations; h++) {
            for (long j = 0; j < N; j++) {
                map.get(j);
            }
        }
        for (long j = 0; j < N; j++) {
            map.remove(j);
        }
    }
}
/**
 * Baseline benchmark against {@link java.util.concurrent.ConcurrentHashMap}
 * with boxed {@code Long} keys, mirroring {@link #benchConcurrentLongHashMap()}.
 *
 * <p>Fix: the inner loop variable {@code j} was previously unused — all
 * operations hit the single key {@code i}. Keys are now {@code j} so the map
 * really holds N entries, matching its pre-sized capacity.
 */
public void benchConcurrentHashMap() throws Exception {
    ConcurrentHashMap<Long, String> map = new ConcurrentHashMap<Long, String>(N, 0.66f, 1);
    for (long i = 0; i < Iterations; i++) {
        for (long j = 0; j < N; j++) {
            map.put(j, "value"); // long j boxes to Long
        }
        for (long h = 0; h < ReadIterations; h++) {
            for (long j = 0; j < N; j++) {
                map.get(j);
            }
        }
        for (long j = 0; j < N; j++) {
            map.remove(j);
        }
    }
}
/**
 * Single-threaded baseline benchmark against {@link java.util.HashMap},
 * mirroring {@link #benchConcurrentLongHashMap()}.
 *
 * <p>Fix: the inner loop variable {@code j} was previously unused — all
 * operations hit the single key {@code i}. Keys are now {@code j} so the map
 * really holds N entries.
 */
void benchHashMap() throws Exception {
    HashMap<Long, String> map = new HashMap<Long, String>(N, 0.66f);
    for (long i = 0; i < Iterations; i++) {
        for (long j = 0; j < N; j++) {
            map.put(j, "value"); // long j boxes to Long
        }
        for (long h = 0; h < ReadIterations; h++) {
            for (long j = 0; j < N; j++) {
                map.get(j);
            }
        }
        for (long j = 0; j < N; j++) {
            map.remove(j);
        }
    }
}
/**
 * Ad-hoc benchmark driver: runs each map implementation's benchmark and
 * prints its wall-clock duration in milliseconds.
 */
public static void main(String[] args) throws Exception {
    ConcurrentLongHashMapTest bench = new ConcurrentLongHashMapTest();
    long begin = System.nanoTime();
    bench.benchConcurrentLongHashMap();
    System.out.println("CLHM: " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - begin) + " ms");
    begin = System.nanoTime();
    bench.benchHashMap();
    System.out.println("HM: " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - begin) + " ms");
    begin = System.nanoTime();
    bench.benchConcurrentHashMap();
    System.out.println("CHM: " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - begin) + " ms");
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tests/test space1/test 1.proto
package protobufel1.test1;
// Generated protobuf container class for "tests/test space1/test 1.proto".
// NOTE(review): protoc output ("DO NOT EDIT") — regenerate from the .proto
// instead of hand-editing. Comments below are review annotations only.
public final class Test1 {
  private Test1() {}
  // The .proto declares no extensions; protoc still emits this registration hook.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface Message111OrBuilder extends
      // @@protoc_insertion_point(interface_extends:protobufel1.test1.Message111)
      com.google.protobuf.MessageOrBuilder {
    /**
     * <code>repeated string field1 = 1;</code>
     */
    com.google.protobuf.ProtocolStringList
        getField1List();
    /**
     * <code>repeated string field1 = 1;</code>
     */
    int getField1Count();
    /**
     * <code>repeated string field1 = 1;</code>
     */
    java.lang.String getField1(int index);
    /**
     * <code>repeated string field1 = 1;</code>
     */
    com.google.protobuf.ByteString
        getField1Bytes(int index);
  }
  /**
   * Protobuf type {@code protobufel1.test1.Message111}
   */
  public static final class Message111 extends
      com.google.protobuf.GeneratedMessage implements
      // @@protoc_insertion_point(message_implements:protobufel1.test1.Message111)
      Message111OrBuilder {
    // Use Message111.newBuilder() to construct.
    private Message111(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static initializer to create the singleton default instance.
    private Message111(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final Message111 defaultInstance;
    public static Message111 getDefaultInstance() {
      return defaultInstance;
    }
    public Message111 getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: consumes the stream until end-of-message
    // (tag 0), collecting repeated `field1` entries (field 1, wire type 2 => tag 10)
    // and preserving unrecognized fields in unknownFields.
    private Message111(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              com.google.protobuf.ByteString bs = input.readBytes();
              // Lazily switch to a mutable list on the first occurrence of field1.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                field1_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              field1_.add(bs);
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the list even on failure so the partial message is safe to expose.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          field1_ = field1_.getUnmodifiableView();
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return protobufel1.test1.Test1.internal_static_protobufel1_test1_Message111_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return protobufel1.test1.Test1.internal_static_protobufel1_test1_Message111_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              protobufel1.test1.Test1.Message111.class, protobufel1.test1.Test1.Message111.Builder.class);
    }
    public static com.google.protobuf.Parser<Message111> PARSER =
        new com.google.protobuf.AbstractParser<Message111>() {
      public Message111 parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Message111(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<Message111> getParserForType() {
      return PARSER;
    }
    public static final int FIELD1_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList field1_;
    /**
     * <code>repeated string field1 = 1;</code>
     */
    public com.google.protobuf.ProtocolStringList
        getField1List() {
      return field1_;
    }
    /**
     * <code>repeated string field1 = 1;</code>
     */
    public int getField1Count() {
      return field1_.size();
    }
    /**
     * <code>repeated string field1 = 1;</code>
     */
    public java.lang.String getField1(int index) {
      return field1_.get(index);
    }
    /**
     * <code>repeated string field1 = 1;</code>
     */
    public com.google.protobuf.ByteString
        getField1Bytes(int index) {
      return field1_.getByteString(index);
    }
    private void initFields() {
      field1_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      // No required fields in the schema, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < field1_.size(); i++) {
        output.writeBytes(1, field1_.getByteString(i));
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < field1_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(field1_.getByteString(i));
        }
        size += dataSize;
        // One byte of tag overhead per repeated element (field 1, wire type 2).
        size += 1 * getField1List().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    public static protobufel1.test1.Test1.Message111 parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static protobufel1.test1.Test1.Message111 parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static protobufel1.test1.Test1.Message111 parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static protobufel1.test1.Test1.Message111 parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static protobufel1.test1.Test1.Message111 parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static protobufel1.test1.Test1.Message111 parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static protobufel1.test1.Test1.Message111 parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static protobufel1.test1.Test1.Message111 parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static protobufel1.test1.Test1.Message111 parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static protobufel1.test1.Test1.Message111 parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(protobufel1.test1.Test1.Message111 prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code protobufel1.test1.Message111}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:protobufel1.test1.Message111)
        protobufel1.test1.Test1.Message111OrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return protobufel1.test1.Test1.internal_static_protobufel1_test1_Message111_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return protobufel1.test1.Test1.internal_static_protobufel1_test1_Message111_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                protobufel1.test1.Test1.Message111.class, protobufel1.test1.Test1.Message111.Builder.class);
      }
      // Construct using protobufel1.test1.Test1.Message111.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No sub-message field builders exist for this message, so nothing to force.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        field1_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return protobufel1.test1.Test1.internal_static_protobufel1_test1_Message111_descriptor;
      }
      public protobufel1.test1.Test1.Message111 getDefaultInstanceForType() {
        return protobufel1.test1.Test1.Message111.getDefaultInstance();
      }
      public protobufel1.test1.Test1.Message111 build() {
        protobufel1.test1.Test1.Message111 result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public protobufel1.test1.Test1.Message111 buildPartial() {
        protobufel1.test1.Test1.Message111 result = new protobufel1.test1.Test1.Message111(this);
        int from_bitField0_ = bitField0_;
        // Freeze the list so the built message shares an immutable view of it.
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          field1_ = field1_.getUnmodifiableView();
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.field1_ = field1_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof protobufel1.test1.Test1.Message111) {
          return mergeFrom((protobufel1.test1.Test1.Message111)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(protobufel1.test1.Test1.Message111 other) {
        if (other == protobufel1.test1.Test1.Message111.getDefaultInstance()) return this;
        if (!other.field1_.isEmpty()) {
          if (field1_.isEmpty()) {
            // Our list is empty: adopt the other message's (immutable) list wholesale.
            field1_ = other.field1_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureField1IsMutable();
            field1_.addAll(other.field1_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        protobufel1.test1.Test1.Message111 parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (protobufel1.test1.Test1.Message111) e.getUnfinishedMessage();
          throw e;
        } finally {
          // Merge whatever was parsed, even when parsing failed part-way through.
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      private com.google.protobuf.LazyStringList field1_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write: replace the shared immutable list with a private mutable copy.
      private void ensureField1IsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          field1_ = new com.google.protobuf.LazyStringArrayList(field1_);
          bitField0_ |= 0x00000001;
        }
      }
      /**
       * <code>repeated string field1 = 1;</code>
       */
      public com.google.protobuf.ProtocolStringList
          getField1List() {
        return field1_.getUnmodifiableView();
      }
      /**
       * <code>repeated string field1 = 1;</code>
       */
      public int getField1Count() {
        return field1_.size();
      }
      /**
       * <code>repeated string field1 = 1;</code>
       */
      public java.lang.String getField1(int index) {
        return field1_.get(index);
      }
      /**
       * <code>repeated string field1 = 1;</code>
       */
      public com.google.protobuf.ByteString
          getField1Bytes(int index) {
        return field1_.getByteString(index);
      }
      /**
       * <code>repeated string field1 = 1;</code>
       */
      public Builder setField1(
          int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureField1IsMutable();
        field1_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string field1 = 1;</code>
       */
      public Builder addField1(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureField1IsMutable();
        field1_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string field1 = 1;</code>
       */
      public Builder addAllField1(
          java.lang.Iterable<java.lang.String> values) {
        ensureField1IsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, field1_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string field1 = 1;</code>
       */
      public Builder clearField1() {
        field1_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string field1 = 1;</code>
       */
      public Builder addField1Bytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureField1IsMutable();
        field1_.add(value);
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:protobufel1.test1.Message111)
    }
    static {
      defaultInstance = new Message111(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:protobufel1.test1.Message111)
  }
  private static final com.google.protobuf.Descriptors.Descriptor
    internal_static_protobufel1_test1_Message111_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_protobufel1_test1_Message111_fieldAccessorTable;
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for "tests/test space1/test 1.proto".
    java.lang.String[] descriptorData = {
      "\n\036tests/test space1/test 1.proto\022\021protob" +
      "ufel1.test1\"\034\n\nMessage111\022\016\n\006field1\030\001 \003(" +
      "\t"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
          public com.google.protobuf.ExtensionRegistry assignDescriptors(
              com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
    internal_static_protobufel1_test1_Message111_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_protobufel1_test1_Message111_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_protobufel1_test1_Message111_descriptor,
        new java.lang.String[] { "Field1", });
  }
  // @@protoc_insertion_point(outer_class_scope)
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.channel.local.LocalAddress;
import io.netty.channel.local.LocalChannel;
import io.netty.channel.local.LocalServerChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.oio.OioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.channel.socket.oio.OioSocketChannel;
import io.netty.util.AbstractReferenceCounted;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.ReferenceCounted;
import io.netty.util.concurrent.AbstractEventExecutor;
import io.netty.util.concurrent.DefaultEventExecutorGroup;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.EventExecutorGroup;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.ImmediateEventExecutor;
import io.netty.util.concurrent.Promise;
import io.netty.util.concurrent.UnorderedThreadPoolEventExecutor;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Test;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class DefaultChannelPipelineTest {
// Event loop shared by every test in this class; shut down once in afterClass().
private static final EventLoopGroup group = new DefaultEventLoopGroup(1);
// Client-side channel created by setUp(); null for tests that never call setUp().
private Channel self;
// Server-side (accepted) channel paired with `self`.
private Channel peer;
@AfterClass
public static void afterClass() throws Exception {
    // Release the shared event loop threads once the whole class has run.
    group.shutdownGracefully().sync();
}
/**
 * Establishes a connected pair of local channels: {@code self} (client side,
 * with the given handlers installed) and {@code peer} (server side). The
 * listening server channel is closed before returning — only the accepted
 * connection survives.
 */
private void setUp(final ChannelHandler... handlers) throws Exception {
    final AtomicReference<Channel> peerRef = new AtomicReference<Channel>();
    ServerBootstrap sb = new ServerBootstrap();
    sb.group(group).channel(LocalServerChannel.class);
    sb.childHandler(new ChannelInboundHandlerAdapter() {
        @Override
        public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
            // Capture the accepted (server-side) channel for the test to use.
            peerRef.set(ctx.channel());
        }
        @Override
        public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
            // Drop and release anything the client writes so buffers are not leaked.
            ReferenceCountUtil.release(msg);
        }
    });
    ChannelFuture bindFuture = sb.bind(LocalAddress.ANY).sync();
    Bootstrap b = new Bootstrap();
    b.group(group).channel(LocalChannel.class);
    b.handler(new ChannelInitializer<LocalChannel>() {
        @Override
        protected void initChannel(LocalChannel ch) throws Exception {
            ch.pipeline().addLast(handlers);
        }
    });
    self = b.connect(bindFuture.channel().localAddress()).sync().channel();
    // NOTE(review): assumes the child channel was registered (and peerRef set)
    // by the time connect() completed — TODO confirm this ordering holds.
    peer = peerRef.get();
    // The listening channel is no longer needed once the connection exists.
    bindFuture.channel().close().sync();
}
/**
 * Closes both ends of the channel pair created by {@link #setUp} so each test
 * starts from a clean slate.
 *
 * <p>Fix: {@code self} was previously only nulled out without being closed,
 * leaking the still-open client channel after every test that called setUp().
 */
@After
public void tearDown() throws Exception {
    if (peer != null) {
        peer.close();
        peer = null;
    }
    if (self != null) {
        self.close();
        self = null;
    }
}
@Test
public void testFreeCalled() throws Exception {
    // Counts down when the reference-counted holder is deallocated.
    final CountDownLatch free = new CountDownLatch(1);
    final ReferenceCounted holder = new AbstractReferenceCounted() {
        @Override
        protected void deallocate() {
            free.countDown();
        }
        @Override
        public ReferenceCounted touch(Object hint) {
            return this;
        }
    };
    StringInboundHandler handler = new StringInboundHandler();
    setUp(handler);
    // The peer writes a non-String message; the handler does not forward it,
    // so the pipeline tail must release it, triggering deallocate().
    peer.writeAndFlush(holder).sync();
    assertTrue(free.await(10, TimeUnit.SECONDS));
    assertTrue(handler.called);
}
// Records that it saw a message and swallows non-String messages (does not
// forward them), leaving their release to the pipeline.
private static final class StringInboundHandler extends ChannelInboundHandlerAdapter {
    boolean called;
    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        called = true;
        // Only non-String messages continue down the pipeline.
        if (!(msg instanceof String)) {
            ctx.fireChannelRead(msg);
        }
    }
}
@Test
public void testRemoveChannelHandler() {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    // Register three distinct handlers under well-known names.
    ChannelHandler[] handlers = { newHandler(), newHandler(), newHandler() };
    String[] names = { "handler1", "handler2", "handler3" };
    for (int i = 0; i < handlers.length; i++) {
        pipeline.addLast(names[i], handlers[i]);
    }
    for (int i = 0; i < handlers.length; i++) {
        assertSame(pipeline.get(names[i]), handlers[i]);
    }
    // Removing by instance must also unregister the associated name.
    for (int i = 0; i < handlers.length; i++) {
        pipeline.remove(handlers[i]);
        assertNull(pipeline.get(names[i]));
    }
}
@Test
public void testReplaceChannelHandler() {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    // The very same handler instance may be registered under several names.
    ChannelHandler shared = newHandler();
    pipeline.addLast("handler1", shared);
    pipeline.addLast("handler2", shared);
    pipeline.addLast("handler3", shared);
    assertSame(pipeline.get("handler1"), shared);
    assertSame(pipeline.get("handler2"), shared);
    assertSame(pipeline.get("handler3"), shared);
    // Replace each entry in turn (keeping its name) and verify the swap.
    ChannelHandler replacement1 = newHandler();
    pipeline.replace("handler1", "handler1", replacement1);
    assertSame(pipeline.get("handler1"), replacement1);
    ChannelHandler replacement3 = newHandler();
    pipeline.replace("handler3", "handler3", replacement3);
    assertSame(pipeline.get("handler3"), replacement3);
    ChannelHandler replacement2 = newHandler();
    pipeline.replace("handler2", "handler2", replacement2);
    assertSame(pipeline.get("handler2"), replacement2);
}
@Test
public void testChannelHandlerContextNavigation() {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    final int halfCount = 5;
    // Populate both ends of the pipeline, then check every handler got a context.
    pipeline.addFirst(newHandlers(halfCount));
    pipeline.addLast(newHandlers(halfCount));
    verifyContextNumber(pipeline, halfCount * 2);
}
@Test
public void testFireChannelRegistered() throws Exception {
    final CountDownLatch latch = new CountDownLatch(1);
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    // A handler added from inside ChannelInitializer.initChannel() must still
    // receive the channelRegistered event.
    pipeline.addLast(new ChannelInitializer<Channel>() {
        @Override
        protected void initChannel(Channel ch) throws Exception {
            ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
                @Override
                public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
                    latch.countDown();
                }
            });
        }
    });
    group.register(pipeline.channel());
    assertTrue(latch.await(2, TimeUnit.SECONDS));
}
@Test
public void testPipelineOperation() {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    final int handlerNum = 5;
    ChannelHandler[] handlers1 = newHandlers(handlerNum);
    ChannelHandler[] handlers2 = newHandlers(handlerNum);
    final String prefixX = "x";
    // First batch: even indices go to the head, odd indices to the tail.
    for (int i = 0; i < handlerNum; i++) {
        boolean even = i % 2 == 0;
        if (even) {
            pipeline.addFirst(prefixX + i, handlers1[i]);
        } else {
            pipeline.addLast(prefixX + i, handlers1[i]);
        }
    }
    // Second batch: interleave around the first one by relative position.
    for (int i = 0; i < handlerNum; i++) {
        boolean even = i % 2 == 0;
        if (even) {
            pipeline.addAfter(prefixX + i, String.valueOf(i), handlers2[i]);
        } else {
            pipeline.addBefore(prefixX + i, String.valueOf(i), handlers2[i]);
        }
    }
    verifyContextNumber(pipeline, handlerNum * 2);
}
@Test
public void testChannelHandlerContextOrder() {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    // Handlers are named after their expected numeric position so ordering can
    // be verified by comparing names while walking the context chain.
    pipeline.addFirst("1", newHandler());
    pipeline.addLast("10", newHandler());
    pipeline.addBefore("10", "5", newHandler());
    pipeline.addAfter("1", "3", newHandler());
    pipeline.addBefore("5", "4", newHandler());
    pipeline.addAfter("5", "6", newHandler());
    pipeline.addBefore("1", "0", newHandler());
    pipeline.addAfter("10", "11", newHandler());
    // Walk the internal context linked list (relies on AbstractChannelHandlerContext.next).
    AbstractChannelHandlerContext ctx = (AbstractChannelHandlerContext) pipeline.firstContext();
    assertNotNull(ctx);
    while (ctx != null) {
        int i = toInt(ctx.name());
        int j = next(ctx);
        if (j != -1) {
            // Names must appear in strictly increasing numeric order.
            assertTrue(i < j);
        } else {
            // Past the last user handler only the tail context remains.
            assertNull(ctx.next.next);
        }
        ctx = ctx.next;
    }
    verifyContextNumber(pipeline, 8);
}
@Test(timeout = 10000)
public void testLifeCycleAwareness() throws Exception {
    setUp();
    ChannelPipeline p = self.pipeline();
    final List<LifeCycleAwareTestHandler> handlers = new ArrayList<LifeCycleAwareTestHandler>();
    final int COUNT = 20;
    final CountDownLatch addLatch = new CountDownLatch(COUNT);
    for (int i = 0; i < COUNT; i++) {
        final LifeCycleAwareTestHandler handler = new LifeCycleAwareTestHandler("handler-" + i);
        // Add handler.
        p.addFirst(handler.name, handler);
        // Validation is posted to the event loop so it runs after the
        // handlerAdded callback has been delivered there.
        self.eventLoop().execute(new Runnable() {
            @Override
            public void run() {
                // Validate handler life-cycle methods called.
                handler.validate(true, false);
                // Store handler into the list.
                handlers.add(handler);
                addLatch.countDown();
            }
        });
    }
    addLatch.await();
    // Change the order of remove operations over all handlers in the pipeline.
    Collections.shuffle(handlers);
    final CountDownLatch removeLatch = new CountDownLatch(COUNT);
    for (final LifeCycleAwareTestHandler handler : handlers) {
        assertSame(handler, p.remove(handler.name));
        self.eventLoop().execute(new Runnable() {
            @Override
            public void run() {
                // Validate handler life-cycle methods called.
                handler.validate(true, true);
                removeLatch.countDown();
            }
        });
    }
    removeLatch.await();
}
/**
 * Removing a handler that still buffers inbound data must forward that data to
 * the next inbound handler.
 *
 * <p>Fix: the timeout was {@code 100000} ms — ten times the 10000 ms used by
 * every sibling test in this class; almost certainly an extra-zero typo.
 */
@Test(timeout = 10000)
public void testRemoveAndForwardInbound() throws Exception {
    final BufferedTestHandler handler1 = new BufferedTestHandler();
    final BufferedTestHandler handler2 = new BufferedTestHandler();
    setUp(handler1, handler2);
    // Run on the event loop so pipeline mutation and assertions are race-free.
    self.eventLoop().submit(new Runnable() {
        @Override
        public void run() {
            ChannelPipeline p = self.pipeline();
            handler1.inboundBuffer.add(8);
            assertEquals(8, handler1.inboundBuffer.peek());
            assertTrue(handler2.inboundBuffer.isEmpty());
            p.remove(handler1);
            // The buffered element must have travelled to handler2.
            assertEquals(1, handler2.inboundBuffer.size());
            assertEquals(8, handler2.inboundBuffer.peek());
        }
    }).sync();
}
@Test(timeout = 10000)
public void testRemoveAndForwardOutbound() throws Exception {
    final BufferedTestHandler first = new BufferedTestHandler();
    final BufferedTestHandler second = new BufferedTestHandler();
    setUp(first, second);
    self.eventLoop().submit(new Runnable() {
        @Override
        public void run() {
            ChannelPipeline pipeline = self.pipeline();
            // Queue an outbound message in the handler closest to the tail.
            second.outboundBuffer.add(8);
            assertEquals(8, second.outboundBuffer.peek());
            assertTrue(first.outboundBuffer.isEmpty());
            // Removing it must forward the buffered message towards the head.
            pipeline.remove(second);
            assertEquals(1, first.outboundBuffer.size());
            assertEquals(8, first.outboundBuffer.peek());
        }
    }).sync();
}
@Test(timeout = 10000)
public void testReplaceAndForwardOutbound() throws Exception {
    final BufferedTestHandler original = new BufferedTestHandler();
    final BufferedTestHandler replacement = new BufferedTestHandler();
    setUp(original);
    self.eventLoop().submit(new Runnable() {
        @Override
        public void run() {
            ChannelPipeline pipeline = self.pipeline();
            // Buffer an outbound message in the handler about to be replaced.
            original.outboundBuffer.add(8);
            assertEquals(8, original.outboundBuffer.peek());
            assertTrue(replacement.outboundBuffer.isEmpty());
            // The replacement must inherit the buffered outbound data.
            pipeline.replace(original, "handler2", replacement);
            assertEquals(8, replacement.outboundBuffer.peek());
        }
    }).sync();
}
@Test(timeout = 10000)
public void testReplaceAndForwardInboundAndOutbound() throws Exception {
    final BufferedTestHandler original = new BufferedTestHandler();
    final BufferedTestHandler replacement = new BufferedTestHandler();
    setUp(original);
    self.eventLoop().submit(new Runnable() {
        @Override
        public void run() {
            ChannelPipeline pipeline = self.pipeline();
            // Buffer one message in each direction before the swap.
            original.inboundBuffer.add(8);
            original.outboundBuffer.add(8);
            assertEquals(8, original.inboundBuffer.peek());
            assertEquals(8, original.outboundBuffer.peek());
            assertTrue(replacement.inboundBuffer.isEmpty());
            assertTrue(replacement.outboundBuffer.isEmpty());
            // Both buffers must be handed over to the replacement handler.
            pipeline.replace(original, "handler2", replacement);
            assertEquals(8, replacement.outboundBuffer.peek());
            assertEquals(8, replacement.inboundBuffer.peek());
        }
    }).sync();
}
@Test(timeout = 10000)
public void testRemoveAndForwardInboundOutbound() throws Exception {
    final BufferedTestHandler head = new BufferedTestHandler();
    final BufferedTestHandler middle = new BufferedTestHandler();
    final BufferedTestHandler tail = new BufferedTestHandler();
    setUp(head, middle, tail);
    self.eventLoop().submit(new Runnable() {
        @Override
        public void run() {
            ChannelPipeline pipeline = self.pipeline();
            // Buffer one element in each direction inside the middle handler.
            middle.inboundBuffer.add(8);
            middle.outboundBuffer.add(8);
            assertEquals(8, middle.inboundBuffer.peek());
            assertEquals(8, middle.outboundBuffer.peek());
            assertEquals(0, head.outboundBuffer.size());
            assertEquals(0, tail.inboundBuffer.size());
            // Removal forwards inbound data towards the tail, outbound towards the head.
            pipeline.remove(middle);
            assertEquals(8, tail.inboundBuffer.peek());
            assertEquals(8, head.outboundBuffer.peek());
        }
    }).sync();
}
// Tests for https://github.com/netty/netty/issues/2349
@Test
public void testCancelBind() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel());
    // Cancel the promise before the operation is even issued...
    ChannelPromise cancelled = pipeline.channel().newPromise();
    assertTrue(cancelled.cancel(false));
    // ...so the resulting bind future must also report cancellation.
    ChannelFuture future = pipeline.bind(new LocalAddress("test"), cancelled);
    assertTrue(future.isCancelled());
}
@Test
public void testCancelConnect() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel());
    // A promise cancelled up front must short-circuit the connect attempt.
    ChannelPromise cancelled = pipeline.channel().newPromise();
    assertTrue(cancelled.cancel(false));
    ChannelFuture future = pipeline.connect(new LocalAddress("test"), cancelled);
    assertTrue(future.isCancelled());
}
@Test
public void testCancelDisconnect() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel());
    // A promise cancelled up front must short-circuit the disconnect.
    ChannelPromise cancelled = pipeline.channel().newPromise();
    assertTrue(cancelled.cancel(false));
    ChannelFuture future = pipeline.disconnect(cancelled);
    assertTrue(future.isCancelled());
}
@Test
public void testCancelClose() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel());
    // A promise cancelled up front must short-circuit the close.
    ChannelPromise cancelled = pipeline.channel().newPromise();
    assertTrue(cancelled.cancel(false));
    ChannelFuture future = pipeline.close(cancelled);
    assertTrue(future.isCancelled());
}
// Passing a promise that belongs to a different channel must be rejected.
@Test(expected = IllegalArgumentException.class)
public void testWrongPromiseChannel() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel()).sync();
    ChannelPipeline pipeline2 = new LocalChannel().pipeline();
    group.register(pipeline2.channel()).sync();
    try {
        ChannelPromise promise2 = pipeline2.channel().newPromise();
        pipeline.close(promise2); // expected to throw IllegalArgumentException
    } finally {
        pipeline.close();
        pipeline2.close();
    }
}

// A VoidChannelPromise is not a valid promise for close(...) and must be rejected.
@Test(expected = IllegalArgumentException.class)
public void testUnexpectedVoidChannelPromise() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel()).sync();
    try {
        ChannelPromise promise = new VoidChannelPromise(pipeline.channel(), false);
        pipeline.close(promise); // expected to throw IllegalArgumentException
    } finally {
        pipeline.close();
    }
}

// The channel's closeFuture() must not be accepted as the promise of close(...).
@Test(expected = IllegalArgumentException.class)
public void testUnexpectedVoidChannelPromiseCloseFuture() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel()).sync();
    try {
        ChannelPromise promise = (ChannelPromise) pipeline.channel().closeFuture();
        pipeline.close(promise); // expected to throw IllegalArgumentException
    } finally {
        pipeline.close();
    }
}
// deregister(...) with a cancelled promise must yield a cancelled future.
@Test
public void testCancelDeregister() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel());
    ChannelPromise promise = pipeline.channel().newPromise();
    assertTrue(promise.cancel(false));
    ChannelFuture future = pipeline.deregister(promise);
    assertTrue(future.isCancelled());
}

// write(...) with a cancelled promise must yield a cancelled future and
// release the message so no buffer is leaked.
@Test
public void testCancelWrite() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel());
    ChannelPromise promise = pipeline.channel().newPromise();
    assertTrue(promise.cancel(false));
    ByteBuf buffer = Unpooled.buffer();
    assertEquals(1, buffer.refCnt());
    ChannelFuture future = pipeline.write(buffer, promise);
    assertTrue(future.isCancelled());
    // The cancelled write must have released the buffer.
    assertEquals(0, buffer.refCnt());
}

// writeAndFlush(...) with a cancelled promise must yield a cancelled future
// and release the message so no buffer is leaked.
@Test
public void testCancelWriteAndFlush() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    group.register(pipeline.channel());
    ChannelPromise promise = pipeline.channel().newPromise();
    assertTrue(promise.cancel(false));
    ByteBuf buffer = Unpooled.buffer();
    assertEquals(1, buffer.refCnt());
    ChannelFuture future = pipeline.writeAndFlush(buffer, promise);
    assertTrue(future.isCancelled());
    // The cancelled write must have released the buffer.
    assertEquals(0, buffer.refCnt());
}
// On an empty pipeline the first/last accessors must return null rather than
// exposing the internal head/tail sentinel contexts.

@Test
public void testFirstContextEmptyPipeline() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    assertNull(pipeline.firstContext());
}

@Test
public void testLastContextEmptyPipeline() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    assertNull(pipeline.lastContext());
}

@Test
public void testFirstHandlerEmptyPipeline() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    assertNull(pipeline.first());
}

@Test
public void testLastHandlerEmptyPipeline() throws Exception {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    assertNull(pipeline.last());
}
// An exception thrown from ChannelInitializer.initChannel(...) must be routed
// to exceptionCaught(...) and must leave the channel inactive.
@Test(timeout = 5000)
public void testChannelInitializerException() throws Exception {
    final IllegalStateException exception = new IllegalStateException();
    final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
    final CountDownLatch latch = new CountDownLatch(1);
    EmbeddedChannel channel = new EmbeddedChannel(new ChannelInitializer<Channel>() {
        @Override
        protected void initChannel(Channel ch) throws Exception {
            throw exception;
        }
        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
            super.exceptionCaught(ctx, cause);
            error.set(cause);
            latch.countDown();
        }
    });
    latch.await();
    assertFalse(channel.isActive());
    // The very exception thrown from initChannel(...) must be observed.
    assertSame(exception, error.get());
}
// channelUnregistered(...) and handlerRemoved(...) must still be invoked when
// the handler runs on a custom (non-event-loop) executor.
@Test
public void testChannelUnregistrationWithCustomExecutor() throws Exception {
    final CountDownLatch channelLatch = new CountDownLatch(1);
    final CountDownLatch handlerLatch = new CountDownLatch(1);
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    pipeline.addLast(new ChannelInitializer<Channel>() {
        @Override
        protected void initChannel(Channel ch) throws Exception {
            // WrapperExecutor delegates to a plain single-thread executor,
            // i.e. the handler never runs on the channel's event loop.
            ch.pipeline().addLast(new WrapperExecutor(),
                new ChannelInboundHandlerAdapter() {
                    @Override
                    public void channelUnregistered(ChannelHandlerContext ctx) throws Exception {
                        channelLatch.countDown();
                    }
                    @Override
                    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
                        handlerLatch.countDown();
                    }
                });
        }
    });
    Channel channel = pipeline.channel();
    group.register(channel);
    channel.close();
    channel.deregister();
    assertTrue(channelLatch.await(2, TimeUnit.SECONDS));
    assertTrue(handlerLatch.await(2, TimeUnit.SECONDS));
}
// handlerAdded(...) for a handler added before registration is deferred until
// the channel is registered; removal afterwards must still invoke
// handlerRemoved(...). CheckEventExecutorHandler also verifies the callbacks
// run on the expected executor.
@Test(timeout = 3000)
public void testAddHandlerBeforeRegisteredThenRemove() {
    final EventLoop loop = group.next();
    CheckEventExecutorHandler handler = new CheckEventExecutorHandler(loop);
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    pipeline.addFirst(handler);
    // Not registered yet, so handlerAdded(...) must not have fired.
    assertFalse(handler.addedPromise.isDone());
    group.register(pipeline.channel());
    handler.addedPromise.syncUninterruptibly();
    pipeline.remove(handler);
    handler.removedPromise.syncUninterruptibly();
}

// Same as above, but the handler is replaced instead of removed; the
// replacement's handlerAdded(...) must fire as well.
@Test(timeout = 3000)
public void testAddHandlerBeforeRegisteredThenReplace() throws Exception {
    final EventLoop loop = group.next();
    final CountDownLatch latch = new CountDownLatch(1);
    CheckEventExecutorHandler handler = new CheckEventExecutorHandler(loop);
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    pipeline.addFirst(handler);
    assertFalse(handler.addedPromise.isDone());
    group.register(pipeline.channel());
    handler.addedPromise.syncUninterruptibly();
    pipeline.replace(handler, null, new ChannelHandlerAdapter() {
        @Override
        public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
            latch.countDown();
        }
    });
    handler.removedPromise.syncUninterruptibly();
    latch.await();
}
// Adding then removing a handler on an unregistered channel must not trigger
// handlerAdded(...)/handlerRemoved(...); ErrorChannelHandler records an error
// if either callback fires.
@Test
public void testAddRemoveHandlerNotRegistered() throws Throwable {
    final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
    ChannelHandler handler = new ErrorChannelHandler(error);
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    pipeline.addFirst(handler);
    pipeline.remove(handler);
    Throwable cause = error.get();
    if (cause != null) {
        throw cause;
    }
}

// Same invariant for replace(...) on an unregistered channel.
@Test
public void testAddReplaceHandlerNotRegistered() throws Throwable {
    final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
    ChannelHandler handler = new ErrorChannelHandler(error);
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    pipeline.addFirst(handler);
    pipeline.replace(handler, null, new ErrorChannelHandler(error));
    Throwable cause = error.get();
    if (cause != null) {
        throw cause;
    }
}
// handlerAdded(...) must be observed in pipeline order (with some freedom for
// handlers on distinct executor groups) and handlerRemoved(...) strictly in
// reverse pipeline order on close.
@Test(timeout = 3000)
public void testHandlerAddedAndRemovedCalledInCorrectOrder() throws Throwable {
    final EventExecutorGroup group1 = new DefaultEventExecutorGroup(1);
    final EventExecutorGroup group2 = new DefaultEventExecutorGroup(1);
    try {
        BlockingQueue<CheckOrderHandler> addedQueue = new LinkedBlockingQueue<CheckOrderHandler>();
        BlockingQueue<CheckOrderHandler> removedQueue = new LinkedBlockingQueue<CheckOrderHandler>();
        CheckOrderHandler handler1 = new CheckOrderHandler(addedQueue, removedQueue);
        CheckOrderHandler handler2 = new CheckOrderHandler(addedQueue, removedQueue);
        CheckOrderHandler handler3 = new CheckOrderHandler(addedQueue, removedQueue);
        CheckOrderHandler handler4 = new CheckOrderHandler(addedQueue, removedQueue);
        ChannelPipeline pipeline = new LocalChannel().pipeline();
        pipeline.addLast(handler1);
        group.register(pipeline.channel()).syncUninterruptibly();
        pipeline.addLast(group1, handler2);
        pipeline.addLast(group2, handler3);
        pipeline.addLast(handler4);
        assertTrue(removedQueue.isEmpty());
        pipeline.channel().close().syncUninterruptibly();
        assertHandler(addedQueue.take(), handler1);
        // Depending on timing this can be handler2 or handler3 as these use different EventExecutorGroups.
        assertHandler(addedQueue.take(), handler2, handler3, handler4);
        assertHandler(addedQueue.take(), handler2, handler3, handler4);
        assertHandler(addedQueue.take(), handler2, handler3, handler4);
        assertTrue(addedQueue.isEmpty());
        // Removal order is deterministic: strictly reverse of addition.
        assertHandler(removedQueue.take(), handler4);
        assertHandler(removedQueue.take(), handler3);
        assertHandler(removedQueue.take(), handler2);
        assertHandler(removedQueue.take(), handler1);
        assertTrue(removedQueue.isEmpty());
    } finally {
        group1.shutdownGracefully();
        group2.shutdownGracefully();
    }
}
// An exception thrown from handlerAdded(...) of one handler must be propagated
// through the pipeline and observed (wrapped in a ChannelPipelineException) by
// another handler's exceptionCaught(...) — see CheckExceptionHandler.
@Test(timeout = 3000)
public void testHandlerAddedExceptionFromChildHandlerIsPropagated() {
    final EventExecutorGroup group1 = new DefaultEventExecutorGroup(1);
    try {
        final Promise<Void> promise = group1.next().newPromise();
        final AtomicBoolean handlerAdded = new AtomicBoolean();
        final Exception exception = new RuntimeException();
        ChannelPipeline pipeline = new LocalChannel().pipeline();
        pipeline.addLast(group1, new CheckExceptionHandler(exception, promise));
        pipeline.addFirst(new ChannelHandlerAdapter() {
            @Override
            public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
                handlerAdded.set(true);
                throw exception;
            }
        });
        // handlerAdded(...) must not run before the channel is registered.
        assertFalse(handlerAdded.get());
        group.register(pipeline.channel());
        promise.syncUninterruptibly();
    } finally {
        group1.shutdownGracefully();
    }
}

// Same propagation guarantee for an exception thrown from handlerRemoved(...).
@Test(timeout = 3000)
public void testHandlerRemovedExceptionFromChildHandlerIsPropagated() {
    final EventExecutorGroup group1 = new DefaultEventExecutorGroup(1);
    try {
        final Promise<Void> promise = group1.next().newPromise();
        String handlerName = "foo";
        final Exception exception = new RuntimeException();
        ChannelPipeline pipeline = new LocalChannel().pipeline();
        pipeline.addLast(handlerName, new ChannelHandlerAdapter() {
            @Override
            public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
                throw exception;
            }
        });
        pipeline.addLast(group1, new CheckExceptionHandler(exception, promise));
        group.register(pipeline.channel()).syncUninterruptibly();
        pipeline.remove(handlerName);
        promise.syncUninterruptibly();
    } finally {
        group1.shutdownGracefully();
    }
}
// When handlerAdded(...) throws, the handler is removed again; if
// handlerRemoved(...) then also throws, the original handlerAdded exception
// must still be the one propagated, and the handler must end up out of the
// pipeline.
@Test(timeout = 3000)
public void testHandlerAddedThrowsAndRemovedThrowsException() throws InterruptedException {
    final EventExecutorGroup group1 = new DefaultEventExecutorGroup(1);
    try {
        final CountDownLatch latch = new CountDownLatch(1);
        final Promise<Void> promise = group1.next().newPromise();
        final Exception exceptionAdded = new RuntimeException();
        final Exception exceptionRemoved = new RuntimeException();
        String handlerName = "foo";
        ChannelPipeline pipeline = new LocalChannel().pipeline();
        pipeline.addLast(group1, new CheckExceptionHandler(exceptionAdded, promise));
        pipeline.addFirst(handlerName, new ChannelHandlerAdapter() {
            @Override
            public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
                throw exceptionAdded;
            }
            @Override
            public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
                // Execute this later so we are sure the exception is handled first.
                ctx.executor().execute(new Runnable() {
                    @Override
                    public void run() {
                        latch.countDown();
                    }
                });
                throw exceptionRemoved;
            }
        });
        group.register(pipeline.channel()).syncUninterruptibly();
        latch.await();
        // The failing handler must have been removed from the pipeline.
        assertNull(pipeline.context(handlerName));
        promise.syncUninterruptibly();
    } finally {
        group1.shutdownGracefully();
    }
}
// Handlers added and removed before registration get their handlerAdded(...)
// and handlerRemoved(...) callbacks exactly once, deferred until the channel
// is registered (CallbackCheckHandler records violations).
@Test(timeout = 2000)
public void testAddRemoveHandlerCalledOnceRegistered() throws Throwable {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    CallbackCheckHandler handler = new CallbackCheckHandler();
    pipeline.addFirst(handler);
    pipeline.remove(handler);
    // Before registration neither callback has fired yet.
    assertNull(handler.addedHandler.getNow());
    assertNull(handler.removedHandler.getNow());
    group.register(pipeline.channel()).syncUninterruptibly();
    Throwable cause = handler.error.get();
    if (cause != null) {
        throw cause;
    }
    assertTrue(handler.addedHandler.get());
    assertTrue(handler.removedHandler.get());
}

// Same deferral guarantee when the handler is replaced before registration:
// the old handler sees added+removed, the replacement sees added only.
@Test(timeout = 3000)
public void testAddReplaceHandlerCalledOnceRegistered() throws Throwable {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    CallbackCheckHandler handler = new CallbackCheckHandler();
    CallbackCheckHandler handler2 = new CallbackCheckHandler();
    pipeline.addFirst(handler);
    pipeline.replace(handler, null, handler2);
    assertNull(handler.addedHandler.getNow());
    assertNull(handler.removedHandler.getNow());
    assertNull(handler2.addedHandler.getNow());
    assertNull(handler2.removedHandler.getNow());
    group.register(pipeline.channel()).syncUninterruptibly();
    Throwable cause = handler.error.get();
    if (cause != null) {
        throw cause;
    }
    assertTrue(handler.addedHandler.get());
    assertTrue(handler.removedHandler.get());
    Throwable cause2 = handler2.error.get();
    if (cause2 != null) {
        throw cause2;
    }
    assertTrue(handler2.addedHandler.get());
    assertNull(handler2.removedHandler.getNow());
    pipeline.remove(handler2);
    assertTrue(handler2.removedHandler.get());
}
// Concurrently adds handlers to each pipeline from the OTHER pipeline's event
// loop; must complete without deadlock or error (TestTask counts down the latch).
@Test(timeout = 3000)
public void testAddBefore() throws Throwable {
    ChannelPipeline pipeline1 = new LocalChannel().pipeline();
    ChannelPipeline pipeline2 = new LocalChannel().pipeline();
    EventLoopGroup defaultGroup = new DefaultEventLoopGroup(2);
    try {
        EventLoop eventLoop1 = defaultGroup.next();
        EventLoop eventLoop2 = defaultGroup.next();
        eventLoop1.register(pipeline1.channel()).syncUninterruptibly();
        eventLoop2.register(pipeline2.channel()).syncUninterruptibly();
        CountDownLatch latch = new CountDownLatch(2 * 10);
        for (int i = 0; i < 10; i++) {
            eventLoop1.execute(new TestTask(pipeline2, latch));
            eventLoop2.execute(new TestTask(pipeline1, latch));
        }
        latch.await();
    } finally {
        defaultGroup.shutdownGracefully();
    }
}

// Transport-specific variants of testAddInListener(...) below.
@Test(timeout = 3000)
public void testAddInListenerNio() throws Throwable {
    testAddInListener(new NioSocketChannel(), new NioEventLoopGroup(1));
}
@Test(timeout = 3000)
public void testAddInListenerOio() throws Throwable {
    testAddInListener(new OioSocketChannel(), new OioEventLoopGroup(1));
}
@Test(timeout = 3000)
public void testAddInListenerLocal() throws Throwable {
    testAddInListener(new LocalChannel(), new DefaultEventLoopGroup(1));
}
// A handler added from a registration-future listener must have
// handlerAdded(...) invoked synchronously (before addLast returns) so it can
// observe events fired immediately afterwards.
private static void testAddInListener(Channel channel, EventLoopGroup group) throws Throwable {
    ChannelPipeline pipeline1 = channel.pipeline();
    try {
        final Object event = new Object();
        final Promise<Object> promise = ImmediateEventExecutor.INSTANCE.newPromise();
        group.register(pipeline1.channel()).addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                ChannelPipeline pipeline = future.channel().pipeline();
                final AtomicBoolean handlerAddedCalled = new AtomicBoolean();
                pipeline.addLast(new ChannelInboundHandlerAdapter() {
                    @Override
                    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
                        handlerAddedCalled.set(true);
                    }
                    @Override
                    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
                        promise.setSuccess(event);
                    }
                    @Override
                    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
                        promise.setFailure(cause);
                    }
                });
                if (!handlerAddedCalled.get()) {
                    promise.setFailure(new AssertionError("handlerAdded(...) should have been called"));
                    return;
                }
                // This event must be captured by the added handler.
                pipeline.fireUserEventTriggered(event);
            }
        });
        assertSame(event, promise.syncUninterruptibly().getNow());
    } finally {
        pipeline1.channel().close().syncUninterruptibly();
        group.shutdownGracefully();
    }
}
// A null handler name is permitted everywhere a name parameter exists; the
// pipeline generates a name automatically. None of these calls may throw.
@Test
public void testNullName() {
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    pipeline.addLast(newHandler());
    pipeline.addLast(null, newHandler());
    pipeline.addFirst(newHandler());
    pipeline.addFirst(null, newHandler());
    pipeline.addLast("test", newHandler());
    pipeline.addAfter("test", null, newHandler());
    pipeline.addBefore("test", null, newHandler());
}
// With an UnorderedThreadPoolEventExecutor, event delivery need not wait for a
// blocked handlerAdded(...): userEventTriggered(...) may run on another thread.
@Test(timeout = 3000)
public void testUnorderedEventExecutor() throws Throwable {
    ChannelPipeline pipeline1 = new LocalChannel().pipeline();
    EventExecutorGroup eventExecutors = new UnorderedThreadPoolEventExecutor(2);
    EventLoopGroup defaultGroup = new DefaultEventLoopGroup(1);
    try {
        EventLoop eventLoop1 = defaultGroup.next();
        eventLoop1.register(pipeline1.channel()).syncUninterruptibly();
        final CountDownLatch latch = new CountDownLatch(1);
        pipeline1.addLast(eventExecutors, new ChannelInboundHandlerAdapter() {
            @Override
            public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
                // Just block one of the two threads.
                LockSupport.park();
            }
            @Override
            public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
                latch.countDown();
            }
        });
        // Trigger an event, as we use UnorderedEventExecutor userEventTriggered should be called even when
        // handlerAdded(...) blocks.
        pipeline1.fireUserEventTriggered("");
        latch.await();
    } finally {
        defaultGroup.shutdownGracefully(0, 0, TimeUnit.SECONDS).syncUninterruptibly();
        eventExecutors.shutdownGracefully(0, 0, TimeUnit.SECONDS).syncUninterruptibly();
    }
}
// By default a pipeline pins one EventExecutor per group per channel: handlers
// of the same channel share an executor, different channels may get different ones.
@Test
public void testPinExecutor() {
    EventExecutorGroup group = new DefaultEventExecutorGroup(2);
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    ChannelPipeline pipeline2 = new LocalChannel().pipeline();
    pipeline.addLast(group, "h1", new ChannelInboundHandlerAdapter());
    pipeline.addLast(group, "h2", new ChannelInboundHandlerAdapter());
    pipeline2.addLast(group, "h3", new ChannelInboundHandlerAdapter());
    EventExecutor executor1 = pipeline.context("h1").executor();
    EventExecutor executor2 = pipeline.context("h2").executor();
    assertNotNull(executor1);
    assertNotNull(executor2);
    // Same channel + same group => same pinned executor.
    assertSame(executor1, executor2);
    EventExecutor executor3 = pipeline2.context("h3").executor();
    assertNotNull(executor3);
    assertNotSame(executor3, executor2);
    group.shutdownGracefully(0, 0, TimeUnit.SECONDS);
}

// With SINGLE_EVENTEXECUTOR_PER_GROUP disabled, each handler may be assigned a
// distinct executor from the group.
@Test
public void testNotPinExecutor() {
    EventExecutorGroup group = new DefaultEventExecutorGroup(2);
    ChannelPipeline pipeline = new LocalChannel().pipeline();
    pipeline.channel().config().setOption(ChannelOption.SINGLE_EVENTEXECUTOR_PER_GROUP, false);
    pipeline.addLast(group, "h1", new ChannelInboundHandlerAdapter());
    pipeline.addLast(group, "h2", new ChannelInboundHandlerAdapter());
    EventExecutor executor1 = pipeline.context("h1").executor();
    EventExecutor executor2 = pipeline.context("h2").executor();
    assertNotNull(executor1);
    assertNotNull(executor2);
    assertNotSame(executor1, executor2);
    group.shutdownGracefully(0, 0, TimeUnit.SECONDS);
}
// A write failure with a void promise cannot notify a listener, so the failure
// must instead be delivered to the pipeline via exceptionCaught(...).
@Test(timeout = 3000)
public void testVoidPromiseNotify() throws Throwable {
    ChannelPipeline pipeline1 = new LocalChannel().pipeline();
    EventLoopGroup defaultGroup = new DefaultEventLoopGroup(1);
    EventLoop eventLoop1 = defaultGroup.next();
    final Promise<Throwable> promise = eventLoop1.newPromise();
    final Exception exception = new IllegalArgumentException();
    try {
        eventLoop1.register(pipeline1.channel()).syncUninterruptibly();
        pipeline1.addLast(new ChannelDuplexHandler() {
            @Override
            public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
                throw exception;
            }
            @Override
            public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
                promise.setSuccess(cause);
            }
        });
        pipeline1.write("test", pipeline1.voidPromise());
        // The exact exception thrown by write(...) must arrive at exceptionCaught(...).
        assertSame(exception, promise.syncUninterruptibly().getNow());
    } finally {
        pipeline1.channel().close().syncUninterruptibly();
        defaultGroup.shutdownGracefully();
    }
}
// The handler's "added" state must be visible before handlerAdded(...) has
// finished, so events fired from within handlerAdded(...) are deliverable.
// Exercised both from inside and outside the event loop.

@Test(timeout = 5000)
public void handlerAddedStateUpdatedBeforeHandlerAddedDoneForceEventLoop() throws InterruptedException {
    handlerAddedStateUpdatedBeforeHandlerAddedDone(true);
}

@Test(timeout = 5000)
public void handlerAddedStateUpdatedBeforeHandlerAddedDoneOnCallingThread() throws InterruptedException {
    handlerAddedStateUpdatedBeforeHandlerAddedDone(false);
}

private static void handlerAddedStateUpdatedBeforeHandlerAddedDone(boolean executeInEventLoop)
    throws InterruptedException {
    final ChannelPipeline pipeline = new LocalChannel().pipeline();
    final Object userEvent = new Object();
    final Object writeObject = new Object();
    final CountDownLatch doneLatch = new CountDownLatch(1);
    group.register(pipeline.channel());
    Runnable r = new Runnable() {
        @Override
        public void run() {
            // Second handler: reacts to the user event by writing.
            pipeline.addLast(new ChannelInboundHandlerAdapter() {
                @Override
                public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
                    if (evt == userEvent) {
                        ctx.write(writeObject);
                    }
                    ctx.fireUserEventTriggered(evt);
                }
            });
            // First handler: fires the user event from handlerAdded(...) and
            // expects the resulting write to travel back through it.
            pipeline.addFirst(new ChannelDuplexHandler() {
                @Override
                public void handlerAdded(ChannelHandlerContext ctx) {
                    ctx.fireUserEventTriggered(userEvent);
                }
                @Override
                public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
                    if (msg == writeObject) {
                        doneLatch.countDown();
                    }
                    ctx.write(msg, promise);
                }
            });
        }
    };
    if (executeInEventLoop) {
        pipeline.channel().eventLoop().execute(r);
    } else {
        r.run();
    }
    doneLatch.await();
}
/**
 * Task that installs one no-op inbound handler on a target pipeline and then
 * counts down the shared completion latch. Used to exercise concurrent
 * pipeline modification from foreign event loops.
 */
private static final class TestTask implements Runnable {
    private final ChannelPipeline target;
    private final CountDownLatch done;

    TestTask(ChannelPipeline pipeline, CountDownLatch latch) {
        target = pipeline;
        done = latch;
    }

    @Override
    public void run() {
        target.addLast(new ChannelInboundHandlerAdapter());
        done.countDown();
    }
}
/**
 * Records whether handlerAdded/handlerRemoved each fired exactly once and in
 * the correct order; any violation is stored in {@link #error} for the test
 * to rethrow.
 */
private static final class CallbackCheckHandler extends ChannelHandlerAdapter {
    // Completed with TRUE on the first (and only expected) invocation.
    final Promise<Boolean> addedHandler = ImmediateEventExecutor.INSTANCE.newPromise();
    final Promise<Boolean> removedHandler = ImmediateEventExecutor.INSTANCE.newPromise();
    final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        if (!addedHandler.trySuccess(true)) {
            // trySuccess failing means the promise was already completed.
            error.set(new AssertionError("handlerAdded(...) called multiple times: " + ctx.name()));
        } else if (removedHandler.getNow() == Boolean.TRUE) {
            error.set(new AssertionError("handlerRemoved(...) called before handlerAdded(...): " + ctx.name()));
        }
    }
    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
        if (!removedHandler.trySuccess(true)) {
            error.set(new AssertionError("handlerRemoved(...) called multiple times: " + ctx.name()));
        } else if (addedHandler.getNow() == Boolean.FALSE) {
            error.set(new AssertionError("handlerRemoved(...) called before handlerAdded(...): " + ctx.name()));
        }
    }
}
/**
 * Completes the given promise successfully only when exceptionCaught receives
 * a ChannelPipelineException whose cause is the expected throwable; otherwise
 * fails the promise.
 */
private static final class CheckExceptionHandler extends ChannelInboundHandlerAdapter {
    private final Throwable expected;
    private final Promise<Void> promise;
    CheckExceptionHandler(Throwable expected, Promise<Void> promise) {
        this.expected = expected;
        this.promise = promise;
    }
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        // Handler callbacks' exceptions are wrapped in ChannelPipelineException.
        if (cause instanceof ChannelPipelineException && cause.getCause() == expected) {
            promise.setSuccess(null);
        } else {
            promise.setFailure(new AssertionError("cause not the expected instance"));
        }
    }
}
/**
 * Asserts that {@code actual} is one of the expected handlers and, if so,
 * rethrows any error that handler recorded.
 */
private static void assertHandler(CheckOrderHandler actual, CheckOrderHandler... handlers) throws Throwable {
    boolean expected = false;
    for (CheckOrderHandler candidate : handlers) {
        if (candidate == actual) {
            expected = true;
            break;
        }
    }
    if (!expected) {
        fail("handler was not one of the expected handlers");
    }
    actual.checkError();
}
/**
 * Appends itself to the shared added/removed queues as its life-cycle
 * callbacks fire, so tests can assert callback ordering; also verifies each
 * callback runs on its own executor's thread.
 */
private static final class CheckOrderHandler extends ChannelHandlerAdapter {
    private final Queue<CheckOrderHandler> addedQueue;
    private final Queue<CheckOrderHandler> removedQueue;
    private final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
    CheckOrderHandler(Queue<CheckOrderHandler> addedQueue, Queue<CheckOrderHandler> removedQueue) {
        this.addedQueue = addedQueue;
        this.removedQueue = removedQueue;
    }
    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        addedQueue.add(this);
        checkExecutor(ctx);
    }
    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
        removedQueue.add(this);
        checkExecutor(ctx);
    }
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        error.set(cause);
    }
    // Rethrows the first recorded failure, if any.
    void checkError() throws Throwable {
        Throwable cause = error.get();
        if (cause != null) {
            throw cause;
        }
    }
    // Records an error when invoked outside this handler's executor thread.
    private void checkExecutor(ChannelHandlerContext ctx) {
        if (!ctx.executor().inEventLoop()) {
            error.set(new AssertionError());
        }
    }
}
/**
 * Verifies that handlerAdded/handlerRemoved run with the expected
 * EventExecutor; the outcome of each check is published via the corresponding
 * promise so tests can synchronize on it.
 */
private static final class CheckEventExecutorHandler extends ChannelHandlerAdapter {
    final EventExecutor executor;
    final Promise<Void> addedPromise;
    final Promise<Void> removedPromise;
    CheckEventExecutorHandler(EventExecutor executor) {
        this.executor = executor;
        addedPromise = executor.newPromise();
        removedPromise = executor.newPromise();
    }
    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        assertExecutor(ctx, addedPromise);
    }
    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
        assertExecutor(ctx, removedPromise);
    }
    // Succeeds the promise iff ctx.executor() is the expected executor;
    // any exception from ctx.executor() itself also fails the promise.
    private void assertExecutor(ChannelHandlerContext ctx, Promise<Void> promise) {
        final boolean same;
        try {
            same = executor == ctx.executor();
        } catch (Throwable cause) {
            promise.setFailure(cause);
            return;
        }
        if (same) {
            promise.setSuccess(null);
        } else {
            promise.setFailure(new AssertionError("EventExecutor not the same"));
        }
    }
}
/**
 * Records an AssertionError if either life-cycle callback fires at all —
 * used by tests asserting that no callbacks happen before registration.
 */
private static final class ErrorChannelHandler extends ChannelHandlerAdapter {
    private final AtomicReference<Throwable> error;
    ErrorChannelHandler(AtomicReference<Throwable> error) {
        this.error = error;
    }
    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        error.set(new AssertionError());
    }
    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
        error.set(new AssertionError());
    }
}
/**
 * Returns the numeric name of the context following {@code ctx}, or
 * Integer.MAX_VALUE when there is no successor.
 */
private static int next(AbstractChannelHandlerContext ctx) {
    AbstractChannelHandlerContext successor = ctx.next;
    return successor == null ? Integer.MAX_VALUE : toInt(successor.name());
}
/**
 * Parses a handler name as a decimal integer; yields -1 for names that are
 * not numeric.
 */
private static int toInt(String name) {
    try {
        return Integer.parseInt(name);
    } catch (NumberFormatException notNumeric) {
        return -1;
    }
}
// Walks the pipeline's context chain from the first user context up to (but
// excluding) the tail sentinel and asserts the handler count matches.
private static void verifyContextNumber(ChannelPipeline pipeline, int expectedNumber) {
    AbstractChannelHandlerContext ctx = (AbstractChannelHandlerContext) pipeline.firstContext();
    int handlerNumber = 0;
    while (ctx != ((DefaultChannelPipeline) pipeline).tail) {
        handlerNumber++;
        ctx = ctx.next;
    }
    assertEquals(expectedNumber, handlerNumber);
}
/**
 * Creates {@code num} fresh TestHandler instances; {@code num} must be positive.
 */
private static ChannelHandler[] newHandlers(int num) {
    assert num > 0;
    ChannelHandler[] result = new ChannelHandler[num];
    for (int index = 0; index < result.length; index++) {
        result[index] = newHandler();
    }
    return result;
}
// Factory for the sharable no-op duplex handler used throughout these tests.
private static ChannelHandler newHandler() {
    return new TestHandler();
}
// No-op duplex handler; @Sharable so one instance may sit in several pipelines.
@Sharable
private static class TestHandler extends ChannelDuplexHandler { }
/**
 * Buffers every inbound and outbound message instead of forwarding it; on
 * removal, flushes the inbound buffer onward and the outbound buffer backward
 * through the pipeline. Backs testRemoveAndForwardInbound/Outbound tests.
 */
private static class BufferedTestHandler extends ChannelDuplexHandler {
    final Queue<Object> inboundBuffer = new ArrayDeque<Object>();
    final Queue<Object> outboundBuffer = new ArrayDeque<Object>();
    @Override
    public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
        // Swallow the write; the promise is intentionally left unnotified.
        outboundBuffer.add(msg);
    }
    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        inboundBuffer.add(msg);
    }
    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
        // Re-fire buffered inbound messages towards the next handler.
        if (!inboundBuffer.isEmpty()) {
            for (Object o: inboundBuffer) {
                ctx.fireChannelRead(o);
            }
            ctx.fireChannelReadComplete();
        }
        // Re-issue buffered outbound messages towards the previous handler.
        if (!outboundBuffer.isEmpty()) {
            for (Object o: outboundBuffer) {
                ctx.write(o);
            }
            ctx.flush();
        }
    }
}
/** Test handler to validate life-cycle aware behavior. */
private static final class LifeCycleAwareTestHandler extends ChannelHandlerAdapter {
    private final String name;
    // Flipped exactly once by the corresponding callback.
    private boolean afterAdd;
    private boolean afterRemove;
    /**
     * Constructs life-cycle aware test handler.
     *
     * @param name Handler name to display in assertion messages.
     */
    private LifeCycleAwareTestHandler(String name) {
        this.name = name;
    }
    // Asserts the current life-cycle state matches the expected flags.
    public void validate(boolean afterAdd, boolean afterRemove) {
        assertEquals(name, afterAdd, this.afterAdd);
        assertEquals(name, afterRemove, this.afterRemove);
    }
    @Override
    public void handlerAdded(ChannelHandlerContext ctx) {
        validate(false, false); // must be the first callback
        afterAdd = true;
    }
    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) {
        validate(true, false); // add must have happened, remove not yet
        afterRemove = true;
    }
}
/**
 * EventExecutor facade over a plain single-thread ExecutorService. Reports
 * inEventLoop() == false for every thread, forcing the pipeline onto the
 * "foreign executor" code paths. shutdownGracefully/terminationFuture are
 * unsupported (tests never call them).
 */
private static final class WrapperExecutor extends AbstractEventExecutor {
    private final ExecutorService wrapped = Executors.newSingleThreadExecutor();
    @Override
    public boolean isShuttingDown() {
        // The plain ExecutorService has no separate "shutting down" state.
        return wrapped.isShutdown();
    }
    @Override
    public Future<?> shutdownGracefully(long l, long l2, TimeUnit timeUnit) {
        throw new IllegalStateException();
    }
    @Override
    public Future<?> terminationFuture() {
        throw new IllegalStateException();
    }
    @Override
    public void shutdown() {
        wrapped.shutdown();
    }
    @Override
    public List<Runnable> shutdownNow() {
        return wrapped.shutdownNow();
    }
    @Override
    public boolean isShutdown() {
        return wrapped.isShutdown();
    }
    @Override
    public boolean isTerminated() {
        return wrapped.isTerminated();
    }
    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
        return wrapped.awaitTermination(timeout, unit);
    }
    @Override
    public EventExecutorGroup parent() {
        return null;
    }
    @Override
    public boolean inEventLoop(Thread thread) {
        // Never claim to be in the event loop; callers must always schedule.
        return false;
    }
    @Override
    public void execute(Runnable command) {
        wrapped.execute(command);
    }
}
}
| |
package org.oliot.epcis.converter.mongodb.model;
import java.util.List;
import java.util.Map;
import org.bson.BsonArray;
import org.bson.BsonDateTime;
import org.bson.BsonDocument;
import org.bson.BsonDouble;
import org.bson.BsonString;
import org.bson.BsonValue;
/**
* Copyright (C) 2014-17 Jaewook Byun
*
* This project is part of Oliot (oliot.org), pursuing the implementation of
* Electronic Product Code Information Service(EPCIS) v1.1 specification in
* EPCglobal.
* [http://www.gs1.org/gsmp/kc/epcglobal/epcis/epcis_1_1-standard-20140520.pdf]
*
*
* @author Jaewook Jack Byun, Ph.D. student
*
* Korea Advanced Institute of Science and Technology (KAIST)
*
* Real-time Embedded System Laboratory(RESL)
*
* bjw0829@kaist.ac.kr, bjw0829@gmail.com
*/
public class CaptureUtil {
// Stores the event time (epoch milliseconds) as a BSON date. Returns the same
// document to allow call chaining, as do all putters in this class.
public BsonDocument putEventTime(BsonDocument base, Long eventTime) {
    base.put("eventTime", new BsonDateTime(eventTime));
    return base;
}

// Stores the event's time-zone offset string (e.g. "+09:00") verbatim.
public BsonDocument putEventTimeZoneOffset(BsonDocument base, String eventTimeZoneOffset) {
    base.put("eventTimeZoneOffset", new BsonString(eventTimeZoneOffset));
    return base;
}

// Stores the EPCIS event type name (e.g. ObjectEvent).
public BsonDocument putEventType(BsonDocument base, String eventType) {
    base.put("eventType", new BsonString(eventType));
    return base;
}

// Stores the event action (ADD / OBSERVE / DELETE).
public BsonDocument putAction(BsonDocument base, String action) {
    base.put("action", new BsonString(action));
    return base;
}

// Stores the parent identifier of an aggregation/transaction event.
public BsonDocument putParentID(BsonDocument base, String parentID) {
    base.put("parentID", new BsonString(parentID));
    return base;
}
/**
 * Builds a BsonArray of {"epc": <uri>} documents from the given EPC URIs.
 * Shared by the four EPC-list putters below, which previously duplicated
 * this loop verbatim.
 */
private BsonArray toEpcBsonArray(List<String> epcs) {
    BsonArray bsonEPCList = new BsonArray();
    for (String epc : epcs) {
        bsonEPCList.add(new BsonDocument("epc", new BsonString(epc)));
    }
    return bsonEPCList;
}

// Stores the event's epcList field. Returns the same document for chaining.
public BsonDocument putEPCList(BsonDocument base, List<String> epcList) {
    base.put("epcList", toEpcBsonArray(epcList));
    return base;
}

// Stores an aggregation event's childEPCs field.
public BsonDocument putChildEPCs(BsonDocument base, List<String> childEPCs) {
    base.put("childEPCs", toEpcBsonArray(childEPCs));
    return base;
}

// Stores a transformation event's inputEPCList field.
public BsonDocument putInputEPCList(BsonDocument base, List<String> inputEPCList) {
    base.put("inputEPCList", toEpcBsonArray(inputEPCList));
    return base;
}

// Stores a transformation event's outputEPCList field.
public BsonDocument putOutputEPCList(BsonDocument base, List<String> outputEPCList) {
    base.put("outputEPCList", toEpcBsonArray(outputEPCList));
    return base;
}
// Stores a transformation event's transformationID. Returns the same document
// to allow call chaining, as do all putters in this class.
public BsonDocument putTransformationID(BsonDocument base, String transformationID) {
    base.put("transformationID", new BsonString(transformationID));
    return base;
}

// Stores the business step URI (e.g. urn:epcglobal:cbv:bizstep:shipping).
public BsonDocument putBizStep(BsonDocument base, String bizStep) {
    base.put("bizStep", new BsonString(bizStep));
    return base;
}

// Stores the disposition URI of the event.
public BsonDocument putDisposition(BsonDocument base, String disposition) {
    base.put("disposition", new BsonString(disposition));
    return base;
}

// Stores the read point wrapped as {"id": <uri>} per the EPCIS schema shape.
public BsonDocument putReadPoint(BsonDocument base, String readPoint) {
    base.put("readPoint", new BsonDocument("id", new BsonString(readPoint)));
    return base;
}

// Stores the business location wrapped as {"id": <uri>}.
public BsonDocument putBizLocation(BsonDocument base, String bizLocation) {
    base.put("bizLocation", new BsonDocument("id", new BsonString(bizLocation)));
    return base;
}
public BsonDocument putBizTransactionList(BsonDocument base, Map<String, List<String>> bizTransactionList) {
BsonArray bsonBizTransactionList = new BsonArray();
for (String key : bizTransactionList.keySet()) {
List<String> list = bizTransactionList.get(key);
for (String element : list) {
bsonBizTransactionList.add(new BsonDocument(key, new BsonString(element)));
}
}
base.put("bizTransactionList", bsonBizTransactionList);
return base;
}
private BsonDocument convertToExtensionDocument(Map<String, String> namespaces, BsonDocument extension) {
BsonDocument ext = new BsonDocument();
for (String key : extension.keySet()) {
String[] namespaceAndKey = key.split("#");
if (namespaceAndKey.length != 2)
continue;
String namespace = namespaceAndKey[0];
if (!namespaces.containsKey(namespace))
continue;
ext.put("@" + encodeMongoObjectKey(namespace), new BsonString(namespaces.get(namespace)));
BsonValue extValue = extension.get(key);
if (extValue instanceof BsonDocument) {
ext.put(encodeMongoObjectKey(key), convertToExtensionDocument(namespaces, extValue.asDocument()));
} else {
ext.put(encodeMongoObjectKey(key), extValue);
}
}
return ext;
}
public BsonDocument putILMD(BsonDocument base, Map<String, String> namespaces, BsonDocument ilmds) {
BsonDocument bsonILMD = convertToExtensionDocument(namespaces, ilmds);
BsonDocument any = new BsonDocument();
any.put("any", bsonILMD);
base.put("ilmd", any);
return base;
}
public BsonDocument putExtensions(BsonDocument base, Map<String, String> namespaces, BsonDocument extensions) {
BsonDocument extension = convertToExtensionDocument(namespaces, extensions);
base.put("any", extension);
return base;
}
public BsonDocument putQuantityList(BsonDocument base, List<QuantityElement> quantityList) {
BsonArray quantityArray = new BsonArray();
for (QuantityElement quantityElement : quantityList) {
BsonDocument bsonQuantityElement = new BsonDocument("epcClass",
new BsonString(quantityElement.getEpcClass()));
if (quantityElement.getQuantity() != null) {
bsonQuantityElement.put("quantity", new BsonDouble(quantityElement.getQuantity()));
}
if (quantityElement.getUom() != null) {
bsonQuantityElement.put("uom", new BsonString(quantityElement.getUom()));
}
quantityArray.add(bsonQuantityElement);
}
base.put("quantityList", quantityArray);
return base;
}
public BsonDocument putChildQuantityList(BsonDocument base, List<QuantityElement> childQuantityList) {
BsonArray quantityArray = new BsonArray();
for (QuantityElement quantityElement : childQuantityList) {
BsonDocument bsonQuantityElement = new BsonDocument("epcClass",
new BsonString(quantityElement.getEpcClass()));
if (quantityElement.getQuantity() != null) {
bsonQuantityElement.put("quantity", new BsonDouble(quantityElement.getQuantity()));
}
if (quantityElement.getUom() != null) {
bsonQuantityElement.put("uom", new BsonString(quantityElement.getUom()));
}
quantityArray.add(bsonQuantityElement);
}
base.put("childQuantityList", quantityArray);
return base;
}
public BsonDocument putInputQuantityList(BsonDocument base, List<QuantityElement> inputQuantityList) {
BsonArray quantityArray = new BsonArray();
for (QuantityElement quantityElement : inputQuantityList) {
BsonDocument bsonQuantityElement = new BsonDocument("epcClass",
new BsonString(quantityElement.getEpcClass()));
if (quantityElement.getQuantity() != null) {
bsonQuantityElement.put("quantity", new BsonDouble(quantityElement.getQuantity()));
}
if (quantityElement.getUom() != null) {
bsonQuantityElement.put("uom", new BsonString(quantityElement.getUom()));
}
quantityArray.add(bsonQuantityElement);
}
base.put("inputQuantityList", quantityArray);
return base;
}
public BsonDocument putOutputQuantityList(BsonDocument base, List<QuantityElement> outputQuantityList) {
BsonArray quantityArray = new BsonArray();
for (QuantityElement quantityElement : outputQuantityList) {
BsonDocument bsonQuantityElement = new BsonDocument("epcClass",
new BsonString(quantityElement.getEpcClass()));
if (quantityElement.getQuantity() != null) {
bsonQuantityElement.put("quantity", new BsonDouble(quantityElement.getQuantity()));
}
if (quantityElement.getUom() != null) {
bsonQuantityElement.put("uom", new BsonString(quantityElement.getUom()));
}
quantityArray.add(bsonQuantityElement);
}
base.put("outputQuantityList", quantityArray);
return base;
}
public BsonDocument putSourceList(BsonDocument base, Map<String, List<String>> sourceList) {
BsonArray bsonSourceList = new BsonArray();
for (String key : sourceList.keySet()) {
List<String> sourceArrayList = sourceList.get(key);
for (String source : sourceArrayList) {
BsonDocument bsonSource = new BsonDocument(key, new BsonString(source));
bsonSourceList.add(bsonSource);
}
}
base.put("sourceList", bsonSourceList);
return base;
}
public BsonDocument putDestinationList(BsonDocument base, Map<String, List<String>> destinationList) {
BsonArray bsonDestinationList = new BsonArray();
for (String key : destinationList.keySet()) {
List<String> destinationArrayList = destinationList.get(key);
for (String destination : destinationArrayList) {
BsonDocument bsonDestination = new BsonDocument(key, new BsonString(destination));
bsonDestinationList.add(bsonDestination);
}
}
base.put("destinationList", bsonDestinationList);
return base;
}
public BsonDocument putType(BsonDocument base, VocabularyType type) {
base.put("type", new BsonString(type.getVocabularyType()));
return base;
}
public BsonDocument putID(BsonDocument base, String id) {
base.put("id", new BsonString(id));
return base;
}
public BsonDocument putAttributes(BsonDocument base, Map<String, String> attributes) {
BsonDocument bsonAttributes = new BsonDocument();
for (String key : attributes.keySet()) {
String value = attributes.get(key);
bsonAttributes.put(encodeMongoObjectKey(key), new BsonString(value));
}
base.put("attributes", bsonAttributes);
return base;
}
public BsonDocument putChildren(BsonDocument base, List<String> children) {
BsonArray bsonChildren = new BsonArray();
for (String child : children) {
bsonChildren.add(new BsonString(child));
}
base.put("children", bsonChildren);
return base;
}
public String encodeMongoObjectKey(String key) {
key = key.replace(".", "\uff0e");
return key;
}
}
| |
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.progress;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.ThrowableComputable;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
 * Central access point for running processes under a {@link ProgressIndicator}:
 * synchronous and asynchronous execution with modal or background progress UI,
 * cooperative cancellation checks, and per-thread indicator bookkeeping.
 * The concrete implementation is obtained as an application service.
 */
public abstract class ProgressManager {
    private static final ProgressManager ourInstance = ServiceManager.getService(ProgressManager.class);

    // Bridges this manager into ProgressIndicatorProvider so code that only
    // depends on the provider can still reach the current indicator and the
    // cancellation check.
    static {
        ProgressIndicatorProvider.ourInstance = new ProgressIndicatorProvider() {
            @Override
            public ProgressIndicator getProgressIndicator() {
                return ProgressManager.ourInstance.getProgressIndicator();
            }

            @Override
            protected void doCheckCanceled() throws ProcessCanceledException {
                ProgressManager.ourInstance.doCheckCanceled();
            }

            @Override
            public NonCancelableSection startNonCancelableSection() {
                return ProgressManager.ourInstance.startNonCancelableSection();
            }
        };
    }

    public static ProgressManager getInstance() {
        return ourInstance;
    }

    public abstract boolean hasProgressIndicator();

    public abstract boolean hasModalProgressIndicator();

    public abstract boolean hasUnsafeProgressIndicator();

    /** Runs {@code process} with {@code progress} installed as the calling thread's indicator. */
    public abstract void runProcess(@NotNull Runnable process, ProgressIndicator progress) throws ProcessCanceledException;

    /** Same as {@link #runProcess(Runnable, ProgressIndicator)} but returns the computed value. */
    public abstract <T> T runProcess(@NotNull Computable<T> process, ProgressIndicator progress) throws ProcessCanceledException;

    /** Returns the indicator associated with the calling thread, or {@code null} if none is installed. */
    public ProgressIndicator getProgressIndicator() {
        return myThreadIndicator.get();
    }

    // Raised periodically by the implementation so checkCanceled() stays cheap
    // on the fast path (a single volatile read when no check is due).
    protected static volatile boolean ourNeedToCheckCancel = false;

    /**
     * Cooperative cancellation point: throws {@link ProcessCanceledException}
     * if the current process has been cancelled.
     */
    public static void checkCanceled() throws ProcessCanceledException {
        // smart optimization! There's a thread started in ProgressManagerImpl that sets this flag once every 10 milliseconds
        if (ourNeedToCheckCancel) {
            getInstance().doCheckCanceled();
            ourNeedToCheckCancel = false;
        }
    }

    /** Checks for cancellation and sets the indicator's main text, clearing the secondary text. */
    public static void progress(final String text) throws ProcessCanceledException {
        progress(text, "");
    }

    /** Checks for cancellation and sets the indicator's secondary text, if an indicator is installed. */
    public static void progress2(@NotNull final String text) throws ProcessCanceledException {
        final ProgressIndicator pi = getInstance().getProgressIndicator();
        if (pi != null) {
            pi.checkCanceled();
            pi.setText2(text);
        }
    }

    /** Checks for cancellation and updates both indicator texts, if an indicator is installed. */
    public static void progress(final String text, @Nullable String text2) throws ProcessCanceledException {
        final ProgressIndicator pi = getInstance().getProgressIndicator();
        if (pi != null) {
            pi.checkCanceled();
            pi.setText(text);
            pi.setText2(text2 == null ? "" : text2);
        }
    }

    /** Implementation hook for {@link #checkCanceled()}. */
    protected abstract void doCheckCanceled() throws ProcessCanceledException;

    /** Runs {@code runnable} in a section during which cancellation checks are suppressed. */
    public abstract void executeNonCancelableSection(@NotNull Runnable runnable);

    /** Opens a non-cancelable section; the caller is responsible for closing it. */
    public abstract NonCancelableSection startNonCancelableSection();

    public abstract void setCancelButtonText(String cancelButtonText);

    /**
     * Runs the specified operation in a background thread and shows a modal progress dialog in the
     * main thread while the operation is executing.
     *
     * @param process       the operation to execute.
     * @param progressTitle the title of the progress window.
     * @param canBeCanceled whether "Cancel" button is shown on the progress window.
     * @param project       the project in the context of which the operation is executed.
     * @return true if the operation completed successfully, false if it was cancelled.
     */
    public abstract boolean runProcessWithProgressSynchronously(@NotNull Runnable process,
                                                                @NotNull @Nls String progressTitle,
                                                                boolean canBeCanceled,
                                                                @Nullable Project project);

    /**
     * Runs the specified operation in a background thread and shows a modal progress dialog in the
     * main thread while the operation is executing.
     *
     * @param process       the operation to execute.
     * @param progressTitle the title of the progress window.
     * @param canBeCanceled whether "Cancel" button is shown on the progress window.
     * @param project       the project in the context of which the operation is executed.
     * @return the result of the operation
     * @throws E exception thrown by process
     */
    public abstract <T, E extends Exception> T runProcessWithProgressSynchronously(@NotNull ThrowableComputable<T, E> process,
                                                                                   @NotNull @Nls String progressTitle,
                                                                                   boolean canBeCanceled,
                                                                                   @Nullable Project project) throws E;

    /**
     * Runs the specified operation in a background thread and shows a modal progress dialog in the
     * main thread while the operation is executing.
     *
     * @param process         the operation to execute.
     * @param progressTitle   the title of the progress window.
     * @param canBeCanceled   whether "Cancel" button is shown on the progress window.
     * @param project         the project in the context of which the operation is executed.
     * @param parentComponent the component which will be used to calculate the progress window ancestor
     * @return true if the operation completed successfully, false if it was cancelled.
     */
    public abstract boolean runProcessWithProgressSynchronously(@NotNull Runnable process,
                                                                @NotNull @Nls String progressTitle,
                                                                boolean canBeCanceled,
                                                                @Nullable Project project,
                                                                @Nullable JComponent parentComponent);

    /**
     * Runs a specified <code>process</code> in a background thread and shows a progress dialog, which can be made non-modal by pressing
     * background button. Upon successful termination of the process a <code>successRunnable</code> will be called in Swing UI thread and
     * <code>canceledRunnable</code> will be called if terminated on behalf of the user by pressing either cancel button, while running in
     * a modal state or stop button if running in background.
     *
     * @param project          the project in the context of which the operation is executed.
     * @param progressTitle    the title of the progress window.
     * @param process          the operation to execute.
     * @param successRunnable  a callback to be called in Swing UI thread upon normal termination of the process.
     * @param canceledRunnable a callback to be called in Swing UI thread if the process have been canceled by the user.
     * @deprecated use {@link #run(com.intellij.openapi.progress.Task)} instead
     */
    public abstract void runProcessWithProgressAsynchronously(@NotNull Project project,
                                                              @NotNull @Nls String progressTitle,
                                                              @NotNull Runnable process,
                                                              @Nullable Runnable successRunnable,
                                                              @Nullable Runnable canceledRunnable);

    /**
     * Runs a specified <code>process</code> in a background thread and shows a progress dialog, which can be made non-modal by pressing
     * background button. Upon successful termination of the process a <code>successRunnable</code> will be called in Swing UI thread and
     * <code>canceledRunnable</code> will be called if terminated on behalf of the user by pressing either cancel button, while running in
     * a modal state or stop button if running in background.
     *
     * @param project          the project in the context of which the operation is executed.
     * @param progressTitle    the title of the progress window.
     * @param process          the operation to execute.
     * @param successRunnable  a callback to be called in Swing UI thread upon normal termination of the process.
     * @param canceledRunnable a callback to be called in Swing UI thread if the process have been canceled by the user.
     * @param option           progress indicator behavior controller.
     * @deprecated use {@link #run(com.intellij.openapi.progress.Task)} instead
     */
    public abstract void runProcessWithProgressAsynchronously(@NotNull Project project,
                                                              @NotNull @Nls String progressTitle,
                                                              @NotNull Runnable process,
                                                              @Nullable Runnable successRunnable,
                                                              @Nullable Runnable canceledRunnable,
                                                              @NotNull PerformInBackgroundOption option);

    /**
     * Runs a specified <code>task</code> in either background/foreground thread and shows a progress dialog.
     *
     * @param task task to run (either {@link com.intellij.openapi.progress.Task.Modal} or {@link com.intellij.openapi.progress.Task.Backgroundable}).
     */
    public abstract void run(@NotNull Task task);

    public abstract void runProcessWithProgressAsynchronously(@NotNull Task.Backgroundable task, @NotNull ProgressIndicator progressIndicator);

    // Per-thread current indicator; read by getProgressIndicator() and managed
    // by executeProcessUnderProgress().
    protected static final ThreadLocal<ProgressIndicator> myThreadIndicator = new ThreadLocal<ProgressIndicator>();

    /**
     * Runs {@code process} with {@code progress} installed as the calling
     * thread's indicator, restoring the previous indicator afterwards.
     */
    public void executeProcessUnderProgress(@NotNull Runnable process, ProgressIndicator progress) throws ProcessCanceledException {
        ProgressIndicator oldIndicator = null;
        // Install only when a new, different indicator was supplied. Note the
        // embedded assignment: oldIndicator is captured inside the condition.
        boolean set = progress != null && progress != (oldIndicator = myThreadIndicator.get());
        if (set) {
            myThreadIndicator.set(progress);
        }
        try {
            process.run();
        }
        finally {
            if (set) {
                myThreadIndicator.set(oldIndicator);
            }
        }
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticloadbalancing.model;
import java.io.Serializable;
/**
*
*/
/**
 * Result of an Elastic Load Balancing {@code DescribeLoadBalancers} call:
 * the descriptions of the matched load balancers plus a pagination marker
 * for requesting the next page of results.
 */
public class DescribeLoadBalancersResult implements Serializable, Cloneable {

    /** Descriptions of the load balancers; lazily created by the getter. */
    private com.amazonaws.internal.SdkInternalList<LoadBalancerDescription> loadBalancerDescriptions;

    /**
     * Marker for requesting the next set of results; the empty string when
     * there are no additional results.
     */
    private String nextMarker;

    /**
     * Returns the load balancer descriptions, creating an empty internal
     * list on first access so callers never observe {@code null}.
     *
     * @return information about the load balancers.
     */
    public java.util.List<LoadBalancerDescription> getLoadBalancerDescriptions() {
        if (this.loadBalancerDescriptions == null) {
            this.loadBalancerDescriptions = new com.amazonaws.internal.SdkInternalList<LoadBalancerDescription>();
        }
        return this.loadBalancerDescriptions;
    }

    /**
     * Replaces the load balancer descriptions with a defensive copy of the
     * given collection; {@code null} clears the field.
     *
     * @param loadBalancerDescriptions information about the load balancers.
     */
    public void setLoadBalancerDescriptions(
            java.util.Collection<LoadBalancerDescription> loadBalancerDescriptions) {
        this.loadBalancerDescriptions = (loadBalancerDescriptions == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<LoadBalancerDescription>(loadBalancerDescriptions);
    }

    /**
     * Appends the given descriptions to any already present.
     * Use {@link #setLoadBalancerDescriptions(java.util.Collection)} or
     * {@link #withLoadBalancerDescriptions(java.util.Collection)} to replace
     * the existing values instead.
     *
     * @param loadBalancerDescriptions information about the load balancers.
     * @return this object, for call chaining.
     */
    public DescribeLoadBalancersResult withLoadBalancerDescriptions(
            LoadBalancerDescription... loadBalancerDescriptions) {
        if (this.loadBalancerDescriptions == null) {
            setLoadBalancerDescriptions(new com.amazonaws.internal.SdkInternalList<LoadBalancerDescription>(
                    loadBalancerDescriptions.length));
        }
        for (LoadBalancerDescription description : loadBalancerDescriptions) {
            this.loadBalancerDescriptions.add(description);
        }
        return this;
    }

    /**
     * Replaces the load balancer descriptions with the given collection.
     *
     * @param loadBalancerDescriptions information about the load balancers.
     * @return this object, for call chaining.
     */
    public DescribeLoadBalancersResult withLoadBalancerDescriptions(
            java.util.Collection<LoadBalancerDescription> loadBalancerDescriptions) {
        setLoadBalancerDescriptions(loadBalancerDescriptions);
        return this;
    }

    /**
     * Sets the marker to use when requesting the next set of results; the
     * string is empty when there are no additional results.
     *
     * @param nextMarker the pagination marker.
     */
    public void setNextMarker(String nextMarker) {
        this.nextMarker = nextMarker;
    }

    /**
     * Returns the marker to use when requesting the next set of results;
     * the string is empty when there are no additional results.
     *
     * @return the pagination marker.
     */
    public String getNextMarker() {
        return this.nextMarker;
    }

    /**
     * Sets the marker to use when requesting the next set of results.
     *
     * @param nextMarker the pagination marker.
     * @return this object, for call chaining.
     */
    public DescribeLoadBalancersResult withNextMarker(String nextMarker) {
        setNextMarker(nextMarker);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing
     * and debugging.
     *
     * @return a string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getLoadBalancerDescriptions() != null) {
            text.append("LoadBalancerDescriptions: ").append(getLoadBalancerDescriptions()).append(",");
        }
        if (getNextMarker() != null) {
            text.append("NextMarker: ").append(getNextMarker());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeLoadBalancersResult)) {
            return false;
        }
        DescribeLoadBalancersResult that = (DescribeLoadBalancersResult) obj;
        // Null-safe field comparisons: both null, or both non-null and equal.
        java.util.List<LoadBalancerDescription> mine = getLoadBalancerDescriptions();
        java.util.List<LoadBalancerDescription> theirs = that.getLoadBalancerDescriptions();
        if ((mine == null) != (theirs == null)) {
            return false;
        }
        if (theirs != null && !theirs.equals(mine)) {
            return false;
        }
        if ((getNextMarker() == null) != (that.getNextMarker() == null)) {
            return false;
        }
        return that.getNextMarker() == null || that.getNextMarker().equals(getNextMarker());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + ((getLoadBalancerDescriptions() == null) ? 0 : getLoadBalancerDescriptions().hashCode());
        result = prime * result
                + ((getNextMarker() == null) ? 0 : getNextMarker().hashCode());
        return result;
    }

    @Override
    public DescribeLoadBalancersResult clone() {
        try {
            return (DescribeLoadBalancersResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
package com.atlassian.pageobjects.binder;
import com.atlassian.pageobjects.Page;
import com.atlassian.pageobjects.PageBinder;
import com.atlassian.pageobjects.ProductInstance;
import com.atlassian.pageobjects.TestedProduct;
import com.atlassian.pageobjects.Tester;
import com.atlassian.pageobjects.browser.Browser;
import com.atlassian.pageobjects.browser.IgnoreBrowser;
import com.atlassian.pageobjects.browser.RequireBrowser;
import com.atlassian.pageobjects.inject.ConfigurableInjectionContext;
import com.google.inject.Binder;
import com.google.inject.Module;
import org.hamcrest.CoreMatchers;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import javax.inject.Inject;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@SuppressWarnings("unchecked")
@RunWith(MockitoJUnitRunner.class)
public class TestInjectPageBinder
{
private MyTestedProduct product;
@Mock
private ProductInstance productInstance;
@Mock
private Tester tester;
@Before
public void setUp() throws Exception
{
product = new MyTestedProduct(new SetTester());
}
@Test
public void testInject()
{
PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
OneFieldPage page = binder.bind(OneFieldPage.class);
assertEquals("Bob", page.name.getValue());
}
@Test
public void testInjectDefaults()
{
PageBinder binder = createBinder();
DefaultsPage page = binder.bind(DefaultsPage.class);
assertNotNull(page.testedProduct);
assertNotNull(page.myTestedProduct);
assertNotNull(page.tester);
assertNotNull(page.setTester);
assertNotNull(page.pageBinder);
assertNotNull(page.productInstance);
}
@Test(expected = IllegalArgumentException.class)
public void testInjectMissing()
{
PageBinder binder = createBinder();
binder.bind(OneFieldPage.class);
}
@Test
public void testInjectWithArgument()
{
PageBinder binder = createBinder();
ConstructorArgumentPage page = binder.bind(ConstructorArgumentPage.class, "foo");
assertEquals("foo", page.name);
}
@Test
public void testInstantiateWithPrimitiveArguments()
{
PageBinder binder = createBinder();
ConstructorArgumentPrimitive object = binder.bind(ConstructorArgumentPrimitive.class, 5, true);
assertNotNull(object);
assertEquals(5, object.intField);
assertTrue(object.booleanField);
}
@Test
public void testInjectWithArgumentSubclass()
{
PageBinder binder = createBinder();
ConstructorArgumentPage page = binder.bind(ConstructorArgumentPage.class, 43);
assertEquals(43, page.age);
}
@Test
public void testInitAfterInject()
{
PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
OneFieldWithInitPage page = binder.bind(OneFieldWithInitPage.class);
assertEquals("Bob Jones", page.name);
}
@Test
public void testPrivateInitAfterInject()
{
PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
OneFieldWithPrivateInitPage page = binder.bind(OneFieldWithPrivateInitPage.class);
assertEquals("Bob Private", page.name);
}
@Test
public void testOneFieldWithSuperClassInit()
{
PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
OneFieldWithSuperClassInitPage page = binder.bind(OneFieldWithSuperClassInitPage.class);
assertEquals("Bob Private", page.getName());
}
@Test
public void testProtectedInitAfterInject()
{
PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
OneFieldWithProtectedInitPage page = binder.bind(OneFieldWithProtectedInitPage.class);
assertEquals("Bob Protected", page.name);
}
@Test
public void testParentInject()
{
PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
ChildNoNamePage page = binder.bind(ChildNoNamePage.class);
assertEquals("Bob", page.name.getValue());
}
@Test
public void shouldImplementConfigurableInjectionContext()
{
final PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
assertThat(binder, CoreMatchers.instanceOf(ConfigurableInjectionContext.class));
assertEquals("Bob", binder.bind(OneFieldPage.class).name.getValue());
ConfigurableInjectionContext.class.cast(binder)
.configure()
.addImplementation(StringField.class, AnotherStringFieldImpl.class)
.finish();
assertEquals("Rob", binder.bind(OneFieldPage.class).name.getValue());
}
@Test
public void shouldAllowConfiguringNewSingletonInstanceThatIsSubclassOfInterfaceType()
{
final PageBinder binder = createBinder();
ConfigurableInjectionContext.class.cast(binder)
.configure()
.addSingleton(StringField.class, new StringFieldImpl())
.finish();
assertEquals("Bob", binder.bind(OneFieldPage.class).name.getValue());
}
@Test
public void shouldAllowConfiguringNewImplementationInstance()
{
PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
assertEquals("Bob", binder.bind(OneFieldPage.class).name.getValue());
ConfigurableInjectionContext.class.cast(binder)
.configure()
.addSingleton(StringField.class, new StringField()
{
@Override
public String getValue()
{
return "Boom!";
}
})
.finish();
assertEquals("Boom!", binder.bind(OneFieldPage.class).name.getValue());
}
@Test
public void shouldIncludePostInjectionProcessorsAddedViaInjectionContext()
{
PageBinder binder = createBinder();
assertEquals("Default", binder.bind(MutablePage.class).getValue());
ConfigurableInjectionContext.class.cast(binder)
.configure()
.addSingleton(MutablePageProcessor.class, new MutablePageProcessor())
.finish();
// post processor should be invoked
assertEquals("Post processed", binder.bind(MutablePage.class).getValue());
}
@Test
public void visitUrlShouldRemoveExtraSlashAfterHostname() throws Exception
{
when(productInstance.getBaseUrl()).thenReturn("http://localhost/");
final PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
binder.navigateToAndBind(OneFieldPage.class);
verify(tester).gotoUrl("http://localhost/path");
}
@Test
public void visitUrlShouldAddMissingSlashAfterHostname() throws Exception
{
when(productInstance.getBaseUrl()).thenReturn("http://localhost");
final PageBinder binder = createBinder(StringField.class, StringFieldImpl.class);
binder.navigateToAndBind(PageWithNoLeadingSlash.class);
verify(tester).gotoUrl("http://localhost/path");
}
@Test
public void shouldCheckForIgnoreBrowserAndRequireBrowserAnnotation()
{
// should invoke all init methods
PageObjectWithRequiredAndIgnoredBrowsers pageObject = createBinderWithBrowser(Browser.FIREFOX)
.bind(PageObjectWithRequiredAndIgnoredBrowsers.class);
assertTrue(pageObject.initIgnoredInvoked);
assertTrue(pageObject.initRequiredInvoked);
// should _not_ invoke init ignored
pageObject = createBinderWithBrowser(Browser.HTMLUNIT).bind(PageObjectWithRequiredAndIgnoredBrowsers.class);
assertFalse(pageObject.initIgnoredInvoked);
assertTrue(pageObject.initRequiredInvoked);
// should _not_ invoke init required
pageObject = createBinderWithBrowser(Browser.SAFARI).bind(PageObjectWithRequiredAndIgnoredBrowsers.class);
assertTrue(pageObject.initIgnoredInvoked);
assertFalse(pageObject.initRequiredInvoked);
// should _not_ invoke any init
pageObject = createBinderWithBrowser(Browser.HTMLUNIT_NOJS).bind(PageObjectWithRequiredAndIgnoredBrowsers.class);
assertFalse(pageObject.initIgnoredInvoked);
assertFalse(pageObject.initRequiredInvoked);
}
@Test
public void shouldSupportIgnoreAllAndRequireAll()
{
for (Browser browser : Browser.values())
{
PageObjectWithRequiredAndIgnoredBrowsers pageObject = createBinderWithBrowser(browser)
.bind(PageObjectWithRequiredAndIgnoredBrowsers.class);
assertFalse(pageObject.initIgnoreAllInvoked);
assertTrue(pageObject.initRequireAllInvoked);
}
}
private InjectPageBinder createBinder()
{
return createBinder(null, null);
}
private InjectPageBinder createBinder(final Class<?> key, final Class impl)
{
return new InjectPageBinder(productInstance, tester, new StandardModule(product),
new Module()
{
public void configure(Binder binder)
{
if (key != null)
{
binder.bind(key).to(impl);
}
}
});
}
private InjectPageBinder createBinderWithBrowser(Browser browser)
{
InjectPageBinder pageBinder = createBinder();
ConfigurableInjectionContext.class.cast(pageBinder).configure()
.addSingleton(Browser.class, browser).finish();
return pageBinder;
}
static class AbstractPage implements Page
{
public String getUrl()
{
return "/path";
}
}
static class OneFieldPage extends AbstractPage
{
@Inject
private StringField name;
}
static class PageWithNoLeadingSlash extends AbstractPage
{
@Override
public String getUrl()
{
return "path";
}
}
static class ConstructorArgumentPrimitive
{
private final int intField;
private final boolean booleanField;
public ConstructorArgumentPrimitive(int intArg, boolean booleanArg)
{
this.intField = intArg;
this.booleanField = booleanArg;
}
}
static class ConstructorArgumentPage extends AbstractPage
{
private final String name;
private final Number age;
@SuppressWarnings("UnusedDeclaration")
public ConstructorArgumentPage(String name)
{
this.name = name;
this.age = null;
}
@SuppressWarnings("UnusedDeclaration")
public ConstructorArgumentPage(Number age)
{
this.age = age;
this.name = null;
}
}
static class ParentNamePage extends AbstractPage
{
@Inject
protected StringField name;
}
static class ChildNoNamePage extends ParentNamePage
{
}
static class DefaultsPage extends AbstractPage
{
@Inject
private ProductInstance productInstance;
@Inject
private TestedProduct testedProduct;
@Inject
private MyTestedProduct myTestedProduct;
@Inject
private Tester tester;
@Inject
private SetTester setTester;
@Inject
private PageBinder pageBinder;
}
static class OneFieldWithInitPage extends AbstractPage
{
@Inject
private StringField field;
private String name;
@Init
public void init()
{
name = field.getValue() + " Jones";
}
}
static interface StringField
{
String getValue();
}
static class StringFieldImpl implements StringField
{
public String getValue()
{
return "Bob";
}
}
/** Alternative implementation; distinguishable from {@link StringFieldImpl}. */
static class AnotherStringFieldImpl implements StringField
{
    public String getValue()
    {
        final String fixedValue = "Rob";
        return fixedValue;
    }
}
/** Verifies that a private {@code @Init} method is still invoked. */
static class OneFieldWithPrivateInitPage extends AbstractPage
{
    @Inject
    private StringField field;

    private String name;

    /** Private on purpose: the framework must invoke it reflectively. */
    @Init
    @SuppressWarnings("UnusedDeclaration")
    private void init()
    {
        final String value = field.getValue();
        name = value + " Private";
    }

    /** @return the value computed by {@code init()}, or null if it never ran */
    public String getName()
    {
        return name;
    }
}
/** Verifies that a protected {@code @Init} method is invoked. */
static class OneFieldWithProtectedInitPage extends AbstractPage
{
    @Inject
    private StringField field;

    private String name;

    @Init
    protected void init()
    {
        final String value = field.getValue();
        name = value + " Protected";
    }
}
/**
 * Adds nothing itself; exercises invocation of the private {@code @Init}
 * method declared on the superclass.
 */
static class OneFieldWithSuperClassInitPage extends OneFieldWithPrivateInitPage
{
}
/** Simple mutable bean used to observe post-injection processing. */
static class MutablePage
{
    // Starts as "Default" until a processor or test overwrites it.
    private String currentValue = "Default";

    public String getValue()
    {
        return currentValue;
    }

    public void setValue(String value)
    {
        currentValue = value;
    }
}
/** Processor that rewrites the value of any {@link MutablePage} it sees. */
static class MutablePageProcessor implements PostInjectionProcessor
{
    @Override
    public <T> T process(T pageObject)
    {
        // Only MutablePage instances are touched; everything else passes through.
        if (pageObject instanceof MutablePage)
        {
            ((MutablePage) pageObject).setValue("Post processed");
        }
        return pageObject;
    }
}
/**
 * Page object whose {@code @Init} methods carry browser filters.
 * Each flag records whether the matching init method actually ran, so tests
 * can assert which {@code @IgnoreBrowser}/{@code @RequireBrowser}
 * combinations suppress or allow invocation.
 */
static class PageObjectWithRequiredAndIgnoredBrowsers
{
    // Each flag is set to true only if the corresponding init method runs.
    boolean initIgnoredInvoked;
    boolean initRequiredInvoked;
    boolean initIgnoreAllInvoked;
    boolean initRequireAllInvoked;

    // Ignored for every browser — presumably should never be invoked; verify against the framework's filter semantics.
    @Init
    @IgnoreBrowser(Browser.ALL)
    public void initIgnoreAll()
    {
        initIgnoreAllInvoked = true;
    }

    // Ignored only for the HtmlUnit variants.
    @Init
    @IgnoreBrowser({ Browser.HTMLUNIT, Browser.HTMLUNIT_NOJS })
    public void initIgnored()
    {
        initIgnoredInvoked = true;
    }

    // Required for all browsers — presumably always invoked.
    @Init
    @RequireBrowser(Browser.ALL)
    public void initRequireAll()
    {
        initRequireAllInvoked = true;
    }

    // Restricted to Chrome, Firefox and HtmlUnit.
    @Init
    @RequireBrowser({ Browser.CHROME, Browser.FIREFOX, Browser.HTMLUNIT })
    public void initRequired()
    {
        initRequiredInvoked = true;
    }
}
}
| |
/*
* Copyright (c) 2010-2015 William Bittle http://www.dyn4j.org/
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of dyn4j nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.dyn4j.sandbox.tests;
import java.util.List;
import org.dyn4j.collision.manifold.Manifold;
import org.dyn4j.collision.manifold.ManifoldPoint;
import org.dyn4j.collision.manifold.ManifoldSolver;
import org.dyn4j.collision.narrowphase.DistanceDetector;
import org.dyn4j.collision.narrowphase.Gjk;
import org.dyn4j.collision.narrowphase.NarrowphaseDetector;
import org.dyn4j.collision.narrowphase.Penetration;
import org.dyn4j.collision.narrowphase.Separation;
import org.dyn4j.dynamics.Body;
import org.dyn4j.dynamics.World;
import org.dyn4j.geometry.Circle;
import org.dyn4j.geometry.Convex;
import org.dyn4j.geometry.Geometry;
import org.dyn4j.geometry.MassType;
import org.dyn4j.geometry.Rectangle;
import org.dyn4j.geometry.Segment;
import org.dyn4j.geometry.Transform;
import org.dyn4j.geometry.Triangle;
import org.dyn4j.geometry.Vector2;
import org.dyn4j.sandbox.SandboxBody;
import org.dyn4j.sandbox.utilities.RenderUtilities;
import com.jogamp.opengl.GL;
import com.jogamp.opengl.GL2;
/**
* Compiled test for the collision detection pipeline.
* @author William Bittle
* @version 1.0.6
* @since 1.0.2
*/
/**
 * Compiled test for the collision detection pipeline.
 * <p>
 * Renders contact manifolds (when fixtures overlap) and closest-point
 * separations (when they do not) for a fixed set of static bodies.
 * @author William Bittle
 * @version 1.0.6
 * @since 1.0.2
 */
public class Plotter extends CompiledSimulation {
	/** The distance detector used when a pair of fixtures is not overlapping */
	private static final DistanceDetector DISTANCE_DETECTOR = new Gjk();

	/** The point rendering radius (world units) */
	private static final double RADIUS = 0.05;

	/**
	 * Default constructor.
	 */
	public Plotter() {
		super();
		// setup the camera
		this.camera.setScale(64.0);
	}

	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.tests.CompiledSimulation#initialize()
	 */
	@Override
	public void initialize() {
		this.world.setUserData("Plotter");
		// no gravity: the bodies are static and only collision geometry matters
		this.world.setGravity(World.ZERO_GRAVITY);
		// one body per shape kind (circle, polygon, rectangle, triangle, segment)
		for (int i = 0; i < 5; i++) {
			SandboxBody body = create(i);
			this.world.addBody(body);
		}
	}

	/**
	 * Helper method to create the various bodies for the collision test.
	 * @param index the shape index (0=circle, 1=polygon, 2=rectangle, 3=triangle, other=segment)
	 * @return SandboxBody
	 */
	private SandboxBody create(int index) {
		SandboxBody body = new SandboxBody();
		// which shape to make?
		if (index == 0) {
			body.addFixture(new Circle(0.5));
			body.translate(3.5, 0.0);
		} else if (index == 1) {
			body.addFixture(Geometry.createUnitCirclePolygon(5, 1.0));
			body.translate(0.0, 2.5);
		} else if (index == 2) {
			body.addFixture(new Rectangle(0.5, 0.5));
			body.translate(-4.0, -0.5);
		} else if (index == 3) {
			body.addFixture(new Triangle(new Vector2(0.45, -0.12), new Vector2(-0.45, 0.38), new Vector2(-0.15, -0.22)));
			body.translate(0.5, 0.5);
		} else {
			body.addFixture(new Segment(new Vector2(-0.3, 0.2), new Vector2(0.0, -0.1)));
			body.translate(-1.0, -1.5);
		}
		// set the mass to infinite so the bodies never move
		body.setMass(MassType.INFINITE);
		body.setUserData("Body" + (index + 1));
		// return the entity
		return body;
	}

	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.tests.CompiledSimulation#update(double, boolean)
	 */
	@Override
	public void update(double elapsedTime, boolean stepped) {}

	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.tests.CompiledSimulation#render(com.jogamp.opengl.GL2)
	 */
	@Override
	public void render(GL2 gl) {
		// use the current world's detectors
		NarrowphaseDetector npd = this.world.getNarrowphaseDetector();
		ManifoldSolver ms = this.world.getManifoldSolver();
		// reused output holders for the detectors
		Separation s = new Separation();
		Penetration p = new Penetration();
		int bSize = this.world.getBodyCount();
		// loop over all the bodies and fixtures and get their
		// penetrations, manifolds, and separations
		for (int i = 0; i < bSize; i++) {
			// j starts at i + 1 so each pair is tested exactly once
			for (int j = i + 1; j < bSize; j++) {
				Body b1 = this.world.getBody(i);
				Body b2 = this.world.getBody(j);
				Transform t1 = b1.getTransform();
				Transform t2 = b2.getTransform();
				int fSize1 = b1.getFixtureCount();
				int fSize2 = b2.getFixtureCount();
				for (int k = 0; k < fSize1; k++) {
					for (int l = 0; l < fSize2; l++) {
						Convex c1 = b1.getFixture(k).getShape();
						Convex c2 = b2.getFixture(l).getShape();
						// set the color (green by default)
						gl.glColor4f(0.0f, 1.0f, 0.0f, 1.0f);
						if (npd.detect(c1, t1, c2, t2, p)) {
							// overlapping: build and draw the contact manifold
							Manifold m = new Manifold();
							if (ms.getManifold(p, c1, t1, c2, t2, m)) {
								// get the points
								List<ManifoldPoint> points = m.getPoints();
								Vector2 n = m.getNormal();
								// if we got a manifold lets show it
								// there are only two cases for 2D (2 points or 1 point)
								if (points.size() == 2) {
									ManifoldPoint mp1 = points.get(0);
									ManifoldPoint mp2 = points.get(1);
									Vector2 p1 = mp1.getPoint();
									Vector2 p2 = mp2.getPoint();
									// orange squares mark the two contact points
									gl.glColor4f(1.0f, 0.6f, 0.0f, 1.0f);
									RenderUtilities.fillRectangleFromCenter(gl, p1.x, p1.y, RADIUS, RADIUS);
									RenderUtilities.fillRectangleFromCenter(gl, p2.x, p2.y, RADIUS, RADIUS);
									Vector2 mid = p1.copy().add(p2).multiply(0.5);
									gl.glBegin(GL.GL_LINES);
									// red line: penetration normal scaled by total depth, from the midpoint
									gl.glColor4f(1.0f, 0.0f, 0.0f, 1.0f);
									gl.glVertex2d(mid.x, mid.y);
									gl.glVertex2d(mid.x + n.x * p.getDepth(), mid.y + n.y * p.getDepth());
									// blue lines: per-point depths along the manifold normal
									gl.glColor4f(0.0f, 0.0f, 1.0f, 1.0f);
									gl.glVertex2d(p1.x, p1.y);
									gl.glVertex2d(p1.x + n.x * mp1.getDepth(), p1.y + n.y * mp1.getDepth());
									gl.glVertex2d(p2.x, p2.y);
									gl.glVertex2d(p2.x + n.x * mp2.getDepth(), p2.y + n.y * mp2.getDepth());
									gl.glEnd();
								} else if (points.size() == 1) {
									ManifoldPoint mp1 = points.get(0);
									Vector2 p1 = mp1.getPoint();
									// orange square marks the single contact point
									gl.glColor4f(1.0f, 0.6f, 0.0f, 1.0f);
									RenderUtilities.fillRectangleFromCenter(gl, p1.x, p1.y, RADIUS, RADIUS);
									gl.glBegin(GL.GL_LINES);
									// red line: penetration depth; blue line: manifold point depth
									gl.glColor4f(1.0f, 0.0f, 0.0f, 1.0f);
									gl.glVertex2d(p1.x, p1.y);
									gl.glVertex2d(p1.x + n.x * p.getDepth(), p1.y + n.y * p.getDepth());
									gl.glColor4f(0.0f, 0.0f, 1.0f, 1.0f);
									gl.glVertex2d(p1.x, p1.y);
									gl.glVertex2d(p1.x + n.x * mp1.getDepth(), p1.y + n.y * mp1.getDepth());
									gl.glEnd();
								}
							}
						} else {
							// not overlapping: draw the closest points and the separation vector
							if (DISTANCE_DETECTOR.distance(c1, t1, c2, t2, s)) {
								Vector2 p1 = s.getPoint1();
								Vector2 p2 = s.getPoint2();
								Vector2 n = s.getNormal();
								RenderUtilities.fillRectangleFromCenter(gl, p1.x, p1.y, RADIUS, RADIUS);
								RenderUtilities.fillRectangleFromCenter(gl, p2.x, p2.y, RADIUS, RADIUS);
								gl.glBegin(GL.GL_LINES);
								gl.glVertex2d(p1.x, p1.y);
								gl.glVertex2d(p1.x + n.x * s.getDistance(), p1.y + n.y * s.getDistance());
								gl.glEnd();
							}
						}
					}
				}
			}
		}
	}

	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.tests.CompiledSimulation#reset()
	 */
	@Override
	public void reset() {
		// remove everything from the world
		this.world.removeAllBodiesAndJoints();
		// add it all back
		this.initialize();
	}
}
| |
package com.netflix.suro.sink.elasticsearch;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.netflix.client.config.CommonClientConfigKey;
import com.netflix.client.config.IClientConfig;
import com.netflix.loadbalancer.BaseLoadBalancer;
import com.netflix.niws.client.http.RestClient;
import com.netflix.suro.jackson.DefaultObjectMapper;
import com.netflix.suro.message.DefaultMessageContainer;
import com.netflix.suro.message.Message;
import com.netflix.suro.sink.Sink;
import org.elasticsearch.action.count.CountRequest;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.joda.time.DateTime;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numNodes = 1)
public class TestElasticSearchSink extends ElasticsearchIntegrationTest {
protected String getPort() {
return "9200";
}
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1)
.put(super.nodeSettings(nodeOrdinal)).build();
}
@Test
public void testDefaultArgument() throws IOException {
String index = "topic";
createDefaultESSink(index);
refresh();
CountResponse countResponse = client().count(new CountRequest(index)).actionGet();
assertEquals(countResponse.getCount(), 100);
}
private ElasticSearchSink createDefaultESSink(String index) throws JsonProcessingException {
ObjectMapper jsonMapper = new DefaultObjectMapper();
ElasticSearchSink sink = new ElasticSearchSink(
index,
null,
10,
1000,
Lists.newArrayList("localhost:" + getPort()),
null,
0,0,0,0,1000,
null,
false,
jsonMapper,
null
);
sink.open();
DateTime dt = new DateTime("2014-10-12T12:12:12.000Z");
Map<String, Object> msg = new ImmutableMap.Builder<String, Object>()
.put("f1", "v1")
.put("f2", "v2")
.put("f3", "v3")
.put("ts", dt.getMillis())
.build();
for (int i = 0; i < 100; ++i) {
sink.writeTo(new DefaultMessageContainer(new Message(index, jsonMapper.writeValueAsBytes(msg)), jsonMapper));
}
sink.close();
return sink;
}
@Test
public void testIndexInfoBuilder() throws IOException {
ObjectMapper jsonMapper = new DefaultObjectMapper();
Properties props = new Properties();
props.setProperty("dateFormat", "YYYYMMdd");
ElasticSearchSink sink = new ElasticSearchSink(
"testIndexInfoBuilder",
null,
1,
1000,
Lists.newArrayList("localhost:" + getPort()),
new DefaultIndexInfoBuilder(
null,
null,
new TimestampField("ts", null),
new IndexSuffixFormatter("date", props),
null,
jsonMapper),
0,0,0,0,0,
null,
false,
jsonMapper,
null
);
sink.open();
DateTime dt = new DateTime("2014-10-12T12:12:12.000Z");
Map<String, Object> msg = new ImmutableMap.Builder<String, Object>()
.put("f1", "v1")
.put("f2", "v2")
.put("f3", "v3")
.put("ts", dt.getMillis())
.build();
String routingKey = "topic";
String index = "topic20141012";
for (int i = 0; i < 100; ++i) {
sink.writeTo(new DefaultMessageContainer(new Message(routingKey, jsonMapper.writeValueAsBytes(msg)), jsonMapper));
}
sink.close();
refresh();
CountResponse countResponse = client().count(new CountRequest(index)).actionGet();
assertEquals(countResponse.getCount(), 100);
}
@Test
public void testCreate() throws IOException {
String desc = " {\n" +
" \"type\": \"elasticsearch\",\n" +
" \"queue4Sink\":{\"type\": \"memory\", \"capacity\": 0 },\n" +
" \"batchSize\": 100,\n" +
" \"batchTimeout\": 1000,\n" +
" \"clientName\": \"es_test\",\n" +
" \"cluster.name\": \"es_test\",\n" +
" \"addressList\": [\"http://host1:8080\", \"http://host2:8080\"],\n" +
" \"indexInfo\":{\n" +
" \"type\": \"default\",\n" +
" \"indexTypeMap\":{\"routingkey1\":\"index1:type1\", \"routingkey2\":\"index2:type2\"},\n" +
" \"idFields\":{\"index\":[\"f1\", \"f2\"]},\n" +
" \"timestamp\": {\"field\":\"ts\"},\n" +
" \"indexSuffixFormatter\":{\"type\": \"date\", \"properties\":{\"dateFormat\":\"YYYYMMdd\"}}\n" +
" }\n" +
" }";
final ObjectMapper jsonMapper = new DefaultObjectMapper();
jsonMapper.registerSubtypes(new NamedType(ElasticSearchSink.class, "elasticsearch"));
jsonMapper.setInjectableValues(new InjectableValues() {
@Override
public Object findInjectableValue(
Object valueId,
DeserializationContext ctxt,
BeanProperty forProperty,
Object beanInstance
) {
if (valueId.equals(ObjectMapper.class.getCanonicalName())) {
return jsonMapper;
} else {
return null;
}
}
});
Sink sink = jsonMapper.readValue(desc, new TypeReference<Sink>(){});
assertTrue(sink instanceof ElasticSearchSink);
ElasticSearchSink esSink = (ElasticSearchSink) sink;
esSink.createClient();
RestClient client = esSink.getClient();
IClientConfig config = ((BaseLoadBalancer) client.getLoadBalancer()).getClientConfig();
assertTrue(config.get(CommonClientConfigKey.OkToRetryOnAllOperations));
assertEquals(2, config.get(CommonClientConfigKey.MaxAutoRetriesNextServer).intValue());
assertEquals(0, esSink.getSleepOverClientException());
assertFalse(esSink.getReenqueueOnException());
}
@Test
public void testRecover() throws Exception {
ObjectMapper jsonMapper = new DefaultObjectMapper();
ElasticSearchSink sink = new ElasticSearchSink(
"default",
null,
10,
1000,
Lists.newArrayList("localhost:" + getPort()),
null,
0,0,0,0,
0,
null,
false,
jsonMapper,
null
);
sink.open();
DateTime dt = new DateTime("2014-10-12T12:12:12.000Z");
Map<String, Object> msg = new ImmutableMap.Builder<String, Object>()
.put("f1", "v1")
.put("f2", "v2")
.put("f3", "v3")
.put("ts", dt.getMillis())
.build();
String routingKey = "topicrecover";
String index = "topicrecover";
List<Message> msgList = new ArrayList<>();
int msgCount = 100;
for (int i = 0; i < msgCount; ++i) {
msgList.add(new Message(routingKey, jsonMapper.writeValueAsBytes(msg)));
}
for (Message m : msgList) {
sink.recover(m);
}
refresh();
CountResponse countResponse = client().count(new CountRequest(index)).actionGet();
assertEquals(countResponse.getCount(), 100);
}
private ObjectMapper jsonMapper = new DefaultObjectMapper();
// @Test
// public void testStat() throws JsonProcessingException, InterruptedException {
// final long ts = System.currentTimeMillis() - 1;
//
// IndexInfoBuilder indexInfo = mock(IndexInfoBuilder.class);
// doAnswer(new Answer() {
// @Override
// public Object answer(InvocationOnMock invocation) throws Throwable {
// final Message m = (Message) invocation.getArguments()[0];
// if (m.getRoutingKey().startsWith("parsing_failed")) {
// return null;
// } else {
// return new IndexInfo() {
// @Override
// public String getIndex() {
// return m.getRoutingKey();
// }
//
// @Override
// public String getType() {
// return "type";
// }
//
// @Override
// public Object getSource() {
// if (m.getRoutingKey().startsWith("rejected")) {
// return m.getPayload();
// } else {
// return new String(m.getPayload());
// }
// }
//
// @Override
// public String getId() {
// return null;
// }
//
// @Override
// public long getTimestamp() {
// return ts;
// }
// };
// }
// }
// }).when(indexInfo).create(any(Message.class));
//
// ElasticSearchSink sink = new ElasticSearchSink(
// "testStat",
// null, // by default it will be memory queue
// 1000,
// 5000,
// Lists.newArrayList("localhost:" + getPort()),
// indexInfo,
// 0,0,0,0,0,
// null,
// jsonMapper,
// null);
// sink.open();
//
// for (int i = 0; i < 3; ++i) {
// for (int j = 0; j < 3; ++j) {
// sink.writeTo(new DefaultMessageContainer(new Message("parsing_failed_topic" + i, getAnyMessage()), jsonMapper));
// }
// for (int j = 0; j < 3; ++j) {
// sink.writeTo(new DefaultMessageContainer(new Message("indexed" + i, getAnyMessage()), jsonMapper));
// }
// for (int j = 0; j < 3; ++j) {
// sink.writeTo(new DefaultMessageContainer(new Message("rejected" + i, getAnyMessage()), jsonMapper));
// }
// }
//
// sink.close();
// String stat = sink.getStat();
// System.out.println(stat);
// int count = 0;
// for (int i = 0; i < 3; ++i) {
// for (int j = 0; j < 3; ++j) {
// if (stat.contains("parsing_failed_topic" + i + ":3")) {
// ++count;
// }
// }
// for (int j = 0; j < 3; ++j) {
// if (stat.contains("indexed" + i + ":3")) {
// ++count;
// }
// }
// for (int j = 0; j < 3; ++j) {
// if (stat.contains("rejected" + i + ":3")) {
// ++count;
// }
// }
// }
// assertEquals(count, 27);
//
// // check indexDelay section
// ArrayIterator iterator = new ArrayIterator(stat.split("\n"));
// while (iterator.hasNext() && !iterator.next().equals("indexDelay"));
// Set<String> stringSet = new HashSet<>();
// for (int i = 0; i < 6; ++i) {
// String s = (String) iterator.next();
// assertTrue(Long.parseLong(s.split(":")[1]) > 0);
// stringSet.add(s.split(":")[0]);
// }
// assertEquals(stringSet.size(), 6);
// }
private byte[] getAnyMessage() throws JsonProcessingException {
return jsonMapper.writeValueAsBytes(new ImmutableMap.Builder<String, Object>().put("f1", "v1").build());
}
}
| |
package org.jsapar.text;
import java.math.BigDecimal;
import java.text.ParseException;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
/**
* Interface for parsing and formatting objects from and to String. Use one of the static factory methods of this interface
* to create an instance that fits the purpose. For instance, you can use {@link #ofJavaTextFormat(java.text.Format)} in order
* to wrap a java.text.Format instance.
* @param <T>
*/
/**
 * Interface for parsing and formatting objects from and to String. Use one of the static factory methods of this interface
 * to create an instance that fits the purpose. For instance, you can use {@link #ofJavaTextFormat(java.text.Format)} in order
 * to wrap a java.text.Format instance.
 * @param <T> The type parsed from and formatted to text.
 */
public interface Format<T> {

    /**
     * Parses an instance of type T from the supplied string.
     * @param stringValue The string value to parse from.
     * @return A parsed object.
     * @throws ParseException If parsing fails for any reason.
     */
    T parse(String stringValue) throws ParseException;

    /**
     * Formats supplied object into string.
     * @param value The value to format. Usually it should be of the type T but this method is a bit more generous and
     *              may accept other types of objects on occasion.
     * @return A string that is formatted from the supplied object.
     * @throws IllegalArgumentException If the supplied value is of an unsupported type.
     */
    String format(Object value) throws IllegalArgumentException;

    /**
     * @param format The java.text.Format to use.
     * @param <T> The return type.
     * @return An instance that formats and parses using a java.text.Format instance.
     */
    static <T> Format<T> ofJavaTextFormat(java.text.Format format){
        return new JavaTextFormat<>(format);
    }

    /**
     * @param ignoreCase If true, upper/lower case is ignored.
     * @return An instance that formats and parses Boolean in a default manner using 'true' for the true value and 'false' for false.
     */
    static Format<Boolean> ofBooleanInstance(boolean ignoreCase){
        return new BooleanFormat(ignoreCase);
    }

    /**
     * @param trueValue The string value to use for true.
     * @param falseValue The string value to use for false.
     * @param ignoreCase If true, upper/lower case is ignored.
     * @return An instance that formats and parses Boolean.
     */
    static Format<Boolean> ofBooleanInstance(String trueValue, String falseValue, boolean ignoreCase){
        return new BooleanFormat(trueValue, falseValue, ignoreCase);
    }

    /**
     * Creates a formatter for boolean values where multiple values are accepted as true or false values. When parsing,
     * the supplied values are tested for equality against the input in following order:
     * <ol>
     *     <li>The first true value.</li>
     *     <li>The first false value.</li>
     *     <li>The rest of the true values are tested in supplied order.</li>
     *     <li>The rest of the false values are tested in supplied order.</li>
     * </ol>
     * @param trueValues An array of all of the strings that represents the true value. The first item in the array is used when formatting.
     * @param falseValues An array of all of the strings that represents the false value. The first item in the array is used when formatting.
     * @param ignoreCase If true, upper/lower case is ignored.
     * @return An instance that formats and parses Boolean.
     */
    static Format<Boolean> ofBooleanInstance(String[] trueValues, String[] falseValues, boolean ignoreCase){
        return new BooleanFormat(trueValues, falseValues, ignoreCase);
    }

    /**
     * Create a {@link Format} instance for the boolean cell type given the locale and a specified pattern.
     * @param pattern A pattern to use for the format object. If null or empty, default format will be returned.
     *                The pattern should contain the true and false values separated with a ; character.
     *                Example: pattern="Y;N" will imply that Y represents true and N represents false.
     *                Comparison while parsing is not case sensitive.
     *                Multiple true or false values can be specified, separated with the | character but the first value is always the
     *                one used while composing. Example: pattern="Y|YES;N|NO"
     * @param ignoreCase If true, upper/lower case is ignored.
     * @return An instance that formats and parses Boolean.
     */
    static Format<Boolean> ofBooleanInstance(String pattern, boolean ignoreCase){
        if(pattern == null || pattern.isEmpty())
            return ofBooleanInstance(ignoreCase);
        String[] aTrueFalse = pattern.trim().split("\\s*;\\s*");
        if (aTrueFalse.length < 1 || aTrueFalse.length > 2)
            throw new IllegalArgumentException(
                    "Boolean format pattern should only contain two fields separated with ; character");
        // A missing false part falls back to matching the empty string.
        return ofBooleanInstance(aTrueFalse[0].split("\\s*\\|\\s*"), aTrueFalse.length == 2 ? aTrueFalse[1].split("\\s*\\|\\s*") : new String[]{""}, ignoreCase);
    }

    /**
     * @param pattern The parse pattern to use as in {@link java.text.DecimalFormat}. If null, only the locale is used.
     * @param locale The locale to use.
     * @return An instance that formats and parses numbers.
     */
    static Format<Number> ofNumberInstance(String pattern, Locale locale){
        if(pattern == null || pattern.isEmpty())
            return ofNumberInstance(locale);
        return new NumberFormat(pattern, locale);
    }

    /**
     * @param locale The locale to use.
     * @return An instance that formats and parses numbers.
     */
    static Format<Number> ofNumberInstance(Locale locale){
        return new NumberFormat(locale);
    }

    /**
     * @param locale The locale to use.
     * @return An instance that formats and parses double precision numbers.
     */
    static Format<Number> ofDoubleInstance(Locale locale){
        // Use equals() rather than == so any Locale equal to US (not just the
        // constant instance) gets the optimized US format.
        if(Locale.US.equals(locale))
            return new USDoubleFormat();
        return ofNumberInstance(locale);
    }

    /**
     * @param locale The locale to use.
     * @return An instance that formats and parses integer numbers.
     */
    static Format<Number> ofIntegerInstance(Locale locale){
        // Use equals() rather than == so any Locale equal to US gets the optimized format.
        if(Locale.US.equals(locale))
            return new USIntegerFormat();
        final java.text.NumberFormat intFormat = java.text.NumberFormat.getIntegerInstance(locale);
        // No grouping separators: "12345", never "12,345".
        intFormat.setGroupingUsed(false);
        return new NumberFormat(intFormat);
    }

    /**
     * @param formatter The formatter to use while formatting and parsing.
     * @return An instance that formats and parses date time objects.
     */
    static Format<TemporalAccessor> ofDateTimeInstance(DateTimeFormatter formatter){
        return new DateTimeFormat(formatter);
    }

    /**
     * @param locale The locale to use
     * @param pattern The date pattern to use according to {@link DateTimeFormatter}. Required.
     * @return An instance that formats and parses date time objects.
     */
    static Format<TemporalAccessor> ofDateTimeInstance(Locale locale, String pattern){
        return new DateTimeFormat(DateTimeFormatter.ofPattern(pattern, locale));
    }

    /**
     * @param pattern The parse pattern to use as in {@link java.text.DecimalFormat}. If null, only the locale is used.
     * @param locale The locale to use.
     * @return An instance that formats and parses decimal numbers into {@link BigDecimal}.
     */
    static Format<BigDecimal> ofDecimalInstance(String pattern, Locale locale){
        if(pattern == null || pattern.isEmpty())
            return ofDecimalInstance(locale);
        return new DecimalFormat(pattern, locale);
    }

    /**
     * @param locale The locale to use.
     * @return An instance that formats and parses decimal numbers into {@link BigDecimal}.
     */
    static Format<BigDecimal> ofDecimalInstance(Locale locale){
        return new DecimalFormat(locale);
    }

    /**
     * @param decimals The number of decimals to imply
     * @return An instance that can be used to parse and format <a href="https://www.ibm.com/support/knowledgecenter/en/SSLVMB_24.0.0/spss/base/syn_data_list_implied_decimal_positions.html">implied decimals</a>.
     * The text representation is always an integer but when parsing the decimal point is shifted left and when composing it is shifted right.
     */
    static Format<BigDecimal> ofImpliedDecimalInstance(int decimals){
        return new ImpliedDecimalFormat(decimals);
    }

    /**
     * @return An instance that just does {@link String#valueOf(Object)} when formatting.
     */
    static Format<String> ofStringInstance(){
        return new StringFormat();
    }

    /**
     * @param pattern The regular expression to check while both parsing and formatting.
     * @return An instance that checks that the text representation matches the supplied regular expression. If null or empty, no check will be made.
     * @see java.util.regex.Pattern
     */
    static Format<String> ofStringInstance(String pattern){
        if(pattern == null || pattern.isEmpty())
            return ofStringInstance();
        return new RegExpFormat(pattern);
    }

    /**
     * @return An instance that formats characters.
     */
    static Format<Character> ofCharacterInstance() {
        // Parameterized return type (was raw Format) so callers get type safety.
        return new CharacterFormat();
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* BreakPointServicePlugin.java
*
* Created on February 6, 2002, 11:13 AM
*/
package ghidra.app.plugin.core.misc;
import java.awt.Color;
import java.io.IOException;
import javax.swing.ImageIcon;
import docking.ActionContext;
import docking.action.DockingAction;
import docking.action.ToolBarData;
import ghidra.app.CorePluginPackage;
import ghidra.app.plugin.PluginCategoryNames;
import ghidra.app.plugin.ProgramPlugin;
import ghidra.app.services.MarkerService;
import ghidra.app.services.MarkerSet;
import ghidra.framework.client.ClientUtil;
import ghidra.framework.data.DomainObjectAdapterDB;
import ghidra.framework.main.AppInfo;
import ghidra.framework.model.*;
import ghidra.framework.plugintool.PluginInfo;
import ghidra.framework.plugintool.PluginTool;
import ghidra.framework.plugintool.util.PluginStatus;
import ghidra.program.model.address.*;
import ghidra.program.model.listing.Program;
import ghidra.program.model.listing.ProgramChangeSet;
import ghidra.util.*;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.SwingUpdateManager;
import ghidra.util.task.TaskMonitor;
import ghidra.util.worker.Job;
import ghidra.util.worker.Worker;
import resources.ResourceManager;
/**
* Manages the markers to display areas where changes have occurred
*/
@PluginInfo( //@formatter:off
status = PluginStatus.RELEASED,
packageName = CorePluginPackage.NAME,
category = PluginCategoryNames.CODE_VIEWER,
shortDescription = "Indicates areas that have changed",
description = "This plugin tracks program changes and indicates those areas by " +
"creating changebars via the marker manager. In addition to showing current " +
"changes, it also tracks and displays changes by others if the program is shared.",
servicesRequired = { MarkerService.class }
) //@formatter:on
public class MyProgramChangesDisplayPlugin extends ProgramPlugin implements DomainObjectListener {
// priorities for the different change sets displayed - higher takes precedent when painting
private final static int CHANGES_SINCE_CO_PRIORITY = MarkerService.CHANGE_PRIORITY;
private final static int MY_CHANGE_PRIORITY = MarkerService.CHANGE_PRIORITY + 1;
private final static int OTHER_CHANGES_PRIORITY = MarkerService.CHANGE_PRIORITY + 2;
private final static int CONFLICT_PRIORITY = MarkerService.CHANGE_PRIORITY + 3;
private MarkerService markerService;
private MarkerSet currentMyChangeMarks; // my changes since last save
private MarkerSet currentChangesSinceCheckoutMarks; // mark changes since my checkout
private MarkerSet currentOtherChangeMarks; // mark other changes since MY check out
private MarkerSet currentConflictChangeMarks; // mark other changes that conflict with my changes
private ProgramFolderListener folderListener;
private TransactionListener transactionListener;
private SwingUpdateManager updateManager;
private Worker worker = Worker.createGuiWorker();
private DockingAction checkInAction;
private DockingAction mergeAction;
private AddressSetView otherChangeSet;
private int serverVersion = -1;
private int localVersion = -1;
private boolean programChangedLocally;
private boolean programChangedRemotely;
private boolean programSaved;
private boolean updateConflicts;
/**
 * Constructs the plugin: wires the project folder and transaction listeners
 * and creates the merge/check-in toolbar actions.
 *
 * @param tool the tool hosting this plugin
 */
public MyProgramChangesDisplayPlugin(PluginTool tool) {
	super(tool, false, false);

	folderListener = new ProgramFolderListener();
	transactionListener = new ProgramTransactionListener();
	// Watch the project so domain file version changes refresh our markers.
	tool.getProject().getProjectData().addDomainFolderChangeListener(folderListener);
	createActions();
}
/**
 * Creates the "Update" (merge) and "CheckIn" toolbar actions and registers
 * them with the tool. Each action is only enabled when the current program's
 * domain file supports that operation.
 */
private void createActions() {

	ImageIcon icon = ResourceManager.loadImage("images/vcMerge.png");
	mergeAction = new DockingAction("Update", getName()) {
		@Override
		public void actionPerformed(ActionContext context) {
			// Delegate the merge to the front-end (project) tool.
			AppInfo.getFrontEndTool().merge(tool, currentProgram.getDomainFile(), null);
		}

		@Override
		public boolean isEnabledForContext(ActionContext context) {
			return currentProgram != null && currentProgram.getDomainFile().canMerge();
		}
	};
	mergeAction.setToolBarData(new ToolBarData(icon, "Repository"));
	mergeAction.setDescription("Update checked out file with latest version");
	mergeAction.setHelpLocation(new HelpLocation("VersionControl", mergeAction.getName()));

	// Reuse the local; second image is for the check-in action.
	icon = ResourceManager.loadImage("images/vcCheckIn.png");
	checkInAction = new DockingAction("CheckIn", getName()) {
		@Override
		public void actionPerformed(ActionContext context) {
			AppInfo.getFrontEndTool()
					.checkIn(tool, currentProgram.getDomainFile());
		}

		@Override
		public boolean isEnabledForContext(ActionContext context) {
			return currentProgram != null && currentProgram.getDomainFile().canCheckin();
		}
	};
	checkInAction.setToolBarData(new ToolBarData(icon, "Repository"));
	checkInAction.setDescription("Check in file");
	checkInAction.setHelpLocation(new HelpLocation("VersionControl", checkInAction.getName()));

	tool.addAction(mergeAction);
	tool.addAction(checkInAction);
}
/** Resolves the marker service and creates the buffered updater for marker refreshes. */
@Override
public void init() {
	// batch marker refreshes: at most one updateChangeMarkers() per second on the Swing thread
	updateManager = new SwingUpdateManager(1000, this::updateChangeMarkers);
	markerService = tool.getService(MarkerService.class);
}
/** Starts listening to the newly activated program and builds its marker sets. */
@Override
protected void programActivated(Program program) {
	program.addListener(this);
	program.addTransactionListener(transactionListener);
	updateForDomainFileChanged();
	createMarkerSets(program);
	// NOTE(review): method name has a typo ("intialize"); renaming would touch its definition too
	intializeChangeMarkers();
}
/** Stops tracking the deactivated program and resets cached version/dirty state. */
@Override
protected void programDeactivated(Program program) {
	serverVersion = -1;
	localVersion = -1;
	programChangedLocally = false;
	programChangedRemotely = false;
	programSaved = false;
	program.removeTransactionListener(transactionListener);
	program.removeListener(this);
	disposeMarkerSets(program);
}
/** Forces a full refresh of every marker set by raising all dirty flags first. */
private void intializeChangeMarkers() {
	// set all the triggers for updating markers when initializing
	programChangedLocally = true;
	programChangedRemotely = true;
	programSaved = true;
	updateConflicts = true;
	updateChangeMarkers();
}
/**
 * Creates the marker set for unsaved local changes; when the file is checked out,
 * also creates the three server-tracking marker sets.
 */
private void createMarkerSets(Program program) {
	currentMyChangeMarks =
		markerService.createAreaMarker("Changes: Unsaved", "My changes not yet saved", program,
			MY_CHANGE_PRIORITY, true, true, false, Color.darkGray);
	if (program.getDomainFile().isCheckedOut()) {
		trackServerChanges(program);
	}
}
/**
 * Creates the marker sets that compare my checkout against the repository: my saved
 * changes since checkout (green), others' changes (blue) and conflicts (red).
 */
private void trackServerChanges(Program program) {
	currentChangesSinceCheckoutMarks = markerService.createAreaMarker("Changes: Not Checked-In",
		"My saved changes made since I checked it out", program, CHANGES_SINCE_CO_PRIORITY,
		true, true, false, Color.GREEN);
	currentOtherChangeMarks = markerService.createAreaMarker("Changes: Latest Version",
		"Changes made by others to this program since I checked it out", program,
		OTHER_CHANGES_PRIORITY, true, true, false, Color.BLUE);
	currentConflictChangeMarks = markerService.createAreaMarker("Changes: Conflicting",
		"Changes made by others to this program that conflict with my changes", program,
		CONFLICT_PRIORITY, true, true, false, Color.RED);
}
/**
 * Removes and forgets all marker sets for the given program.
 * <p>
 * The three server-tracking marker sets are only created when the file is checked out
 * (see {@code createMarkerSets}/{@code trackServerChanges}), so they may be null here;
 * each removal is null-guarded rather than relying on the service tolerating null.
 */
private void disposeMarkerSets(Program program) {
	removeMarkerSet(currentMyChangeMarks, program);
	removeMarkerSet(currentChangesSinceCheckoutMarks, program);
	removeMarkerSet(currentOtherChangeMarks, program);
	removeMarkerSet(currentConflictChangeMarks, program);
	currentMyChangeMarks = null;
	currentChangesSinceCheckoutMarks = null;
	currentOtherChangeMarks = null;
	currentConflictChangeMarks = null;
}

/** Removes one marker set from the service, ignoring sets that were never created. */
private void removeMarkerSet(MarkerSet markers, Program program) {
	if (markers != null) {
		markerService.removeMarker(markers, program);
	}
}
/** Tears down the worker, listeners, update manager and marker sets at plugin shutdown. */
@Override
public void dispose() {
	worker.dispose();
	if (currentProgram != null) {
		currentProgram.removeTransactionListener(transactionListener);
		currentProgram.removeListener(this);
	}
	tool.getProject().getProjectData().removeDomainFolderChangeListener(folderListener);
	// dispose the updater before touching markers so no refresh runs mid-teardown
	if (updateManager != null) {
		updateManager.dispose();
		updateManager = null;
	}
	if (currentProgram != null) {
		disposeMarkerSets(currentProgram);
	}
	markerService = null;
	super.dispose();
}
/**
 * Refreshes whichever marker sets have their dirty flag raised, then clears all flags.
 * Must be called on the Swing thread (normally via the SwingUpdateManager).
 */
private void updateChangeMarkers() {
	Swing.assertSwingThread(
		"Change markers must be manipulated on the Swing thread");
	if (currentProgram == null) {
		return;
	}
	ProgramChangeSet changeSet = currentProgram.getChanges();
	if (programChangedLocally) {
		currentMyChangeMarks.setAddressSetCollection(
			changeSet.getAddressSetCollectionSinceLastSave());
	}
	if (isTrackingServerChanges()) {
		if (programSaved) {
			currentChangesSinceCheckoutMarks.setAddressSetCollection(
				changeSet.getAddressSetCollectionSinceCheckout());
		}
		if (programChangedRemotely) {
			currentOtherChangeMarks.setAddressSetCollection(
				new SingleAddressSetCollection(otherChangeSet));
		}
		// only update conflict markers when server changeSet changes or we end a transaction
		if (programChangedRemotely || updateConflicts) {
			// conflicts = my since-checkout changes intersected with others' changes
			AddressSet intersect =
				changeSet.getAddressSetCollectionSinceCheckout()
					.getCombinedAddressSet()
					.intersect(
						otherChangeSet);
			currentConflictChangeMarks.setAddressSetCollection(
				new SingleAddressSetCollection(intersect));
		}
	}
	// all marker sets are now current; lower the dirty flags
	programChangedLocally = false;
	programChangedRemotely = false;
	programSaved = false;
	updateConflicts = false;
}
/**
 * If version numbers are different, get changes made by others since my
 * checkout and launch update thread if necessary.
 */
private void updateForDomainFileChanged() {
	DomainFile df = currentProgram.getDomainFile();
	int latestServerVersion = df.getLatestVersion();
	int latestLocalVersion = df.getVersion();
	// if the server version changes, schedule thread to get server changeSet
	// which will trigger a marker update for both the other and conflict marker sets.
	if (df.isCheckedOut() && serverVersion != latestServerVersion) {
		serverVersion = latestServerVersion;
		localVersion = latestLocalVersion;
		if (serverVersion == localVersion) {
			// we are up to date with the server: nobody else has changed anything
			otherChangeSet = new AddressSet();
			programChangedRemotely = true;
			updateManager.update();
		}
		else {
			// fetch the remote change set on a background thread
			scheduleUpdatesFromServer(currentProgram);
		}
	}
	// else just the local version changed, update conflict sets.
	else if (latestLocalVersion != localVersion) {
		localVersion = latestLocalVersion;
		updateConflicts = true;
		updateManager.update();
	}
}
/** Queues a background job to fetch the server change set for the given program's file. */
private void scheduleUpdatesFromServer(Program p) {
	// only the most recent request matters; drop anything still waiting to run
	worker.clearPendingJobs();
	worker.schedule(new UpdateChangeSetJob(p.getDomainFile()));
}
/** Returns true once trackServerChanges() has created the checkout marker set. */
public boolean isTrackingServerChanges() {
	return null != currentChangesSinceCheckoutMarks;
}
/** Flags a local change (and, on a save event, a save) and schedules a marker refresh. */
@Override
public void domainObjectChanged(DomainObjectChangedEvent ev) {
	programChangedLocally = true;
	// the flag is sticky until updateChangeMarkers() consumes it, so only ever raise it
	programSaved = programSaved || ev.containsEvent(DomainObject.DO_OBJECT_SAVED);
	updateManager.update();
}
// Package-visible accessor for the background worker (e.g. for tests)
Worker getWorker() {
	return worker;
}
//==================================================================================================
// Inner Classes
//==================================================================================================
/** Raises the conflict-update flag whenever a transaction ends or an undo/redo occurs. */
private class ProgramTransactionListener implements TransactionListener {
	@Override
	public void transactionStarted(DomainObjectAdapterDB domainObj, Transaction tx) {
		// ignore
	}
	@Override
	public void transactionEnded(DomainObjectAdapterDB domainObj) {
		updateConflicts = true;
		updateManager.update();
	}
	@Override
	public void undoStackChanged(DomainObjectAdapterDB domainObj) {
		// ignore
	}
	@Override
	public void undoRedoOccurred(DomainObjectAdapterDB domainObj) {
		// undo/redo changes the local change set, so conflicts must be recomputed
		updateConflicts = true;
		updateManager.update();
	}
}
/**
 * Watches project folder events; when the current program's file becomes (or is)
 * checked out, begins server-change tracking and refreshes version state.
 */
private class ProgramFolderListener extends DomainFolderListenerAdapter {
	@Override
	public void domainFileStatusChanged(DomainFile file, boolean fileIDset) {
		// hop to the Swing thread; marker/state updates must happen there
		Swing.runLater(() -> {
			if (currentProgram == null) {
				return;
			}
			// only react to events for the program currently open in this tool
			DomainFile domainFile = currentProgram.getDomainFile();
			if (!file.equals(domainFile)) {
				return;
			}
			if (domainFile.isCheckedOut()) {
				if (!isTrackingServerChanges()) {
					trackServerChanges(currentProgram);
				}
				updateForDomainFileChanged();
			}
		});
	}
}
/** A job to grab program changes from the server */
private class UpdateChangeSetJob extends Job {
	private DomainFile domainFile;
	UpdateChangeSetJob(DomainFile domainFile) {
		this.domainFile = domainFile;
	}
	@Override
	public void run(TaskMonitor monitor) throws CancelledException {
		monitor.checkCanceled(); // plugin was shut down while we were scheduled
		ProgramChangeSet changes = null;
		try {
			changes = (ProgramChangeSet) domainFile.getChangesByOthersSinceCheckout();
		}
		catch (IOException e) {
			// best-effort: log and bail; markers simply stay stale until the next trigger
			Msg.warn(this, "Unable to determine program change set: " + e.getMessage());
			return;
		}
		catch (Exception e) {
			ClientUtil.handleException(tool.getProject().getRepository(), e, "Get Change Set",
				false, tool.getToolFrame());
			return;
		}
		// null change set means no remote changes; normalize to an empty set
		AddressSetView remoteChanges =
			changes != null ? changes.getAddressSet() : new AddressSet();
		Swing.runNow(() -> applyChanges(remoteChanges));
	}
	// Runs on the Swing thread: publish the remote set and schedule a marker refresh
	private void applyChanges(AddressSetView remoteChanges) {
		if (isDisposed()) {
			return; // plugin was shut down while we were running
		}
		otherChangeSet = remoteChanges;
		programChangedRemotely = true;
		updateManager.update();
	}
}
// Package-visible accessor for the "changes by others" marker set (e.g. for tests)
MarkerSet getExternalChangeMarkers() {
	return currentOtherChangeMarks;
}
}
| |
/*
* Copyright 2012 - 2013 Benjamin Weiss
* Copyright 2012 Neofonie Mobile GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.elephant.widget.crouton;
import java.util.Iterator;
import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.support.v4.view.accessibility.AccessibilityEventCompat;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.view.ViewTreeObserver;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityManager;
import android.widget.FrameLayout;
/** Manages the lifecycle of {@link Crouton}s. */
/**
 * Manages the lifecycle of {@link Crouton}s: queues them, posts display/remove messages
 * to itself (it is a {@link Handler}) and attaches/detaches their views.
 * <p>
 * Fix over the original: {@code displayCrouton()} used to poll a Crouton whose activity
 * was null off the queue but then kept operating on that detached Crouton; it now moves
 * on to the next queued Crouton instead.
 */
final class Manager extends Handler {
	/** Message ids used when this Handler posts work to itself. */
	private static final class Messages {
		private Messages() { /* no-op */
		}
		public static final int DISPLAY_CROUTON = 0xc2007;
		public static final int ADD_CROUTON_TO_VIEW = 0xc20074dd;
		public static final int REMOVE_CROUTON = 0xc2007de1;
	}
	private static Manager INSTANCE;
	private Queue<Crouton> croutonQueue;
	private Manager() {
		croutonQueue = new LinkedBlockingQueue<Crouton>();
	}
	/** @return The currently used instance of the {@link Manager}. */
	static synchronized Manager getInstance() {
		if (null == INSTANCE) {
			INSTANCE = new Manager();
		}
		return INSTANCE;
	}
	/**
	 * Inserts a {@link Crouton} to be displayed.
	 *
	 * @param crouton
	 *            The {@link Crouton} to be displayed.
	 */
	void add(Crouton crouton) {
		croutonQueue.add(crouton);
		displayCrouton();
	}
	/** Displays the next {@link Crouton} within the queue. */
	private void displayCrouton() {
		if (croutonQueue.isEmpty()) {
			return;
		}
		// First peek whether the Crouton has an activity.
		final Crouton currentCrouton = croutonQueue.peek();
		// If the activity is null we poll the Crouton off the queue and try the
		// next one. (Previously the detached Crouton was still used after polling.)
		if (null == currentCrouton.getActivity()) {
			croutonQueue.poll();
			displayCrouton();
			return;
		}
		if (!currentCrouton.isShowing()) {
			// Display the Crouton
			sendMessage(currentCrouton, Messages.ADD_CROUTON_TO_VIEW);
			if (null != currentCrouton.getLifecycleCallback()) {
				currentCrouton.getLifecycleCallback().onDisplayed();
			}
		} else {
			// Already showing: re-check once its display duration has elapsed
			sendMessageDelayed(currentCrouton, Messages.DISPLAY_CROUTON,
					calculateCroutonDuration(currentCrouton));
		}
	}
	/** Total on-screen time: configured duration plus in/out animation durations. */
	private long calculateCroutonDuration(Crouton crouton) {
		long croutonDuration = crouton.getConfiguration().durationInMilliseconds;
		croutonDuration += crouton.getInAnimation().getDuration();
		croutonDuration += crouton.getOutAnimation().getDuration();
		return croutonDuration;
	}
	/**
	 * Sends a {@link Crouton} within a {@link Message}.
	 *
	 * @param crouton
	 *            The {@link Crouton} that should be sent.
	 * @param messageId
	 *            The {@link Message} id.
	 */
	private void sendMessage(Crouton crouton, final int messageId) {
		final Message message = obtainMessage(messageId);
		message.obj = crouton;
		sendMessage(message);
	}
	/**
	 * Sends a {@link Crouton} within a delayed {@link Message}.
	 *
	 * @param crouton
	 *            The {@link Crouton} that should be sent.
	 * @param messageId
	 *            The {@link Message} id.
	 * @param delay
	 *            The delay in milliseconds.
	 */
	private void sendMessageDelayed(Crouton crouton, final int messageId,
			final long delay) {
		Message message = obtainMessage(messageId);
		message.obj = crouton;
		sendMessageDelayed(message, delay);
	}
	/*
	 * (non-Javadoc)
	 *
	 * @see android.os.Handler#handleMessage(android.os.Message)
	 */
	@Override
	public void handleMessage(Message message) {
		final Crouton crouton = (Crouton) message.obj;
		switch (message.what) {
		case Messages.DISPLAY_CROUTON: {
			displayCrouton();
			break;
		}
		case Messages.ADD_CROUTON_TO_VIEW: {
			addCroutonToView(crouton);
			break;
		}
		case Messages.REMOVE_CROUTON: {
			removeCrouton(crouton);
			if (null != crouton.getLifecycleCallback()) {
				crouton.getLifecycleCallback().onRemoved();
			}
			break;
		}
		default: {
			super.handleMessage(message);
			break;
		}
		}
	}
	/**
	 * Adds a {@link Crouton} to the {@link ViewParent} of it's {@link Activity}
	 * .
	 *
	 * @param crouton
	 *            The {@link Crouton} that should be added.
	 */
	private void addCroutonToView(final Crouton crouton) {
		// don't add if it is already showing
		if (crouton.isShowing()) {
			return;
		}
		final View croutonView = crouton.getView();
		if (null == croutonView.getParent()) {
			ViewGroup.LayoutParams params = croutonView.getLayoutParams();
			if (null == params) {
				params = new ViewGroup.LayoutParams(
						ViewGroup.LayoutParams.MATCH_PARENT,
						ViewGroup.LayoutParams.WRAP_CONTENT);
			}
			// display Crouton in ViewGroup if it has been supplied
			if (null != crouton.getViewGroup()) {
				// TODO implement add to last position feature (need to align
				// with how this will be requested for activity)
				if (crouton.getViewGroup() instanceof FrameLayout) {
					crouton.getViewGroup().addView(croutonView, params);
				} else {
					crouton.getViewGroup().addView(croutonView, 0, params);
				}
			} else {
				Activity activity = crouton.getActivity();
				if (null == activity || activity.isFinishing()) {
					return;
				}
				activity.addContentView(croutonView, params);
			}
		}
		croutonView.requestLayout(); // This is needed so the animation can use
		// the measured width/height
		croutonView.getViewTreeObserver().addOnGlobalLayoutListener(
				new ViewTreeObserver.OnGlobalLayoutListener() {
					@SuppressLint("NewApi")
					@Override
					public void onGlobalLayout() {
						// one-shot listener: remove with the API appropriate for this SDK
						if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
							croutonView.getViewTreeObserver()
									.removeGlobalOnLayoutListener(this);
						} else {
							croutonView.getViewTreeObserver()
									.removeOnGlobalLayoutListener(this);
						}
						croutonView.startAnimation(crouton.getInAnimation());
						announceForAccessibilityCompat(crouton.getActivity(),
								crouton.getText());
						// infinite croutons are removed explicitly, never by timeout
						if (Configuration.DURATION_INFINITE != crouton
								.getConfiguration().durationInMilliseconds) {
							sendMessageDelayed(
									crouton,
									Messages.REMOVE_CROUTON,
									crouton.getConfiguration().durationInMilliseconds
											+ crouton.getInAnimation()
													.getDuration());
						}
					}
				});
	}
	/**
	 * Removes the {@link Crouton}'s view after it's display
	 * durationInMilliseconds.
	 *
	 * @param crouton
	 *            The {@link Crouton} added to a {@link ViewGroup} and should be
	 *            removed.
	 */
	protected void removeCrouton(Crouton crouton) {
		View croutonView = crouton.getView();
		ViewGroup croutonParentView = (ViewGroup) croutonView.getParent();
		if (null != croutonParentView) {
			croutonView.startAnimation(crouton.getOutAnimation());
			// Remove the Crouton from the queue.
			Crouton removed = croutonQueue.poll();
			// Remove the crouton from the view's parent.
			croutonParentView.removeView(croutonView);
			if (null != removed) {
				removed.detachActivity();
				removed.detachViewGroup();
				if (null != removed.getLifecycleCallback()) {
					removed.getLifecycleCallback().onRemoved();
				}
				removed.detachLifecycleCallback();
			}
			// Send a message to display the next crouton but delay it by the
			// out
			// animation duration to make sure it finishes
			sendMessageDelayed(crouton, Messages.DISPLAY_CROUTON, crouton
					.getOutAnimation().getDuration());
		}
	}
	/**
	 * Removes a {@link Crouton} immediately, even when it's currently being
	 * displayed.
	 *
	 * @param crouton
	 *            The {@link Crouton} that should be removed.
	 */
	void removeCroutonImmediately(Crouton crouton) {
		// if Crouton has already been displayed then it may not be in the queue
		// (because it was popped).
		// This ensures the displayed Crouton is removed from its parent
		// immediately, whether another instance
		// of it exists in the queue or not.
		// Note: crouton.isShowing() is false here even if it really is showing,
		// as croutonView object in
		// Crouton seems to be out of sync with reality!
		if (null != crouton.getActivity() && null != crouton.getView()
				&& null != crouton.getView().getParent()) {
			((ViewGroup) crouton.getView().getParent()).removeView(crouton
					.getView());
			// remove any messages pending for the crouton
			removeAllMessagesForCrouton(crouton);
		}
		// remove any matching croutons from queue
		if (null != croutonQueue) {
			final Iterator<Crouton> croutonIterator = croutonQueue.iterator();
			while (croutonIterator.hasNext()) {
				final Crouton c = croutonIterator.next();
				if (c.equals(crouton) && (null != c.getActivity())) {
					// remove the crouton from the content view
					if (crouton.isShowing()) {
						((ViewGroup) c.getView().getParent()).removeView(c
								.getView());
					}
					// remove any messages pending for the crouton
					removeAllMessagesForCrouton(c);
					// remove the crouton from the queue
					croutonIterator.remove();
					// we have found our crouton so just break
					break;
				}
			}
		}
	}
	/** Removes all {@link Crouton}s from the queue. */
	void clearCroutonQueue() {
		removeAllMessages();
		if (null != croutonQueue) {
			// remove any views that may already have been added to the
			// activity's
			// content view
			for (Crouton crouton : croutonQueue) {
				if (crouton.isShowing()) {
					((ViewGroup) crouton.getView().getParent())
							.removeView(crouton.getView());
				}
			}
			croutonQueue.clear();
		}
	}
	/**
	 * Removes all {@link Crouton}s for the provided activity. This will remove
	 * crouton from {@link Activity}s content view immediately.
	 */
	void clearCroutonsForActivity(Activity activity) {
		if (null != croutonQueue) {
			Iterator<Crouton> croutonIterator = croutonQueue.iterator();
			while (croutonIterator.hasNext()) {
				Crouton crouton = croutonIterator.next();
				if ((null != crouton.getActivity())
						&& crouton.getActivity().equals(activity)) {
					// remove the crouton from the content view
					if (crouton.isShowing()) {
						((ViewGroup) crouton.getView().getParent())
								.removeView(crouton.getView());
					}
					removeAllMessagesForCrouton(crouton);
					// remove the crouton from the queue
					croutonIterator.remove();
				}
			}
		}
	}
	/** Drops every pending display/add/remove message for any Crouton. */
	private void removeAllMessages() {
		removeMessages(Messages.ADD_CROUTON_TO_VIEW);
		removeMessages(Messages.DISPLAY_CROUTON);
		removeMessages(Messages.REMOVE_CROUTON);
	}
	/** Drops every pending display/add/remove message targeting the given Crouton. */
	private void removeAllMessagesForCrouton(Crouton crouton) {
		removeMessages(Messages.ADD_CROUTON_TO_VIEW, crouton);
		removeMessages(Messages.DISPLAY_CROUTON, crouton);
		removeMessages(Messages.REMOVE_CROUTON, crouton);
	}
	/**
	 * Generates and dispatches an SDK-specific spoken announcement.
	 * <p>
	 * For backwards compatibility, we're constructing an event from scratch
	 * using the appropriate event type. If your application only targets SDK
	 * 16+, you can just call View.announceForAccessibility(CharSequence).
	 * </p>
	 * <p/>
	 * note: AccessibilityManager is only available from API lvl 4.
	 * <p/>
	 * Adapted from https://eyes-free.googlecode.com/files/
	 * accessibility_codelab_demos_v2_src.zip via
	 * https://github.com/coreform/android-formidable-validation
	 *
	 * @param context
	 *            Used to get {@link AccessibilityManager}
	 * @param text
	 *            The text to announce.
	 */
	public static void announceForAccessibilityCompat(Context context,
			CharSequence text) {
		if (Build.VERSION.SDK_INT >= 4) {
			AccessibilityManager accessibilityManager = (AccessibilityManager) context
					.getSystemService(Context.ACCESSIBILITY_SERVICE);
			if (!accessibilityManager.isEnabled()) {
				return;
			}
			// Prior to SDK 16, announcements could only be made through FOCUSED
			// events. Jelly Bean (SDK 16) added support for speaking text
			// verbatim
			// using the ANNOUNCEMENT event type.
			final int eventType;
			if (Build.VERSION.SDK_INT < 16) {
				eventType = AccessibilityEvent.TYPE_VIEW_FOCUSED;
			} else {
				eventType = AccessibilityEventCompat.TYPE_ANNOUNCEMENT;
			}
			// Construct an accessibility event with the minimum recommended
			// attributes. An event without a class name or package may be
			// dropped.
			final AccessibilityEvent event = AccessibilityEvent
					.obtain(eventType);
			event.getText().add(text);
			event.setClassName(Manager.class.getName());
			event.setPackageName(context.getPackageName());
			// Sends the event directly through the accessibility manager. If
			// your
			// application only targets SDK 14+, you should just call
			// getParent().requestSendAccessibilityEvent(this, event);
			accessibilityManager.sendAccessibilityEvent(event);
		}
	}
	@Override
	public String toString() {
		return "Manager{" + "croutonQueue=" + croutonQueue + '}';
	}
}
| |
/*
* Copyright 2006 ProductiveMe Inc.
* Copyright 2013-2018 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pme.launcher;
import org.apache.commons.imaging.ImageFormats;
import org.apache.commons.imaging.Imaging;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.input.SAXBuilder;
import java.awt.image.BufferedImage;
import java.io.*;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author yole
*/
/**
 * Command-line tool that patches a Windows launcher EXE template with product-specific
 * resources: splash bitmap, icon, string-table entries and version information, all
 * derived from an application-info XML file, a resource.h and a properties file.
 * <p>
 * Exits with a distinct non-zero code for each failure mode so build scripts can
 * distinguish them. Fix over the original: an unparseable minor version now exits
 * cleanly (code 13) instead of falling through to {@code matcher.group()} and dying
 * with an {@link IllegalStateException}.
 *
 * @author yole
 */
public class LauncherGeneratorMain {
	public static void main(String[] args) {
		if (args.length != 5) {
			System.err.println("Usage: LauncherGeneratorMain <template EXE file> <app info file> <resource.h file> <properties> <output>");
			System.exit(1);
		}
		File template = new File(args[0]);
		if (!template.exists()) {
			System.err.println("Launcher template EXE file " + args[0] + " not found");
			System.exit(2);
		}
		// The app-info file may live on disk or on the classpath; try disk first.
		String appInfoFileName = args[1];
		InputStream appInfoStream;
		try {
			appInfoStream = new FileInputStream(appInfoFileName);
		}
		catch (FileNotFoundException e) {
			appInfoStream = LauncherGeneratorMain.class.getClassLoader().getResourceAsStream(appInfoFileName);
		}
		if (appInfoStream == null) {
			System.err.println("Application info file " + appInfoFileName + " not found");
			System.exit(3);
		}
		Document appInfo;
		try {
			appInfo = new SAXBuilder().build(appInfoStream);
		} catch (Exception e) {
			System.err.println("Error loading application info file " + appInfoFileName + ": " + e.getMessage());
			System.exit(4);
			return;
		}
		Element appInfoRoot = appInfo.getRootElement();
		// Splash URLs are classpath resources; strip any leading slash.
		String splashUrl = getChild(appInfoRoot, "logo").getAttributeValue("url");
		if (splashUrl.startsWith("/")) {
			splashUrl = splashUrl.substring(1);
		}
		InputStream splashStream = LauncherGeneratorMain.class.getClassLoader().getResourceAsStream(splashUrl);
		if (splashStream == null) {
			System.err.println("Splash screen image file " + splashUrl + " not found");
			System.exit(5);
		}
		// Convert the splash image to BMP, the format Windows resources require.
		ByteArrayOutputStream splashBmpStream = new ByteArrayOutputStream();
		try {
			BufferedImage bufferedImage = Imaging.getBufferedImage(splashStream);
			Imaging.writeImage(bufferedImage, splashBmpStream, ImageFormats.BMP, new HashMap<String, Object>());
		}
		catch (Exception e) {
			System.err.println("Error converting splash screen to BMP: " + e.getMessage());
			System.exit(6);
		}
		String icoUrl = getChild(appInfoRoot, "icon").getAttributeValue("ico");
		if (icoUrl == null) {
			System.err.println(".ico file URL not specified in application info file " + appInfoFileName);
			System.exit(11);
		}
		InputStream iconStream = LauncherGeneratorMain.class.getClassLoader().getResourceAsStream(icoUrl);
		if (iconStream == null) {
			System.err.println(".ico file " + icoUrl + " not found");
			System.exit(12);
		}
		Map<String, Integer> resourceIDs;
		try {
			resourceIDs = loadResourceIDs(args[2]);
		}
		catch (Exception e) {
			System.err.println("Error loading resource.h: " + e.getMessage());
			System.exit(7);
			return;
		}
		Properties properties = new Properties();
		try {
			FileInputStream fis = new FileInputStream(args[3]);
			try {
				properties.load(fis);
			}
			finally {
				fis.close();
			}
		}
		catch (IOException e) {
			System.err.println("Error loading launcher properties: " + e.getMessage());
			System.exit(8);
		}
		String companyName = getChild(appInfoRoot, "company").getAttributeValue("name");
		Element names = getChild(appInfoRoot, "names");
		String productShortName = names.getAttributeValue("product");
		String productFullName = names.getAttributeValue("fullname", productShortName);
		Element versionElement = getChild(appInfoRoot, "version");
		int majorVersion = Integer.parseInt(versionElement.getAttributeValue("major"));
		// minor version is "N" or "N.M"; M becomes the bugfix component (default 0)
		String minorVersionString = versionElement.getAttributeValue("minor");
		Pattern p = Pattern.compile("(\\d+)(\\.(\\d+))?");
		Matcher matcher = p.matcher(minorVersionString);
		if (!matcher.matches()) {
			System.err.println("Unexpected minor version format: " + minorVersionString);
			// previously fell through to matcher.group() and crashed with IllegalStateException
			System.exit(13);
		}
		int minorVersion = Integer.parseInt(matcher.group(1));
		int bugfixVersion = matcher.group(3) != null ? Integer.parseInt(matcher.group(3)) : 0;
		String buildNumber = getChild(appInfoRoot, "build").getAttributeValue("number");
		String versionString = "" + majorVersion + "." + minorVersion + "." + bugfixVersion + "." + buildNumber;
		int year = new GregorianCalendar().get(Calendar.YEAR);
		LauncherGenerator generator = new LauncherGenerator(template, new File(args[4]));
		try {
			generator.load();
			// Every launcher property must have a matching ID in resource.h.
			for (Map.Entry<Object, Object> pair : properties.entrySet()) {
				String key = (String) pair.getKey();
				Integer id = resourceIDs.get(key);
				if (id == null) {
					System.err.println("Invalid stringtable ID found: " + key);
					System.exit(9);
				}
				generator.setResourceString(id, (String) pair.getValue());
			}
			generator.injectBitmap(resourceIDs.get("IDB_SPLASH"), splashBmpStream.toByteArray());
			generator.injectIcon(resourceIDs.get("IDI_WINLAUNCHER"), iconStream);
			generator.setVersionInfoString("LegalCopyright", "Copyright (C) 2000-" + year + " " + companyName);
			generator.setVersionInfoString("ProductName", productFullName);
			generator.setVersionInfoString("FileVersion", versionString);
			generator.setVersionInfoString("FileDescription", productFullName);
			generator.setVersionInfoString("ProductVersion", versionString);
			generator.setVersionInfoString("InternalName", productShortName.toLowerCase() + ".exe");
			generator.setVersionInfoString("OriginalFilename", productShortName.toLowerCase() + ".exe");
			generator.setVersionNumber(majorVersion, minorVersion, bugfixVersion);
			generator.generate();
		} catch (IOException e) {
			e.printStackTrace();
			System.exit(10);
		}
	}

	/** Looks up a direct child element in the app-info root's namespace. */
	private static Element getChild(Element appInfoRoot, String name) {
		return appInfoRoot.getChild(name, appInfoRoot.getNamespace());
	}

	/**
	 * Parses a resource.h file, collecting every {@code #define NAME number} line.
	 *
	 * @param arg path to the resource.h file
	 * @return map from resource symbol to its numeric ID
	 * @throws IOException if the file cannot be read
	 */
	private static Map<String, Integer> loadResourceIDs(String arg) throws IOException {
		Map<String, Integer> result = new HashMap<String, Integer>();
		BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(arg)));
		Pattern pattern = Pattern.compile("#define (\\w+)\\s+(\\d+)");
		try {
			while(true) {
				String line = reader.readLine();
				if (line == null) break;
				Matcher m = pattern.matcher(line);
				if (m.matches()) {
					result.put(m.group(1), Integer.parseInt(m.group(2)));
				}
			}
		}
		finally {
			reader.close();
		}
		return result;
	}
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.provider;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.pm.UserInfo;
import android.database.Cursor;
import android.net.Uri;
import android.os.UserHandle;
import android.os.UserManager;
import android.provider.Settings;
import android.test.AndroidTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;
import java.util.List;
/** Unit test for SettingsProvider. */
public class SettingsProviderTest extends AndroidTestCase {
/** Verifies put/get/overwrite/delete round-trips in both the Secure and System tables. */
@MediumTest
public void testNameValueCache() {
	ContentResolver r = getContext().getContentResolver();
	Settings.Secure.putString(r, "test_service", "Value");
	assertEquals("Value", Settings.Secure.getString(r, "test_service"));
	// Make sure the value can be overwritten.
	Settings.Secure.putString(r, "test_service", "New");
	assertEquals("New", Settings.Secure.getString(r, "test_service"));
	// Also that delete works.
	assertEquals(1, r.delete(Settings.Secure.getUriFor("test_service"), null, null));
	assertEquals(null, Settings.Secure.getString(r, "test_service"));
	// Try all the same things in the System table
	Settings.System.putString(r, "test_setting", "Value");
	assertEquals("Value", Settings.System.getString(r, "test_setting"));
	Settings.System.putString(r, "test_setting", "New");
	assertEquals("New", Settings.System.getString(r, "test_setting"));
	assertEquals(1, r.delete(Settings.System.getUriFor("test_setting"), null, null));
	assertEquals(null, Settings.System.getString(r, "test_setting"));
}
/**
 * Verifies that the System/Secure tables address rows by name (not row ID):
 * URI shape, insert, query with/without WHERE, update and delete semantics.
 */
@MediumTest
public void testRowNameContentUri() {
	ContentResolver r = getContext().getContentResolver();
	assertEquals("content://settings/system/test_setting",
		Settings.System.getUriFor("test_setting").toString());
	assertEquals("content://settings/secure/test_service",
		Settings.Secure.getUriFor("test_service").toString());
	// These tables use the row name (not ID) as their content URI.
	Uri tables[] = { Settings.System.CONTENT_URI, Settings.Secure.CONTENT_URI };
	for (Uri table : tables) {
		ContentValues v = new ContentValues();
		v.put(Settings.System.NAME, "test_key");
		v.put(Settings.System.VALUE, "Test");
		Uri uri = r.insert(table, v);
		// insert returns the per-name URI, not a row-ID URI
		assertEquals(table.toString() + "/test_key", uri.toString());
		// Query with a specific URI and no WHERE clause succeeds.
		Cursor c = r.query(uri, null, null, null, null);
		try {
			assertTrue(c.moveToNext());
			assertEquals("test_key", c.getString(c.getColumnIndex(Settings.System.NAME)));
			assertEquals("Test", c.getString(c.getColumnIndex(Settings.System.VALUE)));
			assertFalse(c.moveToNext());
		} finally {
			c.close();
		}
		// Query with a specific URI and a WHERE clause fails.
		try {
			r.query(uri, null, "1", null, null);
			fail("UnsupportedOperationException expected");
		} catch (UnsupportedOperationException e) {
			// rethrow unrelated UnsupportedOperationExceptions
			if (!e.toString().contains("WHERE clause")) throw e;
		}
		// Query with a tablewide URI and a WHERE clause succeeds.
		c = r.query(table, null, "name='test_key'", null, null);
		try {
			assertTrue(c.moveToNext());
			assertEquals("test_key", c.getString(c.getColumnIndex(Settings.System.NAME)));
			assertEquals("Test", c.getString(c.getColumnIndex(Settings.System.VALUE)));
			assertFalse(c.moveToNext());
		} finally {
			c.close();
		}
		// Update through the per-name URI, then confirm via re-query.
		v = new ContentValues();
		v.put(Settings.System.VALUE, "Toast");
		assertEquals(1, r.update(uri, v, null, null));
		c = r.query(uri, null, null, null, null);
		try {
			assertTrue(c.moveToNext());
			assertEquals("test_key", c.getString(c.getColumnIndex(Settings.System.NAME)));
			assertEquals("Toast", c.getString(c.getColumnIndex(Settings.System.VALUE)));
			assertFalse(c.moveToNext());
		} finally {
			c.close();
		}
		assertEquals(1, r.delete(uri, null, null));
	}
	// cleanup checks: rows are gone from both tables
	assertEquals(null, Settings.System.getString(r, "test_key"));
	assertEquals(null, Settings.Secure.getString(r, "test_key"));
}
/** Verifies standard row-ID content URIs via the Bookmarks table: add, update, delete. */
@MediumTest
public void testRowNumberContentUri() {
	ContentResolver r = getContext().getContentResolver();
	// The bookmarks table (and everything else) uses standard row number content URIs.
	Uri uri = Settings.Bookmarks.add(r, new Intent("TEST"),
		"Test Title", "Test Folder", '*', 123);
	assertTrue(ContentUris.parseId(uri) > 0);
	assertEquals("TEST", Settings.Bookmarks.getIntentForShortcut(r, '*').getAction());
	// rewrite the stored intent and confirm the shortcut resolves to it
	ContentValues v = new ContentValues();
	v.put(Settings.Bookmarks.INTENT, "#Intent;action=TOAST;end");
	assertEquals(1, r.update(uri, v, null, null));
	assertEquals("TOAST", Settings.Bookmarks.getIntentForShortcut(r, '*').getAction());
	assertEquals(1, r.delete(uri, null, null));
	assertEquals(null, Settings.Bookmarks.getIntentForShortcut(r, '*'));
}
@MediumTest
public void testParseProviderList() {
    final ContentResolver resolver = getContext().getContentResolver();
    final String key = Settings.Secure.LOCATION_PROVIDERS_ALLOWED;

    // A plain write must read back unchanged.
    Settings.Secure.putString(resolver, key, "test1,test2,test3");
    assertEquals(Settings.Secure.getString(resolver, key), "test1,test2,test3");

    // '+name' appends a provider to the comma-separated list.
    Settings.Secure.putString(resolver, key, "");
    Settings.Secure.putString(resolver, key, "+test1");
    assertEquals("test1", Settings.Secure.getString(resolver, key));
    Settings.Secure.putString(resolver, key, "+test2");
    assertEquals("test1,test2", Settings.Secure.getString(resolver, key));
    Settings.Secure.putString(resolver, key, "+test3");
    assertEquals("test1,test2,test3", Settings.Secure.getString(resolver, key));

    // '-name' removes a provider; check removal at the head of the list...
    Settings.Secure.putString(resolver, key, "-test1");
    assertEquals("test2,test3", Settings.Secure.getString(resolver, key));

    // ...in the middle...
    Settings.Secure.putString(resolver, key, "test1,test2,test3");
    Settings.Secure.putString(resolver, key, "-test2");
    assertEquals("test1,test3", Settings.Secure.getString(resolver, key));

    // ...and at the tail.
    Settings.Secure.putString(resolver, key, "test1,test2,test3");
    Settings.Secure.putString(resolver, key, "-test3");
    assertEquals("test1,test2", Settings.Secure.getString(resolver, key));
}
/** Returns true when {@code userHandle} matches a user currently known to {@code um}. */
private boolean findUser(UserManager um, int userHandle) {
    for (UserInfo info : um.getUsers()) {
        if (info.id != userHandle) {
            continue;
        }
        return true;
    }
    return false;
}
@MediumTest
public void testPerUserSettings() {
    UserManager um = (UserManager) getContext().getSystemService(Context.USER_SERVICE);
    ContentResolver r = getContext().getContentResolver();

    // Make sure there's an owner user before exercising per-user storage.
    assertTrue(findUser(um, UserHandle.USER_OWNER));

    // Create a throwaway guest user; removed in the finally block below.
    UserInfo user = um.createUser("TestUser1", UserInfo.FLAG_GUEST);
    assertTrue(user != null);

    try {
        // Write one value as the current user and a different value for the new user.
        final String TEST_KEY = "test_setting";
        final int SELF_VALUE = 40;
        final int OTHER_VALUE = 27;
        Settings.System.putInt(r, TEST_KEY, SELF_VALUE);
        Settings.System.putIntForUser(r, TEST_KEY, OTHER_VALUE, user.id);
        // Each user must read back its own value — settings must not bleed across users.
        int myValue = Settings.System.getInt(r, TEST_KEY, 0);
        int otherValue = Settings.System.getIntForUser(r, TEST_KEY, 0, user.id);
        assertTrue("Running as user " + UserHandle.myUserId()
                + " and reading/writing as user " + user.id
                + ", expected to read " + SELF_VALUE + " but got " + myValue,
                myValue == SELF_VALUE);
        assertTrue("Running as user " + UserHandle.myUserId()
                + " and reading/writing as user " + user.id
                + ", expected to read " + OTHER_VALUE + " but got " + otherValue,
                otherValue == OTHER_VALUE);
    } finally {
        // Tidy up: remove the test user even if an assertion above failed.
        um.removeUser(user.id);
    }
}
@SmallTest
public void testSettings() {
    // Settings screens reachable with a bare action intent, checked in the
    // same order as before the special-cased app-details intent.
    final String[] plainActionsBefore = {
            Settings.ACTION_ACCESSIBILITY_SETTINGS,
            Settings.ACTION_ADD_ACCOUNT,
            Settings.ACTION_AIRPLANE_MODE_SETTINGS,
            Settings.ACTION_APN_SETTINGS,
    };
    for (String action : plainActionsBefore) {
        assertCanBeHandled(new Intent(action));
    }

    // App details requires a "package:" data URI pointing at an installed package.
    assertCanBeHandled(new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS)
            .setData(Uri.parse("package:" + getContext().getPackageName())));

    final String[] plainActionsAfter = {
            Settings.ACTION_APPLICATION_DEVELOPMENT_SETTINGS,
            Settings.ACTION_APPLICATION_SETTINGS,
            Settings.ACTION_BLUETOOTH_SETTINGS,
            Settings.ACTION_DATA_ROAMING_SETTINGS,
            Settings.ACTION_DATE_SETTINGS,
            Settings.ACTION_DEVICE_INFO_SETTINGS,
            Settings.ACTION_DISPLAY_SETTINGS,
            Settings.ACTION_INPUT_METHOD_SETTINGS,
            Settings.ACTION_INTERNAL_STORAGE_SETTINGS,
            Settings.ACTION_LOCALE_SETTINGS,
            Settings.ACTION_LOCATION_SOURCE_SETTINGS,
            Settings.ACTION_MANAGE_ALL_APPLICATIONS_SETTINGS,
            Settings.ACTION_MANAGE_APPLICATIONS_SETTINGS,
            Settings.ACTION_MEMORY_CARD_SETTINGS,
            Settings.ACTION_NETWORK_OPERATOR_SETTINGS,
            Settings.ACTION_PRIVACY_SETTINGS,
            Settings.ACTION_QUICK_LAUNCH_SETTINGS,
            Settings.ACTION_SEARCH_SETTINGS,
            Settings.ACTION_SECURITY_SETTINGS,
            Settings.ACTION_SETTINGS,
            Settings.ACTION_SOUND_SETTINGS,
            Settings.ACTION_SYNC_SETTINGS,
            Settings.ACTION_SYSTEM_UPDATE_SETTINGS,
            Settings.ACTION_USER_DICTIONARY_SETTINGS,
            Settings.ACTION_WIFI_IP_SETTINGS,
            Settings.ACTION_WIFI_SETTINGS,
            Settings.ACTION_WIRELESS_SETTINGS,
    };
    for (String action : plainActionsAfter) {
        assertCanBeHandled(new Intent(action));
    }
}
/** Asserts that at least one activity resolves for the given intent. */
private void assertCanBeHandled(final Intent intent) {
    final PackageManager pm = mContext.getPackageManager();
    final List<ResolveInfo> resolvers = pm.queryIntentActivities(intent, 0);
    assertNotNull(resolvers);
    assertTrue(resolvers.size() > 0);
}
}
| |
package src.usi.testcase;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import src.usi.application.ActionManager;
import src.usi.application.ApplicationHelper;
import src.usi.gui.GuiStateManager;
import src.usi.gui.structure.Action_widget;
import src.usi.gui.structure.GUI;
import src.usi.gui.structure.Input_widget;
import src.usi.gui.structure.Option_input_widget;
import src.usi.gui.structure.Selectable_widget;
import src.usi.gui.structure.Widget;
import src.usi.gui.structure.Window;
import src.usi.pattern.dialogs.Pattern_dialogs;
import src.usi.testcase.structure.Click;
import src.usi.testcase.structure.GUIAction;
import src.usi.testcase.structure.GUITestCase;
import src.usi.testcase.structure.Select;
import src.usi.util.DijkstraAlgorithm;
import src.usi.util.Graph;
import src.usi.util.Vertex;
import com.rational.test.ft.object.interfaces.TestObject;
public class TestCaseRunner {
// used for go actions
private final GUI gui;
private Map<Pair, List<String>> select_support_initial;
private Map<Pair, List<String>> select_support_added;
private Map<Pair, List<Integer>> select_support_added_indexes;
private final boolean skip_dialogs = true;
/**
 * Creates a runner backed by the given GUI model, which is used to map
 * observed windows to known ones and to compute "go" action sequences
 * toward a test case's initial window. May be null; runTestCase then fails
 * if navigation is needed.
 */
public TestCaseRunner(final GUI gui) {
    this.gui = gui;
}
/**
 * Executes the given test case against the application under test.
 *
 * <p>The application is (re)started, the runner navigates to the test case's
 * initial window if necessary (using Dijkstra over the GUI model), then each
 * action is executed in order. Abstract Select actions have their index
 * remapped to the element's real position via the select_support_* maps.
 * Execution stops early if the window reached after an action does not match
 * the window the next action expects.</p>
 *
 * @param tc the test case to run
 * @return the executed actions, the windows they produced, and the actions
 *         actually sent to the application (after Select remapping)
 * @throws Exception if no window can be found, the initial window cannot be
 *         reached, or action execution fails during navigation
 */
public GUITestCaseResult runTestCase(final GUITestCase tc) throws Exception {

    final ApplicationHelper app = ApplicationHelper.getInstance();
    if (app.isRunning()) {
        app.restartApplication();
    } else {
        app.startApplication();
    }
    final GuiStateManager gmanager = GuiStateManager.getInstance();
    gmanager.readGUI();

    Window curr = null;
    curr = gmanager.getCurrentActiveWindows();

    if (curr == null) {
        // No window detected: restart once more and give the app time to come up.
        System.out.println("Testcase Runner: no windows found.");
        if (app.isRunning()) {
            app.restartApplication();
        } else {
            app.startApplication();
        }
        Thread.sleep(10000);
        gmanager.readGUI();
        curr = gmanager.getCurrentActiveWindows();
        // NOTE(review): curr may still be null here; curr.isSimilar(initial)
        // below would then throw NPE — confirm whether that is acceptable.
    }

    // Per-run bookkeeping for abstract Select index remapping.
    this.select_support_initial = new HashMap<>();
    this.select_support_added = new HashMap<>();
    this.select_support_added_indexes = new HashMap<>();

    final List<GUIAction> actions = tc.getActions();
    // structures needed to construct the GUITestCaseResult
    final List<GUIAction> actions_executed = new ArrayList<>();
    final List<GUIAction> actions_actually_executed = new ArrayList<>();
    final List<Window> results = new ArrayList<>();

    this.updatedStructuresForSelect(curr);

    final Window initial = tc.getActions().get(0).getWindow();
    if (!curr.isSimilar(initial)) {
        // We are not in the right window: navigate to it via the GUI model.
        if (this.gui == null) {
            throw new Exception(
                    "TestCaseRunner - runTestCase: gui is required to reach initial window.");
        }
        // Map the observed window onto its counterpart in the GUI model.
        Window curr_mapped_w = null;
        for (final Window ww : this.gui.getWindows()) {
            if (ww.isSimilar(curr)) {
                curr_mapped_w = ww;
                break;
            }
        }
        if (curr_mapped_w == null) {
            throw new Exception(
                    "TestCaseRunner - runTestCase: current window could not be found in gui.");
        }
        final List<GUIAction> go_actions = this.getActionSequenceToGO(curr_mapped_w, initial);
        for (final GUIAction go : go_actions) {
            try {
                ActionManager.executeAction(go);
            } catch (final Exception e) {
                System.out.println("ERROR EXECUTING ACTION");
                e.printStackTrace();
                throw new Exception(
                        "TestCaseRunner - runTestCase: impossible to reach initial window.");
            }
            gmanager.readGUI();
            this.dealWithDialogsWindow(gmanager);
            actions_actually_executed.add(go);
            this.updatedStructuresForSelect(gmanager.getCurrentActiveWindows());
        }
    }
    if (!gmanager.getCurrentActiveWindows().isSimilar(initial)) {
        throw new Exception("TestCaseRunner - runTestCase: impossible to reach initial window.");
    }

    mainloop: for (int cont = 0; cont < actions.size(); cont++) {
        final GUIAction act = actions.get(cont);
        curr = gmanager.getCurrentActiveWindows();

        GUIAction act_to_execute = act;
        // For abstract Select actions the stored index must be adjusted to the
        // element's real position in the running application.
        if ((act instanceof Select)) {
            final Select sel = (Select) act;
            final boolean abs = sel.isAbstract();
            if (abs) {
                final Selectable_widget sw = (Selectable_widget) act.getWidget();
                int ind = sel.getIndex();
                final Pair new_p = new Pair(act.getWindow(), sw);
                boolean found = false;
                for (final Pair p : this.select_support_initial.keySet()) {
                    if (p.isSame(new_p)) {
                        if (this.select_support_added_indexes.get(p).size() <= ind) {
                            // The selectable widget has fewer added elements than
                            // expected, so fall back to the last available index.
                            ind = this.select_support_added_indexes.get(p).size() - 1;
                            if (ind == -1) {
                                // NOTE(review): this continues the Pair loop, not
                                // the action loop (mainloop) — presumably intended
                                // to fall through to the !found branch; confirm.
                                continue;
                            }
                        }
                        final int size = this.select_support_initial.get(p).size()
                                + this.select_support_added.get(p).size();
                        final int index = this.select_support_added_indexes.get(p).get(ind);
                        // TODO: we need to find a way to put the right selected index
                        final Selectable_widget new_sw = new Selectable_widget(sw.getId(),
                                sw.getLabel(), sw.getClasss(), sw.getX(), sw.getY(),
                                sw.getWidth(), sw.getHeight(), size, 0);
                        final GUIAction select = new Select(act.getWindow(), act.getOracle(),
                                new_sw, index, false);
                        act_to_execute = select;
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    // No mapping available: record the action as executed with the
                    // current window as its result and move to the next action.
                    actions_executed.add(act);
                    if (gmanager.getCurrentActiveWindows() != null) {
                        results.add(this.getKnownWindowIfAny(gmanager.getCurrentActiveWindows()));
                    } else {
                        results.add(null);
                    }
                    continue mainloop;
                }
            }
        }
        if (ActionManager.executeAction(act_to_execute)) {
            gmanager.readGUI();
            this.dealWithDialogsWindow(gmanager);
            actions_actually_executed.add(act_to_execute);
            this.updatedStructuresForSelect(gmanager.getCurrentActiveWindows());
            actions_executed.add(act);
            if (gmanager.getCurrentActiveWindows() != null) {
                results.add(this.getKnownWindowIfAny(gmanager.getCurrentActiveWindows()));
            } else {
                results.add(null);
            }
            if (cont < actions.size() - 1) {
                // Stop if the next action expects a different window than the one
                // we actually reached.
                // NOTE(review): results.get(...) can be null here (added above),
                // in which case .getId() throws NPE — confirm this cannot happen.
                if (!actions.get(cont + 1).getWindow().getId()
                        .equals(results.get(results.size() - 1).getId())) {
                    break mainloop;
                }
            }
        } else {
            // Action could not be executed: record it with the current window.
            actions_executed.add(act);
            if (gmanager.getCurrentActiveWindows() != null) {
                results.add(this.getKnownWindowIfAny(gmanager.getCurrentActiveWindows()));
            } else {
                results.add(null);
            }
            if (cont < actions.size() - 1) {
                // Same early-exit check as in the success branch.
                if (!actions.get(cont + 1).getWindow().getId()
                        .equals(results.get(results.size() - 1).getId())) {
                    break mainloop;
                }
            }
        }
    }
    app.closeApplication();

    final GUITestCaseResult res = new GUITestCaseResult(tc, actions_executed, results,
            actions_actually_executed);
    return res;
}
/**
 * Refreshes the select_support_* maps against the selectable widgets of the
 * given window: records widgets seen for the first time, drops elements that
 * disappeared, and tracks elements added since the initial snapshot together
 * with their current indexes. Used to remap abstract Select indexes.
 *
 * @param curr the currently active window; ignored when null
 * @throws Exception propagated from Selectable_widget.getElements
 */
private void updatedStructuresForSelect(final Window curr) throws Exception {

    if (curr == null) {
        return;
    }
    loop: for (final Selectable_widget sw : curr.getSelectableWidgets()) {
        final TestObject to = sw.getTo();
        final List<String> curr_el = Selectable_widget.getElements(to);
        final Pair new_p = new Pair(curr, sw);
        for (final Pair p : this.select_support_initial.keySet()) {
            if (p.isSame(new_p)) {
                // Elements that are not available anymore are removed; their
                // positions are remembered to detect in-place updates below.
                final List<Integer> removed_indexes = new ArrayList<>();
                for (int c = 0; c < this.select_support_added.get(p).size(); c++) {
                    final String el = this.select_support_added.get(p).get(c);
                    if (!curr_el.contains(el)) {
                        this.select_support_added.get(p).remove(c);
                        this.select_support_added_indexes.get(p).remove(c);
                        removed_indexes.add(c);
                        c--; // list shrank: revisit this position
                    }
                }
                for (final String el : curr_el) {
                    if (!this.select_support_initial.get(p).contains(el)
                            && !this.select_support_added.get(p).contains(el)) {
                        if (removed_indexes.size() == 1) {
                            // Exactly one element disappeared and one appeared:
                            // treat it as an update of the previous element
                            // (with >1 or 0 removals we treat it as an addition).
                            // TODO: this may work only for CRUD
                            this.select_support_added.get(p).add(removed_indexes.get(0), el);
                            this.select_support_added_indexes.get(p).add(
                                    removed_indexes.get(0), curr_el.indexOf(el));
                        } else {
                            this.select_support_added.get(p).add(el);
                            this.select_support_added_indexes.get(p).add(curr_el.indexOf(el));
                        }
                    } else if (this.select_support_added.get(p).contains(el)) {
                        // Known added element: refresh its recorded index.
                        // NOTE: remove(int) here removes by position, not by value.
                        this.select_support_added_indexes.get(p).remove(
                                this.select_support_added.get(p).indexOf(el));
                        this.select_support_added_indexes.get(p).add(
                                this.select_support_added.get(p).indexOf(el),
                                curr_el.indexOf(el));
                    }
                }
                continue loop;
            }
        }
        // The selectable widget is seen for the first time: snapshot its elements.
        this.select_support_added.put(new_p, new ArrayList<String>());
        this.select_support_added_indexes.put(new_p, new ArrayList<Integer>());
        this.select_support_initial.put(new_p, curr_el);
    }
}
/**
 * Checks whether the given window is already present in the GUI model and, if
 * so, returns a copy of the input window carrying the id it has in the GUI.
 * An exact match ({@code isSame}) is preferred over a similarity match
 * ({@code isSimilar}); if neither matches, the input window is returned as is.
 *
 * <p>Fix over the previous version: the ~90-line remapping body was duplicated
 * verbatim for the isSame and isSimilar branches (and its filter lambda was
 * duplicated twice more). The logic is now shared via private helpers; the
 * behavior is unchanged.</p>
 *
 * @param in the observed window
 * @return the remapped window, or {@code in} when no known window matches
 * @throws Exception propagated from widget/window construction
 */
private Window getKnownWindowIfAny(final Window in) throws Exception {

    for (final Window w : this.gui.getWindows()) {
        if (w.isSame(in)) {
            return this.mapToKnownWindow(in, w);
        }
    }
    for (final Window w : this.gui.getWindows()) {
        if (w.isSimilar(in)) {
            return this.mapToKnownWindow(in, w);
        }
    }
    return in;
}

/**
 * Builds a copy of {@code in} that uses the ids of the known window {@code w}
 * for the window itself and for each of its widgets, preserving the observed
 * geometry, labels, values and descriptors of {@code in}.
 *
 * <p>The two filtered widget lists are iterated in lockstep: matching windows
 * are expected to expose the same widgets in the same order (see comment in
 * the original code: "we can loop only once since if they are the same they
 * must have the same widgets number").</p>
 */
private Window mapToKnownWindow(final Window in, final Window w) throws Exception {
    final Window out = new Window(in.getTo(), w.getId(), in.getLabel(), in.getClasss(),
            in.getX(), in.getY(), in.getWidth(), in.getHeight(), in.isModal());
    out.setRoot(w.isRoot());

    final List<Widget> in_widgets = filterPositionStableWidgets(in);
    final List<Widget> known_widgets = filterPositionStableWidgets(w);

    for (int x = 0; x < in_widgets.size(); x++) {
        if (known_widgets.get(x) instanceof Action_widget) {
            final Action_widget aw = (Action_widget) in_widgets.get(x);
            final Action_widget aw2 = (Action_widget) known_widgets.get(x);
            final Action_widget new_aw = new Action_widget(aw2.getId(), aw.getLabel(),
                    aw.getClasss(), aw.getX(), aw.getY(), aw.getWidth(), aw.getHeight());
            new_aw.setDescriptor(aw.getDescriptor());
            out.addWidget(new_aw);
        } else if (known_widgets.get(x) instanceof Input_widget) {
            final Input_widget iw = (Input_widget) in_widgets.get(x);
            if (known_widgets.get(x) instanceof Option_input_widget) {
                final Option_input_widget iw2 = (Option_input_widget) known_widgets.get(x);
                final Option_input_widget oiw = (Option_input_widget) iw;
                final Option_input_widget new_oiw = new Option_input_widget(
                        iw2.getId(), iw.getLabel(), iw.getClasss(), iw.getX(),
                        iw.getY(), oiw.getWidth(), oiw.getHeight(), oiw.getSize(),
                        oiw.getSelected());
                new_oiw.setDescriptor(iw.getDescriptor());
                out.addWidget(new_oiw);
            } else {
                final Input_widget iw2 = (Input_widget) known_widgets.get(x);
                final Input_widget new_iw = new Input_widget(iw2.getId(),
                        iw.getLabel(), iw.getClasss(), iw.getX(), iw.getY(),
                        iw.getWidth(), iw.getHeight(), iw.getValue());
                new_iw.setDescriptor(iw.getDescriptor());
                out.addWidget(new_iw);
            }
        }
    }
    // Selectable widgets are remapped separately: selecting an element can
    // change the widget's position, so they were excluded from the lists above.
    for (int x = 0; x < in.getSelectableWidgets().size(); x++) {
        if (w.getSelectableWidgets().get(x) instanceof Selectable_widget) {
            final Selectable_widget sw = in.getSelectableWidgets().get(x);
            final Selectable_widget sw2 = w.getSelectableWidgets().get(x);
            final Selectable_widget new_sw = new Selectable_widget(sw2.getId(),
                    sw.getLabel(), sw.getClasss(), sw.getX(), sw.getY(), sw.getWidth(),
                    sw.getHeight(), sw.getSize(), sw.getSelected());
            new_sw.setDescriptor(sw.getDescriptor());
            out.addWidget(new_sw);
        }
    }
    return out;
}

/**
 * Returns the window's widgets excluding those whose position is not stable:
 * dynamic "Window -" menu items and selectable widgets (handled separately).
 */
private static List<Widget> filterPositionStableWidgets(final Window window) {
    return window
            .getWidgets()
            .stream()
            .filter(e -> {
                if (e instanceof Action_widget
                        && e.getClasss().toLowerCase().equals("menuitemui")
                        && e.getLabel().toLowerCase().startsWith("window -")) {
                    return false;
                }
                if (e instanceof Selectable_widget) {
                    return false;
                }
                return true;
            }).collect(Collectors.toList());
}
/**
 * Detects known dialog windows sitting on top of the application and executes
 * the actions needed to get past them, re-reading the GUI after each action.
 * Runs two passes because dismissing one dialog can reveal another behind it.
 * No-op when {@code skip_dialogs} is false.
 *
 * <p>Fix over the previous version: the pass body was duplicated verbatim;
 * it is now a two-iteration loop with identical behavior.</p>
 *
 * @param gmanager the GUI state manager used to read the current window
 * @throws Exception propagated from action execution or GUI reading
 */
private void dealWithDialogsWindow(final GuiStateManager gmanager) throws Exception {

    if (!this.skip_dialogs) {
        return;
    }
    for (int pass = 0; pass < 2; pass++) {
        if (gmanager.getCurrentActiveWindows() != null) {
            final Window current = gmanager.getCurrentActiveWindows();
            for (final Pattern_dialogs dialog : Pattern_dialogs.values()) {
                if (dialog.isMatch(current)) {
                    final List<GUIAction> acts = dialog.getActionsToGoPast(current);
                    for (final GUIAction act : acts) {
                        ActionManager.executeAction(act);
                        gmanager.readGUI();
                    }
                }
            }
        }
    }
}
/**
 * Keys the select_support_* maps: a window together with one of its
 * selectable widgets. Equality is structural (isSame), not identity-based.
 */
private class Pair {

    // The window the selectable widget belongs to.
    Window w;
    // The selectable widget being tracked.
    Selectable_widget sw;

    public Pair(final Window w, final Selectable_widget sw) {
        this.w = w;
        this.sw = sw;
    }

    /**
     * Returns true when the two pairs refer to similar windows and the
     * selectable widgets sit at the same position among the windows'
     * selectable widgets and are themselves similar.
     */
    public boolean isSame(final Pair p) {

        if (this.w.isSimilar(p.w)) {
            final Widget ww = p.w.getWidget(p.sw.getId());
            final int index = p.w.getSelectableWidgets().indexOf(ww);
            // NOTE(review): asserts are disabled unless run with -ea; a -1
            // index would then silently compare positions -1 == -1.
            assert (index != -1);
            final Widget ww2 = this.w.getWidget(this.sw.getId());
            final int index2 = this.w.getSelectableWidgets().indexOf(ww2);
            assert (index2 != -1);
            if (index != index2
                    || !this.w.getSelectableWidgets().get(index2)
                            .isSimilar(p.w.getSelectableWidgets().get(index))) {
                return false;
            }
            return true;
        }
        return false;
    }
}
/**
 * Computes a sequence of Click actions that navigates from {@code current} to
 * {@code targetw}, following the shortest path in the GUI model (Dijkstra over
 * the window graph). For each edge on the path, the clicked widget is the
 * first static backward link of the target window that exists in the source
 * window.
 *
 * @param current the window to start from (must exist in the GUI model)
 * @param targetw the window to reach
 * @return the ordered list of clicks to execute
 * @throws Exception when no path exists or no linking widget can be found
 */
private List<GUIAction> getActionSequenceToGO(final Window current, final Window targetw)
        throws Exception {

    final List<GUIAction> out = new ArrayList<>();
    final Graph g = Graph.convertGUI(TestCaseRunner.this.gui);
    Vertex source = g.getVertex(current.getId());
    Vertex target = g.getVertex(targetw.getId());
    final DijkstraAlgorithm alg = new DijkstraAlgorithm(g);
    alg.execute(source);
    final LinkedList<Vertex> path = alg.getPath(target);
    if (path == null) {
        throw new Exception(
                "GUIAction - getActionSequence: action sequence could not be found.");
    }
    // Walk consecutive vertices along the path; each step becomes one Click.
    source = path.pop();
    while (!path.isEmpty()) {
        target = path.pop();
        Click click = null;
        final Window s = TestCaseRunner.this.gui.getWindow(source.getId());
        final Window t = TestCaseRunner.this.gui.getWindow(target.getId());
        for (final Action_widget aw : TestCaseRunner.this.gui.getStaticBackwardLinks(t.getId())) {
            if (s.getWidget(aw.getId()) != null) {
                click = new Click(s, null, aw);
                break;
            }
        }
        if (click == null) {
            throw new Exception(
                    "GUIAction - getActionSequence: error generating action sequence.");
        }
        out.add(click);
        source = target;
    }
    return out;
}
}
| |
/*******************************************************************************
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.apache.stanbol.enhancer.engines.speechtotext;
import java.io.File;
import java.io.InputStream;
import java.util.Collection;
import java.util.Dictionary;
import java.util.Hashtable;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleException;
import org.osgi.framework.BundleListener;
import org.osgi.framework.Filter;
import org.osgi.framework.FrameworkListener;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.component.ComponentInstance;
/**
 * Minimal {@link ComponentContext} stand-in for unit tests. It holds a
 * property dictionary (returned by {@link #getProperties()}) and answers
 * every other lookup with a null/no-op stub; {@link #getBundleContext()}
 * returns an anonymous {@link BundleContext} whose only meaningful method is
 * {@code getDataFile}, pointing at the system temp directory.
 */
public class MockComponentContext implements ComponentContext {

    // Component properties exposed through getProperties().
    protected final Dictionary<String, Object> properties;

    public MockComponentContext() {
        properties = new Hashtable<String, Object>();
    }

    public MockComponentContext(Dictionary<String, Object> properties) {
        this.properties = properties;
    }

    // Component lifecycle is not modelled by this mock: both are no-ops.
    public void disableComponent(String name) {
    }

    public void enableComponent(String name) {
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    public BundleContext getBundleContext() {
        // Anonymous stub bundle context: registrations and lookups return
        // null/false, listeners are ignored, and only getDataFile is useful.
        return new BundleContext() {
            @Override
            public boolean ungetService(ServiceReference reference) {
                return false;
            }

            @Override
            public void removeServiceListener(ServiceListener listener) {
            }

            @Override
            public void removeFrameworkListener(FrameworkListener listener) {
            }

            @Override
            public void removeBundleListener(BundleListener listener) {
            }

            @Override
            public ServiceRegistration registerService(String clazz,
                    Object service, Dictionary properties) {
                return null;
            }

            @Override
            public ServiceRegistration registerService(String[] clazzes,
                    Object service, Dictionary properties) {
                return null;
            }

            @Override
            public Bundle installBundle(String location, InputStream input)
                    throws BundleException {
                return null;
            }

            @Override
            public Bundle installBundle(String location) throws BundleException {
                return null;
            }

            @Override
            public ServiceReference[] getServiceReferences(String clazz,
                    String filter) throws InvalidSyntaxException {
                return null;
            }

            @Override
            public ServiceReference getServiceReference(String clazz) {
                return null;
            }

            @Override
            public Object getService(ServiceReference reference) {
                return null;
            }

            @Override
            public String getProperty(String key) {
                return null;
            }

            @Override
            public File getDataFile(String filename) {
                // Tests get a writable location: the JVM's temp directory.
                return new File(System.getProperty("java.io.tmpdir"));
            }

            @Override
            public Bundle[] getBundles() {
                return null;
            }

            @Override
            public Bundle getBundle(long id) {
                return null;
            }

            @Override
            public Bundle getBundle() {
                return null;
            }

            @Override
            public ServiceReference[] getAllServiceReferences(String clazz,
                    String filter) throws InvalidSyntaxException {
                return null;
            }

            @Override
            public Filter createFilter(String filter)
                    throws InvalidSyntaxException {
                return null;
            }

            @Override
            public void addServiceListener(ServiceListener listener,
                    String filter) throws InvalidSyntaxException {
            }

            @Override
            public void addServiceListener(ServiceListener listener) {
            }

            @Override
            public void addFrameworkListener(FrameworkListener listener) {
            }

            @Override
            public void addBundleListener(BundleListener listener) {
            }

            // Generified OSGi 4.3+ variants are not needed by these tests.
            @Override
            public <S> ServiceRegistration<S> registerService(Class<S> clazz, S service, Dictionary<String, ?> properties) {
                throw new UnsupportedOperationException("Not supported yet.");
            }

            @Override
            public <S> ServiceReference<S> getServiceReference(Class<S> clazz) {
                throw new UnsupportedOperationException("Not supported yet.");
            }

            @Override
            public <S> Collection<ServiceReference<S>> getServiceReferences(Class<S> clazz, String filter) throws InvalidSyntaxException {
                throw new UnsupportedOperationException("Not supported yet.");
            }

            @Override
            public Bundle getBundle(String location) {
                throw new UnsupportedOperationException("Not supported yet.");
            }
        };
    }

    public ComponentInstance getComponentInstance() {
        return null;
    }

    public Dictionary<String, Object> getProperties() {
        return properties;
    }

    @SuppressWarnings("rawtypes")
    public ServiceReference getServiceReference() {
        return null;
    }

    public Bundle getUsingBundle() {
        return null;
    }

    public Object locateService(String name) {
        return null;
    }

    @SuppressWarnings("rawtypes")
    public Object locateService(String name, ServiceReference reference) {
        return null;
    }

    public Object[] locateServices(String name) {
        return null;
    }
}
| |
/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.world.volume.game;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.item.inventory.ItemStack;
import org.spongepowered.api.profile.GameProfile;
import org.spongepowered.api.util.Direction;
import org.spongepowered.api.world.volume.block.BlockVolume;
import org.spongepowered.math.vector.Vector3i;
import java.time.Duration;
import java.util.Objects;
/**
 * A {@link BlockVolume} able to simulate the block interactions a player can
 * perform: single hits, use ("interact") actions with or without an item,
 * block placement, and digging a block until it breaks.
 */
public interface InteractableVolume extends BlockVolume, LocationBaseDataHolder {

    /**
     * Simulates hitting a block as if a player had done so.
     *
     * <p>The difference between this and {@link #digBlock} is that this will
     * only do a single instantaneous "click" whereas digBlock will simulate
     * holding the primary mouse button until the block breaks.</p>
     *
     * @param position The position of the block
     * @param side The side of the block to interact with
     * @param profile The game profile of the player this is imitating
     * @return True if the interact succeeded
     */
    default boolean hitBlock(Vector3i position, Direction side, GameProfile profile) {
        Objects.requireNonNull(position, "position");
        return this.hitBlock(position.x(), position.y(), position.z(), side, profile);
    }

    /**
     * Simulates hitting a block as if a player had done so.
     *
     * <p>The difference between this and {@link #digBlock} is that this will
     * only do a single instantaneous "click" whereas digBlock will simulate
     * holding the primary mouse button until the block breaks.</p>
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param side The side of the block to interact with
     * @param profile The game profile of the player this is imitating
     * @return True if the interact succeeded
     */
    boolean hitBlock(int x, int y, int z, Direction side, GameProfile profile);

    /**
     * Simulates the interaction the block as if a player had done so.
     *
     * @param position The position of the block
     * @param side The side of the block to interact with
     * @param profile The game profile of the player this is imitating
     * @return True if the interact succeeded
     */
    default boolean interactBlock(Vector3i position, Direction side, GameProfile profile) {
        Objects.requireNonNull(position, "position");
        return this.interactBlock(position.x(), position.y(), position.z(), side, profile);
    }

    /**
     * Simulates the interaction the block as if a player had done so.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param side The side of the block to interact with
     * @param profile The game profile of the player this is imitating
     * @return True if the interact succeeded
     */
    boolean interactBlock(int x, int y, int z, Direction side, GameProfile profile);

    /**
     * Simulates the interaction the block using the given item as if the
     * player had done so.
     *
     * @param position The position of the block
     * @param itemStack The item
     * @param side The side of the block to interact with
     * @param profile The game profile of the player this is imitating
     * @return True if the interact succeeded
     */
    default boolean interactBlockWith(Vector3i position, ItemStack itemStack, Direction side, GameProfile profile) {
        Objects.requireNonNull(position, "position");
        return this.interactBlockWith(position.x(), position.y(), position.z(), itemStack, side, profile);
    }

    /**
     * Simulates the interaction the block using the given item as if the
     * player had done so.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param itemStack The item
     * @param side The side of the block to interact with
     * @param profile The game profile of the player this is imitating
     * @return True if the interact succeeded
     */
    boolean interactBlockWith(int x, int y, int z, ItemStack itemStack, Direction side, GameProfile profile);

    /**
     * Simulates the placement of a block at the given location as if a
     * player had done so.
     *
     * @param position The position of the block
     * @param block The block state to be set to
     * @param side The face of the block to place on
     * @param profile The game profile of the player this is imitating
     * @return Whether the block was successfully set
     */
    default boolean placeBlock(Vector3i position, BlockState block, Direction side, GameProfile profile) {
        Objects.requireNonNull(position, "position");
        return this.placeBlock(position.x(), position.y(), position.z(), block, side, profile);
    }

    /**
     * Simulates the placement of a block at the given location as if a
     * player had done so.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param block The block state to be set to
     * @param side The face of the block to place on
     * @param profile The game profile of the player this is imitating
     * @return Whether the block was successfully set
     */
    boolean placeBlock(int x, int y, int z, BlockState block, Direction side, GameProfile profile);

    /**
     * Simulate the digging of the block as if a player had done so.
     *
     * @param position The position of the block
     * @param profile The game profile of the player this is imitating
     * @return Whether the block was destroyed
     */
    default boolean digBlock(Vector3i position, GameProfile profile) {
        Objects.requireNonNull(position, "position");
        return this.digBlock(position.x(), position.y(), position.z(), profile);
    }

    /**
     * Simulate the digging of the block as if a player had done so.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param profile The game profile of the player this is imitating
     * @return Whether the block was destroyed
     */
    boolean digBlock(int x, int y, int z, GameProfile profile);

    /**
     * Simulate the digging of the block with the given tool as if a player
     * had done so.
     *
     * @param position The position of the block
     * @param itemStack The tool
     * @param profile The game profile of the player this is imitating
     * @return Whether the block was destroyed
     */
    default boolean digBlockWith(Vector3i position, ItemStack itemStack, GameProfile profile) {
        Objects.requireNonNull(position, "position");
        return this.digBlockWith(position.x(), position.y(), position.z(), itemStack, profile);
    }

    /**
     * Simulate the digging of the block with the given tool as if a player
     * had done so.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param itemStack The tool
     * @param profile The game profile of the player this is imitating
     * @return Whether the block was destroyed
     */
    boolean digBlockWith(int x, int y, int z, ItemStack itemStack, GameProfile profile);

    /**
     * Gets the {@link Duration} it takes to dig this block with the
     * specified item.
     *
     * @param position The position of the block
     * @param itemStack The item to pretend-dig with
     * @param profile The game profile of the player this is imitating
     * @return The duration it takes to dig the block
     */
    default Duration blockDigTimeWith(Vector3i position, ItemStack itemStack, GameProfile profile) {
        Objects.requireNonNull(position, "position");
        return this.blockDigTimeWith(position.x(), position.y(), position.z(), itemStack, profile);
    }

    /**
     * Gets the {@link Duration} it takes to dig this block with the
     * specified item.
     *
     * @param x The X position
     * @param y The Y position
     * @param z The Z position
     * @param itemStack The item to pretend-dig with
     * @param profile The game profile of the player this is imitating
     * @return The duration it takes to dig the block
     */
    Duration blockDigTimeWith(int x, int y, int z, ItemStack itemStack, GameProfile profile);
}
| |
package com.mentor.nucleus.bp.als.oal.test;
import java.io.StringReader;
import java.util.UUID;
import org.eclipse.jface.preference.IPreferenceStore;
import antlr.RecognitionException;
import antlr.TokenStreamException;
import antlr.TokenStreamRecognitionException;
import com.mentor.nucleus.bp.als.oal.OalLexer;
import com.mentor.nucleus.bp.als.oal.OalParser;
import com.mentor.nucleus.bp.als.oal.Oal_validate;
import com.mentor.nucleus.bp.core.ActionHome_c;
import com.mentor.nucleus.bp.core.Action_c;
import com.mentor.nucleus.bp.core.Block_c;
import com.mentor.nucleus.bp.core.Bridge_c;
import com.mentor.nucleus.bp.core.ClassStateMachine_c;
import com.mentor.nucleus.bp.core.Component_c;
import com.mentor.nucleus.bp.core.CorePlugin;
import com.mentor.nucleus.bp.core.ExecutableProperty_c;
import com.mentor.nucleus.bp.core.ExternalEntity_c;
import com.mentor.nucleus.bp.core.Function_c;
import com.mentor.nucleus.bp.core.Gd_c;
import com.mentor.nucleus.bp.core.InstanceStateMachine_c;
import com.mentor.nucleus.bp.core.InterfaceOperation_c;
import com.mentor.nucleus.bp.core.InterfaceReference_c;
import com.mentor.nucleus.bp.core.InterfaceSignal_c;
import com.mentor.nucleus.bp.core.Interface_c;
import com.mentor.nucleus.bp.core.ModelClass_c;
import com.mentor.nucleus.bp.core.MooreActionHome_c;
import com.mentor.nucleus.bp.core.Oalconstants_c;
import com.mentor.nucleus.bp.core.Ooaofooa;
import com.mentor.nucleus.bp.core.Operation_c;
import com.mentor.nucleus.bp.core.Package_c;
import com.mentor.nucleus.bp.core.PackageableElement_c;
import com.mentor.nucleus.bp.core.Port_c;
import com.mentor.nucleus.bp.core.ProvidedExecutableProperty_c;
import com.mentor.nucleus.bp.core.ProvidedOperation_c;
import com.mentor.nucleus.bp.core.ProvidedSignal_c;
import com.mentor.nucleus.bp.core.RequiredExecutableProperty_c;
import com.mentor.nucleus.bp.core.RequiredOperation_c;
import com.mentor.nucleus.bp.core.RequiredSignal_c;
import com.mentor.nucleus.bp.core.StateMachineState_c;
import com.mentor.nucleus.bp.core.StateMachine_c;
import com.mentor.nucleus.bp.core.TransitionActionHome_c;
import com.mentor.nucleus.bp.core.Transition_c;
import com.mentor.nucleus.bp.core.common.BridgePointPreferencesStore;
import com.mentor.nucleus.bp.core.common.ClassQueryInterface_c;
import com.mentor.nucleus.bp.test.common.BaseTest;
import com.mentor.nucleus.bp.test.common.TestingUtilities;
/**
 * Exercises OAL parsing of the {@code sender} keyword in every activity
 * context (interface operations/signals, state machine states and
 * transitions, bridges, functions, and class-based/instance operations)
 * with implicit component addressing disabled via preference.
 */
public class ImplicitComponentAddressTest_Generics extends BaseTest {
    // One-shot setUp guard: workspace import and preference setup run once
    // for the whole fixture.
    public static boolean configured = false;
    private static String m_workspace_path = ""; //$NON-NLS-1$
    private static String m_comp_pkg_name = "ComponentSyntaxTest"; //$NON-NLS-1$

    public ImplicitComponentAddressTest_Generics() {
        super(null, null);
    }

    /* (non-Javadoc)
     * @see junit.framework.TestCase#setUp()
     */
    protected void setUp() throws Exception {
        if (configured) {
            return;
        }
        configured = true;
        super.setUp();
        if (m_workspace_path.equals(""))//$NON-NLS-1$
        {
            m_workspace_path = System.getProperty("WORKSPACE_PATH");//$NON-NLS-1$
        }
        assertNotNull( m_workspace_path );
        if (m_logfile_path == null || m_logfile_path.equals(""))
        {
            m_logfile_path = System.getProperty("LOGFILE_PATH");
        }
        assertNotNull( m_logfile_path );
        TestingUtilities.importTestingProjectIntoWorkspace(m_comp_pkg_name);
        modelRoot = Ooaofooa.getInstance(Ooaofooa.createModelRootId(
                m_comp_pkg_name, m_comp_pkg_name, true));
        m_sys = getSystemModel(m_comp_pkg_name);
        // Disable implicit component addressing so each test exercises the
        // explicit "sender" validation paths.
        IPreferenceStore store = CorePlugin.getDefault().getPreferenceStore();
        store.setValue(
                BridgePointPreferencesStore.ALLOW_IMPLICIT_COMPONENT_ADDRESSING, false);
    }

    /** sender is legal inside an incoming (provided) interface operation. */
    public void testProvidedOperation() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.PROV_OPERATION_TYPE, "ProvidedTestInterface::ProvOp");
        assertEquals("Unexpected error:", "", x);
    }

    /** sender is legal inside an incoming (provided) interface signal. */
    public void testProvidedSignal() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.PROV_SIGNAL_TYPE, "ProvidedTestInterface::ProvSig");
        assertEquals("Unexpected error:", "", x);
    }

    /** sender is legal in a state whose incoming transitions all carry signals. */
    public void testClassStateMachineAllSignals() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.STATE_TYPE, "Class State Machine::Signals Only");
        assertEquals("Unexpected error:", "", x);
    }

    /** sender is rejected in an outgoing (required) interface operation. */
    public void testRequiredOperation() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.REQ_OPERATION_TYPE,
                "PreferenceTestInterface::ReqOp");
        assertEquals("Unexpected error:", ":1:23-28: Sender keyword can only be used in an incoming Interface Operation.\nline 1:30: expecting TOK_RPAREN, found ';'\nline 1:31: expecting Semicolon, found 'null'\n", x);
    }

    /** sender is rejected in an outgoing (required) interface signal. */
    public void testRequiredSignal() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.REQ_SIGNAL_TYPE,
                "PreferenceTestInterface::ReqSig");
        assertEquals("Unexpected error:", ":1:23-28: Sender keyword can only be used in an incoming Signal.\nline 1:30: expecting TOK_RPAREN, found ';'\nline 1:31: expecting Semicolon, found 'null'\n", x);
    }

    /** sender is rejected when only some incoming transitions carry signals. */
    public void testClassStateMachineMixed() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.STATE_TYPE, "Class State Machine::Hybrid");
        assertEquals("Unexpected error:", ":1:23-28: Sender keyword can only be used where there are signals assigned to all incoming transitions\nline 1:30: expecting TOK_RPAREN, found ';'\nline 1:31: expecting Semicolon, found 'null'\n", x);
    }

    /** sender is rejected in an instance state machine state body. */
    public void testInstanceStateMachine() {
        String x = parseAction("self.CRA = sender;",
                Oalconstants_c.STATE_TYPE,
                "Instance State Machine::Preference Test State");
        assertEquals("Unexpected error:", ":1:12-17: Sender keyword is valid only where a message is serviced directly\nline 1:19: unexpected token: null\nline 1:19: expecting Semicolon, found 'null'\n", x);
    }

    /** sender is rejected on a transition that has no event at all. */
    public void testClassStateMachineTransitionWithNoEvent() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.TRANSITION_TYPE, "Class State Machine::No Event");
        assertEquals("Unexpected error:", ":1:23-28: Sender keyword can only be used when a signal is assigned to this transition\nline 1:30: expecting TOK_RPAREN, found ';'\nline 1:31: expecting Semicolon, found 'null'\n", x);
    }

    /** sender is rejected on a transition triggered by a non-signal event. */
    public void testClassStateMachineTransitionWithNonSignal() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.TRANSITION_TYPE, "Class State Machine::Ev");
        assertEquals("Unexpected error:", ":1:23-28: Sender keyword can only be used when a signal is assigned to this transition\nline 1:30: expecting TOK_RPAREN, found ';'\nline 1:31: expecting Semicolon, found 'null'\n", x);
    }

    /** sender is legal on a transition triggered by a signal. */
    public void testClassStateMachineTransitionWithSignal() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.TRANSITION_TYPE, "Class State Machine::Sig");
        assertEquals("Unexpected error:", "", x);
    }

    /** sender is rejected inside a bridge body. */
    public void testBridge() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.BRIDGE_TYPE,
                "ComponentTest::BrNoParmsNoReturn");
        assertEquals("Unexpected error:", ":1:23-28: Sender keyword is valid only where a message is serviced directly\nline 1:30: expecting TOK_RPAREN, found ';'\nline 1:31: expecting Semicolon, found 'null'\n", x);
    }

    /** sender is rejected inside a function body. */
    public void testFunction() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.FUNCTION_TYPE, "FnNoParmsNoReturn");
        assertEquals("Unexpected error:", ":1:23-28: Sender keyword is valid only where a message is serviced directly\nline 1:30: expecting TOK_RPAREN, found ';'\nline 1:31: expecting Semicolon, found 'null'\n", x);
    }

    /** sender is rejected inside a class-based operation body. */
    public void testClassBasedOperation() {
        String x = parseAction("::FnWithCRParm(CRParm:sender);",
                Oalconstants_c.OPERATION_TYPE, "opClassBasedPreferenceTest");
        assertEquals("Unexpected error:", ":1:23-28: Sender keyword is valid only where a message is serviced directly\nline 1:30: expecting TOK_RPAREN, found ';'\nline 1:31: expecting Semicolon, found 'null'\n", x);
    }

    /** sender is rejected inside an instance-based operation body. */
    public void testOperation() {
        String x = parseAction("self.CRA = sender;",
                Oalconstants_c.OPERATION_TYPE, "opPreferenceTest");
        assertEquals("Unexpected error:", ":1:12-17: Sender keyword is valid only where a message is serviced directly\nline 1:19: unexpected token: null\nline 1:19: expecting Semicolon, found 'null'\n", x);
    }

    /**
     * Attaches {@code stmts} to the model element named by
     * {@code actionName} (interpreted per {@code opType}), parses it, and
     * returns the parser's accumulated error output ("" on success).
     *
     * @param stmts OAL statements to parse.
     * @param opType one of the Oalconstants_c activity type codes.
     * @param actionName "Container::Element" path (or plain name for functions).
     * @return the parser's error output, empty when parsing succeeded.
     */
    private String parseAction(String stmts, int opType, String actionName) {
        String [] names = actionName.split("::");
        OalLexer lexer = new OalLexer(new StringReader(stmts));
        OalParser parser = new OalParser(modelRoot, lexer);
        UUID actID = Gd_c.Null_unique_id();
        class ComponentNameTest implements ClassQueryInterface_c {
            public boolean evaluate(Object candidate) {
                Component_c selected = (Component_c) candidate;
                return selected.getName().equals("Good Syntax");
            }
        }
        Component_c comp = Component_c.ComponentInstance(modelRoot,
                new ComponentNameTest());
        assertTrue("Test component not found: Good Syntax", comp != null);
        parser.m_oal_context = new Oal_validate(comp);
        class ModelClassNameTest implements ClassQueryInterface_c {
            public boolean evaluate(Object candidate) {
                ModelClass_c selected = (ModelClass_c) candidate;
                return selected.getName().equals("Test Component Syntax");
            }
        }
        ModelClass_c modelClass = ModelClass_c.getOneO_OBJOnR8001(PackageableElement_c
                .getManyPE_PEsOnR8000(Package_c
                        .getManyEP_PKGsOnR1405(m_sys)),
                new ModelClassNameTest());
        assertTrue("Test class not found: Test Component Syntax",
                modelClass != null);
        // Dispatch to the handler matching the activity type; each handler
        // installs stmts on the target element and returns its activity id.
        if (opType == Oalconstants_c.OPERATION_TYPE) {
            actID = handleOpTest(modelClass, stmts, names);
        }
        else if (opType == Oalconstants_c.STATE_TYPE) {
            actID = handleStateTest(modelClass, stmts, names);
        }
        else if (opType == Oalconstants_c.TRANSITION_TYPE) {
            actID = handleTransitionTest(modelClass, stmts, names);
        }
        else if (opType == Oalconstants_c.PROV_OPERATION_TYPE ||
                opType == Oalconstants_c.REQ_OPERATION_TYPE ||
                opType == Oalconstants_c.PROV_SIGNAL_TYPE ||
                opType == Oalconstants_c.REQ_SIGNAL_TYPE) {
            actID = handleMessageTest(comp, stmts, names, opType);
        }
        else if (opType == Oalconstants_c.BRIDGE_TYPE) {
            actID = handleBridgeTest(comp, stmts, names);
        }
        else if (opType == Oalconstants_c.FUNCTION_TYPE) {
            actID = handleFunctionTest(comp, stmts, names);
        }
        // NOTE(review): assertNotSame is an identity (not value) comparison;
        // this assumes Null_unique_id() returns a distinct object each call.
        // TODO confirm whether a value-equality check was intended.
        assertNotSame("Invalid activity ID", actID, Gd_c.Null_unique_id());
        try {
            parser.action(actID, opType);
        }
        catch (TokenStreamException e)
        {
            Block_c.Clearcurrentscope(modelRoot, parser.m_oal_context.m_act_id);
            if ( e instanceof TokenStreamRecognitionException )
            {
                TokenStreamRecognitionException tsre = (TokenStreamRecognitionException)e;
                parser.reportError(tsre.recog);
            }
            else
            {
                fail("Token stream exception in parser");
            }
        }
        catch (RecognitionException e)
        {
            Block_c.Clearcurrentscope(modelRoot, parser.m_oal_context.m_act_id);
            parser.reportError(e);
        }
        catch (InterruptedException ie) {
            // FIX: do not silently swallow the interrupt — restore the
            // thread's interrupt status for callers that check it.
            Thread.currentThread().interrupt();
        }
        return parser.m_output;
    }

    /** Installs stmts on the state named names[1] of the state machine named names[0]. */
    UUID handleStateTest(ModelClass_c modelClass, String stmts, String [] names) {
        StateMachineState_c [] states =
                StateMachineState_c.getManySM_STATEsOnR501(
                        StateMachine_c.getOneSM_SMOnR517(
                                ClassStateMachine_c.getOneSM_ASMOnR519(modelClass)));
        if (names[0].equals("Instance State Machine")) {
            states = StateMachineState_c.getManySM_STATEsOnR501(
                    StateMachine_c.getOneSM_SMOnR517(
                            InstanceStateMachine_c.getOneSM_ISMOnR518(modelClass)));
        }
        StateMachineState_c testState = null;
        for (int i = 0; i < states.length; i++) {
            if (states[i].getName().equals(names[1])) {
                testState = states[i];
                break;
            }
        }
        assertNotNull("Test state not found", testState);
        Action_c act = Action_c.getOneSM_ACTOnR514(
                ActionHome_c.getOneSM_AHOnR513(
                        MooreActionHome_c.getOneSM_MOAHOnR511(testState)));
        act.setAction_semantics_internal(stmts);
        return act.getAct_id();
    }

    /** Installs stmts on the transition whose name contains names[1]. */
    UUID handleTransitionTest(ModelClass_c modelClass, String stmts, String [] names) {
        Transition_c[] trans =
                Transition_c.getManySM_TXNsOnR505(
                        StateMachine_c.getOneSM_SMOnR517(
                                ClassStateMachine_c.getOneSM_ASMOnR519(modelClass)));
        if (names[0].equals("Instance State Machine")) {
            trans = Transition_c.getManySM_TXNsOnR505(
                    StateMachine_c.getOneSM_SMOnR517(
                            InstanceStateMachine_c.getOneSM_ISMOnR518(modelClass)));
        }
        Transition_c testTran = null;
        for (int i = 0; i < trans.length; i++) {
            // Transition names embed more than the bare event name, so a
            // contains() match is used rather than equals().
            if (trans[i].getName().contains(names[1])) {
                testTran = trans[i];
                break;
            }
        }
        assertNotNull("Test transition not found", testTran);
        Action_c act = Action_c.getOneSM_ACTOnR514(
                ActionHome_c.getOneSM_AHOnR513(
                        TransitionActionHome_c.getOneSM_TAHOnR530(testTran)));
        act.setAction_semantics_internal(stmts);
        return act.getAct_id();
    }

    /** Installs stmts on the class operation named names[0]. */
    UUID handleOpTest(ModelClass_c modelClass, String stmts, String [] names) {
        Operation_c op = null;
        Operation_c [] ops = Operation_c.getManyO_TFRsOnR115(modelClass);
        for (int i=0; i < ops.length; i++) {
            if (ops[i].getName().equals(names[0])) {
                op = ops[i];
                break;
            }
        }
        assertNotNull("Test operation not found", op);
        op.setAction_semantics_internal(stmts);
        return op.getTfr_id();
    }

    /**
     * Installs stmts on the interface message names[1] of the interface
     * names[0], resolving the provided/required operation/signal variant
     * selected by opType.
     */
    UUID handleMessageTest(Component_c comp, String stmts, String [] names,
            int opType) {
        InterfaceReference_c selectedIfRef = null;
        Interface_c selectedIf = null;
        InterfaceReference_c [] ifRefs = InterfaceReference_c.
                getManyC_IRsOnR4016(Port_c.getManyC_POsOnR4010(comp));
        for (int i=0; i < ifRefs.length; i++) {
            Interface_c iface = Interface_c.getOneC_IOnR4012(ifRefs[i]);
            if (iface.getName().equals(names[0])) {
                selectedIfRef = ifRefs[i];
                selectedIf = iface;
                break;
            }
        }
        assertNotNull("Interface not found", selectedIfRef);
        ExecutableProperty_c selectedEp = null;
        ExecutableProperty_c [] eps = ExecutableProperty_c.
                getManyC_EPsOnR4003(selectedIf);
        for (int i=0; i < eps.length; i++) {
            InterfaceOperation_c ifOp = InterfaceOperation_c.getOneC_IOOnR4004(eps[i]);
            InterfaceSignal_c ifSig = InterfaceSignal_c.getOneC_ASOnR4004(eps[i]);
            // FIX: the signal name must be compared against names[1] (the
            // requested message name); the original used names[i], i.e. the
            // loop index, which compared against the wrong element (or threw
            // ArrayIndexOutOfBoundsException for i > 1).
            if ((ifOp != null && ifOp.getName().equals(names[1])) ||
                    (ifSig != null && ifSig.getName().equals(names[1]))) {
                selectedEp = eps[i];
                break;
            }
        }
        assertNotNull("Operation not found", selectedEp);
        if (opType == Oalconstants_c.PROV_OPERATION_TYPE) {
            ProvidedOperation_c po = ProvidedOperation_c.getOneSPR_POOnR4503(
                    ProvidedExecutableProperty_c.getOneSPR_PEPOnR4501(selectedEp));
            po.setAction_semantics_internal(stmts);
            return po.getId();
        }
        else if (opType == Oalconstants_c.REQ_OPERATION_TYPE) {
            RequiredOperation_c ro = RequiredOperation_c.getOneSPR_ROOnR4502(
                    RequiredExecutableProperty_c.getOneSPR_REPOnR4500(selectedEp));
            ro.setAction_semantics_internal(stmts);
            return ro.getId();
        }
        else if (opType == Oalconstants_c.PROV_SIGNAL_TYPE) {
            ProvidedSignal_c ps = ProvidedSignal_c.getOneSPR_PSOnR4503(
                    ProvidedExecutableProperty_c.getOneSPR_PEPOnR4501(selectedEp));
            ps.setAction_semantics_internal(stmts);
            return ps.getId();
        }
        else { // opType == Oalconstants_c.REQ_SIGNAL_TYPE
            RequiredSignal_c ro = RequiredSignal_c.getOneSPR_RSOnR4502(
                    RequiredExecutableProperty_c.getOneSPR_REPOnR4500(selectedEp));
            ro.setAction_semantics_internal(stmts);
            return ro.getId();
        }
    }

    /** Installs stmts on bridge names[1] of external entity names[0]. */
    UUID handleBridgeTest(Component_c comp, String stmts, String [] names) {
        ExternalEntity_c eeUT = null;
        ExternalEntity_c[] ees = ExternalEntity_c
                .getManyS_EEsOnR8001(PackageableElement_c
                        .getManyPE_PEsOnR8000(Package_c
                                .getManyEP_PKGsOnR8001(PackageableElement_c
                                        .getManyPE_PEsOnR8003(comp))));
        for (int i=0; i < ees.length; i++) {
            if (ees[i].getName().equals(names[0])) {
                eeUT = ees[i];
                break;
            }
        }
        assertNotNull("EE not found", eeUT);
        Bridge_c brgUT = null;
        Bridge_c [] brgs = Bridge_c.getManyS_BRGsOnR19(eeUT);
        for(int i=0; i < brgs.length; i++) {
            if (brgs[i].getName().equals(names[1])) {
                brgUT = brgs[i];
                break;
            }
        }
        assertNotNull("Bridge not found", brgUT);
        brgUT.setAction_semantics_internal(stmts);
        return brgUT.getBrg_id();
    }

    /** Installs stmts on the function named names[0] in the component's packages. */
    UUID handleFunctionTest(Component_c comp, String stmts, String [] names) {
        Function_c fnUT = null;
        Function_c[] fns = Function_c
                .getManyS_SYNCsOnR8001(PackageableElement_c
                        .getManyPE_PEsOnR8000(Package_c
                                .getManyEP_PKGsOnR8001(PackageableElement_c
                                        .getManyPE_PEsOnR8003(comp))));
        for (int i=0; i < fns.length; i++) {
            if (fns[i].getName().equals(names[0])) {
                fnUT = fns[i];
                break;
            }
        }
        assertNotNull("Function not found", fnUT);
        fnUT.setAction_semantics_internal(stmts);
        return fnUT.getSync_id();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.index;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.Callable;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.ClientConnectorConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.processors.query.IgniteSQLException;
import org.apache.ignite.internal.processors.query.QueryField;
import org.apache.ignite.internal.processors.query.QueryUtils;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.h2.value.DataType;
/**
* Common stuff for dynamic columns tests.
*/
/**
 * Common stuff for dynamic columns tests.
 */
public abstract class DynamicColumnsAbstractTest extends GridCommonAbstractTest {
    /** SQL to create test table. */
    static final String CREATE_SQL = "CREATE TABLE IF NOT EXISTS Person (id int primary key, name varchar)";

    /** SQL to drop test table. */
    static final String DROP_SQL = "DROP TABLE Person";

    /** IP finder. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /**
     * Check that given columns are seen by client.
     *
     * @param schemaName Schema name to look for the table in.
     * @param tblName Table name to check.
     * @param cols Columns whose presence must be checked.
     * @throws SQLException If the JDBC metadata query fails.
     */
    static void checkTableState(String schemaName, String tblName, QueryField... cols) throws SQLException {
        checkTableState(schemaName, tblName, ClientConnectorConfiguration.DFLT_PORT, cols);
    }

    /**
     * Check that given columns are seen by client.
     *
     * @param schemaName Schema name to look for the table in.
     * @param tblName Table name to check.
     * @param port Port number.
     * @param cols Columns whose presence must be checked.
     * @throws SQLException If the JDBC metadata query fails.
     */
    static void checkTableState(String schemaName, String tblName, int port, QueryField... cols) throws SQLException {
        List<QueryField> flds = new ArrayList<>();

        // Read the table's column metadata through a thin JDBC connection.
        try (Connection c = DriverManager.getConnection("jdbc:ignite:thin://127.0.0.1:" + port)) {
            try (ResultSet rs = c.getMetaData().getColumns(null, schemaName, tblName, "%")) {
                while (rs.next()) {
                    String name = rs.getString("COLUMN_NAME");

                    short type = rs.getShort("DATA_TYPE");

                    String typeClsName = DataType.getTypeClassName(DataType.convertSQLTypeToValueType(type));

                    short nullable = rs.getShort("NULLABLE");

                    // Use the JDBC constant instead of the magic number 1.
                    flds.add(new QueryField(name, typeClsName,
                        nullable == java.sql.DatabaseMetaData.columnNullable));
                }
            }
        }

        Iterator<QueryField> it = flds.iterator();

        // Skip pre-existing columns so the iterator is positioned at the
        // last cols.length columns, which are compared against expectations.
        for (int i = flds.size() - cols.length; i > 0 && it.hasNext(); i--)
            it.next();

        for (QueryField exp : cols) {
            assertTrue("New column not found in metadata: " + exp.name(), it.hasNext());

            QueryField act = it.next();

            assertEquals(exp.name(), act.name());
            assertEquals(exp.typeName(), act.typeName());

            // TODO uncomment after IGNITE-6529 is implemented.
            //assertEquals(exp.isNullable(), act.isNullable());
        }
    }

    /**
     * @param name New column name.
     * @param typeName Class name for this new column's data type.
     * @return New column with given name and type.
     */
    protected static QueryField c(String name, String typeName) {
        return new QueryField(name, typeName, true);
    }

    /**
     * @param idx Node index.
     * @return Client configuration.
     * @throws Exception if failed.
     */
    protected IgniteConfiguration clientConfiguration(int idx) throws Exception {
        QueryEntity e = new QueryEntity(Integer.class.getName(), "Person");

        LinkedHashMap<String, String> flds = new LinkedHashMap<>();

        flds.put("name", String.class.getName());

        e.setFields(flds);

        return commonConfiguration(idx).setClientMode(true).setCacheConfiguration(
            new CacheConfiguration<>("idx").setQueryEntities(Collections.singletonList(e))
        );
    }

    /**
     * Create common node configuration.
     *
     * @param idx Index.
     * @return Configuration.
     * @throws Exception If failed.
     */
    protected IgniteConfiguration commonConfiguration(int idx) throws Exception {
        IgniteConfiguration cfg = getConfiguration(getTestIgniteInstanceName(idx));

        cfg.setDiscoverySpi(new TcpDiscoverySpi().setIpFinder(IP_FINDER));

        DataStorageConfiguration memCfg = new DataStorageConfiguration().setDefaultDataRegionConfiguration(
            new DataRegionConfiguration().setMaxSize(128 * 1024 * 1024));

        cfg.setDataStorageConfiguration(memCfg);

        return optimize(cfg);
    }

    /**
     * Create server node configuration.
     *
     * @param idx Index.
     * @return Configuration.
     * @throws Exception If failed.
     */
    IgniteConfiguration serverConfiguration(int idx) throws Exception {
        return commonConfiguration(idx);
    }

    /**
     * Execute SQL command and return resulting dataset.
     *
     * @param node Node to run query from.
     * @param sql Statement.
     * @return result.
     */
    protected List<List<?>> run(Ignite node, String sql) {
        return ((IgniteEx)node).context().query()
            .querySqlFieldsNoCache(new SqlFieldsQuery(sql).setSchema(QueryUtils.DFLT_SCHEMA), true).getAll();
    }

    /**
     * Execute SQL command and return resulting dataset.
     *
     * @param cache Cache to initiate query from.
     * @param sql Statement.
     * @param args Positional query arguments.
     * @return result.
     */
    protected List<List<?>> run(IgniteCache<?, ?> cache, String sql, Object... args) {
        SqlFieldsQuery qry = new SqlFieldsQuery(sql).setSchema(QueryUtils.DFLT_SCHEMA).setArgs(args)
            .setDistributedJoins(true);

        return cache.query(qry).getAll();
    }

    /**
     * Run specified statement expected to throw {@code IgniteSqlException} with expected specified message.
     *
     * @param node Node to run the statement on.
     * @param sql Statement.
     * @param msg Expected message.
     */
    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
    protected void assertThrows(final Ignite node, final String sql, String msg) {
        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                run(node, sql);

                return null;
            }
        }, IgniteSQLException.class, msg);
    }
}
| |
/*
* The MIT License
*
* Copyright (c) Eduardo R. B. Marques
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.jdbdt;
import static org.jdbdt.JDBDT.*;
import java.sql.Date;
import java.sql.SQLException;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import static org.jdbdt.TestUtil.expectAssertionError;
import static org.jdbdt.TestUtil.expectException;
@SuppressWarnings("javadoc")
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class DBAssertTest extends DBTestCase {
private static final String ERROR_MSG = "assertion error";
private static final String EMPTY_MSG = "";
private Table table;
private Query query;
// Test fixture actions: each method performs (or deliberately skips) one
// database mutation whose delta the assertions under test must detect.
static class Actions {
static final User EXISTING_USER = getTestData(EXISTING_DATA_ID1);
static final User USER_TO_INSERT = new User(EXISTING_DATA_ID1 + "_", "New User", "pass", Date.valueOf("2099-01-01"));
static final User EXISTING_USER_UPDATED = new User(EXISTING_DATA_ID1, "new name", "new password", Date.valueOf("2099-01-01"));
// Deliberate no-op: the baseline "database unchanged" scenario.
static void nop() { }
// Inserts USER_TO_INSERT (login derived from EXISTING_DATA_ID1 + "_").
static void insertNewUser() throws SQLException {
getDAO().doInsert(USER_TO_INSERT);
}
// Deletes the row keyed by EXISTING_DATA_ID1.
static void deleteUser() throws SQLException {
getDAO().doDelete(EXISTING_DATA_ID1);
}
// Overwrites the existing user's mutable fields with EXISTING_USER_UPDATED.
static void updateUser() throws SQLException {
getDAO().doUpdate(EXISTING_USER_UPDATED);
}
// Clears the whole table.
static void deleteAll() throws SQLException {
getDAO().doDeleteAll();
}
}
private DataSet initialUsers;
private DataSet initialLogins;
private DataSet exUserDs;
private DataSet exUserLDs;
private DataSet newUserDs;
private DataSet newUserLDs;
private DataSet updUserDs;
@Before
public void takeDBSnapshot() {
table = table(UserDAO.TABLE_NAME)
.columns(UserDAO.COLUMNS)
.build(getDB());
query = select("LOGIN")
.from(table)
.build(getDB());
initialUsers = takeSnapshot(table);
initialLogins = takeSnapshot(query);
exUserDs = d(Actions.EXISTING_USER);
newUserDs = d(Actions.USER_TO_INSERT);
updUserDs = d(Actions.EXISTING_USER_UPDATED);
exUserLDs = d(Actions.EXISTING_USER.getLogin());
newUserLDs = d(Actions.USER_TO_INSERT.getLogin());
}
DataSet d(User... users) {
return data(table, getConversion()).rows(users);
}
DataSet d(String... logins) {
DataSet dataSet = data(query);
for (String login : logins) {
dataSet.row(login);
}
return dataSet;
}
@Test
public void testNop() {
Actions.nop();
assertUnchanged(table);
assertUnchanged(ERROR_MSG, query);
assertUnchanged(table, query);
assertUnchanged(ERROR_MSG, table, query);
assertDelta(empty(table), empty(table));
assertDelta(ERROR_MSG, empty(query), empty(query));
expectAssertionError(EMPTY_MSG, () -> assertDelta(exUserDs,updUserDs));
expectAssertionError(ERROR_MSG, () -> assertDelta(ERROR_MSG, exUserDs,updUserDs));
expectAssertionError(EMPTY_MSG, () -> assertInserted(exUserDs));
expectAssertionError(ERROR_MSG, () -> assertInserted(ERROR_MSG, exUserLDs));
expectAssertionError(EMPTY_MSG, () -> assertInserted(exUserDs,exUserLDs));
expectAssertionError(ERROR_MSG, () -> assertInserted(ERROR_MSG, exUserDs,exUserLDs));
expectAssertionError(EMPTY_MSG, () -> assertDeleted(exUserDs));
expectAssertionError(ERROR_MSG, () -> assertDeleted(ERROR_MSG, exUserLDs));
expectAssertionError(EMPTY_MSG, () -> assertDeleted(exUserDs,exUserLDs));
expectAssertionError(ERROR_MSG, () -> assertDeleted(ERROR_MSG, exUserDs,exUserLDs));
assertState(initialUsers);
assertState(ERROR_MSG, initialLogins);
assertState(initialUsers, initialLogins);
assertState(ERROR_MSG, initialUsers, initialLogins);
expectAssertionError(EMPTY_MSG, () -> assertEmpty(table));
expectAssertionError(ERROR_MSG, () -> assertEmpty(ERROR_MSG, query));
expectAssertionError(EMPTY_MSG, () -> assertEmpty(table, query));
expectAssertionError(ERROR_MSG, () -> assertEmpty(ERROR_MSG, table, query));
}
@Test
public void testDeleteAll() throws SQLException {
Actions.deleteAll();
expectAssertionError(EMPTY_MSG, () -> assertUnchanged(table));
expectAssertionError(ERROR_MSG, () -> assertUnchanged(ERROR_MSG, query));
expectAssertionError(EMPTY_MSG, () -> assertUnchanged(table, query));
expectAssertionError(ERROR_MSG, () -> assertUnchanged(ERROR_MSG, table, query));
assertDelta(initialUsers, empty(table));
assertDelta(ERROR_MSG, initialLogins, empty(query));
assertDeleted(initialUsers);
assertDeleted(ERROR_MSG, initialLogins);
assertDeleted(initialLogins, initialUsers);
assertDeleted(ERROR_MSG, initialLogins, initialUsers);
assertState(empty(table));
assertState(ERROR_MSG, empty(query));
assertState(empty(table), empty(query));
assertState(ERROR_MSG, empty(table), empty(query));
assertEmpty(table);
assertEmpty(ERROR_MSG, query);
assertEmpty(table, query);
assertEmpty(ERROR_MSG, table, query);
}
@Test
public void testDelete() throws SQLException {
Actions.deleteUser();
assertDelta(exUserDs, empty(table));
assertDelta(ERROR_MSG, exUserLDs, empty(query));
assertDeleted(exUserDs);
assertDeleted(ERROR_MSG, exUserLDs);
assertDeleted(exUserDs, exUserLDs);
assertDeleted(ERROR_MSG, exUserDs, exUserLDs);
}
@Test
public void testInsert() throws SQLException {
Actions.insertNewUser();
assertDelta(empty(table), newUserDs);
assertDelta(ERROR_MSG,empty(query), newUserLDs);
assertInserted(newUserDs);
assertInserted(ERROR_MSG, newUserDs);
assertInserted(newUserDs, newUserLDs);
assertInserted(ERROR_MSG, newUserDs, newUserLDs);
assertState(DataSet.join(initialUsers, newUserDs),
DataSet.join(initialLogins, newUserLDs));
}
@Test
public void testUpdate() throws SQLException {
Actions.updateUser();
assertDelta(exUserDs, updUserDs);
assertUnchanged(ERROR_MSG, query);
}
@Test
public void testRepeatedDataSets() throws SQLException {
expectException(InvalidOperationException.class, () -> assertUnchanged(table, table));
expectException(InvalidOperationException.class, () -> assertEmpty(table, table));
expectException(InvalidOperationException.class, () -> assertState(empty(table), empty(table)));
expectException(InvalidOperationException.class, () -> assertInserted(newUserDs, newUserDs));
expectException(InvalidOperationException.class, () -> assertDeleted(newUserDs, newUserDs));
}
}
| |
package com.firebase.geofire;
import com.google.firebase.database.DatabaseError;
import junit.framework.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.*;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
@RunWith(JUnit4.class)
public class GeoQueryTest extends RealDataTest {
    // NOTE(review): this class asserts against a live Firebase backend via
    // RealDataTest; event delivery is asynchronous, so listeners block with
    // expectEvents(...) until the expected event set arrives (or times out).
    // Keys 1, 2 and 4 lie inside the 0.5 km query radius around (37, -122);
    // only those should produce keyEntered events.
    @Test
    public void keyEntered() throws InterruptedException {
        GeoFire geoFire = newTestGeoFire();
        setLoc(geoFire, "0", 0, 0);
        setLoc(geoFire, "1", 37.0000, -122.0000);
        setLoc(geoFire, "2", 37.0001, -122.0001);
        setLoc(geoFire, "3", 37.1000, -122.0000);
        setLoc(geoFire, "4", 37.0002, -121.9998, true);
        GeoQuery query = geoFire.queryAtLocation(new GeoLocation(37, -122), 0.5);
        GeoQueryEventTestListener testListener = new GeoQueryEventTestListener();
        query.addGeoQueryEventListener(testListener);
        waitForGeoFireReady(geoFire);
        Set<String> events = new HashSet<String>();
        events.add(GeoQueryEventTestListener.keyEntered("1", 37, -122));
        events.add(GeoQueryEventTestListener.keyEntered("2", 37.0001, -122.0001));
        events.add(GeoQueryEventTestListener.keyEntered("4", 37.0002, -121.9998));
        testListener.expectEvents(events);
        query.removeAllListeners();
    }
    // Only keys that were inside the radius and then moved out ("1" and "2")
    // should fire keyExited; keys that were never inside must stay silent.
    @Test
    public void keyExited() throws InterruptedException {
        GeoFire geoFire = newTestGeoFire();
        setLoc(geoFire, "0", 0, 0);
        setLoc(geoFire, "1", 37.0000, -122.0000);
        setLoc(geoFire, "2", 37.0001, -122.0001);
        setLoc(geoFire, "3", 37.1000, -122.0000);
        setLoc(geoFire, "4", 37.0002, -121.9998, true);
        GeoQuery query = geoFire.queryAtLocation(new GeoLocation(37, -122), 0.5);
        // Listener configured for exit events only (entered=false, moved=false, exited=true).
        GeoQueryEventTestListener testListener = new GeoQueryEventTestListener(false, false, true);
        query.addGeoQueryEventListener(testListener);
        waitForGeoFireReady(geoFire);
        setLoc(geoFire, "0", 0, 0); // not in query
        setLoc(geoFire, "1", 0, 0); // exited
        setLoc(geoFire, "2", 0, 0); // exited
        setLoc(geoFire, "3", 2, 0, true); // not in query
        setLoc(geoFire, "0", 3, 0); // not in query
        setLoc(geoFire, "1", 4, 0); // not in query
        setLoc(geoFire, "2", 5, 0, true); // not in query
        // Ordered list: exit events are expected in this exact sequence.
        List<String> events = new LinkedList<String>();
        events.add(GeoQueryEventTestListener.keyExited("1"));
        events.add(GeoQueryEventTestListener.keyExited("2"));
        testListener.expectEvents(events);
    }
    // keyMoved must fire only for keys that stay inside the radius and change
    // location; same-location writes and enter/exit transitions do not count.
    @Test
    public void keyMoved() throws InterruptedException {
        GeoFire geoFire = newTestGeoFire();
        setLoc(geoFire, "0", 0, 0);
        setLoc(geoFire, "1", 37.0000, -122.0000);
        setLoc(geoFire, "2", 37.0001, -122.0001);
        setLoc(geoFire, "3", 37.1000, -122.0000);
        setLoc(geoFire, "4", 37.0002, -121.9998, true);
        GeoQuery query = geoFire.queryAtLocation(new GeoLocation(37, -122), 0.5);
        // Listener for moved events only; a second listener observes exits so
        // the test can synchronize on key "2" leaving before re-entering it.
        GeoQueryEventTestListener testListener = new GeoQueryEventTestListener(false, true, false);
        query.addGeoQueryEventListener(testListener);
        GeoQueryEventTestListener exitListener = new GeoQueryEventTestListener(false, false, true);
        query.addGeoQueryEventListener(exitListener);
        waitForGeoFireReady(geoFire);
        setLoc(geoFire, "0", 1, 1); // outside of query
        setLoc(geoFire, "1", 37.0001, -122.0000); // moved
        setLoc(geoFire, "2", 37.0001, -122.0001); // location stayed the same
        setLoc(geoFire, "4", 37.0002, -122.0000); // moved
        setLoc(geoFire, "3", 37.0000, -122.0000, true); // entered
        setLoc(geoFire, "3", 37.0003, -122.0003, true); // moved:
        setLoc(geoFire, "2", 0, 0, true); // exited
        // wait for location to exit
        exitListener.expectEvents(Arrays.asList(GeoQueryEventTestListener.keyExited("2")));
        setLoc(geoFire, "2", 37.0000, -122.0000, true); // entered
        setLoc(geoFire, "2", 37.0001, -122.0001, true); // moved
        List<String> events = new LinkedList<String>();
        events.add(GeoQueryEventTestListener.keyMoved("1", 37.0001, -122.0000));
        events.add(GeoQueryEventTestListener.keyMoved("4", 37.0002, -122.0000));
        events.add(GeoQueryEventTestListener.keyMoved("3", 37.0003, -122.0003));
        events.add(GeoQueryEventTestListener.keyMoved("2", 37.0001, -122.0001));
        testListener.expectEvents(events);
    }
    // Moving a key from one internal sub-query region to another (sign flip of
    // both coordinates) must still be reported as keyMoved, not exit+enter.
    @Test
    public void subQueryTriggersKeyMoved() throws InterruptedException {
        GeoFire geoFire = newTestGeoFire();
        setLoc(geoFire, "0", 1, 1, true);
        setLoc(geoFire, "1", -1, -1, true);
        GeoQuery query = geoFire.queryAtLocation(new GeoLocation(0, 0), 1000);
        GeoQueryEventTestListener testListener = new GeoQueryEventTestListener(false, true, true);
        query.addGeoQueryEventListener(testListener);
        waitForGeoFireReady(geoFire);
        setLoc(geoFire, "0", -1, -1);
        setLoc(geoFire, "1", 1, 1);
        Set<String> events = new HashSet<String>();
        events.add(GeoQueryEventTestListener.keyMoved("0", -1, -1));
        events.add(GeoQueryEventTestListener.keyMoved("1", 1, 1));
        testListener.expectEvents(events);
    }
    // After removing one of two listeners, only the remaining listener should
    // observe further events; the removed one stays frozen at the initial set.
    @Test
    public void removeSingleObserver() throws InterruptedException {
        GeoFire geoFire = newTestGeoFire();
        setLoc(geoFire, "0", 0, 0);
        setLoc(geoFire, "1", 37.0000, -122.0000);
        setLoc(geoFire, "2", 37.0001, -122.0001);
        setLoc(geoFire, "3", 37.1000, -122.0000);
        setLoc(geoFire, "4", 37.0002, -121.9998, true);
        GeoQuery query = geoFire.queryAtLocation(new GeoLocation(37.0, -122), 1);
        GeoQueryEventTestListener testListenerRemoved = new GeoQueryEventTestListener(true, true, true);
        query.addGeoQueryEventListener(testListenerRemoved);
        GeoQueryEventTestListener testListenerRemained = new GeoQueryEventTestListener(true, true, true);
        query.addGeoQueryEventListener(testListenerRemained);
        Set<String> addedEvents = new HashSet<String>();
        addedEvents.add(GeoQueryEventTestListener.keyEntered("1", 37, -122));
        addedEvents.add(GeoQueryEventTestListener.keyEntered("2", 37.0001, -122.0001));
        addedEvents.add(GeoQueryEventTestListener.keyEntered("4", 37.0002, -121.9998));
        // NOTE(review): the next two lines check the same listener twice; the
        // second call was presumably meant to be testListenerRemoved — confirm.
        testListenerRemained.expectEvents(addedEvents);
        testListenerRemained.expectEvents(addedEvents);
        query.removeGeoQueryEventListener(testListenerRemoved);
        setLoc(geoFire, "0", 37, -122); // entered
        setLoc(geoFire, "1", 0, 0); // exited
        setLoc(geoFire, "2", 37, -122.0001); // moved
        Set<String> furtherEvents = new HashSet<String>(addedEvents);
        furtherEvents.add(GeoQueryEventTestListener.keyEntered("0", 37, -122)); // entered
        furtherEvents.add(GeoQueryEventTestListener.keyExited("1")); // exited
        furtherEvents.add(GeoQueryEventTestListener.keyMoved("2", 37.0000, -122.0001)); // moved
        testListenerRemained.expectEvents(furtherEvents);
        // The removed listener must not have advanced past the initial events.
        testListenerRemoved.expectEvents(addedEvents);
    }
    // After removeAllListeners(), no listener (removed individually or in
    // bulk) should see events from subsequent location writes.
    @Test
    public void removeAllObservers() throws InterruptedException {
        GeoFire geoFire = newTestGeoFire();
        setLoc(geoFire, "0", 0, 0);
        setLoc(geoFire, "1", 37.0000, -122.0000);
        setLoc(geoFire, "2", 37.0001, -122.0001);
        setLoc(geoFire, "3", 37.1000, -122.0000);
        setLoc(geoFire, "4", 37.0002, -121.9998, true);
        GeoQuery query = geoFire.queryAtLocation(new GeoLocation(37.0, -122), 1);
        GeoQueryEventTestListener testListenerRemoved = new GeoQueryEventTestListener(true, true, true);
        query.addGeoQueryEventListener(testListenerRemoved);
        GeoQueryEventTestListener testListenerRemained = new GeoQueryEventTestListener(true, true, true);
        query.addGeoQueryEventListener(testListenerRemained);
        Set<String> addedEvents = new HashSet<String>();
        addedEvents.add(GeoQueryEventTestListener.keyEntered("1", 37, -122));
        addedEvents.add(GeoQueryEventTestListener.keyEntered("2", 37.0001, -122.0001));
        addedEvents.add(GeoQueryEventTestListener.keyEntered("4", 37.0002, -121.9998));
        // NOTE(review): same duplication as in removeSingleObserver — the
        // second line likely should target testListenerRemoved; confirm.
        testListenerRemained.expectEvents(addedEvents);
        testListenerRemained.expectEvents(addedEvents);
        query.removeGeoQueryEventListener(testListenerRemoved);
        query.removeAllListeners();
        setLoc(geoFire, "0", 37, -122); // entered
        setLoc(geoFire, "1", 0, 0); // exited
        setLoc(geoFire, "2", 37, -122.0001, true); // moved
        testListenerRemained.expectEvents(addedEvents);
        testListenerRemoved.expectEvents(addedEvents);
    }
    // onGeoQueryReady must fire exactly after all initial keyEntered events;
    // any keyEntered arriving after ready marks the test as failed.
    @Test
    public void readyListener() throws InterruptedException {
        GeoFire geoFire = newTestGeoFire();
        setLoc(geoFire, "0", 0, 0);
        setLoc(geoFire, "1", 37.0000, -122.0000);
        setLoc(geoFire, "2", 37.0001, -122.0001);
        setLoc(geoFire, "3", 37.1000, -122.0000);
        setLoc(geoFire, "4", 37.0002, -121.9998, true);
        GeoQuery query = geoFire.queryAtLocation(new GeoLocation(37.0, -122), 1);
        // Single-element arrays used as mutable flags captured by the anonymous listener.
        final boolean[] done = new boolean[1];
        final boolean[] failed = new boolean[1];
        final Semaphore semaphore = new Semaphore(0);
        query.addGeoQueryEventListener(new GeoQueryEventListener() {
            @Override
            public void onKeyEntered(String key, GeoLocation location) {
                if (done[0]) {
                    failed[0] = true;
                }
            }
            @Override
            public void onKeyExited(String key) {
            }
            @Override
            public void onKeyMoved(String key, GeoLocation location) {
            }
            @Override
            public void onGeoQueryReady() {
                semaphore.release();
                done[0] = true;
            }
            @Override
            public void onGeoQueryError(DatabaseError error) {
            }
        });
        Assert.assertTrue(semaphore.tryAcquire(TestHelpers.TIMEOUT_SECONDS, TimeUnit.SECONDS));
        Assert.assertTrue(done[0]);
        // wait for any further events to fire
        Thread.sleep(250);
        Assert.assertFalse("Key entered after ready event occurred!", failed[0]);
    }
    // A listener added after the query is already ready must receive its own
    // ready callback (almost) immediately.
    @Test
    public void readyListenerAfterReady() throws InterruptedException {
        GeoFire geoFire = newTestGeoFire();
        setLoc(geoFire, "0", 0, 0);
        setLoc(geoFire, "1", 37.0000, -122.0000);
        setLoc(geoFire, "2", 37.0001, -122.0001);
        setLoc(geoFire, "3", 37.1000, -122.0000);
        setLoc(geoFire, "4", 37.0002, -121.9998, true);
        GeoQuery query = geoFire.queryAtLocation(new GeoLocation(37.0, -122), 1);
        final Semaphore semaphore = new Semaphore(0);
        query.addGeoQueryEventListener(new GeoQueryEventListener() {
            @Override
            public void onKeyEntered(String key, GeoLocation location) {
            }
            @Override
            public void onKeyExited(String key) {
            }
            @Override
            public void onKeyMoved(String key, GeoLocation location) {
            }
            @Override
            public void onGeoQueryReady() {
                semaphore.release();
            }
            @Override
            public void onGeoQueryError(DatabaseError error) {
            }
        });
        Assert.assertTrue(semaphore.tryAcquire(TestHelpers.TIMEOUT_SECONDS, TimeUnit.SECONDS));
        query.addGeoQueryEventListener(new GeoQueryEventListener() {
            @Override
            public void onKeyEntered(String key, GeoLocation location) {
            }
            @Override
            public void onKeyExited(String key) {
            }
            @Override
            public void onKeyMoved(String key, GeoLocation location) {
            }
            @Override
            public void onGeoQueryReady() {
                semaphore.release();
            }
            @Override
            public void onGeoQueryError(DatabaseError error) {
            }
        });
        // Second ready callback should be synchronous-ish: 10 ms budget.
        Assert.assertTrue(semaphore.tryAcquire(10, TimeUnit.MILLISECONDS));
    }
    // Changing the query criteria must trigger another ready event once the
    // data for the new search area has loaded.
    @Test
    public void readyAfterUpdateCriteria() throws InterruptedException {
        GeoFire geoFire = newTestGeoFire();
        setLoc(geoFire, "0", 0, 0);
        setLoc(geoFire, "1", 37.0000, -122.0000);
        setLoc(geoFire, "2", 37.0001, -122.0001);
        setLoc(geoFire, "3", 37.1000, -122.0000);
        setLoc(geoFire, "4", 37.0002, -121.9998, true);
        GeoQuery query = geoFire.queryAtLocation(new GeoLocation(37.0, -122), 1);
        final boolean[] done = new boolean[1];
        final Semaphore semaphore = new Semaphore(0);
        final int[] readyCount = new int[1];
        query.addGeoQueryEventListener(new GeoQueryEventListener() {
            @Override
            public void onKeyEntered(String key, GeoLocation location) {
                // Key "0" is only inside the query after setCenter(0, 0) below.
                if (key.equals("0")) {
                    done[0] = true;
                }
            }
            @Override
            public void onKeyExited(String key) {
            }
            @Override
            public void onKeyMoved(String key, GeoLocation location) {
            }
            @Override
            public void onGeoQueryReady() {
                semaphore.release();
                readyCount[0]++;
            }
            @Override
            public void onGeoQueryError(DatabaseError error) {
            }
        });
        Assert.assertTrue(semaphore.tryAcquire(TestHelpers.TIMEOUT_SECONDS, TimeUnit.SECONDS));
        query.setCenter(new GeoLocation(0,0));
        Assert.assertTrue(semaphore.tryAcquire(TestHelpers.TIMEOUT_SECONDS, TimeUnit.SECONDS));
        Assert.assertTrue(done[0]);
    }
}
| |
package org.mapfish.print.config;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.geotools.styling.Style;
import org.json.JSONException;
import org.json.JSONWriter;
import org.mapfish.print.attribute.Attribute;
import org.mapfish.print.attribute.InternalAttribute;
import org.mapfish.print.attribute.map.MapfishMapContext;
import org.mapfish.print.config.access.AccessAssertion;
import org.mapfish.print.config.access.AlwaysAllowAssertion;
import org.mapfish.print.config.access.RoleAccessAssertion;
import org.mapfish.print.map.style.StyleParser;
import org.mapfish.print.processor.Processor;
import org.mapfish.print.processor.ProcessorDependencyGraph;
import org.mapfish.print.processor.ProcessorDependencyGraphFactory;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.client.ClientHttpRequestFactory;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nonnull;
/**
* Represents a report template configuration.
*/
public class Template implements ConfigurationObject, HasConfiguration {
    private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(Template.class);
    @Autowired
    private ProcessorDependencyGraphFactory processorGraphFactory;
    @Autowired
    private ClientHttpRequestFactory httpRequestFactory;
    @Autowired
    private StyleParser styleParser;
    private String reportTemplate;
    private Map<String, Attribute> attributes = Maps.newHashMap();
    private List<Processor> processors = Lists.newArrayList();
    private String jdbcUrl;
    private String jdbcUser;
    private String jdbcPassword;
    // volatile: lazily built via double-checked locking in getProcessorGraph().
    private volatile ProcessorDependencyGraph processorGraph;
    private Map<String, String> styles = new HashMap<String, String>();
    private Configuration configuration;
    private AccessAssertion accessAssertion = AlwaysAllowAssertion.INSTANCE;
    private PDFConfig pdfConfig = new PDFConfig();
    private String tableDataKey;
    private String outputFilename;
    /**
     * The default output file name of the report (takes precedence over
     * {@link org.mapfish.print.config.Configuration#setOutputFilename(String)}). This can be overridden by the outputFilename
     * parameter in the request JSON.
     * <p>
     * This can be a string and can also have a date section in the string that will be filled when the report is created for
     * example a section with ${&lt;dateFormatString&gt;} will be replaced with the current date formatted in the way defined
     * by the &lt;dateFormatString&gt; string. The format rules are the rules in
     * <a href="http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html">java.text.SimpleDateFormat</a>
     * (do a google search if the link above is broken).
     * </p>
     * <p>
     * Example: <code>outputFilename: print-${dd-MM-yyyy}</code> should output: <code>print-22-11-2014.pdf</code>
     * </p>
     * <p>
     * Note: the suffix will be appended to the end of the name.
     * </p>
     *
     * @param outputFilename default output file name of the report.
     */
    public final void setOutputFilename(final String outputFilename) {
        this.outputFilename = outputFilename;
    }
    /**
     * @return the default output file name of the report, or null if not configured.
     */
    public final String getOutputFilename() {
        return this.outputFilename;
    }
    /**
     * Get the merged configuration between this template and the configuration's template. The settings in the template take
     * priority over the configurations settings but if not set in the template then the default will be the configuration's options.
     */
    // CSOFF: DesignForExtension -- Note this is disabled so that I can use Mockito and inject my own objects
    public PDFConfig getPdfConfig() {
        return this.pdfConfig.getMergedInstance(this.configuration.getPdfConfig());
    }
    /**
     * Print out the template information that the client needs for performing a request.
     * Attributes annotated with {@link InternalAttribute} are omitted from the output.
     *
     * @param json the writer to write the information to.
     * @throws JSONException if writing to the JSON writer fails.
     */
    public final void printClientConfig(final JSONWriter json) throws JSONException {
        json.key("attributes");
        json.array();
        for (Map.Entry<String, Attribute> entry : this.attributes.entrySet()) {
            Attribute attribute = entry.getValue();
            if (attribute.getClass().getAnnotation(InternalAttribute.class) == null) {
                json.object();
                json.key("name").value(entry.getKey());
                attribute.printClientConfig(json, this);
                json.endObject();
            }
        }
        json.endArray();
    }
    /**
     * Configure various properties related to the reports generated as PDFs.
     * @param pdfConfig the pdf configuration
     */
    public final void setPdfConfig(final PDFConfig pdfConfig) {
        this.pdfConfig = pdfConfig;
    }
    /**
     * @return the attributes configured for this template, keyed by attribute name.
     */
    public final Map<String, Attribute> getAttributes() {
        return this.attributes;
    }
    /**
     * Set the attributes for this template.
     * <p>
     * The map is populated from YAML configuration, so each value is defensively checked to
     * actually be an {@link Attribute} (a mis-declared entry can arrive as an arbitrary object).
     * </p>
     *
     * @param attributes the attribute map
     */
    public final void setAttributes(final Map<String, Attribute> attributes) {
        for (Map.Entry<String, Attribute> entry : attributes.entrySet()) {
            Object attribute = entry.getValue();
            if (!(attribute instanceof Attribute)) {
                final String msg = "Attribute: '" + entry.getKey() + "' is not an attribute. It is a: " + attribute;
                // Parameterized logging avoids string concatenation when the level is disabled.
                LOGGER.error("Error setting the Attributes: {}", msg);
                throw new IllegalArgumentException(msg);
            } else {
                ((Attribute) attribute).setConfigName(entry.getKey());
            }
        }
        this.attributes = attributes;
    }
    public final String getReportTemplate() {
        return this.reportTemplate;
    }
    public final void setReportTemplate(final String reportTemplate) {
        this.reportTemplate = reportTemplate;
    }
    public final List<Processor> getProcessors() {
        return this.processors;
    }
    /**
     * Set the normal processors.
     *
     * @param processors the processors to set.
     */
    public final void setProcessors(final List<Processor> processors) {
        assertProcessors(processors);
        this.processors = processors;
    }
    /**
     * Verify that each configured processor is a valid (non-null) {@link Processor}.
     * Like the attributes, the list comes from YAML configuration, so a broken entry may
     * arrive as null (the instanceof check also rejects null).
     *
     * @param processorsToCheck the processors to validate.
     * @throws IllegalArgumentException if an entry is not a valid processor.
     */
    private void assertProcessors(final List<Processor> processorsToCheck) {
        for (Processor entry : processorsToCheck) {
            if (!(entry instanceof Processor)) {
                final String msg = "Processor: " + entry + " is not a processor.";
                // Fixed copy-paste bug: this message previously said "Error setting the Attributes".
                LOGGER.error("Error setting the Processors: {}", msg);
                throw new IllegalArgumentException(msg);
            }
        }
    }
    /**
     * Set the key of the data that is the datasource for the main table in the report.
     *
     * @param tableData the key of the data that is the datasource for the main table in the report.
     */
    public final void setTableData(final String tableData) {
        this.tableDataKey = tableData;
    }
    public final String getTableDataKey() {
        return this.tableDataKey;
    }
    public final String getJdbcUrl() {
        return this.jdbcUrl;
    }
    public final void setJdbcUrl(final String jdbcUrl) {
        this.jdbcUrl = jdbcUrl;
    }
    public final String getJdbcUser() {
        return this.jdbcUser;
    }
    public final void setJdbcUser(final String jdbcUser) {
        this.jdbcUser = jdbcUser;
    }
    public final String getJdbcPassword() {
        return this.jdbcPassword;
    }
    public final void setJdbcPassword(final String jdbcPassword) {
        this.jdbcPassword = jdbcPassword;
    }
    /**
     * Get the processor graph to use for executing all the processors for the template.
     * The graph is built lazily on first access (double-checked locking on the volatile field).
     *
     * @return the processor graph.
     */
    public final ProcessorDependencyGraph getProcessorGraph() {
        if (this.processorGraph == null) {
            synchronized (this) {
                if (this.processorGraph == null) {
                    this.processorGraph = this.processorGraphFactory.build(this.processors);
                }
            }
        }
    return this.processorGraph;
    }
    /**
     * Set the named styles defined in the configuration for this.
     *
     * @param styles set the styles specific for this template.
     */
    public final void setStyles(final Map<String, String> styles) {
        this.styles = styles;
    }
    /**
     * Look for a style in the named styles provided in the configuration.
     * Template-level styles take precedence; if the name is not defined here the lookup
     * falls back to the configuration-wide styles.
     *
     * @param styleName the name of the style to look for.
     * @param mapContext information about the map projection, bounds, size, etc...
     */
    @SuppressWarnings("unchecked")
    @Nonnull
    public final Optional<Style> getStyle(final String styleName,
                                          final MapfishMapContext mapContext) {
        final String styleRef = this.styles.get(styleName);
        Optional<Style> style;
        if (styleRef != null) {
            style = (Optional<Style>) this.styleParser.loadStyle(getConfiguration(), this.httpRequestFactory, styleRef, mapContext);
        } else {
            style = Optional.absent();
        }
        return style.or(this.configuration.getStyle(styleName, mapContext));
    }
    @Override
    public final void setConfiguration(final Configuration configuration) {
        this.configuration = configuration;
    }
    public final Configuration getConfiguration() {
        return this.configuration;
    }
    /**
     * Validate this template: at most one table datasource may be configured, all
     * processors/attributes must validate, the processor graph must build, and if a
     * JDBC url is configured a connection attempt is made to verify it.
     */
    @Override
    public final void validate(final List<Throwable> validationErrors, final Configuration config) {
        this.accessAssertion.validate(validationErrors, config);
        int numberOfTableConfigurations = this.tableDataKey == null ? 0 : 1;
        numberOfTableConfigurations += this.jdbcUrl == null ? 0 : 1;
        if (numberOfTableConfigurations > 1) {
            // NOTE(review): message mentions 'iterValue' but only tableData/jdbcUrl are
            // counted here — confirm whether 'iterValue' is handled elsewhere.
            validationErrors.add(new ConfigurationException("Only one of 'iterValue' or 'tableData' or 'jdbcUrl' should be defined."));
        }
        for (Processor processor : this.processors) {
            processor.validate(validationErrors, config);
        }
        for (Attribute attribute : this.attributes.values()) {
            attribute.validate(validationErrors, config);
        }
        try {
            getProcessorGraph();
        } catch (Throwable t) {
            validationErrors.add(t);
        }
        if (getJdbcUrl() != null) {
            Connection connection = null;
            try {
                if (getJdbcUser() != null) {
                    connection = DriverManager.getConnection(getJdbcUrl(), getJdbcUser(), getJdbcPassword());
                } else {
                    connection = DriverManager.getConnection(getJdbcUrl());
                }
            } catch (SQLException e) {
                validationErrors.add(e);
            } finally {
                // Always release the probe connection; a failed close is itself a validation error.
                if (connection != null) {
                    try {
                        connection.close();
                    } catch (SQLException e) {
                        validationErrors.add(e);
                    }
                }
            }
        }
    }
    /**
     * Assert that the current user is allowed to access this template.
     *
     * @param name the template name used in the error message if access is denied.
     */
    final void assertAccessible(final String name) {
        this.accessAssertion.assertAccess("Template '" + name + "'", this);
    }
    /**
     * The roles required to access this template. If empty or not set then it is a <em>public</em> template. If there are
     * many roles then a user must have one of the roles in order to access the template.
     * <p></p>
     * The security (how authentication/authorization is done) is configured in the /WEB-INF/classes/mapfish-spring-security.xml
     * <p>
     * Any user without the required role will get an error when trying to access the template and the template will not
     * be visible in the capabilities requests.
     * </p>
     *
     * @param access the roles needed to access this
     */
    public final void setAccess(final List<String> access) {
        final RoleAccessAssertion assertion = new RoleAccessAssertion();
        assertion.setRequiredRoles(access);
        this.accessAssertion = assertion;
    }
    public final AccessAssertion getAccessAssertion() {
        return this.accessAssertion;
    }
}
| |
package org.apereo.cas.ticket;
import org.apereo.cas.authentication.Authentication;
import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.services.RegisteredServiceTestUtils;
import org.apereo.cas.ticket.expiration.NeverExpiresExpirationPolicy;
import org.apereo.cas.util.DefaultUniqueTicketIdGenerator;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import lombok.val;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;
import java.io.File;
import java.io.IOException;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import static org.junit.jupiter.api.Assertions.*;
/**
* @author Scott Battaglia
* @since 3.0.0
*/
public class TicketGrantingTicketImplTests {
private static final File TGT_JSON_FILE = new File(FileUtils.getTempDirectoryPath(), "tgt.json");
private static final String TGT_ID = "test";
private static final UniqueTicketIdGenerator ID_GENERATOR = new DefaultUniqueTicketIdGenerator();
private ObjectMapper mapper;
@BeforeEach
public void initialize() {
    // Build a JSON mapper that keeps dates as ISO strings in the original time zone.
    val builder = Jackson2ObjectMapperBuilder.json()
        .featuresToDisable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE)
        .featuresToDisable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
    mapper = builder.build();
    mapper.findAndRegisterModules();
}
@Test
public void verifySerializeToJson() throws IOException {
    // Round-trip a TGT through JSON and verify ticket and authentication survive.
    val authn = CoreAuthenticationTestUtils.getAuthentication();
    val policy = NeverExpiresExpirationPolicy.INSTANCE;
    val original = new TicketGrantingTicketImpl(TGT_ID, null, null, authn, policy);
    mapper.writeValue(TGT_JSON_FILE, original);
    val restored = mapper.readValue(TGT_JSON_FILE, TicketGrantingTicketImpl.class);
    assertEquals(original, restored);
    assertEquals(authn, restored.getAuthentication());
}
@Test
public void verifyEquals() {
    // Basic equals() contract checks: non-null, not equal to a foreign type, reflexive.
    val ticket = new TicketGrantingTicketImpl(TGT_ID, null, null,
        CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
    assertNotNull(ticket);
    assertNotEquals(ticket, new Object());
    assertEquals(ticket, ticket);
}
@Test
public void verifyNullAuthentication() {
    // A TGT must not be constructible without an authentication.
    val policy = NeverExpiresExpirationPolicy.INSTANCE;
    assertThrows(Exception.class,
        () -> new TicketGrantingTicketImpl(TGT_ID, null, null, null, policy));
}
@Test
public void verifyGetAuthentication() {
    // The ticket must expose the authentication it was created with, and its
    // toString() must equal its id.
    val authentication = CoreAuthenticationTestUtils.getAuthentication();
    val t = new TicketGrantingTicketImpl(TGT_ID, null, null, authentication, NeverExpiresExpirationPolicy.INSTANCE);
    // Fixed argument order: JUnit 5 assertEquals takes (expected, actual);
    // reversed arguments produce misleading failure messages.
    assertEquals(authentication, t.getAuthentication());
    assertEquals(t.getId(), t.toString());
}
@Test
public void verifyIsRootTrue() {
    // A TGT with no parent ticket is the root of its chain.
    val rootTicket = new TicketGrantingTicketImpl(TGT_ID, null, null,
        CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
    assertTrue(rootTicket.isRoot());
}
@Test
public void verifyIsRootFalse() {
    // A TGT chained onto a parent ticket is not a root.
    val parent = new TicketGrantingTicketImpl(TGT_ID, null, null,
        CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
    val child = new TicketGrantingTicketImpl(TGT_ID,
        CoreAuthenticationTestUtils.getService("gantor"), parent,
        CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
    assertFalse(child.isRoot());
}
@Test
public void verifyProperRootIsReturned() {
    // Build a three-deep chain and check getRoot() walks all the way up.
    val root = new TicketGrantingTicketImpl(TGT_ID, null, null,
        CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
    val middle = new TicketGrantingTicketImpl(TGT_ID,
        CoreAuthenticationTestUtils.getService("gantor"), root,
        CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
    val leaf = new TicketGrantingTicketImpl(TGT_ID,
        CoreAuthenticationTestUtils.getService("gantor"), middle,
        CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
    assertSame(root, leaf.getRoot());
}
@Test
public void verifyGetChainedPrincipalsWithOne() {
    // A root ticket's chained authentications contain exactly its own authentication.
    val authn = CoreAuthenticationTestUtils.getAuthentication();
    val ticket = new TicketGrantingTicketImpl(TGT_ID, null, null,
        authn, NeverExpiresExpirationPolicy.INSTANCE);
    val expected = new ArrayList<Authentication>();
    expected.add(authn);
    assertEquals(expected, ticket.getChainedAuthentications());
}
@Test
public void verifyCheckCreationTime() {
    // The creation time must fall inside a window bracketing the constructor call
    // (padded by 100 ns to absorb clock granularity).
    val authn = CoreAuthenticationTestUtils.getAuthentication();
    val lowerBound = ZonedDateTime.now(ZoneOffset.UTC).minusNanos(100);
    val ticket = new TicketGrantingTicketImpl(TGT_ID, null, null,
        authn, NeverExpiresExpirationPolicy.INSTANCE);
    val upperBound = ZonedDateTime.now(ZoneOffset.UTC).plusNanos(100);
    assertTrue(lowerBound.isBefore(ticket.getCreationTime()) && upperBound.isAfter(ticket.getCreationTime()));
}
@Test
public void verifyGetChainedPrincipalsWithTwo() {
    // With a parent ticket, the chain lists the child's authentication first,
    // then the parent's.
    val childAuthn = CoreAuthenticationTestUtils.getAuthentication();
    val parentAuthn = CoreAuthenticationTestUtils.getAuthentication("test1");
    val expected = new ArrayList<Authentication>();
    expected.add(childAuthn);
    expected.add(parentAuthn);
    val parent = new TicketGrantingTicketImpl(TGT_ID, null, null,
        parentAuthn, NeverExpiresExpirationPolicy.INSTANCE);
    val child = new TicketGrantingTicketImpl(TGT_ID,
        CoreAuthenticationTestUtils.getService("gantor"), parent,
        childAuthn, NeverExpiresExpirationPolicy.INSTANCE);
    assertEquals(expected, child.getChainedAuthentications());
}
@Test
public void verifyServiceTicketAsFromInitialCredentials() {
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
val s = t.grantServiceTicket(ID_GENERATOR
.getNewTicketId(ServiceTicket.PREFIX), RegisteredServiceTestUtils.getService(),
NeverExpiresExpirationPolicy.INSTANCE, false, true);
assertTrue(s.isFromNewLogin());
}
@Test
public void verifyServiceTicketAsFromNotInitialCredentials() {
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService(),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
val s = t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService(),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
assertFalse(s.isFromNewLogin());
}
@Test
public void verifyWebApplicationServices() {
val testService = RegisteredServiceTestUtils.getService(TGT_ID);
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
t.grantServiceTicket(ID_GENERATOR
.getNewTicketId(ServiceTicket.PREFIX), testService,
NeverExpiresExpirationPolicy.INSTANCE, false, true);
val services = t.getServices();
assertEquals(1, services.size());
val ticketId = services.keySet().iterator().next();
assertEquals(testService, services.get(ticketId));
t.removeAllServices();
val services2 = t.getServices();
assertEquals(0, services2.size());
}
@Test
public void verifyWebApplicationExpire() {
val testService = RegisteredServiceTestUtils.getService(TGT_ID);
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
t.grantServiceTicket(ID_GENERATOR
.getNewTicketId(ServiceTicket.PREFIX), testService,
NeverExpiresExpirationPolicy.INSTANCE, false, true);
assertFalse(t.isExpired());
t.markTicketExpired();
assertTrue(t.isExpired());
}
@Test
public void verifyDoubleGrantSameServiceTicketKeepMostRecentSession() {
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService(),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService(),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
assertEquals(1, t.getServices().size());
}
@Test
public void verifyDoubleGrantSimilarServiceTicketKeepMostRecentSession() {
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService("http://host.com?test"),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService("http://host.com;JSESSIONID=xxx"),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
assertEquals(1, t.getServices().size());
}
@Test
public void verifyDoubleGrantSimilarServiceWithPathTicketKeepMostRecentSession() {
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService("http://host.com/webapp1"),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService("http://host.com/webapp1?test=true"),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
assertEquals(1, t.getServices().size());
}
@Test
public void verifyDoubleGrantSameServiceTicketKeepAll() {
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService(),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService(),
NeverExpiresExpirationPolicy.INSTANCE,
false,
false);
assertEquals(2, t.getServices().size());
}
@Test
public void verifyDoubleGrantDifferentServiceTicket() {
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService(),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService2(),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
assertEquals(2, t.getServices().size());
}
@Test
public void verifyDoubleGrantDifferentServiceOnPathTicket() {
val t = new TicketGrantingTicketImpl(TGT_ID, null, null,
CoreAuthenticationTestUtils.getAuthentication(), NeverExpiresExpirationPolicy.INSTANCE);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService("http://host.com/webapp1"),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
t.grantServiceTicket(
ID_GENERATOR.getNewTicketId(ServiceTicket.PREFIX),
RegisteredServiceTestUtils.getService("http://host.com/webapp2"),
NeverExpiresExpirationPolicy.INSTANCE,
false,
true);
assertEquals(2, t.getServices().size());
}
}
| |
package com.birdchess.ai.board;
import java.awt.Color;
import java.util.ArrayList;
import java.util.HashSet;
import com.birdchess.common.Move;
import com.birdchess.common.Position;
/**
* Creates a new Pawn object that extends the behavior of the Piece class
*
* @author Charley Huang and Jeffrey Yang
* @version January 22, 2013
*/
public class Pawn extends Piece
{
    /**
     * Constructs a new pawn object given a colour
     * @param color the colour of the pawn to construct
     */
    public Pawn(Color color)
    {
        //Call the Piece class constructor
        super(color);
    }

    /**
     * Generates a list of legal moves this pawn can make given the board and
     * its position. White pawns advance toward row 0 and black pawns toward
     * the last row; both colours share the same logic through a row-direction
     * offset. (Previously the white and black branches were near-duplicates
     * and even used two different ways of testing "is the neighbour a pawn" -
     * getClass() comparison vs. typeOfPiece().equals("Pawn") - which is
     * unified here.)
     *
     * @param board the board that this object is on
     * @param pos the position of this object on the given board
     * @return a list of legal moves that can be made by this object
     *         given its board and position
     */
    public ArrayList<Move> generateLegalMoves(Board board, Position pos)
    {
        ArrayList<Move> legalMoves = new ArrayList<Move>();

        //Row offset of a forward move: white moves up (-1), black down (+1)
        int dir = (color == Color.WHITE) ? -1 : 1;
        //Rank from which a two-square advance is still allowed
        int startRow = (color == Color.WHITE) ? board.length() - 2 : 1;

        //En Passant captures against the pawns immediately to the left
        //and to the right (checked in that order, as before)
        addEnPassantIfPossible(board, pos, dir, -1, legalMoves);
        addEnPassantIfPossible(board, pos, dir, 1, legalMoves);

        //Ordinary diagonal captures: east first, then west, preserving the
        //original move ordering. Occupied squares are added unconditionally
        //here; allied targets are filtered out at the end.
        if (board.getPiece(pos.row + dir, pos.col + 1) != null)
        {
            legalMoves.add(new Move(pos,
                new Position(pos.row + dir, pos.col + 1), board));
        }
        if (board.getPiece(pos.row + dir, pos.col - 1) != null)
        {
            legalMoves.add(new Move(pos,
                new Position(pos.row + dir, pos.col - 1), board));
        }

        //Forward movement: one square when empty, plus the two-square
        //advance from the starting rank when both squares are empty
        if (board.getPiece(pos.row + dir, pos.col) == null)
        {
            legalMoves.add(new Move(pos,
                new Position(pos.row + dir, pos.col), board));
            if (board.getPiece(pos.row + 2 * dir, pos.col) == null
                && pos.row == startRow)
            {
                legalMoves.add(new Move(pos,
                    new Position(pos.row + 2 * dir, pos.col), board));
            }
        }

        //Remove all moves that would capture an allied piece
        for (int i = 0; i < legalMoves.size(); i++)
        {
            Move move = legalMoves.get(i);
            Piece piece = board.getPiece(move.target);
            if (piece != null && piece.getColor() ==
                board.getPiece(move.source).getColor())
            {
                legalMoves.remove(i);
                //Compensate for the element shift caused by the removal
                i--;
            }
        }
        return legalMoves;
    }

    /**
     * Adds an En Passant capture to the move list when the piece beside this
     * pawn (at the given column offset) is an enemy pawn that may be passed.
     *
     * @param board the board being examined
     * @param pos this pawn's position
     * @param dir forward row offset for this pawn's colour (-1 white, +1 black)
     * @param side -1 for the square to the left, +1 for the square to the right
     * @param legalMoves the move list to append to
     */
    private void addEnPassantIfPossible(Board board, Position pos, int dir,
        int side, ArrayList<Move> legalMoves)
    {
        Piece neighbour = board.getPiece(pos.row, pos.col + side);
        //instanceof also covers the null check; the colour test keeps this
        //restricted to enemy pawns
        if (neighbour instanceof Pawn && neighbour.color != color
            && ((Pawn) neighbour).canBePassed(board))
        {
            //The capturing pawn lands diagonally behind the captured pawn;
            //the nested Move clears the captured pawn's own square
            legalMoves.add(new Move(pos,
                new Position(pos.row + dir, pos.col + side),
                new Move(new Position(pos.row, pos.col + side), null, null),
                board));
        }
    }

    /**
     * Generates the name for this type of piece
     * @return a string representation for the name of this piece
     */
    public String typeOfPiece ()
    {
        return "Pawn";
    }

    /**
     * Generates the value for a pawn based on standard evaluations
     * @return the value of the piece
     */
    public int getPieceValue()
    {
        //A pawn is worth 100 points in board evaluation
        return 100;
    }

    /**
     * Check to see if the piece can be passed, or another pawn can perform
     * an En Passant capture
     *
     * @param board the board to check for En Passant
     * @return if the pawn can be passed
     */
    public boolean canBePassed (Board board)
    {
        //Find how many times the pawn has moved
        Integer timesMoved = board.piecesMovedCount.get(this);
        //Exactly one recorded move is taken to mean the pawn just made its
        //two-square advance. NOTE(review): a single one-square move also
        //matches; the original code had the same behaviour - confirm whether
        //Board distinguishes the two.
        return timesMoved != null && timesMoved == 1;
    }

    /**
     * Generates the ID of the Pawn to be used for the positioning scoring
     * table
     * @return the ID of the Pawn
     */
    public int getPieceID()
    {
        return Board.PAWN;
    }
}
| |
package org.sagebionetworks.bridge.udd.worker;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.sagebionetworks.bridge.json.DefaultObjectMapper;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.sagebionetworks.bridge.rest.exceptions.BridgeSDKException;
import org.sagebionetworks.bridge.rest.exceptions.EntityNotFoundException;
import org.sagebionetworks.bridge.rest.model.Phone;
import org.sagebionetworks.bridge.schema.UploadSchema;
import org.sagebionetworks.bridge.sqs.PollSqsWorkerBadRequestException;
import org.sagebionetworks.bridge.udd.accounts.AccountInfo;
import org.sagebionetworks.bridge.udd.accounts.BridgeHelper;
import org.sagebionetworks.bridge.udd.dynamodb.DynamoHelper;
import org.sagebionetworks.bridge.udd.dynamodb.StudyInfo;
import org.sagebionetworks.bridge.udd.helper.SesHelper;
import org.sagebionetworks.bridge.udd.helper.SnsHelper;
import org.sagebionetworks.bridge.udd.s3.PresignedUrlInfo;
import org.sagebionetworks.bridge.udd.synapse.SynapsePackager;
/**
 * Unit tests for {@code BridgeUddProcessor}. Each test feeds a parsed JSON request
 * into {@code callback.process(...)} and verifies which notification helper was
 * used: SES for accounts with an email address, SNS for accounts with a phone.
 * The static {@code mock(...)} fields are pure pass-through sentinels - they are
 * never stubbed, only identity-compared via Mockito's {@code same(...)}.
 */
@SuppressWarnings("unchecked")
public class BridgeUddProcessorTest {
    // mock objects - These are used only as passthroughs between the sub-components. So just create mocks instead
    // of instantiating all the fields.
    public static final StudyInfo MOCK_STUDY_INFO = mock(StudyInfo.class);
    public static final Map<String, UploadSchema> MOCK_SYNAPSE_TO_SCHEMA = ImmutableMap.of();
    public static final Set<String> MOCK_SURVEY_TABLE_ID_SET = ImmutableSet.of();
    public static final PresignedUrlInfo MOCK_PRESIGNED_URL_INFO = mock(PresignedUrlInfo.class);

    // simple strings for test
    public static final String EMAIL = "test@example.com";
    public static final String HEALTH_CODE = "test-health-code";
    public static final String USER_ID = "test-user-id";
    public static final String STUDY_ID = "test-study";

    // non-mock test objects - We break inside these objects to get data.
    public static final AccountInfo USER_ID_ACCOUNT_INFO = new AccountInfo.Builder().withEmailAddress(EMAIL)
            .withHealthCode(HEALTH_CODE).withUserId(USER_ID).build();
    public static final AccountInfo ACCOUNT_INFO_NO_HEALTH_CODE = new AccountInfo.Builder().withEmailAddress(EMAIL)
            .withUserId(USER_ID).build();

    // test request
    public static final String USER_ID_REQUEST_JSON_TEXT = "{\n" +
            "   \"studyId\":\"" + STUDY_ID +"\",\n" +
            "   \"userId\":\"" + USER_ID + "\",\n" +
            "   \"startDate\":\"2015-03-09\",\n" +
            "   \"endDate\":\"2015-03-31\"\n" +
            "}";
    // Same shape but with an unrecognized top-level key, used to trigger a parse failure.
    public static final String INVALID_JSON_TEXT = "{\n" +
            "   \"invalidType\":\"" + STUDY_ID +"\",\n" +
            "   \"userId\":\"" + USER_ID + "\",\n" +
            "   \"startDate\":\"2015-03-09\",\n" +
            "   \"endDate\":\"2015-03-31\"\n" +
            "}";

    // Parsed request fixtures, built once in generalSetup().
    private JsonNode userIdRequestJson;
    private JsonNode invalidRequestJson;

    // test members
    private BridgeUddProcessor callback;
    private BridgeHelper mockBridgeHelper;
    private SynapsePackager mockPackager;
    private SesHelper mockSesHelper;
    private SnsHelper mockSnsHelper;

    /** Parses the JSON request fixtures once for the whole test class. */
    @BeforeClass
    public void generalSetup() throws IOException{
        userIdRequestJson = DefaultObjectMapper.INSTANCE.readTree(USER_ID_REQUEST_JSON_TEXT);
        invalidRequestJson = DefaultObjectMapper.INSTANCE.readTree(INVALID_JSON_TEXT);
    }

    /** Builds a fresh processor wired to fresh mocks before every test method. */
    @BeforeMethod
    public void setup() throws Exception {
        // mock BridgeHelper
        mockBridgeHelper = mock(BridgeHelper.class);
        when(mockBridgeHelper.getAccountInfo(STUDY_ID, USER_ID)).thenReturn(USER_ID_ACCOUNT_INFO);

        // mock dynamo helper
        DynamoHelper mockDynamoHelper = mock(DynamoHelper.class);
        when(mockDynamoHelper.getStudy(STUDY_ID)).thenReturn(MOCK_STUDY_INFO);
        when(mockDynamoHelper.getSynapseTableIdsForStudy(STUDY_ID)).thenReturn(MOCK_SYNAPSE_TO_SCHEMA);
        when(mockDynamoHelper.getSynapseSurveyTablesForStudy(STUDY_ID)).thenReturn(MOCK_SURVEY_TABLE_ID_SET);

        // mock SES helper
        mockSesHelper = mock(SesHelper.class);

        // mock SNS helper
        mockSnsHelper = mock(SnsHelper.class);

        // mock Synapse packager
        mockPackager = mock(SynapsePackager.class);

        // set up callback
        callback = new BridgeUddProcessor();
        callback.setBridgeHelper(mockBridgeHelper);
        callback.setDynamoHelper(mockDynamoHelper);
        callback.setSesHelper(mockSesHelper);
        callback.setSnsHelper(mockSnsHelper);
        callback.setSynapsePackager(mockPackager);
    }

    /** When the packager produces no data, the "no data" email is sent. */
    @Test
    public void noData() throws Exception {
        mockPackagerWithResult(null);
        callback.process(userIdRequestJson);
        verifySesNoData();
        verify(mockBridgeHelper).getAccountInfo(STUDY_ID, USER_ID);
    }

    /** Happy path: the presigned URL is emailed to the email-based account. */
    @Test
    public void byUserId() throws Exception {
        mockPackagerWithResult(MOCK_PRESIGNED_URL_INFO);
        callback.process(userIdRequestJson);
        verifySesSendsData();
        verify(mockBridgeHelper).getAccountInfo(STUDY_ID, USER_ID);
    }

    /** An unknown account surfaces as a bad-request for the SQS worker. */
    @Test(expectedExceptions = PollSqsWorkerBadRequestException.class)
    public void byUserIdBadRequest() throws Exception {
        // Note: We need to manually instantiate the exception. Otherwise, mock does something funky and bypasses the
        // constructor that sets the status code.
        when(mockBridgeHelper.getAccountInfo(STUDY_ID, USER_ID)).thenThrow(new EntityNotFoundException(
                "text exception", null));
        callback.process(userIdRequestJson);
    }

    /** A generic Bridge error propagates as a RuntimeException (retryable). */
    @Test(expectedExceptions = RuntimeException.class)
    public void byUserIdBridgeInternalError() throws Exception {
        // Note: We need to manually instantiate the exception. Otherwise, mock does something funky and bypasses the
        // constructor that sets the status code.
        when(mockBridgeHelper.getAccountInfo(STUDY_ID, USER_ID)).thenThrow(new BridgeSDKException("test exception",
                null));
        callback.process(userIdRequestJson);
    }

    /** A request that fails to deserialize is rejected as a bad request. */
    @Test(expectedExceptions = PollSqsWorkerBadRequestException.class)
    public void malformedRequest() throws Exception {
        callback.process(invalidRequestJson);
    }

    /** Accounts with a phone (and data) are notified via SNS, never SES. */
    @Test
    public void userWithPhoneNumber() throws Exception {
        Phone phone = new Phone().regionCode("US").number("4082588569");
        AccountInfo accountInfo = new AccountInfo.Builder().withHealthCode(HEALTH_CODE).withUserId(USER_ID)
                .withPhone(phone).build();
        when(mockBridgeHelper.getAccountInfo(STUDY_ID, USER_ID)).thenReturn(accountInfo);

        mockPackagerWithResult(MOCK_PRESIGNED_URL_INFO);
        callback.process(userIdRequestJson);
        verify(mockSnsHelper).sendPresignedUrlToAccount(same(MOCK_STUDY_INFO), same(MOCK_PRESIGNED_URL_INFO),
                same(accountInfo));
        verifyNoMoreInteractions(mockSnsHelper);
        verifyNoMoreInteractions(mockSesHelper);
        verify(mockBridgeHelper).getAccountInfo(STUDY_ID, USER_ID);
    }

    /** Accounts with a phone and no data get the "no data" SNS message. */
    @Test
    public void userWithPhoneNumberNoData() throws Exception {
        Phone phone = new Phone().regionCode("US").number("4082588569");
        AccountInfo accountInfo = new AccountInfo.Builder().withHealthCode(HEALTH_CODE).withUserId(USER_ID)
                .withPhone(phone).build();
        when(mockBridgeHelper.getAccountInfo(STUDY_ID, USER_ID)).thenReturn(accountInfo);

        mockPackagerWithResult(null);
        callback.process(userIdRequestJson);
        verify(mockSnsHelper).sendNoDataMessageToAccount(same(MOCK_STUDY_INFO), same(accountInfo));
        verifyNoMoreInteractions(mockSnsHelper);
        verifyNoMoreInteractions(mockSesHelper);
        verify(mockBridgeHelper).getAccountInfo(STUDY_ID, USER_ID);
    }

    /** Stubs the packager to return the given result (null means "no data"). */
    private void mockPackagerWithResult(PresignedUrlInfo presignedUrlInfo) throws Exception {
        when(mockPackager.packageSynapseData(same(MOCK_SYNAPSE_TO_SCHEMA), eq(HEALTH_CODE),
                any(BridgeUddRequest.class), same(MOCK_SURVEY_TABLE_ID_SET))).thenReturn(presignedUrlInfo);
    }

    /** Asserts that only the SES "no data" message was sent. */
    private void verifySesNoData() {
        verify(mockSesHelper).sendNoDataMessageToAccount(same(MOCK_STUDY_INFO), same(USER_ID_ACCOUNT_INFO));
        verifyNoMoreInteractions(mockSesHelper);
    }

    /** Asserts that only the SES presigned-URL message was sent. */
    private void verifySesSendsData() {
        verify(mockSesHelper).sendPresignedUrlToAccount(same(MOCK_STUDY_INFO), same(MOCK_PRESIGNED_URL_INFO),
                same(USER_ID_ACCOUNT_INFO));
        verifyNoMoreInteractions(mockSesHelper);
    }
}
| |
/**
Copyright 2013 James McClure
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.xenqtt.message;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.lang.reflect.Field;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.nio.channels.UnresolvedAddressException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import net.xenqtt.message.AbstractMqttChannel;
import net.xenqtt.message.BlockingCommand;
import net.xenqtt.message.ConnAckMessage;
import net.xenqtt.message.ConnectMessage;
import net.xenqtt.message.ConnectReturnCode;
import net.xenqtt.message.DisconnectMessage;
import net.xenqtt.message.IdentifiableMqttMessage;
import net.xenqtt.message.MessageType;
import net.xenqtt.message.MqttChannel;
import net.xenqtt.message.MqttMessage;
import net.xenqtt.message.PingReqMessage;
import net.xenqtt.message.PingRespMessage;
import net.xenqtt.message.PubAckMessage;
import net.xenqtt.message.PubCompMessage;
import net.xenqtt.message.PubMessage;
import net.xenqtt.message.PubRecMessage;
import net.xenqtt.message.PubRelMessage;
import net.xenqtt.message.QoS;
import net.xenqtt.message.SubAckMessage;
import net.xenqtt.message.SubscribeMessage;
import net.xenqtt.message.UnsubAckMessage;
import net.xenqtt.message.UnsubscribeMessage;
import net.xenqtt.mock.MockMessageHandler;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
public class AbstractMqttChannelTest extends MqttChannelTestBase<MqttChannelTestBase<?, ?>.TestChannel, MqttChannelTestBase<?, ?>.TestChannel> {
@Mock BlockingCommand<MqttMessage> blockingCommand;
	@Before
	public void before() {
		// Populate the @Mock-annotated fields (e.g. blockingCommand) before each test.
		MockitoAnnotations.initMocks(this);
	}
	/**
	 * Creates the client-side test channel connected to localhost on the shared test port.
	 *
	 * @see net.xenqtt.message.MqttChannelTestBase#newClientChannel(net.xenqtt.message.BlockingCommand)
	 */
	@Override
	TestChannel newClientChannel(BlockingCommand<?> connectionCompleteCommand) throws Exception {
		// 10000 matches the value used by newBrokerChannel below — presumably a
		// timeout/resend interval in millis; confirm against TestChannel's ctor.
		return new TestChannel("localhost", port, clientHandler, selector, 10000, connectionCompleteCommand);
	}
	/**
	 * Creates the broker-side test channel wrapping an already-accepted socket.
	 *
	 * @see net.xenqtt.message.MqttChannelTestBase#newBrokerChannel(java.nio.channels.SocketChannel)
	 */
	@Override
	TestChannel newBrokerChannel(SocketChannel brokerSocketChannel) throws Exception {
		return new TestChannel(brokerSocketChannel, brokerHandler, selector, 10000);
	}
@Test
public void testClose_WithCause() throws Exception {
RuntimeException e = new RuntimeException();
establishConnection();
clientChannel.close(e);
assertSame(e, clientHandler.lastChannelClosedCause());
}
	@SuppressWarnings("unchecked")
	@Test
	public void testCancelBlockingCommands() throws Exception {
		// The channel's connection-complete command is the @Mock blockingCommand.
		clientChannel = newClientChannel(blockingCommand);
		// Reflectively inject a mock command into every private slot/queue of
		// AbstractMqttChannel so cancelBlockingCommands() has work everywhere.
		BlockingCommand<MqttMessage> connAckReceivedCommand = mock(BlockingCommand.class);
		setField(clientChannel, "connAckReceivedCommand", connAckReceivedCommand);
		// The single message currently being written.
		MqttMessage sendMessageInProgress = new MqttMessage(MessageType.PUBLISH, 0);
		sendMessageInProgress.blockingCommand = mock(BlockingCommand.class);
		setField(clientChannel, "sendMessageInProgress", sendMessageInProgress);
		// Messages queued but not yet written.
		List<MqttMessage> writesPendingMessages = new ArrayList<MqttMessage>();
		writesPendingMessages.add(new MqttMessage(MessageType.PUBLISH, 0));
		writesPendingMessages.add(new MqttMessage(MessageType.PUBLISH, 0));
		writesPendingMessages.add(new MqttMessage(MessageType.PUBLISH, 0));
		writesPendingMessages.get(0).blockingCommand = mock(BlockingCommand.class);
		writesPendingMessages.get(1).blockingCommand = mock(BlockingCommand.class);
		writesPendingMessages.get(2).blockingCommand = mock(BlockingCommand.class);
		addMessages(clientChannel, "writesPending", writesPendingMessages);
		// Messages queued for resend. Anonymous subclasses are used here,
		// presumably because IdentifiableMqttMessage cannot be instantiated
		// directly — confirm against its declaration.
		List<MqttMessage> messagesToResend = new ArrayList<MqttMessage>();
		messagesToResend.add(new IdentifiableMqttMessage(MessageType.PUBLISH, 0) {
		});
		messagesToResend.add(new IdentifiableMqttMessage(MessageType.PUBLISH, 0) {
		});
		messagesToResend.add(new IdentifiableMqttMessage(MessageType.PUBLISH, 0) {
		});
		messagesToResend.get(0).blockingCommand = mock(BlockingCommand.class);
		messagesToResend.get(1).blockingCommand = mock(BlockingCommand.class);
		messagesToResend.get(2).blockingCommand = mock(BlockingCommand.class);
		addMessages(clientChannel, "messagesToResend", messagesToResend);
		// Sent-but-unacknowledged messages, keyed by message id.
		Map<Integer, IdentifiableMqttMessage> inFlightMessages = new HashMap<Integer, IdentifiableMqttMessage>();
		PubMessage pubMessage = new PubMessage(QoS.AT_LEAST_ONCE, false, "a/b/c", 0, new byte[] { 97, 98, 99 });
		pubMessage.blockingCommand = mock(BlockingCommand.class);
		inFlightMessages.put(Integer.valueOf(0), pubMessage);
		pubMessage = new PubMessage(QoS.AT_LEAST_ONCE, false, "a/b/c", 1, new byte[] { 97, 98, 99 });
		pubMessage.blockingCommand = mock(BlockingCommand.class);
		inFlightMessages.put(Integer.valueOf(1), pubMessage);
		pubMessage = new PubMessage(QoS.AT_LEAST_ONCE, false, "a/b/c", 2, new byte[] { 97, 98, 99 });
		pubMessage.blockingCommand = mock(BlockingCommand.class);
		inFlightMessages.put(Integer.valueOf(2), pubMessage);
		setInFlightMessages(clientChannel, inFlightMessages);
		// Cancel everything, then verify each injected command was cancelled exactly once.
		clientChannel.cancelBlockingCommands();
		verify(blockingCommand).cancel();
		verify(connAckReceivedCommand).cancel();
		verify(sendMessageInProgress.blockingCommand).cancel();
		for (MqttMessage writePendingMessage : writesPendingMessages) {
			verify(writePendingMessage.blockingCommand).cancel();
		}
		for (MqttMessage messageToResend : messagesToResend) {
			verify(messageToResend.blockingCommand).cancel();
		}
		for (IdentifiableMqttMessage inFlightMessage : inFlightMessages.values()) {
			verify(inFlightMessage.blockingCommand).cancel();
		}
		// cancel() must be the ONLY interaction on every command.
		verifyNoMoreInteractions(allOf(writesPendingMessages, messagesToResend, inFlightMessages, blockingCommand, connAckReceivedCommand,
				sendMessageInProgress.blockingCommand));
	}
	@SuppressWarnings("unchecked")
	@Test
	public void testCancelBlockingCommands_SomeMessagesDoNotHaveABlockingCommand() throws Exception {
		// Same shape as testCancelBlockingCommands, but some slots/messages
		// deliberately carry a null command: cancelBlockingCommands() must
		// skip them without throwing.
		clientChannel = newClientChannel(null);
		setField(clientChannel, "connAckReceivedCommand", null);
		MqttMessage sendMessageInProgress = new MqttMessage(MessageType.PUBLISH, 0);
		sendMessageInProgress.blockingCommand = null;
		setField(clientChannel, "sendMessageInProgress", sendMessageInProgress);
		// Pending writes: the middle one has no command.
		List<MqttMessage> writesPendingMessages = new ArrayList<MqttMessage>();
		writesPendingMessages.add(new MqttMessage(MessageType.PUBLISH, 0));
		writesPendingMessages.add(new MqttMessage(MessageType.PUBLISH, 0));
		writesPendingMessages.add(new MqttMessage(MessageType.PUBLISH, 0));
		writesPendingMessages.get(0).blockingCommand = mock(BlockingCommand.class);
		writesPendingMessages.get(1).blockingCommand = null;
		writesPendingMessages.get(2).blockingCommand = mock(BlockingCommand.class);
		addMessages(clientChannel, "writesPending", writesPendingMessages);
		// Resend queue: the last one has no command.
		List<MqttMessage> messagesToResend = new ArrayList<MqttMessage>();
		messagesToResend.add(new IdentifiableMqttMessage(MessageType.PUBLISH, 0) {
		});
		messagesToResend.add(new IdentifiableMqttMessage(MessageType.PUBLISH, 0) {
		});
		messagesToResend.add(new IdentifiableMqttMessage(MessageType.PUBLISH, 0) {
		});
		messagesToResend.get(0).blockingCommand = mock(BlockingCommand.class);
		messagesToResend.get(1).blockingCommand = mock(BlockingCommand.class);
		messagesToResend.get(2).blockingCommand = null;
		addMessages(clientChannel, "messagesToResend", messagesToResend);
		// In-flight messages: the first one has no command.
		Map<Integer, IdentifiableMqttMessage> inFlightMessages = new HashMap<Integer, IdentifiableMqttMessage>();
		PubMessage pubMessage = new PubMessage(QoS.AT_LEAST_ONCE, false, "a/b/c", 0, new byte[] { 97, 98, 99 });
		pubMessage.blockingCommand = null;
		inFlightMessages.put(Integer.valueOf(0), pubMessage);
		pubMessage = new PubMessage(QoS.AT_LEAST_ONCE, false, "a/b/c", 1, new byte[] { 97, 98, 99 });
		pubMessage.blockingCommand = mock(BlockingCommand.class);
		inFlightMessages.put(Integer.valueOf(1), pubMessage);
		pubMessage = new PubMessage(QoS.AT_LEAST_ONCE, false, "a/b/c", 2, new byte[] { 97, 98, 99 });
		pubMessage.blockingCommand = mock(BlockingCommand.class);
		inFlightMessages.put(Integer.valueOf(2), pubMessage);
		setInFlightMessages(clientChannel, inFlightMessages);
		clientChannel.cancelBlockingCommands();
		// Only non-null commands may be (and must be) cancelled.
		for (MqttMessage writePendingMessage : writesPendingMessages) {
			if (writePendingMessage.blockingCommand != null) {
				verify(writePendingMessage.blockingCommand).cancel();
			}
		}
		for (MqttMessage messageToResend : messagesToResend) {
			if (messageToResend.blockingCommand != null) {
				verify(messageToResend.blockingCommand).cancel();
			}
		}
		for (IdentifiableMqttMessage inFlightMessage : inFlightMessages.values()) {
			if (inFlightMessage.blockingCommand != null) {
				verify(inFlightMessage.blockingCommand).cancel();
			}
		}
		// The class-level @Mock was never handed to this channel, so it must be untouched.
		verifyZeroInteractions(blockingCommand);
		verifyNoMoreInteractions(allOf(writesPendingMessages, messagesToResend, inFlightMessages));
	}
@SuppressWarnings("unchecked")
@Test
public void testCancelBlockingCommands_CancelsInFlightMessagesBeforeAcked() throws Exception {
establishConnection();
BlockingCommand<MqttMessage> command1 = mock(BlockingCommand.class);
BlockingCommand<MqttMessage> command2 = mock(BlockingCommand.class);
assertTrue(clientChannel.send(new UnsubscribeMessage(1, new String[] { "foo" }), command1));
assertTrue(clientChannel.send(new UnsubscribeMessage(2, new String[] { "foo" }), command2));
readWrite(0, 2);
assertEquals(2, clientChannel.getUnsentMessages().size());
clientChannel.cancelBlockingCommands();
verify(command1, timeout(1000)).cancel();
verify(command2, timeout(1000)).cancel();
verifyNoMoreInteractions(command1, command2);
}
/**
 * Reflectively overwrites the named {@link AbstractMqttChannel} field on the given channel.
 */
@SuppressWarnings("rawtypes")
private void setField(net.xenqtt.message.MqttChannelTestBase.TestChannel clientChannel, String fieldName, Object value) throws Exception {
    Field target = AbstractMqttChannel.class.getDeclaredField(fieldName);
    // the fields under test are private so accessibility must be forced
    target.setAccessible(true);
    target.set(clientChannel, value);
}
/**
 * Reflectively fetches the named message collection from the channel and appends the given
 * messages to it, asserting that each add succeeds.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
private void addMessages(net.xenqtt.message.MqttChannelTestBase.TestChannel clientChannel, String messagesField, List<MqttMessage> messages)
        throws Exception {
    Field field = AbstractMqttChannel.class.getDeclaredField(messagesField);
    field.setAccessible(true);
    // the channel's internal collection is mutated directly; no copy is made
    Collection<MqttMessage> writesPending = (Collection<MqttMessage>) field.get(clientChannel);
    for (MqttMessage message : messages) {
        assertTrue(writesPending.add(message));
    }
}
/**
 * Reflectively replaces the channel's in-flight message map contents with the given messages.
 * The channel's own map instance is kept (cleared and refilled) so any internal references to
 * it remain valid.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
private void setInFlightMessages(net.xenqtt.message.MqttChannelTestBase.TestChannel clientChannel, Map<Integer, IdentifiableMqttMessage> messages)
        throws Exception {
    Field field = AbstractMqttChannel.class.getDeclaredField("inFlightMessages");
    field.setAccessible(true);
    Map<Integer, IdentifiableMqttMessage> inFlightMessages = (Map<Integer, IdentifiableMqttMessage>) field.get(clientChannel);
    inFlightMessages.clear();
    // putAll replaces the manual entry-by-entry copy; behavior is identical
    inFlightMessages.putAll(messages);
}
/**
 * Collects every non-null {@link BlockingCommand} attached to the given messages, plus any
 * explicitly supplied commands, into a single array suitable for
 * {@code verifyNoMoreInteractions(Object...)}.
 */
private Object[] allOf(List<MqttMessage> writesPendingMessages, List<MqttMessage> messagesToResend, Map<Integer, IdentifiableMqttMessage> inFlightMessages,
        BlockingCommand<?>... others) {
    List<BlockingCommand<?>> blockingCommands = new ArrayList<BlockingCommand<?>>();
    addBlockingCommands(writesPendingMessages, blockingCommands);
    addBlockingCommands(messagesToResend, blockingCommands);
    addBlockingCommands(inFlightMessages.values(), blockingCommands);
    for (BlockingCommand<?> other : others) {
        if (other != null) {
            blockingCommands.add(other);
        }
    }
    return blockingCommands.toArray(new BlockingCommand<?>[0]);
}

/** Appends the non-null blocking command of each message to {@code target}. */
private void addBlockingCommands(Collection<? extends MqttMessage> messages, List<BlockingCommand<?>> target) {
    for (MqttMessage message : messages) {
        if (message.blockingCommand != null) {
            target.add(message.blockingCommand);
        }
    }
}
/**
 * A freshly created channel reports no unsent messages.
 */
@Test
public void testGetUnsentMessages_NoUnsentMessages() throws Exception {
    clientChannel = newClientChannel(null);
    assertTrue(clientChannel.getUnsentMessages().isEmpty());
}
/**
 * Unsent messages must cover ids 1-2 (sent, awaiting ack), 3 (send in progress), and 4-5
 * (queued behind the in-progress send) — five distinct ids in total.
 */
@Test
public void testGetUnsentMessages_UnsentMessages() throws Exception {
    establishConnection();
    assertTrue(clientChannel.send(new UnsubscribeMessage(1, new String[] { "foo" }), null));
    assertTrue(clientChannel.send(new UnsubscribeMessage(2, new String[] { "foo" }), null));
    readWrite(0, 2);
    // put a value in sendMessageInProgress directly because if we call send(...) it will write directly
    Field field = AbstractMqttChannel.class.getDeclaredField("sendMessageInProgress");
    field.setAccessible(true);
    field.set(clientChannel, new UnsubscribeMessage(3, new String[] { "foo" }));
    assertTrue(clientChannel.send(new UnsubscribeMessage(4, new String[] { "foo" }), null));
    assertTrue(clientChannel.send(new UnsubscribeMessage(5, new String[] { "foo" }), null));
    List<MqttMessage> unsent = clientChannel.getUnsentMessages();
    Set<Integer> ids = new HashSet<Integer>();
    for (MqttMessage m : unsent) {
        ids.add(((IdentifiableMqttMessage) m).getMessageId());
    }
    assertEquals(5, unsent.size());
    assertEquals(5, ids.size());
    for (int i = 1; i <= 5; i++) {
        assertTrue(ids.contains(i));
    }
}
/**
 * A successful connection opens both ends with no connect still pending and notifies each
 * handler exactly once.
 */
@Test
public void testCtors() throws Exception {
    establishConnection();
    assertFalse(clientChannel.isConnectionPending());
    assertTrue(clientChannel.isOpen());
    clientHandler.assertChannelOpenedCount(1);
    assertFalse(brokerChannel.isConnectionPending());
    assertTrue(brokerChannel.isOpen());
    brokerHandler.assertChannelOpenedCount(1);
    closeConnection();
}
/**
 * An unresolvable host name fails construction and reports the close (never an open) to the
 * handler with the thrown exception as the cause.
 */
@Test
public void testCtorInvalidHost() throws Exception {
    try {
        new TestChannel("foo", 123, clientHandler, selector, 10000);
        fail("Expected exception");
    } catch (UnresolvedAddressException e) {
        clientHandler.assertChannelOpenedCount(0);
        clientHandler.assertChannelClosedCount(1);
        clientHandler.assertLastChannelClosedCause(e);
    }
}
/**
 * A null underlying channel fails construction with NPE and reports the close (never an open)
 * to the handler with that NPE as the cause.
 */
@Test
public void testCtorInvalidConnection() throws Exception {
    try {
        new TestChannel(null, brokerHandler, null, 10000);
        fail("Expected exception");
    } catch (NullPointerException e) {
        brokerHandler.assertChannelOpenedCount(0);
        brokerHandler.assertChannelClosedCount(1);
        brokerHandler.assertLastChannelClosedCause(e);
    }
}
/** Registering an already-registered channel is an error. */
@Test(expected = IllegalStateException.class)
public void testRegister_ChannelAlreadyRegistered() throws Exception {
    establishConnection();
    clientChannel.register(null, clientHandler);
}
/** Registering with a null selector after deregistering fails and never attaches the handler. */
@Test
public void testRegister_Fails() throws Exception {
    establishConnection();
    clientChannel.deregister();
    assertFalse(clientChannel.register(null, clientHandler));
    clientHandler.assertChannelAttachedCount(0);
}
/** Re-registering on a new selector adds a key to it and attaches the new handler once. */
@Test
public void testRegister_Succeeds() throws Exception {
    establishConnection();
    Selector newSelector = Selector.open();
    assertEquals(0, newSelector.keys().size());
    clientHandler = new MockMessageHandler();
    clientChannel.deregister();
    assertTrue(clientChannel.register(newSelector, clientHandler));
    assertEquals(1, newSelector.keys().size());
    clientHandler.assertChannelAttachedCount(1);
    closeConnection();
}
/**
 * Deregistering detaches the handler once and cancels exactly one selector key.
 */
@Test
public void testDeregister() throws Exception {
    establishConnection();
    int originalCancelledKeyCount = countCancelledKeys();
    clientChannel.deregister();
    clientHandler.assertChannelDetachedCount(1);
    // exactly one additional key must have been cancelled by deregister()
    assertEquals(originalCancelledKeyCount + 1, countCancelledKeys());
}

/** Counts the selector's keys that have been cancelled (i.e. are no longer valid). */
private int countCancelledKeys() {
    int cancelledKeyCount = 0;
    for (SelectionKey key : selector.keys()) {
        if (!key.isValid()) {
            cancelledKeyCount++;
        }
    }
    return cancelledKeyCount;
}
/** House keeping must not propagate an exception thrown by the keep-alive hook. */
@Test
public void testHouseKeeping_KeepAlive_ThrowsException() throws Exception {
    clientChannel = new TestChannel("localhost", port, clientHandler, selector, 10);
    clientChannel.exceptionToThrow = new RuntimeException("crap");
    clientChannel.houseKeeping(now + 100);
}
/**
 * After a qos0 publish round-trips, house keeping returns the time until the next keep alive
 * and only the receiving side records a last-received timestamp.
 */
@Test
public void testHouseKeeping_KeepAlive() throws Exception {
    clientChannel = new TestChannel("localhost", port, clientHandler, selector, 10);
    establishConnection();
    PubMessage msg = new PubMessage(QoS.AT_MOST_ONCE, false, "foo", 12, new byte[] { 1, 2, 3 });
    assertTrue(clientChannel.send(msg, null));
    readWrite(0, 1);
    assertEquals(25000, clientChannel.houseKeeping(now + 1000));
    assertEquals(25000, brokerChannel.houseKeeping(now + 1000));
    // only the broker received anything, so only its lastReceived advances
    assertTrue(brokerChannel.lastReceived >= now && brokerChannel.lastReceived < now + 100);
    assertEquals(0, clientChannel.lastReceived);
}
/** A qos0 publish is delivered once, not flagged duplicate, and never held in flight. */
@Test
public void testSend_qos0() throws Exception {
    clientChannel = new TestChannel("localhost", port, clientHandler, selector, 15000);
    establishConnection();
    PubMessage msg = new PubMessage(QoS.AT_MOST_ONCE, false, "foo", 12, new byte[] { 1, 2, 3 });
    assertTrue(clientChannel.send(msg, null));
    readWrite(0, 1);
    brokerHandler.assertMessages(msg);
    assertFalse(brokerHandler.message(0).isDuplicate());
    assertEquals(0, clientChannel.inFlightMessageCount());
}
/**
 * A qos1 publish stays in flight until acked: house keeping before the resend interval leaves
 * it alone, after the interval it is resent flagged as a duplicate, and a PUBACK finally clears
 * it from both channels.
 */
@Test
public void testHouseKeeping_ResendMessage_qos1() throws Exception {
    clientChannel = new TestChannel("localhost", port, clientHandler, selector, 15000);
    establishConnection();
    PubMessage msg = new PubMessage(QoS.AT_LEAST_ONCE, false, "foo", 12, new byte[] { 1, 2, 3 });
    assertTrue(clientChannel.send(msg, null));
    readWrite(0, 1);
    brokerHandler.assertMessages(msg);
    assertFalse(brokerHandler.message(0).isDuplicate());
    assertEquals(1, clientChannel.inFlightMessageCount());
    // the time hasn't elapsed yet so we should get the time until next resend of the message
    assertEquals(15000, clientChannel.houseKeeping(now));
    assertEquals(1, clientChannel.inFlightMessageCount());
    // now the time has elapsed so we resend and get the time until the keep alive
    assertEquals(25000, clientChannel.houseKeeping(now + 15000));
    readWrite(0, 1);
    brokerHandler.assertMessageCount(1);
    // the resend carries the same message id but is marked as a duplicate
    assertEquals(msg.getMessageId(), ((IdentifiableMqttMessage) brokerHandler.message(0)).getMessageId());
    assertTrue(brokerHandler.message(0).isDuplicate());
    assertEquals(1, clientChannel.inFlightMessageCount());
    assertTrue(brokerChannel.send(new PubAckMessage(12), null));
    readWrite(1, 0);
    assertEquals(0, clientChannel.inFlightMessageCount());
    assertEquals(0, brokerChannel.inFlightMessageCount());
}
/** Pausing read on a deregistered channel (invalid key) must not throw. */
@Test
public void testPauseRead_InvalidKey() throws Exception {
    // testing to be sure no exception is thrown
    clientChannel = new TestChannel("localhost", port, clientHandler, selector, 15000);
    clientChannel.deregister();
    clientChannel.pauseRead();
}
/** Resuming read on a deregistered channel (invalid key) must not throw. */
@Test
public void testResumeRead_InvalidKey() throws Exception {
    // testing to be sure no exception is thrown
    clientChannel = new TestChannel("localhost", port, clientHandler, selector, 15000);
    clientChannel.deregister();
    clientChannel.resumeRead();
}
/** Resuming a channel that was never paused is a no-op; messages still flow. */
@Test
public void testResumeRead_NotPaused() throws Exception {
    establishConnection();
    clientChannel.resumeRead();
    SubAckMessage msg = new SubAckMessage(1, new QoS[] {});
    brokerChannel.send(msg, blockingCommand);
    assertTrue(readWrite(1, 0, 1000));
    closeConnection();
}
/** While reads are paused a message sent by the broker is not delivered within the timeout. */
@Test
public void testPauseRead() throws Exception {
    establishConnection();
    clientChannel.pauseRead();
    SubAckMessage msg = new SubAckMessage(1, new QoS[] {});
    brokerChannel.send(msg, blockingCommand);
    assertFalse(readWrite(1, 0, 1000));
    closeConnection();
}
/** A message blocked by pauseRead() gets delivered once resumeRead() is called. */
@Test
public void testPauseAndResumeRead() throws Exception {
    establishConnection();
    clientChannel.pauseRead();
    SubAckMessage msg = new SubAckMessage(1, new QoS[] {});
    brokerChannel.send(msg, blockingCommand);
    assertFalse(readWrite(1, 0, 1000));
    clientChannel.resumeRead();
    msg = new SubAckMessage(1, new QoS[] {});
    brokerChannel.send(msg, blockingCommand);
    assertTrue(readWrite(1, 0, 1000));
    closeConnection();
}
/**
 * Drives the selector by hand: first the broker's write is flushed, then the client's key
 * becomes readable — but because read is paused before read(now) is called, no message reaches
 * the client handler.
 */
@Test
public void testRead_ReadPaused() throws Exception {
    establishConnection();
    SubAckMessage msg = new SubAckMessage(1, new QoS[] {});
    brokerChannel.send(msg, blockingCommand);
    // first selected key: the broker side is writable; flush the pending message
    selector.select();
    assertEquals(1, selector.selectedKeys().size());
    Iterator<SelectionKey> iter = selector.selectedKeys().iterator();
    SelectionKey key = iter.next();
    MqttChannel channel = (MqttChannel) key.attachment();
    assertTrue(key.isWritable());
    channel.write(now);
    iter.remove();
    // second selected key: the client side is readable now that data arrived
    selector.select();
    assertEquals(1, selector.selectedKeys().size());
    iter = selector.selectedKeys().iterator();
    key = iter.next();
    channel = (MqttChannel) key.attachment();
    assertTrue(key.isReadable());
    iter.remove();
    // pausing before read(...) means the handler must see nothing
    channel.pauseRead();
    channel.read(now);
    assertEquals(0, clientHandler.messageCount());
    closeConnection();
}
/** Reading a closed channel returns false. */
@Test
public void testReadFromClosedConnection() throws Exception {
    establishConnection();
    clientChannel.close();
    assertFalse(clientChannel.read(now));
}
/** Writing to a closed channel returns false and reports exactly one close to the handler. */
@Test
public void testWriteToClosedConnection() throws Exception {
    establishConnection();
    clientChannel.close();
    // put a value in sendMessageInProgress directly because if we call send(...) it will write directly
    Field field = AbstractMqttChannel.class.getDeclaredField("sendMessageInProgress");
    field.setAccessible(true);
    field.set(clientChannel, new PingReqMessage());
    assertFalse(clientChannel.write(now));
    clientHandler.assertChannelClosedCount(1);
}
/** Sending on a closed channel returns false. */
@Test
public void testSendToClosedConnection() throws Exception {
    establishConnection();
    clientChannel.close();
    assertFalse(clientChannel.send(new PingReqMessage(), null));
}
/**
 * A raw socket connection (no CONNECT/CONNACK exchange yet) leaves both ends open but not
 * MQTT-connected, with no connected/disconnected callbacks and no ping interval.
 */
@Test
public void testEstabishConnection_NonBlocking() throws Exception {
    establishConnection();
    assertTrue(clientChannel.isOpen());
    assertFalse(clientChannel.isConnected());
    assertFalse(clientChannel.connectedCalled);
    assertEquals(0, clientChannel.pingIntervalMillis);
    assertFalse(brokerChannel.isConnected());
    assertFalse(brokerChannel.connectedCalled);
    assertEquals(0, brokerChannel.pingIntervalMillis);
    assertFalse(clientChannel.disconnectedCalled);
    assertFalse(brokerChannel.disconnectedCalled);
    closeConnection();
    // never MQTT-connected, so closing must not fire disconnected callbacks
    assertFalse(clientChannel.disconnectedCalled);
    assertFalse(brokerChannel.disconnectedCalled);
}
/**
 * Same as the non-blocking variant but with a blocking command supplied: the command completes
 * once the socket connection is established.
 */
@Test
public void testEstabishConnection_Blocking() throws Exception {
    establishConnection(blockingCommand);
    verify(blockingCommand).complete();
    assertTrue(clientChannel.isOpen());
    assertFalse(clientChannel.isConnected());
    assertFalse(clientChannel.connectedCalled);
    assertEquals(0, clientChannel.pingIntervalMillis);
    assertFalse(brokerChannel.isConnected());
    assertFalse(brokerChannel.connectedCalled);
    assertEquals(0, brokerChannel.pingIntervalMillis);
    assertFalse(clientChannel.disconnectedCalled);
    assertFalse(brokerChannel.disconnectedCalled);
    closeConnection();
    assertFalse(clientChannel.disconnectedCalled);
    assertFalse(brokerChannel.disconnectedCalled);
}
/** Sending DISCONNECT closes both ends without invoking the disconnected callbacks. */
@Test
public void testReadWriteSend_Disconnect() throws Exception {
    establishConnection();
    DisconnectMessage discMsg = new DisconnectMessage();
    assertTrue(clientChannel.send(discMsg, null));
    readWrite(0, 1);
    brokerHandler.assertMessages(discMsg);
    assertFalse(clientChannel.isConnected());
    assertFalse(clientChannel.disconnectedCalled);
    assertFalse(brokerChannel.isConnected());
    assertFalse(brokerChannel.disconnectedCalled);
    assertFalse(clientChannel.isOpen());
    assertFalse(brokerChannel.isOpen());
}
/**
 * A CONNACK with a non-ACCEPTED return code closes both channels without ever reporting them
 * as connected.
 */
@Test
public void testReadWriteSend_ConnAckWithoutAccept_NonBlocking() throws Exception {
    establishConnection();
    ConnectMessage connMsg = new ConnectMessage("abc", false, 123);
    ConnAckMessage ackMsg = new ConnAckMessage(ConnectReturnCode.BAD_CREDENTIALS);
    assertTrue(clientChannel.send(connMsg, null));
    readWrite(0, 1);
    brokerHandler.assertMessages(connMsg);
    assertTrue(brokerChannel.send(ackMsg, null));
    readWrite(1, 0);
    clientHandler.assertMessages(ackMsg);
    assertFalse(clientChannel.isConnected());
    assertFalse(clientChannel.connectedCalled);
    assertFalse(clientChannel.disconnectedCalled);
    assertFalse(clientChannel.isOpen());
    assertFalse(brokerChannel.isConnected());
    assertFalse(brokerChannel.connectedCalled);
    assertFalse(brokerChannel.disconnectedCalled);
    assertFalse(brokerChannel.isOpen());
}
/**
 * Blocking variant of the refused-CONNACK case: the blocking command must not be touched until
 * the ack arrives, then gets the ack as its result and completes.
 */
@Test
public void testReadWriteSend_ConnAckWithoutAccept_Blocking() throws Exception {
    establishConnection();
    ConnectMessage connMsg = new ConnectMessage("abc", false, 123);
    ConnAckMessage ackMsg = new ConnAckMessage(ConnectReturnCode.BAD_CREDENTIALS);
    assertTrue(clientChannel.send(connMsg, blockingCommand));
    readWrite(0, 1);
    brokerHandler.assertMessages(connMsg);
    verifyZeroInteractions(blockingCommand);
    assertTrue(brokerChannel.send(ackMsg, null));
    readWrite(1, 0);
    clientHandler.assertMessages(ackMsg);
    verify(blockingCommand).complete();
    verify(blockingCommand).setResult(ackMsg);
    assertFalse(clientChannel.isConnected());
    assertFalse(clientChannel.connectedCalled);
    assertFalse(clientChannel.disconnectedCalled);
    assertFalse(clientChannel.isOpen());
    assertFalse(brokerChannel.isConnected());
    assertFalse(brokerChannel.connectedCalled);
    assertFalse(brokerChannel.disconnectedCalled);
    assertFalse(brokerChannel.isOpen());
}
/**
 * An ACCEPTED CONNACK marks both ends connected, fires the connected callbacks with the
 * negotiated ping interval (keep-alive seconds * 1000), and closing then fires disconnected.
 */
@Test
public void testReadWriteSend_ConnAckWithAccept_NonBlocking() throws Exception {
    establishConnection();
    ConnectMessage connMsg = new ConnectMessage("abc", false, 123);
    ConnAckMessage ackMsg = new ConnAckMessage(ConnectReturnCode.ACCEPTED);
    assertTrue(clientChannel.send(connMsg, null));
    readWrite(0, 1);
    brokerHandler.assertMessages(connMsg);
    assertTrue(brokerChannel.send(ackMsg, null));
    readWrite(1, 0);
    clientHandler.assertMessages(ackMsg);
    assertTrue(clientChannel.isOpen());
    assertTrue(clientChannel.isConnected());
    assertTrue(clientChannel.connectedCalled);
    assertEquals(123000, clientChannel.pingIntervalMillis);
    assertTrue(brokerChannel.isConnected());
    assertTrue(brokerChannel.connectedCalled);
    assertEquals(123000, brokerChannel.pingIntervalMillis);
    assertFalse(clientChannel.disconnectedCalled);
    assertFalse(brokerChannel.disconnectedCalled);
    closeConnection();
    assertTrue(clientChannel.disconnectedCalled);
    assertTrue(brokerChannel.disconnectedCalled);
}
/**
 * Blocking variant of the accepted-CONNACK case: the blocking command is untouched until the
 * ack arrives, then receives the ack as its result and completes.
 */
@Test
public void testReadWriteSend_ConnAckWithAccept_Blocking() throws Exception {
    establishConnection();
    ConnectMessage connMsg = new ConnectMessage("abc", false, 123);
    ConnAckMessage ackMsg = new ConnAckMessage(ConnectReturnCode.ACCEPTED);
    assertTrue(clientChannel.send(connMsg, blockingCommand));
    readWrite(0, 1);
    brokerHandler.assertMessages(connMsg);
    verifyZeroInteractions(blockingCommand);
    assertTrue(brokerChannel.send(ackMsg, null));
    readWrite(1, 0);
    clientHandler.assertMessages(ackMsg);
    verify(blockingCommand).complete();
    verify(blockingCommand).setResult(ackMsg);
    assertTrue(clientChannel.isOpen());
    assertTrue(clientChannel.isConnected());
    assertTrue(clientChannel.connectedCalled);
    assertEquals(123000, clientChannel.pingIntervalMillis);
    assertTrue(brokerChannel.isConnected());
    assertTrue(brokerChannel.connectedCalled);
    assertEquals(123000, brokerChannel.pingIntervalMillis);
    assertFalse(clientChannel.disconnectedCalled);
    assertFalse(brokerChannel.disconnectedCalled);
    closeConnection();
    assertTrue(clientChannel.disconnectedCalled);
    assertTrue(brokerChannel.disconnectedCalled);
}
/** PINGREQ/PINGRESP round-trip delivers both messages intact. */
@Test
public void testReadWriteSend_PingReqResp() throws Exception {
    establishConnection();
    PingReqMessage pingReqMsg = new PingReqMessage();
    PingRespMessage pingRespMsg = new PingRespMessage();
    assertTrue(clientChannel.send(pingReqMsg, null));
    readWrite(0, 1);
    brokerHandler.assertMessages(pingReqMsg);
    assertTrue(brokerChannel.send(pingRespMsg, null));
    readWrite(1, 0);
    clientHandler.assertMessages(pingRespMsg);
    clientChannel.close();
    brokerChannel.close();
}
/** A handler exception while processing PINGREQ must not stop later messages from arriving. */
@Test
public void testReadWriteSend_PingReq_ThrowsException() throws Exception {
    establishConnection();
    brokerHandler.setException(new RuntimeException("crap"));
    PingReqMessage pingReqMsg = new PingReqMessage();
    PubAckMessage msg2 = new PubAckMessage(1);
    assertTrue(clientChannel.send(pingReqMsg, null));
    assertTrue(clientChannel.send(msg2, null));
    readWrite(0, 1);
    brokerHandler.assertMessages(msg2);
    closeConnection();
}
/** A handler exception while processing PINGRESP must not stop later messages from arriving. */
@Test
public void testReadWriteSend_PingResp_ThrowsException() throws Exception {
    establishConnection();
    brokerHandler.setException(new RuntimeException("crap"));
    PingRespMessage pingRespMsg = new PingRespMessage();
    PubAckMessage msg2 = new PubAckMessage(1);
    assertTrue(clientChannel.send(pingRespMsg, null));
    assertTrue(clientChannel.send(msg2, null));
    readWrite(0, 1);
    brokerHandler.assertMessages(msg2);
    closeConnection();
}
/** send(...) before the socket connection completes returns false; after it, traffic flows. */
@Test
public void testSend_NotConnectedYet() throws Exception {
    PingReqMessage msg = new PingReqMessage();
    clientChannel = new TestChannel("localhost", port, clientHandler, selector, 10000);
    assertFalse(clientChannel.send(msg, null));
    establishConnection();
    readWrite(0, 1);
    closeConnection();
}
// Each of the following tests exercises one message/ack pair (or an un-acked message when the
// ack argument is null) through the shared non-blocking or blocking round-trip helper.
@Test
public void testReadWriteSend_PublishAndAck_Qos1_NonBlocking() throws Exception {
    PubMessage msg = new PubMessage(QoS.AT_LEAST_ONCE, false, "foo", 1, new byte[] {});
    PubAckMessage ack = new PubAckMessage(1);
    doTestReadWriteSend_NonBlocking(msg, ack);
}
@Test
public void testReadWriteSend_PublishAndAck_Qos1_Blocking() throws Exception {
    PubMessage msg = new PubMessage(QoS.AT_LEAST_ONCE, false, "foo", 1, new byte[] {});
    PubAckMessage ack = new PubAckMessage(1);
    doTestReadWriteSend_Blocking(msg, ack);
}
// qos0 publishes are never acked
@Test
public void testReadWriteSend_Publish_Qos0_NonBlocking() throws Exception {
    PubMessage msg = new PubMessage(QoS.AT_MOST_ONCE, false, "foo", 0, new byte[] {});
    doTestReadWriteSend_NonBlocking(msg, null);
}
@Test
public void testReadWriteSend_Publish_Qos0_Blocking() throws Exception {
    PubMessage msg = new PubMessage(QoS.AT_MOST_ONCE, false, "foo", 0, new byte[] {});
    doTestReadWriteSend_Blocking(msg, null);
}
@Test
public void testReadWriteSend_PublishAndPubRec_NonBlocking() throws Exception {
    PubMessage msg = new PubMessage(QoS.AT_LEAST_ONCE, false, "foo", 1, new byte[] {});
    PubRecMessage ack = new PubRecMessage(1);
    doTestReadWriteSend_NonBlocking(msg, ack);
}
@Test
public void testReadWriteSend_PublishAndPubRec_Blocking() throws Exception {
    PubMessage msg = new PubMessage(QoS.AT_LEAST_ONCE, false, "foo", 1, new byte[] {});
    PubRecMessage ack = new PubRecMessage(1);
    doTestReadWriteSend_Blocking(msg, ack);
}
@Test
public void testReadWriteSend_PubRelAndPubComp_NonBlocking() throws Exception {
    PubRelMessage msg = new PubRelMessage(1);
    PubCompMessage ack = new PubCompMessage(1);
    doTestReadWriteSend_NonBlocking(msg, ack);
}
@Test
public void testReadWriteSend_PubRelAndPubComp_Blocking() throws Exception {
    PubRelMessage msg = new PubRelMessage(1);
    PubCompMessage ack = new PubCompMessage(1);
    doTestReadWriteSend_Blocking(msg, ack);
}
@Test
public void testReadWriteSend_SubscribeAndAck_NonBlocking() throws Exception {
    SubscribeMessage msg = new SubscribeMessage(1, new String[] {}, new QoS[] {});
    SubAckMessage ack = new SubAckMessage(1, new QoS[] {});
    doTestReadWriteSend_NonBlocking(msg, ack);
}
@Test
public void testReadWriteSend_SubscribeAndAck_Blocking() throws Exception {
    SubscribeMessage msg = new SubscribeMessage(1, new String[] {}, new QoS[] {});
    SubAckMessage ack = new SubAckMessage(1, new QoS[] {});
    doTestReadWriteSend_Blocking(msg, ack);
}
@Test
public void testReadWriteSend_UnsubscribeAndAck_NonBlocking() throws Exception {
    UnsubscribeMessage msg = new UnsubscribeMessage(1, new String[] {});
    UnsubAckMessage ack = new UnsubAckMessage(1);
    doTestReadWriteSend_NonBlocking(msg, ack);
}
@Test
public void testReadWriteSend_UnsubscribeAndAck_Blocking() throws Exception {
    UnsubscribeMessage msg = new UnsubscribeMessage(1, new String[] {});
    UnsubAckMessage ack = new UnsubAckMessage(1);
    doTestReadWriteSend_Blocking(msg, ack);
}
// ping and disconnect messages have no corresponding ack message
@Test
public void testReadWriteSend_PingReq_NonBlocking() throws Exception {
    PingReqMessage msg = new PingReqMessage();
    doTestReadWriteSend_NonBlocking(msg, null);
}
@Test
public void testReadWriteSend_PingReq_Blocking() throws Exception {
    PingReqMessage msg = new PingReqMessage();
    doTestReadWriteSend_Blocking(msg, null);
}
@Test
public void testReadWriteSend_PingResp_NonBlocking() throws Exception {
    PingRespMessage msg = new PingRespMessage();
    doTestReadWriteSend_NonBlocking(msg, null);
}
@Test
public void testReadWriteSend_PingResp_Blocking() throws Exception {
    PingRespMessage msg = new PingRespMessage();
    doTestReadWriteSend_Blocking(msg, null);
}
@Test
public void testReadWriteSend_Disconnect_NonBlocking() throws Exception {
    DisconnectMessage msg = new DisconnectMessage();
    doTestReadWriteSend_NonBlocking(msg, null);
}
@Test
public void testReadWriteSend_Disconnect_Blocking() throws Exception {
    DisconnectMessage msg = new DisconnectMessage();
    doTestReadWriteSend_Blocking(msg, null);
}
/** A handler exception on one message must not prevent delivery of the next. */
@Test
public void testReadWriteSend_HandlerThrowsException() throws Exception {
    establishConnection();
    brokerHandler.setException(new RuntimeException());
    UnsubAckMessage msg1 = new UnsubAckMessage(1);
    PingReqMessage msg2 = new PingReqMessage();
    assertTrue(clientChannel.send(msg1, null));
    assertTrue(clientChannel.send(msg2, null));
    readWrite(0, 1);
    brokerHandler.assertMessages(msg2);
    closeConnection();
}
/** The broker side eventually observes a client-initiated close via read(...) returning false. */
@Test
public void testReadWriteSend_ClientClosesConnection() throws Exception {
    establishConnection();
    clientChannel.close();
    // close detection may take a few reads; bounded retry avoids an infinite loop
    for (int i = 0; i < 100; i++) {
        if (!brokerChannel.read(now)) {
            return;
        }
    }
    fail("Expected the channel to close");
}
/** The client side eventually observes a broker-initiated close via read(...) returning false. */
@Test
public void testReadWriteSend_BrokerClosesConnection() throws Exception {
    establishConnection();
    brokerChannel.close();
    for (int i = 0; i < 100; i++) {
        if (!clientChannel.read(now)) {
            return;
        }
    }
    fail("Expected the channel to close");
}
/** Writing into a peer-closed socket fails, closes the channel, and notifies the handler once. */
@Test
public void testReadWriteSend_IOException() throws Exception {
    establishConnection();
    brokerChannel.close();
    // flood with pings so the write buffer fills and the broken pipe is actually hit
    for (int i = 0; i < 1000; i++) {
        clientChannel.send(new PingReqMessage(), null);
    }
    assertFalse(clientChannel.write(now));
    assertFalse(clientChannel.isOpen());
    clientHandler.assertChannelClosedCount(1);
    closeConnection();
}
/** A message with remaining length 0 (PINGREQ) round-trips and counts as one sent message. */
@Test
public void testReadWriteSend_RemainingLengthZero() throws Exception {
    establishConnection();
    PingReqMessage msg = new PingReqMessage();
    assertTrue(clientChannel.send(msg, null));
    readWrite(0, 1);
    assertEquals(1, stats.getMessagesSent());
    closeConnection();
}
/** A message with remaining length 2 (PUBACK) round-trips and counts as one sent message. */
@Test
public void testReadWriteSend_RemainingLength2() throws Exception {
    establishConnection();
    PubAckMessage msg = new PubAckMessage(123);
    assertTrue(clientChannel.send(msg, null));
    readWrite(0, 1);
    assertEquals(1, stats.getMessagesSent());
    closeConnection();
}
// the following spans exercise the 1-, 2-, and 3-byte remaining-length encoding boundaries
@Test
public void testReadWriteSend_RemainingLength126to129() throws Exception {
    doTestReadWriteSend(126, 4);
}
@Test
public void testReadWriteSend_RemainingLength16382to16385() throws Exception {
    doTestReadWriteSend(16382, 4);
}
@Test
public void testReadWriteSend_RemainingLength2097150to2097155() throws Exception {
    doTestReadWriteSend(2097150, 6);
}
/** Bulk sanity check: many mid-sized messages in one batch. */
@Test
public void testReadWriteSend_LotsOfMessages() throws Exception {
    doTestReadWriteSend(1000, 5000);
}
/**
 * Sends {@code messageCount} qos1 publishes whose remaining lengths start at
 * {@code firstRemainingLength} and increase by one, then verifies the broker received them all
 * and the sent-message stat matches.
 */
private void doTestReadWriteSend(int firstRemainingLength, int messageCount) throws Exception {
    List<PubMessage> messagesSent = new ArrayList<PubMessage>();
    establishConnection();
    for (int remainingLength = firstRemainingLength; remainingLength < firstRemainingLength + messageCount; remainingLength++) {
        // payload is sized so the encoded remaining length equals the loop value
        // (7 = non-payload bytes for this topic/message id — see PubMessage encoding)
        int payloadLength = remainingLength - 7;
        byte[] payload = new byte[payloadLength];
        Arrays.fill(payload, (byte) messageCount);
        PubMessage msg = new PubMessage(QoS.AT_LEAST_ONCE, false, "abc", 123, payload);
        assertTrue(clientChannel.send(msg, null));
        messagesSent.add(msg);
    }
    readWrite(0, messageCount);
    brokerHandler.assertMessages(messagesSent);
    assertEquals(messagesSent.size(), stats.getMessagesSent());
    closeConnection();
}
/**
 * Round-trips {@code msg} without a blocking command and, when {@code ack} is non-null, sends
 * the ack back and verifies it was delivered. Also checks the sent-message stat and, for
 * PUBACK acks, the ack-latency stats.
 */
private void doTestReadWriteSend_NonBlocking(MqttMessage msg, MqttMessage ack) throws Exception {
    establishConnection();
    int messagesSent = 1;
    clientChannel.send(msg, null);
    readWrite(0, 1);
    brokerHandler.assertMessages(msg);
    if (ack != null) {
        messagesSent++;
        brokerChannel.send(ack, null);
        readWrite(1, 0);
        clientHandler.assertMessages(ack);
        if (ack instanceof PubAckMessage) {
            // NOTE(review): latency is asserted to be exactly 10ms — presumably the test base
            // class fakes the clock; confirm against MqttChannelTestBase
            assertTrue(stats.getMinAckLatencyMillis() == 10);
            assertTrue(stats.getMaxAckLatencyMillis() == 10);
            assertTrue(stats.getAverageAckLatencyMillis() == 10.0);
        }
    }
    assertEquals(messagesSent, stats.getMessagesSent());
    closeConnection();
}
/**
 * Blocking variant of {@link #doTestReadWriteSend_NonBlocking}: the mocked blocking command
 * must be untouched until the ack arrives, receive the ack via setResult when there is one, and
 * complete exactly once in either case.
 */
private void doTestReadWriteSend_Blocking(MqttMessage msg, MqttMessage ack) throws Exception {
    establishConnection();
    int messagesSent = 1;
    clientChannel.send(msg, blockingCommand);
    readWrite(0, 1);
    brokerHandler.assertMessages(msg);
    if (ack != null) {
        messagesSent++;
        // nothing may happen to the command before the ack comes back
        verifyZeroInteractions(blockingCommand);
        brokerChannel.send(ack, null);
        readWrite(1, 0);
        clientHandler.assertMessages(ack);
        verify(blockingCommand).setResult(ack);
        if (ack instanceof PubAckMessage) {
            // NOTE(review): exact 10ms latency presumably comes from a faked clock in the test
            // base class; confirm against MqttChannelTestBase
            assertTrue(stats.getMinAckLatencyMillis() == 10);
            assertTrue(stats.getMaxAckLatencyMillis() == 10);
            assertTrue(stats.getAverageAckLatencyMillis() == 10.0);
        }
    }
    verify(blockingCommand).complete();
    assertEquals(messagesSent, stats.getMessagesSent());
    closeConnection();
}
}
| |
package com.horcu.apps.balln.widget.floatinglabel.edittext;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Typeface;
import android.os.Parcelable;
import android.text.Editable;
import android.text.InputType;
import android.text.TextWatcher;
import android.text.method.KeyListener;
import android.util.AttributeSet;
import android.widget.EditText;
import android.widget.TextView;
import com.horcu.apps.balln.R;
import com.horcu.apps.balln.widget.floatinglabel.FloatingLabelTextViewBase;
import com.horcu.apps.balln.widget.floatinglabel.anim.LabelAnimator;
import com.horcu.apps.balln.widget.floatinglabel.anim.TextViewLabelAnimator;
/**
* An implementation of the floating label input widget for Android's EditText
* <p/>
* Created by Vincent Mimoun-Prat @ MarvinLabs, 28/08/2014.
*/
public class FloatingLabelEditText extends FloatingLabelTextViewBase<EditText> {
public interface EditTextListener {
public void onTextChanged(FloatingLabelEditText source, String text);
}
/**
* The listener to notify when the selection changes
*/
protected EditTextListener editTextListener;
// =============================================================================================
// Lifecycle
// ==
public FloatingLabelEditText(Context context) {
super(context);
}
public FloatingLabelEditText(Context context, AttributeSet attrs) {
super(context, attrs);
}
public FloatingLabelEditText(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
// =============================================================================================
// Overridden methods
// ==
@Override
protected void afterLayoutInflated(Context context, AttributeSet attrs, int defStyle) {
super.afterLayoutInflated(context, attrs, defStyle);
final int inputType;
if (attrs == null) {
inputType = InputType.TYPE_CLASS_TEXT;
} else {
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.FloatingLabelEditText, defStyle, 0);
inputType = a.getInt(R.styleable.FloatingLabelEditText_android_inputType, InputType.TYPE_CLASS_TEXT);
a.recycle();
}
final EditText inputWidget = getInputWidget();
inputWidget.setInputType(inputType);
inputWidget.addTextChangedListener(new EditTextWatcher());
}
@Override
protected int getDefaultLayoutId() {
return R.layout.flw_widget_floating_label_item_chooser;
}
@Override
protected void restoreInputWidgetState(Parcelable inputWidgetState) {
getInputWidget().onRestoreInstanceState(inputWidgetState);
}
@Override
protected Parcelable saveInputWidgetInstanceState() {
return getInputWidget().onSaveInstanceState();
}
@Override
protected void setInitialWidgetState() {
setLabelAnchored(isEditTextEmpty());
}
@Override
protected LabelAnimator<EditText> getDefaultLabelAnimator() {
return new TextViewLabelAnimator<EditText>();
}
// =============================================================================================
// Delegate methods for the input widget
// ==
/**
* Delegate method for the input widget
*/
public Editable getInputWidgetText() {
return getInputWidget().getText();
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetText(CharSequence text, TextView.BufferType type) {
getInputWidget().setText(text, type);
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetTextSize(float size) {
getInputWidget().setTextSize(size);
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetTextSize(int unit, float size) {
getInputWidget().setTextSize(unit, size);
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetKeyListener(KeyListener input) {
getInputWidget().setKeyListener(input);
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetTypeface(Typeface tf, int style) {
getInputWidget().setTypeface(tf, style);
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetTextColor(int color) {
getInputWidget().setTextColor(color);
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetTextColor(ColorStateList colors) {
getInputWidget().setTextColor(colors);
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetText(CharSequence text) {
getInputWidget().setText(text);
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetText(int resid) {
getInputWidget().setText(resid);
}
/**
* Delegate method for the input widget
*/
public void setInputWidgetInputType(int type) {
getInputWidget().setInputType(type);
}
/**
* Delegate method for the input widget
*/
public void addInputWidgetTextChangedListener(TextWatcher watcher) {
getInputWidget().addTextChangedListener(watcher);
}
/**
* Delegate method for the input widget
*/
public void removeInputWidgetTextChangedListener(TextWatcher watcher) {
getInputWidget().removeTextChangedListener(watcher);
}
// =============================================================================================
// Other methods
// ==
public EditTextListener getEditTextListener() {
return editTextListener;
}
public void setEditTextListener(EditTextListener editTextListener) {
this.editTextListener = editTextListener;
}
/**
 * Invoked whenever the content of the input widget changes.
 * <p>
 * Unless float-on-focus is enabled, the floating label tracks the content:
 * it is anchored while the text is empty and floated otherwise. Any
 * registered {@link EditTextListener} is notified afterwards.
 *
 * @param s the new text
 */
protected void onTextChanged(String s) {
    if (!isFloatOnFocusEnabled()) {
        // Label position follows emptiness of the content.
        if (s.isEmpty()) {
            anchorLabel();
        } else {
            floatLabel();
        }
    }
    // Forward the change to the externally registered listener, if any.
    if (editTextListener != null) {
        editTextListener.onTextChanged(this, s);
    }
}
/**
 * @return {@code true} when the wrapped input widget currently holds no text
 */
private boolean isEditTextEmpty() {
    final String content = getInputWidget().getText().toString();
    return content.length() == 0;
}
/**
 * TextWatcher that forwards every content change of the EditText to
 * {@link FloatingLabelEditText#onTextChanged(String)}, which updates the
 * floating-label state when the content switches between empty and non-empty.
 */
private class EditTextWatcher implements TextWatcher {
    @Override
    public void afterTextChanged(Editable s) {
        final String updated = s.toString();
        FloatingLabelEditText.this.onTextChanged(updated);
    }

    @Override
    public void beforeTextChanged(CharSequence s, int start, int count, int after) {
        // Intentionally empty: only the final text matters.
    }

    @Override
    public void onTextChanged(CharSequence s, int start, int before, int count) {
        // Intentionally empty: handled in afterTextChanged.
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.authentication.token;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.UUID;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.plugins.tree.TreeProvider;
import org.apache.jackrabbit.oak.plugins.tree.TreeUtil;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.Validator;
import org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.authentication.token.TokenConstants;
import org.apache.jackrabbit.oak.spi.security.authentication.token.TokenInfo;
import org.apache.jackrabbit.oak.spi.security.user.util.PasswordUtil;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.util.ISO8601;
import org.jetbrains.annotations.NotNull;
import org.junit.Before;
import org.junit.Test;
import static org.apache.jackrabbit.oak.api.Type.DATE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for the token commit validator: verifies that the reserved token
 * properties (key, expiry), the reserved token node types and the expected
 * token-tree location below a user's home cannot be created or tampered with
 * through regular content commits. Commits issued with the TokenProvider's
 * commit attributes ({@code CommitMarker.asCommitAttributes()}) are the
 * privileged path and are exempt from most of these restrictions.
 * <p>
 * The numeric values asserted on {@code CommitFailedException.getCode()} are
 * the validator's error codes; each test pins the code it expects for the
 * specific violation it provokes.
 */
public class TokenValidatorTest extends AbstractTokenTest {

    // All tokens in these tests are created for the default test user.
    private String userId;

    @Override
    @Before
    public void before() throws Exception {
        super.before();
        userId = getTestUser().getID();
    }

    /**
     * @return the current time as an ISO8601 date string, suitable for the
     * token expiry property (JCR DATE).
     */
    private static String getDateValue() {
        Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(new Date().getTime());
        return ISO8601.format(calendar);
    }

    // Reserved token-key property on a non-token node must be rejected even
    // for a privileged (marker) commit -> code 60.
    @Test
    public void testCreateReservedKeyProperty() throws Exception {
        Tree tree = TreeUtil.addChild(root.getTree("/"), "testNode", JcrConstants.NT_UNSTRUCTURED);
        try {
            tree.setProperty(TOKEN_ATTRIBUTE_KEY, "anyValue");
            root.commit(CommitMarker.asCommitAttributes());
            fail("The reserved token key property must not used with other node types.");
        } catch (CommitFailedException e) {
            assertEquals(60, e.getCode());
        } finally {
            tree.remove();
            if (root.hasPendingChanges()) {
                root.commit();
            }
        }
    }

    // Same violation via an unprivileged commit -> code 63.
    @Test
    public void testCreateReservedKeyProperty2() throws Exception {
        Tree tree = TreeUtil.addChild(root.getTree("/"), "testNode", JcrConstants.NT_UNSTRUCTURED);
        try {
            tree.setProperty(TOKEN_ATTRIBUTE_KEY, "anyValue");
            root.commit();
            fail("The reserved token key property must only be created by the TokenProvider.");
        } catch (CommitFailedException e) {
            assertEquals(63, e.getCode());
        } finally {
            tree.remove();
            if (root.hasPendingChanges()) {
                root.commit();
            }
        }
    }

    // The key of an existing token is immutable, even for marker commits.
    @Test
    public void testChangingTokenKey() throws Exception {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        Tree tokenTree = getTokenTree(info);
        try {
            tokenTree.setProperty(TOKEN_ATTRIBUTE_KEY, PasswordUtil.buildPasswordHash("anotherValue"));
            root.commit(CommitMarker.asCommitAttributes());
            fail("The token key must never be modified.");
        } catch (CommitFailedException e) {
            assertEquals(61, e.getCode());
        }
    }

    // A token key must be stored as a password hash, never in plaintext.
    @Test
    public void testPlaintextTokenKey() {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        Tree tokenTree = getTokenTree(info);
        try {
            tokenTree.setProperty(TOKEN_ATTRIBUTE_KEY, "anotherValue");
            root.commit(CommitMarker.asCommitAttributes());
            fail("The token key must not be plaintext.");
        } catch (CommitFailedException e) {
            assertEquals(66, e.getCode());
        }
    }

    // Expiry may not be changed by a regular commit ...
    @Test
    public void testManuallyModifyExpirationDate() {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        Tree tokenTree = getTokenTree(info);
        try {
            tokenTree.setProperty(TOKEN_ATTRIBUTE_EXPIRY, getDateValue(), DATE);
            root.commit();
            fail("The token expiry must not manually be changed");
        } catch (CommitFailedException e) {
            assertEquals(63, e.getCode());
        }
    }

    // ... but a marker commit (i.e. the TokenProvider itself) may reset it.
    @Test
    public void testModifyExpirationDate() throws Exception {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        Tree tokenTree = getTokenTree(info);
        tokenTree.setProperty(TOKEN_ATTRIBUTE_EXPIRY, getDateValue(), DATE);
        root.commit(CommitMarker.asCommitAttributes());
    }

    // Token nodes outside the user-home hierarchy are rejected (code 64).
    @Test
    public void testCreateTokenAtInvalidLocationBelowTestNode() throws Exception {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        assertNotNull(tokenProvider.getTokenInfo(info.getToken()));
        Tree tree = TreeUtil.addChild(root.getTree("/"), "testNode", JcrConstants.NT_UNSTRUCTURED);
        try {
            replaceTokenTree(info, tree, TOKEN_NT_NAME);
            root.commit(CommitMarker.asCommitAttributes());
            fail("Creating a new token not at '/testNode' must fail.");
        } catch (CommitFailedException e) {
            assertEquals(64, e.getCode());
        } finally {
            tree.remove();
            root.commit(CommitMarker.asCommitAttributes());
        }
    }

    // A token node below the user but outside the '.tokens' parent -> code 65.
    @Test
    public void testCreateTokenAtInvalidLocationInsideUser() throws Exception {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        assertNotNull(tokenProvider.getTokenInfo(info.getToken()));
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        Tree tree = TreeUtil.addChild(userTree, "testNode", JcrConstants.NT_UNSTRUCTURED);
        try {
            replaceTokenTree(info, tree, TOKEN_NT_NAME);
            root.commit(CommitMarker.asCommitAttributes());
            fail("Creating a new token '" + tree.getPath() + "' must fail.");
        } catch (CommitFailedException e) {
            assertEquals(65, e.getCode());
        } finally {
            tree.remove();
            root.commit(CommitMarker.asCommitAttributes());
        }
    }

    // A token node nested one level too deep below '.tokens' -> code 65.
    @Test
    public void testCreateTokenAtInvalidLocationInsideUser2() throws Exception {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        assertNotNull(tokenProvider.getTokenInfo(info.getToken()));
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        Tree tree = TreeUtil.addChild(userTree, TOKENS_NODE_NAME, TOKENS_NT_NAME);
        try {
            tree = TreeUtil.addChild(tree, "invalid", JcrConstants.NT_UNSTRUCTURED);
            replaceTokenTree(info, tree, TOKEN_NT_NAME);
            root.commit(CommitMarker.asCommitAttributes());
            fail("Creating a new token '" + tree.getPath() + "' must fail.");
        } catch (CommitFailedException e) {
            assertEquals(65, e.getCode());
        } finally {
            tree.remove();
            root.commit(CommitMarker.asCommitAttributes());
        }
    }

    // Even at the correct location, an unprivileged commit cannot create a
    // token node -> code 63.
    @Test
    public void testManuallyCreateToken() throws Exception {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        assertNotNull(tokenProvider.getTokenInfo(info.getToken()));
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        try {
            // create a valid token node using the test root
            replaceTokenTree(info, userTree.getChild(TOKENS_NODE_NAME), TOKEN_NT_NAME);
            root.commit();
            fail("Manually creating a token node must fail.");
        } catch (CommitFailedException e) {
            assertEquals(63, e.getCode());
        } finally {
            root.refresh();
            root.commit();
        }
    }

    // A token node must be of type rep:Token -> code 60 otherwise.
    @Test
    public void testCreateTokenWithInvalidNodeType() throws Exception {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        assertNotNull(tokenProvider.getTokenInfo(info.getToken()));
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        Tree t = null;
        try {
            t = replaceTokenTree(info, userTree.getChild(TOKENS_NODE_NAME), JcrConstants.NT_UNSTRUCTURED);
            root.commit(CommitMarker.asCommitAttributes());
            fail("The token node must be of type rep:Token.");
        } catch (CommitFailedException e) {
            assertEquals(60, e.getCode());
        } finally {
            if (t != null) {
                t.remove();
                root.commit(CommitMarker.asCommitAttributes());
            }
        }
    }

    // Removing a token node is always allowed (e.g. logout / cleanup).
    @Test
    public void testRemoveTokenNode() throws Exception {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        getTokenTree(info).remove();
        root.commit();
    }

    // A '.tokens' parent of the wrong node type at an invalid location -> code 68.
    @Test
    public void testInvalidTokenParentNode() throws Exception {
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        Tree node = TreeUtil.addChild(userTree, "testNode", JcrConstants.NT_UNSTRUCTURED);
        try {
            // Invalid node type of '.tokens' node
            TreeUtil.addChild(node, TOKENS_NODE_NAME, JcrConstants.NT_UNSTRUCTURED);
            root.commit(CommitMarker.asCommitAttributes());
            fail("Creating a new token '" + node.getPath() + "' must fail.");
        } catch (CommitFailedException e) {
            assertEquals(68, e.getCode());
        } finally {
            node.remove();
            root.commit(CommitMarker.asCommitAttributes());
        }
    }

    // Manually creating the '.tokens' parent with the reserved type directly
    // below the user home is permitted.
    @Test
    public void testManuallyCreateTokenParent() throws Exception {
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        TreeUtil.addChild(userTree, TOKENS_NODE_NAME, TOKENS_NT_NAME);
        root.commit();
    }

    // An nt:unstructured '.tokens' node below the user home is also permitted.
    @Test
    public void testManuallyCreateTokenParentWithNtUnstructured() throws Exception {
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        TreeUtil.addChild(userTree, TOKENS_NODE_NAME, JcrConstants.NT_UNSTRUCTURED);
        root.commit();
    }

    // A '.tokens' node outside the configured user root -> code 64.
    @Test
    public void testTokensNodeBelowRoot() throws Exception {
        Tree rootNode = root.getTree("/");
        Tree n = null;
        try {
            // Invalid node type of '.tokens' node
            n = TreeUtil.addChild(rootNode, TOKENS_NODE_NAME, TOKENS_NT_NAME);
            root.commit();
            fail("The token parent node must be located below the configured user root.");
        } catch (CommitFailedException e) {
            assertEquals(64, e.getCode());
        } finally {
            if (n != null) {
                n.remove();
                root.commit(CommitMarker.asCommitAttributes());
            }
        }
    }

    // A '.tokens' node not directly below the user home node -> code 68.
    @Test
    public void testTokensNodeAtInvalidPathBelowUser() throws Exception {
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        Tree n = null;
        try {
            // Invalid node type of '.tokens' node
            n = TreeUtil.addChild(userTree, "test", JcrConstants.NT_UNSTRUCTURED);
            TreeUtil.addChild(n, TOKENS_NODE_NAME, TOKENS_NT_NAME);
            root.commit();
            fail("The token parent node must be located below the user home node.");
        } catch (CommitFailedException e) {
            assertEquals(68, e.getCode());
        } finally {
            if (n != null) {
                n.remove();
                root.commit(CommitMarker.asCommitAttributes());
            }
        }
    }

    // Upgrading an existing '.tokens' node to the reserved parent type is allowed.
    @Test
    public void testChangeTokenParentPrimaryTypeToRepUnstructured() throws Exception {
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        Tree node = TreeUtil.addChild(userTree, TOKENS_NODE_NAME, JcrConstants.NT_UNSTRUCTURED);
        root.commit();
        node.setProperty(JcrConstants.JCR_PRIMARYTYPE, TOKENS_NT_NAME, Type.NAME);
        root.commit();
    }

    // Downgrading a real token parent to another type is rejected -> code 69.
    @Test
    public void testChangeTokenParentPrimaryType() {
        TokenInfo info = createTokenInfo(tokenProvider, userId);
        try {
            Tree tokensTree = getTokenTree(info).getParent();
            tokensTree.setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
            root.commit();
            fail("The primary type of the token parent must not be changed from rep:Unstructured to another type.");
        } catch (CommitFailedException e) {
            assertEquals(69, e.getCode());
        } finally {
            root.refresh();
        }
    }

    // Retyping an unrelated rep:Unstructured node is not restricted.
    @Test
    public void testChangeRegularRepUnstructuredPrimaryType() throws Exception {
        Tree userTree = root.getTree(getUserManager(root).getAuthorizable(userId).getPath());
        Tree n = TreeUtil.getOrAddChild(userTree,"test", NodeTypeConstants.NT_REP_UNSTRUCTURED);
        root.commit();
        n.setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
        root.commit();
    }

    // Converting an existing node into a rep:Token node must be rejected -> code 62.
    @Test
    public void testChangeToReservedTokenNodeType() throws Exception {
        String parentPath = getTestUser().getPath() + "/"+TokenConstants.TOKENS_NODE_NAME;
        String path = parentPath+"/node";
        try {
            Tree t = root.getTree(getTestUser().getPath()).addChild(TokenConstants.TOKENS_NODE_NAME);
            t.setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
            t.addChild("node").setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_UNSTRUCTURED, Type.NAME);
            root.commit();
            Tree node = root.getTree(path);
            node.setProperty(JcrConstants.JCR_PRIMARYTYPE, TokenConstants.TOKEN_NT_NAME, Type.NAME);
            node.setProperty(JcrConstants.JCR_UUID, UUID.randomUUID().toString());
            node.setProperty(TokenConstants.TOKEN_ATTRIBUTE_KEY, PasswordUtil.buildPasswordHash("key"));
            node.setProperty(TokenConstants.TOKEN_ATTRIBUTE_EXPIRY, getDateValue(), Type.DATE);
            root.commit(CommitMarker.asCommitAttributes());
        } catch (CommitFailedException e) {
            assertEquals(62, e.getCode());
        } finally {
            root.refresh();
            root.getTree(parentPath).remove();
            root.commit();
        }
    }

    // --- direct Validator invocations (bypassing Root.commit) -------------

    // Adding a reserved property with a rep:Token parent passes validation.
    @Test
    public void testReservedPropertyAddedValidParent() throws Exception {
        Tree tokenTree = TreeUtil.addChild(root.getTree(PathUtils.ROOT_PATH), "name", TOKEN_NT_NAME);
        Validator v = createRootValidator(tokenTree, tokenTree);
        v.propertyAdded(PropertyStates.createProperty(TokenConstants.TOKEN_ATTRIBUTE_EXPIRY, "anyValue"));
    }

    // Adding a reserved property on a non-token parent is a constraint
    // violation -> code 60.
    @Test(expected = CommitFailedException.class)
    public void testReservedPropertyAddedInvalidParent() throws Exception {
        Tree rootTree = root.getTree(PathUtils.ROOT_PATH);
        try {
            Validator v = createRootValidator(rootTree, rootTree);
            v.propertyAdded(PropertyStates.createProperty(TokenConstants.TOKEN_ATTRIBUTE_EXPIRY, "anyValue"));
        } catch (CommitFailedException e) {
            assertTrue(e.isConstraintViolation());
            assertEquals(60, e.getCode());
            throw e;
        }
    }

    // A new token node without a key property -> code 66.
    @Test(expected = CommitFailedException.class)
    public void testAddTokenTreeMissingKey() throws Exception {
        Tree tokenTree = getTokenTree(createTokenInfo(tokenProvider, userId));
        tokenTree.removeProperty(TokenConstants.TOKEN_ATTRIBUTE_KEY);
        Tree rootTree = root.getTree(PathUtils.ROOT_PATH);
        try {
            Validator v = createValidator(rootTree, rootTree, tokenTree.getParent().getPath(), false);
            v.childNodeAdded(tokenTree.getName(), getTreeProvider().asNodeState(tokenTree));
        } catch (CommitFailedException e) {
            assertTrue(e.isConstraintViolation());
            assertEquals(66, e.getCode());
            throw e;
        } finally {
            root.refresh();
        }
    }

    // A token node whose parent is not the '.tokens' node -> code 65.
    @Test(expected = CommitFailedException.class)
    public void testAddTokenTreeMissingTokensParent() throws Exception {
        Tree tokenTree = getTokenTree(createTokenInfo(tokenProvider, userId));
        root.move(tokenTree.getPath(), PathUtils.concat(getTestUser().getPath(), tokenTree.getName()));
        Tree rootTree = root.getTree(PathUtils.ROOT_PATH);
        try {
            Validator v = createValidator(rootTree, rootTree, getTestUser().getPath(), true);
            v.childNodeAdded(tokenTree.getName(), mock(NodeState.class));
        } catch (CommitFailedException e) {
            assertTrue(e.isConstraintViolation());
            assertEquals(65, e.getCode());
            throw e;
        } finally {
            root.refresh();
        }
    }

    // A '.tokens' node whose grandparent is not the user home -> code 65.
    @Test(expected = CommitFailedException.class)
    public void testAddTokenTreeMissingUserGrandParent() throws Exception {
        // since adding/changing an invalid tokens-parent node will be detected, mocking is required to
        // reach the desired invalid state, where the .tokens node isn't located below the user home.
        Tree tokenTree = getTokenTree(createTokenInfo(tokenProvider, userId));
        Tree tokensTree = tokenTree.getParent();
        // move .tokens node one level up
        String destPath = PathUtils.concat(PathUtils.getParentPath(getTestUser().getPath()), tokensTree.getName());
        root.move(tokensTree.getPath(), destPath);
        try {
            // create a validator that has 'tokensTree' as parentBefore and parentAfter
            NodeState ns = getTreeProvider().asNodeState(tokensTree);
            TreeProvider tp = when(mock(TreeProvider.class).createReadOnlyTree(ns)).thenReturn(tokensTree).getMock();
            TokenValidatorProvider tvp = new TokenValidatorProvider(ConfigurationParameters.EMPTY, tp);
            Validator v = tvp.getRootValidator(ns, ns, new CommitInfo("sid", "uid", CommitMarker.asCommitAttributes()));
            assertNotNull(v);
            v.childNodeChanged(tokenTree.getName(), mock(NodeState.class), mock(NodeState.class));
        } catch (CommitFailedException e) {
            assertTrue(e.isConstraintViolation());
            assertEquals(65, e.getCode());
            throw e;
        } finally {
            root.refresh();
        }
    }

    // A token node with a plaintext (non-hashed) key -> code 66.
    @Test(expected = CommitFailedException.class)
    public void testAddTokenTreeInvalidKey() throws Exception {
        Tree tokenTree = getTokenTree(createTokenInfo(tokenProvider, userId));
        tokenTree.setProperty(TokenConstants.TOKEN_ATTRIBUTE_KEY, "someValue");
        Tree rootTree = root.getTree(PathUtils.ROOT_PATH);
        try {
            Validator v = createValidator(rootTree, rootTree, tokenTree.getParent().getPath(), true);
            v.childNodeAdded(tokenTree.getName(), getTreeProvider().asNodeState(tokenTree));
        } catch (CommitFailedException e) {
            assertTrue(e.isConstraintViolation());
            assertEquals(66, e.getCode());
            throw e;
        } finally {
            root.refresh();
        }
    }

    // A token node without an expiry property -> code 67.
    @Test(expected = CommitFailedException.class)
    public void testAddTokenTreeMissingExpiry() throws Exception {
        Tree tokenTree = getTokenTree(createTokenInfo(tokenProvider, userId));
        tokenTree.removeProperty(TokenConstants.TOKEN_ATTRIBUTE_EXPIRY);
        Tree rootTree = root.getTree(PathUtils.ROOT_PATH);
        Validator v = createValidator(rootTree, rootTree, tokenTree.getParent().getPath(), false);
        try {
            v.childNodeAdded(tokenTree.getName(), getTreeProvider().asNodeState(tokenTree));
        } catch (CommitFailedException e) {
            assertTrue(e.isConstraintViolation());
            assertEquals(67, e.getCode());
            throw e;
        } finally {
            root.refresh();
        }
    }

    // Descending with childNodeAdded and then reporting childNodeChanged is an
    // inconsistent callback sequence the validator must reject.
    @Test(expected = IllegalStateException.class)
    public void testIllegalValidatorSequence() throws Exception {
        Tree tokenTree = getTokenTree(createTokenInfo(tokenProvider, userId));
        Tree rootTree = root.getTree(PathUtils.ROOT_PATH);
        // illegal sequence of adding nodes and the changing -> must be spotted by the validator
        Validator v = createValidator(rootTree, rootTree, tokenTree.getParent().getPath(), true);
        v.childNodeChanged(tokenTree.getName(), mock(NodeState.class), mock(NodeState.class));
    }

    /**
     * Creates a root validator for the given before/after trees using commit
     * info that carries the TokenProvider's commit attributes.
     */
    @NotNull
    private Validator createRootValidator(@NotNull Tree before, @NotNull Tree after) {
        TokenValidatorProvider tvp = new TokenValidatorProvider(ConfigurationParameters.EMPTY, getTreeProvider());
        Validator v = tvp.getRootValidator(getTreeProvider().asNodeState(before), getTreeProvider().asNodeState(after), new CommitInfo("sid", "uid", CommitMarker.asCommitAttributes()));
        assertNotNull(v);
        return v;
    }

    /**
     * Creates a validator positioned at {@code path} by descending from the
     * root validator, reporting each path element either as an added child
     * ({@code isAdd == true}) or as a changed child.
     */
    @NotNull
    private Validator createValidator(@NotNull Tree before, @NotNull Tree after, @NotNull String path, boolean isAdd) throws CommitFailedException {
        TokenValidatorProvider tvp = new TokenValidatorProvider(ConfigurationParameters.EMPTY, getTreeProvider());
        NodeState b = getTreeProvider().asNodeState(before);
        NodeState a = getTreeProvider().asNodeState(after);
        Validator v = tvp.getRootValidator(b, a, new CommitInfo("sid", "uid", CommitMarker.asCommitAttributes()));
        for (String name : PathUtils.elements(path)) {
            assertNotNull(v);
            b = b.getChildNode(name);
            a = a.getChildNode(name);
            v = (isAdd) ? v.childNodeAdded(name, a) : v.childNodeChanged(name, b, a);
        }
        assertNotNull(v);
        return v;
    }
}
| |
package im.actor.sdk.controllers.auth;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.MenuItem;
import im.actor.core.AuthState;
import im.actor.core.entity.AuthCodeRes;
import im.actor.core.entity.AuthRes;
import im.actor.core.entity.AuthStartRes;
import im.actor.core.entity.Sex;
import im.actor.core.network.RpcException;
import im.actor.core.network.RpcInternalException;
import im.actor.core.network.RpcTimeoutException;
import im.actor.runtime.actors.Actor;
import im.actor.runtime.actors.ActorCreator;
import im.actor.runtime.actors.ActorRef;
import im.actor.runtime.actors.ActorSystem;
import im.actor.runtime.actors.Props;
import im.actor.runtime.function.Consumer;
import im.actor.runtime.promise.Promise;
import im.actor.runtime.storage.PreferencesStorage;
import im.actor.sdk.ActorSDK;
import im.actor.sdk.R;
import im.actor.sdk.controllers.activity.ActorMainActivity;
import im.actor.sdk.controllers.activity.BaseFragmentActivity;
import static im.actor.sdk.util.ActorSDKMessenger.messenger;
public class AuthActivity extends BaseFragmentActivity {
public static final String AUTH_TYPE_KEY = "auth_type";
public static final String SIGN_TYPE_KEY = "sign_type";
public static final int AUTH_TYPE_PHONE = 1;
public static final int AUTH_TYPE_EMAIL = 2;
public static final int SIGN_TYPE_IN = 3;
public static final int SIGN_TYPE_UP = 4;
private static final int OAUTH_DIALOG = 1;
private ProgressDialog progressDialog;
private AlertDialog alertDialog;
private AuthState state;
private int availableAuthType = AUTH_TYPE_PHONE;
private int currentAuthType = AUTH_TYPE_PHONE;
private int signType;
private long currentPhone;
private String currentEmail;
private String transactionHash;
private String currentCode;
private boolean isRegistered = false;
private String currentName;
private Sex currentSex;
private ActorRef authActor;
private boolean codeValidated = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
authActor = ActorSystem.system().actorOf(Props.create(new ActorCreator() {
@Override
public Actor create() {
return new Actor();
}
}), "actor/auth_promises_actor");
signType = getIntent().getIntExtra(SIGN_TYPE_KEY, SIGN_TYPE_IN);
PreferencesStorage preferences = messenger().getPreferences();
currentPhone = preferences.getLong("currentPhone", 0);
currentEmail = preferences.getString("currentEmail");
transactionHash = preferences.getString("transactionHash");
isRegistered = preferences.getBool("isRegistered", false);
codeValidated = preferences.getBool("codeValidated", false);
currentName = preferences.getString("currentName");
signType = preferences.getInt("signType", signType);
String savedState = preferences.getString("auth_state");
state = Enum.valueOf(AuthState.class, savedState != null ? savedState : "AUTH_START");
updateState(state, true);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home) {
}
return super.onOptionsItemSelected(item);
}
private void updateState(AuthState state) {
updateState(state, false);
}
private void updateState(AuthState state, boolean force) {
if (this.state != null && (this.state == state && !force)) {
return;
}
PreferencesStorage preferences = messenger().getPreferences();
preferences.putLong("currentPhone", currentPhone);
preferences.putString("currentEmail", currentEmail);
preferences.putString("transactionHash", transactionHash);
preferences.putBool("isRegistered", isRegistered);
preferences.putBool("codeValidated", codeValidated);
preferences.putString("currentName", currentName);
preferences.putInt("signType", signType);
preferences.putString("auth_state", state.toString());
// if we show the next fragment when app is in background and not visible , app crashes!
// e.g when the GSM data is off and after trying to send code we go to settings to turn on, app is going invisible and ...
if (state != AuthState.LOGGED_IN && getIsResumed() == false) {
return;
}
this.state = state;
switch (state) {
case AUTH_START:
if (signType == SIGN_TYPE_UP) {
updateState(AuthState.SIGN_UP);
} else if (signType == SIGN_TYPE_IN) {
showFragment(new SignInFragment(), false, false);
}
break;
case SIGN_UP:
if (currentName != null && !currentName.isEmpty()) {
startAuth(currentName);
} else {
showFragment(new SignUpFragment(), false, false);
}
break;
case AUTH_PHONE:
currentAuthType = AUTH_TYPE_PHONE;
currentCode = "";
showFragment(ActorSDK.sharedActor().getDelegatedFragment(ActorSDK.sharedActor().getDelegate().getAuthStartIntent(), new SignPhoneFragment(), BaseAuthFragment.class), false, false);
break;
case AUTH_EMAIL:
currentCode = "";
currentAuthType = AUTH_TYPE_EMAIL;
showFragment(ActorSDK.sharedActor().getDelegatedFragment(ActorSDK.sharedActor().getDelegate().getAuthStartIntent(), new SignEmailFragment(), BaseAuthFragment.class), false, false);
break;
case CODE_VALIDATION_PHONE:
case CODE_VALIDATION_EMAIL:
Fragment signInFragment = new ValidateCodeFragment();
Bundle args = new Bundle();
args.putString("authType", state == AuthState.CODE_VALIDATION_EMAIL ? ValidateCodeFragment.AUTH_TYPE_EMAIL : ValidateCodeFragment.AUTH_TYPE_PHONE);
args.putBoolean(ValidateCodeFragment.AUTH_TYPE_SIGN, signType == SIGN_TYPE_IN);
args.putString("authId", state == AuthState.CODE_VALIDATION_EMAIL ? currentEmail : Long.toString(currentPhone));
signInFragment.setArguments(args);
showFragment(signInFragment, false, false);
break;
case LOGGED_IN:
finish();
startActivity(new Intent(this, ActorMainActivity.class));
break;
}
}
public void startAuth(String name) {
currentName = name;
currentSex = Sex.UNKNOWN;
availableAuthType = ActorSDK.sharedActor().getAuthType();
AuthState authState;
if (!codeValidated) {
if ((availableAuthType & AUTH_TYPE_PHONE) == AUTH_TYPE_PHONE) {
authState = AuthState.AUTH_PHONE;
} else if ((availableAuthType & AUTH_TYPE_EMAIL) == AUTH_TYPE_EMAIL) {
authState = AuthState.AUTH_EMAIL;
} else {
// none of valid auth types selected - force crash?
return;
}
updateState(authState);
} else {
signUp(messenger().doSignup(currentName, currentSex != null ? currentSex : Sex.UNKNOWN, transactionHash), currentName, currentSex);
}
}
public void startPhoneAuth(Promise<AuthStartRes> promise, long phone) {
currentAuthType = AUTH_TYPE_PHONE;
currentPhone = phone;
startAuth(promise);
}
public void startEmailAuth(Promise<AuthStartRes> promise, String email) {
currentAuthType = AUTH_TYPE_EMAIL;
currentEmail = email;
startAuth(promise);
}
private void startAuth(Promise<AuthStartRes> res) {
showProgress();
res.then(new Consumer<AuthStartRes>() {
@Override
public void apply(AuthStartRes authStartRes) {
if (dismissProgress()) {
transactionHash = authStartRes.getTransactionHash();
isRegistered = authStartRes.isRegistered();
switch (authStartRes.getAuthMode()) {
case OTP:
switch (currentAuthType) {
case AUTH_TYPE_PHONE:
updateState(AuthState.CODE_VALIDATION_PHONE);
break;
case AUTH_TYPE_EMAIL:
updateState(AuthState.CODE_VALIDATION_EMAIL);
break;
}
break;
default:
//not supported AuthMode - force crash?
}
}
}
}).failure(new Consumer<Exception>() {
@Override
public void apply(Exception e) {
handleAuthError(e);
}
});
}
public void validateCode(Promise<AuthCodeRes> promise, String code) {
currentCode = code;
showProgress();
promise.then(new Consumer<AuthCodeRes>() {
@Override
public void apply(AuthCodeRes authCodeRes) {
if (dismissProgress()) {
codeValidated = true;
transactionHash = authCodeRes.getTransactionHash();
if (!authCodeRes.isNeedToSignup()) {
messenger().doCompleteAuth(authCodeRes.getResult()).then(new Consumer<Boolean>() {
@Override
public void apply(Boolean aBoolean) {
updateState(AuthState.LOGGED_IN);
}
}).failure(new Consumer<Exception>() {
@Override
public void apply(Exception e) {
handleAuthError(e);
}
});
} else {
if (currentName == null || currentName.isEmpty()) {
updateState(AuthState.SIGN_UP, true);
} else {
signUp(messenger().doSignup(currentName, currentSex != null ? currentSex : Sex.UNKNOWN, transactionHash), currentName, currentSex);
}
}
}
}
}).failure(new Consumer<Exception>() {
@Override
public void apply(Exception e) {
handleAuthError(e);
}
});
}
public void signUp(Promise<AuthRes> promise, String name, Sex sex) {
currentName = name;
currentSex = sex;
promise.then(new Consumer<AuthRes>() {
@Override
public void apply(AuthRes authRes) {
dismissProgress();
messenger().doCompleteAuth(authRes).then(new Consumer<Boolean>() {
@Override
public void apply(Boolean aBoolean) {
updateState(AuthState.LOGGED_IN);
}
}).failure(new Consumer<Exception>() {
@Override
public void apply(Exception e) {
handleAuthError(e);
}
});
}
}).failure(new Consumer<Exception>() {
@Override
public void apply(Exception e) {
handleAuthError(e);
}
});
}
public void handleAuthError(final Exception e) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (dismissProgress()) {
boolean canTryAgain = false;
boolean keepState = false;
String message = getString(R.string.error_unknown);
String tag = "UNKNOWN";
if (e instanceof RpcException) {
RpcException re = (RpcException) e;
if (re instanceof RpcInternalException) {
message = getString(R.string.error_unknown);
canTryAgain = true;
} else if (re instanceof RpcTimeoutException) {
message = getString(R.string.error_connection);
canTryAgain = true;
} else {
if ("PHONE_CODE_EXPIRED".equals(re.getTag()) || "EMAIL_CODE_EXPIRED".equals(re.getTag())) {
currentCode = "";
message = getString(R.string.auth_error_code_expired);
canTryAgain = false;
} else if ("PHONE_CODE_INVALID".equals(re.getTag()) || "EMAIL_CODE_INVALID".equals(re.getTag())) {
message = getString(R.string.auth_error_code_invalid);
canTryAgain = false;
keepState = true;
} else if ("FAILED_GET_OAUTH2_TOKEN".equals(re.getTag())) {
message = getString(R.string.auth_error_failed_get_oauth2_token);
canTryAgain = false;
} else {
message = re.getMessage();
canTryAgain = re.isCanTryAgain();
}
}
}
try {
if (canTryAgain) {
new AlertDialog.Builder(AuthActivity.this)
.setMessage(message)
.setPositiveButton(R.string.dialog_try_again, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dismissAlert();
switch (state) {
case AUTH_EMAIL:
case AUTH_PHONE:
switch (currentAuthType) {
case AUTH_TYPE_PHONE:
startAuth(messenger().doStartPhoneAuth(currentPhone));
break;
case AUTH_TYPE_EMAIL:
startAuth(messenger().doStartEmailAuth(currentEmail));
break;
}
break;
case CODE_VALIDATION_EMAIL:
case CODE_VALIDATION_PHONE:
validateCode(messenger().doValidateCode(currentCode, transactionHash), currentCode);
break;
case SIGN_UP:
signUp(messenger().doSignup(currentName, currentSex!=null?currentSex:Sex.UNKNOWN, transactionHash), currentName, currentSex);
break;
}
}
})
.setNegativeButton(R.string.dialog_cancel, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dismissAlert();
updateState(AuthState.AUTH_START);
}
}).setCancelable(false)
.show()
.setCanceledOnTouchOutside(false);
} else {
final boolean finalKeepState = keepState;
new AlertDialog.Builder(AuthActivity.this)
.setMessage(message)
.setPositiveButton(R.string.dialog_ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dismissAlert();
if (finalKeepState) {
updateState(state, true);
} else if (signType == SIGN_TYPE_UP) {
if (currentAuthType == AUTH_TYPE_EMAIL) {
switchToEmailAuth();
} else if (currentAuthType == AUTH_TYPE_PHONE) {
switchToPhoneAuth();
} else {
updateState(AuthState.AUTH_START);
}
} else if (signType == SIGN_TYPE_IN) {
startSignIn();
} else {
updateState(AuthState.AUTH_START);
}
}
})
.setCancelable(false)
.show()
.setCanceledOnTouchOutside(false);
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
}
});
}
public void switchToEmailAuth() {
updateState(AuthState.AUTH_EMAIL);
}
public void switchToPhoneAuth() {
updateState(AuthState.AUTH_PHONE);
}
/** Restarts the flow from the beginning in sign-in mode (forced state update). */
public void startSignIn() {
    signType = SIGN_TYPE_IN;
    updateState(AuthState.AUTH_START, true);
}
/** Restarts the flow from the beginning in sign-up mode (forced state update). */
public void startSignUp() {
    signType = SIGN_TYPE_UP;
    updateState(AuthState.AUTH_START, true);
}
/**
 * Shows a fresh modal progress dialog, replacing any dialog that is
 * currently visible. The dialog cannot be cancelled by the user.
 */
public void showProgress() {
    dismissProgress();
    ProgressDialog dialog = new ProgressDialog(this);
    dialog.setTitle(getString(R.string.progress_common));
    dialog.setCancelable(false);
    dialog.setCanceledOnTouchOutside(false);
    progressDialog = dialog;
    dialog.show();
}
@Override
protected void onPause() {
    super.onPause();
    // Drop both dialogs so they are not leaked across the activity's
    // pause/resume cycle (window-leak crash otherwise).
    dismissProgress();
    dismissAlert();
}
/**
 * Dismisses the progress dialog if one is showing.
 *
 * @return {@code true} if a dialog was actually dismissed
 */
private boolean dismissProgress() {
    ProgressDialog dialog = progressDialog;
    if (dialog == null) {
        return false;
    }
    progressDialog = null;
    dialog.dismiss();
    return true;
}
/** Dismisses the alert dialog if one is showing and clears the reference. */
private void dismissAlert() {
    AlertDialog dialog = alertDialog;
    if (dialog == null) {
        return;
    }
    alertDialog = null;
    dialog.dismiss();
}
/** @return the confirmation code most recently entered by the user */
public String getCurrentCode() {
    return currentCode;
}
/** @return the server-issued hash identifying the current auth transaction */
public String getTransactionHash() {
    return transactionHash;
}
}
| |
package edacc.experiment.tabs.solver;
import edacc.model.Experiment;
import edacc.model.Parameter;
import edacc.model.ParameterDAO;
import edacc.model.ParameterInstance;
import edacc.model.ParameterInstanceDAO;
import edacc.model.Solver;
import edacc.model.SolverBinaries;
import edacc.model.SolverConfiguration;
import edacc.model.SolverDAO;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author simon
*/
/**
 * Edit buffer for one solver configuration of an experiment. Holds the
 * (possibly unsaved) name, hint, seed group, selected solver binary and the
 * parameter table model, and can diff itself against the persisted
 * {@link SolverConfiguration} to detect unsaved changes.
 *
 * @author simon
 */
public class SolverConfigurationEntry {

    // Persisted configuration backing this entry; null for a new, unsaved entry.
    private SolverConfiguration solverConfig;
    private Solver solver;
    private String hint;
    private SolverBinaries solverBinary;
    private String name;
    private int seedGroup;
    // Backing model of the parameter table shown in the UI.
    private SolverConfigEntryTableModel tableModel;
    private Experiment experiment;

    /**
     * Creates an entry mirroring an existing solver configuration: copies its
     * name, hint, seed group and binary, and loads its parameter instances
     * into the table model.
     *
     * @throws SQLException on DAO failure
     */
    public SolverConfigurationEntry(SolverConfiguration solverConfig, Experiment experiment) throws SQLException {
        this(SolverDAO.getById(solverConfig.getSolverBinary().getIdSolver()), experiment);
        this.solverConfig = solverConfig;
        hint = solverConfig.getHint();
        name = solverConfig.getName();
        seedGroup = solverConfig.getSeed_group();
        solverBinary = solverConfig.getSolverBinary();
        tableModel.setParameterInstances(ParameterInstanceDAO.getBySolverConfig(solverConfig));
    }

    /**
     * Creates a fresh (not yet persisted) entry for the given solver.
     * Defaults: solver's name, seed group 0, first available binary (if any),
     * empty hint.
     *
     * @throws SQLException on DAO failure
     */
    public SolverConfigurationEntry(Solver solver, Experiment experiment) throws SQLException {
        tableModel = new SolverConfigEntryTableModel();
        this.solver = solver;
        this.experiment = experiment;
        ArrayList<Parameter> params = new ArrayList<Parameter>();
        params.addAll(ParameterDAO.getParameterFromSolverId(solver.getId()));
        tableModel.setParameters(params);
        name = solver.getName();
        seedGroup = 0;
        if (!solver.getSolverBinaries().isEmpty()) {
            solverBinary = solver.getSolverBinaries().get(0);
        }
        hint = "";
    }

    /**
     * Copy constructor: creates a fresh entry for the same solver and assigns
     * the other entry's editable state to it.
     *
     * @throws SQLException on DAO failure
     */
    public SolverConfigurationEntry(SolverConfigurationEntry other) throws SQLException {
        this(other.getSolver(), other.getExperiment());
        this.assign(other);
    }

    /**
     * Returns a list of all parameter instances. Creates new parameter instances for not existing parameter instances.
     * <p>
     * Side effects: marks changed instances as modified and deselected ones as
     * deleted via {@link ParameterInstanceDAO}, and updates the table model.
     * Table columns used: 0 = selected flag, 5 = Parameter, 6 = ParameterInstance;
     * the single-argument getValueAt(i) presumably returns the value column —
     * TODO confirm against SolverConfigEntryTableModel.
     * @throws SQLException
     */
    public List<ParameterInstance> getParameterInstances() throws SQLException {
        ArrayList<ParameterInstance> parameters = new ArrayList<ParameterInstance>();
        ArrayList<ParameterInstance> newParameters = new ArrayList<ParameterInstance>();
        for (int i = 0; i < tableModel.getRowCount(); i++) {
            if ((Boolean) tableModel.getValueAt(i, 0)) {
                Parameter p = (Parameter) tableModel.getValueAt(i, 5);
                ParameterInstance pi = (ParameterInstance) tableModel.getValueAt(i, 6);
                if (pi == null) {
                    // Selected row without an instance yet: create one.
                    pi = new ParameterInstance();
                    pi.setParameter_id(p.getId());
                    pi.setSolverConfiguration(solverConfig);
                    pi.setValue((String) tableModel.getValueAt(i));
                    newParameters.add(pi);
                    parameters.add(pi);
                }
                // Existing instance whose value changed in the table.
                if (!pi.getValue().equals(tableModel.getValueAt(i))) {
                    pi.setValue(tableModel.getValueAt(i));
                    ParameterInstanceDAO.setModified(pi);
                    parameters.add(pi);
                }
            } else {
                // Deselected row: mark its instance (if any) as deleted.
                ParameterInstance pi = (ParameterInstance) tableModel.getValueAt(i, 6);
                if (pi != null) {
                    ParameterInstanceDAO.setDeleted(pi);
                    tableModel.removeParameterInstance(pi);
                    parameters.add(pi);
                }
            }
        }
        if (newParameters.size() > 0) {
            tableModel.setParameterInstances(newParameters);
        }
        return parameters;
    }

    /**
     * Checks for unsaved data, i.e. checks if the seed group, the parameter instances, name, hint have been changed.<br/>
     * If the seed group is not a valid integer it will be substituted and used as 0.
     * @return <code>true</code>, if and only if data is unsaved, false otherwise
     */
    public boolean isModified() {
        if (solverConfig == null
                || solverConfig.getSeed_group() != seedGroup
                || !name.equals(solverConfig.getName())
                || solverConfig.getSolverBinary() != solverBinary // reference comparison is intentional: DAO caches binaries
                || !hint.equals(solverConfig.getHint())) {
            return true;
        }
        return tableModel.isModified();
    }

    /**
     * Checks for unsaved data, i.e. checks if the seed group, the parameter instances, have been changed.<br/>
     * name and hint are not checked with this method.<br />
     * If the seed group is not a valid integer it will be substituted and used as 0.
     * @return <code>true</code>, if and only if data is unsaved, false otherwise
     */
    public boolean parametersModified() {
        if (solverConfig == null
                || solverConfig.getSeed_group() != seedGroup
                || solverConfig.getSolverBinary() != solverBinary) {
            return true;
        }
        return tableModel.isModified();
    }

    /**
     * @return {@code true} if any selected, value-carrying parameter row has an
     *         empty value (magic solver parameters are skipped)
     */
    public boolean hasEmptyValues() {
        for (int i = 0; i < tableModel.getRowCount(); i++) {
            if (edacc.experiment.Util.isMagicSolverParameter((String) tableModel.getValueAt(i, 1))) {
                continue;
            }
            if ((Boolean) tableModel.getValueAt(i, 0)) {
                if (tableModel.getParameters().get(i).getHasValue()) {
                    if ("".equals(tableModel.getValueAt(i, 3))) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    public Experiment getExperiment() {
        return experiment;
    }

    public void setExperiment(Experiment experiment) {
        this.experiment = experiment;
    }

    public String getHint() {
        return hint;
    }

    public void setHint(String hint) {
        this.hint = hint;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getSeedGroup() {
        return seedGroup;
    }

    public void setSeedGroup(int seedGroup) {
        this.seedGroup = seedGroup;
    }

    public Solver getSolver() {
        return solver;
    }

    public void setSolver(Solver solver) {
        this.solver = solver;
    }

    public SolverBinaries getSolverBinary() {
        return solverBinary;
    }

    public void setSolverBinary(SolverBinaries solverBinary) {
        this.solverBinary = solverBinary;
    }

    public SolverConfiguration getSolverConfig() {
        return solverConfig;
    }

    /**
     * Sets (or clears) the backing configuration; clearing also drops the
     * parameter instances from the table model.
     */
    public void setSolverConfig(SolverConfiguration solverConfig) {
        if (solverConfig == null) {
            tableModel.setParameterInstances(null);
        }
        this.solverConfig = solverConfig;
    }

    public SolverConfigEntryTableModel getTableModel() {
        return tableModel;
    }

    public void setTableModel(SolverConfigEntryTableModel tableModel) {
        this.tableModel = tableModel;
    }

    /**
     * Copies the editable state (hint, name, seed group, binary, table values
     * in columns 3 and 0) from another entry. No-op if the entries refer to
     * different solvers (reference comparison).
     */
    public void assign(SolverConfigurationEntry other) {
        if (this.getSolver() != other.getSolver()) {
            return;
        }
        this.hint = other.hint;
        this.name = other.name;
        this.seedGroup = other.seedGroup;
        this.solverBinary = other.solverBinary;
        for (int i = 0; i < other.tableModel.getRowCount(); i++) {
            tableModel.setValueAt(other.tableModel.getValueAt(i, 3), i, 3);
            tableModel.setValueAt(other.tableModel.getValueAt(i, 0), i, 0);
        }
    }
}
| |
package org.libbun.drv;
import java.lang.invoke.CallSite;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.Map;
import java.util.Stack;
import org.libbun.BunDriver;
import org.libbun.BunType;
import org.libbun.DriverCommand;
import org.libbun.Namespace;
import org.libbun.UMap;
import org.libbun.drv.JvmRuntime.ArrayImpl;
import org.libbun.drv.JvmRuntime.FuncHolder;
import org.libbun.drv.JvmRuntime.JvmOperator;
import org.libbun.drv.JvmRuntime.MapImpl;
import org.libbun.peg4d.PegObject;
import org.objectweb.asm.Handle;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.commons.GeneratorAdapter;
import org.objectweb.asm.commons.Method;
/**
* generate java byte code and invoke.
* @author skgchxngsxyz-osx
*
*/
public class JvmDriver extends BunDriver implements Opcodes {
    // Suffix of the generated holder class for each global (let) variable.
    public final static String globalVarHolderSuffix = "_LetVarHolder";
    // Name of the static field inside a global-variable holder class.
    public final static String globalVarHolderFieldName = "letVarValue";
    // Name of the generated static method used for direct (non-dynamic) calls.
    public final static String staticFuncMethodName = "callFuncDirectly";
    public final static String funcMethodName = "callFunc";
    // Name of the static field holding the singleton function object.
    public final static String funcFieldName = "funcField";
    // Bytecode version for all generated classes.
    public static int JAVA_VERSION = V1_7;
    // Path of the bun model file loaded into the namespace (see initTable).
    protected final String bunModel;
    /**
     * used for byte code loading.
     */
    protected final JvmByteCodeLoader loader;
    /**
     * used for bun type to java class translation.
     */
    protected final UMap<Class<?>> classMap;
    /**
     * used for java class generation.
     */
    protected ClassBuilder classBuilder;
    /**
     * used for java method (includes constructor, static initializer) generation.
     * The top of the stack is the method currently being generated.
     */
    protected final Stack<MethodBuilder> mBuilders;
    protected Namespace gamma;
    /**
     * represents current command name,
     */
    protected String currentCommand;
    // bootstrap-method handles for invokedynamic, keyed by "bsm" + name.
    protected final Map<String, Handle> handleMap;
    /**
     * contains type descriptor and method descriptor of static method.
     */
    protected final Map<String, Pair<Type, Method>> staticFuncMap;
    /**
     * contains method descriptor of main, and holder class.
     */
    protected Pair<Class<?>, Method> mainClassPair;
/** Creates a driver using the default common bun model. */
public JvmDriver() {
    this("lib/driver/jvm/common.bun");
}
/**
 * Creates a driver for the given bun model path, registers all
 * invokedynamic bootstrap handles and all driver commands.
 */
protected JvmDriver(String bunModel) {
    this.bunModel = bunModel;
    this.loader = new JvmByteCodeLoader();
    this.classMap = new UMap<Class<?>>();
    this.mBuilders = new Stack<MethodBuilder>();
    this.staticFuncMap = new HashMap<>();
    this.handleMap = new HashMap<String, Handle>();
    // bootstrap-method handles for the dynamic call sites.
    this.initBsmHandle("UnaryOp");
    this.initBsmHandle("BinaryOp");
    this.initBsmHandle("CompOp");
    this.initBsmHandle("Method");
    this.initBsmHandle("Func");
    // init driver command.
    this.addCommand("PushAsLong", new PushAsLong());
    this.addCommand("PushAsDouble", new PushAsDouble());
    this.addCommand("PushAsBoolean", new PushAsBoolean());
    this.addCommand("PushAsString", new PushAsString());
    this.addCommand("CallOp", new CallOperator());
    this.addCommand("And", new CondAnd());
    this.addCommand("Or", new CondOr());
    this.addCommand("VarDecl", new VarDecl());
    this.addCommand("If", new IfStatement());
    this.addCommand("While", new WhileStatement());
    this.addCommand("Block", new Block());
    this.addCommand("Print", new PrintCommand());
    this.addCommand("IsStmtEnd", new IsStmtEndCommand());
    this.addCommand("Label", new LabelCommand());
    this.addCommand("Jump", new JumpCommand());
    this.addCommand("Box", new BoxCommand());
    this.addCommand("Unbox", new UnBoxCommand());
    this.addCommand("NewArray", new NewArrayCommand());
    this.addCommand("NewMap", new NewMapCommand());
    this.addCommand("Assign", new AssignCommand());
    this.addCommand("PyAssign", new PythonAssign());
    this.addCommand("Trinary", new TrinaryCommand());
    this.addCommand("Defun", new DefineFunction());
    this.addCommand("Return", new ReturnStatement());
    this.addCommand("CallDynamic", new DynamicInvokeCommand());
    this.addCommand("Apply", new ApplyCommand());
    /**
     * add jvm opcode command
     */
    new ZeroOperandInsCommand().addToDriver(this);
    new SingleIntOperandInsCommand().addToDriver(this);
    new VarInsCommand().addToDriver(this);
    new TypeInsCommand().addToDriver(this);
    new FieldInsCommand().addToDriver(this);
    new MethodInsCommand().addToDriver(this);
    new JumpInsCommand().addToDriver(this);
}
/** @return a human-readable description of this driver */
@Override
public String getDesc() {
    return "Java bytecode generator by Nagisa Sekiguchi (YNU)";
}
/**
 * Registers the bun-type-name to Java-class mapping (primitives, their
 * wrappers, String, Object; "untyped" maps to Object) and loads the bun
 * model into the namespace.
 */
@Override
public void initTable(Namespace gamma) {
    this.classMap.put("long", long.class);
    this.classMap.put("int", int.class);
    this.classMap.put("float", float.class);
    this.classMap.put("double", double.class);
    this.classMap.put("boolean", boolean.class);
    this.classMap.put("String", String.class);
    this.classMap.put("void", void.class);
    this.classMap.put("Object", Object.class);
    this.classMap.put("untyped", Object.class);
    this.classMap.put("Integer", Integer.class);
    this.classMap.put("Float", Float.class);
    this.classMap.put("Long", Long.class);
    this.classMap.put("Double", Double.class);
    this.classMap.put("Boolean", Boolean.class);
    gamma.loadBunModel(this.bunModel, this);
}
@Override
public void startTransaction(String fileName) { // create top level wrapper
    // Fresh class builder per transaction; its single method receives all
    // top-level code until endTransaction().
    this.classBuilder = new ClassBuilder();
    this.mBuilders.push(this.classBuilder.createMethodBuilder());
}
/**
 * finalize class builder and invoke generated class.
 * Loads the generated bytecode through a fresh child loader and invokes its
 * static "invoke" method. An InvocationTargetException (user-code error)
 * only prints the cause; any other Throwable is fatal and exits the VM.
 */
@Override
public void endTransaction() {
    if(this.mBuilders.empty()) {
        return; // nothing generated (e.g. after an error reset)
    }
    this.mBuilders.peek().returnValue();
    this.mBuilders.pop().endMethod();
    this.classBuilder.visitEnd();
    String className = this.classBuilder.getClassName();
    byte[] byteCode = this.classBuilder.toByteArray();
    Class<?> generatedClass = this.loader.createChildLoader().generateClassFromByteCode(className, byteCode);
    try {
        generatedClass.getMethod("invoke").invoke(null);
    }
    catch(InvocationTargetException e) {
        e.getCause().printStackTrace();
    }
    catch(Throwable t) {
        t.printStackTrace();
        System.exit(1);
    }
}
/** No-op: top-level boundaries need no special handling in this driver. */
@Override
public void startTopLevel() {
}
/** No-op: top-level boundaries need no special handling in this driver. */
@Override
public void endTopLevel() {
}
/**
 * If a main function was defined, emits a static call to it from the current
 * method and clears the pending main-class pair so it is emitted only once.
 * NOTE(review): resets the hasMainFunc flag directly; hasMainFunction() is
 * declared outside this view — presumably it reads that flag.
 */
@Override
public void generateMain() {
    if(this.hasMainFunction()) {
        this.hasMainFunc = false;
        Class<?> mainHolderClass = this.mainClassPair.getLeft();
        this.mBuilders.peek().invokeStatic(Type.getType(mainHolderClass), this.mainClassPair.getRight());
        this.mainClassPair = null;
    }
}
/**
 * Registers the invokedynamic bootstrap handle "bsm" + name, pointing at the
 * static method of that name in JvmRuntime with the standard bootstrap
 * signature (Lookup, String, MethodType) -> CallSite.
 */
protected void initBsmHandle(String name) {
    String bsmName = "bsm" + name;
    Type[] paramTypes = {Type.getType(MethodHandles.Lookup.class), Type.getType(String.class), Type.getType(MethodType.class)};
    Method methodDesc = new Method(bsmName, Type.getType(CallSite.class), paramTypes);
    Handle handle = new Handle(H_INVOKESTATIC, Type.getType(JvmRuntime.class).getInternalName(), bsmName, methodDesc.getDescriptor());
    this.handleMap.put(bsmName, handle);
}
/**
 * insert print instruction after top level expression.
 * it is interactive mode only.
 * Void results are left untouched; when printing is disallowed the stack top
 * is simply popped. Non-primitive values are printed via the Object overload
 * of JvmRuntime.JvmOperator.printValue.
 */
protected void insertPrintIns(Class<?> stackTopClass, boolean allowPrinting) {
    if(stackTopClass.equals(Void.class)) {
        return; // nothing on the operand stack to print or pop
    }
    if(!allowPrinting) {
        this.mBuilders.peek().pop(stackTopClass);
        return;
    }
    if(!stackTopClass.isPrimitive()) {
        stackTopClass = Object.class; // single Object overload covers all refs
    }
    try {
        java.lang.reflect.Method method = JvmOperator.class.getMethod("printValue", stackTopClass);
        this.mBuilders.peek().invokeStatic(Type.getType(JvmOperator.class), Method.getMethod(method));
    }
    catch(Throwable t) {
        t.printStackTrace();
    }
}
/**
 * Translates a BunType into the corresponding Java class.
 *
 * @param type
 *     may be null
 * @return
 *     the mapped class, or Object.class when the type is null or unmapped
 */
protected Class<?> toJavaClass(BunType type) {
    if(type == null) {
        return Object.class;
    }
    Class<?> mapped = this.classMap.get(type.getName());
    return mapped != null ? mapped : Object.class;
}
/**
 * Resolves a PegObject's tag to a BunType via the namespace.
 * NOTE(review): substring(2) assumes the tag is prefixed with two marker
 * characters (e.g. "#T") before the type name — TODO confirm tag format.
 *
 * @throws RuntimeException if the type name is not defined
 */
protected BunType toType(PegObject pObject) {
    String name = pObject.tag;
    String typeName = name.substring(2);
    BunType type = this.gamma.getType(typeName, null);
    if(type == null) {
        throw new RuntimeException("undefined type: " + typeName);
    }
    return type;
}
/** @return the name of the driver command currently being dispatched */
public String getCommandSymbol() {
    return this.currentCommand;
}
/**
 * Dispatches a driver command by name.
 * Records the command name (see getCommandSymbol()) before invoking it.
 *
 * @throws RuntimeException if no command is registered under {@code cmd}
 *     (previously this surfaced as an opaque NullPointerException)
 */
@Override
public void pushCommand(String cmd, PegObject node, String[] params) {
    DriverCommand command = this.commandMap.get(cmd);
    if(command == null) {
        throw new RuntimeException("undefined command: " + cmd);
    }
    this.currentCommand = cmd;
    command.invoke(this, node, params);
}
/**
 * Emits the load of a name: a local-variable load, a getstatic on the
 * global-variable holder class, or a getstatic of the function object for
 * function entries. For a parentless (top-level) node in stdin/interactive
 * mode, also inserts the echo-print instruction.
 */
@Override
public void pushName(PegObject node, String name) {
    MethodBuilder mBuilder = mBuilders.peek();
    String varName = node.getText();
    VarEntry entry = mBuilder.getScopes().getEntry(varName);
    if(!(entry instanceof FuncEntry)) { // load variable
        Type varTypeDesc = Type.getType(entry.getVarClass());
        if(!entry.isGlobal()) { // get local variable
            // getOpcode(ILOAD) picks the type-appropriate xLOAD opcode.
            mBuilder.visitVarInsn(varTypeDesc.getOpcode(ILOAD), entry.getVarIndex());
        }
        else { // get global variable
            Type varHolderDesc = Type.getType(varName + globalVarHolderSuffix);
            mBuilder.getStatic(varHolderDesc, globalVarHolderFieldName, varTypeDesc);
        }
    }
    else { // load func object
        Type funcHolderDesc = Type.getType("L" + ((FuncEntry)entry).getInternalName() + ";");
        mBuilder.getStatic(funcHolderDesc, funcFieldName, funcHolderDesc);
    }
    if(node.getParent() == null) {
        // top-level expression: echo the value in interactive mode only
        insertPrintIns(toJavaClass(node.getType(null).getReturnType()), node.source.fileName.equals("(stdin)"));
    }
}
/**
 * Emits a direct static call to a previously defined function: pushes each
 * argument, then invokestatic on the holder class's static method looked up
 * from staticFuncMap by the function entry's internal name.
 */
@Override
public void pushApplyNode(String name, PegObject args) {
    MethodBuilder mBuilder = mBuilders.peek();
    VarScopes scopes = mBuilder.getScopes();
    Pair<Type, Method> pair = staticFuncMap.get(((FuncEntry)scopes.getEntry(name)).getInternalName());
    int paramSize = args.size();
    for(int i = 0 ; i < paramSize; i++) {
        this.pushNode(args.get(i));
    }
    mBuilder.invokeStatic(pair.getLeft(), pair.getRight());
}
/** No-op: types produce no bytecode in this driver. */
@Override
public void pushType(BunType type) {
}
/** No-op: raw code snippets are not supported by this driver. */
@Override
public void pushCode(String text) {
}
/**
 * Reports the error via the superclass, then discards all in-progress
 * method builders so no partial bytecode survives the failed transaction.
 */
@Override
public void pushErrorNode(PegObject errorNode) {
    super.pushErrorNode(errorNode);
    mBuilders.clear();
}
/**
 * Generates a #block node inside a freshly pushed variable scope; the scope
 * is removed again afterwards.
 */
protected void generateBlockWithNewScope(BunDriver driver, PegObject node) {
    VarScopes scopes = this.mBuilders.peek().getScopes();
    scopes.createNewScope();
    this.generateBlockWithCurrentScope(driver, node);
    scopes.removeCurrentScope();
}
/**
 * Generates a #block node in the current scope.
 *
 * @throws RuntimeException if the node is not a #block
 */
protected void generateBlockWithCurrentScope(BunDriver driver, PegObject node) {
    if(node.is("#block")) {
        driver.pushNode(node);
        return;
    }
    throw new RuntimeException("require block");
}
/**
 * Inserts the interactive echo-print after a finished top-level statement.
 * For #apply nodes the printed type is taken from the second child. With the
 * single parameter "method", delegates to the parent apply node instead.
 */
protected class IsStmtEndCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        if(node.getParent() == null) {
            BunType type = node.getType(null);
            if(node.is("#apply")) {
                type = node.get(1).getType(null);
            }
            // echo only in interactive (stdin) mode
            insertPrintIns(toJavaClass(type), node.source.fileName.equals("(stdin)"));
            return;
        }
        if(param.length == 1 && param[0].equals("method")) {
            PegObject applyNode = node.getParent();
            this.invoke(driver, applyNode, new String[]{});
        }
    }
}
/**
 * push value as java long.
 * Parses the node's text as a long literal and pushes it as a constant.
 * @author skgchxngsxyz-osx
 *
 */
protected class PushAsLong extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        mBuilders.peek().push(Long.parseLong(node.getText()));
    }
}
/**
 * push value as java double.
 * Parses the node's text as a double literal and pushes it as a constant.
 * @author skgchxngsxyz-osx
 *
 */
protected class PushAsDouble extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        mBuilders.peek().push(Double.parseDouble(node.getText()));
    }
}
/**
 * push value as java boolean.
 * The literal comes from the command parameter, not the node text.
 * @author skgchxngsxyz-osx
 *
 */
protected class PushAsBoolean extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        mBuilders.peek().push(param[0].equals("true"));
    }
}
/**
 * push value as java string.
 * Decodes escape sequences in the node text before pushing the constant.
 * @author skgchxngsxyz-osx
 *
 */
protected class PushAsString extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        mBuilders.peek().push(this.parseTokenText(node.getText()));
    }

    /**
     * decode escape sequence.
     * Recognizes \t \b \n \r \f \' \" \\; any other escaped character is
     * kept as-is (the backslash is dropped).
     * @param text
     *     - may be include encoded escape sequence.
     * @return
     *     - decoded string value.
     */
    private String parseTokenText(String text) {
        StringBuilder sBuilder = new StringBuilder();
        int size = text.length();
        for(int i = 0; i < size; i++) {
            char ch = text.charAt(i);
            // Guard i + 1 < size: a lone trailing backslash previously caused
            // a StringIndexOutOfBoundsException; now it is kept literally.
            if(ch == '\\' && i + 1 < size) {
                char nextCh = text.charAt(++i);
                switch(nextCh) {
                case 't' : ch = '\t'; break;
                case 'b' : ch = '\b'; break;
                case 'n' : ch = '\n'; break;
                case 'r' : ch = '\r'; break;
                case 'f' : ch = '\f'; break;
                case '\'': ch = '\''; break;
                case '"' : ch = '"'; break;
                case '\\': ch = '\\'; break;
                }
            }
            sBuilder.append(ch);
        }
        return sBuilder.toString();
    }
}
/**
 * generate binary and unary operator call instruction.
 * The operator name is the node tag without its leading '#'; the operand
 * classes are derived from the child node types.
 * @author skgchxngsxyz-osx
 *
 */
protected class CallOperator extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        String opName = node.tag.substring(1); // strip leading '#'
        int size = node.size();
        Class<?>[] paramClasses = new Class<?>[size];
        for(int i = 0; i < size; i++) {
            paramClasses[i] = toJavaClass(node.get(i).getType(null));
        }
        this.callOperator(opName, paramClasses);
    }

    /**
     * look up and generate invokestatic instruction.
     * @param opName
     *     - operator name.
     * @param paramClasses
     *     - operator parameter classes.
     */
    protected void callOperator(String opName, Class<?>[] paramClasses) {
        try {
            java.lang.reflect.Method method = JvmOperator.class.getMethod(opName, paramClasses);
            Method methodDesc = Method.getMethod(method);
            mBuilders.peek().invokeStatic(Type.getType(JvmOperator.class), methodDesc);
        }
        // Java 7 multi-catch replaces two identical catch blocks.
        catch(SecurityException | NoSuchMethodException e) {
            e.printStackTrace();
        }
    }
}
/**
 * generate conditional and. after evaluation, push boolean.
 * Short-circuits: the right operand is only evaluated when the left one is
 * true; otherwise false is pushed directly.
 * @author skgchxngsxyz-osx
 *
 */
protected class CondAnd extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        MethodBuilder mBuilder = mBuilders.peek();
        Label rightLabel = mBuilder.newLabel();
        Label mergeLabel = mBuilder.newLabel();
        // and left
        driver.pushNode(node.get(0));
        mBuilder.unbox(toJavaClass(node.get(0).getType(null)), boolean.class);
        mBuilder.push(true);
        // left == true -> evaluate right operand
        mBuilder.ifCmp(Type.BOOLEAN_TYPE, GeneratorAdapter.EQ, rightLabel);
        mBuilder.push(false);
        mBuilder.goTo(mergeLabel);
        // and right
        mBuilder.mark(rightLabel);
        driver.pushNode(node.get(1));
        mBuilder.unbox(toJavaClass(node.get(1).getType(null)), boolean.class);
        mBuilder.mark(mergeLabel);
    }
}
/**
 * generate condiotional or. after evaluation, push boolean.
 * Short-circuits: the right operand is only evaluated when the left one is
 * false; otherwise true is pushed directly.
 * @author skgchxngsxyz-osx
 *
 */
protected class CondOr extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        MethodBuilder mBuilder = mBuilders.peek();
        Label rightLabel = mBuilder.newLabel();
        Label mergeLabel = mBuilder.newLabel();
        // or left
        driver.pushNode(node.get(0));
        mBuilder.unbox(toJavaClass(node.get(0).getType(null)), boolean.class);
        mBuilder.push(true);
        // left != true -> evaluate right operand
        mBuilder.ifCmp(Type.BOOLEAN_TYPE, GeneratorAdapter.NE, rightLabel);
        mBuilder.push(true);
        mBuilder.goTo(mergeLabel);
        // or right
        mBuilder.mark(rightLabel);
        driver.pushNode(node.get(1));
        mBuilder.unbox(toJavaClass(node.get(1).getType(null)), boolean.class);
        mBuilder.mark(mergeLabel);
    }
}
/**
 * Generates a ternary (cond ? then : else) expression: children are
 * condition, then-expression, else-expression.
 */
protected class TrinaryCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        MethodBuilder mBuilder = mBuilders.peek();
        Label elseLabel = mBuilder.newLabel();
        Label mergeLabel = mBuilder.newLabel();
        // cond
        driver.pushNode(node.get(0));
        mBuilder.unbox(toJavaClass(node.get(0).getType(null)), boolean.class);
        mBuilder.push(true);
        // cond != true -> else branch
        mBuilder.ifCmp(Type.BOOLEAN_TYPE, GeneratorAdapter.NE, elseLabel);
        // then
        driver.pushNode(node.get(1));
        mBuilder.goTo(mergeLabel);
        // else
        mBuilder.mark(elseLabel);
        driver.pushNode(node.get(2));
        // merge
        mBuilder.mark(mergeLabel);
    }
}
/**
 * generate let.
 * Children: 0 = variable name, 2 = initializer expression. A #let node makes
 * the variable read-only.
 * @author skgchxngsxyz-osx
 *
 */
protected class VarDecl extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        String varName = node.get(0).getText();
        boolean isReadOnly = node.is("#let");
        defineVariable(driver, varName, node.get(2), isReadOnly);
    }
}
/**
 * Defines a variable in the current scope. At scope depth > 1 it becomes a
 * local slot; at top level a dedicated holder class (name + "_LetVarHolder")
 * with a static field is generated and eagerly loaded, and its static
 * initializer evaluates the init expression.
 */
protected void defineVariable(BunDriver driver, String varName, PegObject initValueNode, boolean isReadOnly) {
    MethodBuilder currentBuilder = mBuilders.peek();
    // add var entry to scope
    Class<?> varClass = toJavaClass(initValueNode.getType(null));
    VarEntry entry = currentBuilder.getScopes().addEntry(varName, varClass, isReadOnly);
    int scopeDepth = currentBuilder.getScopes().depth();
    if(scopeDepth > 1) { // define as local variable.
        driver.pushNode(initValueNode);
        // getOpcode(ISTORE) picks the type-appropriate xSTORE opcode.
        currentBuilder.visitVarInsn(Type.getType(varClass).getOpcode(ISTORE), entry.getVarIndex());
    }
    else { // define as global variable.
        ClassBuilder cBuilder = new ClassBuilder(varName + JvmDriver.globalVarHolderSuffix);
        // create static initializer
        Method methodDesc = Method.getMethod("void <clinit> ()");
        MethodBuilder mBuilder = new MethodBuilder(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, methodDesc, null, null, cBuilder);
        mBuilders.push(mBuilder);
        driver.pushNode(initValueNode);
        Type ownerTypeDesc = Type.getType(cBuilder.getClassName());
        Type fieldTypeDesc = Type.getType(varClass);
        mBuilder.putStatic(ownerTypeDesc, globalVarHolderFieldName, fieldTypeDesc);
        mBuilder.returnValue();
        mBuilder.endMethod();
        mBuilders.pop();
        // create var field
        cBuilder.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, globalVarHolderFieldName, fieldTypeDesc.getDescriptor(), null, null);
        // finalize
        cBuilder.visitEnd();
        loader.generateClassFromByteCode(cBuilder.getClassName(), cBuilder.toByteArray());
        // initialized let var: touching the field triggers <clinit> now;
        // the value itself is discarded.
        currentBuilder.getStatic(ownerTypeDesc, globalVarHolderFieldName, fieldTypeDesc);
        currentBuilder.pop(varClass);
    }
}
/** Thin command wrapper delegating to assignToLeft. */
protected class AssignCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        assignToLeft(driver, node, param);
    }
}
/**
 * Generates an assignment to the left-hand node (children: 0 = target,
 * 1 = value). #name targets store into a local slot or the global holder
 * field; #index and #field targets are not implemented yet (TODO).
 *
 * @throws RuntimeException for read-only targets or unsupported target kinds
 */
protected void assignToLeft(BunDriver driver, PegObject node, String[] param) { //TODO: field
    MethodBuilder mBuilder = mBuilders.peek();
    PegObject leftNode = node.get(0);
    PegObject rightNode = node.get(1);
    if(leftNode.is("#name")) {
        String varName = leftNode.getText();
        VarEntry entry = mBuilder.getScopes().getEntry(varName);
        if(entry.isReadOnly()) {
            throw new RuntimeException("read only variable: " + varName);
        }
        Type varTypeDesc = Type.getType(entry.getVarClass());
        if(!entry.isGlobal()) { // local variable
            int varIndex = entry.getVarIndex();
            driver.pushNode(rightNode);
            mBuilder.visitVarInsn(varTypeDesc.getOpcode(ISTORE), varIndex);
        }
        else { // global variable
            driver.pushNode(rightNode);
            Type varHolderDesc = Type.getType(varName + globalVarHolderSuffix);
            mBuilder.putStatic(varHolderDesc, globalVarHolderFieldName, varTypeDesc);
        }
    }
    else if(leftNode.is("#index")) {
        PegObject recvNode = leftNode.get(0);
        driver.pushNode(recvNode);
    }
    else if(leftNode.is("#field")) { //TODO:
    }
    else {
        // fixed garbled message (was "unsuppored assing")
        throw new RuntimeException("unsupported assign: " + leftNode.tag);
    }
}
/**
 * Python-style assignment: assigning to an undeclared #name implicitly
 * declares a new (mutable) variable; otherwise behaves like Assign.
 */
protected class PythonAssign extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        PegObject leftNode = node.get(0);
        if(leftNode.is("#name")) {
            if(!mBuilders.peek().getScopes().hasEntry(leftNode.getText())) {
                defineVariable(driver, leftNode.getText(), node.get(1), false);
                return;
            }
        }
        assignToLeft(driver, node, param);
    }
}
/**
 * Loads a symbol: local-variable load, getstatic on the global holder class,
 * or getstatic of the function object for function entries.
 * NOTE(review): duplicates the loading logic of pushName (minus the
 * top-level echo) — candidates for a shared helper.
 */
protected class SymbolCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        MethodBuilder mBuilder = mBuilders.peek();
        String varName = node.getText();
        VarEntry entry = mBuilder.getScopes().getEntry(varName);
        if(!(entry instanceof FuncEntry)) { // load variable
            Type varTypeDesc = Type.getType(entry.getVarClass());
            if(!entry.isGlobal()) { // get local variable
                mBuilder.visitVarInsn(varTypeDesc.getOpcode(ILOAD), entry.getVarIndex());
            }
            else { // get global variable
                Type varHolderDesc = Type.getType(varName + globalVarHolderSuffix);
                mBuilder.getStatic(varHolderDesc, globalVarHolderFieldName, varTypeDesc);
            }
        }
        else { // load func object
            Type funcHolderDesc = Type.getType("L" + ((FuncEntry)entry).getInternalName() + ";");
            mBuilder.getStatic(funcHolderDesc, funcFieldName, funcHolderDesc);
        }
    }
}
/**
 * Generates a statement block: each child is generated in order and any
 * non-void result it leaves on the operand stack is popped.
 */
protected class Block extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        int size = node.size();
        for(int i = 0; i < size; i++) {
            PegObject targetNode = node.get(i);
            driver.pushNode(targetNode);
            Class<?> stacktopClass = toJavaClass(targetNode.getType(null));
            if(!stacktopClass.equals(Void.class)) {
                mBuilders.peek().pop(stacktopClass);
            }
        }
    }
}
/**
 * Generates an if statement. Children: 0 = condition, 1 = then block,
 * optional 2 = else block. Each branch block gets its own variable scope.
 */
protected class IfStatement extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        int nodeSize = node.size();
        MethodBuilder mBuilder = mBuilders.peek();
        Label elseLabel = mBuilder.newLabel();
        Label mergeLabel = mBuilder.newLabel();
        // if cond
        driver.pushNode(node.get(0));
        mBuilder.unbox(toJavaClass(node.get(0).getType(null)), boolean.class);
        mBuilder.push(true);
        // cond != true -> else branch
        mBuilder.ifCmp(Type.BOOLEAN_TYPE, GeneratorAdapter.NE, elseLabel);
        // then block
        generateBlockWithNewScope(driver, node.get(1));
        mBuilder.goTo(mergeLabel);
        // else block
        mBuilder.mark(elseLabel);
        if(nodeSize == 3) {
            generateBlockWithNewScope(driver, node.get(2));
        }
        mBuilder.mark(mergeLabel);
    }
}
/**
 * Generates a while loop. Children: 0 = condition, 1 = body block.
 * Break/continue labels are pushed onto the builder's label stacks for the
 * duration of the body so nested Jump commands can target them.
 */
protected class WhileStatement extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        MethodBuilder mBuilder = mBuilders.peek();
        Label breakLabel = mBuilder.newLabel();
        Label continueLabel = mBuilder.newLabel();
        mBuilder.getBreackLabels().push(breakLabel);
        mBuilder.getContinueLabels().push(continueLabel);
        mBuilder.mark(continueLabel);
        // NOTE: push(true) precedes the condition here (operand order swapped
        // vs. IfStatement); the NE comparison result is the same.
        mBuilder.push(true);
        driver.pushNode(node.get(0));
        mBuilder.unbox(toJavaClass(node.get(0).getType(null)), boolean.class);
        mBuilder.ifCmp(Type.BOOLEAN_TYPE, GeneratorAdapter.NE, breakLabel);
        generateBlockWithNewScope(driver, node.get(1));
        mBuilder.goTo(continueLabel);
        mBuilder.mark(breakLabel);
        mBuilder.getBreackLabels().pop();
        mBuilder.getContinueLabels().pop();
    }
}
/**
 * Boxes the stack-top primitive; param[0] names its bun type (must be
 * present in classMap).
 */
protected class BoxCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        String typeName = param[0];
        Class<?> stacktopClass = classMap.get(typeName);
        mBuilders.peek().box(Type.getType(stacktopClass));
    }
}
/**
 * need stacktop class
 * Unboxes the stack-top value; param[0] names its bun type (must be present
 * in classMap).
 * @author skgchxngsxyz-osx
 *
 */
protected class UnBoxCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        String typeName = param[0];
        Class<?> stacktopClass = classMap.get(typeName);
        mBuilders.peek().unbox(Type.getType(stacktopClass));
    }
}
/**
 * generate array. actually call constructor of ArrayImpl.ArrayImpl(Object[])
 * not support primitive value.
 * Builds an Object[] of the element expressions, then invokes the ArrayImpl
 * constructor with it.
 * @author skgchxngsxyz-osx
 *
 */
protected class NewArrayCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        int size = node.size();
        Type elementTypeDesc = Type.getType(Object.class);
        Type arrayClassDesc = Type.getType(ArrayImpl.class);
        Method arrayInitDesc = this.createArrayInitDesc(elementTypeDesc);
        GeneratorAdapter adapter = mBuilders.peek();
        adapter.newInstance(arrayClassDesc);
        adapter.dup();
        // fill the Object[] element by element
        adapter.push(size);
        adapter.newArray(elementTypeDesc);
        for(int i = 0; i < size; i++) {
            adapter.dup();
            adapter.push(i);
            driver.pushNode(node.get(i));
            adapter.arrayStore(elementTypeDesc);
        }
        adapter.invokeConstructor(arrayClassDesc, arrayInitDesc);
    }

    /** Builds the descriptor of ArrayImpl's (Object[]) constructor. */
    private Method createArrayInitDesc(Type elementTypeDesc) {
        Type paramTypeDesc = Type.getType("[" + elementTypeDesc.getDescriptor());
        Type returnTypeDesc = Type.VOID_TYPE;
        return new Method("<init>", returnTypeDesc, new Type[]{paramTypeDesc});
    }
}
/**
 * Generates a map literal: builds a String[] of keys and an Object[] of
 * values (each child node holds a key at index 0 and a value at index 1),
 * then invokes the MapImpl(String[], Object[]) constructor.
 */
protected class NewMapCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        int size = node.size();
        Type keyTypeDesc = Type.getType(String.class);
        Type valueTypeDesc = Type.getType(Object.class);
        Type mapClassDesc = Type.getType(MapImpl.class);
        Method mapInitDesc = this.createMapInitDesc(valueTypeDesc);
        GeneratorAdapter adapter = mBuilders.peek();
        adapter.newInstance(mapClassDesc);
        adapter.dup();
        // create key array
        adapter.push(size);
        adapter.newArray(keyTypeDesc);
        for(int i = 0; i < size; i++) {
            adapter.dup();
            adapter.push(i);
            driver.pushNode(node.get(i).get(0));
            adapter.checkCast(keyTypeDesc);
            adapter.arrayStore(keyTypeDesc);
        }
        // create value array
        adapter.push(size);
        adapter.newArray(valueTypeDesc);
        for(int i = 0; i < size; i++) {
            adapter.dup();
            adapter.push(i);
            driver.pushNode(node.get(i).get(1));
            adapter.arrayStore(valueTypeDesc);
        }
        adapter.invokeConstructor(mapClassDesc, mapInitDesc);
    }

    /** Builds the descriptor of MapImpl's (String[], Object[]) constructor. */
    private Method createMapInitDesc(Type valueTypeDesc) {
        Type paramTypeDesc1 = Type.getType("[" + Type.getType(String.class).getDescriptor());
        Type paramTypeDesc2 = Type.getType("[" + valueTypeDesc.getDescriptor());
        Type returnTypeDesc = Type.VOID_TYPE;
        return new Method("<init>", returnTypeDesc, new Type[]{paramTypeDesc1, paramTypeDesc2});
    }
}
/**
 * Generates a call to JvmRuntime.JvmOperator.printValue for the first
 * child's value; reference types all use the Object overload.
 */
protected class PrintCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        Class<?> valueClass = toJavaClass(node.get(0).getType(null));
        if(!valueClass.isPrimitive()) {
            valueClass = Object.class;
        }
        try {
            java.lang.reflect.Method method = JvmOperator.class.getMethod("printValue", valueClass);
            mBuilders.peek().invokeStatic(Type.getType(JvmOperator.class), Method.getMethod(method));
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }
}
/**
 * Compiles a function definition into its own FuncHolder subclass.
 * The generated class contains:
 * - a public static field (funcFieldName) holding the singleton instance,
 * - a static initializer that creates and stores that singleton,
 * - a static method (staticFuncMethodName) holding the compiled body,
 * - an instance method (funcMethodName) delegating to the static one.
 * When the function is named "main", the loaded class and its static method
 * descriptor are remembered in mainClassPair as the program entry point.
 */
protected class DefineFunction extends DriverCommand {
@Override
public void invoke(BunDriver driver, PegObject node, String[] param) {
// node layout: get(0)=function name, get(1)=parameter list, get(3)=body block
String funcName = node.get(0).getText();
String internalName = mBuilders.peek().getScopes().addFuncEntry(funcName, false).getInternalName();
ClassBuilder cBuilder = new ClassBuilder(internalName, FuncHolder.class);
// static field
Type fieldTypeDesc = Type.getType("L" + internalName + ";");
cBuilder.visitField(ACC_PUBLIC | ACC_STATIC, funcFieldName, fieldTypeDesc.getDescriptor(), null, null);
// static initializer: creates the singleton and stores it in the static field
Method initDesc = Method.getMethod("void <init> ()");
Method clinitDesc = Method.getMethod("void <clinit> ()");
MethodBuilder mBuilder = new MethodBuilder(ACC_PUBLIC | ACC_STATIC, clinitDesc, null, null, cBuilder);
mBuilder.newInstance(fieldTypeDesc);
mBuilder.dup();
mBuilder.invokeConstructor(fieldTypeDesc, initDesc);
mBuilder.putStatic(fieldTypeDesc, funcFieldName, fieldTypeDesc);
mBuilder.returnValue();
mBuilder.endMethod();
// constructor: delegates to FuncHolder's no-arg constructor
mBuilder = new MethodBuilder(ACC_PUBLIC, initDesc, null, null, cBuilder);
mBuilder.loadThis();
mBuilder.invokeConstructor(Type.getType(FuncHolder.class), initDesc);
mBuilder.returnValue();
mBuilder.endMethod();
// static method carrying the compiled function body
BunType returnType = node.getType(null).getReturnType().getRealType();
PegObject paramsNode = node.get(1);
Method methodDesc = this.toMethodDesc(returnType, staticFuncMethodName, paramsNode);//TODO:
staticFuncMap.put(internalName, new Pair<Type, Method>(fieldTypeDesc, methodDesc));
mBuilder = new MethodBuilder(ACC_PUBLIC | ACC_STATIC, methodDesc, null, null, cBuilder);
mBuilders.push(mBuilder);
// set argument: register each parameter in a fresh local scope
mBuilder.getScopes().createNewScope();
int paramSize = paramsNode.size();
for(int i = 0; i < paramSize; i++) {
PegObject paramNode = paramsNode.get(i).get(0);
mBuilder.getScopes().addEntry(paramNode.getText(), toJavaClass(paramNode.getType(null)), false);
}
// generate func body
generateBlockWithCurrentScope(driver, node.get(3));
mBuilder.getScopes().removeCurrentScope();
mBuilders.pop().endMethod();
// instance method: loads args and forwards to the static implementation
Method indirectMethidDesc = this.toMethodDesc(returnType, funcMethodName, paramsNode);
mBuilder = new MethodBuilder(ACC_PUBLIC, indirectMethidDesc, null, null, cBuilder);
mBuilder.loadArgs();
mBuilder.invokeStatic(fieldTypeDesc, methodDesc);
mBuilder.returnValue();
mBuilder.endMethod();
Class<?> funcHolderClass = loader.generateClassFromByteCode(internalName, cBuilder.toByteArray());
if(funcName.equals("main")) {
// remember the entry point so the driver can invoke it after compilation
mainClassPair = new Pair<Class<?>, Method>(funcHolderClass, methodDesc);
}
}
// TODO: return type
/**
 * Builds an ASM method descriptor from the bun return type and the
 * parameter-list node (each child's get(0) carries the typed parameter).
 */
private Method toMethodDesc(BunType returnType, String methodName, PegObject paramsNode) {
Type returnTypeDesc = Type.getType(toJavaClass(returnType));
int paramSize = paramsNode.size();
Type[] paramTypeDescs = new Type[paramSize];
for(int i = 0; i < paramSize; i++) {
paramTypeDescs[i] = Type.getType(toJavaClass(paramsNode.get(i).get(0).getType(null)));
}
return new Method(methodName, returnTypeDesc, paramTypeDescs);
}
}
/** Emits a return instruction appropriate for the current method's return type. */
protected class ReturnStatement extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        final MethodBuilder currentBuilder = mBuilders.peek();
        currentBuilder.returnValue();
    }
}
/**
 * Emits an unconditional jump for "break" or "continue" statements,
 * targeting the innermost loop's break/continue label.
 */
protected class JumpCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        MethodBuilder mBuilder = mBuilders.peek();
        switch(param[0]) {
        case "break":
            mBuilder.goTo(mBuilder.getBreackLabels().peek());
            break;
        case "continue":
            mBuilder.goTo(mBuilder.getContinueLabels().peek());
            break;
        default:
            throw new RuntimeException("unsupported target: " + param[0]);
        }
    }
}
/**
 * INDY [method name] [bootstrap method name] [return class] [param classes...]
 * Emits an invokedynamic instruction whose call-site descriptor is assembled
 * from the class names given in {@code param}.
 * @author skgchxngsxyz-osx
 *
 */
protected class DynamicInvokeCommand extends DriverCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        // param[0] = method name, param[1] = bootstrap handle key,
        // param[2] = return class name, param[3..] = parameter class names
        final Type returnTypeDesc = Type.getType(classMap.get(param[2]));
        final Type[] paramTypeDescs = new Type[param.length - 3];
        for(int i = 0; i < paramTypeDescs.length; i++) {
            paramTypeDescs[i] = Type.getType(classMap.get(param[i + 3]));
        }
        String methodDesc = Type.getMethodType(returnTypeDesc, paramTypeDescs).getDescriptor();
        mBuilders.peek().invokeDynamic(param[0], methodDesc, handleMap.get(param[1]));
    }
}
protected class ApplyCommand extends DriverCommand { //FIXME: method call
/**
 * Compiles an application node: either a method call (#field target,
 * dispatched via invokedynamic) or a direct call to a statically
 * compiled function (#name target).
 */
@Override
public void invoke(BunDriver driver, PegObject node, String[] param) {
PegObject targetNode = node.get(0);
PegObject argsNode = node.get(1);
if(targetNode.is("#field")) { // method call
PegObject recvNode = targetNode.get(0);
String methodName = targetNode.get(1).getText();
int paramSize = argsNode.size();
// push the receiver first, then each argument
driver.pushNode(recvNode);
for(int i = 0 ; i < paramSize; i++) {
driver.pushNode(argsNode.get(i));
}
// +1: the receiver counts as the first parameter of the dynamic call site
String typeDesc = this.createDescriptor(paramSize + 1).getDescriptor();
mBuilders.peek().invokeDynamic(methodName, typeDesc, handleMap.get("bsmMethod"));
}
else if(targetNode.is("#name")) { // func call
MethodBuilder mBuilder = mBuilders.peek();
String internalFuncName = ((FuncEntry)mBuilder.getScopes().getEntry(targetNode.getText())).getInternalName();
Pair<Type, Method> pair = staticFuncMap.get(internalFuncName);
int paramSize = argsNode.size();
for(int i = 0 ; i < paramSize; i++) {
driver.pushNode(argsNode.get(i));
}
// direct static dispatch to the previously compiled function holder
mBuilder.invokeStatic(pair.getLeft(), pair.getRight());
}
else {
throw new RuntimeException("unsupported apply: " + targetNode.tag);
}
}
/**
 * Builds an (Object, ..., Object) -> Object method type for the
 * invokedynamic call site; every position is erased to Object.
 */
private Type createDescriptor(int paramSize) {
Type[] paramTypeDescs = new Type[paramSize];
Type returnTypeDesc = Type.getType(Object.class);
for(int i = 0; i < paramSize; i++) {
paramTypeDescs[i] = Type.getType(Object.class);
}
return Type.getMethodType(returnTypeDesc, paramTypeDescs);
}
}
/**
 * Base class for driver commands that map one JVM opcode mnemonic to an
 * ASM visit call. Subclasses register themselves for every mnemonic of
 * their opcode family via {@link #addToDriver(JvmDriver)}.
 */
protected abstract class JvmOpcodeCommand extends DriverCommand {
public abstract void addToDriver(JvmDriver driver);
/**
 * replace '.' to '/'
 * add class name prefix (org/libbun/drv/JvmRuntime$) when the name has no package
 * @param name
 * - class name or method descriptor.
 * @return
 * the internal name, or (primitive names) the JVM type descriptor
 */
protected String format(String name) {
return this.format(name, false);
}
protected String format(String name, boolean usedForMethodDesc) {
// primitive names map straight to their one-letter descriptors
switch(name) {
case "int":
return Type.INT_TYPE.getDescriptor();
case "long":
return Type.LONG_TYPE.getDescriptor();
case "short":
return Type.SHORT_TYPE.getDescriptor();
case "byte":
return Type.BYTE_TYPE.getDescriptor();
case "float":
return Type.FLOAT_TYPE.getDescriptor();
case "double":
return Type.DOUBLE_TYPE.getDescriptor();
case "boolean":
return Type.BOOLEAN_TYPE.getDescriptor();
case "void":
return Type.VOID_TYPE.getDescriptor();
}
String replacedName = name.replace('.', '/');
// unqualified names are assumed to be nested classes of JvmRuntime
if(replacedName.indexOf('/') == -1) {
replacedName = "org/libbun/drv/JvmRuntime$" + replacedName;
}
// descriptors need the L...; wrapping; internal names do not
return usedForMethodDesc? "L" + replacedName + ";" : replacedName;
}
/**
 * create method descriptor, e.g. {@code (Ljava/lang/String;I)V}
 * @param returnClassName
 * @param paramClassName
 * @param startIndex index of the first parameter class name within the array
 * @return
 */
protected String format(String returnClassName, String[] paramClassName, int startIndex) {
StringBuilder sBuilder = new StringBuilder();
sBuilder.append('(');
for(int i = startIndex; i < paramClassName.length; i++) {
sBuilder.append(this.format(paramClassName[i], true));
}
sBuilder.append(')');
sBuilder.append(this.format(returnClassName, true));
return sBuilder.toString();
}
}
/** Handles opcodes that take no operand (NOP, DUP, IADD, RETURN, ...). */
protected class ZeroOperandInsCommand extends JvmOpcodeCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        JvmOpCode opcode = ZeroOperandIns.toCode(((JvmDriver) driver).getCommandSymbol());
        mBuilders.peek().visitInsn(opcode.getOpCode());
    }
    @Override
    public void addToDriver(JvmDriver driver) {
        // register one driver command per mnemonic
        for(ZeroOperandIns code : ZeroOperandIns.values()) {
            driver.addCommand(code.name(), this);
        }
    }
}
/** Handles opcodes carrying a single int operand (BIPUSH, SIPUSH, NEWARRAY). */
protected class SingleIntOperandInsCommand extends JvmOpcodeCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        JvmOpCode opcode = SingleIntOperandIns.toCode(((JvmDriver) driver).getCommandSymbol());
        int operand = Integer.parseInt(param[0]);
        mBuilders.peek().visitIntInsn(opcode.getOpCode(), operand);
    }
    @Override
    public void addToDriver(JvmDriver driver) {
        for(SingleIntOperandIns code : SingleIntOperandIns.values()) {
            driver.addCommand(code.name(), this);
        }
    }
}
/** Handles local-variable opcodes (xLOAD/xSTORE/RET); param[0] is the slot index. */
protected class VarInsCommand extends JvmOpcodeCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        JvmOpCode opcode = VarIns.toCode(((JvmDriver) driver).getCommandSymbol());
        int varIndex = Integer.parseInt(param[0]);
        mBuilders.peek().visitVarInsn(opcode.getOpCode(), varIndex);
    }
    @Override
    public void addToDriver(JvmDriver driver) {
        for(VarIns code : VarIns.values()) {
            driver.addCommand(code.name(), this);
        }
    }
}
/** Handles type opcodes (NEW, ANEWARRAY, CHECKCAST, INSTANCEOF); param[0] names the class. */
protected class TypeInsCommand extends JvmOpcodeCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        JvmOpCode opcode = TypeIns.toCode(((JvmDriver) driver).getCommandSymbol());
        mBuilders.peek().visitTypeInsn(opcode.getOpCode(), format(param[0]));
    }
    @Override
    public void addToDriver(JvmDriver driver) {
        for(TypeIns code : TypeIns.values()) {
            driver.addCommand(code.name(), this);
        }
    }
}
/**
 * Handles field-access opcodes (GET/PUTSTATIC, GET/PUTFIELD).
 * param[0]=owner class, param[1]=field name, param[2]=field type.
 */
protected class FieldInsCommand extends JvmOpcodeCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        JvmOpCode opcode = FieldIns.toCode(((JvmDriver) driver).getCommandSymbol());
        String owner = format(param[0]);
        mBuilders.peek().visitFieldInsn(opcode.getOpCode(), owner, format(param[1]), format(param[2]));
    }
    @Override
    public void addToDriver(JvmDriver driver) {
        for(FieldIns code : FieldIns.values()) {
            driver.addCommand(code.name(), this);
        }
    }
}
/**
 * Handles method-call opcodes (INVOKEVIRTUAL/SPECIAL/STATIC/INTERFACE).
 * param[0]=owner class, param[1]=return class, param[2]=method name, param[3..]=param classes.
 */
protected class MethodInsCommand extends JvmOpcodeCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        JvmOpCode opcode = MethodIns.toCode(((JvmDriver) driver).getCommandSymbol());
        String owner = format(param[0]);
        String methodDesc = format(param[1], param, 3);
        mBuilders.peek().visitMethodInsn(opcode.getOpCode(), owner, param[2], methodDesc);
    }
    @Override
    public void addToDriver(JvmDriver driver) {
        for(MethodIns code : MethodIns.values()) {
            driver.addCommand(code.name(), this);
        }
    }
}
/** Handles jump opcodes; param[0] is the (suffixed) label name in the current method. */
protected class JumpInsCommand extends JvmOpcodeCommand {
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        JvmOpCode opcode = JumpIns.toCode(((JvmDriver) driver).getCommandSymbol());
        MethodBuilder builder = mBuilders.peek();
        builder.visitJumpInsn(opcode.getOpCode(), builder.getLabelMap().get(param[0]));
    }
    @Override
    public void addToDriver(JvmDriver driver) {
        for(JumpIns code : JumpIns.values()) {
            driver.addCommand(code.name(), this);
        }
    }
}
/**
 * Declares one jump label per name in param for the current method.
 * Label names are suffixed with the node's hash code so the same textual
 * name can be reused by different AST nodes without collision.
 * @throws RuntimeException if a label of that (suffixed) name already exists
 */
protected class LabelCommand extends DriverCommand { //FIXME:
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        int nameSuffix = node.hashCode();
        MethodBuilder mBuilder = mBuilders.peek();
        Map<String, Label> labelMap = mBuilder.getLabelMap();
        for(String labelName : param) {
            String actualName = labelName + nameSuffix;
            if(labelMap.containsKey(actualName)) {
                // fixed typo in the error message ("areadly" -> "already")
                throw new RuntimeException("already defined label: " + actualName);
            }
            labelMap.put(actualName, mBuilder.newLabel());
        }
    }
}
/** Removes the labels previously declared by a matching LabelCommand for this node. */
protected class UnLabelCommand extends DriverCommand { //FIXME
    @Override
    public void invoke(BunDriver driver, PegObject node, String[] param) {
        final int nameSuffix = node.hashCode();
        final Map<String, Label> labelMap = mBuilders.peek().getLabelMap();
        for(int i = 0; i < param.length; i++) {
            labelMap.remove(param[i] + nameSuffix);
        }
    }
}
/**
 * JvmDriver variant that enables bytecode debug output.
 * NOTE(review): the debug flag is a static on JvmByteCodeLoader, so
 * constructing this driver affects all subsequently loaded classes.
 */
public static class DebuggableJvmDriver extends JvmDriver {
public DebuggableJvmDriver() {
JvmByteCodeLoader.setDebugMode(true);
}
}
}
/**
 * represent jvm opcode
 * Implemented by the opcode enums below so the driver commands can treat
 * all opcode families uniformly.
 * @author skgchxngsxyz-osx
 *
 */
interface JvmOpCode {
/**
 * get opcode used by asm.
 * @return
 * - opcode
 */
public int getOpCode();
}
/**
 * OP
 * Opcodes that take no operand in the instruction stream.
 * @author skgchxngsxyz-osx
 *
 */
enum ZeroOperandIns implements JvmOpCode {
NOP (Opcodes.NOP),
ACONST_NULL (Opcodes.ACONST_NULL),
ICONST_M1 (Opcodes.ICONST_M1),
ICONST_0 (Opcodes.ICONST_0),
ICONST_1 (Opcodes.ICONST_1),
ICONST_2 (Opcodes.ICONST_2),
ICONST_3 (Opcodes.ICONST_3),
ICONST_4 (Opcodes.ICONST_4),
ICONST_5 (Opcodes.ICONST_5),
LCONST_0 (Opcodes.LCONST_0),
LCONST_1 (Opcodes.LCONST_1),
FCONST_0 (Opcodes.FCONST_0),
FCONST_1 (Opcodes.FCONST_1),
FCONST_2 (Opcodes.FCONST_2),
DCONST_0 (Opcodes.DCONST_0),
DCONST_1 (Opcodes.DCONST_1),
IALOAD (Opcodes.IALOAD),
LALOAD (Opcodes.LALOAD),
FALOAD (Opcodes.FALOAD),
DALOAD (Opcodes.DALOAD),
AALOAD (Opcodes.AALOAD),
BALOAD (Opcodes.BALOAD),
CALOAD (Opcodes.CALOAD),
SALOAD (Opcodes.SALOAD),
IASTORE (Opcodes.IASTORE),
LASTORE (Opcodes.LASTORE),
FASTORE (Opcodes.FASTORE),
DASTORE (Opcodes.DASTORE),
AASTORE (Opcodes.AASTORE),
BASTORE (Opcodes.BASTORE),
CASTORE (Opcodes.CASTORE),
SASTORE (Opcodes.SASTORE),
POP (Opcodes.POP),
POP2 (Opcodes.POP2),
DUP (Opcodes.DUP),
DUP_X1 (Opcodes.DUP_X1),
DUP_X2 (Opcodes.DUP_X2),
DUP2 (Opcodes.DUP2),
DUP2_X1 (Opcodes.DUP2_X1),
DUP2_X2 (Opcodes.DUP2_X2),
SWAP (Opcodes.SWAP),
IADD (Opcodes.IADD),
LADD (Opcodes.LADD),
FADD (Opcodes.FADD),
DADD (Opcodes.DADD),
ISUB (Opcodes.ISUB),
LSUB (Opcodes.LSUB),
FSUB (Opcodes.FSUB),
DSUB (Opcodes.DSUB),
IMUL (Opcodes.IMUL),
LMUL (Opcodes.LMUL),
FMUL (Opcodes.FMUL),
DMUL (Opcodes.DMUL),
IDIV (Opcodes.IDIV),
LDIV (Opcodes.LDIV),
FDIV (Opcodes.FDIV),
DDIV (Opcodes.DDIV),
IREM (Opcodes.IREM),
LREM (Opcodes.LREM),
FREM (Opcodes.FREM),
DREM (Opcodes.DREM),
INEG (Opcodes.INEG),
LNEG (Opcodes.LNEG),
FNEG (Opcodes.FNEG),
DNEG (Opcodes.DNEG),
ISHL (Opcodes.ISHL),
LSHL (Opcodes.LSHL),
ISHR (Opcodes.ISHR),
LSHR (Opcodes.LSHR),
IUSHR (Opcodes.IUSHR),
LUSHR (Opcodes.LUSHR),
IAND (Opcodes.IAND),
LAND (Opcodes.LAND),
IOR (Opcodes.IOR),
LOR (Opcodes.LOR),
IXOR (Opcodes.IXOR),
LXOR (Opcodes.LXOR),
I2L (Opcodes.I2L),
I2F (Opcodes.I2F),
I2D (Opcodes.I2D),
L2I (Opcodes.L2I),
L2F (Opcodes.L2F),
L2D (Opcodes.L2D),
F2I (Opcodes.F2I),
F2L (Opcodes.F2L),
F2D (Opcodes.F2D),
D2I (Opcodes.D2I),
D2L (Opcodes.D2L),
D2F (Opcodes.D2F),
I2B (Opcodes.I2B),
I2C (Opcodes.I2C),
I2S (Opcodes.I2S),
LCMP (Opcodes.LCMP),
FCMPL (Opcodes.FCMPL),
FCMPG (Opcodes.FCMPG),
DCMPL (Opcodes.DCMPL),
DCMPG (Opcodes.DCMPG),
IRETURN (Opcodes.IRETURN),
LRETURN (Opcodes.LRETURN),
FRETURN (Opcodes.FRETURN),
DRETURN (Opcodes.DRETURN),
ARETURN (Opcodes.ARETURN),
RETURN (Opcodes.RETURN),
ARRAYLENGTH (Opcodes.ARRAYLENGTH),
ATHROW (Opcodes.ATHROW),
MONITORENTER (Opcodes.MONITORENTER),
MONITOREXIT (Opcodes.MONITOREXIT);
// the asm/JVM opcode value for this mnemonic
private final int opcode;
private ZeroOperandIns(int opcode) {
this.opcode = opcode;
}
@Override
public int getOpCode() {
return this.opcode;
}
/**
 * Looks up the enum constant for a mnemonic.
 * @return the matching constant, or null if the mnemonic is unknown
 */
public static JvmOpCode toCode(String codeString) {
try {
return valueOf(codeString);
}
catch(Exception e) {
// intentionally ignored: an unknown mnemonic maps to null
}
return null;
}
}
/**
 * OP [byte]
 * Opcodes taking one immediate int operand.
 * @author skgchxngsxyz-osx
 *
 */
enum SingleIntOperandIns implements JvmOpCode {
BIPUSH (Opcodes.BIPUSH),
SIPUSH (Opcodes.SIPUSH),
NEWARRAY (Opcodes.NEWARRAY);
// the asm/JVM opcode value for this mnemonic
private final int opcode;
private SingleIntOperandIns(int opcode) {
this.opcode = opcode;
}
@Override
public int getOpCode() {
return this.opcode;
}
/**
 * Looks up the enum constant for a mnemonic.
 * @return the matching constant, or null if the mnemonic is unknown
 */
public static JvmOpCode toCode(String codeString) {
try {
return valueOf(codeString);
}
catch(Exception e) {
// intentionally ignored: an unknown mnemonic maps to null
}
return null;
}
}
/**
 * OP [var index]
 * Opcodes that load/store a local-variable slot.
 * @author skgchxngsxyz-osx
 *
 */
enum VarIns implements JvmOpCode {
ILOAD (Opcodes.ILOAD),
LLOAD (Opcodes.LLOAD),
FLOAD (Opcodes.FLOAD),
DLOAD (Opcodes.DLOAD),
ALOAD (Opcodes.ALOAD),
ISTORE (Opcodes.ISTORE),
LSTORE (Opcodes.LSTORE),
FSTORE (Opcodes.FSTORE),
DSTORE (Opcodes.DSTORE),
ASTORE (Opcodes.ASTORE),
RET (Opcodes.RET);
// the asm/JVM opcode value for this mnemonic
private final int opcode;
private VarIns(int opcode) {
this.opcode = opcode;
}
@Override
public int getOpCode() {
return this.opcode;
}
/**
 * Looks up the enum constant for a mnemonic.
 * @return the matching constant, or null if the mnemonic is unknown
 */
public static JvmOpCode toCode(String codeString) {
try {
return valueOf(codeString);
}
catch(Exception e) {
// intentionally ignored: an unknown mnemonic maps to null
}
return null;
}
}
/**
 * OP [internal class name]
 * Opcodes taking a class reference operand.
 * @author skgchxngsxyz-osx
 *
 */
enum TypeIns implements JvmOpCode {
NEW (Opcodes.NEW),
ANEWARRAY (Opcodes.ANEWARRAY),
CHECKCAST (Opcodes.CHECKCAST),
INSTANCEOF (Opcodes.INSTANCEOF);
// the asm/JVM opcode value for this mnemonic
private final int opcode;
private TypeIns(int opcode) {
this.opcode = opcode;
}
@Override
public int getOpCode() {
return this.opcode;
}
/**
 * Looks up the enum constant for a mnemonic.
 * @return the matching constant, or null if the mnemonic is unknown
 */
public static JvmOpCode toCode(String codeString) {
try {
return valueOf(codeString);
}
catch(Exception e) {
// intentionally ignored: an unknown mnemonic maps to null
}
return null;
}
}
/**
 * OP [internal class name of owner] [field name] [field's type descriptor]
 * Opcodes that read or write a field.
 * @author skgchxngsxyz-osx
 *
 */
enum FieldIns implements JvmOpCode {
GETSTATIC (Opcodes.GETSTATIC),
PUTSTATIC (Opcodes.PUTSTATIC),
GETFIELD (Opcodes.GETFIELD),
PUTFIELD (Opcodes.PUTFIELD);
// the asm/JVM opcode value for this mnemonic
private final int opcode;
private FieldIns(int opcode) {
this.opcode = opcode;
}
@Override
public int getOpCode() {
return this.opcode;
}
/**
 * Looks up the enum constant for a mnemonic.
 * @return the matching constant, or null if the mnemonic is unknown
 */
public static JvmOpCode toCode(String codeString) {
try {
return valueOf(codeString);
}
catch(Exception e) {
// intentionally ignored: an unknown mnemonic maps to null
}
return null;
}
}
/**
 * OP [internal class name of owner] [return class name] [method name] [param class names]...
 * Opcodes that invoke a method.
 * @author skgchxngsxyz-osx
 *
 */
enum MethodIns implements JvmOpCode {
INVOKEVIRTUAL (Opcodes.INVOKEVIRTUAL),
INVOKESPECIAL (Opcodes.INVOKESPECIAL),
INVOKESTATIC (Opcodes.INVOKESTATIC),
INVOKEINTERFACE (Opcodes.INVOKEINTERFACE);
// the asm/JVM opcode value for this mnemonic
private final int opcode;
private MethodIns(int opcode) {
this.opcode = opcode;
}
@Override
public int getOpCode() {
return this.opcode;
}
/**
 * Looks up the enum constant for a mnemonic.
 * @return the matching constant, or null if the mnemonic is unknown
 */
public static JvmOpCode toCode(String codeString) {
try {
return valueOf(codeString);
}
catch(Exception e) {
// intentionally ignored: an unknown mnemonic maps to null
}
return null;
}
}
/**
 * OP [label name]
 * Conditional and unconditional branch opcodes.
 * @author skgchxngsxyz-osx
 *
 */
enum JumpIns implements JvmOpCode {
IFEQ (Opcodes.IFEQ),
IFNE (Opcodes.IFNE),
IFLT (Opcodes.IFLT),
IFGE (Opcodes.IFGE),
IFGT (Opcodes.IFGT),
IFLE (Opcodes.IFLE),
IF_ICMPEQ (Opcodes.IF_ICMPEQ),
IF_ICMPNE (Opcodes.IF_ICMPNE),
IF_ICMPLT (Opcodes.IF_ICMPLT),
IF_ICMPGE (Opcodes.IF_ICMPGE),
IF_ICMPGT (Opcodes.IF_ICMPGT),
IF_ICMPLE (Opcodes.IF_ICMPLE),
IF_ACMPEQ (Opcodes.IF_ACMPEQ),
IF_ACMPNE (Opcodes.IF_ACMPNE),
GOTO (Opcodes.GOTO),
JSR (Opcodes.JSR),
IFNULL (Opcodes.IFNULL),
IFNONNULL (Opcodes.IFNONNULL);
// the asm/JVM opcode value for this mnemonic
private final int opcode;
private JumpIns(int opcode) {
this.opcode = opcode;
}
@Override
public int getOpCode() {
return this.opcode;
}
/**
 * Looks up the enum constant for a mnemonic.
 * @return the matching constant, or null if the mnemonic is unknown
 */
public static JvmOpCode toCode(String codeString) {
try {
return valueOf(codeString);
}
catch(Exception e) {
// intentionally ignored: an unknown mnemonic maps to null
}
return null;
}
}
/**
 * OP [non null value(Integer, Float, Long, Double, String...)]
 * Constant-pool load opcode.
 * @author skgchxngsxyz-osx
 *
 */
enum LdcIns implements JvmOpCode { //TODO:
LDC (Opcodes.LDC);
// the asm/JVM opcode value for this mnemonic
private final int opcode;
private LdcIns(int opcode) {
this.opcode = opcode;
}
@Override
public int getOpCode() {
return this.opcode;
}
/**
 * Looks up the enum constant for a mnemonic.
 * @return the matching constant, or null if the mnemonic is unknown
 */
public static JvmOpCode toCode(String codeString) {
try {
return valueOf(codeString);
}
catch(Exception e) {
// intentionally ignored: an unknown mnemonic maps to null
}
return null;
}
}
/**
 * Simple mutable 2-tuple.
 * @param <L> type of the left element
 * @param <R> type of the right element
 */
class Pair<L,R> {
    private L left;
    private R right;
    public Pair(L left, R right) {
        this.left = left;
        this.right = right;
    }
    /** @return the left element (may be null) */
    public L getLeft() {
        return this.left;
    }
    /** @return the right element (may be null) */
    public R getRight() {
        return this.right;
    }
    public void setLeft(L left) {
        this.left = left;
    }
    public void setRight(R right) {
        this.right = right;
    }
    /**
     * @return "(left, right)". Null elements render as "null" — string
     * concatenation is used instead of calling toString() directly, which
     * previously threw NullPointerException for null elements.
     */
    @Override
    public String toString() {
        return "(" + this.left + ", " + this.right + ")";
    }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.NavigableMap;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.ClusterId;
import org.apache.hadoop.hbase.FullyQualifiedTableName;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.backup.HFileArchiver;
import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.exceptions.InvalidFamilyOperationException;
import org.apache.hadoop.hbase.exceptions.OrphanHLogAfterSplitException;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogSplitter;
import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.zookeeper.KeeperException;
/**
* This class abstracts a bunch of operations the HMaster needs to interact with
* the underlying file system, including splitting log files, checking file
* system status, etc.
*/
@InterfaceAudience.Private
public class MasterFileSystem {
private static final Log LOG = LogFactory.getLog(MasterFileSystem.class.getName());
// HBase configuration
Configuration conf;
// master status
Server master;
// metrics for master
MetricsMaster metricsMaster;
// Persisted unique cluster ID
private ClusterId clusterId;
// Keep around for convenience.
private final FileSystem fs;
// Is the filesystem ok? Latched to false permanently once a check fails.
private volatile boolean fsOk = true;
// The Path to the old logs dir
private final Path oldLogDir;
// root hbase directory on the FS
private final Path rootdir;
// hbase temp directory used for table construction and deletion
private final Path tempdir;
// serializes non-distributed log splitting of dead servers' logs
final Lock splitLogLock = new ReentrantLock();
// whether WAL edits are replayed directly on region servers during recovery
final boolean distributedLogReplay;
// whether WAL splitting is distributed across region servers
final boolean distributedLogSplitting;
final SplitLogManager splitLogManager;
private final MasterServices services;
// Accepts only meta WAL files.
final static PathFilter META_FILTER = new PathFilter() {
public boolean accept(Path p) {
return HLogUtil.isMetaFile(p);
}
};
// Accepts only non-meta WAL files.
final static PathFilter NON_META_FILTER = new PathFilter() {
public boolean accept(Path p) {
return !HLogUtil.isMetaFile(p);
}
};
/**
 * Resolves the HBase root/temp directories, binds the filesystem, wires up
 * the split-log manager, and lays out the initial directory structure.
 * @param master the running master server
 * @param services master services used for log splitting callbacks
 * @param metricsMaster metrics sink (may be null; guarded at use sites)
 * @param masterRecovery whether the master is restarting in recovery mode
 * @throws IOException if the filesystem cannot be reached or initialized
 */
public MasterFileSystem(Server master, MasterServices services,
MetricsMaster metricsMaster, boolean masterRecovery)
throws IOException {
this.conf = master.getConfiguration();
this.master = master;
this.services = services;
this.metricsMaster = metricsMaster;
// Set filesystem to be that of this.rootdir else we get complaints about
// mismatched filesystems if hbase.rootdir is hdfs and fs.defaultFS is
// default localfs. Presumption is that rootdir is fully-qualified before
// we get to here with appropriate fs scheme.
this.rootdir = FSUtils.getRootDir(conf);
this.tempdir = new Path(this.rootdir, HConstants.HBASE_TEMP_DIRECTORY);
// Cover both bases, the old way of setting default fs and the new.
// We're supposed to run on 0.20 and 0.21 anyways.
this.fs = this.rootdir.getFileSystem(conf);
FSUtils.setFsDefault(conf, new Path(this.fs.getUri()));
// make sure the fs has the same conf
fs.setConf(conf);
this.splitLogManager = new SplitLogManager(master.getZooKeeper(), master.getConfiguration(),
master, services, master.getServerName());
this.distributedLogSplitting = conf.getBoolean(HConstants.DISTRIBUTED_LOG_SPLITTING_KEY, true);
if (this.distributedLogSplitting) {
this.splitLogManager.finishInitialization(masterRecovery);
}
this.distributedLogReplay = this.conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY,
HConstants.DEFAULT_DISTRIBUTED_LOG_REPLAY_CONFIG);
// setup the filesystem variable
// set up the archived logs path
this.oldLogDir = createInitialFileSystemLayout();
HFileSystem.addLocationsOrderInterceptor(conf);
}
/**
 * Create initial layout in filesystem.
 * <ol>
 * <li>Check if the meta region exists and is readable, if not create it.
 * Create hbase.version and the .META. directory if not one.
 * </li>
 * <li>Create a log archive directory for RS to put archived logs</li>
 * </ol>
 * Idempotent.
 * @return the path of the old-logs (archive) directory
 * @throws IOException if any filesystem operation fails
 */
private Path createInitialFileSystemLayout() throws IOException {
// check if the root directory exists
checkRootDir(this.rootdir, conf, this.fs);
// check if temp directory exists and clean it
checkTempDir(this.tempdir, conf, this.fs);
Path oldLogDir = new Path(this.rootdir, HConstants.HREGION_OLDLOGDIR_NAME);
// Make sure the region servers can archive their old logs
if(!this.fs.exists(oldLogDir)) {
this.fs.mkdirs(oldLogDir);
}
return oldLogDir;
}
/**
 * @return the filesystem hosting the HBase root directory
 */
public FileSystem getFileSystem() {
return this.fs;
}
/**
 * Get the directory where old (archived) logs go
 * @return the dir
 */
public Path getOldLogDir() {
return this.oldLogDir;
}
/**
 * Checks to see if the file system is still accessible.
 * On failure, aborts the master and latches {@code fsOk} to false.
 * @return false if file system is not available
 */
public boolean checkFileSystem() {
    if (!this.fsOk) {
        // a previous failure is permanent; skip re-checking
        return false;
    }
    try {
        FSUtils.checkFileSystemAvailable(this.fs);
        FSUtils.checkDfsSafeMode(this.conf);
    } catch (IOException e) {
        master.abort("Shutting down HBase cluster: file system not available", e);
        this.fsOk = false;
    }
    return this.fsOk;
}
/**
 * @return HBase root dir.
 */
public Path getRootDir() {
return this.rootdir;
}
/**
 * @return HBase temp dir (used for table construction and deletion).
 */
public Path getTempDir() {
return this.tempdir;
}
/**
 * @return The unique identifier generated for this cluster
 */
public ClusterId getClusterId() {
return clusterId;
}
/**
 * Inspect the log directory to find dead servers which need recovery work
 * Retries on IOException unless hbase.hlog.split.skip.errors is set; halts
 * the process outright if the filesystem itself is bad.
 * @return A set of ServerNames which aren't running but still have WAL files left in file system
 */
Set<ServerName> getFailedServersFromLogFolders() {
boolean retrySplitting = !conf.getBoolean("hbase.hlog.split.skip.errors",
HLog.SPLIT_SKIP_ERRORS_DEFAULT);
Set<ServerName> serverNames = new HashSet<ServerName>();
Path logsDirPath = new Path(this.rootdir, HConstants.HREGION_LOGDIR_NAME);
do {
if (master.isStopped()) {
LOG.warn("Master stopped while trying to get failed servers.");
break;
}
try {
// no log dir means no servers to recover
if (!this.fs.exists(logsDirPath)) return serverNames;
FileStatus[] logFolders = FSUtils.listStatus(this.fs, logsDirPath, null);
// Get online servers after getting log folders to avoid log folder deletion of newly
// checked in region servers . see HBASE-5916
Set<ServerName> onlineServers = ((HMaster) master).getServerManager().getOnlineServers()
.keySet();
if (logFolders == null || logFolders.length == 0) {
LOG.debug("No log files to split, proceeding...");
return serverNames;
}
for (FileStatus status : logFolders) {
String sn = status.getPath().getName();
// truncate splitting suffix if present (for ServerName parsing)
if (sn.endsWith(HLog.SPLITTING_EXT)) {
sn = sn.substring(0, sn.length() - HLog.SPLITTING_EXT.length());
}
ServerName serverName = ServerName.parseServerName(sn);
if (!onlineServers.contains(serverName)) {
LOG.info("Log folder " + status.getPath() + " doesn't belong "
+ "to a known region server, splitting");
serverNames.add(serverName);
} else {
LOG.info("Log folder " + status.getPath() + " belongs to an existing region server");
}
}
retrySplitting = false;
} catch (IOException ioe) {
LOG.warn("Failed getting failed servers to be recovered.", ioe);
if (!checkFileSystem()) {
LOG.warn("Bad Filesystem, exiting");
Runtime.getRuntime().halt(1);
}
try {
// back off before retrying the listing
if (retrySplitting) {
Thread.sleep(conf.getInt("hbase.hlog.split.failure.retry.interval", 30 * 1000));
}
} catch (InterruptedException e) {
LOG.warn("Interrupted, aborting since cannot return w/o splitting");
Thread.currentThread().interrupt();
retrySplitting = false;
Runtime.getRuntime().halt(1);
}
}
} while (retrySplitting);
return serverNames;
}
/**
 * Splits the (non-meta) WALs of a single dead server.
 * @param serverName server whose logs should be split
 * @throws IOException if log splitting fails
 */
public void splitLog(final ServerName serverName) throws IOException {
    final Set<ServerName> single = new HashSet<ServerName>();
    single.add(serverName);
    this.splitLog(single);
}
/**
 * Specialized method to handle the splitting for meta HLog
 * Always uses distributed splitting and records meta-WAL split metrics.
 * @param serverName dead server whose meta WAL should be split
 * @throws IOException
 */
public void splitMetaLog(final ServerName serverName) throws IOException {
long splitTime = 0, splitLogSize = 0;
Set<ServerName> serverNames = new HashSet<ServerName>();
serverNames.add(serverName);
List<Path> logDirs = getLogDirs(serverNames);
splitLogManager.handleDeadWorkers(serverNames);
splitTime = EnvironmentEdgeManager.currentTimeMillis();
splitLogSize = splitLogManager.splitLogDistributed(serverNames, logDirs, META_FILTER);
splitTime = EnvironmentEdgeManager.currentTimeMillis() - splitTime;
if (this.metricsMaster != null) {
this.metricsMaster.addMetaWALSplit(splitTime, splitLogSize);
}
}
/**
 * Collects the log directories of the given servers, renaming each to its
 * "-splitting" form first so a rogue region server cannot keep appending.
 * @param serverNames servers whose log dirs should be gathered
 * @return the (renamed) log directories; servers with no log dir are skipped
 * @throws IOException if the fencing rename fails
 */
private List<Path> getLogDirs(final Set<ServerName> serverNames) throws IOException {
List<Path> logDirs = new ArrayList<Path>();
for (ServerName serverName: serverNames) {
Path logDir = new Path(this.rootdir, HLogUtil.getHLogDirectoryName(serverName.toString()));
Path splitDir = logDir.suffix(HLog.SPLITTING_EXT);
// Rename the directory so a rogue RS doesn't create more HLogs
if (fs.exists(logDir)) {
if (!this.fs.rename(logDir, splitDir)) {
throw new IOException("Failed fs.rename for log split: " + logDir);
}
logDir = splitDir;
LOG.debug("Renamed region directory: " + splitDir);
} else if (!fs.exists(splitDir)) {
// neither the live dir nor a previously renamed one: nothing to split
LOG.info("Log dir for server " + serverName + " does not exist");
continue;
}
logDirs.add(splitDir);
}
return logDirs;
}
/**
 * Mark regions in recovering state when distributedLogReplay is set true
 * @param serverNames Set of ServerNames to be replayed wals in order to recover changes contained
 * in them
 * @throws IOException wrapping any ZooKeeper failure
 */
public void prepareLogReplay(Set<ServerName> serverNames) throws IOException {
    if (!this.distributedLogReplay) {
        return;
    }
    // mark each dead server's user regions as recovering in ZK
    for (ServerName deadServer : serverNames) {
        NavigableMap<HRegionInfo, Result> userRegions = this.getServerUserRegions(deadServer);
        if (userRegions != null) {
            try {
                this.splitLogManager.markRegionsRecoveringInZK(deadServer, userRegions.keySet());
            } catch (KeeperException e) {
                throw new IOException(e);
            }
        }
    }
}
/**
 * Mark meta regions in recovering state when distributedLogReplay is set true. Used
 * when {@link #getServerUserRegions(ServerName)} can't be used because the meta RS is down.
 * @param serverName dead server whose meta WAL will be replayed
 * @param regions meta regions to flag as recovering; no-op when null
 * @throws IOException wrapping any ZooKeeper failure
 */
public void prepareMetaLogReplay(ServerName serverName, Set<HRegionInfo> regions)
throws IOException {
    if (regions == null || !this.distributedLogReplay) {
        return;
    }
    // flag the regions as recovering so reads are fenced until replay completes
    try {
        this.splitLogManager.markRegionsRecoveringInZK(serverName, regions);
    } catch (KeeperException e) {
        throw new IOException(e);
    }
}
/**
 * Splits the non-meta WALs of the given dead servers. Convenience wrapper that
 * delegates to {@link #splitLog(Set, PathFilter)} with {@code NON_META_FILTER}.
 *
 * @param serverNames dead servers whose logs should be split
 * @throws IOException on filesystem or log-splitting failure
 */
public void splitLog(final Set<ServerName> serverNames) throws IOException {
  splitLog(serverNames, NON_META_FILTER);
}
/**
 * Removes stale "recovering" region markers left in ZooKeeper by the given
 * failed servers. Thin wrapper on
 * {@link SplitLogManager#removeStaleRecoveringRegionsFromZK(Set)}.
 *
 * @param failedServers servers whose stale recovering-region state is cleared
 * @throws KeeperException on ZooKeeper errors
 */
void removeStaleRecoveringRegionsFromZK(final Set<ServerName> failedServers)
    throws KeeperException {
  this.splitLogManager.removeStaleRecoveringRegionsFromZK(failedServers);
}
/**
 * This method is the base split method that splits HLog files matching a filter. Callers should
 * pass the appropriate filter for meta and non-meta HLogs.
 *
 * @param serverNames dead servers whose claimed log directories are split
 * @param filter selects which HLog files to split (meta vs. non-meta)
 * @throws IOException on filesystem or log-splitting failure
 */
public void splitLog(final Set<ServerName> serverNames, PathFilter filter) throws IOException {
  long splitTime = 0, splitLogSize = 0;
  List<Path> logDirs = getLogDirs(serverNames);
  if (distributedLogSplitting) {
    // Distributed path: fan the work out across the cluster via SplitLogManager.
    splitLogManager.handleDeadWorkers(serverNames);
    splitTime = EnvironmentEdgeManager.currentTimeMillis();
    splitLogSize = splitLogManager.splitLogDistributed(serverNames, logDirs, filter);
    splitTime = EnvironmentEdgeManager.currentTimeMillis() - splitTime;
  } else {
    // Local path: the master splits each directory itself, serially.
    for(Path logDir: logDirs){
      // splitLogLock ensures that dead region servers' logs are processed
      // one at a time
      this.splitLogLock.lock();
      try {
        HLogSplitter splitter = HLogSplitter.createLogSplitter(conf, rootdir, logDir, oldLogDir,
          this.fs);
        try {
          // If FS is in safe mode, just wait till out of it.
          FSUtils.waitOnSafeMode(conf, conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 1000));
          splitter.splitLog();
        } catch (OrphanHLogAfterSplitException e) {
          LOG.warn("Retrying splitting because of:", e);
          //An HLogSplitter instance can only be used once. Get new instance.
          splitter = HLogSplitter.createLogSplitter(conf, rootdir, logDir,
            oldLogDir, this.fs);
          splitter.splitLog();
        }
        // NOTE(review): overwritten on every iteration, so the metrics reported
        // below only reflect the LAST directory split — confirm this is intended
        // (accumulating with += looks more plausible).
        splitTime = splitter.getTime();
        splitLogSize = splitter.getSize();
      } finally {
        this.splitLogLock.unlock();
      }
    }
  }
  if (this.metricsMaster != null) {
    // Reference comparison: assumes callers always pass the shared META_FILTER
    // constant for meta splits — TODO confirm.
    if (filter == this.META_FILTER) {
      this.metricsMaster.addMetaWALSplit(splitTime, splitLogSize);
    } else {
      this.metricsMaster.addSplit(splitTime, splitLogSize);
    }
  }
}
/**
 * Get the rootdir. Make sure its wholesome and exists before returning.
 *
 * @param rd the configured root directory
 * @param c cluster configuration
 * @param fs filesystem the root directory lives on
 * @return hbase.rootdir (after checks for existence and bootstrapping if
 *         needed populating the directory with necessary bootup files).
 * @throws IOException if the version or cluster-id files cannot be read/written
 */
private Path checkRootDir(final Path rd, final Configuration c,
    final FileSystem fs)
    throws IOException {
  // If FS is in safe mode wait till out of it.
  FSUtils.waitOnSafeMode(c, c.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000));
  // Filesystem is good. Go ahead and check for hbase.rootdir.
  try {
    if (!fs.exists(rd)) {
      fs.mkdirs(rd);
      // DFS leaves safe mode with 0 DNs when there are 0 blocks.
      // We used to handle this by checking the current DN count and waiting until
      // it is nonzero. With security, the check for datanode count doesn't work --
      // it is a privileged op. So instead we adopt the strategy of the jobtracker
      // and simply retry file creation during bootstrap indefinitely. As soon as
      // there is one datanode it will succeed. Permission problems should have
      // already been caught by mkdirs above.
      FSUtils.setVersion(fs, rd, c.getInt(HConstants.THREAD_WAKE_FREQUENCY,
        10 * 1000), c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS,
        HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
    } else {
      if (!fs.isDirectory(rd)) {
        throw new IllegalArgumentException(rd.toString() + " is not a directory");
      }
      // as above
      FSUtils.checkVersion(fs, rd, true, c.getInt(HConstants.THREAD_WAKE_FREQUENCY,
        10 * 1000), c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS,
        HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
    }
  } catch (DeserializationException de) {
    LOG.fatal("Please fix invalid configuration for " + HConstants.HBASE_DIR, de);
    // Use the cause-taking constructor instead of the new-then-initCause dance.
    throw new IOException(de);
  } catch (IllegalArgumentException iae) {
    LOG.fatal("Please fix invalid configuration for "
      + HConstants.HBASE_DIR + " " + rd.toString(), iae);
    throw iae;
  }
  // Make sure cluster ID exists
  if (!FSUtils.checkClusterIdExists(fs, rd, c.getInt(
      HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000))) {
    FSUtils.setClusterId(fs, rd, new ClusterId(), c.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000));
  }
  clusterId = FSUtils.getClusterId(fs, rd);
  // Make sure the meta region directory exists!
  if (!FSUtils.metaRegionExists(fs, rd)) {
    bootstrap(rd, c);
  }
  // Create tableinfo-s for META if not already there.
  FSTableDescriptors.createTableDescriptor(fs, rd, HTableDescriptor.META_TABLEDESC, false);
  return rd;
}
/**
 * Makes sure the hbase temp directory exists and is empty. Any table data left
 * behind by a previous run is archived before the directory is wiped.
 * NOTE that this method is only executed once, just after the master becomes
 * the active one.
 */
private void checkTempDir(final Path tmpdir, final Configuration c, final FileSystem fs)
    throws IOException {
  if (fs.exists(tmpdir)) {
    // Archive tables left in temp (e.g. from a failed deletion); whatever the
    // archiver does not pick up, the cleaner will take care of.
    for (Path tableDir : FSUtils.getTableDirs(fs, tmpdir)) {
      for (Path regionDir : FSUtils.getRegionDirs(fs, tableDir)) {
        HFileArchiver.archiveRegion(fs, this.rootdir, tableDir, regionDir);
      }
    }
    if (!fs.delete(tmpdir, true)) {
      throw new IOException("Unable to clean the temp directory: " + tmpdir);
    }
  }
  // Recreate the (now guaranteed absent) temp directory.
  if (!fs.mkdirs(tmpdir)) {
    throw new IOException("HBase temp directory '" + tmpdir + "' creation failure.");
  }
}
/**
 * Creates the initial .META. region under the given root directory. Called
 * only when no meta region exists yet (fresh cluster bootstrap).
 *
 * @param rd root directory to bootstrap
 * @param c cluster configuration
 * @throws IOException if META region creation fails
 */
private static void bootstrap(final Path rd, final Configuration c)
    throws IOException {
  LOG.info("BOOTSTRAP: creating META region");
  try {
    // Bootstrapping, make sure blockcache is off. Else, one will be
    // created here in bootstrap and it'll need to be cleaned up. Better to
    // not make it in first place. Turn off block caching for bootstrap.
    // Enable after.
    HRegionInfo metaHRI = new HRegionInfo(HRegionInfo.FIRST_META_REGIONINFO);
    setInfoFamilyCachingForMeta(false);
    HRegion meta = HRegion.createHRegion(metaHRI, rd, c,
      HTableDescriptor.META_TABLEDESC);
    setInfoFamilyCachingForMeta(true);
    HRegion.closeHRegion(meta);
  } catch (IOException e) {
    // Unwrap remote exceptions before logging/rethrowing.
    e = RemoteExceptionHandler.checkIOException(e);
    LOG.error("bootstrap", e);
    throw e;
  }
}
/**
 * Toggles block-cache and in-memory caching on the catalog ("info") family of
 * the .META. table descriptor.
 *
 * @param b true to enable caching, false to disable
 */
public static void setInfoFamilyCachingForMeta(final boolean b) {
  for (HColumnDescriptor family : HTableDescriptor.META_TABLEDESC.getColumnFamilies()) {
    if (!Bytes.equals(family.getName(), HConstants.CATALOG_FAMILY)) {
      continue;
    }
    family.setBlockCacheEnabled(b);
    family.setInMemory(b);
  }
}
/**
 * Removes the given region's data from the filesystem by archiving it
 * (rather than deleting it outright).
 *
 * @param region region whose files should be archived
 * @throws IOException on filesystem errors
 */
public void deleteRegion(HRegionInfo region) throws IOException {
  HFileArchiver.archiveRegion(conf, fs, region);
}
/**
 * Recursively deletes the given table's directory from the filesystem.
 *
 * @param tableName table whose directory should be removed
 * @throws IOException if the delete fails
 */
public void deleteTable(FullyQualifiedTableName tableName) throws IOException {
  Path tableDir = FSUtils.getTableDir(rootdir, tableName);
  // fs.delete reports failure via its return value; previously this was
  // silently ignored. Surface it as an IOException, consistent with
  // checkTempDir/moveTableToTemp.
  if (!fs.delete(tableDir, true)) {
    throw new IOException("Failed delete of table directory: " + tableDir);
  }
}
/**
 * Move the specified table to the hbase temp directory
 * @param tableName Table name to move
 * @return The temp location of the table moved
 * @throws IOException in case of file-system failure
 */
public Path moveTableToTemp(FullyQualifiedTableName tableName) throws IOException {
  Path srcPath = FSUtils.getTableDir(rootdir, tableName);
  Path tempPath = FSUtils.getTableDir(this.tempdir, tableName);
  Path tempParent = tempPath.getParent();
  // Make sure the parent of the destination exists before renaming into it.
  if (!fs.exists(tempParent) && !fs.mkdirs(tempParent)) {
    throw new IOException("HBase temp directory '" + tempParent + "' creation failure.");
  }
  if (!fs.rename(srcPath, tempPath)) {
    throw new IOException("Unable to move '" + srcPath + "' to temp '" + tempPath + "'");
  }
  return tempPath;
}
/**
 * Intended to persist an updated {@link HRegionInfo} to the filesystem.
 * Currently a no-op — see the TODO below.
 */
public void updateRegionInfo(HRegionInfo region) {
  // TODO implement this. i think this is currently broken in trunk i don't
  // see this getting updated.
  // @see HRegion.checkRegioninfoOnFilesystem()
}
/**
 * Removes a column family's data for the given region from the filesystem:
 * the store files are archived first, then the family directory is deleted.
 *
 * @param region region whose family data is being removed
 * @param familyName name of the column family to remove
 * @throws IOException if the family directory cannot be deleted
 */
public void deleteFamilyFromFS(HRegionInfo region, byte[] familyName)
    throws IOException {
  // archive family store files
  Path tableDir = FSUtils.getTableDir(rootdir, region.getFullyQualifiedTableName());
  HFileArchiver.archiveFamily(fs, conf, region, tableDir, familyName);
  // delete the family folder
  Path familyDir = new Path(tableDir,
    new Path(region.getEncodedName(), Bytes.toString(familyName)));
  // `!x` instead of the `x == false` anti-idiom.
  if (!fs.delete(familyDir, true)) {
    throw new IOException("Could not delete family "
      + Bytes.toString(familyName) + " from FileSystem for region "
      + region.getRegionNameAsString() + "(" + region.getEncodedName()
      + ")");
  }
}
/**
 * Stops the split-log manager, if one was created.
 */
public void stop() {
  if (splitLogManager != null) {
    this.splitLogManager.stop();
  }
}
/**
 * Create new HTableDescriptor in HDFS.
 *
 * @param htableDescriptor descriptor to persist
 * @throws IOException if the descriptor cannot be written
 */
public void createTableDescriptor(HTableDescriptor htableDescriptor)
    throws IOException {
  FSTableDescriptors.createTableDescriptor(htableDescriptor, conf);
}
/**
 * Delete column of a table.
 *
 * @param tableName table to delete the column from
 * @param familyName name of the family to delete
 * @return Modified HTableDescriptor with requested column deleted.
 * @throws InvalidFamilyOperationException if no descriptor exists for the table
 * @throws IOException on descriptor persistence errors
 */
public HTableDescriptor deleteColumn(FullyQualifiedTableName tableName, byte[] familyName)
    throws IOException {
  LOG.info("DeleteColumn. Table = " + tableName
    + " family = " + Bytes.toString(familyName));
  HTableDescriptor htd = this.services.getTableDescriptors().get(tableName);
  // Guard against a missing descriptor instead of NPE-ing on removeFamily
  // (consistent with the null check in addColumn).
  if (htd == null) {
    throw new InvalidFamilyOperationException("Family '" +
      Bytes.toString(familyName) + "' cannot be deleted as HTD is null");
  }
  htd.removeFamily(familyName);
  this.services.getTableDescriptors().add(htd);
  return htd;
}
/**
 * Modify Column of a table
 * @param tableName table whose descriptor is updated
 * @param hcd HColumnDescriptor carrying the new family settings
 * @return Modified HTableDescriptor with the column modified.
 * @throws InvalidFamilyOperationException if the family does not already exist
 * @throws IOException on descriptor persistence errors
 */
public HTableDescriptor modifyColumn(FullyQualifiedTableName tableName, HColumnDescriptor hcd)
    throws IOException {
  LOG.info("AddModifyColumn. Table = " + tableName
    + " HCD = " + hcd.toString());
  HTableDescriptor htd = this.services.getTableDescriptors().get(tableName);
  byte [] familyName = hcd.getName();
  if(!htd.hasFamily(familyName)) {
    throw new InvalidFamilyOperationException("Family '" +
      Bytes.toString(familyName) + "' doesn't exists so cannot be modified");
  }
  // Re-adding under the same family name applies the modified settings.
  htd.addFamily(hcd);
  this.services.getTableDescriptors().add(htd);
  return htd;
}
/**
 * Add column to a table.
 *
 * @param tableName table to add the column to
 * @param hcd descriptor of the new family
 * @return Modified HTableDescriptor with new column added.
 * @throws InvalidFamilyOperationException if no descriptor exists for the table
 * @throws IOException on descriptor persistence errors
 */
public HTableDescriptor addColumn(FullyQualifiedTableName tableName, HColumnDescriptor hcd)
    throws IOException {
  LOG.info("AddColumn. Table = " + tableName + " HCD = " + hcd.toString());
  HTableDescriptor htd = this.services.getTableDescriptors().get(tableName);
  if (htd == null) {
    // No descriptor for this table — nothing we can add a family to.
    throw new InvalidFamilyOperationException("Family '" +
      hcd.getNameAsString() + "' cannot be modified as HTD is null");
  }
  htd.addFamily(hcd);
  // Persist the updated descriptor.
  this.services.getTableDescriptors().add(htd);
  return htd;
}
/**
 * Fetches from .META. the user regions hosted by the given server, waiting for
 * meta to become available first.
 *
 * @param serverName server whose regions to look up
 * @return map of region to its meta Result row, or null if the master is stopped
 * @throws IOException if interrupted while waiting for meta, or on read errors
 */
private NavigableMap<HRegionInfo, Result> getServerUserRegions(ServerName serverName)
    throws IOException {
  if (!this.master.isStopped()) {
    try {
      this.master.getCatalogTracker().waitForMeta();
      return MetaReader.getServerUserRegions(this.master.getCatalogTracker(), serverName);
    } catch (InterruptedException e) {
      // Restore the interrupt flag before translating to IOException.
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted", e);
    }
  }
  return null;
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Class ClassFilterEditor
* @author Jeka
*/
package com.intellij.ui.classFilter;
import com.intellij.ide.util.ClassFilter;
import com.intellij.ide.util.TreeClassChooser;
import com.intellij.ide.util.TreeClassChooserFactory;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiClass;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.ui.*;
import com.intellij.ui.table.JBTable;
import com.intellij.util.IconUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.ComponentWithEmptyText;
import com.intellij.util.ui.ItemRemovable;
import com.intellij.util.ui.StatusText;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.table.*;
import java.awt.*;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
 * A panel that edits a list of {@link com.intellij.ui.classFilter.ClassFilter}
 * entries in a two-column table (enabled checkbox + pattern), with toolbar
 * actions for adding a class via a chooser, adding a raw pattern, and removing
 * rows. Subclasses can customize button texts/icons and filter creation.
 */
public class ClassFilterEditor extends JPanel implements ComponentWithEmptyText {
  protected JBTable myTable = null;
  protected FilterTableModel myTableModel = null;
  protected final Project myProject;
  // Restricts which classes may be picked in the class chooser dialog.
  private final ClassFilter myChooserFilter;
  @Nullable
  private final String myPatternsHelpId;
  // Separator used between outer and inner class names when building a JVM-style name.
  private String classDelimiter = "$";

  public ClassFilterEditor(Project project) {
    this(project, null);
  }

  public ClassFilterEditor(Project project, ClassFilter classFilter) {
    this(project, classFilter, null);
  }

  public ClassFilterEditor(Project project, ClassFilter classFilter, @Nullable String patternsHelpId) {
    super(new BorderLayout());
    myPatternsHelpId = patternsHelpId;
    myTable = new JBTable();
    // Build the toolbar: the "add class" action is always present.
    final ToolbarDecorator decorator = ToolbarDecorator.createDecorator(myTable)
      .addExtraAction(new AnActionButton(getAddButtonText(), getAddButtonIcon()) {
        @Override
        public void actionPerformed(AnActionEvent e) {
          addClassFilter();
        }
        @Override
        public void updateButton(AnActionEvent e) {
          super.updateButton(e);
          // Choosing a class needs a real project; disable for the default project.
          setEnabled(!myProject.isDefault());
        }
      });
    if (addPatternButtonVisible()) {
      decorator.addExtraAction(new AnActionButton(getAddPatternButtonText(), getAddPatternButtonIcon()) {
        @Override
        public void actionPerformed(AnActionEvent e) {
          addPatternFilter();
        }
        @Override
        public void updateButton(AnActionEvent e) {
          super.updateButton(e);
          setEnabled(!myProject.isDefault());
        }
      });
    }
    add(decorator.setRemoveAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        TableUtil.removeSelectedItems(myTable);
      }
    }).setButtonComparator(getAddButtonText(), getAddPatternButtonText(), "Remove")
      .disableUpDownActions().createPanel(), BorderLayout.CENTER);
    myChooserFilter = classFilter;
    myProject = project;
    myTableModel = new FilterTableModel();
    myTable.setModel(myTableModel);
    // Present the table as a flat list: no grid, header, or cell spacing.
    myTable.setShowGrid(false);
    myTable.setIntercellSpacing(new Dimension(0, 0));
    myTable.setTableHeader(null);
    myTable.setAutoResizeMode(JTable.AUTO_RESIZE_LAST_COLUMN);
    myTable.setColumnSelectionAllowed(false);
    myTable.setPreferredScrollableViewportSize(new Dimension(200, myTable.getRowHeight() * JBTable.PREFERRED_SCROLLABLE_VIEWPORT_HEIGHT_IN_ROWS));
    TableColumnModel columnModel = myTable.getColumnModel();
    TableColumn column = columnModel.getColumn(FilterTableModel.CHECK_MARK);
    TableUtil.setupCheckboxColumn(column);
    column.setCellRenderer(new EnabledCellRenderer(myTable.getDefaultRenderer(Boolean.class)));
    columnModel.getColumn(FilterTableModel.FILTER).setCellRenderer(new FilterCellRenderer());
    getEmptyText().setText(UIBundle.message("no.patterns"));
  }

  @NotNull
  @Override
  public StatusText getEmptyText() {
    return myTable.getEmptyText();
  }

  /** Label for the "add class" toolbar button; overridable by subclasses. */
  protected String getAddButtonText() {
    return UIBundle.message("button.add.class");
  }

  /** Label for the "add pattern" toolbar button; overridable by subclasses. */
  protected String getAddPatternButtonText() {
    return UIBundle.message("button.add.pattern");
  }

  protected Icon getAddButtonIcon() {
    return IconUtil.getAddClassIcon();
  }

  protected Icon getAddPatternButtonIcon() {
    return IconUtil.getAddPatternIcon();
  }

  /** Whether the "add pattern" button is shown; subclasses may hide it. */
  protected boolean addPatternButtonVisible() {
    return true;
  }

  /** Replaces the table content with the given filters. */
  public void setFilters(com.intellij.ui.classFilter.ClassFilter[] filters) {
    myTableModel.setFilters(filters);
  }

  public com.intellij.ui.classFilter.ClassFilter[] getFilters() {
    return myTableModel.getFilters();
  }

  public void setEnabled(boolean enabled) {
    super.setEnabled(enabled);
    myTable.setEnabled(enabled);
    myTable.setRowSelectionAllowed(enabled);
    // Force a repaint so renderers pick up the new enabled state.
    myTableModel.fireTableDataChanged();
  }

  /** Commits any in-progress cell edit so pending user input is not lost. */
  public void stopEditing() {
    TableCellEditor editor = myTable.getCellEditor();
    if (editor != null) {
      editor.stopCellEditing();
    }
  }

  /** Two-column table model: column 0 is the enabled checkbox, column 1 the filter. */
  protected final class FilterTableModel extends AbstractTableModel implements ItemRemovable {
    private final List<com.intellij.ui.classFilter.ClassFilter> myFilters = new LinkedList<com.intellij.ui.classFilter.ClassFilter>();
    public static final int CHECK_MARK = 0;
    public static final int FILTER = 1;

    public final void setFilters(com.intellij.ui.classFilter.ClassFilter[] filters) {
      myFilters.clear();
      if (filters != null) {
        ContainerUtil.addAll(myFilters, filters);
      }
      fireTableDataChanged();
    }

    public com.intellij.ui.classFilter.ClassFilter[] getFilters() {
      // NOTE: as a side effect, filters with null/empty patterns are pruned
      // from the model before the array is returned.
      for (Iterator<com.intellij.ui.classFilter.ClassFilter> it = myFilters.iterator(); it.hasNext(); ) {
        com.intellij.ui.classFilter.ClassFilter filter = it.next();
        String pattern = filter.getPattern();
        if (pattern == null || "".equals(pattern)) {
          it.remove();
        }
      }
      return myFilters.toArray(new com.intellij.ui.classFilter.ClassFilter[myFilters.size()]);
    }

    public com.intellij.ui.classFilter.ClassFilter getFilterAt(int index) {
      return myFilters.get(index);
    }

    public int getFilterIndex(com.intellij.ui.classFilter.ClassFilter filter) {
      return myFilters.indexOf(filter);
    }

    /** Appends a filter and notifies listeners about the inserted row. */
    public void addRow(com.intellij.ui.classFilter.ClassFilter filter) {
      myFilters.add(filter);
      int row = myFilters.size() - 1;
      fireTableRowsInserted(row, row);
    }

    public int getRowCount() {
      return myFilters.size();
    }

    public int getColumnCount() {
      return 2;
    }

    public Object getValueAt(int rowIndex, int columnIndex) {
      com.intellij.ui.classFilter.ClassFilter filter = myFilters.get(rowIndex);
      if (columnIndex == FILTER) {
        return filter;
      }
      if (columnIndex == CHECK_MARK) {
        return filter.isEnabled() ? Boolean.TRUE : Boolean.FALSE;
      }
      return null;
    }

    public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
      com.intellij.ui.classFilter.ClassFilter filter = myFilters.get(rowIndex);
      if (columnIndex == FILTER) {
        filter.setPattern(aValue != null ? aValue.toString() : "");
      }
      else if (columnIndex == CHECK_MARK) {
        // A null checkbox value is treated as "enabled".
        filter.setEnabled(aValue == null || ((Boolean)aValue).booleanValue());
      }
      // fireTableCellUpdated(rowIndex, columnIndex);
      fireTableRowsUpdated(rowIndex, rowIndex);
    }

    public Class getColumnClass(int columnIndex) {
      if (columnIndex == CHECK_MARK) {
        return Boolean.class;
      }
      return super.getColumnClass(columnIndex);
    }

    public boolean isCellEditable(int rowIndex, int columnIndex) {
      // Cells are editable only while the whole editor is enabled.
      return isEnabled();
    }

    public void removeRow(final int idx) {
      myFilters.remove(idx);
      fireTableRowsDeleted(idx, idx);
    }
  }

  /** Renders the pattern column, greying out rows whose filter is disabled. */
  private class FilterCellRenderer extends DefaultTableCellRenderer {
    public Component getTableCellRendererComponent(JTable table, Object value,
        boolean isSelected, boolean hasFocus, int row, int column) {
      // Temporarily swap the UIManager focus-cell background so the focused cell
      // matches the selection color, then restore the original value.
      Color color = UIUtil.getTableFocusCellBackground();
      UIManager.put(UIUtil.TABLE_FOCUS_CELL_BACKGROUND_PROPERTY, table.getSelectionBackground());
      Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
      if (component instanceof JLabel) {
        ((JLabel)component).setBorder(noFocusBorder);
      }
      UIManager.put(UIUtil.TABLE_FOCUS_CELL_BACKGROUND_PROPERTY, color);
      com.intellij.ui.classFilter.ClassFilter filter =
        (com.intellij.ui.classFilter.ClassFilter)table.getValueAt(row, FilterTableModel.FILTER);
      component.setEnabled(isSelected || (ClassFilterEditor.this.isEnabled() && filter.isEnabled()));
      return component;
    }
  }

  /** Renders the checkbox column, following the editor's overall enabled state. */
  private class EnabledCellRenderer extends DefaultTableCellRenderer {
    private final TableCellRenderer myDelegate;

    public EnabledCellRenderer(TableCellRenderer delegate) {
      myDelegate = delegate;
    }

    public Component getTableCellRendererComponent(JTable table, Object value,
        boolean isSelected, boolean hasFocus, int row, int column) {
      Component component = myDelegate.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
      component.setEnabled(ClassFilterEditor.this.isEnabled());
      return component;
    }
  }

  /** Factory for new filter instances; subclasses may return specialized filters. */
  @NotNull
  protected com.intellij.ui.classFilter.ClassFilter createFilter(String pattern) {
    return new com.intellij.ui.classFilter.ClassFilter(pattern);
  }

  /** Shows the "add pattern" dialog and appends the entered pattern to the table. */
  protected void addPatternFilter() {
    ClassFilterEditorAddDialog dialog = new ClassFilterEditorAddDialog(myProject, myPatternsHelpId);
    if (dialog.showAndGet()) {
      String pattern = dialog.getPattern();
      if (pattern != null) {
        com.intellij.ui.classFilter.ClassFilter filter = createFilter(pattern);
        myTableModel.addRow(filter);
        // Select, reveal and focus the newly added row.
        int row = myTableModel.getRowCount() - 1;
        myTable.getSelectionModel().setSelectionInterval(row, row);
        myTable.scrollRectToVisible(myTable.getCellRect(row, 0, true));
        myTable.requestFocus();
      }
    }
  }

  /** Shows a class chooser and appends the selected class (as a JVM name) to the table. */
  protected void addClassFilter() {
    TreeClassChooser chooser = TreeClassChooserFactory.getInstance(myProject).createNoInnerClassesScopeChooser(
      UIBundle.message("class.filter.editor.choose.class.title"), GlobalSearchScope.allScope(myProject), myChooserFilter, null);
    chooser.showDialog();
    PsiClass selectedClass = chooser.getSelected();
    if (selectedClass != null) {
      com.intellij.ui.classFilter.ClassFilter filter = createFilter(getJvmClassName(selectedClass));
      myTableModel.addRow(filter);
      // Select, reveal and focus the newly added row.
      int row = myTableModel.getRowCount() - 1;
      myTable.getSelectionModel().setSelectionInterval(row, row);
      myTable.scrollRectToVisible(myTable.getCellRect(row, 0, true));
      myTable.requestFocus();
    }
  }

  /**
   * Builds a JVM-style name for a (possibly nested) class by recursing through
   * the enclosing classes and joining names with {@link #classDelimiter}.
   * Returns null when an enclosing class name cannot be resolved.
   */
  @Nullable
  private String getJvmClassName(PsiClass aClass) {
    PsiClass parentClass = PsiTreeUtil.getParentOfType(aClass, PsiClass.class, true);
    if (parentClass != null) {
      final String parentName = getJvmClassName(parentClass);
      if (parentName == null) {
        return null;
      }
      return parentName + classDelimiter + aClass.getName();
    }
    return aClass.getQualifiedName();
  }

  /** Overrides the default "$" inner-class separator used by getJvmClassName. */
  public void setClassDelimiter(String classDelimiter) {
    this.classDelimiter = classDelimiter;
  }

  /** Appends a filter for the given pattern without any dialog interaction. */
  public void addPattern(String pattern) {
    com.intellij.ui.classFilter.ClassFilter filter = createFilter(pattern);
    myTableModel.addRow(filter);
  }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.common.base.Preconditions;
import com.google.common.base.Verify;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.actions.FileValue;
import com.google.devtools.build.lib.actions.InconsistentFilesystemException;
import com.google.devtools.build.lib.cmdline.LabelConstants;
import com.google.devtools.build.lib.cmdline.LabelValidator;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.packages.BuildFileName;
import com.google.devtools.build.lib.packages.BuildFileNotFoundException;
import com.google.devtools.build.lib.packages.ErrorDeterminingRepositoryException;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.RepositoryFetchException;
import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions;
import com.google.devtools.build.lib.pkgcache.PathPackageLocator;
import com.google.devtools.build.lib.repository.ExternalPackageHelper;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.build.lib.vfs.RootedPath;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyFunctionException.Transience;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import net.starlark.java.eval.EvalException;
import net.starlark.java.eval.StarlarkSemantics;
/**
* SkyFunction for {@link PackageLookupValue}s.
*/
public class PackageLookupFunction implements SkyFunction {
/** Lists possible ways to handle a package label which crosses into a new repository. */
public enum CrossRepositoryLabelViolationStrategy {
  /** Silently ignore the violation. */
  IGNORE,
  /** Generate an error. */
  ERROR;
}
// Packages to treat as deleted; consulted on every lookup (updatable, shared reference).
private final AtomicReference<ImmutableSet<PackageIdentifier>> deletedPackages;
// Policy applied when a package label crosses into a different repository.
private final CrossRepositoryLabelViolationStrategy crossRepositoryLabelViolationStrategy;
// Candidate build-file names checked in decreasing order of priority.
private final ImmutableList<BuildFileName> buildFilesByPriority;
private final ExternalPackageHelper externalPackageHelper;

public PackageLookupFunction(
    AtomicReference<ImmutableSet<PackageIdentifier>> deletedPackages,
    CrossRepositoryLabelViolationStrategy crossRepositoryLabelViolationStrategy,
    ImmutableList<BuildFileName> buildFilesByPriority,
    ExternalPackageHelper externalPackageHelper) {
  this.deletedPackages = deletedPackages;
  this.crossRepositoryLabelViolationStrategy = crossRepositoryLabelViolationStrategy;
  this.buildFilesByPriority = buildFilesByPriority;
  this.externalPackageHelper = externalPackageHelper;
}
@Override
public SkyValue compute(SkyKey skyKey, Environment env)
    throws PackageLookupFunctionException, InterruptedException {
  // A null return below signals a missing Skyframe dependency; the function is
  // re-run once that dependency is available.
  PathPackageLocator pkgLocator = PrecomputedValue.PATH_PACKAGE_LOCATOR.get(env);
  StarlarkSemantics semantics = PrecomputedValue.STARLARK_SEMANTICS.get(env);
  PackageIdentifier packageKey = (PackageIdentifier) skyKey.argument();
  // Reject syntactically invalid package names up front.
  String packageNameErrorMsg = LabelValidator.validatePackageName(
    packageKey.getPackageFragment().getPathString());
  if (packageNameErrorMsg != null) {
    return PackageLookupValue.invalidPackageName("Invalid package name '" + packageKey + "': "
      + packageNameErrorMsg);
  }
  if (deletedPackages.get().contains(packageKey)) {
    return PackageLookupValue.DELETED_PACKAGE_VALUE;
  }
  // Packages in non-main repositories take a separate lookup path.
  if (!packageKey.getRepository().isMain()) {
    return computeExternalPackageLookupValue(skyKey, env, packageKey);
  }
  // The //external package is special-cased and can be disabled by a flag.
  if (packageKey.equals(LabelConstants.EXTERNAL_PACKAGE_IDENTIFIER)) {
    return semantics.getBool(BuildLanguageOptions.EXPERIMENTAL_DISABLE_EXTERNAL_PACKAGE)
      ? PackageLookupValue.NO_BUILD_FILE_VALUE
      : computeWorkspacePackageLookupValue(env);
  }
  // Check .bazelignore file under main repository.
  IgnoredPackagePrefixesValue ignoredPatternsValue =
    (IgnoredPackagePrefixesValue) env.getValue(IgnoredPackagePrefixesValue.key());
  if (ignoredPatternsValue == null) {
    return null;
  }
  // Ignored packages are reported the same way as deleted ones.
  if (isPackageIgnored(packageKey, ignoredPatternsValue)) {
    return PackageLookupValue.DELETED_PACKAGE_VALUE;
  }
  return findPackageByBuildFile(env, pkgLocator, packageKey);
}
/**
 * For a package identifier {@code packageKey} such that the compute for
 * {@code PackageLookupValue.key(packageKey)} returned {@code NO_BUILD_FILE_VALUE},
 * produces a human-readable error message listing where the package was
 * searched for (main repository) or which external-repository directory lacked
 * a BUILD file.
 */
public static String explainNoBuildFileValue(PackageIdentifier packageKey, Environment env)
    throws InterruptedException {
  String educationalMessage = "Add a BUILD file to a directory to mark it as a package.";
  if (!packageKey.getRepository().isMain()) {
    // External repository: there is a single directory to report.
    return "BUILD file not found in directory '"
        + packageKey.getPackageFragment()
        + "' of external repository "
        + packageKey.getRepository()
        + ". "
        + educationalMessage;
  }
  // Main repository: list every package-path root that was searched.
  PathPackageLocator pkgLocator = PrecomputedValue.PATH_PACKAGE_LOCATOR.get(env);
  StringBuilder message =
      new StringBuilder("BUILD file not found in any of the following directories. ")
          .append(educationalMessage);
  for (Root root : pkgLocator.getPathEntries()) {
    message
        .append("\n - ")
        .append(root.asPath().getRelative(packageKey.getPackageFragment()).getPathString());
  }
  return message.toString();
}
@Nullable
@Override
public String extractTag(SkyKey skyKey) {
  // Package lookups carry no tag for error/warning attribution.
  return null;
}
/**
 * Searches each package-path entry, in order, for this package's build file,
 * trying the candidate build-file names in priority order. Returns null when a
 * Skyframe dependency is missing; NO_BUILD_FILE_VALUE when nothing matched.
 */
@Nullable
private PackageLookupValue findPackageByBuildFile(
    Environment env, PathPackageLocator pkgLocator, PackageIdentifier packageKey)
    throws PackageLookupFunctionException, InterruptedException {
  // TODO(bazel-team): The following is O(n^2) on the number of elements on the package path due
  // to having restart the SkyFunction after every new dependency. However, if we try to batch
  // the missing value keys, more dependencies than necessary will be declared. This wart can be
  // fixed once we have nicer continuation support [skyframe-loading]
  for (Root packagePathEntry : pkgLocator.getPathEntries()) {
    // This checks for the build file names in the correct precedence order.
    for (BuildFileName buildFileName : buildFilesByPriority) {
      PackageLookupValue result =
        getPackageLookupValue(env, packagePathEntry, packageKey, buildFileName);
      if (result == null) {
        // Missing Skyframe dependency — propagate the restart.
        return null;
      }
      if (result != PackageLookupValue.NO_BUILD_FILE_VALUE) {
        return result;
      }
    }
  }
  return PackageLookupValue.NO_BUILD_FILE_VALUE;
}
/**
 * Fetches the FileValue for a candidate build file, translating filesystem
 * errors and symlink cycles into BuildFileNotFoundException. Returns null when
 * the file dependency has not been computed yet.
 */
@Nullable
private static FileValue getFileValue(
    RootedPath fileRootedPath, Environment env, PackageIdentifier packageIdentifier)
    throws PackageLookupFunctionException, InterruptedException {
  String basename = fileRootedPath.asPath().getBaseName();
  SkyKey fileSkyKey = FileValue.key(fileRootedPath);
  FileValue fileValue = null;
  try {
    fileValue = (FileValue) env.getValueOrThrow(fileSkyKey, IOException.class);
  } catch (InconsistentFilesystemException e) {
    // This error is not transient from the perspective of the PackageLookupFunction.
    throw new PackageLookupFunctionException(e, Transience.PERSISTENT);
  } catch (FileSymlinkException e) {
    throw new PackageLookupFunctionException(new BuildFileNotFoundException(packageIdentifier,
      "Symlink cycle detected while trying to find " + basename + " file "
      + fileRootedPath.asPath()),
      Transience.PERSISTENT);
  } catch (IOException e) {
    throw new PackageLookupFunctionException(new BuildFileNotFoundException(packageIdentifier,
      "IO errors while looking for " + basename + " file reading "
      + fileRootedPath.asPath() + ": " + e.getMessage(), e),
      Transience.PERSISTENT);
  }
  return fileValue;
}
/**
 * Checks one (package-path entry, build-file name) combination. Under the
 * ERROR cross-repository strategy, first verifies that the candidate directory
 * is not actually owned by a different local repository; then checks whether
 * the build file exists there. Returns null when a Skyframe dependency is
 * missing, NO_BUILD_FILE_VALUE when the build file is absent or not a file.
 */
private PackageLookupValue getPackageLookupValue(
    Environment env,
    Root packagePathEntry,
    PackageIdentifier packageIdentifier,
    BuildFileName buildFileName)
    throws InterruptedException, PackageLookupFunctionException {
  PathFragment buildFileFragment = buildFileName.getBuildFileFragment(packageIdentifier);
  RootedPath buildFileRootedPath = RootedPath.toRootedPath(packagePathEntry, buildFileFragment);
  if (crossRepositoryLabelViolationStrategy == CrossRepositoryLabelViolationStrategy.ERROR) {
    // Is this path part of a local repository?
    RootedPath currentPath =
      RootedPath.toRootedPath(packagePathEntry, buildFileFragment.getParentDirectory());
    SkyKey repositoryLookupKey = LocalRepositoryLookupValue.key(currentPath);
    // TODO(jcater): Consider parallelizing these lookups.
    LocalRepositoryLookupValue localRepository;
    try {
      localRepository =
        (LocalRepositoryLookupValue)
          env.getValueOrThrow(repositoryLookupKey, ErrorDeterminingRepositoryException.class);
      if (localRepository == null) {
        // Missing Skyframe dependency — restart.
        return null;
      }
    } catch (ErrorDeterminingRepositoryException e) {
      // If the directory selected isn't part of a repository, that's an error.
      // TODO(katre): Improve the error message given here.
      throw new PackageLookupFunctionException(
        new BuildFileNotFoundException(
          packageIdentifier,
          "Unable to determine the local repository for directory "
          + currentPath.asPath().getPathString()),
        Transience.PERSISTENT);
    }
    if (localRepository.exists()
        && !localRepository.getRepository().equals(packageIdentifier.getRepository())) {
      // There is a repository mismatch, this is an error.
      // The correct package path is the one originally given, minus the part that is the local
      // repository.
      PathFragment pathToRequestedPackage = packageIdentifier.getSourceRoot();
      PathFragment localRepositoryPath = localRepository.getPath();
      if (localRepositoryPath.isAbsolute()) {
        // We need the package path to also be absolute.
        pathToRequestedPackage =
          packagePathEntry.getRelative(pathToRequestedPackage).asFragment();
      }
      PathFragment remainingPath = pathToRequestedPackage.relativeTo(localRepositoryPath);
      PackageIdentifier correctPackage =
        PackageIdentifier.create(localRepository.getRepository(), remainingPath);
      return PackageLookupValue.incorrectRepositoryReference(packageIdentifier, correctPackage);
    }
    // There's no local repository, keep going.
  } else {
    // Future-proof against adding future values to CrossRepositoryLabelViolationStrategy.
    Preconditions.checkState(
      crossRepositoryLabelViolationStrategy == CrossRepositoryLabelViolationStrategy.IGNORE,
      crossRepositoryLabelViolationStrategy);
  }
  // Check for the existence of the build file.
  FileValue fileValue = getFileValue(buildFileRootedPath, env, packageIdentifier);
  if (fileValue == null) {
    // Missing Skyframe dependency — restart.
    return null;
  }
  if (fileValue.isFile()) {
    return PackageLookupValue.success(buildFileRootedPath.getRoot(), buildFileName);
  }
  return PackageLookupValue.NO_BUILD_FILE_VALUE;
}
private static boolean isPackageIgnored(
PackageIdentifier id, IgnoredPackagePrefixesValue ignoredPatternsValue) {
PathFragment packageFragment = id.getPackageFragment();
for (PathFragment pattern : ignoredPatternsValue.getPatterns()) {
if (packageFragment.startsWith(pattern)) {
return true;
}
}
return false;
}
private PackageLookupValue computeWorkspacePackageLookupValue(Environment env)
throws InterruptedException {
RootedPath workspaceFile = externalPackageHelper.findWorkspaceFile(env);
if (env.valuesMissing()) {
return null;
}
if (workspaceFile == null) {
return PackageLookupValue.NO_BUILD_FILE_VALUE;
} else {
BuildFileName filename = null;
for (BuildFileName candidate : BuildFileName.values()) {
if (workspaceFile.getRootRelativePath().equals(candidate.getFilenameFragment())) {
filename = candidate;
break;
}
}
// Otherwise ExternalPackageUtil.findWorkspaceFile() returned something whose name is not in
// BuildFileName
Verify.verify(filename != null);
return PackageLookupValue.success(workspaceFile.getRoot(), filename);
}
}
  /**
   * Gets a PackageLookupValue from a different Bazel repository.
   *
   * <p>To do this, it looks up the "external" package and finds a path mapping for the repository
   * name.
   *
   * <p>Returns {@code null} when a Skyframe dependency is missing; otherwise a success value,
   * {@code NO_BUILD_FILE_VALUE}, {@code DELETED_PACKAGE_VALUE} (for ignored packages), or a
   * no-repository value.
   */
  private PackageLookupValue computeExternalPackageLookupValue(
      SkyKey skyKey, Environment env, PackageIdentifier packageIdentifier)
      throws PackageLookupFunctionException, InterruptedException {
    PackageIdentifier id = (PackageIdentifier) skyKey.argument();
    SkyKey repositoryKey = RepositoryValue.key(id.getRepository());
    RepositoryValue repositoryValue;
    try {
      repositoryValue =
          (RepositoryValue)
              env.getValueOrThrow(
                  repositoryKey,
                  NoSuchPackageException.class,
                  IOException.class,
                  EvalException.class,
                  AlreadyReportedException.class);
      if (repositoryValue == null) {
        // Skyframe dependency not yet computed; signal a restart.
        return null;
      }
    } catch (NoSuchPackageException e) {
      throw new PackageLookupFunctionException(new BuildFileNotFoundException(id, e.getMessage()),
          Transience.PERSISTENT);
    } catch (IOException | EvalException | AlreadyReportedException e) {
      throw new PackageLookupFunctionException(
          new RepositoryFetchException(id, e.getMessage()), Transience.PERSISTENT);
    }
    if (!repositoryValue.repositoryExists()) {
      // TODO(ulfjack): Maybe propagate the error message from the repository delegator function?
      return new PackageLookupValue.NoRepositoryPackageLookupValue(id.getRepository().getName());
    }

    // Check .bazelignore file after fetching the external repository.
    IgnoredPackagePrefixesValue ignoredPatternsValue =
        (IgnoredPackagePrefixesValue)
            env.getValue(IgnoredPackagePrefixesValue.key(id.getRepository()));
    if (ignoredPatternsValue == null) {
      // Skyframe dependency not yet computed; signal a restart.
      return null;
    }
    if (isPackageIgnored(id, ignoredPatternsValue)) {
      return PackageLookupValue.DELETED_PACKAGE_VALUE;
    }

    // This checks for the build file names in the correct precedence order.
    for (BuildFileName buildFileName : buildFilesByPriority) {
      PathFragment buildFileFragment =
          id.getPackageFragment().getRelative(buildFileName.getFilenameFragment());
      RootedPath buildFileRootedPath =
          RootedPath.toRootedPath(Root.fromPath(repositoryValue.getPath()), buildFileFragment);
      FileValue fileValue = getFileValue(buildFileRootedPath, env, packageIdentifier);
      if (fileValue == null) {
        // Skyframe dependency not yet computed; signal a restart.
        return null;
      }
      if (fileValue.isFile()) {
        return PackageLookupValue.success(
            repositoryValue, Root.fromPath(repositoryValue.getPath()), buildFileName);
      }
    }
    return PackageLookupValue.NO_BUILD_FILE_VALUE;
  }
  /**
   * Used to declare all the exception types that can be wrapped in the exception thrown by
   * {@link PackageLookupFunction#compute}.
   */
  private static final class PackageLookupFunctionException extends SkyFunctionException {
    /** Wraps a missing-build-file error. */
    public PackageLookupFunctionException(BuildFileNotFoundException e, Transience transience) {
      super(e, transience);
    }

    /** Wraps a failure while fetching an external repository. */
    public PackageLookupFunctionException(RepositoryFetchException e, Transience transience) {
      super(e, transience);
    }

    /** Wraps a filesystem inconsistency observed during the lookup. */
    public PackageLookupFunctionException(InconsistentFilesystemException e,
        Transience transience) {
      super(e, transience);
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.server.impl.jdbc;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;
import org.apache.activemq.artemis.core.config.storage.DatabaseStorageConfiguration;
import org.apache.activemq.artemis.core.server.NodeManager.LockListener;
import org.apache.activemq.artemis.jdbc.store.drivers.JDBCUtils;
import org.apache.activemq.artemis.jdbc.store.sql.SQLProvider;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.Wait;
import org.apache.activemq.artemis.utils.actors.ArtemisExecutor;
import org.apache.activemq.artemis.utils.actors.OrderedExecutorFactory;
import org.hamcrest.MatcherAssert;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
/**
 * Tests for the JDBC lease lock: acquisition, expiration, renewal, and the scheduled
 * lock-loss notifications. Each test is run twice: once against a pre-created
 * node-manager table and once letting the shared-state manager create it.
 */
@RunWith(Parameterized.class)
public class JdbcLeaseLockTest extends ActiveMQTestBase {

   private JdbcSharedStateManager jdbcSharedStateManager;
   private DatabaseStorageConfiguration dbConf;
   private SQLProvider sqlProvider;

   @Parameterized.Parameters(name = "create_tables_prior_test={0}")
   public static List<Object[]> data() {
      return Arrays.asList(new Object[][] {
         {true},
         {false}
      });
   }

   // When true, the node-manager table is created by a separate driver before the test runs.
   @Parameter(0)
   public boolean withExistingTable;

   /** Creates a live lock with the default configured expiration. */
   private LeaseLock lock() {
      return lock(dbConf.getJdbcLockExpirationMillis());
   }

   /** Creates a live lock with the given expiration, owned by a fresh random holder id. */
   private LeaseLock lock(long acquireMillis) {
      try {
         return JdbcSharedStateManager
            .createLiveLock(
               UUID.randomUUID().toString(),
               jdbcSharedStateManager.getJdbcConnectionProvider(),
               sqlProvider,
               acquireMillis);
      } catch (Exception e) {
         throw new IllegalStateException(e);
      }
   }

   /** Creates a live lock with the given expiration and JDBC query timeout. */
   private LeaseLock lock(long acquireMillis, long queryTimeoutMillis) {
      try {
         return JdbcSharedStateManager
            .createLiveLock(
               UUID.randomUUID().toString(),
               jdbcSharedStateManager.getJdbcConnectionProvider(),
               sqlProvider,
               acquireMillis,
               queryTimeoutMillis);
      } catch (Exception e) {
         throw new IllegalStateException(e);
      }
   }

   @Before
   public void createLockTable() throws Exception {
      dbConf = createDefaultDatabaseStorageConfiguration();
      sqlProvider = JDBCUtils.getSQLProvider(
         dbConf.getJdbcDriverClassName(),
         dbConf.getNodeManagerStoreTableName(),
         SQLProvider.DatabaseStoreType.NODE_MANAGER);
      if (withExistingTable) {
         // Pre-create the schema so the shared-state manager below finds an existing table.
         TestJDBCDriver testDriver = TestJDBCDriver
            .usingDbConf(
               dbConf,
               sqlProvider);
         testDriver.start();
         testDriver.stop();
      }
      jdbcSharedStateManager = JdbcSharedStateManager
         .usingConnectionProvider(
            UUID.randomUUID().toString(),
            dbConf.getJdbcLockExpirationMillis(),
            dbConf.getConnectionProvider(),
            sqlProvider);
   }

   @After
   public void dropLockTable() throws Exception {
      jdbcSharedStateManager.destroy();
      jdbcSharedStateManager.close();
   }

   @Test
   public void shouldAcquireLock() {
      final LeaseLock lock = lock();
      final boolean acquired = lock.tryAcquire();
      Assert.assertTrue("Must acquire the lock!", acquired);
      try {
         Assert.assertTrue("The lock is been held by the caller!", lock.isHeldByCaller());
      } finally {
         lock.release();
      }
   }

   @Test
   public void shouldNotAcquireLockWhenAlreadyHeldByOthers() {
      final LeaseLock lock = lock();
      Assert.assertTrue("Must acquire the lock", lock.tryAcquire());
      try {
         Assert.assertTrue("Lock held by the caller", lock.isHeldByCaller());
         // A second lock instance (different holder id) must observe the first one as owner.
         final LeaseLock failingLock = lock();
         Assert.assertFalse("lock already held by other", failingLock.tryAcquire());
         Assert.assertFalse("lock already held by other", failingLock.isHeldByCaller());
         Assert.assertTrue("lock already held by other", failingLock.isHeld());
      } finally {
         lock.release();
      }
   }

   @Test
   public void shouldNotAcquireLockTwice() {
      final LeaseLock lock = lock();
      Assert.assertTrue("Must acquire the lock", lock.tryAcquire());
      try {
         Assert.assertFalse("lock already acquired", lock.tryAcquire());
      } finally {
         lock.release();
      }
   }

   @Test
   public void shouldNotCorruptGuardedState() throws InterruptedException {
      // Two producers take turns incrementing a non-atomically-updated counter while holding
      // the lock: mutual exclusion is proven by the final count being exact.
      final AtomicLong sharedState = new AtomicLong(0);
      final int producers = 2;
      final int writesPerProducer = 10;
      final long idleMillis = 1000;
      final long millisToAcquireLock = writesPerProducer * (producers - 1) * idleMillis;
      final LeaseLock.Pauser pauser = LeaseLock.Pauser.sleep(idleMillis, TimeUnit.MILLISECONDS);
      final CountDownLatch finished = new CountDownLatch(producers);
      final LeaseLock[] locks = new LeaseLock[producers];
      final AtomicInteger lockIndex = new AtomicInteger(0);
      final Runnable producerTask = () -> {
         final LeaseLock lock = locks[lockIndex.getAndIncrement()];
         try {
            for (int i = 0; i < writesPerProducer; i++) {
               final LeaseLock.AcquireResult acquireResult = lock.tryAcquire(millisToAcquireLock, pauser, () -> true);
               if (acquireResult != LeaseLock.AcquireResult.Done) {
                  throw new IllegalStateException(acquireResult + " from " + Thread.currentThread());
               }
               //avoid the atomic getAndIncrement operation on purpose
               sharedState.lazySet(sharedState.get() + 1);
               lock.release();
            }
         } finally {
            finished.countDown();
         }
      };
      final Thread[] producerThreads = new Thread[producers];
      for (int i = 0; i < producers; i++) {
         locks[i] = lock();
         producerThreads[i] = new Thread(producerTask);
      }
      Stream.of(producerThreads).forEach(Thread::start);
      final long maxTestTime = millisToAcquireLock * writesPerProducer * producers;
      Assert.assertTrue("Each producers must complete the writes", finished.await(maxTestTime, TimeUnit.MILLISECONDS));
      Assert.assertEquals("locks hasn't mutual excluded producers", writesPerProducer * producers, sharedState.get());
   }

   @Test
   public void shouldAcquireExpiredLock() throws InterruptedException {
      final LeaseLock lock = lock(10);
      Assert.assertTrue("lock is not owned by anyone", lock.tryAcquire());
      try {
         // Sleep past the (10 ms) expiration so the lease lapses.
         Thread.sleep(lock.expirationMillis() * 2);
         Assert.assertFalse("lock is already expired", lock.isHeldByCaller());
         Assert.assertFalse("lock is already expired", lock.isHeld());
         Assert.assertTrue("lock is already expired", lock.tryAcquire());
      } finally {
         lock.release();
      }
   }

   @Test
   public void shouldOtherAcquireExpiredLock() throws InterruptedException {
      final LeaseLock lock = lock(10);
      Assert.assertTrue("lock is not owned by anyone", lock.tryAcquire());
      try {
         // Once expired, a different holder must be able to take the lock over.
         Thread.sleep(lock.expirationMillis() * 2);
         Assert.assertFalse("lock is already expired", lock.isHeldByCaller());
         Assert.assertFalse("lock is already expired", lock.isHeld());
         final LeaseLock otherLock = lock(10);
         try {
            Assert.assertTrue("lock is already expired", otherLock.tryAcquire());
         } finally {
            otherLock.release();
         }
      } finally {
         lock.release();
      }
   }

   @Test
   public void shouldRenewAcquiredLock() throws InterruptedException {
      final LeaseLock lock = lock(TimeUnit.SECONDS.toMillis(10));
      Assert.assertTrue("lock is not owned by anyone", lock.tryAcquire());
      try {
         Assert.assertTrue("lock is owned", lock.renew());
      } finally {
         lock.release();
      }
   }

   @Test
   public void shouldNotRenewReleasedLock() throws InterruptedException {
      final LeaseLock lock = lock(TimeUnit.SECONDS.toMillis(10));
      Assert.assertTrue("lock is not owned by anyone", lock.tryAcquire());
      lock.release();
      Assert.assertFalse("lock is already released", lock.isHeldByCaller());
      Assert.assertFalse("lock is already released", lock.isHeld());
      Assert.assertFalse("lock is already released", lock.renew());
   }

   @Test
   public void shouldRenewExpiredLockNotAcquiredByOthers() throws InterruptedException {
      final LeaseLock lock = lock(500);
      Assert.assertTrue("lock is not owned by anyone", lock.tryAcquire());
      try {
         // Expired but unclaimed: the original holder may still renew.
         Thread.sleep(lock.expirationMillis() * 2);
         Assert.assertFalse("lock is already expired", lock.isHeldByCaller());
         Assert.assertFalse("lock is already expired", lock.isHeld());
         Assert.assertTrue("lock is owned", lock.renew());
      } finally {
         lock.release();
      }
   }

   @Test
   public void shouldNotRenewLockAcquiredByOthers() throws InterruptedException {
      final LeaseLock lock = lock(10);
      Assert.assertTrue("lock is not owned by anyone", lock.tryAcquire());
      try {
         // Let the lease lapse, hand the lock to another holder, then the first holder's
         // renew must fail.
         Thread.sleep(lock.expirationMillis() * 2);
         Assert.assertFalse("lock is already expired", lock.isHeldByCaller());
         Assert.assertFalse("lock is already expired", lock.isHeld());
         final LeaseLock otherLock = lock(TimeUnit.SECONDS.toMillis(10));
         Assert.assertTrue("lock is already expired", otherLock.tryAcquire());
         try {
            Assert.assertFalse("lock is owned by others", lock.renew());
         } finally {
            otherLock.release();
         }
      } finally {
         lock.release();
      }
   }

   @Test
   public void shouldNotNotifyLostLock() throws Exception {
      final ExecutorService executorService = Executors.newSingleThreadExecutor();
      final ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(1);
      final OrderedExecutorFactory factory = new OrderedExecutorFactory(executorService);
      final ArtemisExecutor artemisExecutor = factory.getExecutor();
      final AtomicLong lostLock = new AtomicLong();
      final LockListener lockListener = () -> {
         lostLock.incrementAndGet();
      };
      final ScheduledLeaseLock scheduledLeaseLock = ScheduledLeaseLock
         .of(scheduledExecutorService, artemisExecutor,
             "test", lock(), dbConf.getJdbcLockRenewPeriodMillis(), lockListener);
      Assert.assertTrue(scheduledLeaseLock.lock().tryAcquire());
      scheduledLeaseLock.start();
      Assert.assertEquals(0, lostLock.get());
      scheduledLeaseLock.stop();
      // While the lock stays held, the listener must never fire.
      Assert.assertEquals(0, lostLock.get());
      executorService.shutdown();
      scheduledExecutorService.shutdown();
      scheduledLeaseLock.lock().release();
   }

   @Test
   public void shouldNotifyManyTimesLostLock() throws Exception {
      final ExecutorService executorService = Executors.newSingleThreadExecutor();
      final ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(1);
      final OrderedExecutorFactory factory = new OrderedExecutorFactory(executorService);
      final ArtemisExecutor artemisExecutor = factory.getExecutor();
      final AtomicLong lostLock = new AtomicLong();
      final LockListener lockListener = () -> {
         lostLock.incrementAndGet();
      };
      final ScheduledLeaseLock scheduledLeaseLock = ScheduledLeaseLock
         .of(scheduledExecutorService, artemisExecutor,
             "test", lock(TimeUnit.SECONDS.toMillis(1)), 100, lockListener);
      Assert.assertTrue(scheduledLeaseLock.lock().tryAcquire());
      scheduledLeaseLock.start();
      // should let the renew to happen at least 1 time, excluding the time to start a scheduled task
      TimeUnit.MILLISECONDS.sleep(2 * scheduledLeaseLock.renewPeriodMillis());
      Assert.assertTrue(scheduledLeaseLock.lock().isHeldByCaller());
      Assert.assertEquals(0, lostLock.get());
      // Releasing behind the scheduler's back makes every later renew attempt fail and
      // notify the listener.
      scheduledLeaseLock.lock().release();
      Assert.assertFalse(scheduledLeaseLock.lock().isHeldByCaller());
      TimeUnit.MILLISECONDS.sleep(3 * scheduledLeaseLock.renewPeriodMillis());
      MatcherAssert.assertThat(lostLock.get(), is(greaterThanOrEqualTo(2L)));
      scheduledLeaseLock.stop();
      executorService.shutdown();
      scheduledExecutorService.shutdown();
   }

   @Test
   public void shouldJdbcAndSystemTimeToBeAligned() throws InterruptedException {
      final LeaseLock lock = lock(TimeUnit.SECONDS.toMillis(10), TimeUnit.SECONDS.toMillis(10));
      // Only JdbcLeaseLock exposes the database clock; skip for other implementations.
      Assume.assumeThat(lock, instanceOf(JdbcLeaseLock.class));
      final JdbcLeaseLock jdbcLock = JdbcLeaseLock.class.cast(lock);
      final long utcSystemTime = System.currentTimeMillis();
      TimeUnit.SECONDS.sleep(1);
      final long utcJdbcTime = jdbcLock.dbCurrentTimeMillis();
      final long millisDiffJdbcSystem = utcJdbcTime - utcSystemTime;
      // DB time sampled 1s later must not lag behind, nor drift more than the query timeout.
      MatcherAssert.assertThat(millisDiffJdbcSystem, greaterThanOrEqualTo(0L));
      MatcherAssert.assertThat(millisDiffJdbcSystem, lessThan(TimeUnit.SECONDS.toMillis(10)));
   }

   @Test
   public void shouldNotifyOnceLostLockIfStopped() throws Exception {
      final ExecutorService executorService = Executors.newSingleThreadExecutor();
      final ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(1);
      final OrderedExecutorFactory factory = new OrderedExecutorFactory(executorService);
      final ArtemisExecutor artemisExecutor = factory.getExecutor();
      final AtomicLong lostLock = new AtomicLong();
      final AtomicReference<ScheduledLeaseLock> lock = new AtomicReference<>();
      final AtomicReference<Throwable> stopErrors = new AtomicReference<>();
      // The listener stops the scheduled lock from inside the callback: the notification
      // must then fire exactly once and the stop must not error out.
      final LockListener lockListener = () -> {
         lostLock.incrementAndGet();
         try {
            lock.get().stop();
         } catch (Throwable e) {
            stopErrors.set(e);
         }
      };
      final ScheduledLeaseLock scheduledLeaseLock = ScheduledLeaseLock
         .of(scheduledExecutorService, artemisExecutor, "test", lock(TimeUnit.SECONDS.toMillis(1)),
             100, lockListener);
      lock.set(scheduledLeaseLock);
      Assert.assertTrue(scheduledLeaseLock.lock().tryAcquire());
      lostLock.set(0);
      scheduledLeaseLock.start();
      Assert.assertTrue(scheduledLeaseLock.lock().isHeldByCaller());
      scheduledLeaseLock.lock().release();
      Assert.assertFalse(scheduledLeaseLock.lock().isHeldByCaller());
      Wait.assertTrue(() -> lostLock.get() > 0);
      Assert.assertFalse(scheduledLeaseLock.isStarted());
      // wait enough to see if it get triggered again
      TimeUnit.MILLISECONDS.sleep(scheduledLeaseLock.renewPeriodMillis());
      Assert.assertEquals(1, lostLock.getAndSet(0));
      Assert.assertNull(stopErrors.getAndSet(null));
      scheduledLeaseLock.stop();
      executorService.shutdown();
      scheduledExecutorService.shutdown();
   }
}
| |
package io.protostuff.jetbrains.plugin.reference;
import static io.protostuff.compiler.model.ProtobufConstants.MSG_ENUM_OPTIONS;
import static io.protostuff.compiler.model.ProtobufConstants.MSG_ENUM_VALUE_OPTIONS;
import static io.protostuff.compiler.model.ProtobufConstants.MSG_FIELD_OPTIONS;
import static io.protostuff.compiler.model.ProtobufConstants.MSG_FILE_OPTIONS;
import static io.protostuff.compiler.model.ProtobufConstants.MSG_MESSAGE_OPTIONS;
import static io.protostuff.compiler.model.ProtobufConstants.MSG_METHOD_OPTIONS;
import static io.protostuff.compiler.model.ProtobufConstants.MSG_ONEOF_OPTIONS;
import static io.protostuff.compiler.model.ProtobufConstants.MSG_SERVICE_OPTIONS;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.PsiReference;
import io.protostuff.jetbrains.plugin.psi.AbstractFieldReferenceNode;
import io.protostuff.jetbrains.plugin.psi.DataTypeContainer;
import io.protostuff.jetbrains.plugin.psi.EnumConstantNode;
import io.protostuff.jetbrains.plugin.psi.EnumNode;
import io.protostuff.jetbrains.plugin.psi.ExtendNode;
import io.protostuff.jetbrains.plugin.psi.FieldNode;
import io.protostuff.jetbrains.plugin.psi.FieldReferenceNode;
import io.protostuff.jetbrains.plugin.psi.GroupNode;
import io.protostuff.jetbrains.plugin.psi.MapNode;
import io.protostuff.jetbrains.plugin.psi.MessageField;
import io.protostuff.jetbrains.plugin.psi.MessageNode;
import io.protostuff.jetbrains.plugin.psi.OneOfNode;
import io.protostuff.jetbrains.plugin.psi.ProtoPsiFileRoot;
import io.protostuff.jetbrains.plugin.psi.ProtoRootNode;
import io.protostuff.jetbrains.plugin.psi.RpcMethodNode;
import io.protostuff.jetbrains.plugin.psi.ServiceNode;
import io.protostuff.jetbrains.plugin.psi.TypeReferenceNode;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Field reference provider.
 *
 * <p>Resolves the components of an option name inside field options (standard options,
 * custom/extension options, and the special {@code default} / {@code json_name} cases)
 * into PSI references.
 *
 * @author Kostiantyn Shchepanovskyi
 */
public class FieldReferenceProviderImpl implements FieldReferenceProvider {

    // "default" field option (a special case)
    public static final String DEFAULT = "default";

    // Like the default value, "json_name" is not an actual option.
    // https://github.com/protocolbuffers/protobuf/blob/ba8692fbade4ba329cc4531e286ab5a8e0821d97/src/google/protobuf/compiler/parser.cc#L1087
    public static final String JSON_NAME = "json_name";

    private static final Logger LOGGER = Logger.getInstance(FieldReferenceProviderImpl.class);

    // Maps the PSI node type an option is attached to onto the qualified name of the
    // google.protobuf.*Options message that declares the standard options for that site.
    private static final Map<Class<? extends PsiElement>, String> TARGET_MAPPING
            = ImmutableMap.<Class<? extends PsiElement>, String>builder()
            .put(FieldNode.class, MSG_FIELD_OPTIONS)
            .put(MapNode.class, MSG_FIELD_OPTIONS)
            .put(MessageNode.class, MSG_MESSAGE_OPTIONS)
            .put(EnumConstantNode.class, MSG_ENUM_VALUE_OPTIONS)
            .put(EnumNode.class, MSG_ENUM_OPTIONS)
            .put(RpcMethodNode.class, MSG_METHOD_OPTIONS)
            .put(ServiceNode.class, MSG_SERVICE_OPTIONS)
            .put(ProtoRootNode.class, MSG_FILE_OPTIONS)
            .put(OneOfNode.class, MSG_ONEOF_OPTIONS)
            .build();

    private final Project project;

    // Lazily-resolved bundled google/protobuf/descriptor.proto file, cached after the first
    // successful lookup. NOTE(review): not synchronized — presumably only accessed from a
    // single (read-action) thread; confirm against the plugin's threading model.
    private PsiFile inm = null;

    public FieldReferenceProviderImpl(Project project) {
        this.project = project;
    }

    /**
     * Builds one {@link PsiReference} per component of the given option reference
     * (e.g. {@code (my.opt).sub.field} yields three references).
     *
     * <p>Returns an empty array when the reference text is empty or the options message
     * for the surrounding context cannot be resolved.
     */
    @NotNull
    @Override
    public PsiReference[] getReferencesByElement(FieldReferenceNode fieldReference) {
        String text = fieldReference.getText();
        if (Strings.isNullOrEmpty(text)) {
            return new PsiReference[0];
        }
        String targetType = getTarget(fieldReference);
        MessageNode message = resolveType(fieldReference, targetType);
        if (message == null) {
            LOGGER.warn("Could not resolve " + targetType);
            return new PsiReference[0];
        }
        // Collect the dotted components of the option reference.
        List<AbstractFieldReferenceNode> components = new ArrayList<>();
        for (PsiElement element : fieldReference.getChildren()) {
            if (element instanceof AbstractFieldReferenceNode) {
                components.add((AbstractFieldReferenceNode) element);
            }
        }
        List<PsiReference> result = new ArrayList<>();
        for (AbstractFieldReferenceNode fieldRef : components) {
            String key = fieldRef.getText();
            MessageField target = null;
            // 'message' is the options message scope for the current component; it is reset
            // to null below and only re-established when the resolved field's type is itself
            // a message, so later components resolve against the right scope (or not at all).
            if (message != null) {
                if (fieldRef.isExtension()) {
                    target = resolveCustomOptionReference(fieldReference, message, key);
                } else {
                    target = resolveStandardOptionReference(fieldReference, message, key);
                }
                message = null;
                if (target instanceof FieldNode) {
                    FieldNode targetField = (FieldNode) target;
                    TypeReferenceNode fieldTypeRef = targetField.getFieldType();
                    if (fieldTypeRef != null) {
                        PsiReference reference = fieldTypeRef.getReference();
                        if (reference != null) {
                            PsiElement fieldType = reference.resolve();
                            if (fieldType instanceof MessageNode) {
                                message = (MessageNode) fieldType;
                            }
                        }
                    }
                }
                if (target instanceof GroupNode) {
                    message = (MessageNode) target;
                }
            }
            TextRange textRange = getTextRange(fieldReference, fieldRef);
            result.add(new OptionReference(fieldReference, textRange, target));
        }
        Collections.reverse(result);
        return result.toArray(new PsiReference[0]);
    }

    /** Returns the sub-reference's text range relative to the start of the whole reference. */
    @NotNull
    private TextRange getTextRange(FieldReferenceNode sourceReference, AbstractFieldReferenceNode subReference) {
        int baseOffset = sourceReference.getTextOffset();
        int startOffset = subReference.getTextOffset();
        int length = subReference.getTextLength();
        return new TextRange(startOffset - baseOffset, startOffset - baseOffset + length);
    }

    /**
     * Walks up the PSI tree until it hits a node type listed in {@link #TARGET_MAPPING} and
     * returns the qualified name of the corresponding options message, or null if none found.
     */
    @Nullable
    private String getTarget(PsiElement element) {
        while (element != null) {
            String result = TARGET_MAPPING.get(element.getClass());
            if (result != null) {
                return result;
            }
            element = element.getParent();
        }
        return null;
    }

    /** Returns the file-level proto root enclosing the given element. */
    private ProtoRootNode getProtoRoot(PsiElement element) {
        PsiElement parent = element.getParent();
        while (!(parent instanceof ProtoRootNode)) {
            parent = parent.getParent();
        }
        return (ProtoRootNode) parent;
    }

    /**
     * Resolves a standard (non-extension) option name against the options message
     * {@code target}, handling the special {@code default} and {@code json_name} keys first.
     */
    private MessageField resolveStandardOptionReference(PsiElement sourceElement, MessageNode target, String key) {
        if (MSG_FIELD_OPTIONS.equals(target.getQualifiedName())) {
            if (DEFAULT.equals(key)) {
                return resolveDefaultOptionReference(sourceElement);
            }
            if (JSON_NAME.equals(key)) {
                MessageField result = resolveJsonNameOptionReference(sourceElement, key);
                return result;
            }
        }
        for (MessageField field : target.getFields()) {
            if (Objects.equals(key, field.getFieldName())) {
                return field;
            }
        }
        return null;
    }

    /**
     * "default" field option is a special case: it is not defined
     * in the {@code google/protobuf/descriptor.proto} and it cannot
     * be treated like other options, as its type depends on a field's
     * type.
     * <p>
     * In order to implement value validation, we have to return the
     * field where this option was applied.
     */
    private MessageField resolveDefaultOptionReference(PsiElement element) {
        while (element != null) {
            if (element instanceof FieldNode) {
                return (MessageField) element;
            }
            element = element.getParent();
        }
        return null;
    }

    /**
     * "json_name" field option is a special case: it is not defined in google.protobuf.FieldOptions,
     * so it is not an actual option.
     * Its definition is inside of google.protobuf.FieldDescriptorProto.
     */
    private MessageField resolveJsonNameOptionReference(PsiElement sourceElement, String key) {
        MessageNode message = resolveType(sourceElement, ".google.protobuf.FieldDescriptorProto");
        MessageField result = null;
        if (message != null) {
            result = resolveStandardOptionReference(sourceElement, message, key);
        }
        return result;
    }

    /**
     * Resolves a custom (extension) option name: gathers all {@code extend} blocks targeting
     * {@code target}, then matches {@code key} either as an absolute name (leading dot) or
     * against each enclosing scope, innermost first.
     */
    @Nullable
    private MessageField resolveCustomOptionReference(PsiElement element, MessageNode target, String key) {
        ProtoRootNode protoRoot = getProtoRoot(element);
        DataTypeContainer container = getContainer(element);
        Deque<String> scopeLookupList = TypeReferenceProviderImpl.createScopeLookupList(container);
        // case 1: (.package.field)
        // case 2: (.package.field).field
        // case 3: (.package.field).(.package.field)
        Collection<ExtendNode> extensions = protoRoot.getExtenstions(target);
        Map<String, MessageField> extensionFields = new HashMap<>();
        for (ExtendNode extension : extensions) {
            for (MessageField field : extension.getExtensionFields().values()) {
                extensionFields.put(extension.getNamespace() + field.getFieldName(), field);
            }
        }
        if (key.startsWith(".")) {
            // Fully-qualified lookup.
            return extensionFields.get(key);
        } else {
            // Relative lookup: try each enclosing scope in order.
            for (String scope : scopeLookupList) {
                MessageField field = extensionFields.get(scope + key);
                if (field != null) {
                    return field;
                }
            }
        }
        return null;
    }

    /** Returns the nearest enclosing type container (message/enum/file scope) of the element. */
    @NotNull
    private DataTypeContainer getContainer(PsiElement element) {
        PsiElement parent = element.getParent();
        while (!(parent instanceof DataTypeContainer)) {
            parent = parent.getParent();
        }
        return (DataTypeContainer) parent;
    }

    /**
     * Resolves a fully-qualified type name, first against the current file (and its imports),
     * then against the bundled descriptor.proto.
     */
    private MessageNode resolveType(PsiElement element, String qualifiedName) {
        MessageNode message = resolveTypeFromCurrentFile(element, qualifiedName);
        // For standard options import is not required.
        // This way they cannot be resolved in standard way, we have to check them
        // separately using bundled descriptor.proto
        // TODO: what if there is non-bundled descriptor.proto available in other location?
        if (message == null) {
            ProtoPsiFileRoot descriptorProto = (ProtoPsiFileRoot) loadInMemoryDescriptorProto();
            if (descriptorProto == null) {
                // by some reason protobuf library is not yet loaded or not attached
                return null;
            }
            // Strip the leading dot: findType expects an unrooted qualified name.
            return (MessageNode) descriptorProto.findType(qualifiedName.substring(1));
        }
        return message;
    }

    /**
     * Finds google/protobuf/descriptor.proto on the source roots of any module and caches
     * the resulting PSI file. Returns null if it cannot be found.
     */
    @Nullable
    private PsiFile loadInMemoryDescriptorProto() {
        if (inm == null) {
            for (Module module : ModuleManager.getInstance(project).getModules()) {
                ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module);
                VirtualFile[] allSourceRoots = moduleRootManager.orderEntries().getAllSourceRoots();
                for (VirtualFile allSourceRoot : allSourceRoots) {
                    PsiDirectory directory = PsiManager.getInstance(project).findDirectory(allSourceRoot);
                    if (directory != null && directory.isValid()) {
                        String relPath = "google/protobuf/descriptor.proto";
                        VirtualFile file = directory.getVirtualFile().findFileByRelativePath(relPath);
                        if (file != null) {
                            PsiManager psiManager = PsiManager.getInstance(project);
                            PsiFile psiFile = psiManager.findFile(file);
                            if (psiFile instanceof ProtoPsiFileRoot) {
                                inm = psiFile;
                                return (ProtoPsiFileRoot) psiFile;
                            }
                        }
                    }
                }
            }
        }
        return inm;
    }

    /** Resolves a qualified type name against the proto root that encloses {@code element}. */
    @Nullable
    private MessageNode resolveTypeFromCurrentFile(PsiElement element, String qualifiedName) {
        PsiElement protoElement = element;
        while (protoElement != null && !(protoElement instanceof ProtoRootNode)) {
            protoElement = protoElement.getParent();
        }
        if (protoElement == null) {
            return null;
        }
        ProtoRootNode proto = (ProtoRootNode) protoElement;
        return (MessageNode) proto.resolve(qualifiedName, new ArrayDeque<>());
    }
}
| |
package org.motechproject.nms.mobileacademy.service.impl;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.motechproject.mtraining.domain.Chapter;
import org.motechproject.mtraining.domain.Course;
import org.motechproject.mtraining.domain.CourseUnitState;
import org.motechproject.mtraining.domain.Lesson;
import org.motechproject.mtraining.domain.Question;
import org.motechproject.mtraining.domain.Quiz;
import org.motechproject.mtraining.service.MTrainingService;
import org.motechproject.nms.mobileacademy.commons.MobileAcademyConstants;
import org.motechproject.nms.mobileacademy.commons.OperatorDetails;
import org.motechproject.nms.mobileacademy.domain.ChapterContent;
import org.motechproject.nms.mobileacademy.domain.LessonContent;
import org.motechproject.nms.mobileacademy.domain.QuestionContent;
import org.motechproject.nms.mobileacademy.domain.QuizContent;
import org.motechproject.nms.mobileacademy.domain.ScoreContent;
import org.motechproject.nms.mobileacademy.repository.ChapterContentDataService;
import org.motechproject.nms.mobileacademy.service.CoursePopulateService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* CoursePopulateServiceImpl class implements CoursePopulateService interface to
* perform course populate related operations in mtraining and content tables.
*
*/
@Service("CoursePopulateService")
public class CoursePopulateServiceImpl implements CoursePopulateService {

    @Autowired
    private MTrainingService mTrainingService;

    @Autowired
    private ChapterContentDataService chapterContentDataService;

    private static final Logger LOGGER = LoggerFactory
            .getLogger(CoursePopulateServiceImpl.class);

    /**
     * Builds a unit name from a prefix and a formatted number using
     * {@code TWO_DIGIT_INTEGER_FORMAT} (presumably "%02d", producing
     * e.g. "Chapter01" — confirm against the constant's definition).
     */
    private static String unitName(String prefix, int number) {
        return prefix
                + String.format(
                        MobileAcademyConstants.TWO_DIGIT_INTEGER_FORMAT,
                        number);
    }

    /**
     * Creates the fixed course skeleton (chapters, lessons, quizzes and
     * questions with placeholder content) in mtraining and returns the
     * persisted course. Audit fields on each unit are stamped from
     * {@code operatorDetails}.
     *
     * @param operatorDetails creator/modifiedBy/owner to stamp on units
     * @return the newly created, {@link CourseUnitState#Inactive} course
     */
    @Override
    public Course populateMtrainingCourseData(OperatorDetails operatorDetails) {
        List<Chapter> chapters = new ArrayList<>();
        for (int chapterNo = 1; chapterNo <= MobileAcademyConstants.NUM_OF_CHAPTERS; chapterNo++) {
            chapters.add(buildChapter(chapterNo, operatorDetails));
        }
        Course course = new Course(MobileAcademyConstants.DEFAULT_COURSE_NAME,
                CourseUnitState.Inactive, null, chapters);
        course.setCreator(operatorDetails.getCreator());
        course.setModifiedBy(operatorDetails.getModifiedBy());
        course.setOwner(operatorDetails.getOwner());
        mTrainingService.createCourse(course);
        LOGGER.info("Course Structure in Mtraining Populated");
        return course;
    }

    /** Builds one chapter (lessons + quiz) with audit fields stamped. */
    private Chapter buildChapter(int chapterNo, OperatorDetails operatorDetails) {
        List<Lesson> lessons = new ArrayList<>();
        for (int lessonNo = 1; lessonNo <= MobileAcademyConstants.NUM_OF_LESSONS; lessonNo++) {
            Lesson lesson = new Lesson(
                    unitName(MobileAcademyConstants.LESSON, lessonNo), null,
                    null);
            lesson.setCreator(operatorDetails.getCreator());
            lesson.setModifiedBy(operatorDetails.getModifiedBy());
            lesson.setOwner(operatorDetails.getOwner());
            lessons.add(lesson);
        }
        List<Question> questions = new ArrayList<>();
        for (int questionNo = 1; questionNo <= MobileAcademyConstants.NUM_OF_QUESTIONS; questionNo++) {
            // Answer is intentionally null here; it is filled in later via
            // updateCorrectAnswer().
            questions.add(new Question(
                    unitName(MobileAcademyConstants.QUESTION, questionNo),
                    null));
        }
        Quiz quiz = new Quiz(MobileAcademyConstants.QUIZ, null, null,
                questions, 0.0);
        quiz.setCreator(operatorDetails.getCreator());
        quiz.setModifiedBy(operatorDetails.getModifiedBy());
        quiz.setOwner(operatorDetails.getOwner());
        Chapter chapter = new Chapter(
                unitName(MobileAcademyConstants.CHAPTER, chapterNo), null,
                null, lessons, quiz);
        chapter.setCreator(operatorDetails.getCreator());
        chapter.setModifiedBy(operatorDetails.getModifiedBy());
        chapter.setOwner(operatorDetails.getOwner());
        return chapter;
    }

    /**
     * Fetches the first course registered under the default course name,
     * or {@code null} when none exists.
     */
    private Course findDefaultCourse() {
        List<Course> courses = mTrainingService
                .getCourseByName(MobileAcademyConstants.DEFAULT_COURSE_NAME);
        if (CollectionUtils.isNotEmpty(courses)) {
            return courses.get(0);
        }
        return null;
    }

    /**
     * @return the default Mobile Academy course, or {@code null} if it has
     *         not been populated yet
     */
    @Override
    public Course getMtrainingCourse() {
        return findDefaultCourse();
    }

    /**
     * @return the state of the default course, or {@code null} when the
     *         course does not exist
     */
    @Override
    public CourseUnitState findCourseState() {
        Course course = findDefaultCourse();
        return course == null ? null : course.getState();
    }

    /**
     * Looks up the correct answer option for a question in a chapter.
     *
     * @return the stored answer as an int, or 0 when the chapter or
     *         question is missing, or the answer is unset/non-numeric
     */
    @Override
    public int getCorrectAnswerOption(Integer chapterNo, Integer questionNo) {
        List<Chapter> chapters = mTrainingService
                .getChapterByName(unitName(MobileAcademyConstants.CHAPTER,
                        chapterNo));
        if (CollectionUtils.isEmpty(chapters)) {
            return 0;
        }
        Quiz quiz = chapters.get(0).getQuiz();
        String questionName = unitName(MobileAcademyConstants.QUESTION,
                questionNo);
        for (Question question : quiz.getQuestions()) {
            if (question.getQuestion().equalsIgnoreCase(questionName)) {
                String answer = question.getAnswer();
                // Questions are created with a null answer and filled in
                // later; treat unset/malformed answers as "not found"
                // rather than throwing NumberFormatException.
                if (answer == null) {
                    return 0;
                }
                try {
                    return Integer.parseInt(answer);
                } catch (NumberFormatException e) {
                    LOGGER.warn(
                            "Non-numeric answer '{}' stored for {} in chapter {}",
                            answer, questionName, chapterNo);
                    return 0;
                }
            }
        }
        return 0;
    }

    /**
     * Transitions the default course to {@code courseUnitState}, stamping
     * the modifier. No-op when the course does not exist.
     */
    @Override
    public void updateCourseState(CourseUnitState courseUnitState,
            OperatorDetails operatorDetails) {
        Course course = findDefaultCourse();
        if (course != null) {
            course.setState(courseUnitState);
            course.setModifiedBy(operatorDetails.getModifiedBy());
            mTrainingService.updateCourse(course);
        }
    }

    /**
     * Sets the answer of the named question inside the named chapter's
     * quiz (case-insensitive name match). No-op when chapter or question
     * is not found.
     */
    @Override
    public void updateCorrectAnswer(String chapterName, String questionName,
            String answer, OperatorDetails operatorDetails) {
        List<Chapter> chapters = mTrainingService.getChapterByName(chapterName);
        if (CollectionUtils.isEmpty(chapters)) {
            return;
        }
        Quiz quiz = chapters.get(0).getQuiz();
        for (Question question : quiz.getQuestions()) {
            if (questionName.equalsIgnoreCase(question.getQuestion())) {
                question.setAnswer(answer);
                quiz.setModifiedBy(operatorDetails.getModifiedBy());
                mTrainingService.updateQuiz(quiz);
                return;
            }
        }
    }

    /** @return all chapter content records from the content table */
    @Override
    public List<ChapterContent> getAllChapterContents() {
        return chapterContentDataService.retrieveAll();
    }

    /**
     * Finds the lesson with the given number and content-type name inside
     * one chapter's content, or {@code null}.
     */
    private static LessonContent findLesson(ChapterContent chapterContent,
            int lessonId, String type) {
        for (LessonContent lessonContent : chapterContent.getLessons()) {
            if (lessonContent.getLessonNumber() == lessonId
                    && lessonContent.getName().equalsIgnoreCase(type)) {
                return lessonContent;
            }
        }
        return null;
    }

    /**
     * Finds the question with the given number and content-type name
     * inside one chapter's quiz content, or {@code null}.
     */
    private static QuestionContent findQuestion(ChapterContent chapterContent,
            int questionId, String type) {
        for (QuestionContent questionContent : chapterContent.getQuiz()
                .getQuestions()) {
            if (questionContent.getQuestionNumber() == questionId
                    && questionContent.getName().equalsIgnoreCase(type)) {
                return questionContent;
            }
        }
        return null;
    }

    /**
     * Finds the score content named {@code type + formatted(scoreId)}
     * inside one chapter's content, or {@code null}.
     */
    private static ScoreContent findScore(ChapterContent chapterContent,
            int scoreId, String type) {
        String scoreName = unitName(type, scoreId);
        for (ScoreContent scoreContent : chapterContent.getScores()) {
            if (scoreContent.getName().equalsIgnoreCase(scoreName)) {
                return scoreContent;
            }
        }
        return null;
    }

    /** Stamps the chapter's modifier and persists it to the content table. */
    private void persist(ChapterContent chapterContent,
            OperatorDetails operatorDetails) {
        chapterContent.setModifiedBy(operatorDetails.getModifiedBy());
        chapterContentDataService.update(chapterContent);
    }

    /**
     * @return the lesson content matching chapter/lesson/type in the given
     *         list, or {@code null} when absent or the list is empty
     */
    @Override
    public LessonContent getLessonContent(List<ChapterContent> chapterContents,
            int chapterId, int lessonId, String type) {
        if (CollectionUtils.isEmpty(chapterContents)) {
            return null;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() == chapterId) {
                LessonContent match = findLesson(chapterContent, lessonId, type);
                if (match != null) {
                    return match;
                }
            }
        }
        return null;
    }

    /**
     * Records {@code fileName} as the audio file of the matching lesson
     * content and persists the owning chapter. No-op when no match exists.
     */
    @Override
    public void setLessonContent(int chapterId, int lessonId, String type,
            String fileName, OperatorDetails operatorDetails) {
        List<ChapterContent> chapterContents = chapterContentDataService
                .retrieveAll();
        if (CollectionUtils.isEmpty(chapterContents)) {
            return;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() != chapterId) {
                continue;
            }
            LessonContent lessonContent = findLesson(chapterContent, lessonId,
                    type);
            if (lessonContent != null) {
                lessonContent.setAudioFile(fileName);
                lessonContent.setModifiedBy(operatorDetails.getModifiedBy());
                persist(chapterContent, operatorDetails);
                return;
            }
        }
    }

    /**
     * @return the question content matching chapter/question/type in the
     *         given list, or {@code null} when absent or the list is empty
     */
    @Override
    public QuestionContent getQuestionContent(
            List<ChapterContent> chapterContents, int chapterId,
            int questionId, String type) {
        if (CollectionUtils.isEmpty(chapterContents)) {
            return null;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() == chapterId) {
                QuestionContent match = findQuestion(chapterContent,
                        questionId, type);
                if (match != null) {
                    return match;
                }
            }
        }
        return null;
    }

    /**
     * Records {@code fileName} as the audio file of the matching question
     * content and persists the owning chapter. No-op when no match exists.
     */
    @Override
    public void setQuestionContent(int chapterId, int questionId, String type,
            String fileName, OperatorDetails operatorDetails) {
        List<ChapterContent> chapterContents = chapterContentDataService
                .retrieveAll();
        if (CollectionUtils.isEmpty(chapterContents)) {
            return;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() != chapterId) {
                continue;
            }
            QuestionContent questionContent = findQuestion(chapterContent,
                    questionId, type);
            if (questionContent != null) {
                questionContent.setAudioFile(fileName);
                questionContent.setModifiedBy(operatorDetails.getModifiedBy());
                persist(chapterContent, operatorDetails);
                return;
            }
        }
    }

    /**
     * @return the score content named {@code type + formatted(scoreId)} in
     *         the given chapter, or {@code null} when absent
     */
    @Override
    public ScoreContent getScore(List<ChapterContent> chapterContents,
            int chapterId, int scoreId, String type) {
        if (CollectionUtils.isEmpty(chapterContents)) {
            return null;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() == chapterId) {
                ScoreContent match = findScore(chapterContent, scoreId, type);
                if (match != null) {
                    return match;
                }
            }
        }
        return null;
    }

    /**
     * Records {@code fileName} as the audio file of the matching score
     * content and persists the owning chapter. No-op when no match exists.
     */
    @Override
    public void setScore(int chapterId, int scoreId, String type,
            String fileName, OperatorDetails operatorDetails) {
        List<ChapterContent> chapterContents = chapterContentDataService
                .retrieveAll();
        if (CollectionUtils.isEmpty(chapterContents)) {
            return;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() != chapterId) {
                continue;
            }
            ScoreContent scoreContent = findScore(chapterContent, scoreId,
                    type);
            if (scoreContent != null) {
                scoreContent.setAudioFile(fileName);
                scoreContent.setModifiedBy(operatorDetails.getModifiedBy());
                persist(chapterContent, operatorDetails);
                return;
            }
        }
    }

    /**
     * @return the chapter content with the given number and type name, or
     *         {@code null} when absent or the list is empty
     */
    @Override
    public ChapterContent getChapterContent(
            List<ChapterContent> chapterContents, int chapterId, String type) {
        if (CollectionUtils.isEmpty(chapterContents)) {
            return null;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() == chapterId
                    && chapterContent.getName().equalsIgnoreCase(type)) {
                return chapterContent;
            }
        }
        return null;
    }

    /**
     * Records {@code fileName} as the audio file of the matching chapter
     * content and persists it. No-op when no match exists.
     */
    @Override
    public void setChapterContent(int chapterId, String type, String fileName,
            OperatorDetails operatorDetails) {
        List<ChapterContent> chapterContents = chapterContentDataService
                .retrieveAll();
        if (CollectionUtils.isEmpty(chapterContents)) {
            return;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() == chapterId
                    && chapterContent.getName().equalsIgnoreCase(type)) {
                chapterContent.setAudioFile(fileName);
                persist(chapterContent, operatorDetails);
                return;
            }
        }
    }

    /**
     * @return the quiz content of the given chapter when its name matches
     *         {@code type}, or {@code null}
     */
    @Override
    public QuizContent getQuizContent(List<ChapterContent> chapterContents,
            int chapterId, String type) {
        if (CollectionUtils.isEmpty(chapterContents)) {
            return null;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() == chapterId) {
                QuizContent quizContent = chapterContent.getQuiz();
                if (quizContent.getName().equalsIgnoreCase(type)) {
                    return quizContent;
                }
            }
        }
        return null;
    }

    /**
     * Records {@code fileName} as the audio file of the matching quiz
     * content and persists the owning chapter. No-op when no match exists.
     */
    @Override
    public void setQuizContent(int chapterId, String type, String fileName,
            OperatorDetails operatorDetails) {
        List<ChapterContent> chapterContents = chapterContentDataService
                .retrieveAll();
        if (CollectionUtils.isEmpty(chapterContents)) {
            return;
        }
        for (ChapterContent chapterContent : chapterContents) {
            if (chapterContent.getChapterNumber() != chapterId) {
                continue;
            }
            QuizContent quizContent = chapterContent.getQuiz();
            if (quizContent.getName().equalsIgnoreCase(type)) {
                quizContent.setAudioFile(fileName);
                quizContent.setModifiedBy(operatorDetails.getModifiedBy());
                persist(chapterContent, operatorDetails);
                return;
            }
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.