gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (C) 2018 AlexMofer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package am.util.font;
import android.text.TextUtils;
import android.util.Xml;
import org.xmlpull.v1.XmlPullParser;
import java.io.File;
import java.io.FileReader;
import java.io.Reader;
import java.util.ArrayList;
/**
* API 9
* Created by Alex on 2018/8/30.
*/
@SuppressWarnings("WeakerAccess")
class FontsReaderBase implements FontsReader {

    protected static final String NAME_FONTS = "fonts";
    protected static final String NAME_FONT = "font";
    protected static final String NAME_NAME = "name";
    protected static final String NAME_FALLBACK = "fallback";
    protected static final String ATTR_TTF = "ttf";

    // Weight/style variants probed for every ttf base name. The three arrays
    // are parallel; their order reproduces the historical insertion order
    // exactly (note that 900/Black intentionally precedes 700/Bold).
    private static final String[] VARIANT_SUFFIX = {
            "-Thin.ttf", "-ThinItalic.ttf",
            "-Light.ttf", "-LightItalic.ttf",
            ".ttf", "-Regular.ttf", "-Italic.ttf",
            "-Medium.ttf", "-MediumItalic.ttf",
            "-Black.ttf", "-BlackItalic.ttf",
            "-Bold.ttf", "-BoldItalic.ttf"};
    private static final int[] VARIANT_WEIGHT = {
            100, 100, 300, 300, 400, 400, 400, 500, 500, 900, 900, 700, 700};
    private static final int[] VARIANT_STYLE = {
            Font.STYLE_NORMAL, Font.STYLE_ITALIC,
            Font.STYLE_NORMAL, Font.STYLE_ITALIC,
            Font.STYLE_NORMAL, Font.STYLE_NORMAL, Font.STYLE_ITALIC,
            Font.STYLE_NORMAL, Font.STYLE_ITALIC,
            Font.STYLE_NORMAL, Font.STYLE_ITALIC,
            Font.STYLE_NORMAL, Font.STYLE_ITALIC};

    // Result set being built while parsing; non-null only after <fonts> seen.
    protected FamilySet mSet;
    // ttf attribute of the <font> element currently being parsed.
    private String mTTF;
    // <name> values collected for the current <font>; first one is canonical,
    // the rest become aliases.
    private final ArrayList<String> mNames = new ArrayList<>();

    @Override
    public String getConfigDir() {
        return DIR_CONFIG;
    }

    @Override
    public String getFontsDir() {
        return DIR_FONTS;
    }

    /**
     * Reads {@code <config dir>/fonts.xml} and returns the parsed family set,
     * or null when the file is missing, unreadable, or invalid.
     */
    @Override
    public FamilySet readConfig() {
        final File config = new File(getConfigDir(), "fonts.xml");
        if (!config.exists() || !config.isFile() || !config.canRead())
            return null;
        final Reader reader;
        try {
            // NOTE(review): FileReader uses the platform default charset
            // (UTF-8 on Android); fonts.xml is expected to be ASCII/UTF-8.
            reader = new FileReader(config);
        } catch (Exception e) {
            return null;
        }
        readConfig(reader);
        try {
            reader.close();
        } catch (Exception e) {
            // ignore - the data has already been parsed
        }
        return mSet;
    }

    /**
     * Drives the pull parser over the whole document, dispatching start/end
     * tags to the protected hooks. Parse errors simply end the scan; whatever
     * was accumulated in {@link #mSet} so far is kept.
     */
    protected void readConfig(Reader reader) {
        final XmlPullParser parser;
        try {
            parser = Xml.newPullParser();
            parser.setInput(reader);
        } catch (Exception e) {
            return;
        }
        try {
            int event = parser.getEventType();
            while (true) {
                switch (event) {
                    case XmlPullParser.START_DOCUMENT:
                        break;
                    case XmlPullParser.END_DOCUMENT:
                        return;
                    case XmlPullParser.START_TAG:
                        startTag(parser);
                        break;
                    case XmlPullParser.END_TAG:
                        endTag(parser);
                        break;
                }
                event = parser.next();
            }
        } catch (Exception e) {
            // ignore - treat a malformed document as "parsed as far as possible"
        }
    }

    /**
     * Dispatches a START_TAG event.
     *
     * @return true when the tag was recognized and handled
     */
    protected boolean startTag(XmlPullParser parser) {
        final String name = parser.getName();
        switch (name) {
            case NAME_FONTS:
                startFonts(parser);
                return true;
            case NAME_FONT:
                startFont(parser);
                return true;
            case NAME_NAME:
                startName(parser);
                return true;
            case NAME_FALLBACK:
                startFallback(parser);
                return true;
        }
        return false;
    }

    /** Handles {@code <fonts>}: starts a fresh, empty family set. */
    protected void startFonts(@SuppressWarnings("unused") XmlPullParser parser) {
        mSet = new FamilySet(null);
    }

    /** Handles {@code <font>}: remembers its ttf base name, resets names. */
    protected void startFont(XmlPullParser parser) {
        mTTF = parser.getAttributeValue(null, ATTR_TTF);
        mNames.clear();
    }

    /**
     * Handles {@code <name>}: collects its text content for the enclosing
     * {@code <font>} element. Elements without a text child are skipped.
     */
    protected void startName(XmlPullParser parser) {
        try {
            if (parser.next() != XmlPullParser.TEXT)
                return;
        } catch (Exception e) {
            return;
        }
        mNames.add(parser.getText());
    }

    /**
     * Handles {@code <fallback>}: registers every existing weight/style
     * variant of the given ttf base name as a fallback entry.
     */
    protected void startFallback(XmlPullParser parser) {
        if (mSet == null)
            return;
        final String ttf = parser.getAttributeValue(null, ATTR_TTF);
        if (TextUtils.isEmpty(ttf))
            return;
        final Fallback fallback = new Fallback();
        addVariants(fallback, ttf);
        mSet.putFallback(fallback);
    }

    /** Adds all standard variants of {@code ttf} to {@code fallback}. */
    private void addVariants(Fallback fallback, String ttf) {
        for (int i = 0; i < VARIANT_SUFFIX.length; i++) {
            fallback.addFont(getFont(ttf + VARIANT_SUFFIX[i], VARIANT_WEIGHT[i], VARIANT_STYLE[i]));
        }
    }

    /** Adds all standard variants of {@code ttf} to {@code family}. */
    private void addVariants(Family family, String ttf) {
        for (int i = 0; i < VARIANT_SUFFIX.length; i++) {
            family.addFont(getFont(ttf + VARIANT_SUFFIX[i], VARIANT_WEIGHT[i], VARIANT_STYLE[i]));
        }
    }

    /**
     * Dispatches an END_TAG event.
     *
     * @return true when the tag was recognized
     */
    protected boolean endTag(XmlPullParser parser) {
        final String name = parser.getName();
        switch (name) {
            case NAME_FONTS:
                endFonts();
                return true;
            case NAME_FONT:
                endFont();
                return true;
            case NAME_NAME:
                // handled eagerly in startName(parser)
                return true;
            case NAME_FALLBACK:
                // handled eagerly in startFallback(parser)
                return true;
        }
        return false;
    }

    /** Handles {@code </fonts>}: drops the set unless it is usable. */
    protected void endFonts() {
        if (mSet != null && mSet.isAvailable())
            return;
        mSet = null;
    }

    /**
     * Handles {@code </font>}: builds a Family named after the first
     * collected {@code <name>}, registers every existing variant file, and
     * maps the remaining names as aliases of the first.
     */
    protected void endFont() {
        if (mSet == null || mNames.isEmpty() || TextUtils.isEmpty(mTTF)) {
            mNames.clear();
            mTTF = null;
            return;
        }
        final String to = mNames.get(0);
        final Family family = new Family(to);
        addVariants(family, mTTF);
        if (mSet.putFamily(family)) {
            final int count = mNames.size();
            for (int i = 1; i < count; i++) {
                mSet.putAlias(new Alias(mNames.get(i), to, -1));
            }
        }
        // Reset per-<font> state so a stray later event cannot reuse it.
        mNames.clear();
        mTTF = null;
    }

    /**
     * Returns a Font entry for {@code name} when the file actually exists in
     * the fonts directory, otherwise null (absent variants are skipped).
     */
    private Font getFont(String name, int weight, int style) {
        final File font = new File(getFontsDir(), name);
        if (font.exists() && font.isFile())
            return new Font(name, weight, style);
        return null;
    }
}
| |
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl;
import com.intellij.codeInsight.daemon.impl.DaemonProgressIndicator;
import com.intellij.ide.startup.impl.StartupManagerImpl;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.util.Processor;
import com.intellij.util.SmartList;
import com.intellij.util.containers.Queue;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.text.SimpleDateFormat;
import java.util.*;
public class DocumentCommitThread extends DocumentCommitProcessor implements Runnable, Disposable {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.DocumentCommitThread");

  private final Queue<CommitTask> documentsToCommit = new Queue<CommitTask>(10);
  private final List<CommitTask> documentsToApplyInEDT = new ArrayList<CommitTask>(10); // guarded by documentsToCommit
  private volatile boolean isDisposed;
  private CommitTask currentTask; // guarded by documentsToCommit
  private volatile boolean threadFinished;
  private volatile boolean myEnabled = true; // true if we can do commits. set to false temporarily during the write action.

  public static DocumentCommitThread getInstance() {
    return ServiceManager.getService(DocumentCommitThread.class);
  }

  public DocumentCommitThread() {
    log("Starting thread", null, false);
    new Thread(this, "Document commit thread").start();
  }

  /**
   * Clears the queue and spins until the background thread acknowledges
   * shutdown ({@link #threadFinished}), waking it up repeatedly.
   */
  @Override
  public void dispose() {
    isDisposed = true;
    synchronized (documentsToCommit) {
      documentsToCommit.clear();
    }
    cancel("Stop thread");
    wakeUpQueue();
    while (!threadFinished) {
      wakeUpQueue();
      synchronized (documentsToCommit) {
        try {
          documentsToCommit.wait(10);
        }
        catch (InterruptedException ignored) {
          // keep looping: we must not leave before the thread has finished
        }
      }
    }
  }

  /** Suspends background commits (called when a write action starts). */
  public void disable(@NonNls Object reason) {
    // write action has just started, all commits are useless
    cancel(reason);
    myEnabled = false;
    log("Disabled", null, false, reason);
  }

  /** Re-enables background commits and wakes the polling thread. */
  public void enable(Object reason) {
    myEnabled = true;
    wakeUpQueue();
    log("Enabled", null, false, reason);
  }

  private void wakeUpQueue() {
    synchronized (documentsToCommit) {
      documentsToCommit.notifyAll();
    }
  }

  /** Cancels the task currently being committed (if any). */
  private void cancel(@NonNls Object reason) {
    startNewTask(null, reason);
  }

  @Override
  public void commitAsynchronously(@NotNull final Project project, @NotNull final Document document, @NonNls @NotNull Object reason) {
    queueCommit(project, document, reason);
  }

  /**
   * Queues the document for background commit, unless the project is not yet
   * initialized or the document has no cached PSI file.
   */
  public void queueCommit(@NotNull final Project project, @NotNull final Document document, @NonNls @NotNull Object reason) {
    assert !isDisposed : "already disposed";
    if (!project.isInitialized()) return;
    PsiFile psiFile = PsiDocumentManager.getInstance(project).getCachedPsiFile(document);
    if (psiFile == null) return;
    doQueue(project, document, reason);
  }

  private void doQueue(Project project, Document document, Object reason) {
    synchronized (documentsToCommit) {
      ProgressIndicator indicator = new DaemonProgressIndicator();
      CommitTask newTask = new CommitTask(document, project, indicator, reason);
      // A new task for this document supersedes any older queued/running one.
      markRemovedFromDocsToCommit(newTask);
      markRemovedCurrentTask(newTask);
      removeFromDocsToApplyInEDT(newTask);
      documentsToCommit.addLast(newTask);
      log("Queued", newTask, false, reason);
      wakeUpQueue();
    }
  }

  private final StringBuilder log = new StringBuilder();

  /**
   * Debug-only tracing. Deliberately disabled by the {@code if (true) return;}
   * guard below; remove it locally to trace queue activity to stderr and the
   * in-memory {@link #log} buffer.
   */
  @Override
  public void log(@NonNls String msg, CommitTask task, boolean synchronously, @NonNls Object... args) {
    if (true) return;
    String indent = new SimpleDateFormat("mm:ss:SSSS").format(new Date()) +
                    (SwingUtilities.isEventDispatchThread() ? "- " : Thread.currentThread().getName().equals("Document commit thread") ? "- >" : "-");
    @NonNls
    String s = indent +
               msg + (synchronously ? " (sync)" : "") +
               (task == null ? "" : "; task: " + task + " (" + System.identityHashCode(task) + ")");
    for (Object arg : args) {
      if (!StringUtil.isEmpty(String.valueOf(arg))) {
        s += "; " + arg;
      }
    }
    if (task != null) {
      boolean stillUncommitted = !task.project.isDisposed() &&
                                 ((PsiDocumentManagerImpl)PsiDocumentManager.getInstance(task.project)).isInUncommittedSet(task.document);
      if (stillUncommitted) {
        s += "; Uncommitted: " + task.document;
      }
    }
    System.err.println(s);
    log.append(s).append("\n");
    if (log.length() > 1000000) {
      log.delete(0, 1000000);
    }
  }

  // cancels all pending commits
  @TestOnly
  public void cancelAll() {
    synchronized (documentsToCommit) {
      cancel("cancel all in tests");
      markRemovedFromDocsToCommit(null);
      documentsToCommit.clear();
      removeFromDocsToApplyInEDT(null);
      markRemovedCurrentTask(null);
    }
  }

  @TestOnly
  public void clearQueue() {
    cancelAll();
    log.setLength(0);
    disable("end of test");
    wakeUpQueue();
  }

  /** Marks the running task removed (all when {@code newTask} is null). */
  private void markRemovedCurrentTask(@Nullable CommitTask newTask) {
    CommitTask task = currentTask;
    if (task != null && (newTask == null || task.equals(newTask))) {
      task.removed = true;
      cancel("Sync commit intervened");
    }
  }

  /** Marks matching EDT-apply tasks removed (all when {@code newTask} is null). */
  private void removeFromDocsToApplyInEDT(@Nullable("null means all") CommitTask newTask) {
    for (int i = documentsToApplyInEDT.size() - 1; i >= 0; i--) {
      CommitTask task = documentsToApplyInEDT.get(i);
      if (newTask == null || task.equals(newTask)) {
        task.removed = true;
        documentsToApplyInEDT.remove(i);
        log("Marked and Removed from EDT apply queue (sync commit called)", task, true);
      }
    }
  }

  /** Marks matching queued tasks removed (all when {@code newTask} is null). */
  private void markRemovedFromDocsToCommit(@Nullable("null means all") final CommitTask newTask) {
    processAll(new Processor<CommitTask>() {
      @Override
      public boolean process(CommitTask task) {
        if (newTask == null || task.equals(newTask)) {
          task.removed = true;
          log("marker as Removed in background queue", task, true);
        }
        return true;
      }
    });
  }

  /** Background thread main loop: poll until {@link #dispose()} is called. */
  @Override
  public void run() {
    threadFinished = false;
    try {
      while (!isDisposed) {
        try {
          pollQueue();
        }
        catch (Throwable e) {
          LOG.error(e);
        }
      }
    }
    finally {
      threadFinished = true;
    }
    // ping the thread waiting for close
    wakeUpQueue();
    log("Good bye", null, false);
  }

  /**
   * Pulls one task off the queue (or waits when empty/disabled), commits it
   * under its progress indicator, and schedules the finish runnable on EDT.
   * On failure the task is re-queued unless a sync commit superseded it.
   */
  private void pollQueue() {
    boolean success = false;
    Document document = null;
    Project project = null;
    CommitTask task = null;
    try {
      ProgressIndicator indicator;
      synchronized (documentsToCommit) {
        if (!myEnabled || documentsToCommit.isEmpty()) {
          documentsToCommit.wait();
          return;
        }
        task = documentsToCommit.pullFirst();
        document = task.document;
        indicator = task.indicator;
        project = task.project;
        log("Pulled", task, false, indicator);
        if (project.isDisposed() || !((PsiDocumentManagerImpl)PsiDocumentManager.getInstance(project)).isInUncommittedSet(document)) {
          log("Abandon and proceed to next", task, false);
          return;
        }
        if (task.removed) {
          return; // document has been marked as removed, e.g. by synchronous commit
        }
        startNewTask(task, "Pulled new task");
        // transfer to documentsToApplyInEDT
        documentsToApplyInEDT.add(task);
      }
      Runnable finishRunnable = null;
      if (indicator.isCanceled()) {
        success = false;
      }
      else {
        final CommitTask commitTask = task;
        final Runnable[] result = new Runnable[1];
        ProgressManager.getInstance().executeProcessUnderProgress(new Runnable() {
          @Override
          public void run() {
            result[0] = commitUnderProgress(commitTask, false);
          }
        }, commitTask.indicator);
        finishRunnable = result[0];
        success = finishRunnable != null;
        log("commit returned", task, false, finishRunnable, indicator);
      }
      if (success) {
        assert !ApplicationManager.getApplication().isDispatchThread();
        UIUtil.invokeLaterIfNeeded(finishRunnable);
        log("Invoked later finishRunnable", task, false, success, finishRunnable, indicator);
      }
    }
    catch (ProcessCanceledException e) {
      cancel(e); // leave queue unchanged
      log("PCE", task, false, e);
      success = false;
    }
    catch (InterruptedException e) {
      // app must be closing
      log("IE", task, false, e);
      cancel(e);
    }
    catch (Throwable e) {
      LOG.error(e);
      cancel(e);
    }
    synchronized (documentsToCommit) {
      // 'task' is still null when an exception fired before a task was pulled
      // (e.g. InterruptedException out of wait()); there is nothing to
      // re-queue then, and dereferencing it would throw NPE.
      if (task != null && !success && !task.removed) { // sync commit has not intervened
        // reset status for queue back successfully
        doQueue(project, document, "re-added on failure");
      }
      currentTask = null; // do not cancel, it's being invokeLatered
    }
  }

  /**
   * Commits the document right now, under the write action. The project must
   * be initialized; otherwise it is disposed and a diagnostic is thrown.
   */
  @Override
  public void commitSynchronously(@NotNull Document document, @NotNull Project project) {
    assert !isDisposed;
    ApplicationManager.getApplication().assertWriteAccessAllowed();
    if (!project.isInitialized() && !project.isDefault()) {
      @NonNls String s = project + "; Disposed: " + project.isDisposed() + "; Open: " + project.isOpen();
      s += "; SA Passed: ";
      try {
        s += ((StartupManagerImpl)StartupManager.getInstance(project)).startupActivityPassed();
      }
      catch (Exception e) {
        s += e;
      }
      try {
        Disposer.dispose(project);
      }
      catch (Throwable ignored) {
        // do not fill log with endless exceptions
      }
      throw new RuntimeException(s);
    }
    ProgressIndicator indicator = createProgressIndicator();
    CommitTask task = new CommitTask(document, project, indicator, "Sync commit");
    synchronized (documentsToCommit) {
      // the sync commit supersedes every queued/running task for this document
      markRemovedFromDocsToCommit(task);
      markRemovedCurrentTask(task);
      removeFromDocsToApplyInEDT(task);
    }
    log("About to commit sync", task, true, indicator);
    Runnable finish = commitUnderProgress(task, true);
    log("Committed sync", task, true, finish, indicator);
    assert finish != null;
    finish.run();
    // let our thread know that queue must be polled again
    wakeUpQueue();
  }

  @NotNull
  @Override
  protected ProgressIndicator createProgressIndicator() {
    return new ProgressIndicatorBase();
  }

  /** Cancels the previous task's indicator and installs {@code task} as current. */
  private void startNewTask(CommitTask task, Object reason) {
    synchronized (documentsToCommit) { // sync to prevent overwriting
      CommitTask cur = currentTask;
      if (cur != null) {
        cur.indicator.cancel();
      }
      currentTask = task;
    }
  }

  // returns finish commit Runnable (to be invoked later in EDT), or null on failure
  @Nullable
  private Runnable commitUnderProgress(@NotNull final CommitTask task,
                                       final boolean synchronously) {
    final Project project = task.project;
    final Document document = task.document;
    final List<Processor<Document>> finishProcessors = new SmartList<Processor<Document>>();
    Runnable runnable = new Runnable() {
      @Override
      public void run() {
        ApplicationManager.getApplication().assertReadAccessAllowed();
        if (project.isDisposed()) return;
        final PsiDocumentManagerImpl documentManager = (PsiDocumentManagerImpl)PsiDocumentManager.getInstance(project);
        FileViewProvider viewProvider = documentManager.getCachedViewProvider(document);
        if (viewProvider == null) return;
        List<PsiFile> psiFiles = viewProvider.getAllFiles();
        for (PsiFile file : psiFiles) {
          if (file.isValid()) {
            Processor<Document> finishProcessor = doCommit(task, file, synchronously);
            if (finishProcessor != null) {
              finishProcessors.add(finishProcessor);
            }
          }
        }
      }
    };
    if (synchronously) {
      ApplicationManager.getApplication().assertWriteAccessAllowed();
      runnable.run();
    }
    else {
      // must not block: if a write action is pending, give up and retry later
      if (!ApplicationManagerEx.getApplicationEx().tryRunReadAction(runnable)) {
        log("Could not start read action", task, synchronously, ApplicationManager.getApplication().isReadAccessAllowed(), Thread.currentThread());
        return null;
      }
    }
    boolean canceled = task.indicator.isCanceled();
    assert !synchronously || !canceled;
    if (canceled || task.removed) {
      return null;
    }
    Runnable finishRunnable = new Runnable() {
      @Override
      public void run() {
        ApplicationManager.getApplication().assertIsDispatchThread();
        Project project = task.project;
        if (project.isDisposed()) return;
        Document document = task.document;
        synchronized (documentsToCommit) {
          boolean isValid = !task.removed;
          for (int i = documentsToApplyInEDT.size() - 1; i >= 0; i--) {
            CommitTask queuedTask = documentsToApplyInEDT.get(i);
            boolean taskIsValid = !queuedTask.removed;
            if (task == queuedTask) { // find the same task in the queue
              documentsToApplyInEDT.remove(i);
              isValid &= taskIsValid;
              log("Task matched, removed from documentsToApplyInEDT", queuedTask, false, task);
            }
            else if (!taskIsValid) {
              documentsToApplyInEDT.remove(i);
              log("Task invalid, removed from documentsToApplyInEDT", queuedTask, false);
            }
          }
          if (!isValid) {
            log("Marked as already committed in EDT apply queue, return", task, true);
            return;
          }
        }
        PsiDocumentManagerImpl documentManager = (PsiDocumentManagerImpl)PsiDocumentManager.getInstance(project);
        log("Executing later finishCommit", task, false);
        boolean success = documentManager.finishCommit(document, finishProcessors, synchronously, task.reason);
        if (synchronously) {
          assert success;
        }
        log("after call finishCommit", task, synchronously, success);
        if (synchronously || success) {
          assert !documentManager.isInUncommittedSet(document);
        }
        if (!success) {
          // add document back to the queue
          queueCommit(project, document, "Re-added back");
        }
      }
    };
    return finishRunnable;
  }

  /** Applies {@code processor} to every queued task, under the queue lock. */
  private boolean processAll(final Processor<CommitTask> processor) {
    final boolean[] result = {true};
    synchronized (documentsToCommit) {
      documentsToCommit.process(new Processor<CommitTask>() {
        @Override
        public boolean process(CommitTask commitTask) {
          result[0] &= processor.process(commitTask);
          return true;
        }
      });
    }
    return result[0];
  }
}
| |
package com.v3;
import java.util.List;
import javax.swing.JOptionPane;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.Transaction;
import com.v3.utilities.HibernateUtil;
/**
 * Generic Hibernate CRUD helper. A session is opened by the constructor;
 * callers MUST invoke {@link #closeSession()} when done with the instance.
 */
public class GenericCRUD<T> {

    private Session session;
    private final Log log = LogFactory.getLog(GenericCRUD.class);

    public GenericCRUD() {
        openSession();
    }

    /**
     * This method is already pre-initialized within the Constructor.
     */
    public void openSession() {
        session = HibernateUtil.getSessionFactory().openSession();
    }

    /**
     * <b>REQUIRED CALL - When Finished.</b>
     * Close the Session we are working with.
     */
    public void closeSession() {
        if (session.isConnected())
            session.close();
    }

    /**
     * Checks to see if session is open.
     * Will open new session if not open.
     */
    private void isSessionOpen() {
        // NOTE(review): isConnected() is used as a proxy for "open";
        // Session.isOpen() would be the more direct check - confirm intent.
        if (!session.isConnected()) {
            openSession();
        }
    }

    public void evictSessionItem(T entity) {
        session.evict(entity);
    }

    /**
     * Rolls back {@code txn} if it was actually started. Guards against a
     * failure inside beginTransaction() itself, where txn is still null and
     * the bare txn.rollback() would throw NullPointerException.
     */
    private void rollback(Transaction txn) {
        if (txn != null)
            txn.rollback();
    }

    /**
     * CREATE: persists the entity in its own transaction.
     *
     * @param entity the entity to save
     * @return true on success, false when the transaction was rolled back
     */
    public boolean create(T entity) {
        isSessionOpen();
        Transaction txn = null;
        boolean rv = false;
        log.info("Session pushing create()");
        try {
            txn = session.beginTransaction();
            session.save(entity);
            txn.commit(); // commit pushes changes to Database
            rv = true;
        } catch (HibernateException e) {
            rollback(txn);
            e.printStackTrace();
            JOptionPane.showMessageDialog(null, "Error Creating Item: " + e.getMessage());
            // detach the failed entity so the session stays usable
            session.evict(entity);
            rv = false;
        }
        return rv;
    }

    /**
     * UPDATE &amp; MERGE so as to attach to session.
     *
     * @param entity the (possibly detached) entity to merge and update
     */
    public void update(T entity) {
        isSessionOpen();
        Transaction txn = null;
        log.info("Session pushing update()");
        try {
            txn = session.beginTransaction();
            session.merge(entity);
            txn.commit();
        } catch (HibernateException e) {
            rollback(txn);
            JOptionPane.showMessageDialog(null, "Error" + e);
        }
    }

    /**
     * DELETE: removes the entity in its own transaction.
     *
     * @param entity the entity to delete
     */
    public void delete(T entity) {
        isSessionOpen();
        Transaction txn = null;
        log.info("Session pushing delete()");
        try {
            txn = session.beginTransaction();
            session.delete(entity);
            txn.commit();
        } catch (HibernateException e) {
            rollback(txn);
            JOptionPane.showMessageDialog(null, "Error" + e);
        }
    }

    // READING methods

    /**
     * Its better to use this method versus findAll();
     * this method allows for pagination and more in depth constraining.
     * Use findAll() for a simple and quick query to list all items.
     *
     * @param queryCall the prepared query to execute
     * @return List&lt;T&gt; of entity objects, or null when the query failed
     */
    @SuppressWarnings("unchecked")
    public List<T> findMany(Query queryCall) {
        isSessionOpen();
        Transaction txn = null;
        List<T> t = null;
        log.info("Session pushing findMany()");
        try {
            txn = session.beginTransaction();
            t = (List<T>) queryCall.list();
            txn.commit();
        } catch (HibernateException e) {
            e.printStackTrace();
            JOptionPane.showMessageDialog(null, "Error: " + e.getMessage());
            // t is null when list() itself failed; evicting null would throw
            if (t != null)
                session.evict(t);
            rollback(txn);
        }
        return t;
    }

    /**
     * Executes a query expected to yield a single row.
     *
     * @param queryCall the prepared query to execute
     * @return the unique result, or null when absent or on failure
     */
    @SuppressWarnings("unchecked")
    public T findOne(Query queryCall) {
        isSessionOpen();
        Transaction txn = null;
        T t = null;
        log.info("Session pushing findOne()");
        try {
            txn = session.beginTransaction();
            t = (T) queryCall.uniqueResult();
            txn.commit();
        } catch (HibernateException e) {
            rollback(txn);
            JOptionPane.showMessageDialog(null, "Error" + e);
        }
        // detach so later modifications are not flushed implicitly;
        // guard: t is null when no row matched or the query failed
        if (t != null)
            session.evict(t);
        return t;
    }

    /**
     * Lists every row of the entity's table.
     *
     * @param clazz the mapped entity class
     * @return all entities, or null on failure
     */
    @SuppressWarnings("unchecked")
    public List<T> findAll(Class<?> clazz) {
        isSessionOpen();
        Transaction txn = null;
        List<T> t = null;
        log.info("Session pushing findAll()");
        try {
            txn = session.beginTransaction();
            t = (List<T>) session.createQuery("from " + clazz.getName()).list();
            txn.commit();
        } catch (HibernateException e) {
            e.printStackTrace();
            rollback(txn);
            JOptionPane.showMessageDialog(null, "Error" + e);
        }
        return t;
    }

    /**
     * Loads an entity by primary key.
     *
     * @param clazz the mapped entity class
     * @param id    the primary key
     * @return the entity, or null when absent or on failure
     */
    @SuppressWarnings("unchecked")
    public T findByID(Class<?> clazz, Integer id) {
        isSessionOpen();
        Transaction txn = null;
        T t = null;
        log.info("Session pushing findByID()");
        try {
            txn = session.beginTransaction();
            t = (T) session.get(clazz, id);
            txn.commit();
        } catch (HibernateException e) {
            rollback(txn);
            JOptionPane.showMessageDialog(null, "Error" + e);
        }
        return t;
    }

    /**
     * @return the session
     */
    public Session getSession() {
        isSessionOpen();
        return session;
    }

    /**
     * @param session the session to set
     */
    public void setSession(Session session) {
        this.session = session;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.protocol.mqtt;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.core.remoting.CloseListener;
import org.apache.activemq.artemis.core.remoting.FailureListener;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.spi.core.remoting.Connection;
public class MQTTConnection implements RemotingConnection {

    private final Connection transportConnection;

    private final long creationTime;

    // Set by the transport when bytes arrive; checkDataReceived() consumes it
    // atomically (compareAndSet), so it must be an AtomicBoolean.
    private final AtomicBoolean dataReceived;

    // Flags written and read from different threads; volatile for visibility.
    private volatile boolean destroyed;

    private volatile boolean connected;

    private final List<FailureListener> failureListeners = Collections.synchronizedList(new ArrayList<FailureListener>());

    private final List<CloseListener> closeListeners = Collections.synchronizedList(new ArrayList<CloseListener>());

    public MQTTConnection(Connection transportConnection) throws Exception {
        this.transportConnection = transportConnection;
        this.creationTime = System.currentTimeMillis();
        this.dataReceived = new AtomicBoolean();
        this.destroyed = false;
    }

    /** Delegates to the transport connection's identifier. */
    @Override
    public Object getID() {
        return transportConnection.getID();
    }

    @Override
    public long getCreationTime() {
        return creationTime;
    }

    @Override
    public String getRemoteAddress() {
        return transportConnection.getRemoteAddress();
    }

    @Override
    public void addFailureListener(FailureListener listener) {
        failureListeners.add(listener);
    }

    @Override
    public boolean removeFailureListener(FailureListener listener) {
        return failureListeners.remove(listener);
    }

    @Override
    public void addCloseListener(CloseListener listener) {
        closeListeners.add(listener);
    }

    @Override
    public boolean removeCloseListener(CloseListener listener) {
        return closeListeners.remove(listener);
    }

    /** Atomically drains and returns all registered close listeners. */
    @Override
    public List<CloseListener> removeCloseListeners() {
        synchronized (closeListeners) {
            List<CloseListener> deletedCloseListeners = new ArrayList<CloseListener>(closeListeners);
            closeListeners.clear();
            return deletedCloseListeners;
        }
    }

    /** Replaces the registered close listeners with {@code listeners}. */
    @Override
    public void setCloseListeners(List<CloseListener> listeners) {
        // "set" semantics: replace, matching setFailureListeners below
        // (previously this appended without clearing, leaking old listeners)
        synchronized (closeListeners) {
            closeListeners.clear();
            closeListeners.addAll(listeners);
        }
    }

    @Override
    public List<FailureListener> getFailureListeners() {
        return failureListeners;
    }

    /** Atomically drains and returns all registered failure listeners. */
    @Override
    public List<FailureListener> removeFailureListeners() {
        synchronized (failureListeners) {
            List<FailureListener> deletedFailureListeners = new ArrayList<FailureListener>(failureListeners);
            failureListeners.clear();
            return deletedFailureListeners;
        }
    }

    /** Replaces the registered failure listeners with {@code listeners}. */
    @Override
    public void setFailureListeners(List<FailureListener> listeners) {
        synchronized (failureListeners) {
            failureListeners.clear();
            failureListeners.addAll(listeners);
        }
    }

    @Override
    public ActiveMQBuffer createTransportBuffer(int size) {
        return transportConnection.createTransportBuffer(size);
    }

    /**
     * Notifies all failure listeners of the given failure. Listeners are
     * called on a snapshot taken under the list's lock: invoking alien
     * listener code while holding the lock risks deadlock, and a listener
     * that (un)registers listeners would otherwise cause
     * ConcurrentModificationException.
     */
    @Override
    public void fail(ActiveMQException me) {
        final List<FailureListener> listeners;
        synchronized (failureListeners) {
            listeners = new ArrayList<FailureListener>(failureListeners);
        }
        for (FailureListener listener : listeners) {
            listener.connectionFailed(me, false);
        }
    }

    /** Same as {@link #fail(ActiveMQException)}; scale-down target is unused here. */
    @Override
    public void fail(ActiveMQException me, String scaleDownTargetNodeID) {
        final List<FailureListener> listeners;
        synchronized (failureListeners) {
            listeners = new ArrayList<FailureListener>(failureListeners);
        }
        for (FailureListener listener : listeners) {
            //FIXME(mtaylor) How do we check if the node has failed over?
            listener.connectionFailed(me, false);
        }
    }

    @Override
    public void destroy() {
        //TODO(mtaylor) ensure this properly destroys this connection.
        destroyed = true;
        disconnect(false);
    }

    @Override
    public Connection getTransportConnection() {
        return transportConnection;
    }

    @Override
    public boolean isClient() {
        return false;
    }

    @Override
    public boolean isDestroyed() {
        return destroyed;
    }

    @Override
    public void disconnect(boolean criticalError) {
        transportConnection.forceClose();
    }

    @Override
    public void disconnect(String scaleDownNodeID, boolean criticalError) {
        transportConnection.forceClose();
    }

    /** Called by the protocol handler whenever data arrives. */
    protected void dataReceived() {
        dataReceived.set(true);
    }

    /** Consumes the data-received flag: true iff data arrived since last check. */
    @Override
    public boolean checkDataReceived() {
        return dataReceived.compareAndSet(true, false);
    }

    @Override
    public void flush() {
        transportConnection.checkFlushBatchBuffer();
    }

    /** No-op: MQTT frames are decoded in the protocol handler, not here. */
    @Override
    public void bufferReceived(Object connectionID, ActiveMQBuffer buffer) {
    }

    public void setConnected(boolean connected) {
        this.connected = connected;
    }

    public boolean getConnected() {
        return connected;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tech.tablesaw.io.csv;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.net.URL;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import tech.tablesaw.api.ColumnType;
import tech.tablesaw.io.ReadOptions;
import tech.tablesaw.io.Source;
public class CsvReadOptions extends ReadOptions {
private final Character separator;
private final Character quoteChar;
private final Character escapeChar;
private final String lineEnding;
private final Integer maxNumberOfColumns;
private final Character commentPrefix;
private final boolean lineSeparatorDetectionEnabled;
private final int sampleSize;
// Copies every CSV-specific setting out of the builder; common settings are
// handled by the ReadOptions super constructor.
private CsvReadOptions(CsvReadOptions.Builder builder) {
  super(builder);
  this.separator = builder.separator;
  this.quoteChar = builder.quoteChar;
  this.escapeChar = builder.escapeChar;
  this.lineEnding = builder.lineEnding;
  this.maxNumberOfColumns = builder.maxNumberOfColumns;
  this.commentPrefix = builder.commentPrefix;
  this.lineSeparatorDetectionEnabled = builder.lineSeparatorDetectionEnabled;
  this.sampleSize = builder.sampleSize;
}
public static Builder builder(Source source) {
return new Builder(source);
}
public static Builder builder(File file) {
return new Builder(file).tableName(file.getName());
}
public static Builder builder(String fileName) {
return new Builder(new File(fileName));
}
public static Builder builder(URL url) throws IOException {
return new Builder(url);
}
public static Builder builderFromFile(String fileName) {
return new Builder(new File(fileName));
}
public static Builder builderFromString(String contents) {
return new Builder(new StringReader(contents));
}
public static Builder builderFromUrl(String url) throws IOException {
return new Builder(new URL(url));
}
/**
* This method may cause tablesaw to buffer the entire InputStream.
*
* <p>If you have a large amount of data, you can do one of the following: 1. Use the method
* taking a File instead of a stream, or 2. Provide the array of column types as an option. If you
* provide the columnType array, we skip type detection and can avoid reading the entire file
*/
public static Builder builder(InputStream stream) {
return new Builder(stream);
}
/**
* This method may cause tablesaw to buffer the entire InputStream.
*
* <p>If you have a large amount of data, you can do one of the following: 1. Use the method
* taking a File instead of a reader, or 2. Provide the array of column types as an option. If you
* provide the columnType array, we skip type detection and can avoid reading the entire file
*/
public static Builder builder(Reader reader) {
return new Builder(reader);
}
/**
* This method may cause tablesaw to buffer the entire InputStream.
*
* <p>If you have a large amount of data, you can do one of the following: 1. Use the method
* taking a File instead of a reader, or 2. Provide the array of column types as an option. If you
* provide the columnType array, we skip type detection and can avoid reading the entire file
*/
public static Builder builder(InputStreamReader reader) {
return new Builder(reader);
}
public ColumnType[] columnTypes() {
return columnTypeReadOptions.columnTypes();
}
public Character separator() {
return separator;
}
public Character quoteChar() {
return quoteChar;
}
public Character escapeChar() {
return escapeChar;
}
public String lineEnding() {
return lineEnding;
}
public boolean lineSeparatorDetectionEnabled() {
return lineSeparatorDetectionEnabled;
}
public Integer maxNumberOfColumns() {
return maxNumberOfColumns;
}
public Character commentPrefix() {
return commentPrefix;
}
public int maxCharsPerColumn() {
return maxCharsPerColumn;
}
public int sampleSize() {
return sampleSize;
}
public static class Builder extends ReadOptions.Builder {
private Character separator;
private Character quoteChar;
private Character escapeChar;
private String lineEnding;
private Integer maxNumberOfColumns = 10_000;
private Character commentPrefix;
private boolean lineSeparatorDetectionEnabled = true;
private int sampleSize = -1;
protected Builder(Source source) {
super(source);
}
protected Builder(URL url) throws IOException {
super(url);
}
protected Builder(File file) {
super(file);
}
protected Builder(InputStreamReader reader) {
super(reader);
}
protected Builder(Reader reader) {
super(reader);
}
protected Builder(InputStream stream) {
super(stream);
}
@Override
public Builder columnTypes(ColumnType[] columnTypes) {
super.columnTypes(columnTypes);
return this;
}
@Override
public Builder columnTypes(Function<String, ColumnType> columnTypeFunction) {
super.columnTypes(columnTypeFunction);
return this;
}
@Override
public Builder columnTypesPartial(Function<String, Optional<ColumnType>> columnTypeFunction) {
super.columnTypesPartial(columnTypeFunction);
return this;
}
@Override
public Builder columnTypesPartial(Map<String, ColumnType> columnTypeByName) {
super.columnTypesPartial(columnTypeByName);
return this;
}
public Builder separator(Character separator) {
this.separator = separator;
return this;
}
public Builder quoteChar(Character quoteChar) {
this.quoteChar = quoteChar;
return this;
}
public Builder escapeChar(Character escapeChar) {
this.escapeChar = escapeChar;
return this;
}
public Builder lineEnding(String lineEnding) {
this.lineEnding = lineEnding;
this.lineSeparatorDetectionEnabled = false;
return this;
}
/**
* Defines maximal value of columns in csv file.
*
* @param maxNumberOfColumns - must be positive integer. Default is 10,000
*/
public Builder maxNumberOfColumns(Integer maxNumberOfColumns) {
this.maxNumberOfColumns = maxNumberOfColumns;
return this;
}
public Builder commentPrefix(Character commentPrefix) {
this.commentPrefix = commentPrefix;
return this;
}
/**
* Defines the maximum number of rows to be read from the file. Sampling is performed in a
* single pass using the reservoir sampling algorithm
* (https://en.wikipedia.org/wiki/Reservoir_sampling). Given a file with 'n' rows, if
* 'numSamples is smaller than 'n', than exactly 'numSamples' random samples are returned; if
* 'numSamples' is greater than 'n', then only 'n' samples are returned (no oversampling is
* performed to increase the data to match 'numSamples').
*/
public Builder sampleSize(int numSamples) {
this.sampleSize = numSamples;
return this;
}
@Override
public CsvReadOptions build() {
return new CsvReadOptions(this);
}
// Override super-class setters to return an instance of this class
@Override
public Builder header(boolean header) {
super.header(header);
return this;
}
/**
* Enable reading of a table with duplicate column names. After the first appearance of a column
* name, subsequent appearances will have a number appended.
*
* @param allow if true, duplicate names will be allowed
*/
@Override
public Builder allowDuplicateColumnNames(Boolean allow) {
super.allowDuplicateColumnNames(allow);
return this;
}
@Override
public Builder columnTypesToDetect(List<ColumnType> columnTypesToDetect) {
super.columnTypesToDetect(columnTypesToDetect);
return this;
}
@Override
public Builder tableName(String tableName) {
super.tableName(tableName);
return this;
}
@Override
public Builder sample(boolean sample) {
super.sample(sample);
return this;
}
@Override
@Deprecated
public Builder dateFormat(String dateFormat) {
super.dateFormat(dateFormat);
return this;
}
@Override
@Deprecated
public Builder timeFormat(String timeFormat) {
super.timeFormat(timeFormat);
return this;
}
@Override
@Deprecated
public Builder dateTimeFormat(String dateTimeFormat) {
super.dateTimeFormat(dateTimeFormat);
return this;
}
@Override
public Builder dateFormat(DateTimeFormatter dateFormat) {
super.dateFormat(dateFormat);
return this;
}
@Override
public Builder timeFormat(DateTimeFormatter timeFormat) {
super.timeFormat(timeFormat);
return this;
}
@Override
public Builder dateTimeFormat(DateTimeFormatter dateTimeFormat) {
super.dateTimeFormat(dateTimeFormat);
return this;
}
@Override
public Builder maxCharsPerColumn(int maxCharsPerColumn) {
super.maxCharsPerColumn(maxCharsPerColumn);
return this;
}
@Override
public Builder locale(Locale locale) {
super.locale(locale);
return this;
}
@Override
public Builder missingValueIndicator(String... missingValueIndicators) {
super.missingValueIndicator(missingValueIndicators);
return this;
}
@Override
public Builder minimizeColumnSizes() {
super.minimizeColumnSizes();
return this;
}
@Override
public Builder ignoreZeroDecimal(boolean ignoreZeroDecimal) {
super.ignoreZeroDecimal(ignoreZeroDecimal);
return this;
}
@Override
public Builder skipRowsWithInvalidColumnCount(boolean skipRowsWithInvalidColumnCount) {
super.skipRowsWithInvalidColumnCount(skipRowsWithInvalidColumnCount);
return this;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.rest.swagger;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.swagger.models.HttpMethod;
import io.swagger.models.Operation;
import io.swagger.models.Path;
import io.swagger.models.Scheme;
import io.swagger.models.Swagger;
import io.swagger.models.auth.ApiKeyAuthDefinition;
import io.swagger.models.auth.In;
import io.swagger.models.auth.SecuritySchemeDefinition;
import io.swagger.models.parameters.Parameter;
import io.swagger.models.parameters.QueryParameter;
import io.swagger.parser.SwaggerParser;
import io.swagger.util.Json;
import org.apache.camel.CamelContext;
import org.apache.camel.Category;
import org.apache.camel.Consumer;
import org.apache.camel.Endpoint;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.http.HttpComponent;
import org.apache.camel.component.http.HttpEndpoint;
import org.apache.camel.component.http.HttpProducer;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.RestConfiguration;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.DefaultEndpoint;
import org.apache.camel.support.ResourceHelper;
import org.apache.camel.support.jsse.SSLContextParameters;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.UnsafeUriCharactersEncoder;
import org.apache.http.client.methods.HttpGet;
import static java.util.Optional.ofNullable;
import static org.apache.camel.component.rest.swagger.RestSwaggerHelper.isHostParam;
import static org.apache.camel.component.rest.swagger.RestSwaggerHelper.isMediaRange;
import static org.apache.camel.util.ObjectHelper.isNotEmpty;
import static org.apache.camel.util.ObjectHelper.notNull;
import static org.apache.camel.util.StringHelper.after;
import static org.apache.camel.util.StringHelper.before;
import static org.apache.camel.util.StringHelper.notEmpty;
/**
* Configure REST producers based on a Swagger (OpenAPI) specification document delegating to a component implementing
* the RestProducerFactory interface.
*/
@UriEndpoint(firstVersion = "2.19.0", scheme = "rest-swagger", title = "REST Swagger",
             syntax = "rest-swagger:specificationUri#operationId",
             category = { Category.REST, Category.SWAGGER, Category.HTTP }, producerOnly = true)
public final class RestSwaggerEndpoint extends DefaultEndpoint {
    /**
     * Remaining parameters specified in the Endpoint URI.
     */
    Map<String, Object> parameters = Collections.emptyMap();
    @UriParam(
        description = "API basePath, for example \"`/v2`\". Default is unset, if set overrides the value present in"
            + " Swagger specification and in the component configuration.",
        defaultValue = "", label = "producer")
    private String basePath;
    @UriParam(description = "Name of the Camel component that will perform the requests. The component must be present"
        + " in Camel registry and it must implement RestProducerFactory service provider interface. If not set"
        + " CLASSPATH is searched for single component that implements RestProducerFactory SPI. Overrides"
        + " component configuration.",
        label = "producer")
    private String componentName;
    @UriParam(
        description = "What payload type this component capable of consuming. Could be one type, like `application/json`"
            + " or multiple types as `application/json, application/xml; q=0.5` according to the RFC7231. This equates"
            + " to the value of `Accept` HTTP header. If set overrides any value found in the Swagger specification and."
            + " in the component configuration",
        label = "producer")
    private String consumes;
    @UriParam(description = "Scheme hostname and port to direct the HTTP requests to in the form of"
        + " `http[s]://hostname[:port]`. Can be configured at the endpoint, component or in the corresponding"
        + " REST configuration in the Camel Context. If you give this component a name (e.g. `petstore`) that"
        + " REST configuration is consulted first, `rest-swagger` next, and global configuration last. If set"
        + " overrides any value found in the Swagger specification, RestConfiguration. Overrides all other "
        + " configuration.",
        label = "producer")
    private String host;
    @UriPath(description = "ID of the operation from the Swagger specification.", label = "producer")
    @Metadata(required = true)
    private String operationId;
    @UriParam(description = "What payload type this component is producing. For example `application/json`"
        + " according to the RFC7231. This equates to the value of `Content-Type` HTTP header. If set overrides"
        + " any value present in the Swagger specification. Overrides all other configuration.",
        label = "producer")
    private String produces;
    @UriParam(label = "security", description = "To configure security using SSLContextParameters.")
    private SSLContextParameters sslContextParameters;
    @UriPath(description = "Path to the Swagger specification file. The scheme, host base path are taken from this"
        + " specification, but these can be overridden with properties on the component or endpoint level. If not"
        + " given the component tries to load `swagger.json` resource from the classpath. Note that the `host` defined on the"
        + " component and endpoint of this Component should contain the scheme, hostname and optionally the"
        + " port in the URI syntax (i.e. `http://api.example.com:8080`). Overrides component configuration."
        + " The Swagger specification can be loaded from different sources by prefixing with file: classpath: http: https:."
        + " Support for https is limited to using the JDK installed UrlHandler, and as such it can be cumbersome to setup"
        + " TLS/SSL certificates for https (such as setting a number of javax.net.ssl JVM system properties)."
        + " How to do that consult the JDK documentation for UrlHandler.",
        defaultValue = RestSwaggerComponent.DEFAULT_SPECIFICATION_URI_STR,
        defaultValueNote = "By default loads `swagger.json` file", label = "producer")
    private URI specificationUri = RestSwaggerComponent.DEFAULT_SPECIFICATION_URI;
    @UriParam(description = "Resolve references in Swagger specification.", label = "producer")
    private Boolean resolveReferences;

    public RestSwaggerEndpoint() {
        // help tooling instantiate endpoint
    }

    /**
     * Creates the endpoint from a URI of the form {@code rest-swagger:specificationUri#operationId}.
     * Either part of {@code remaining} may be omitted: a missing specification URI falls back to the
     * component's setting (or the default), and a missing {@code #} means the whole remainder is the
     * operation ID.
     */
    public RestSwaggerEndpoint(final String uri, final String remaining, final RestSwaggerComponent component,
                               final Map<String, Object> parameters) {
        super(notEmpty(uri, "uri"), notNull(component, "component"));
        this.parameters = parameters;
        final URI componentSpecificationUri = component.getSpecificationUri();
        specificationUri = before(remaining, "#", StringHelper::trimToNull).map(URI::create)
            .orElse(ofNullable(componentSpecificationUri).orElse(RestSwaggerComponent.DEFAULT_SPECIFICATION_URI));
        operationId = ofNullable(after(remaining, "#")).orElse(remaining);
        setExchangePattern(ExchangePattern.InOut);
    }

    @Override
    public Consumer createConsumer(final Processor processor) throws Exception {
        // producerOnly = true on the @UriEndpoint annotation; consuming is not supported
        throw new UnsupportedOperationException("Consumer not supported");
    }

    /**
     * Loads the Swagger specification, locates the operation whose ID matches {@code operationId},
     * and creates a producer for its HTTP method and (parameter-resolved) URI template.
     *
     * @throws IllegalArgumentException if no operation with the configured ID exists in the
     *             specification; the message lists the IDs that were found
     */
    @Override
    public Producer createProducer() throws Exception {
        final CamelContext camelContext = getCamelContext();
        final Swagger swagger = loadSpecificationFrom(camelContext, specificationUri, resolveSslContextParameters(),
            determineResolveReferences());
        final Map<String, Path> paths = swagger.getPaths();
        for (final Entry<String, Path> pathEntry : paths.entrySet()) {
            final Path path = pathEntry.getValue();
            final Optional<Entry<HttpMethod, Operation>> maybeOperationEntry = path.getOperationMap().entrySet()
                .stream().filter(operationEntry -> operationId.equals(operationEntry.getValue().getOperationId()))
                .findAny();
            if (maybeOperationEntry.isPresent()) {
                final Entry<HttpMethod, Operation> operationEntry = maybeOperationEntry.get();
                final Operation operation = operationEntry.getValue();
                // path-style parameters are substituted directly into the URI template
                final Map<String, Parameter> pathParameters = operation.getParameters().stream()
                    .filter(p -> "path".equals(p.getIn()))
                    .collect(Collectors.toMap(Parameter::getName, Function.identity()));
                final String uriTemplate = resolveUri(pathEntry.getKey(), pathParameters);
                final HttpMethod httpMethod = operationEntry.getKey();
                final String method = httpMethod.name();
                return createProducerFor(swagger, operation, method, uriTemplate);
            }
        }
        final String supportedOperations = paths.values().stream().flatMap(p -> p.getOperations().stream())
            .map(Operation::getOperationId).collect(Collectors.joining(", "));
        throw new IllegalArgumentException(
            "The specified operation with ID: `" + operationId
                + "` cannot be found in the Swagger specification loaded from `" + specificationUri
                + "`. Operations defined in the specification are: " + supportedOperations);
    }

    /** Resolves SSL parameters with precedence: endpoint, then component, then global. */
    private SSLContextParameters resolveSslContextParameters() {
        if (sslContextParameters != null) {
            return sslContextParameters;
        }
        if (component().getSslContextParameters() != null) {
            return component().getSslContextParameters();
        }
        return component().retrieveGlobalSslContextParameters();
    }

    public String getBasePath() {
        return basePath;
    }

    public String getComponentName() {
        return componentName;
    }

    public String getConsumes() {
        return consumes;
    }

    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }

    public String getHost() {
        return host;
    }

    public String getOperationId() {
        return operationId;
    }

    public String getProduces() {
        return produces;
    }

    public URI getSpecificationUri() {
        return specificationUri;
    }

    @Override
    public boolean isLenientProperties() {
        // unknown URI parameters are kept and forwarded to the delegate REST endpoint
        return true;
    }

    public void setBasePath(final String basePath) {
        this.basePath = notEmpty(basePath, "basePath");
    }

    public void setComponentName(final String componentName) {
        this.componentName = notEmpty(componentName, "componentName");
    }

    public void setConsumes(final String consumes) {
        this.consumes = isMediaRange(consumes, "consumes");
    }

    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }

    public void setHost(final String host) {
        this.host = isHostParam(host);
    }

    public void setOperationId(final String operationId) {
        this.operationId = notEmpty(operationId, "operationId");
    }

    public void setProduces(final String produces) {
        this.produces = isMediaRange(produces, "produces");
    }

    public void setSpecificationUri(final URI specificationUri) {
        this.specificationUri = notNull(specificationUri, "specificationUri");
    }

    public Boolean getResolveReferences() {
        return resolveReferences;
    }

    public void setResolveReferences(Boolean resolveReferences) {
        this.resolveReferences = resolveReferences;
    }

    RestSwaggerComponent component() {
        return (RestSwaggerComponent) getComponent();
    }

    /**
     * Builds a `rest:` endpoint URI for the given method/path and wraps its producer. The endpoint
     * is configured from {@link #determineEndpointParameters(Swagger, Operation)}.
     */
    Producer createProducerFor(
            final Swagger swagger, final Operation operation, final String method,
            final String uriTemplate)
            throws Exception {
        final String basePath = determineBasePath(swagger);
        final StringBuilder componentEndpointUri = new StringBuilder(200).append("rest:").append(method).append(":")
            .append(basePath).append(":").append(uriTemplate);
        final CamelContext camelContext = getCamelContext();
        final Endpoint endpoint = camelContext.getEndpoint(componentEndpointUri.toString());
        Map<String, Object> params = determineEndpointParameters(swagger, operation);
        boolean hasHost = params.containsKey("host");
        if (endpoint instanceof DefaultEndpoint) {
            // let the rest endpoint configure itself
            DefaultEndpoint de = (DefaultEndpoint) endpoint;
            de.setProperties(endpoint, params);
        }
        // if there is a host then we should use this hardcoded host instead of any Header that may have an existing
        // Host header from some other HTTP input, and if so then lets remove it
        return new RestSwaggerProducer(endpoint.createProducer(), hasHost);
    }

    /**
     * Determines the API base path with precedence: endpoint, component, specification, REST
     * configuration, then the component default.
     */
    String determineBasePath(final Swagger swagger) {
        if (isNotEmpty(basePath)) {
            return basePath;
        }
        final String componentBasePath = component().getBasePath();
        if (isNotEmpty(componentBasePath)) {
            return componentBasePath;
        }
        final String specificationBasePath = swagger.getBasePath();
        if (isNotEmpty(specificationBasePath)) {
            return specificationBasePath;
        }
        final CamelContext camelContext = getCamelContext();
        final RestConfiguration restConfiguration
            = CamelContextHelper.getRestConfiguration(camelContext, null, determineComponentName());
        final String restConfigurationBasePath = restConfiguration.getContextPath();
        if (isNotEmpty(restConfigurationBasePath)) {
            return restConfigurationBasePath;
        }
        return RestSwaggerComponent.DEFAULT_BASE_PATH;
    }

    /** Returns the endpoint-level component name, falling back to the component's setting. */
    String determineComponentName() {
        return Optional.ofNullable(componentName).orElse(component().getComponentName());
    }

    /** Endpoint setting wins over component setting; defaults to {@code false} when neither is set. */
    Boolean determineResolveReferences() {
        return Stream.of(getResolveReferences(), component().getResolveReferences())
                .filter(Objects::nonNull)
                .findFirst()
                .orElse(false);
    }

    /**
     * Computes the property map used to configure the delegate `rest:` endpoint: producer component,
     * host, consumes/produces media types, query parameters, and nested component/endpoint
     * parameters.
     */
    Map<String, Object> determineEndpointParameters(final Swagger swagger, final Operation operation) {
        final Map<String, Object> parameters = new HashMap<>();
        final String componentName = determineComponentName();
        if (componentName != null) {
            parameters.put("producerComponentName", componentName);
        }
        final String host = determineHost(swagger);
        if (host != null) {
            parameters.put("host", host);
        }
        final RestSwaggerComponent component = component();
        // what we consume is what the API defined by Swagger specification
        // produces
        final String determinedConsumes = determineOption(swagger.getProduces(), operation.getProduces(),
            component.getConsumes(), consumes);
        if (isNotEmpty(determinedConsumes)) {
            parameters.put("consumes", determinedConsumes);
        }
        // what we produce is what the API defined by Swagger specification
        // consumes
        final String determinedProduces = determineOption(swagger.getConsumes(), operation.getConsumes(),
            component.getProduces(), produces);
        if (isNotEmpty(determinedProduces)) {
            parameters.put("produces", determinedProduces);
        }
        final String queryParameters = determineQueryParameters(swagger, operation).map(this::queryParameter)
            .collect(Collectors.joining("&"));
        if (isNotEmpty(queryParameters)) {
            parameters.put("queryParameters", queryParameters);
        }
        // pass properties that might be applied if the delegate component is
        // created, i.e. if it's not
        // present in the Camel Context already
        final Map<String, Object> componentParameters = new HashMap<>();
        if (component.isUseGlobalSslContextParameters()) {
            // by default it's false
            componentParameters.put("useGlobalSslContextParameters", component.isUseGlobalSslContextParameters());
        }
        if (component.getSslContextParameters() != null) {
            componentParameters.put("sslContextParameters", component.getSslContextParameters());
        }
        final Map<Object, Object> nestedParameters = new HashMap<>();
        if (!componentParameters.isEmpty()) {
            // we're trying to set RestEndpoint.parameters['component']
            nestedParameters.put("component", componentParameters);
        }
        // Add rest endpoint parameters
        addRestEndpointParameters(operation, nestedParameters);
        if (!nestedParameters.isEmpty()) {
            parameters.put("parameters", nestedParameters);
        }
        return parameters;
    }

    /**
     * Copies the endpoint URI parameters into the nested parameter map for the delegate REST
     * endpoint, skipping any parameter whose name matches a `path` parameter of the operation
     * (those are already substituted into the URI template and would otherwise be duplicated as
     * query parameters).
     */
    private void addRestEndpointParameters(Operation operation, Map<Object, Object> nestedParameters) {
        if (this.parameters != null && operation.getParameters() != null) {
            for (Entry<String, Object> entry : this.parameters.entrySet()) {
                // BUGFIX: the entry must be added only when it clashes with NONE of the
                // operation's path parameters. The previous per-parameter check re-added the
                // entry whenever any single (non-matching) parameter was examined, defeating
                // the skip for operations with more than one parameter.
                final boolean clash = operation.getParameters().stream()
                    .anyMatch(param -> "path".equals(param.getIn()) && entry.getKey().equals(param.getName()));
                if (!clash) {
                    nestedParameters.put(entry.getKey(), entry.getValue());
                }
            }
        }
    }

    /**
     * Determines the destination host with precedence: endpoint, component, specification
     * (scheme + host), REST configuration, and finally the specification URI itself when it is an
     * absolute http(s) URI.
     *
     * @throws IllegalStateException when no host can be determined from any source
     */
    String determineHost(final Swagger swagger) {
        if (isNotEmpty(host)) {
            return host;
        }
        final String componentHost = component().getHost();
        if (isNotEmpty(componentHost)) {
            return componentHost;
        }
        final String swaggerScheme = pickBestScheme(specificationUri.getScheme(), swagger.getSchemes());
        final String swaggerHost = swagger.getHost();
        if (isNotEmpty(swaggerScheme) && isNotEmpty(swaggerHost)) {
            return swaggerScheme + "://" + swaggerHost;
        }
        final CamelContext camelContext = getCamelContext();
        final RestConfiguration globalRestConfiguration
            = CamelContextHelper.getRestConfiguration(camelContext, null, determineComponentName());
        final String globalConfigurationHost = hostFrom(globalRestConfiguration);
        if (globalConfigurationHost != null) {
            return globalConfigurationHost;
        }
        final String specificationScheme = specificationUri.getScheme();
        // Perform a case insensitive "startsWith" check that works for different locales
        String prefix = Scheme.HTTP.toValue();
        if (specificationUri.isAbsolute() && specificationScheme.regionMatches(true, 0, prefix, 0, prefix.length())) {
            try {
                return new URI(
                    specificationUri.getScheme(), specificationUri.getUserInfo(), specificationUri.getHost(),
                    specificationUri.getPort(), null, null, null).toString();
            } catch (final URISyntaxException e) {
                throw new IllegalStateException("Unable to create a new URI from: " + specificationUri, e);
            }
        }
        // BUGFIX: the "` component" suffix was previously inside the else-branch of the
        // ternary, so it was dropped whenever a component name was configured
        throw new IllegalStateException(
            "Unable to determine destination host for requests. The Swagger specification"
                + " does not specify `scheme` and `host` parameters, the specification URI is not absolute with `http` or"
                + " `https` scheme, and no RestConfigurations configured with `scheme`, `host` and `port` were found for `"
                + (determineComponentName() != null ? determineComponentName() : "default") + "` component"
                + " and there is no global RestConfiguration with those properties");
    }

    /** URI-encodes the literal value supplied for the given path parameter. */
    String literalPathParameterValue(final Parameter parameter) {
        final String name = parameter.getName();
        final String valueStr = String.valueOf(parameters.get(name));
        final String encoded = UnsafeUriCharactersEncoder.encode(valueStr);
        return encoded;
    }

    /** Renders {@code name=value} with the literal value URI-encoded. */
    String literalQueryParameterValue(final Parameter parameter) {
        final String name = parameter.getName();
        final String valueStr = String.valueOf(parameters.get(name));
        final String encoded = UnsafeUriCharactersEncoder.encode(valueStr);
        return name + "=" + encoded;
    }

    /**
     * Renders a single query parameter: a literal value when supplied on the endpoint URI,
     * otherwise a Camel placeholder expression.
     */
    String queryParameter(final Parameter parameter) {
        final String name = parameter.getName();
        if (ObjectHelper.isEmpty(name)) {
            return "";
        }
        if (parameters.containsKey(name)) {
            return literalQueryParameterValue(parameter);
        }
        return queryParameterExpression(parameter);
    }

    /**
     * Substitutes `{name}` placeholders in the URI template with literal values from the endpoint
     * parameters; placeholders without a supplied value are kept as-is for later resolution.
     */
    String resolveUri(final String uriTemplate, final Map<String, Parameter> pathParameters) {
        if (pathParameters.isEmpty()) {
            return uriTemplate;
        }
        int start = uriTemplate.indexOf('{');
        if (start == -1) {
            return uriTemplate;
        }
        int pos = 0;
        final StringBuilder resolved = new StringBuilder(uriTemplate.length() * 2);
        while (start != -1) {
            resolved.append(uriTemplate, pos, start);
            final int end = uriTemplate.indexOf('}', start);
            final String name = uriTemplate.substring(start + 1, end);
            if (parameters.containsKey(name)) {
                final Parameter parameter = pathParameters.get(name);
                final Object value = literalPathParameterValue(parameter);
                resolved.append(value);
            } else {
                resolved.append('{').append(name).append('}');
            }
            pos = end + 1;
            start = uriTemplate.indexOf('{', pos);
        }
        if (pos < uriTemplate.length()) {
            resolved.append(uriTemplate, pos, uriTemplate.length());
        }
        return resolved.toString();
    }

    /**
     * Picks a media-type option with precedence: endpoint, component, operation, then
     * specification; list values are joined with {@code ", "}. Returns null when nothing is set.
     */
    static String determineOption(
            final List<String> specificationLevel, final List<String> operationLevel,
            final String componentLevel, final String endpointLevel) {
        if (isNotEmpty(endpointLevel)) {
            return endpointLevel;
        }
        if (isNotEmpty(componentLevel)) {
            return componentLevel;
        }
        if (operationLevel != null && !operationLevel.isEmpty()) {
            return String.join(", ", operationLevel);
        }
        if (specificationLevel != null && !specificationLevel.isEmpty()) {
            return String.join(", ", specificationLevel);
        }
        return null;
    }

    /**
     * Streams the operation's query parameters, prepending required query parameters synthesized
     * from any query-located API-key security definitions the operation requires.
     */
    static Stream<Parameter> determineQueryParameters(final Swagger swagger, final Operation operation) {
        final List<Map<String, List<String>>> securityRequirements = operation.getSecurity();
        final List<QueryParameter> apiKeyQueryParameters = new ArrayList<>();
        if (securityRequirements != null) {
            final Map<String, SecuritySchemeDefinition> securityDefinitions = swagger.getSecurityDefinitions();
            for (final Map<String, List<String>> securityRequirement : securityRequirements) {
                for (final String securityRequirementName : securityRequirement.keySet()) {
                    final SecuritySchemeDefinition securitySchemeDefinition = securityDefinitions
                        .get(securityRequirementName);
                    if (securitySchemeDefinition instanceof ApiKeyAuthDefinition) {
                        final ApiKeyAuthDefinition apiKeyDefinition = (ApiKeyAuthDefinition) securitySchemeDefinition;
                        if (apiKeyDefinition.getIn() == In.QUERY) {
                            apiKeyQueryParameters.add(new QueryParameter().name(apiKeyDefinition.getName())
                                .required(true).type("string").description(apiKeyDefinition.getDescription()));
                        }
                    }
                }
            }
        }
        return Stream.concat(apiKeyQueryParameters.stream(),
            operation.getParameters().stream().filter(p -> "query".equals(p.getIn())));
    }

    /** Builds {@code scheme://host[:port]} from a REST configuration, omitting default ports. */
    static String hostFrom(final RestConfiguration restConfiguration) {
        if (restConfiguration == null) {
            return null;
        }
        final String scheme = restConfiguration.getScheme();
        final String host = restConfiguration.getHost();
        final int port = restConfiguration.getPort();
        if (scheme == null || host == null) {
            return null;
        }
        final StringBuilder answer = new StringBuilder(scheme).append("://").append(host);
        if (port > 0 && !(Scheme.HTTP.toValue().equalsIgnoreCase(scheme) && port == 80)
            && !(Scheme.HTTPS.toValue().equalsIgnoreCase(scheme) && port == 443)) {
            answer.append(':').append(port);
        }
        return answer.toString();
    }

    /**
     * Loads the Swagger definition model from the given path. Tries to resolve the resource using Camel's resource
     * loading support, if it fails uses Swagger's resource loading support instead. When SSL context parameters are
     * supplied the specification is fetched over HTTP(S) using a dedicated, SSL-configured HTTP component.
     *
     * @param uri URI of the specification
     * @param camelContext context to use
     * @return the specification
     * @throws IOException when the specification cannot be read or parsed
     */
    static Swagger loadSpecificationFrom(
            final CamelContext camelContext, final URI uri, SSLContextParameters sslContextParameters,
            boolean resolveReferences)
            throws IOException {
        final ObjectMapper mapper = Json.mapper();
        final SwaggerParser swaggerParser = new SwaggerParser();
        final String uriAsString = uri.toString();
        if (sslContextParameters == null) {
            try (InputStream stream = ResourceHelper.resolveMandatoryResourceAsInputStream(camelContext, uriAsString)) {
                return parseInputStream(swaggerParser, mapper, stream, resolveReferences);
            } catch (final Exception e) {
                return loadSpecificationFallback(swaggerParser, uriAsString, e, resolveReferences);
            }
        }
        HttpComponent httpComponent = new HttpComponent();
        httpComponent.setSslContextParameters(sslContextParameters);
        httpComponent.setCamelContext(camelContext);
        try (HttpEndpoint e = (HttpEndpoint) httpComponent.createEndpoint(uriAsString);
             HttpProducer p = (HttpProducer) e.createProducer();
             InputStream stream = p.getHttpClient().execute(new HttpGet(uri)).getEntity().getContent()) {
            return parseInputStream(swaggerParser, mapper, stream, resolveReferences);
        } catch (final Exception e) {
            return loadSpecificationFallback(swaggerParser, uriAsString, e, resolveReferences);
        }
    }

    /**
     * Last-resort load via Swagger's own resource resolution; throws (preserving the original
     * failure as the cause) when that also yields nothing.
     */
    static Swagger loadSpecificationFallback(
            SwaggerParser swaggerParser, String uriAsString, Exception originalException, boolean resolveReferences) {
        // try Swaggers loader
        final Swagger swagger = swaggerParser.read(uriAsString, null, resolveReferences);
        if (swagger != null) {
            return swagger;
        }
        throw new IllegalArgumentException(
            "The given Swagger specification could not be loaded from `" + uriAsString
                + "`. Tried loading using Camel's resource resolution and using Swagger's own resource resolution."
                + " Swagger tends to swallow exceptions while parsing, try specifying Java system property `debugParser`"
                + " (e.g. `-DdebugParser=true`), the exception that occurred when loading using Camel's resource"
                + " loader follows",
            originalException);
    }

    /** Parses the stream as JSON and hands the tree to the Swagger parser. */
    static Swagger parseInputStream(
            SwaggerParser swaggerParser, ObjectMapper mapper, InputStream stream, boolean resolveReferences)
            throws IOException {
        final JsonNode node = mapper.readTree(stream);
        return swaggerParser.read(node, resolveReferences);
    }

    /**
     * Picks the scheme to use: HTTPS over HTTP when the specification declares schemes, otherwise
     * the specification URI's own scheme when it is http or https. Returns null when neither
     * applies (WebSocket schemes are not supported).
     */
    static String pickBestScheme(final String specificationScheme, final List<Scheme> schemes) {
        if (schemes != null && !schemes.isEmpty()) {
            if (schemes.contains(Scheme.HTTPS)) {
                return Scheme.HTTPS.toValue();
            }
            if (schemes.contains(Scheme.HTTP)) {
                return Scheme.HTTP.toValue();
            }
        }
        // BUGFIX: equality (case-insensitive, per RFC 3986 scheme rules) instead of the previous
        // `contains` check, which accepted partial schemes such as "htt" and rejected "HTTP"
        if (specificationScheme != null && (Scheme.HTTP.toValue().equalsIgnoreCase(specificationScheme)
            || Scheme.HTTPS.toValue().equalsIgnoreCase(specificationScheme))) {
            return specificationScheme;
        }
        // there is no support for WebSocket (Scheme.WS, Scheme.WSS)
        return null;
    }

    /** Renders a Camel placeholder for the query parameter, optional ({@code ?}) when not required. */
    static String queryParameterExpression(final Parameter parameter) {
        final String name = parameter.getName();
        final StringBuilder expression = new StringBuilder(name).append("={").append(name);
        if (!parameter.getRequired()) {
            expression.append('?');
        }
        expression.append('}');
        return expression.toString();
    }
}
| |
/*
* [The "BSD license"]
* Copyright (c) 2012 Terence Parr
* Copyright (c) 2012 Sam Harwell
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.v4.test.tool;
import org.antlr.v4.runtime.atn.ArrayPredictionContext;
import org.antlr.v4.runtime.atn.PredictionContext;
import org.antlr.v4.runtime.atn.PredictionContextCache;
import org.antlr.v4.runtime.atn.SingletonPredictionContext;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.IdentityHashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
/**
 * Unit tests for {@link PredictionContext#merge}. Each test builds two prediction-context
 * graphs, merges them, and compares the merged graph — rendered as a Graphviz DOT digraph —
 * against the expected structure.
 *
 * <p>Naming convention: lower-case letters ({@code a}..{@code d}, {@code u}..{@code y}) denote
 * singleton contexts with fixed return states (see the SUPPORT section), {@code $} is the empty
 * context, a leading {@code A} means the operands are wrapped in
 * {@link ArrayPredictionContext}s, and a {@code _fullctx} suffix runs the merge in
 * full-context mode (root rendered as {@code $} instead of wildcard {@code *}).</p>
 */
public class TestGraphNodes {
	// Cache shared by merge operations; recreated before each test in setUp().
	PredictionContextCache contextCache;
	@Before
	public void setUp() {
		// Reset the global node counter so node numbering is deterministic across tests.
		PredictionContext.globalNodeCount = 1;
		contextCache = new PredictionContextCache();
	}
	// Merge mode used by most tests: root context is a wildcard (SLL-style merge).
	public boolean rootIsWildcard() { return true; }
	// Merge mode for the *_fullctx tests: root context is $ (full-context merge).
	public boolean fullCtx() { return false; }
	@Test public void test_$_$() {
		PredictionContext r = PredictionContext.merge(PredictionContext.EMPTY,
													  PredictionContext.EMPTY,
													  rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"*\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_$_$_fullctx() {
		PredictionContext r = PredictionContext.merge(PredictionContext.EMPTY,
													  PredictionContext.EMPTY,
													  fullCtx(), null);
		System.out.println(toDOTString(r, fullCtx()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"$\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, fullCtx()));
	}
	@Test public void test_x_$() {
		PredictionContext r = PredictionContext.merge(x(), PredictionContext.EMPTY, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"*\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_x_$_fullctx() {
		PredictionContext r = PredictionContext.merge(x(), PredictionContext.EMPTY, fullCtx(), null);
		System.out.println(toDOTString(r, fullCtx()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>$\"];\n" +
			"  s1[label=\"$\"];\n" +
			"  s0:p0->s1[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, fullCtx()));
	}
	@Test public void test_$_x() {
		PredictionContext r = PredictionContext.merge(PredictionContext.EMPTY, x(), rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"*\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_$_x_fullctx() {
		PredictionContext r = PredictionContext.merge(PredictionContext.EMPTY, x(), fullCtx(), null);
		System.out.println(toDOTString(r, fullCtx()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>$\"];\n" +
			"  s1[label=\"$\"];\n" +
			"  s0:p0->s1[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, fullCtx()));
	}
	@Test public void test_a_a() {
		PredictionContext r = PredictionContext.merge(a(), a(), rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_a$_ax() {
		PredictionContext a1 = a();
		PredictionContext x = x();
		PredictionContext a2 = createSingleton(x, 1);
		PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_a$_ax_fullctx() {
		PredictionContext a1 = a();
		PredictionContext x = x();
		PredictionContext a2 = createSingleton(x, 1);
		PredictionContext r = PredictionContext.merge(a1, a2, fullCtx(), null);
		System.out.println(toDOTString(r, fullCtx()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[shape=record, label=\"<p0>|<p1>$\"];\n" +
			"  s2[label=\"$\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"  s1:p0->s2[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, fullCtx()));
	}
	@Test public void test_ax$_a$() {
		PredictionContext x = x();
		PredictionContext a1 = createSingleton(x, 1);
		PredictionContext a2 = a();
		PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_aa$_a$_$_fullCtx() {
		PredictionContext empty = PredictionContext.EMPTY;
		PredictionContext child1 = createSingleton(empty, 8);
		PredictionContext right = PredictionContext.merge(empty, child1, false, null);
		PredictionContext left = createSingleton(right, 8);
		PredictionContext merged = PredictionContext.merge(left, right, false, null);
		String actual = toDOTString(merged, false);
		System.out.println(actual);
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>$\"];\n" +
			"  s1[shape=record, label=\"<p0>|<p1>$\"];\n" +
			"  s2[label=\"$\"];\n" +
			"  s0:p0->s1[label=\"8\"];\n" +
			"  s1:p0->s2[label=\"8\"];\n" +
			"}\n";
		assertEquals(expecting, actual);
	}
	@Test public void test_ax$_a$_fullctx() {
		PredictionContext x = x();
		PredictionContext a1 = createSingleton(x, 1);
		PredictionContext a2 = a();
		PredictionContext r = PredictionContext.merge(a1, a2, fullCtx(), null);
		System.out.println(toDOTString(r, fullCtx()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[shape=record, label=\"<p0>|<p1>$\"];\n" +
			"  s2[label=\"$\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"  s1:p0->s2[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, fullCtx()));
	}
	@Test public void test_a_b() {
		PredictionContext r = PredictionContext.merge(a(), b(), rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s1[label=\"2\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_ax_ax_same() {
		PredictionContext x = x();
		PredictionContext a1 = createSingleton(x, 1);
		PredictionContext a2 = createSingleton(x, 1);
		PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s2[label=\"*\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"  s1->s2[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_ax_ax() {
		PredictionContext x1 = x();
		PredictionContext x2 = x();
		PredictionContext a1 = createSingleton(x1, 1);
		PredictionContext a2 = createSingleton(x2, 1);
		PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s2[label=\"*\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"  s1->s2[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_abx_abx() {
		PredictionContext x1 = x();
		PredictionContext x2 = x();
		PredictionContext b1 = createSingleton(x1, 2);
		PredictionContext b2 = createSingleton(x2, 2);
		PredictionContext a1 = createSingleton(b1, 1);
		PredictionContext a2 = createSingleton(b2, 1);
		PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s3[label=\"*\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"  s1->s2[label=\"2\"];\n" +
			"  s2->s3[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_abx_acx() {
		PredictionContext x1 = x();
		PredictionContext x2 = x();
		PredictionContext b = createSingleton(x1, 2);
		PredictionContext c = createSingleton(x2, 3);
		PredictionContext a1 = createSingleton(b, 1);
		PredictionContext a2 = createSingleton(c, 1);
		PredictionContext r = PredictionContext.merge(a1, a2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s3[label=\"*\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"  s1:p0->s2[label=\"2\"];\n" +
			"  s1:p1->s2[label=\"3\"];\n" +
			"  s2->s3[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_ax_bx_same() {
		PredictionContext x = x();
		PredictionContext a = createSingleton(x, 1);
		PredictionContext b = createSingleton(x, 2);
		PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s2[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s1[label=\"2\"];\n" +
			"  s1->s2[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_ax_bx() {
		PredictionContext x1 = x();
		PredictionContext x2 = x();
		PredictionContext a = createSingleton(x1, 1);
		PredictionContext b = createSingleton(x2, 2);
		PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s2[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s1[label=\"2\"];\n" +
			"  s1->s2[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_ax_by() {
		PredictionContext a = createSingleton(x(), 1);
		PredictionContext b = createSingleton(y(), 2);
		PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s3[label=\"*\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"2\"];\n" +
			"  s2->s3[label=\"10\"];\n" +
			"  s1->s3[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_a$_bx() {
		PredictionContext x2 = x();
		PredictionContext a = a();
		PredictionContext b = createSingleton(x2, 2);
		PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"2\"];\n" +
			"  s2->s1[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_a$_bx_fullctx() {
		PredictionContext x2 = x();
		PredictionContext a = a();
		PredictionContext b = createSingleton(x2, 2);
		PredictionContext r = PredictionContext.merge(a, b, fullCtx(), null);
		System.out.println(toDOTString(r, fullCtx()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s1[label=\"$\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"2\"];\n" +
			"  s2->s1[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, fullCtx()));
	}
	@Ignore("Known inefficiency but deferring resolving the issue for now")
	@Test public void test_aex_bfx() {
		// TJP: this is inefficient as it leaves the top x nodes unmerged.
		PredictionContext x1 = x();
		PredictionContext x2 = x();
		PredictionContext e = createSingleton(x1, 5);
		PredictionContext f = createSingleton(x2, 6);
		PredictionContext a = createSingleton(e, 1);
		PredictionContext b = createSingleton(f, 2);
		PredictionContext r = PredictionContext.merge(a, b, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s3[label=\"3\"];\n" +
			"  s4[label=\"*\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"2\"];\n" +
			"  s2->s3[label=\"6\"];\n" +
			"  s3->s4[label=\"9\"];\n" +
			"  s1->s3[label=\"5\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	// Array merges
	@Test public void test_A$_A$_fullctx() {
		ArrayPredictionContext A1 = array(PredictionContext.EMPTY);
		ArrayPredictionContext A2 = array(PredictionContext.EMPTY);
		PredictionContext r = PredictionContext.merge(A1, A2, fullCtx(), null);
		System.out.println(toDOTString(r, fullCtx()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"$\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, fullCtx()));
	}
	@Test public void test_Aab_Ac() { // a,b + c
		SingletonPredictionContext a = a();
		SingletonPredictionContext b = b();
		SingletonPredictionContext c = c();
		ArrayPredictionContext A1 = array(a, b);
		ArrayPredictionContext A2 = array(c);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s1[label=\"2\"];\n" +
			"  s0:p2->s1[label=\"3\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aa_Aa() {
		SingletonPredictionContext a1 = a();
		SingletonPredictionContext a2 = a();
		ArrayPredictionContext A1 = array(a1);
		ArrayPredictionContext A2 = array(a2);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aa_Abc() { // a + b,c
		SingletonPredictionContext a = a();
		SingletonPredictionContext b = b();
		SingletonPredictionContext c = c();
		ArrayPredictionContext A1 = array(a);
		ArrayPredictionContext A2 = array(b, c);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s1[label=\"2\"];\n" +
			"  s0:p2->s1[label=\"3\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aac_Ab() { // a,c + b
		SingletonPredictionContext a = a();
		SingletonPredictionContext b = b();
		SingletonPredictionContext c = c();
		ArrayPredictionContext A1 = array(a, c);
		ArrayPredictionContext A2 = array(b);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s1[label=\"2\"];\n" +
			"  s0:p2->s1[label=\"3\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aab_Aa() { // a,b + a
		ArrayPredictionContext A1 = array(a(), b());
		ArrayPredictionContext A2 = array(a());
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s1[label=\"2\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aab_Ab() { // a,b + b
		ArrayPredictionContext A1 = array(a(), b());
		ArrayPredictionContext A2 = array(b());
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s1[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s1[label=\"2\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aax_Aby() { // ax + by but in arrays
		SingletonPredictionContext a = createSingleton(x(), 1);
		SingletonPredictionContext b = createSingleton(y(), 2);
		ArrayPredictionContext A1 = array(a);
		ArrayPredictionContext A2 = array(b);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s3[label=\"*\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"2\"];\n" +
			"  s2->s3[label=\"10\"];\n" +
			"  s1->s3[label=\"9\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aax_Aay() { // ax + ay -> merged singleton a, array parent
		SingletonPredictionContext a1 = createSingleton(x(), 1);
		SingletonPredictionContext a2 = createSingleton(y(), 1);
		ArrayPredictionContext A1 = array(a1);
		ArrayPredictionContext A2 = array(a2);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[label=\"0\"];\n" +
			"  s1[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s2[label=\"*\"];\n" +
			"  s0->s1[label=\"1\"];\n" +
			"  s1:p0->s2[label=\"9\"];\n" +
			"  s1:p1->s2[label=\"10\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aaxc_Aayd() { // ax,c + ay,d -> merged a, array parent
		SingletonPredictionContext a1 = createSingleton(x(), 1);
		SingletonPredictionContext a2 = createSingleton(y(), 1);
		ArrayPredictionContext A1 = array(a1, c());
		ArrayPredictionContext A2 = array(a2, d());
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			"  s2[label=\"*\"];\n" +
			"  s1[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"3\"];\n" +
			"  s0:p2->s2[label=\"4\"];\n" +
			"  s1:p0->s2[label=\"9\"];\n" +
			"  s1:p1->s2[label=\"10\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aaubv_Acwdx() { // au,bv + cw,dx -> [a,b,c,d]->[u,v,w,x]
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b = createSingleton(v(), 2);
		SingletonPredictionContext c = createSingleton(w(), 3);
		SingletonPredictionContext d = createSingleton(x(), 4);
		ArrayPredictionContext A1 = array(a, b);
		ArrayPredictionContext A2 = array(c, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>|<p2>|<p3>\"];\n" +
			"  s4[label=\"4\"];\n" +
			"  s5[label=\"*\"];\n" +
			"  s3[label=\"3\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"2\"];\n" +
			"  s0:p2->s3[label=\"3\"];\n" +
			"  s0:p3->s4[label=\"4\"];\n" +
			"  s4->s5[label=\"9\"];\n" +
			"  s3->s5[label=\"8\"];\n" +
			"  s2->s5[label=\"7\"];\n" +
			"  s1->s5[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aaubv_Abvdx() { // au,bv + bv,dx -> [a,b,d]->[u,v,x]
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b1 = createSingleton(v(), 2);
		SingletonPredictionContext b2 = createSingleton(v(), 2);
		SingletonPredictionContext d = createSingleton(x(), 4);
		ArrayPredictionContext A1 = array(a, b1);
		ArrayPredictionContext A2 = array(b2, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			"  s3[label=\"3\"];\n" +
			"  s4[label=\"*\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"2\"];\n" +
			"  s0:p2->s3[label=\"4\"];\n" +
			"  s3->s4[label=\"9\"];\n" +
			"  s2->s4[label=\"7\"];\n" +
			"  s1->s4[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aaubv_Abwdx() { // au,bv + bw,dx -> [a,b,d]->[u,[v,w],x]
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b1 = createSingleton(v(), 2);
		SingletonPredictionContext b2 = createSingleton(w(), 2);
		SingletonPredictionContext d = createSingleton(x(), 4);
		ArrayPredictionContext A1 = array(a, b1);
		ArrayPredictionContext A2 = array(b2, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			"  s3[label=\"3\"];\n" +
			"  s4[label=\"*\"];\n" +
			"  s2[shape=record, label=\"<p0>|<p1>\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"2\"];\n" +
			"  s0:p2->s3[label=\"4\"];\n" +
			"  s3->s4[label=\"9\"];\n" +
			"  s2:p0->s4[label=\"7\"];\n" +
			"  s2:p1->s4[label=\"8\"];\n" +
			"  s1->s4[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aaubv_Abvdu() { // au,bv + bv,du -> [a,b,d]->[u,v,u]; u,v shared
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b1 = createSingleton(v(), 2);
		SingletonPredictionContext b2 = createSingleton(v(), 2);
		SingletonPredictionContext d = createSingleton(u(), 4);
		ArrayPredictionContext A1 = array(a, b1);
		ArrayPredictionContext A2 = array(b2, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>|<p2>\"];\n" +
			"  s2[label=\"2\"];\n" +
			"  s3[label=\"*\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s2[label=\"2\"];\n" +
			"  s0:p2->s1[label=\"4\"];\n" +
			"  s2->s3[label=\"7\"];\n" +
			"  s1->s3[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	@Test public void test_Aaubu_Acudu() { // au,bu + cu,du -> [a,b,c,d]->[u,u,u,u]
		SingletonPredictionContext a = createSingleton(u(), 1);
		SingletonPredictionContext b = createSingleton(u(), 2);
		SingletonPredictionContext c = createSingleton(u(), 3);
		SingletonPredictionContext d = createSingleton(u(), 4);
		ArrayPredictionContext A1 = array(a, b);
		ArrayPredictionContext A2 = array(c, d);
		PredictionContext r = PredictionContext.merge(A1, A2, rootIsWildcard(), null);
		System.out.println(toDOTString(r, rootIsWildcard()));
		String expecting =
			"digraph G {\n" +
			"rankdir=LR;\n" +
			"  s0[shape=record, label=\"<p0>|<p1>|<p2>|<p3>\"];\n" +
			"  s1[label=\"1\"];\n" +
			"  s2[label=\"*\"];\n" +
			"  s0:p0->s1[label=\"1\"];\n" +
			"  s0:p1->s1[label=\"2\"];\n" +
			"  s0:p2->s1[label=\"3\"];\n" +
			"  s0:p3->s1[label=\"4\"];\n" +
			"  s1->s2[label=\"6\"];\n" +
			"}\n";
		assertEquals(expecting, toDOTString(r, rootIsWildcard()));
	}
	// ------------ SUPPORT -------------------------
	// Each factory below builds a singleton context over EMPTY with a fixed return state:
	// a=1, b=2, c=3, d=4, u=6, v=7, w=8, x=9, y=10.
	protected SingletonPredictionContext a() {
		return createSingleton(PredictionContext.EMPTY, 1);
	}
	private SingletonPredictionContext b() {
		return createSingleton(PredictionContext.EMPTY, 2);
	}
	private SingletonPredictionContext c() {
		return createSingleton(PredictionContext.EMPTY, 3);
	}
	private SingletonPredictionContext d() {
		return createSingleton(PredictionContext.EMPTY, 4);
	}
	private SingletonPredictionContext u() {
		return createSingleton(PredictionContext.EMPTY, 6);
	}
	private SingletonPredictionContext v() {
		return createSingleton(PredictionContext.EMPTY, 7);
	}
	private SingletonPredictionContext w() {
		return createSingleton(PredictionContext.EMPTY, 8);
	}
	private SingletonPredictionContext x() {
		return createSingleton(PredictionContext.EMPTY, 9);
	}
	private SingletonPredictionContext y() {
		return createSingleton(PredictionContext.EMPTY, 10);
	}
	// Builds a singleton context with the given parent and return state (payload).
	public SingletonPredictionContext createSingleton(PredictionContext parent, int payload) {
		SingletonPredictionContext a = SingletonPredictionContext.create(parent, payload);
		return a;
	}
	// Wraps the given singleton contexts into one ArrayPredictionContext by collecting
	// their parents and return states.
	public ArrayPredictionContext array(SingletonPredictionContext... nodes) {
		PredictionContext[] parents = new PredictionContext[nodes.length];
		int[] invokingStates = new int[nodes.length];
		for (int i=0; i<nodes.length; i++) {
			parents[i] = nodes[i].parent;
			invokingStates[i] = nodes[i].returnState;
		}
		return new ArrayPredictionContext(parents, invokingStates);
	}
	/**
	 * Renders a prediction-context graph as a Graphviz DOT digraph. Nodes are numbered in
	 * discovery order; contexts with more than one parent are drawn as record nodes with one
	 * port per parent.
	 *
	 * @param context root of the graph to render
	 * @param rootIsWildcard when true the empty context is rendered as {@code *}, otherwise as {@code $}
	 * @return the DOT source for the graph
	 */
	private static String toDOTString(PredictionContext context, boolean rootIsWildcard) {
		StringBuilder nodes = new StringBuilder();
		StringBuilder edges = new StringBuilder();
		// Identity-based maps: structurally equal contexts that are distinct objects get distinct ids.
		Map<PredictionContext, PredictionContext> visited = new IdentityHashMap<PredictionContext, PredictionContext>();
		Map<PredictionContext, Integer> contextIds = new IdentityHashMap<PredictionContext, Integer>();
		Deque<PredictionContext> workList = new ArrayDeque<PredictionContext>();
		visited.put(context, context);
		contextIds.put(context, contextIds.size());
		workList.add(context);
		// Depth-first walk (push/pop); ids are assigned on first visit so numbering is deterministic.
		while (!workList.isEmpty()) {
			PredictionContext current = workList.pop();
			nodes.append("  s").append(contextIds.get(current)).append('[');
			if (current.size() > 1) {
				nodes.append("shape=record, ");
			}
			nodes.append("label=\"");
			if (current.isEmpty()) {
				nodes.append(rootIsWildcard ? '*' : '$');
			} else if (current.size() > 1) {
				for (int i = 0; i < current.size(); i++) {
					if (i > 0) {
						nodes.append('|');
					}
					nodes.append("<p").append(i).append('>');
					if (current.getReturnState(i) == PredictionContext.EMPTY_RETURN_STATE) {
						nodes.append(rootIsWildcard ? '*' : '$');
					}
				}
			} else {
				nodes.append(contextIds.get(current));
			}
			nodes.append("\"];\n");
			if (current.isEmpty()) {
				continue;
			}
			for (int i = 0; i < current.size(); i++) {
				if (current.getReturnState(i) == PredictionContext.EMPTY_RETURN_STATE) {
					continue;
				}
				if (visited.put(current.getParent(i), current.getParent(i)) == null) {
					contextIds.put(current.getParent(i), contextIds.size());
					workList.push(current.getParent(i));
				}
				edges.append("  s").append(contextIds.get(current));
				if (current.size() > 1) {
					edges.append(":p").append(i);
				}
				edges.append("->");
				edges.append('s').append(contextIds.get(current.getParent(i)));
				edges.append("[label=\"").append(current.getReturnState(i)).append("\"]");
				edges.append(";\n");
			}
		}
		StringBuilder builder = new StringBuilder();
		builder.append("digraph G {\n");
		builder.append("rankdir=LR;\n");
		builder.append(nodes);
		builder.append(edges);
		builder.append("}\n");
		return builder.toString();
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.lucene.property;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Collections.singletonList;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.TYPE_PROPERTY_NAME;
import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.TYPE_LUCENE;
import static org.apache.jackrabbit.oak.plugins.index.lucene.property.HybridPropertyIndexUtil.PROPERTY_INDEX;
import static org.apache.jackrabbit.oak.plugins.index.lucene.property.HybridPropertyIndexUtil.simplePropertyIndex;
import static org.apache.jackrabbit.oak.plugins.index.lucene.property.HybridPropertyIndexUtil.uniquePropertyIndex;
import static org.apache.jackrabbit.oak.spi.state.NodeStateUtils.getNode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.commit.AnnotatingConflictHandler;
import org.apache.jackrabbit.oak.plugins.commit.ConflictHook;
import org.apache.jackrabbit.oak.plugins.commit.ConflictValidatorProvider;
import org.apache.jackrabbit.oak.plugins.index.AsyncIndexInfoService;
import org.apache.jackrabbit.oak.plugins.index.IndexPathService;
import org.apache.jackrabbit.oak.plugins.index.IndexUtils;
import org.apache.jackrabbit.oak.spi.commit.CommitContext;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.CompositeEditorProvider;
import org.apache.jackrabbit.oak.spi.commit.CompositeHook;
import org.apache.jackrabbit.oak.spi.commit.EditorHook;
import org.apache.jackrabbit.oak.spi.commit.ResetCommitAttributeHook;
import org.apache.jackrabbit.oak.spi.commit.SimpleCommitContext;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStateUtils;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.apache.jackrabbit.oak.stats.MeterStats;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.apache.jackrabbit.oak.stats.StatsOptions;
import org.apache.jackrabbit.oak.stats.TimerStats;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableMap;
/**
 * Periodic cleanup task for hybrid Lucene property indexes: once the async
 * indexer has caught up, stale property-index buckets are switched out and
 * removed, and expired unique-index entries are purged.
 *
 * <p>Not thread-safe; intended to be invoked from a single scheduler thread.
 */
public class PropertyIndexCleaner implements Runnable{
    private final Logger log = LoggerFactory.getLogger(getClass());
    private final NodeStore nodeStore;
    private final IndexPathService indexPathService;
    private final AsyncIndexInfoService asyncIndexInfoService;

    // Entries in unique indexes are purged once they are older than this
    // threshold relative to the async indexer checkpoint (default: 1 hour).
    private UniqueIndexCleaner uniqueIndexCleaner = new UniqueIndexCleaner(TimeUnit.HOURS, 1);

    // Async-lane state observed on the previous run; used to detect no-op runs.
    private Map<String, Long> lastAsyncInfo = Collections.emptyMap();

    private final TimerStats cleanupTime;
    private final MeterStats noopMeter;

    // When true, old buckets are removed via RecursiveDelete (batched commits)
    // instead of a single bulk merge.
    private boolean recursiveDelete;

    /**
     * Creates a cleaner.
     *
     * @param nodeStore store to purge index content from (must not be null)
     * @param indexPathService provides the list of index definition paths
     * @param asyncIndexInfoService provides per-lane "indexed up to" times
     * @param statsProvider used to register cleanup timing/no-op metrics
     */
    public PropertyIndexCleaner(NodeStore nodeStore, IndexPathService indexPathService,
                                AsyncIndexInfoService asyncIndexInfoService,
                                StatisticsProvider statsProvider) {
        this.nodeStore = checkNotNull(nodeStore);
        this.indexPathService = checkNotNull(indexPathService);
        this.asyncIndexInfoService = checkNotNull(asyncIndexInfoService);

        this.cleanupTime = statsProvider.getTimer("HYBRID_PROPERTY_CLEANER", StatsOptions.METRICS_ONLY);
        this.noopMeter = statsProvider.getMeter("HYBRID_PROPERTY_NOOP", StatsOptions.METRICS_ONLY);
    }

    /** Scheduler entry point; failures are logged and swallowed so the periodic task keeps running. */
    @Override
    public void run() {
        try{
            performCleanup(false);
        } catch (Exception e) {
            log.warn("Cleanup run failed with error", e);
        }
    }

    /**
     * Perform some cleanup.
     *
     * <p>Each path must point to a hidden node (name starting with ':');
     * non-hidden paths are skipped with a warning.
     *
     * @param paths the list of paths (comma separated)
     * @param batchSize the bach size
     * @param sleepPerBatch the number of milliseconds to sleep per batch
     * @param maxRemoveCount the maximum number of nodes to remove per path
     * @return the number of nodes removed
     */
    public int performCleanup(String paths, int batchSize, int sleepPerBatch, int maxRemoveCount) throws CommitFailedException {
        String[] list = paths.split(",");
        int numOfNodesDeleted = 0;
        for(String s : list) {
            log.info("Cleanup of {}", s);
            if (!NodeStateUtils.isHidden(PathUtils.getName(s))) {
                // Safety guard: only hidden (index-internal) nodes may be purged.
                log.warn("Not a hidden node: {}", s);
                continue;
            }
            RecursiveDelete rd = new RecursiveDelete(nodeStore, createCommitHook(),
                    PropertyIndexCleaner::createCommitInfo);
            rd.setBatchSize(batchSize);
            rd.setSleepPerBatch(sleepPerBatch);
            rd.setMaxRemoveCount(maxRemoveCount);
            rd.run(singletonList(s));
            numOfNodesDeleted += rd.getNumRemoved();
        }
        return numOfNodesDeleted;
    }

    /**
     * Performs the cleanup run
     *
     * @param forceCleanup if true then clean up would attempted even if no change
     *                     is found in async indexer state
     * @return statistics of the run; {@code cleanupPerformed} is false when the
     *         run was skipped because the async state did not change
     */
    public CleanupStats performCleanup(boolean forceCleanup) throws CommitFailedException {
        CleanupStats stats = new CleanupStats();
        Stopwatch w = Stopwatch.createStarted();
        Map<String, Long> asyncInfo = asyncIndexInfoService.getIndexedUptoPerLane();
        if (lastAsyncInfo.equals(asyncInfo) && !forceCleanup) {
            // Nothing indexed since last run - purging again would be a no-op.
            log.debug("No change found in async state from last run {}. Skipping the run", asyncInfo);
            noopMeter.mark();
            return stats;
        }

        stats.cleanupPerformed = true;

        List<String> syncIndexes = getSyncIndexPaths();
        // Step 1: rotate buckets and collect what is now purgeable.
        IndexInfo indexInfo = switchBucketsAndCollectIndexData(syncIndexes, asyncInfo, stats);

        // Step 2: drop buckets that are no longer current.
        purgeOldBuckets(indexInfo.oldBucketPaths, stats);
        // Step 3: remove expired entries from unique indexes.
        purgeOldUniqueIndexEntries(indexInfo.uniqueIndexPaths, stats);
        lastAsyncInfo = asyncInfo;

        if (w.elapsed(TimeUnit.MINUTES) > 5) {
            // Long-running cleanups are surfaced at INFO level.
            log.info("Property index cleanup done in {}. {}", w, stats);
        } else {
            log.debug("Property index cleanup done in {}. {}", w, stats);
        }

        cleanupTime.update(w.elapsed(TimeUnit.NANOSECONDS), TimeUnit.NANOSECONDS);
        return stats;
    }

    /**
     * Specifies the threshold for created time such that only those entries
     * in unique indexes are purged which have
     *
     *     async indexer time - creation time &gt; threshold
     *
     * @param unit time unit
     * @param time time value in given unit
     */
    public void setCreatedTimeThreshold(TimeUnit unit, long time) {
        uniqueIndexCleaner = new UniqueIndexCleaner(unit, time);
    }

    public boolean isRecursiveDelete() {
        return recursiveDelete;
    }

    public void setRecursiveDelete(boolean recursiveDelete) {
        this.recursiveDelete = recursiveDelete;
    }

    /**
     * Returns the paths of all Lucene indexes that have a synchronous
     * property-index part (i.e. a {@code PROPERTY_INDEX} child node).
     * Package-private for testing.
     */
    List<String> getSyncIndexPaths() {
        List<String> indexPaths = new ArrayList<>();
        NodeState root = nodeStore.getRoot();
        for (String indexPath : indexPathService.getIndexPaths()) {
            NodeState idx = getNode(root, indexPath);
            if (TYPE_LUCENE.equals(idx.getString(TYPE_PROPERTY_NAME))
                    && idx.hasChildNode(PROPERTY_INDEX)) {
                indexPaths.add(indexPath);
            }
        }
        return indexPaths;
    }

    /**
     * For every sync index: switches simple-property-index buckets forward
     * (collecting the retired bucket paths) and records unique-index paths
     * together with their lane's "indexed up to" time. Merges the bucket
     * switch in a single commit if anything changed.
     */
    private IndexInfo switchBucketsAndCollectIndexData(List<String> indexPaths,
                                                       Map<String, Long> asyncInfo, CleanupStats stats)
            throws CommitFailedException {
        IndexInfo indexInfo = new IndexInfo();
        NodeState root = nodeStore.getRoot();
        NodeBuilder builder = root.builder();
        boolean modified = false;
        for (String indexPath : indexPaths) {
            NodeState idx = getNode(root, indexPath);
            NodeBuilder idxb = child(builder, indexPath);
            String laneName = IndexUtils.getAsyncLaneName(idx, indexPath);
            Long lastIndexedTo = asyncInfo.get(laneName);

            if (lastIndexedTo == null) {
                // Without the lane checkpoint we cannot safely decide what is stale.
                log.warn("Not able to determine async index info for lane {}. " +
                        "Known lanes {}", laneName, asyncInfo.keySet());
                continue;
            }

            NodeState propertyIndexNode = idx.getChildNode(PROPERTY_INDEX);
            NodeBuilder propIndexNodeBuilder = idxb.getChildNode(PROPERTY_INDEX);

            for (ChildNodeEntry cne : propertyIndexNode.getChildNodeEntries()) {
                NodeState propIdxState = cne.getNodeState();
                String propName = cne.getName();
                if (simplePropertyIndex(propIdxState)) {
                    NodeBuilder propIdx = propIndexNodeBuilder.getChildNode(propName);
                    BucketSwitcher bs = new BucketSwitcher(propIdx);

                    modified |= bs.switchBucket(lastIndexedTo);

                    for (String bucketName : bs.getOldBuckets()) {
                        String bucketPath = PathUtils.concat(indexPath, PROPERTY_INDEX, propName, bucketName);
                        indexInfo.oldBucketPaths.add(bucketPath);
                        stats.purgedIndexPaths.add(indexPath);
                    }
                } else if (uniquePropertyIndex(propIdxState)) {
                    String indexNodePath = PathUtils.concat(indexPath, PROPERTY_INDEX, propName);
                    indexInfo.uniqueIndexPaths.put(indexNodePath, lastIndexedTo);
                }
            }
        }

        if (modified) {
            merge(builder);
        }
        return indexInfo;
    }

    /**
     * Removes the given retired bucket nodes - either recursively in batches
     * (when {@link #recursiveDelete} is enabled) or via one bulk merge.
     */
    private void purgeOldBuckets(List<String> bucketPaths, CleanupStats stats) throws CommitFailedException {
        if (bucketPaths.isEmpty()) {
            return;
        }

        if (recursiveDelete) {
            RecursiveDelete rd = new RecursiveDelete(nodeStore, createCommitHook(),
                    PropertyIndexCleaner::createCommitInfo);
            rd.run(bucketPaths);
            stats.numOfNodesDeleted += rd.getNumRemoved();
        } else {
            NodeState root = nodeStore.getRoot();
            NodeBuilder builder = root.builder();

            for (String path : bucketPaths) {
                NodeBuilder bucket = child(builder, path);
                bucket.remove();
            }

            merge(builder);
        }
        stats.purgedBucketCount = bucketPaths.size();
    }

    /**
     * Purges expired entries from each unique index, keyed by the lane's
     * "indexed up to" time. Merges only when something was actually removed.
     */
    private void purgeOldUniqueIndexEntries(Map<String, Long> asyncInfo, CleanupStats stats) throws CommitFailedException {
        NodeState root = nodeStore.getRoot();
        NodeBuilder builder = root.builder();

        for (Map.Entry<String, Long> e : asyncInfo.entrySet()) {
            String indexNodePath = e.getKey();
            NodeBuilder idxb = child(builder, indexNodePath);
            int removalCount = uniqueIndexCleaner.clean(idxb, e.getValue());
            if (removalCount > 0) {
                // indexNodePath is <indexPath>/<PROPERTY_INDEX>/<propName>;
                // two ancestors up is the index definition path.
                stats.purgedIndexPaths.add(PathUtils.getAncestorPath(indexNodePath, 2));
                log.debug("Removed [{}] entries from [{}]", removalCount, indexNodePath);
            }
            stats.uniqueIndexEntryRemovalCount += removalCount;
        }

        if (stats.uniqueIndexEntryRemovalCount > 0) {
            merge(builder);
        }
    }

    private void merge(NodeBuilder builder) throws CommitFailedException {
        //TODO Configure validator
        CompositeHook hooks = createCommitHook();
        nodeStore.merge(builder, hooks, createCommitInfo());
    }

    /** Commit hook chain used for all merges performed by this cleaner. */
    private CompositeHook createCommitHook() {
        return new CompositeHook(
                ResetCommitAttributeHook.INSTANCE,
                new ConflictHook(new AnnotatingConflictHandler()),
                new EditorHook(CompositeEditorProvider.compose(singletonList(new ConflictValidatorProvider())))
        );
    }

    private static CommitInfo createCommitInfo() {
        Map<String, Object> info = ImmutableMap.of(CommitContext.NAME, new SimpleCommitContext());
        return new CommitInfo(CommitInfo.OAK_UNKNOWN, CommitInfo.OAK_UNKNOWN, info);
    }

    /**
     * Resolves {@code path} below {@code nb} without creating intermediate
     * nodes (uses {@code getChildNode}, which returns non-existent builders
     * rather than adding children).
     */
    private static NodeBuilder child(NodeBuilder nb, String path) {
        for (String name : PathUtils.elements(checkNotNull(path))) {
            //Use getChildNode to avoid creating new entries by default
            nb = nb.getChildNode(name);
        }
        return nb;
    }

    /** Data collected while switching buckets: what to purge next. */
    private static final class IndexInfo {
        final List<String> oldBucketPaths = new ArrayList<>();

        /* indexPath, lastIndexedTo */
        final Map<String, Long> uniqueIndexPaths = new HashMap<>();
    }

    /** Mutable statistics accumulated during one cleanup run. */
    public static class CleanupStats {
        public int uniqueIndexEntryRemovalCount;
        public int purgedBucketCount;
        public Set<String> purgedIndexPaths = new HashSet<>();
        public boolean cleanupPerformed;
        public int numOfNodesDeleted;

        @Override
        public String toString() {
            String nodeCountMsg = numOfNodesDeleted > 0 ? String.format("(%d nodes)", numOfNodesDeleted) : "";
            return String.format("Removed %d index buckets %s, %d unique index entries " +
                    "from indexes %s", purgedBucketCount, nodeCountMsg, uniqueIndexEntryRemovalCount, purgedIndexPaths);
        }
    }
}
| |
package tars.logic.commands;
import java.time.DateTimeException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import tars.commons.core.Messages;
import tars.commons.exceptions.DuplicateTaskException;
import tars.commons.exceptions.IllegalValueException;
import tars.commons.util.DateTimeUtil;
import tars.commons.util.StringUtil;
import tars.model.tag.Tag;
import tars.model.tag.UniqueTagList;
import tars.model.task.DateTime;
import tars.model.task.DateTime.IllegalDateException;
import tars.model.task.Name;
import tars.model.task.Priority;
import tars.model.task.Status;
import tars.model.task.Task;
import tars.model.task.UniqueTaskList.TaskNotFoundException;
// @@author A0140022H
/**
* Adds a task to tars.
*/
public class AddCommand extends UndoableCommand {

    public static final String COMMAND_WORD = "add";

    public static final String MESSAGE_USAGE = COMMAND_WORD
            + ": Adds a task to tars.\n"
            + "Parameters: <TASK_NAME> [/dt DATETIME] [/p PRIORITY] [/t TAG_NAME ...] [/r NUM_TIMES FREQUENCY]\n "
            + "Example: " + COMMAND_WORD
            + " cs2103 project meeting /dt 05/09/2016 1400 to 06/09/2016 2200 /p h /t project /r 2 every week";

    public static final String MESSAGE_SUCCESS = "New task added: %1$s";
    public static final String MESSAGE_UNDO = "Removed %1$s";
    public static final String MESSAGE_REDO = "Added %1$s";

    private static final int DATETIME_INDEX_OF_ENDDATE = 1;
    private static final int DATETIME_INDEX_OF_STARTDATE = 0;
    private static final int DATETIME_EMPTY_DATE = 0;

    private static final int ADDTASK_FIRST_ITERATION = 0;
    private static final int ADDTASK_DEFAULT_NUMTASK = 1;
    private static final String ADDTASK_STRING_EMPTY = "";
    private static final String ADDTASK_STRING_NEWLINE = "\n";

    // A recurring argument has the shape [NUM_TIMES, "every", FREQUENCY];
    // length > 1 therefore means a recurrence was actually supplied.
    private static final int RECURRINGSTRING_NOT_EMPTY = 1;
    private static final int RECURRINGSTRING_INDEX_OF_NUMTASK = 0;
    private static final int RECURRINGSTRING_INDEX_OF_FREQUENCY = 2;

    // Last task created by addToTaskArray; also used for the undo message.
    private Task toAdd;
    // All tasks created by this command (one per recurrence).
    private ArrayList<Task> taskArray;
    // Accumulated warning text for tasks that clash with the new date times.
    private String conflictingTaskList = "";

    // @@author A0140022H
    /**
     * Convenience constructor using raw values.
     *
     * @param name raw task name
     * @param dateTime two-element array: [startDate, endDate]
     * @param priority raw priority string
     * @param tags raw tag names (each must be a valid tag)
     * @param recurringString recurrence spec [NUM_TIMES, "every", FREQUENCY],
     *        or null/short array for a non-recurring task
     * @throws IllegalValueException if any of the raw values are invalid
     * @throws DateTimeException if given dateTime string is invalid.
     */
    public AddCommand(String name, String[] dateTime, String priority,
            Set<String> tags, String[] recurringString)
            throws IllegalValueException, DateTimeException {
        taskArray = new ArrayList<Task>();
        final Set<Tag> tagSet = new HashSet<>();
        for (String tagName : tags) {
            tagSet.add(new Tag(tagName));
        }
        addToTaskArray(name, dateTime, priority, recurringString, tagSet);
    }
    // @@author

    // @@author A0140022H
    /**
     * Creates one task per recurrence and stores them in {@link #taskArray}.
     * For every iteration after the first, the start/end dates are shifted
     * forward by the recurrence frequency.
     */
    private void addToTaskArray(String name, String[] dateTime, String priority,
            String[] recurringString, final Set<Tag> tagSet)
            throws IllegalValueException, IllegalDateException {
        int numTask = ADDTASK_DEFAULT_NUMTASK;
        if (recurringString != null
                && recurringString.length > RECURRINGSTRING_NOT_EMPTY) {
            numTask = Integer.parseInt(
                    recurringString[RECURRINGSTRING_INDEX_OF_NUMTASK]);
        }
        for (int i = ADDTASK_FIRST_ITERATION; i < numTask; i++) {
            if (i != ADDTASK_FIRST_ITERATION) {
                if (recurringString != null
                        && recurringString.length > RECURRINGSTRING_NOT_EMPTY) {
                    modifyDateTime(dateTime, recurringString,
                            DATETIME_INDEX_OF_STARTDATE);
                    modifyDateTime(dateTime, recurringString,
                            DATETIME_INDEX_OF_ENDDATE);
                }
            }
            this.toAdd = new Task(new Name(name),
                    new DateTime(dateTime[DATETIME_INDEX_OF_STARTDATE],
                            dateTime[DATETIME_INDEX_OF_ENDDATE]),
                    new Priority(priority), new Status(),
                    new UniqueTagList(tagSet));
            taskArray.add(toAdd);
        }
    }
    // @@author

    // @@author A0140022H
    /**
     * Shifts the date at {@code dateTimeIndex} forward by the recurrence
     * frequency, skipping empty/absent dates.
     */
    private void modifyDateTime(String[] dateTime, String[] recurringString,
            int dateTimeIndex) {
        if (dateTime[dateTimeIndex] != null
                && dateTime[dateTimeIndex].length() > DATETIME_EMPTY_DATE) {
            dateTime[dateTimeIndex] = DateTimeUtil.modifyDate(
                    dateTime[dateTimeIndex],
                    recurringString[RECURRINGSTRING_INDEX_OF_FREQUENCY]);
        }
    }
    // @@author

    // @@author A0140022H
    @Override
    public CommandResult execute() {
        assert model != null;
        try {
            addTasks();
            model.getUndoableCmdHist().push(this);
            return new CommandResult(messageSummary());
        } catch (DuplicateTaskException e) {
            // NOTE(review): if a duplicate is hit partway through a recurring
            // add, earlier tasks of the series remain added - confirm whether
            // a rollback is expected here.
            return new CommandResult(Messages.MESSAGE_DUPLICATE_TASK);
        }
    }
    // @@author

    // @@author A0140022H
    /**
     * Adds every created task to the model, collecting conflict warnings.
     * For a single task with an end date, the filtered list is switched to
     * that task's date range.
     */
    private void addTasks() throws DuplicateTaskException {
        for (Task toAdd : taskArray) {
            conflictingTaskList +=
                    model.getTaskConflictingDateTimeWarningMessage(
                            toAdd.getDateTime());
            model.addTask(toAdd);
            if (taskArray.size() == ADDTASK_DEFAULT_NUMTASK && ((toAdd
                    .getDateTime().getStartDate() == null
                    && toAdd.getDateTime().getEndDate() != null)
                    || (toAdd.getDateTime().getStartDate() != null
                            && toAdd.getDateTime().getEndDate() != null))) {
                model.updateFilteredTaskListUsingDate(toAdd.getDateTime());
            }
        }
    }

    // @@author A0139924W
    @Override
    public CommandResult undo() {
        assert model != null;
        try {
            for (Task toAdd : taskArray) {
                model.deleteTask(toAdd);
            }
            // NOTE(review): the message names only the field 'toAdd' (the last
            // task of a recurring series) - confirm this is intended.
            return new CommandResult(String.format(UndoCommand.MESSAGE_SUCCESS,
                    String.format(MESSAGE_UNDO, toAdd)));
        } catch (TaskNotFoundException e) {
            return new CommandResult(
                    String.format(UndoCommand.MESSAGE_UNSUCCESS,
                            Messages.MESSAGE_TASK_CANNOT_BE_FOUND));
        }
    }

    // @@author A0139924W
    @Override
    public CommandResult redo() {
        assert model != null;
        try {
            for (Task toAdd : taskArray) {
                model.addTask(toAdd);
            }
            return new CommandResult(String.format(RedoCommand.MESSAGE_SUCCESS,
                    messageSummary()));
        } catch (DuplicateTaskException e) {
            return new CommandResult(
                    String.format(RedoCommand.MESSAGE_UNSUCCESS,
                            Messages.MESSAGE_DUPLICATE_TASK));
        }
    }

    // @@author A0140022H
    /**
     * Builds the user-facing summary: one success line per added task, plus a
     * conflict warning section when any conflicts were detected.
     * Uses a StringBuilder instead of repeated String concatenation in a loop.
     */
    private String messageSummary() {
        StringBuilder summary = new StringBuilder(ADDTASK_STRING_EMPTY);
        for (Task toAdd : taskArray) {
            summary.append(String.format(MESSAGE_SUCCESS,
                    toAdd + ADDTASK_STRING_NEWLINE));
        }
        if (!conflictingTaskList.isEmpty()) {
            summary.append(StringUtil.STRING_NEWLINE)
                    .append(Messages.MESSAGE_CONFLICTING_TASKS_WARNING)
                    .append(conflictingTaskList);
        }
        return summary.toString();
    }
    // @@author
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.schedule;
import it.sauronsoftware.cron4j.InvalidPatternException;
import it.sauronsoftware.cron4j.Predictor;
import it.sauronsoftware.cron4j.Scheduler;
import it.sauronsoftware.cron4j.SchedulingPattern;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteInterruptedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.processors.timeout.GridTimeoutObjectAdapter;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.internal.util.future.IgniteFutureImpl;
import org.apache.ignite.internal.util.lang.GridClosureException;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgniteFutureCancelledException;
import org.apache.ignite.lang.IgniteFutureTimeoutException;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.scheduler.SchedulerFuture;
import org.jetbrains.annotations.Nullable;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
/**
* Implementation of {@link org.apache.ignite.scheduler.SchedulerFuture} interface.
*/
class ScheduleFutureImpl<R> implements SchedulerFuture<R> {
    /** Empty time array. */
    private static final long[] EMPTY_TIMES = new long[] {};

    /** No next execution time constant. */
    private static final long NO_NEXT_EXECUTION_TIME = 0;

    /** Identifier generated by cron scheduler. */
    private volatile String id;

    /** Scheduling pattern. */
    private String pat;

    /** Scheduling delay in seconds parsed from pattern. */
    private int delay;

    /** Number of maximum task calls parsed from pattern ({@code 0} means unlimited). */
    private int maxCalls;

    /** Mere cron pattern parsed from extended pattern. */
    private String cron;

    /** Cancelled flag. Guarded by {@link #mux}. */
    private boolean cancelled;

    /** Done flag. Guarded by {@link #mux}. */
    private boolean done;

    /** Task calls counter. Guarded by {@link #mux}. */
    private int callCnt;

    /** De-schedule flag. */
    private final AtomicBoolean descheduled = new AtomicBoolean(false);

    /** Listeners. Guarded by {@link #mux}. */
    private Collection<IgniteInClosure<? super IgniteFuture<R>>> lsnrs = new ArrayList<>(1);

    /** Statistics. */
    @SuppressWarnings({"FieldAccessedSynchronizedAndUnsynchronized"})
    private GridScheduleStatistics stats = new GridScheduleStatistics();

    /** Latch synchronizing fetch of the next execution result. Replaced after each execution. */
    @GridToStringExclude
    private CountDownLatch resLatch = new CountDownLatch(1);

    /** Cron scheduler. */
    @GridToStringExclude
    private Scheduler sched;

    /** Processor registry. */
    @GridToStringExclude
    private GridKernalContext ctx;

    /** Execution task. */
    @GridToStringExclude
    private Callable<R> task;

    /** Result of the last execution of scheduled task. */
    @GridToStringExclude
    private R lastRes;

    /** Keeps last execution exception or {@code null} if the last execution was successful. */
    @GridToStringExclude
    private Throwable lastErr;

    /** Listener call count. Used to avoid double-notifying a newly added listener. */
    private int lastLsnrExecCnt;

    /** Mutex. */
    private final Object mux = new Object();

    /** Grid logger. */
    private IgniteLogger log;
    /** Runnable object to schedule with cron scheduler. */
    private final Runnable run = new Runnable() {
        /**
         * Marks the start of one execution under {@link #mux}.
         *
         * @return Latch to count down when this execution ends, or {@code null}
         *      if the run must be skipped (future is done/cancelled, the
         *      previous execution has not finished, or max calls was reached).
         */
        @Nullable private CountDownLatch onStart() {
            synchronized (mux) {
                if (done || cancelled)
                    return null;

                if (stats.isRunning()) {
                    U.warn(log, "Task got scheduled while previous was not finished: " + this);

                    return null;
                }

                if (callCnt == maxCalls && maxCalls > 0)
                    return null;

                callCnt++;

                stats.onStart();

                assert resLatch != null;

                // Capture the latch of *this* execution; onEnd() swaps in a new one.
                return resLatch;
            }
        }

        @SuppressWarnings({"ErrorNotRethrown"})
        @Override public void run() {
            CountDownLatch latch = onStart();

            if (latch == null)
                return;

            R res = null;

            Throwable err = null;

            try {
                res = task.call();
            }
            catch (Exception e) {
                err = e;
            }
            catch (Error e) {
                // Errors are recorded as the execution result but also logged,
                // since they typically indicate a serious problem.
                err = e;

                U.error(log, "Error occurred while executing scheduled task: " + this, e);
            }
            finally {
                // onEnd() returns false when this future must not run again;
                // in that case remove it from the cron scheduler.
                if (!onEnd(latch, res, err, false))
                    deschedule();
            }
        }
    };
    /**
     * Creates descriptor for task scheduling. To start scheduling call {@link #schedule(Callable)}.
     *
     * @param sched Cron scheduler.
     * @param ctx Kernal context.
     * @param pat Cron pattern (optionally prefixed with "{delay,maxCalls}").
     */
    ScheduleFutureImpl(Scheduler sched, GridKernalContext ctx, String pat) {
        assert sched != null;
        assert ctx != null;
        assert pat != null;

        this.sched = sched;
        this.ctx = ctx;
        this.pat = pat.trim();

        log = ctx.log(getClass());

        try {
            parsePatternParameters();
        }
        catch (IgniteCheckedException e) {
            // Complete the future immediately with the parse error;
            // schedule() then becomes a no-op because isDone() is true.
            onEnd(resLatch, null, e, true);
        }
    }
    /**
     * Records the result of one execution (or an init error), swaps the result
     * latch, and releases waiters. Listener notification happens outside the
     * lock in the {@code finally} block.
     *
     * @param latch Latch.
     * @param res Result.
     * @param err Error.
     * @param initErr Init error flag.
     * @return {@code False} if future should be unscheduled.
     */
    private boolean onEnd(CountDownLatch latch, R res, Throwable err, boolean initErr) {
        assert latch != null;

        boolean notifyLsnr = false;

        CountDownLatch resLatchCp = null;

        try {
            synchronized (mux) {
                lastRes = res;
                lastErr = err;

                if (initErr) {
                    assert err != null;

                    notifyLsnr = true;
                }
                else {
                    stats.onEnd();

                    int cnt = stats.getExecutionCount();

                    // Notify listeners only once per completed execution.
                    if (lastLsnrExecCnt != cnt) {
                        notifyLsnr = true;

                        lastLsnrExecCnt = cnt;
                    }
                }

                if ((callCnt == maxCalls && maxCalls > 0) || cancelled || initErr) {
                    done = true;

                    // Keep a copy so waiters blocked on the old latch get released,
                    // and null out resLatch so no one blocks on it again.
                    resLatchCp = resLatch;

                    resLatch = null;

                    return false;
                }

                // Fresh latch for the next execution's result.
                resLatch = new CountDownLatch(1);

                return true;
            }
        }
        finally {
            // Unblock all get() invocations.
            latch.countDown();

            // Make sure that none will be blocked on new latch if this
            // future will not be executed any more.
            if (resLatchCp != null)
                resLatchCp.countDown();

            if (notifyLsnr)
                notifyListeners(res, err);
        }
    }
/**
* Sets execution task.
*
* @param task Execution task.
*/
void schedule(Callable<R> task) {
assert task != null;
assert this.task == null;
// Done future on this step means that there was error on init.
if (isDone())
return;
this.task = task;
((IgniteScheduleProcessor)ctx.schedule()).onScheduled(this);
if (delay > 0) {
// Schedule after delay.
ctx.timeout().addTimeoutObject(new GridTimeoutObjectAdapter(delay * 1000) {
@Override public void onTimeout() {
assert id == null;
try {
id = sched.schedule(cron, run);
}
catch (InvalidPatternException e) {
// This should never happen as we validated the pattern during parsing.
e.printStackTrace();
assert false : "Invalid scheduling pattern: " + cron;
}
}
});
}
else {
assert id == null;
try {
id = sched.schedule(cron, run);
}
catch (InvalidPatternException e) {
// This should never happen as we validated the pattern during parsing.
e.printStackTrace();
assert false : "Invalid scheduling pattern: " + cron;
}
}
}
    /**
     * De-schedules scheduled task. The CAS on {@link #descheduled} guarantees
     * the cron entry is removed and the processor notified exactly once.
     */
    void deschedule() {
        if (descheduled.compareAndSet(false, true)) {
            sched.deschedule(id);

            ((IgniteScheduleProcessor)ctx.schedule()).onDescheduled(this);
        }
    }
/**
* Parse delay, number of task calls and mere cron expression from extended pattern
* that looks like "{n1,n2} * * * * *".
* @throws IgniteCheckedException Thrown if pattern is invalid.
*/
private void parsePatternParameters() throws IgniteCheckedException {
assert pat != null;
String regEx = "(\\{(\\*|\\d+),\\s*(\\*|\\d+)\\})?(.*)";
Matcher matcher = Pattern.compile(regEx).matcher(pat.trim());
if (matcher.matches()) {
String delayStr = matcher.group(2);
if (delayStr != null)
if ("*".equals(delayStr))
delay = 0;
else
try {
delay = Integer.valueOf(delayStr);
}
catch (NumberFormatException e) {
throw new IgniteCheckedException("Invalid delay parameter in schedule pattern [delay=" +
delayStr + ", pattern=" + pat + ']', e);
}
String numOfCallsStr = matcher.group(3);
if (numOfCallsStr != null) {
int maxCalls0;
if ("*".equals(numOfCallsStr))
maxCalls0 = 0;
else {
try {
maxCalls0 = Integer.valueOf(numOfCallsStr);
}
catch (NumberFormatException e) {
throw new IgniteCheckedException("Invalid number of calls parameter in schedule pattern [numOfCalls=" +
numOfCallsStr + ", pattern=" + pat + ']', e);
}
if (maxCalls0 <= 0)
throw new IgniteCheckedException("Number of calls must be greater than 0 or must be equal to \"*\"" +
" in schedule pattern [numOfCalls=" + maxCalls0 + ", pattern=" + pat + ']');
}
synchronized (mux) {
maxCalls = maxCalls0;
}
}
cron = matcher.group(4);
if (cron != null)
cron = cron.trim();
// Cron expression should never be empty and should be of correct format.
if (cron.isEmpty() || !SchedulingPattern.validate(cron))
throw new IgniteCheckedException("Invalid cron expression in schedule pattern: " + pat);
}
else
throw new IgniteCheckedException("Invalid schedule pattern: " + pat);
}
    /** {@inheritDoc} */
    @Override public String pattern() {
        return pat;
    }

    /** {@inheritDoc} Returns {@code null} until the task is registered with the cron scheduler. */
    @Override public String id() {
        return id;
    }
/** {@inheritDoc} */
@Override public long[] nextExecutionTimes(int cnt, long start) {
assert cnt > 0;
assert start > 0;
if (isDone() || isCancelled())
return EMPTY_TIMES;
synchronized (mux) {
if (maxCalls > 0)
cnt = Math.min(cnt, maxCalls);
}
long[] times = new long[cnt];
if (start < createTime() + delay * 1000)
start = createTime() + delay * 1000;
SchedulingPattern ptrn = new SchedulingPattern(cron);
Predictor p = new Predictor(ptrn, start);
for (int i = 0; i < cnt; i++)
times[i] = p.nextMatchingTime();
return times;
}
/** {@inheritDoc} */
@Override public long nextExecutionTime() {
long[] execTimes = nextExecutionTimes(1, U.currentTimeMillis());
return execTimes == EMPTY_TIMES ? NO_NEXT_EXECUTION_TIME : execTimes[0];
}
    /** {@inheritDoc} */
    @Override public boolean cancel() {
        synchronized (mux) {
            if (done)
                return false;

            if (cancelled)
                return true;

            // If an execution is currently in flight, let it finish;
            // onEnd() will observe 'cancelled' and mark the future done.
            if (!stats.isRunning())
                done = true;

            cancelled = true;
        }

        // Remove from the cron scheduler outside the lock.
        deschedule();

        return true;
    }
    /** {@inheritDoc} */
    @Override public long createTime() {
        synchronized (mux) {
            return stats.getCreateTime();
        }
    }

    /** {@inheritDoc} */
    @Override public long lastStartTime() {
        synchronized (mux) {
            return stats.getLastStartTime();
        }
    }

    /** {@inheritDoc} */
    @Override public long lastFinishTime() {
        synchronized (mux) {
            return stats.getLastEndTime();
        }
    }

    /** {@inheritDoc} */
    @Override public double averageExecutionTime() {
        synchronized (mux) {
            // NOTE(review): this delegates to getLastExecutionTime(), not an
            // average - confirm against GridScheduleStatistics whether this
            // is intended.
            return stats.getLastExecutionTime();
        }
    }

    /** {@inheritDoc} */
    @Override public long lastIdleTime() {
        synchronized (mux) {
            return stats.getLastIdleTime();
        }
    }

    /** {@inheritDoc} */
    @Override public double averageIdleTime() {
        synchronized (mux) {
            return stats.getAverageIdleTime();
        }
    }

    /** {@inheritDoc} */
    @Override public int count() {
        synchronized (mux) {
            return stats.getExecutionCount();
        }
    }

    /** {@inheritDoc} */
    @Override public boolean isRunning() {
        synchronized (mux) {
            return stats.isRunning();
        }
    }

    /** {@inheritDoc} Rethrows the last execution error, if any, as an unchecked exception. */
    @Override public R last() throws IgniteException {
        synchronized (mux) {
            if (lastErr != null)
                throw U.convertException(U.cast(lastErr));

            return lastRes;
        }
    }

    /** {@inheritDoc} */
    @Override public boolean isCancelled() {
        synchronized (mux) {
            return cancelled;
        }
    }

    /** {@inheritDoc} */
    @Override public boolean isDone() {
        synchronized (mux) {
            return done;
        }
    }
    /** {@inheritDoc} */
    @Override public void listen(IgniteInClosure<? super IgniteFuture<R>> lsnr) {
        A.notNull(lsnr, "lsnr");

        Throwable err;
        R res;

        boolean notifyLsnr = false;

        synchronized (mux) {
            lsnrs.add(lsnr);

            // Snapshot last result/error under the lock so the immediate
            // notification below is consistent.
            err = lastErr;
            res = lastRes;

            int cnt = stats.getExecutionCount();

            if (cnt > 0 && lastLsnrExecCnt != cnt) {
                lastLsnrExecCnt = cnt;

                notifyLsnr = true;
            }
        }

        // Avoid race condition in case if listener was added after
        // first execution completed.
        if (notifyLsnr)
            notifyListener(lsnr, res, err);
    }
    /** {@inheritDoc} */
    @SuppressWarnings("ExternalizableWithoutPublicNoArgConstructor")
    @Override public <T> IgniteFuture<T> chain(final IgniteClosure<? super IgniteFuture<R>, T> doneCb) {
        final GridFutureAdapter<T> fut = new GridFutureAdapter<T>() {
            @Override public String toString() {
                return "ChainFuture[orig=" + ScheduleFutureImpl.this + ", doneCb=" + doneCb + ']';
            }
        };

        listen(new CI1<IgniteFuture<R>>() {
            @Override public void apply(IgniteFuture<R> fut0) {
                try {
                    fut.onDone(doneCb.apply(fut0));
                }
                catch (GridClosureException e) {
                    // Unwrap to expose the original cause to the chained future.
                    fut.onDone(e.unwrap());
                }
                catch (IgniteException e) {
                    fut.onDone(e);
                }
                catch (RuntimeException | Error e) {
                    U.warn(null, "Failed to notify chained future (is grid stopped?) [igniteInstanceName=" +
                        ctx.igniteInstanceName() + ", doneCb=" + doneCb + ", err=" + e.getMessage() + ']');

                    fut.onDone(e);

                    // Rethrow so the unexpected failure is not silently swallowed.
                    throw e;
                }
            }
        });

        return new IgniteFutureImpl<>(fut);
    }
    /**
     * Notifies a single listener with a snapshot of the given result.
     * Must be invoked without holding {@link #mux} since the listener is alien code.
     *
     * @param lsnr Listener to notify.
     * @param res Last execution result.
     * @param err Last execution error.
     */
    private void notifyListener(final IgniteInClosure<? super IgniteFuture<R>> lsnr, R res, Throwable err) {
        assert lsnr != null;
        assert !Thread.holdsLock(mux);
        assert ctx != null;

        lsnr.apply(snapshot(res, err));
    }
/**
* @param res Last execution result.
* @param err Last execution error.
*/
private void notifyListeners(R res, Throwable err) {
final Collection<IgniteInClosure<? super IgniteFuture<R>>> tmp;
synchronized (mux) {
tmp = new ArrayList<>(lsnrs);
}
final SchedulerFuture<R> snapshot = snapshot(res, err);
for (IgniteInClosure<? super IgniteFuture<R>> lsnr : tmp)
lsnr.apply(snapshot);
}
    /**
     * Checks that the future is in valid state for get operation.
     *
     * @return Latch or {@code null} if future has been finished.
     * @throws IgniteFutureCancelledException If was cancelled.
     */
    @Nullable private CountDownLatch ensureGet() throws IgniteFutureCancelledException {
        synchronized (mux) {
            if (cancelled)
                throw new IgniteFutureCancelledException("Scheduling has been cancelled: " + this);

            if (done)
                return null;

            return resLatch;
        }
    }
    /** {@inheritDoc} Blocks until the next execution completes, then returns its result. */
    @Nullable @Override public R get() {
        CountDownLatch latch = ensureGet();

        if (latch != null) {
            try {
                latch.await();
            }
            catch (InterruptedException e) {
                // Restore the interrupt flag before deciding how to report.
                Thread.currentThread().interrupt();

                if (isCancelled())
                    throw new IgniteFutureCancelledException(e);

                // The future may have completed concurrently with the interrupt.
                if (isDone())
                    return last();

                throw new IgniteInterruptedException(e);
            }
        }

        return last();
    }
    /** {@inheritDoc} Timeout is interpreted in milliseconds. */
    @Override public R get(long timeout) {
        return get(timeout, MILLISECONDS);
    }
    /** {@inheritDoc} Waits at most the given timeout for the next execution result. */
    @Nullable @Override public R get(long timeout, TimeUnit unit) throws IgniteException {
        CountDownLatch latch = ensureGet();

        if (latch != null) {
            try {
                if (latch.await(timeout, unit))
                    return last();
                else
                    throw new IgniteFutureTimeoutException("Timed out waiting for completion of next " +
                        "scheduled computation: " + this);
            }
            catch (InterruptedException e) {
                // Restore the interrupt flag before deciding how to report.
                Thread.currentThread().interrupt();

                if (isCancelled())
                    throw new IgniteFutureCancelledException(e);

                // The future may have completed concurrently with the interrupt.
                if (isDone())
                    return last();

                throw new IgniteInterruptedException(e);
            }
        }

        return last();
    }
    /**
     * Creates a snapshot of this future with fixed last result.
     *
     * @param res Last result.
     * @param err Last error.
     * @return Future snapshot that serves the captured result/error from {@code last()}.
     */
    private SchedulerFuture<R> snapshot(R res, Throwable err) {
        return new ScheduleFutureSnapshot<>(this, res, err);
    }
    /**
     * Future snapshot.
     * <p>
     * Immutable view over the owning {@code ScheduleFutureImpl}: the last result
     * and error are fixed at construction time and served by {@link #last()},
     * while every other method delegates to the referenced future.
     *
     * @param <R> Result type.
     */
    private static class ScheduleFutureSnapshot<R> implements SchedulerFuture<R> {
        /** Referenced future that all non-snapshot calls delegate to. */
        private ScheduleFutureImpl<R> ref;

        /** Last result captured at snapshot creation time. */
        private R res;

        /** Last error captured at snapshot creation time. */
        private Throwable err;

        /**
         *
         * @param ref Referenced implementation.
         * @param res Last result.
         * @param err Throwable.
         */
        ScheduleFutureSnapshot(ScheduleFutureImpl<R> ref, R res, Throwable err) {
            assert ref != null;
            this.ref = ref;
            this.res = res;
            this.err = err;
        }

        /** {@inheritDoc} */
        @Override public R last() {
            // Serve the captured state: rethrow the captured error, else return the captured result.
            if (err != null)
                throw U.convertException(U.cast(err));
            return res;
        }

        /** {@inheritDoc} */
        @Override public String id() {
            return ref.id();
        }

        /** {@inheritDoc} */
        @Override public String pattern() {
            return ref.pattern();
        }

        /** {@inheritDoc} */
        @Override public long createTime() {
            return ref.createTime();
        }

        /** {@inheritDoc} */
        @Override public long lastStartTime() {
            return ref.lastStartTime();
        }

        /** {@inheritDoc} */
        @Override public long lastFinishTime() {
            return ref.lastFinishTime();
        }

        /** {@inheritDoc} */
        @Override public double averageExecutionTime() {
            return ref.averageExecutionTime();
        }

        /** {@inheritDoc} */
        @Override public long lastIdleTime() {
            return ref.lastIdleTime();
        }

        /** {@inheritDoc} */
        @Override public double averageIdleTime() {
            return ref.averageIdleTime();
        }

        /** {@inheritDoc} */
        @Override public long[] nextExecutionTimes(int cnt, long start) {
            return ref.nextExecutionTimes(cnt, start);
        }

        /** {@inheritDoc} */
        @Override public int count() {
            return ref.count();
        }

        /** {@inheritDoc} */
        @Override public boolean isRunning() {
            return ref.isRunning();
        }

        /** {@inheritDoc} */
        @Override public long nextExecutionTime() {
            return ref.nextExecutionTime();
        }

        /** {@inheritDoc} */
        @Nullable @Override public R get() {
            return ref.get();
        }

        /** {@inheritDoc} */
        @Override public R get(long timeout) {
            return ref.get(timeout);
        }

        /** {@inheritDoc} */
        @Nullable @Override public R get(long timeout, TimeUnit unit) {
            return ref.get(timeout, unit);
        }

        /** {@inheritDoc} */
        @Override public boolean cancel() {
            return ref.cancel();
        }

        /** {@inheritDoc} */
        @Override public boolean isDone() {
            return ref.isDone();
        }

        /** {@inheritDoc} */
        @Override public boolean isCancelled() {
            return ref.isCancelled();
        }

        /** {@inheritDoc} */
        @Override public void listen(IgniteInClosure<? super IgniteFuture<R>> lsnr) {
            ref.listen(lsnr);
        }

        /** {@inheritDoc} */
        @Override public <T> IgniteFuture<T> chain(IgniteClosure<? super IgniteFuture<R>, T> doneCb) {
            return ref.chain(doneCb);
        }
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        // Standard Ignite debug representation built via the S string builder.
        return S.toString(ScheduleFutureImpl.class, this);
    }
}
| |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.docproc;
import com.yahoo.component.ComponentId;
import com.yahoo.docproc.jdisc.metric.NullMetric;
import com.yahoo.jdisc.Metric;
import com.yahoo.statistics.Statistics;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
/**
 * A stack of the processors to call next in this processing. To push which
 * processor to call next, call addNext, to get and remove the next processor,
 * call pop.
 *
 * This is not thread safe.
 *
 * @author bratseth
 */
public class CallStack {

    /** The name of this stack, or null if it is not named */
    private String name;

    /** The Call objects of this stack */
    private final List<Call> elements = new java.util.LinkedList<>();

    /** The last element popped from the call stack, if any */
    private Call lastPopped = null;

    /** Used for metrics in Call */
    private final Metric metric;

    public CallStack() {
        this(new NullMetric());
    }

    public CallStack(String name) {
        this(name, new NullMetric());
    }

    /** Creates an empty stack */
    public CallStack(Metric metric) {
        this.name = null;
        this.metric = metric;
    }

    /** @deprecated the Statistics argument is ignored; use {@link #CallStack(Metric)} instead */
    @Deprecated
    public CallStack(Statistics statistics, Metric metric) {
        this(metric);
    }

    /** @deprecated the Statistics argument is ignored; use {@link #CallStack(String, Metric)} instead */
    @Deprecated
    public CallStack(String name, Statistics manager, Metric metric) {
        this(name, metric);
    }

    /** Creates an empty stack with a name */
    public CallStack(String name, Metric metric) {
        this.name = name;
        this.metric = metric;
    }

    /**
     * Creates a stack from another stack (starting at the next of the given
     * callstack) This does a deep copy of the stack.
     */
    public CallStack(CallStack stackToCopy) {
        name = stackToCopy.name;
        // Deep copy: clone each call so the two stacks share no Call instances.
        for (Call callToCopy : stackToCopy.elements) {
            elements.add((Call) callToCopy.clone());
        }
        this.metric = stackToCopy.metric;
    }

    /**
     * Creates a stack (with a given name) based on a collection of document processors, which are added to the stack
     * in the iteration order of the collection.
     *
     * @param name the name of the stack
     * @param docprocs the document processors to call
     */
    public CallStack(String name, Collection<DocumentProcessor> docprocs, Metric metric) {
        this(name, metric);
        for (DocumentProcessor docproc : docprocs) {
            addLast(docproc);
        }
    }

    /** @deprecated the Statistics argument is ignored; use {@link #CallStack(String, Collection, Metric)} instead */
    @Deprecated
    public CallStack(String name, Collection<DocumentProcessor> docprocs, Statistics manager, Metric metric) {
        this(name, docprocs, metric);
    }

    /** Returns the name of this stack, or null if it is not named */
    public String getName() {
        return name;
    }

    /** Sets the name of this stack */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Push an element as the <i>next</i> element on this stack
     *
     * @return this for convenience
     */
    public CallStack addNext(Call call) {
        elements.add(0, call);
        return this;
    }

    /**
     * Push an element as the <i>next</i> element on this stack
     *
     * @return this for convenience
     */
    public CallStack addNext(DocumentProcessor processor) {
        return addNext(new Call(processor, name, metric));
    }

    /**
     * Push multiple elements as the <i>next</i> elements on this stack
     *
     * @return this for convenience
     */
    public CallStack addNext(CallStack callStack) {
        elements.addAll(0, callStack.elements);
        return this;
    }

    /**
     * Adds an element as the <i>last</i> element on this stack
     *
     * @return this for convenience
     */
    public CallStack addLast(Call call) {
        elements.add(call);
        return this;
    }

    /**
     * Adds an element as the <i>last</i> element on this stack
     *
     * @return this for convenience
     */
    public CallStack addLast(DocumentProcessor processor) {
        return addLast(new Call(processor, name, metric));
    }

    /**
     * Adds multiple elements as the <i>last</i> elements on this stack
     *
     * @return this for convenience
     */
    public CallStack addLast(CallStack callStack) {
        elements.addAll(callStack.elements);
        return this;
    }

    /**
     * Adds an element just before the first occurrence of some other element on
     * the stack. This can not be called during an iteration.
     *
     * @param before
     *            the call to add this before. If this call is not present (the
     *            same object instance), new processor is added as the last
     *            element
     * @param call the call to add
     * @return this for convenience
     */
    public CallStack addBefore(Call before, Call call) {
        int insertPosition = elements.indexOf(before);
        if (insertPosition < 0) {
            addLast(call);
        } else {
            elements.add(insertPosition, call);
        }
        return this;
    }

    /**
     * Adds an element just before the first occurrence of some element on the
     * stack. This can not be called during an iteration.
     *
     * @param before
     *            the call to add this before. If this call is not present (the
     *            same object instance), the new processor is added as the last
     *            element
     * @param processor the processor to add
     * @return this for convenience
     */
    public CallStack addBefore(Call before, DocumentProcessor processor) {
        return addBefore(before, new Call(processor, name, metric));
    }

    /**
     * Adds multiple elements just before the first occurrence of some element on
     * the stack. This can not be called during an iteration.
     *
     * @param before the call to add this before. If this call is not present (the
     *        same object instance), the new processor is added as the last element
     * @param callStack the calls to add
     * @return this for convenience
     */
    public CallStack addBefore(Call before, CallStack callStack) {
        int insertPosition = elements.indexOf(before);
        if (insertPosition < 0) {
            addLast(callStack);
        } else {
            elements.addAll(insertPosition, callStack.elements);
        }
        return this;
    }

    /**
     * Adds an element just after the first occurrence of some other element on
     * the stack. This can not be called during an iteration.
     *
     * @param after
     *            the call to add this before. If this call is not present, (the
     *            same object instance), the new processor is added as the last
     *            element
     * @param call
     *            the call to add
     * @return this for convenience
     */
    public CallStack addAfter(Call after, Call call) {
        int insertPosition = elements.indexOf(after);
        if (insertPosition < 0) {
            addLast(call);
        } else {
            elements.add(insertPosition + 1, call);
        }
        return this;
    }

    /**
     * Adds an element just after the first occurrence of some other element on
     * the stack. This can not be called during an iteration.
     *
     * @param after the call to add this after. If this call is not present, (the
     *        same object instance), the new processor is added as the last element
     * @param processor the processor to add
     * @return this for convenience
     */
    public CallStack addAfter(Call after, DocumentProcessor processor) {
        return addAfter(after, new Call(processor, name, metric));
    }

    /**
     * Adds multiple elements just after another given element on the stack.
     * This can not be called during an iteration.
     *
     * @param after the call to add this before. If this call is not present, (the
     *        same object instance), the new processor is added as the last element
     * @param callStack the calls to add
     * @return this for convenience
     */
    public CallStack addAfter(Call after, CallStack callStack) {
        int insertPosition = elements.indexOf(after);
        if (insertPosition < 0) {
            addLast(callStack);
        } else {
            elements.addAll(insertPosition + 1, callStack.elements);
        }
        return this;
    }

    /**
     * Removes the given call (all occurrences of the same object instance).
     * Does nothing if the call is not present.
     *
     * @param call
     *            the call to remove
     * @return this for convenience
     */
    public CallStack remove(Call call) {
        // Identity comparison on purpose: only the exact instance is removed.
        elements.removeIf(current -> current == call);
        return this;
    }

    /**
     * Returns whether this stack has this call (left)
     *
     * @param call
     *            the call to check
     * @return true if the call is present, false otherwise
     */
    public boolean contains(Call call) {
        // Identity comparison on purpose, consistent with remove(Call).
        for (Call current : elements) {
            if (current == call) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns the next call to this processor id, or null if no such calls are left
     */
    public Call findCall(ComponentId processorId) {
        for (Call call : elements) {
            if (call.getDocumentProcessorId().equals(processorId)) {
                return call;
            }
        }
        return null;
    }

    /**
     * Returns the next call to this processor, or null if no such calls are
     * left
     */
    public Call findCall(DocumentProcessor processor) {
        return findCall(processor.getId());
    }

    /**
     * Returns and removes the next element, or null if there are no more elements
     */
    public Call pop() {
        if (elements.isEmpty()) return null;
        lastPopped = elements.remove(0);
        return lastPopped;
    }

    /**
     * Returns the next element without removing it, or null if there are no
     * more elements
     */
    public Call peek() {
        if (elements.isEmpty()) return null;
        return elements.get(0);
    }

    /**
     * Returns the element that was last popped from this stack, or null if none
     * have been popped or the stack is empty
     */
    public Call getLastPopped() {
        return lastPopped;
    }

    public void clear() {
        elements.clear();
    }

    /**
     * Returns a modifiable ListIterator over all the remaining elements of this
     * stack, starting by the next element
     */
    public ListIterator<Call> iterator() {
        return elements.listIterator();
    }

    /** Returns the number of remaining elements in this stack */
    public int size() {
        return elements.size();
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("callstack");
        if (name != null) {
            b.append(" ");
            b.append(name);
        }
        b.append(":");
        for (Call call : elements) {
            b.append("\n");
            b.append("  ");
            b.append(call.toString());
        }
        b.append("\n");
        return b.toString();
    }

    /** @deprecated statistics are no longer collected here; always returns null */
    @Deprecated
    public Statistics getStatistics() {
        return null;
    }

    public Metric getMetric() {
        return metric;
    }

}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.openapi.wm.impl;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.util.ExecUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import org.jetbrains.annotations.Nullable;
import sun.awt.AWTAccessor;
import sun.misc.Unsafe;
import javax.swing.*;
import java.awt.*;
import java.awt.peer.ComponentPeer;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.intellij.util.ArrayUtil.newLongArray;
/**
 * Utilities for talking to an X11 window manager via reflective access to the
 * JDK-internal XToolkit/XlibWrapper classes: WM detection/patching and
 * EWMH (_NET) full-screen support.
 */
public class X11UiUtil {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.wm.impl.X11UiUtil");

  // Xlib constants mirrored from X.h / Xatom.h.
  private static final int True = 1;
  private static final int False = 0;
  private static final long None = 0;
  private static final long XA_ATOM = 4;
  private static final long XA_WINDOW = 33;
  private static final int CLIENT_MESSAGE = 33;
  private static final int FORMAT_BYTE = 8;
  private static final int FORMAT_LONG = 32;
  private static final long EVENT_MASK = (3L << 19);
  private static final long NET_WM_STATE_TOGGLE = 2;

  @SuppressWarnings("SpellCheckingInspection")
  private static class Xlib {
    private Unsafe unsafe;
    private Method XGetWindowProperty;
    private Method XFree;
    private Method RootWindow;
    private Method XSendEvent;
    private Method getWindow;
    private Method getScreenNumber;
    private Method awtLock;
    private Method awtUnlock;
    private long display;
    private long UTF8_STRING;
    private long NET_SUPPORTING_WM_CHECK;
    private long NET_WM_NAME;
    private long NET_WM_ALLOWED_ACTIONS;
    private long NET_WM_STATE;
    private long NET_WM_ACTION_FULLSCREEN;
    private long NET_WM_STATE_FULLSCREEN;

    /** Returns an initialized wrapper, or null when not on XToolkit or when _NET is unsupported. */
    @Nullable
    private static Xlib getInstance() {
      Class<? extends Toolkit> toolkitClass = Toolkit.getDefaultToolkit().getClass();
      if (!SystemInfo.isXWindow || !"sun.awt.X11.XToolkit".equals(toolkitClass.getName())) {
        return null;
      }

      try {
        Xlib x11 = new Xlib();

        // reflect on Xlib method wrappers and important structures
        Class<?> XlibWrapper = Class.forName("sun.awt.X11.XlibWrapper");
        x11.unsafe = (Unsafe)field(XlibWrapper, "unsafe").get(null);
        x11.XGetWindowProperty = method(XlibWrapper, "XGetWindowProperty", 12);
        x11.XFree = method(XlibWrapper, "XFree", 1);
        x11.RootWindow = method(XlibWrapper, "RootWindow", 2);
        x11.XSendEvent = method(XlibWrapper, "XSendEvent", 5);
        Class<?> XBaseWindow = Class.forName("sun.awt.X11.XBaseWindow");
        x11.getWindow = method(XBaseWindow, "getWindow");
        x11.getScreenNumber = method(XBaseWindow, "getScreenNumber");
        x11.display = (Long)method(toolkitClass, "getDisplay").invoke(null);
        x11.awtLock = method(toolkitClass, "awtLock");
        x11.awtUnlock = method(toolkitClass, "awtUnlock");

        // intern atoms
        Class<?> XAtom = Class.forName("sun.awt.X11.XAtom");
        Method get = method(XAtom, "get", String.class);
        Field atom = field(XAtom, "atom");
        x11.UTF8_STRING = (Long)atom.get(get.invoke(null, "UTF8_STRING"));
        x11.NET_SUPPORTING_WM_CHECK = (Long)atom.get(get.invoke(null, "_NET_SUPPORTING_WM_CHECK"));
        x11.NET_WM_NAME = (Long)atom.get(get.invoke(null, "_NET_WM_NAME"));
        x11.NET_WM_ALLOWED_ACTIONS = (Long)atom.get(get.invoke(null, "_NET_WM_ALLOWED_ACTIONS"));
        x11.NET_WM_STATE = (Long)atom.get(get.invoke(null, "_NET_WM_STATE"));
        x11.NET_WM_ACTION_FULLSCREEN = (Long)atom.get(get.invoke(null, "_NET_WM_ACTION_FULLSCREEN"));
        x11.NET_WM_STATE_FULLSCREEN = (Long)atom.get(get.invoke(null, "_NET_WM_STATE_FULLSCREEN"));

        // check for _NET protocol support
        Long netWmWindow = x11.getNetWmWindow();
        if (netWmWindow == null) {
          LOG.info("_NET protocol is not supported");
          return null;
        }

        return x11;
      }
      catch (Throwable t) {
        LOG.info("cannot initialize", t);
      }

      return null;
    }

    private long getRootWindow(long screen) throws Exception {
      awtLock.invoke(null);
      try {
        return (Long)RootWindow.invoke(null, display, screen);
      }
      finally {
        awtUnlock.invoke(null);
      }
    }

    /** Returns the _NET supporting WM check window, or null if absent. */
    @Nullable
    private Long getNetWmWindow() throws Exception {
      long rootWindow = getRootWindow(0);
      long[] values = getLongArrayProperty(rootWindow, NET_SUPPORTING_WM_CHECK, XA_WINDOW);
      return values != null && values.length > 0 ? values[0] : null;
    }

    @Nullable
    private long[] getLongArrayProperty(long window, long name, long type) throws Exception {
      return getWindowProperty(window, name, type, FORMAT_LONG);
    }

    @Nullable
    private String getUtfStringProperty(long window, long name) throws Exception {
      byte[] bytes = getWindowProperty(window, name, UTF8_STRING, FORMAT_BYTE);
      return bytes != null ? new String(bytes, CharsetToolkit.UTF8_CHARSET) : null;
    }

    /**
     * Reads a window property as byte[] (FORMAT_BYTE) or long[] (FORMAT_LONG).
     * Returns null on failure or format mismatch.
     */
    @Nullable
    private <T> T getWindowProperty(long window, long name, long type, long expectedFormat) throws Exception {
      long data = unsafe.allocateMemory(64);
      awtLock.invoke(null);
      try {
        unsafe.setMemory(data, 64, (byte)0);

        int result = (Integer)XGetWindowProperty.invoke(
          null, display, window, name, 0L, 65535L, (long)False, type, data, data + 8, data + 16, data + 24, data + 32);
        if (result == 0) {
          int format = unsafe.getInt(data + 8);
          long pointer = SystemInfo.is64Bit ? unsafe.getLong(data + 32) : unsafe.getInt(data + 32);

          try {
            if (pointer != None && format == expectedFormat) {
              int length = SystemInfo.is64Bit ? (int)unsafe.getLong(data + 16) : unsafe.getInt(data + 16);
              if (format == FORMAT_BYTE) {
                byte[] bytes = new byte[length];
                for (int i = 0; i < length; i++) bytes[i] = unsafe.getByte(pointer + i);
                @SuppressWarnings("unchecked") T t = (T)bytes;
                return t;
              }
              else if (format == FORMAT_LONG) {
                long[] values = newLongArray(length);
                for (int i = 0; i < length; i++) {
                  values[i] = SystemInfo.is64Bit ? unsafe.getLong(pointer + 8 * i) : unsafe.getInt(pointer + 4 * i);
                }
                @SuppressWarnings("unchecked") T t = (T)values;
                return t;
              }
              else if (format != None) {
                LOG.info("unexpected format: " + format);
              }
            }
          }
          finally {
            // Fix: XGetWindowProperty allocates the property buffer on success and the
            // caller must XFree it. Previously the early returns above skipped XFree,
            // leaking the buffer on every successful read; free it on all paths.
            if (pointer != None) XFree.invoke(null, pointer);
          }
        }
      }
      finally {
        awtUnlock.invoke(null);
        unsafe.freeMemory(data);
      }

      return null;
    }

    /** Fills an XClientMessageEvent (32/64-bit layouts differ) and sends it to {@code target}. */
    private void sendClientMessage(long target, long window, long type, long... data) throws Exception {
      assert data.length <= 5;
      long event = unsafe.allocateMemory(128);
      awtLock.invoke(null);
      try {
        unsafe.setMemory(event, 128, (byte)0);

        unsafe.putInt(event, CLIENT_MESSAGE);
        if (!SystemInfo.is64Bit) {
          unsafe.putInt(event + 8, True);
          unsafe.putInt(event + 16, (int)window);
          unsafe.putInt(event + 20, (int)type);
          unsafe.putInt(event + 24, FORMAT_LONG);
          for (int i = 0; i < data.length; i++) {
            unsafe.putInt(event + 28 + 4 * i, (int)data[i]);
          }
        }
        else {
          unsafe.putInt(event + 16, True);
          unsafe.putLong(event + 32, window);
          unsafe.putLong(event + 40, NET_WM_STATE);
          unsafe.putInt(event + 48, FORMAT_LONG);
          for (int i = 0; i < data.length; i++) {
            unsafe.putLong(event + 56 + 8 * i, data[i]);
          }
        }

        XSendEvent.invoke(null, display, target, false, EVENT_MASK, event);
      }
      finally {
        awtUnlock.invoke(null);
        unsafe.freeMemory(event);
      }
    }
  }

  @Nullable private static final Xlib X11 = Xlib.getInstance();

  // WM detection and patching

  /** Returns the window manager's _NET_WM_NAME, or null when unavailable. */
  @Nullable
  public static String getWmName() {
    if (X11 == null) return null;

    try {
      Long netWmWindow = X11.getNetWmWindow();
      if (netWmWindow != null) {
        return X11.getUtfStringProperty(netWmWindow, X11.NET_WM_NAME);
      }
    }
    catch (Throwable t) {
      LOG.info("cannot get WM name", t);
    }

    return null;
  }

  @SuppressWarnings("SpellCheckingInspection")
  public static void patchDetectedWm(String wmName) {
    if (X11 == null || !Registry.is("ide.x11.override.wm")) return;

    try {
      if (wmName.startsWith("Mutter") || "Muffin".equals(wmName) || "GNOME Shell".equals(wmName)) {
        setWM("MUTTER_WM", "METACITY_WM");
      }
      else if ("Marco".equals(wmName)) {
        setWM("MARCO_WM", "METACITY_WM");
      }
      else if ("awesome".equals(wmName)) {
        String version = getAwesomeWMVersion();
        if (StringUtil.compareVersionNumbers(version, "3.5") >= 0) {
          setWM("SAWFISH_WM");
        }
        else if (version != null) {
          setWM("OTHER_NONREPARENTING_WM", "LG3D_WM");
        }
      }
    }
    catch (Throwable t) {
      LOG.warn(t);
    }
  }

  /** Overrides the WM id detected by sun.awt.X11.XWM with the first constant that exists. */
  private static void setWM(String... wmConstants) throws Exception {
    Class<?> xwmClass = Class.forName("sun.awt.X11.XWM");
    Object xwm = method(xwmClass, "getWM").invoke(null);
    if (xwm != null) {
      for (String wmConstant : wmConstants) {
        try {
          Field wm = field(xwmClass, wmConstant);
          Object id = wm.get(null);
          if (id != null) {
            field(xwmClass, "awt_wmgr").set(null, id);
            field(xwmClass, "WMID").set(xwm, id);
            LOG.info("impersonated WM: " + wmConstant);
            break;
          }
        }
        catch (NoSuchFieldException ignore) { }
      }
    }
  }

  /** Parses the version out of `awesome --version` output, or null on failure. */
  @Nullable
  private static String getAwesomeWMVersion() {
    try {
      String version = ExecUtil.execAndReadLine(new GeneralCommandLine("awesome", "--version"));
      if (version != null) {
        Matcher m = Pattern.compile("awesome v([0-9.]+)").matcher(version);
        if (m.find()) {
          return m.group(1);
        }
      }
    }
    catch (Throwable t) {
      LOG.warn(t);
    }

    return null;
  }

  // full-screen support

  public static boolean isFullScreenSupported() {
    if (X11 == null) return false;

    IdeFrame[] frames = WindowManager.getInstance().getAllProjectFrames();
    if (frames.length == 0) return true; // no frame to check the property so be optimistic here

    return frames[0] instanceof JFrame && hasWindowProperty((JFrame)frames[0], X11.NET_WM_ALLOWED_ACTIONS, X11.NET_WM_ACTION_FULLSCREEN);
  }

  public static boolean isInFullScreenMode(JFrame frame) {
    return X11 != null && hasWindowProperty(frame, X11.NET_WM_STATE, X11.NET_WM_STATE_FULLSCREEN);
  }

  /** Returns true when the frame's window property {@code name} contains the {@code expected} atom. */
  private static boolean hasWindowProperty(JFrame frame, long name, long expected) {
    if (X11 == null) return false;

    try {
      ComponentPeer peer = AWTAccessor.getComponentAccessor().getPeer(frame);
      if (peer != null) {
        long window = (Long)X11.getWindow.invoke(peer);
        long[] values = X11.getLongArrayProperty(window, name, XA_ATOM);
        if (values != null) {
          for (long value : values) {
            if (value == expected) return true;
          }
        }
      }
      return false;
    }
    catch (Throwable t) {
      LOG.info("cannot check window property", t);
      return false;
    }
  }

  public static void toggleFullScreenMode(JFrame frame) {
    if (X11 == null) return;

    try {
      @SuppressWarnings("deprecation") ComponentPeer peer = AWTAccessor.getComponentAccessor().getPeer(frame);
      if (peer == null) throw new IllegalStateException(frame + " has no peer");
      long window = (Long)X11.getWindow.invoke(peer);
      long screen = (Long)X11.getScreenNumber.invoke(peer);
      long rootWindow = X11.getRootWindow(screen);
      X11.sendClientMessage(rootWindow, window, X11.NET_WM_STATE, NET_WM_STATE_TOGGLE, X11.NET_WM_STATE_FULLSCREEN);
    }
    catch (Throwable t) {
      LOG.info("cannot toggle mode", t);
    }
  }

  // reflection utilities

  /** Finds a declared method by exact signature, searching up the class hierarchy. */
  private static Method method(Class<?> aClass, String name, Class<?>... parameterTypes) throws Exception {
    while (aClass != null) {
      try {
        Method method = aClass.getDeclaredMethod(name, parameterTypes);
        method.setAccessible(true);
        return method;
      }
      catch (NoSuchMethodException e) {
        aClass = aClass.getSuperclass();
      }
    }

    throw new NoSuchMethodException(name);
  }

  /** Finds a declared method by name and parameter count (when exact types are inconvenient). */
  private static Method method(Class<?> aClass, String name, int parameters) throws Exception {
    for (Method method : aClass.getDeclaredMethods()) {
      if (name.equals(method.getName()) && method.getParameterTypes().length == parameters) {
        method.setAccessible(true);
        return method;
      }
    }

    throw new NoSuchMethodException(name);
  }

  private static Field field(Class<?> aClass, String name) throws Exception {
    Field field = aClass.getDeclaredField(name);
    field.setAccessible(true);
    return field;
  }
}
| |
/*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.primitives;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.Helpers;
import com.google.common.testing.NullPointerTester;
import com.google.common.testing.SerializableTester;
import junit.framework.TestCase;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* Unit test for {@link Booleans}.
*
* @author Kevin Bourrillion
*/
@GwtCompatible(emulated = true)
public class BooleansTest extends TestCase {
  // Shared immutable-by-convention fixtures; tests must never mutate these arrays.
  private static final boolean[] EMPTY = {};
  private static final boolean[] ARRAY_FALSE = {false};
  private static final boolean[] ARRAY_TRUE = {true};
  private static final boolean[] ARRAY_FALSE_FALSE = {false, false};
  private static final boolean[] ARRAY_FALSE_TRUE = {false, true};
  // Both possible boolean values, for exhaustive pairwise checks.
  private static final boolean[] VALUES = {false, true};
  // Booleans.hashCode must agree with Boolean.hashCode for both values.
  public void testHashCode() {
    assertEquals(Boolean.TRUE.hashCode(), Booleans.hashCode(true));
    assertEquals(Boolean.FALSE.hashCode(), Booleans.hashCode(false));
  }
public void testCompare() {
for (boolean x : VALUES) {
for (boolean y : VALUES) {
// note: spec requires only that the sign is the same
assertEquals(x + ", " + y,
Boolean.valueOf(x).compareTo(y),
Booleans.compare(x, y));
}
}
}
  // Membership checks: empty array, value absent, value present (single and mixed arrays).
  public void testContains() {
    assertFalse(Booleans.contains(EMPTY, false));
    assertFalse(Booleans.contains(ARRAY_FALSE, true));
    assertTrue(Booleans.contains(ARRAY_FALSE, false));
    assertTrue(Booleans.contains(ARRAY_FALSE_TRUE, false));
    assertTrue(Booleans.contains(ARRAY_FALSE_TRUE, true));
  }
  // NOTE(review): this method exercises the array-in-array overload of indexOf,
  // while testIndexOf_arrays below exercises the single-value overload — the
  // names look swapped relative to their bodies; confirm before renaming.
  public void testIndexOf() {
    assertEquals(-1, Booleans.indexOf(EMPTY, ARRAY_FALSE));
    assertEquals(-1, Booleans.indexOf(ARRAY_FALSE, ARRAY_FALSE_TRUE));
    assertEquals(0, Booleans.indexOf(ARRAY_FALSE_FALSE, ARRAY_FALSE));
    assertEquals(0, Booleans.indexOf(ARRAY_FALSE, ARRAY_FALSE));
    assertEquals(0, Booleans.indexOf(ARRAY_FALSE_TRUE, ARRAY_FALSE));
    assertEquals(1, Booleans.indexOf(ARRAY_FALSE_TRUE, ARRAY_TRUE));
    // An empty target is found at index 0 of any non-empty array.
    assertEquals(0, Booleans.indexOf(ARRAY_TRUE, new boolean[0]));
  }
  // NOTE(review): despite the "_arrays" suffix this method exercises the
  // single-value overload of indexOf(array, boolean); confirm naming intent
  // before renaming.
  public void testIndexOf_arrays() {
    assertEquals(-1, Booleans.indexOf(EMPTY, false));
    assertEquals(-1, Booleans.indexOf(ARRAY_FALSE, true));
    assertEquals(-1, Booleans.indexOf(ARRAY_FALSE_FALSE, true));
    assertEquals(0, Booleans.indexOf(ARRAY_FALSE, false));
    assertEquals(0, Booleans.indexOf(ARRAY_FALSE_TRUE, false));
    assertEquals(1, Booleans.indexOf(ARRAY_FALSE_TRUE, true));
    assertEquals(2, Booleans.indexOf(new boolean[] {false, false, true}, true));
  }
  // lastIndexOf on the primitive array overload: absent values, single match,
  // and a repeated value where the LAST occurrence must win.
  public void testLastIndexOf() {
    assertEquals(-1, Booleans.lastIndexOf(EMPTY, false));
    assertEquals(-1, Booleans.lastIndexOf(ARRAY_FALSE, true));
    assertEquals(-1, Booleans.lastIndexOf(ARRAY_FALSE_FALSE, true));
    assertEquals(0, Booleans.lastIndexOf(ARRAY_FALSE, false));
    assertEquals(0, Booleans.lastIndexOf(ARRAY_FALSE_TRUE, false));
    assertEquals(1, Booleans.lastIndexOf(ARRAY_FALSE_TRUE, true));
    assertEquals(2, Booleans.lastIndexOf(new boolean[] {false, true, true}, true));
  }
  // concat: empty combinations, identity content (but a fresh array instance),
  // and multi-array concatenation order.
  public void testConcat() {
    assertTrue(Arrays.equals(EMPTY, Booleans.concat()));
    assertTrue(Arrays.equals(EMPTY, Booleans.concat(EMPTY)));
    assertTrue(Arrays.equals(EMPTY, Booleans.concat(EMPTY, EMPTY, EMPTY)));
    assertTrue(Arrays.equals(ARRAY_FALSE, Booleans.concat(ARRAY_FALSE)));
    // concat must copy, never return the input array itself.
    assertNotSame(ARRAY_FALSE, Booleans.concat(ARRAY_FALSE));
    assertTrue(Arrays.equals(ARRAY_FALSE, Booleans.concat(EMPTY, ARRAY_FALSE, EMPTY)));
    assertTrue(Arrays.equals(
        new boolean[] {false, false, false},
        Booleans.concat(ARRAY_FALSE, ARRAY_FALSE, ARRAY_FALSE)));
    assertTrue(Arrays.equals(
        new boolean[] {false, false, true},
        Booleans.concat(ARRAY_FALSE, ARRAY_FALSE_TRUE)));
  }
  // ensureCapacity returns the SAME array when it is already big enough,
  // and a grown zero-padded copy otherwise.
  public void testEnsureCapacity() {
    assertSame(EMPTY, Booleans.ensureCapacity(EMPTY, 0, 1));
    assertSame(ARRAY_FALSE, Booleans.ensureCapacity(ARRAY_FALSE, 0, 1));
    assertSame(ARRAY_FALSE, Booleans.ensureCapacity(ARRAY_FALSE, 1, 1));
    assertTrue(Arrays.equals(
        new boolean[] {true, false, false},
        Booleans.ensureCapacity(new boolean[] {true}, 2, 1)));
  }
  // Negative minLength / padding arguments must be rejected up front.
  @SuppressWarnings("CheckReturnValue")
  public void testEnsureCapacity_fail() {
    try {
      Booleans.ensureCapacity(ARRAY_FALSE, -1, 1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      // notice that this should even fail when no growth was needed
      Booleans.ensureCapacity(ARRAY_FALSE, 1, -1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }
  // join: empty input, single element, separator placement, empty separator.
  public void testJoin() {
    assertEquals("", Booleans.join(",", EMPTY));
    assertEquals("false", Booleans.join(",", ARRAY_FALSE));
    assertEquals("false,true", Booleans.join(",", false, true));
    assertEquals("falsetruefalse",
        Booleans.join("", false, true, false));
  }
public void testLexicographicalComparator() {
List<boolean[]> ordered = Arrays.asList(
new boolean[] {},
new boolean[] {false},
new boolean[] {false, false},
new boolean[] {false, true},
new boolean[] {true},
new boolean[] {true, false},
new boolean[] {true, true},
new boolean[] {true, true, true});
Comparator<boolean[]> comparator = Booleans.lexicographicalComparator();
Helpers.testComparator(comparator, ordered);
}
  // The comparator is a singleton enum, so reserialization must return the same instance.
  @GwtIncompatible("SerializableTester")
  public void testLexicographicalComparatorSerializable() {
    Comparator<boolean[]> comparator = Booleans.lexicographicalComparator();
    assertSame(comparator, SerializableTester.reserialize(comparator));
  }
public void testToArray() {
// need explicit type parameter to avoid javac warning!?
List<Boolean> none = Arrays.<Boolean>asList();
assertTrue(Arrays.equals(EMPTY, Booleans.toArray(none)));
List<Boolean> one = Arrays.asList(false);
assertTrue(Arrays.equals(ARRAY_FALSE, Booleans.toArray(one)));
boolean[] array = {false, false, true};
List<Boolean> three = Arrays.asList(false, false, true);
assertTrue(Arrays.equals(array, Booleans.toArray(three)));
assertTrue(Arrays.equals(array, Booleans.toArray(Booleans.asList(array))));
}
public void testToArray_threadSafe() {
// Only for booleans, we lengthen VALUES
boolean[] VALUES = BooleansTest.VALUES;
VALUES = Booleans.concat(VALUES, VALUES);
for (int delta : new int[] { +1, 0, -1 }) {
for (int i = 0; i < VALUES.length; i++) {
List<Boolean> list = Booleans.asList(VALUES).subList(0, i);
Collection<Boolean> misleadingSize =
Helpers.misleadingSizeCollection(delta);
misleadingSize.addAll(list);
boolean[] arr = Booleans.toArray(misleadingSize);
assertEquals(i, arr.length);
for (int j = 0; j < i; j++) {
assertEquals(VALUES[j], arr[j]);
}
}
}
}
  // toArray is null-hostile: a null element must raise NullPointerException.
  @SuppressWarnings("CheckReturnValue")
  public void testToArray_withNull() {
    List<Boolean> list = Arrays.asList(false, true, null);
    try {
      Booleans.toArray(list);
      fail();
    } catch (NullPointerException expected) {
    }
  }
  // asList view reports emptiness of the backing array.
  public void testAsListIsEmpty() {
    assertTrue(Booleans.asList(EMPTY).isEmpty());
    assertFalse(Booleans.asList(ARRAY_FALSE).isEmpty());
  }
  // asList view reports the backing array's length as its size.
  public void testAsListSize() {
    assertEquals(0, Booleans.asList(EMPTY).size());
    assertEquals(1, Booleans.asList(ARRAY_FALSE).size());
    assertEquals(2, Booleans.asList(ARRAY_FALSE_TRUE).size());
  }
public void testAsListIndexOf() {
    List<Boolean> none = Booleans.asList(EMPTY);
    // Objects of the wrong type are simply not found rather than throwing.
    assertEquals(-1, none.indexOf("wrong type"));
    assertEquals(-1, none.indexOf(true));
    assertEquals(-1, Booleans.asList(ARRAY_FALSE).indexOf(true));
    assertEquals(0, Booleans.asList(ARRAY_FALSE).indexOf(false));
    assertEquals(1, Booleans.asList(ARRAY_FALSE_TRUE).indexOf(true));
}
public void testAsListLastIndexOf() {
    // The first two assertions previously exercised indexOf() by mistake
    // (copy-paste from testAsListIndexOf); lastIndexOf() is the method
    // under test here, including on the empty view and wrong-typed keys.
    assertEquals(-1, Booleans.asList(EMPTY).lastIndexOf("wrong type"));
    assertEquals(-1, Booleans.asList(EMPTY).lastIndexOf(true));
    assertEquals(-1, Booleans.asList(ARRAY_FALSE).lastIndexOf(true));
    assertEquals(1, Booleans.asList(ARRAY_FALSE_TRUE).lastIndexOf(true));
    assertEquals(1, Booleans.asList(ARRAY_FALSE_FALSE).lastIndexOf(false));
}
public void testAsListContains() {
    List<Boolean> falseTrue = Booleans.asList(ARRAY_FALSE_TRUE);
    // Wrong-typed queries return false instead of throwing.
    assertFalse(Booleans.asList(EMPTY).contains("wrong type"));
    assertFalse(Booleans.asList(EMPTY).contains(true));
    assertFalse(Booleans.asList(ARRAY_FALSE).contains(true));
    assertTrue(Booleans.asList(ARRAY_TRUE).contains(true));
    assertTrue(falseTrue.contains(false));
    assertTrue(falseTrue.contains(true));
}
public void testAsListEquals() {
    assertEquals(Booleans.asList(EMPTY), Collections.emptyList());
    assertEquals(Booleans.asList(ARRAY_FALSE), Booleans.asList(ARRAY_FALSE));
    // A List is never equal to a raw array, to null, or to a differently
    // sized or valued list.
    assertFalse(Booleans.asList(ARRAY_FALSE).equals(ARRAY_FALSE));
    assertFalse(Booleans.asList(ARRAY_FALSE).equals(null));
    assertFalse(Booleans.asList(ARRAY_FALSE).equals(Booleans.asList(ARRAY_FALSE_TRUE)));
    assertFalse(Booleans.asList(ARRAY_FALSE_FALSE).equals(Booleans.asList(ARRAY_FALSE_TRUE)));
    // (Removed a stray lastIndexOf assertion that was unrelated to equals()
    // and exactly duplicated one in testAsListLastIndexOf.)
    List<Boolean> reference = Booleans.asList(ARRAY_FALSE);
    assertEquals(Booleans.asList(ARRAY_FALSE), reference);
    // equals must be reflexive.
    assertEquals(reference, reference);
}
public void testAsListHashcode() {
    // The empty list's hash code is 1 by the List contract.
    assertEquals(1, Booleans.asList(EMPTY).hashCode());
    List<Boolean> reference = Booleans.asList(ARRAY_FALSE);
    // Equal views must hash equally, whether freshly created or cached.
    assertEquals(Booleans.asList(ARRAY_FALSE).hashCode(), reference.hashCode());
    assertEquals(Booleans.asList(ARRAY_FALSE).hashCode(), Booleans.asList(ARRAY_FALSE).hashCode());
}
public void testAsListToString() {
    // toString must match AbstractCollection's bracketed, comma-separated form.
    assertEquals("[false, true]", Booleans.asList(ARRAY_FALSE_TRUE).toString());
    assertEquals("[false]", Booleans.asList(ARRAY_FALSE).toString());
}
public void testAsListSet() {
    List<Boolean> view = Booleans.asList(ARRAY_FALSE);
    // set() returns the element previously stored at the index.
    assertFalse(view.set(0, true));
    assertTrue(view.set(0, false));
    // Null elements are rejected.
    try {
        view.set(0, null);
        fail();
    } catch (NullPointerException expected) {
    }
    // Out-of-range indices are rejected.
    try {
        view.set(1, true);
        fail();
    } catch (IndexOutOfBoundsException expected) {
    }
}
public void testCountTrue() {
    // Varargs form: zero args, single values, and mixed sequences.
    assertEquals(0, Booleans.countTrue());
    assertEquals(1, Booleans.countTrue(true));
    assertEquals(0, Booleans.countTrue(false));
    assertEquals(1, Booleans.countTrue(false, false, true, false, false));
    assertEquals(3, Booleans.countTrue(false, true, false, true, false, true));
}
@GwtIncompatible("NullPointerTester")
public void testNulls() {
    // Reflectively verify that every public static method of Booleans
    // rejects null arguments with NullPointerException.
    new NullPointerTester().testAllPublicStaticMethods(Booleans.class);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.task.map;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.GuardedBy;
import java.io.File;
import java.io.IOException;
import java.net.ServerSocket;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.LongAdder;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import com.addthis.basis.jmx.MBeanRemotingSupport;
import com.addthis.bundle.channel.DataChannelError;
import com.addthis.bundle.core.Bundle;
import com.addthis.bundle.core.BundleField;
import com.addthis.codec.annotations.Time;
import com.addthis.codec.json.CodecJSON;
import com.addthis.hydra.data.filter.bundle.BundleFilterDebugPrint;
import com.addthis.hydra.task.output.TaskDataOutput;
import com.addthis.hydra.task.run.TaskExitState;
import com.addthis.hydra.task.run.TaskRunnable;
import com.addthis.hydra.task.source.TaskDataSource;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Meter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.String.format;
import static java.lang.String.join;
/**
* <p>This is <span class="hydra-summary">the most common form of Hydra job (either a split job or a map job)</span>.
* It is specified with {@code type : "map"}.</p>
* <p>There are two common use cases of these jobs:</p>
* <ul>
* <li><p>Split jobs. These jobs take in lines of data, such as log files, and emit new lines.
* It might change the format (text in, binary out),
* or drop lines that fail to some predicate, or create multiple derived lines from each input,
* or make all strings lowercase, or other arbitrary transformations.
* But it's always lines in, lines out.</li>
* <li>Tree jobs. These jobs take in log lines of input,
* such as just emitted by a split job, and build a tree representation of the data.
* This hierarchical databases can then be explored through the distribute Hydra query system.</li>
* </ul>
* <p>Example:</p>
* <pre>
* map.source: {...}
* map.filterIn: {...}
* map.filterOut: {...}
* map.output: {...}
* </pre>
*
* @user-reference
*/
public class StreamMapper implements StreamEmitter, TaskRunnable {
    private static final Logger log = LoggerFactory.getLogger(StreamMapper.class);
    /** The data source for this job. */
    private final TaskDataSource source;
    /** The transformations to apply onto the data. */
    private final MapDef map;
    /** The data sink for emitting the result of the transformations. */
    private final TaskDataOutput output;
    /**
     * Allow more flexible stream builders. For example, asynchronous or builders
     * where one or more bundles roll up into a single bundle or a single bundle causes
     * the emission of multiple bundles.
     */
    private final StreamBuilder builder;
    /** Print to the console statistics while processing the data. Default is {@code true}. */
    private final boolean stats;
    /** How frequently statistics should be printed. */
    private final long metricTick;
    /**
     * If true then ensure that writable directories are all unique.
     * Default is true.
     **/
    private final boolean validateDirs;
    /**
     * Duration to wait for outstanding processor tasks
     * to finish on shutdown. Default is "60 seconds"
     */
    private final int taskFinishTimeout;
    /**
     * Use MapFeederForkJoin if true; Otherwise use original MapFeeder.
     *
     * Default is false. This is a temporary flag that allows us to selectively test the
     * performance of the new fork join map feeder. Once tested, the original map feeder can be
     * replaced with the fork join version.
     */
    private final boolean useForkJoinMapFeeder;
    /** Number of feeder worker threads; also divides per-thread time in printMetrics. */
    private final int threads;
    /** If true, open an ephemeral JMX remoting port on start (see maybeInitJmx). */
    private final boolean enableJmx;
    /** If true, write a "job.exit" artifact with counters on completion (see emitTaskExitState). */
    private final boolean emitTaskState;
    // NOTE(review): dateFormat is stored but not referenced anywhere in this
    // class — presumably consumed via config/serialization elsewhere; confirm.
    private final SimpleDateFormat dateFormat;
    /** Completed (with null) exactly once, in taskComplete(). */
    private final CompletableFuture<Void> completionFuture = new CompletableFuture<>();
    /** CAS gate serializing access to the @GuardedBy("metricGate") counters below. */
    private final AtomicBoolean metricGate = new AtomicBoolean(false);
    /** Cumulative nanoseconds spent in the filter chain, across all threads. */
    private final LongAdder filterTime = new LongAdder();
    /** Cumulative nanoseconds spent emitting output, across all threads. */
    private final LongAdder outputTime = new LongAdder();
    // metrics
    private static final Meter inputMeter = Metrics.newMeter(StreamMapper.class, "input", "input", TimeUnit.SECONDS);
    private static final Meter outputMeter = Metrics.newMeter(StreamMapper.class, "output", "output", TimeUnit.SECONDS);
    @GuardedBy("metricGate") private long lastTick;
    @GuardedBy("metricGate") private long lastOutputTime = 0;
    @GuardedBy("metricGate") private long lastFilterTime = 0;
    @GuardedBy("metricGate") private long lastInputCount = 0;
    @GuardedBy("metricGate") private long lastOutputCount = 0;
    /** Non-null only while JMX remoting is active (between maybeInitJmx and maybeCloseJmx). */
    private MBeanRemotingSupport jmxremote;
    /** The feeder thread created in start(); interrupted and joined in close(). */
    private Thread feeder;

    /**
     * Jackson-driven constructor; "source", "output" and "threads" are required.
     * Throws IllegalArgumentException from validateWritableRootPaths() when
     * validateDirs is set and writable directories are not unique.
     */
    @JsonCreator
    public StreamMapper(@JsonProperty(value = "source", required = true) TaskDataSource source,
                        @JsonProperty("map") MapDef map,
                        @JsonProperty(value = "output", required = true) TaskDataOutput output,
                        @JsonProperty("builder") StreamBuilder builder,
                        @JsonProperty("stats") boolean stats,
                        @JsonProperty("metricTick") @Time(TimeUnit.NANOSECONDS) long metricTick,
                        @JsonProperty(value = "threads", required = true) int threads,
                        @JsonProperty("enableJmx") boolean enableJmx,
                        @JsonProperty("emitTaskState") boolean emitTaskState,
                        @JsonProperty("dateFormat") SimpleDateFormat dateFormat,
                        @JsonProperty("validateDirs") boolean validateDirs,
                        @JsonProperty("taskFinishTimeout") @Time(TimeUnit.SECONDS) int taskFinishTimeout,
                        @JsonProperty("useForkJoinMapFeeder") boolean useForkJoinMapFeeder) {
        this.source = source;
        this.map = map;
        this.output = output;
        this.builder = builder;
        this.stats = stats;
        this.metricTick = metricTick;
        this.threads = threads;
        this.enableJmx = enableJmx;
        this.emitTaskState = emitTaskState;
        this.dateFormat = dateFormat;
        this.validateDirs = validateDirs;
        this.taskFinishTimeout = taskFinishTimeout;
        this.useForkJoinMapFeeder = useForkJoinMapFeeder;
        validateWritableRootPaths();
    }

    /**
     * Initializes source, map, output and (if present) the builder, optionally
     * starts JMX remoting, then launches the feeder thread that drives process().
     */
    @Override
    public void start() {
        source.init();
        map.init();
        output.init();
        if (builder != null) {
            builder.init();
        }
        maybeInitJmx();
        log.info("[init]");
        if (useForkJoinMapFeeder) {
            feeder = new Thread(new MapFeederForkJoin(this, source, threads), "MapFeederForkJoin");
        } else {
            feeder = new Thread(new MapFeeder(this, source, threads), "MapFeeder");
        }
        // Safe: feeder has not started yet, so no race on lastTick here.
        lastTick = System.nanoTime();
        feeder.start();
    }

    /**
     * Runs one bundle through filterIn -> field mapping -> filterOut/cFilterOut,
     * then emits it (directly or via the builder). Tracks filter/output time and
     * periodically prints metrics. Any failure is reported to the output as a
     * DataChannelError and rethrown.
     */
    public void process(Bundle inputBundle) {
        Bundle bundle = inputBundle;
        try {
            log.debug("input: {}", bundle);
            long filterBefore = System.nanoTime();
            long filterAfter;
            if ((map.filterIn == null) || map.filterIn.filter(bundle)) {
                bundle = mapBundle(bundle);
                if ((map.filterOut == null) || map.filterOut.filter(bundle)) {
                    if ((map.cFilterOut == null) || map.cFilterOut.filter(bundle)) {
                        // filterAfter is sampled at the filter/output boundary so the
                        // two time accumulators partition the work without overlap.
                        filterAfter = System.nanoTime();
                        if (builder != null) {
                            builder.process(bundle, this);
                        } else {
                            emit(bundle);
                        }
                        long bundleOutputTime = System.nanoTime() - filterAfter;
                        outputTime.add(bundleOutputTime);
                        outputMeter.mark();
                    } else {
                        filterAfter = System.nanoTime();
                        log.debug("closeable filterOut dropped bundle : {}", bundle);
                    }
                } else {
                    filterAfter = System.nanoTime();
                    log.debug("filterOut dropped bundle : {}", bundle);
                }
            } else {
                filterAfter = System.nanoTime();
                log.debug("filterIn dropped bundle : {}", bundle);
            }
            long bundleFilterTime = filterAfter - filterBefore;
            filterTime.add(bundleFilterTime);
            // inputs are counted after outputs to prevent spurious drop reporting
            inputMeter.mark();
            // print metrics if it has been long enough
            long time = System.nanoTime();
            if (stats && ((time - lastTick) > metricTick) && metricGate.compareAndSet(false, true)) {
                // lastTick is not volatile, so double check after fencing on "metricGate"
                if ((time - lastTick) > metricTick) {
                    printMetrics(time);
                }
                metricGate.set(false);
            }
        } catch (DataChannelError ex) {
            output.sourceError(ex);
            throw ex;
        } catch (RuntimeException ex) {
            log.warn("runtime error :: {}", BundleFilterDebugPrint.formatBundle(bundle));
            output.sourceError(DataChannelError.promote(ex));
            throw ex;
        } catch (Exception ex) {
            log.warn("handling error :: {}", BundleFilterDebugPrint.formatBundle(bundle));
            DataChannelError err = DataChannelError.promote(ex);
            output.sourceError(err);
            throw err;
        }
    }

    /**
     * Copies the input bundle into a fresh output bundle: through map.fields
     * when configured, otherwise a field-by-field copy keyed by field name.
     */
    private Bundle mapBundle(Bundle in) {
        Bundle out = output.createBundle();
        if (map.fields != null) {
            for (FieldFilter fieldFilter : map.fields) {
                fieldFilter.mapField(in, out);
            }
        } else {
            for (BundleField bundleField : in) {
                out.setValue(out.getFormat().getField(bundleField.getName()), in.getValue(bundleField));
            }
        }
        return out;
    }

    /** called directly or from builder */
    @Override public void emit(Bundle bundle) {
        log.debug("output: {}", bundle);
        output.send(bundle);
    }

    // These metrics are racey with respect to each other and the time range they cover, but no events are dropped, and
    // "extra" filtering costs for one tick will not show up in the next tick and should therefore be visible only once.
    @GuardedBy("metricGate")
    private void printMetrics(long time) {
        long nsCovered = time - lastTick;
        lastTick = time;
        double inputRateAtTick = inputMeter.oneMinuteRate();
        long inputCountAtTick = inputMeter.count();
        long inputCountForTick = inputCountAtTick - lastInputCount;
        lastInputCount = inputCountAtTick;
        double outputRateAtTick = outputMeter.oneMinuteRate();
        long outputCountAtTick = outputMeter.count();
        long outputCountForTick = outputCountAtTick - lastOutputCount;
        lastOutputCount = outputCountAtTick;
        double dropRateAtTick = Math.max(0, inputRateAtTick - outputRateAtTick);
        double dropPercentAtTick = (inputRateAtTick > 0) ? (dropRateAtTick / inputRateAtTick) : 0;
        long dropCountAtTick = Math.max(0, inputCountAtTick - outputCountAtTick);
        long dropCountForTick = Math.max(0, inputCountForTick - outputCountForTick);
        // NOTE(review): when inputCountForTick is 0 this is 0/0 -> NaN in the log
        // line; harmless but worth confirming that is intended.
        double dropPercentForTick = (double) dropCountForTick / (double) inputCountForTick;
        // filter time accounting
        long filterTimeAtTick = filterTime.sum();
        long filterTimeForTick = filterTimeAtTick - lastFilterTime;
        lastFilterTime = filterTimeAtTick;
        // Per-thread time is capped at the wall-clock span covered by this tick.
        long filterTimePerThread = Math.min(nsCovered, filterTimeForTick / threads);
        // output time accounting
        long outputTimeAtTick = outputTime.sum();
        long outputTimeForTick = outputTimeAtTick - lastOutputTime;
        lastOutputTime = outputTimeAtTick;
        long outputTimePerThread = Math.min(nsCovered, outputTimeForTick / threads);
        // input time accounting
        long inputTimePerThread = Math.max(0, nsCovered - filterTimePerThread - outputTimePerThread);
        // ensure percentages add up to [99, 100] to avoid confusing people
        long timeDivisor = Math.max(nsCovered, filterTimePerThread + outputTimePerThread);
        double filterPercentTime = (double) filterTimePerThread / (double) timeDivisor;
        double inputPercentTime = (double) inputTimePerThread / (double) timeDivisor;
        double outputPercentTime = (double) outputTimePerThread / (double) timeDivisor;
        String countsForTick = format("in=%,7d out=%,7d drop=%5.2f%%",
                                      inputCountForTick,
                                      outputCountForTick,
                                      dropPercentForTick * 100);
        String profileForTick = format("time {source=%3.0f%% output=%3.0f%% filters=%3.0f%%}",
                                       inputPercentTime * 100,
                                       outputPercentTime * 100,
                                       filterPercentTime * 100);
        String ratesAtTick = format("avg {in=%,7.0f out=%,7.0f drop=%5.2f%%}",
                                    inputRateAtTick,
                                    outputRateAtTick,
                                    dropPercentAtTick * 100);
        String totalsAtTick = format("totals {in=%,d out=%,d drop=%,d}",
                                     inputCountAtTick,
                                     outputCountAtTick,
                                     dropCountAtTick);
        StringBuilder fullMetrics =
                new StringBuilder(join(" | ", countsForTick, profileForTick, ratesAtTick, totalsAtTick));
        // Flag ticks whose span deviates from metricTick by more than 1%.
        if (Math.abs(nsCovered - metricTick) > (metricTick / 100)) {
            fullMetrics.append(" | ")
                       .append(format("(ABNORMAL TIME SPAN) ms=%,d", TimeUnit.NANOSECONDS.toMillis(nsCovered)));
        }
        log.info(fullMetrics.toString());
    }

    /** Interrupts and joins the feeder thread, then closes the map definition. */
    @Override
    public void close() throws InterruptedException {
        feeder.interrupt();
        feeder.join();
        map.close();
    }

    /** called on process exit */
    public void taskComplete() {
        if (builder != null) {
            builder.streamComplete(this);
            log.info("[streamComplete] builder");
        }
        // Print one final metrics line unless another thread holds the gate.
        if (metricGate.compareAndSet(false, true)) {
            printMetrics(System.nanoTime());
        }
        output.sendComplete();
        emitTaskExitState();
        maybeCloseJmx();
        boolean success = completionFuture.complete(null);
        log.info("[taskComplete] Triggered future: {}", success);
    }

    /* leave artifact for minion, if desired */
    private void emitTaskExitState() {
        if (emitTaskState) {
            try {
                TaskExitState exitState = new TaskExitState();
                exitState.setInput(inputMeter.count());
                exitState.setTotalEmitted(outputMeter.count());
                exitState.setMeanRate(outputMeter.meanRate());
                Files.write(CodecJSON.INSTANCE.encode(exitState), new File("job.exit"));
            } catch (Exception ex) {
                // Best effort: the exit artifact is advisory, so log and continue.
                log.error("", ex);
            }
        }
    }

    /**
     * If enabled, grabs an ephemeral port (bind/close on a throwaway socket)
     * and starts JMX remoting on it. Failures are logged, never fatal.
     */
    private void maybeInitJmx() {
        if (enableJmx) {
            try {
                ServerSocket ss = new ServerSocket();
                ss.setReuseAddress(true);
                ss.bind(null);
                int jmxport = ss.getLocalPort();
                ss.close();
                if (jmxport == -1) {
                    log.warn("[init.jmx] failed to get a port");
                } else {
                    try {
                        jmxremote = new MBeanRemotingSupport(jmxport);
                        jmxremote.start();
                        log.info("[init.jmx] port={}", jmxport);
                    } catch (Exception e) {
                        log.error("[init.jmx]", e);
                    }
                }
            } catch (IOException e) {
                log.error("", e);
            }
        }
    }

    /** Stops JMX remoting if it was started; idempotent via the null check. */
    private void maybeCloseJmx() {
        if (jmxremote != null) {
            try {
                jmxremote.stop();
                jmxremote = null;
            } catch (IOException e) {
                log.error("", e);
            }
        }
    }

    /** Union of the source's and output's writable root paths. */
    @Nonnull @Override
    public ImmutableList<Path> writableRootPaths() {
        return ImmutableList.<Path>builder()
                .addAll(source.writableRootPaths())
                .addAll(output.writableRootPaths())
                .build();
    }

    /** Future completed when taskComplete() runs; callers may block on it. */
    public CompletableFuture<Void> getCompletionFuture() {
        return completionFuture;
    }

    /**
     * When validateDirs is set, verifies that no writable directory is repeated
     * within the input section, within the output section, or shared between
     * the two. Throws IllegalArgumentException listing all violations at once.
     */
    public void validateWritableRootPaths() {
        if (!validateDirs) {
            return;
        }
        StringBuilder builder = new StringBuilder();
        ImmutableList<Path> sources = source.writableRootPaths();
        ImmutableList<Path> sinks = output.writableRootPaths();
        Set<Path> sourcesSet = new HashSet<>();
        Set<Path> sinksSet = new HashSet<>();
        for (Path source : sources) {
            if(!sourcesSet.add(source)) {
                String message = String.format("The writable directory is used in more than one location " +
                                               "in the input section: \"%s\"\n", source);
                builder.append(message);
            }
        }
        for (Path sink : sinks) {
            if(!sinksSet.add(sink)) {
                String message = String.format("The writable directory is used in more than one location " +
                                               "in the output section: \"%s\"\n", sink);
                builder.append(message);
            }
        }
        Sets.SetView<Path> intersect = Sets.intersection(sourcesSet, sinksSet);
        if (intersect.size() > 0) {
            String message = String.format("The following one or more directories are used in both an input " +
                                           "section and an output section: \"%s\"\n",
                                           intersect.toString());
            builder.append(message);
        }
        if (builder.length() > 0) {
            throw new IllegalArgumentException(builder.toString());
        }
    }

    /** Seconds to wait for outstanding processor tasks on shutdown. */
    public int getTaskFinishTimeout() {
        return taskFinishTimeout;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.security.token.block;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.datatransfer.InvalidEncryptionKeyException;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Time;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
/**
* BlockTokenSecretManager can be instantiated in 2 modes, master mode and slave
* mode. Master can generate new block keys and export block keys to slaves,
* while slaves can only import and use block keys received from master. Both
* master and slave can generate and verify block tokens. Typically, master mode
* is used by NN and slave mode is used by DN.
*/
@InterfaceAudience.Private
public class BlockTokenSecretManager extends
SecretManager<BlockTokenIdentifier> {
public static final Log LOG = LogFactory
.getLog(BlockTokenSecretManager.class);
// We use these in an HA setup to ensure that the pair of NNs produce block
// token serial numbers that are in different ranges.
private static final int LOW_MASK = ~(1 << 31);
public static final Token<BlockTokenIdentifier> DUMMY_TOKEN = new Token<BlockTokenIdentifier>();
private final boolean isMaster;
private int nnIndex;
/**
* keyUpdateInterval is the interval that NN updates its block keys. It should
* be set long enough so that all live DN's and Balancer should have sync'ed
* their block keys with NN at least once during each interval.
*/
private long keyUpdateInterval;
private volatile long tokenLifetime;
private int serialNo;
private BlockKey currentKey;
private BlockKey nextKey;
private final Map<Integer, BlockKey> allKeys;
private String blockPoolId;
private final String encryptionAlgorithm;
private final SecureRandom nonceGenerator = new SecureRandom();
public static enum AccessMode {
READ, WRITE, COPY, REPLACE
};
/**
* Constructor for slaves.
*
* @param keyUpdateInterval how often a new key will be generated
* @param tokenLifetime how long an individual token is valid
*/
public BlockTokenSecretManager(long keyUpdateInterval,
long tokenLifetime, String blockPoolId, String encryptionAlgorithm) {
this(false, keyUpdateInterval, tokenLifetime, blockPoolId,
encryptionAlgorithm);
}
/**
* Constructor for masters.
*
* @param keyUpdateInterval how often a new key will be generated
* @param tokenLifetime how long an individual token is valid
* @param nnIndex namenode index
* @param blockPoolId block pool ID
* @param encryptionAlgorithm encryption algorithm to use
*/
public BlockTokenSecretManager(long keyUpdateInterval,
long tokenLifetime, int nnIndex, String blockPoolId,
String encryptionAlgorithm) {
this(true, keyUpdateInterval, tokenLifetime, blockPoolId,
encryptionAlgorithm);
Preconditions.checkArgument(nnIndex == 0 || nnIndex == 1);
this.nnIndex = nnIndex;
setSerialNo(new SecureRandom().nextInt());
generateKeys();
}
private BlockTokenSecretManager(boolean isMaster, long keyUpdateInterval,
long tokenLifetime, String blockPoolId, String encryptionAlgorithm) {
this.isMaster = isMaster;
this.keyUpdateInterval = keyUpdateInterval;
this.tokenLifetime = tokenLifetime;
this.allKeys = new HashMap<Integer, BlockKey>();
this.blockPoolId = blockPoolId;
this.encryptionAlgorithm = encryptionAlgorithm;
generateKeys();
}
@VisibleForTesting
public synchronized void setSerialNo(int serialNo) {
this.serialNo = (serialNo & LOW_MASK) | (nnIndex << 31);
}
public void setBlockPoolId(String blockPoolId) {
this.blockPoolId = blockPoolId;
}
/** Initialize block keys */
private synchronized void generateKeys() {
if (!isMaster)
return;
/*
* Need to set estimated expiry dates for currentKey and nextKey so that if
* NN crashes, DN can still expire those keys. NN will stop using the newly
* generated currentKey after the first keyUpdateInterval, however it may
* still be used by DN and Balancer to generate new tokens before they get a
* chance to sync their keys with NN. Since we require keyUpdInterval to be
* long enough so that all live DN's and Balancer will sync their keys with
* NN at least once during the period, the estimated expiry date for
* currentKey is set to now() + 2 * keyUpdateInterval + tokenLifetime.
* Similarly, the estimated expiry date for nextKey is one keyUpdateInterval
* more.
*/
setSerialNo(serialNo + 1);
currentKey = new BlockKey(serialNo, Time.now() + 2
* keyUpdateInterval + tokenLifetime, generateSecret());
setSerialNo(serialNo + 1);
nextKey = new BlockKey(serialNo, Time.now() + 3
* keyUpdateInterval + tokenLifetime, generateSecret());
allKeys.put(currentKey.getKeyId(), currentKey);
allKeys.put(nextKey.getKeyId(), nextKey);
}
/** Export block keys, only to be used in master mode */
public synchronized ExportedBlockKeys exportKeys() {
if (!isMaster)
return null;
if (LOG.isDebugEnabled())
LOG.debug("Exporting access keys");
return new ExportedBlockKeys(true, keyUpdateInterval, tokenLifetime,
currentKey, allKeys.values().toArray(new BlockKey[0]));
}
private synchronized void removeExpiredKeys() {
long now = Time.now();
for (Iterator<Map.Entry<Integer, BlockKey>> it = allKeys.entrySet()
.iterator(); it.hasNext();) {
Map.Entry<Integer, BlockKey> e = it.next();
if (e.getValue().getExpiryDate() < now) {
it.remove();
}
}
}
/**
* Set block keys, only to be used in slave mode
*/
public synchronized void addKeys(ExportedBlockKeys exportedKeys)
throws IOException {
if (isMaster || exportedKeys == null)
return;
LOG.info("Setting block keys");
removeExpiredKeys();
this.currentKey = exportedKeys.getCurrentKey();
BlockKey[] receivedKeys = exportedKeys.getAllKeys();
for (int i = 0; i < receivedKeys.length; i++) {
if (receivedKeys[i] == null)
continue;
this.allKeys.put(receivedKeys[i].getKeyId(), receivedKeys[i]);
}
}
/**
* Update block keys if update time > update interval.
* @return true if the keys are updated.
*/
public synchronized boolean updateKeys(final long updateTime) throws IOException {
if (updateTime > keyUpdateInterval) {
return updateKeys();
}
return false;
}
/**
* Update block keys, only to be used in master mode
*/
synchronized boolean updateKeys() throws IOException {
if (!isMaster)
return false;
LOG.info("Updating block keys");
removeExpiredKeys();
// set final expiry date of retiring currentKey
allKeys.put(currentKey.getKeyId(), new BlockKey(currentKey.getKeyId(),
Time.now() + keyUpdateInterval + tokenLifetime,
currentKey.getKey()));
// update the estimated expiry date of new currentKey
currentKey = new BlockKey(nextKey.getKeyId(), Time.now()
+ 2 * keyUpdateInterval + tokenLifetime, nextKey.getKey());
allKeys.put(currentKey.getKeyId(), currentKey);
// generate a new nextKey
setSerialNo(serialNo + 1);
nextKey = new BlockKey(serialNo, Time.now() + 3
* keyUpdateInterval + tokenLifetime, generateSecret());
allKeys.put(nextKey.getKeyId(), nextKey);
return true;
}
/** Generate an block token for current user */
public Token<BlockTokenIdentifier> generateToken(ExtendedBlock block,
EnumSet<AccessMode> modes) throws IOException {
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
String userID = (ugi == null ? null : ugi.getShortUserName());
return generateToken(userID, block, modes);
}
/** Generate a block token for a specified user */
public Token<BlockTokenIdentifier> generateToken(String userId,
ExtendedBlock block, EnumSet<AccessMode> modes) throws IOException {
BlockTokenIdentifier id = new BlockTokenIdentifier(userId, block
.getBlockPoolId(), block.getBlockId(), modes);
return new Token<BlockTokenIdentifier>(id, this);
}
/**
* Check if access should be allowed. userID is not checked if null. This
* method doesn't check if token password is correct. It should be used only
* when token password has already been verified (e.g., in the RPC layer).
*/
public void checkAccess(BlockTokenIdentifier id, String userId,
ExtendedBlock block, AccessMode mode) throws InvalidToken {
if (LOG.isDebugEnabled()) {
LOG.debug("Checking access for user=" + userId + ", block=" + block
+ ", access mode=" + mode + " using " + id.toString());
}
if (userId != null && !userId.equals(id.getUserId())) {
throw new InvalidToken("Block token with " + id.toString()
+ " doesn't belong to user " + userId);
}
if (!id.getBlockPoolId().equals(block.getBlockPoolId())) {
throw new InvalidToken("Block token with " + id.toString()
+ " doesn't apply to block " + block);
}
if (id.getBlockId() != block.getBlockId()) {
throw new InvalidToken("Block token with " + id.toString()
+ " doesn't apply to block " + block);
}
if (isExpired(id.getExpiryDate())) {
throw new InvalidToken("Block token with " + id.toString()
+ " is expired.");
}
if (!id.getAccessModes().contains(mode)) {
throw new InvalidToken("Block token with " + id.toString()
+ " doesn't have " + mode + " permission");
}
}
/** Check if access should be allowed. userID is not checked if null */
public void checkAccess(Token<BlockTokenIdentifier> token, String userId,
ExtendedBlock block, AccessMode mode) throws InvalidToken {
BlockTokenIdentifier id = new BlockTokenIdentifier();
try {
id.readFields(new DataInputStream(new ByteArrayInputStream(token
.getIdentifier())));
} catch (IOException e) {
throw new InvalidToken(
"Unable to de-serialize block token identifier for user=" + userId
+ ", block=" + block + ", access mode=" + mode);
}
checkAccess(id, userId, block, mode);
if (!Arrays.equals(retrievePassword(id), token.getPassword())) {
throw new InvalidToken("Block token with " + id.toString()
+ " doesn't have the correct token password");
}
}
private static boolean isExpired(long expiryDate) {
return Time.now() > expiryDate;
}
/**
* check if a token is expired. for unit test only. return true when token is
* expired, false otherwise
*/
static boolean isTokenExpired(Token<BlockTokenIdentifier> token)
throws IOException {
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
long expiryDate = WritableUtils.readVLong(in);
return isExpired(expiryDate);
}
/** set token lifetime. */
public void setTokenLifetime(long tokenLifetime) {
this.tokenLifetime = tokenLifetime;
}
/**
* Create an empty block token identifier
*
* @return a newly created empty block token identifier
*/
@Override
public BlockTokenIdentifier createIdentifier() {
return new BlockTokenIdentifier();
}
/**
* Create a new password/secret for the given block token identifier.
*
* @param identifier
* the block token identifier
* @return token password/secret
*/
@Override
protected byte[] createPassword(BlockTokenIdentifier identifier) {
BlockKey key = null;
synchronized (this) {
key = currentKey;
}
if (key == null)
throw new IllegalStateException("currentKey hasn't been initialized.");
identifier.setExpiryDate(Time.now() + tokenLifetime);
identifier.setKeyId(key.getKeyId());
if (LOG.isDebugEnabled()) {
LOG.debug("Generating block token for " + identifier.toString());
}
return createPassword(identifier.getBytes(), key.getKey());
}
/**
* Look up the token password/secret for the given block token identifier.
*
* @param identifier
* the block token identifier to look up
* @return token password/secret as byte[]
* @throws InvalidToken
*/
@Override
public byte[] retrievePassword(BlockTokenIdentifier identifier)
throws InvalidToken {
if (isExpired(identifier.getExpiryDate())) {
throw new InvalidToken("Block token with " + identifier.toString()
+ " is expired.");
}
BlockKey key = null;
synchronized (this) {
key = allKeys.get(identifier.getKeyId());
}
if (key == null) {
throw new InvalidToken("Can't re-compute password for "
+ identifier.toString() + ", since the required block key (keyID="
+ identifier.getKeyId() + ") doesn't exist.");
}
return createPassword(identifier.getBytes(), key.getKey());
}
/**
* Generate a data encryption key for this block pool, using the current
* BlockKey.
*
* @return a data encryption key which may be used to encrypt traffic
* over the DataTransferProtocol
*/
public DataEncryptionKey generateDataEncryptionKey() {
byte[] nonce = new byte[8];
nonceGenerator.nextBytes(nonce);
BlockKey key = null;
synchronized (this) {
key = currentKey;
}
byte[] encryptionKey = createPassword(nonce, key.getKey());
return new DataEncryptionKey(key.getKeyId(), blockPoolId, nonce,
encryptionKey, Time.now() + tokenLifetime,
encryptionAlgorithm);
}
/**
 * Recreate an encryption key based on the given key id and nonce.
 *
 * @param keyId identifier of the secret key used to generate the encryption key.
 * @param nonce random value used to create the encryption key
 * @return the encryption key which corresponds to this (keyId, blockPoolId, nonce)
 * @throws InvalidEncryptionKeyException if the referenced block key is no
 *           longer held by this secret manager
 */
public byte[] retrieveDataEncryptionKey(int keyId, byte[] nonce)
    throws InvalidEncryptionKeyException {
  final BlockKey matchingKey;
  synchronized (this) {
    matchingKey = allKeys.get(keyId);
    // The failure message reads currentKey, so the check stays under the lock.
    if (matchingKey == null) {
      throw new InvalidEncryptionKeyException("Can't re-compute encryption key"
          + " for nonce, since the required block key (keyID=" + keyId
          + ") doesn't exist. Current key: " + currentKey.getKeyId());
    }
  }
  return createPassword(nonce, matchingKey.getKey());
}
@VisibleForTesting
// Test hook: overrides the key update interval; synchronized to publish the
// new value to threads that read keyUpdateInterval under this lock.
public synchronized void setKeyUpdateIntervalForTesting(long millis) {
this.keyUpdateInterval = millis;
}
@VisibleForTesting
// Test hook: drops every known block key so lookups fail afterwards.
// NOTE(review): unlike the sibling test hooks this is not synchronized —
// presumably safe only if allKeys is a concurrent map; confirm.
public void clearAllKeysForTesting() {
allKeys.clear();
}
@VisibleForTesting
// Test hook: exposes the current serial number; synchronized for a
// consistent read against concurrent key rolls.
public synchronized int getSerialNoForTesting() {
return serialNo;
}
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.integrationtests;
import org.drools.compiler.Cheese;
import org.drools.compiler.CommonTestMethodBase;
import org.drools.compiler.Person;
import org.drools.core.base.ClassObjectType;
import org.drools.core.base.DroolsQuery;
import org.drools.core.common.DoubleNonIndexSkipBetaConstraints;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.SingleBetaConstraints;
import org.drools.core.common.TripleNonIndexSkipBetaConstraints;
import org.drools.core.impl.KnowledgeBaseImpl;
import org.drools.core.impl.StatefulKnowledgeSessionImpl;
import org.drools.core.reteoo.AlphaNode;
import org.drools.core.reteoo.BetaMemory;
import org.drools.core.reteoo.CompositeObjectSinkAdapter;
import org.drools.core.reteoo.JoinNode;
import org.drools.core.reteoo.LeftInputAdapterNode;
import org.drools.core.reteoo.NotNode;
import org.drools.core.reteoo.ObjectSinkNodeList;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.reteoo.RightTuple;
import org.drools.core.rule.IndexableConstraint;
import org.drools.core.util.FastIterator;
import org.drools.core.util.index.LeftTupleIndexHashTable;
import org.drools.core.util.index.LeftTupleList;
import org.drools.core.util.index.RightTupleIndexHashTable;
import org.drools.core.util.index.RightTupleList;
import org.junit.Test;
import org.kie.api.definition.type.FactType;
import org.kie.api.runtime.rule.Row;
import org.kie.api.runtime.rule.Variable;
import org.kie.api.runtime.rule.ViewChangedEventListener;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.runtime.StatefulKnowledgeSession;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Tests that the Drools engine builds indexed alpha/beta node memories where
 * expected (hash vs. plain list memories) and that indexed joins, range
 * indexes and not-nodes behave correctly at runtime.
 *
 * <p>Fix applied: replaced the six uses of the boxing constructor
 * {@code new Integer(int)} (deprecated since JDK 9) with plain autoboxing;
 * behavior is identical.
 */
public class IndexingTest extends CommonTestMethodBase {
    @Test(timeout=10000)
    public void testBuildsIndexedAlphaNodes() {
        String drl = "";
        drl += "package org.drools.compiler.test\n";
        drl += "import org.drools.compiler.Person\n";
        drl += "rule test1\n";
        drl += "when\n";
        drl += " Person(name == \"Mark\", age == 37)\n";
        drl += " Person(name == \"Mark\", happy == true)\n";
        drl += "then\n";
        drl += "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString( drl );
        ObjectTypeNode otn = getObjectTypeNode(kbase, Person.class );
        InternalWorkingMemory wm = ((StatefulKnowledgeSessionImpl)kbase.newStatefulKnowledgeSession());
        // The shared "name == Mark" constraint becomes one alpha node whose
        // two children are reachable through a hashed sink adapter.
        AlphaNode alphaNode1 = ( AlphaNode ) otn.getSinkPropagator().getSinks()[0];
        CompositeObjectSinkAdapter sinkAdapter = (CompositeObjectSinkAdapter)alphaNode1.getSinkPropagator();
        ObjectSinkNodeList hashableSinks = sinkAdapter.getHashableSinks();
        assertNotNull(hashableSinks);
        assertEquals(2, hashableSinks.size());
        AlphaNode alphaNode2 = ( AlphaNode ) alphaNode1.getSinkPropagator().getSinks()[0];
        assertSame(hashableSinks.getFirst(), alphaNode2);
        AlphaNode alphaNode3 = ( AlphaNode ) alphaNode1.getSinkPropagator().getSinks()[1];
        assertSame(hashableSinks.getLast(), alphaNode3);
    }
    @Test(timeout=10000)
    public void testBuildsIndexedMemory() {
        // tests indexes are correctly built
        String drl = "";
        drl += "package org.drools.compiler.test\n";
        drl += "import org.drools.compiler.Person\n";
        drl += "global java.util.List list\n";
        drl += "rule test1\n";
        drl += "when\n";
        drl += " $p1 : Person($name : name )\n";
        drl += " $p2 : Person(name == $name)\n"; //indexed
        drl += " $p3 : Person(name == $p1.name)\n"; //indexed
        drl += " $p4 : Person(address.street == $p1.address.street)\n"; //not indexed
        drl += " $p5 : Person(address.street == $p1.name)\n"; // indexed
        //drl += " $p6 : Person( $name == name)\n"; // not indexed and won't compile
        drl += " $p7 : Person(addresses[\"key\"].street == $p1.name)\n"; // indexed
        drl += " $p8 : Person(addresses[0].street == $p1.name)\n"; // indexed
        drl += " $p9 : Person(name == $p1.address.street)\n"; //not indexed
        drl += " $p10 : Person(addresses[0].street + 'xx' == $p1.name)\n"; // indexed
        drl += " $p11 : Person(addresses[$p1].street == $p1.name)\n"; // not indexed
        drl += "then\n";
        drl += "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString( drl );
        ObjectTypeNode node = getObjectTypeNode(kbase, Person.class );
        InternalWorkingMemory wm = ((StatefulKnowledgeSessionImpl)kbase.newStatefulKnowledgeSession());
        // Walk the join chain in pattern order.
        LeftInputAdapterNode liaNode = (LeftInputAdapterNode) node.getSinkPropagator().getSinks()[0];
        JoinNode j2 = ( JoinNode ) liaNode.getSinkPropagator().getSinks()[0]; // $p2
        JoinNode j3 = ( JoinNode ) j2.getSinkPropagator().getSinks()[0]; // $p3
        JoinNode j4 = ( JoinNode ) j3.getSinkPropagator().getSinks()[0]; // $p4
        JoinNode j5 = ( JoinNode ) j4.getSinkPropagator().getSinks()[0]; // $p5
        //JoinNode j6 = ( JoinNode ) j5.getSinkPropagator().getSinks()[0]; // $p6 // won't compile
        JoinNode j7 = ( JoinNode ) j5.getSinkPropagator().getSinks()[0]; // $p7
        JoinNode j8 = ( JoinNode ) j7.getSinkPropagator().getSinks()[0]; // $p8
        JoinNode j9 = ( JoinNode ) j8.getSinkPropagator().getSinks()[0]; // $p9
        JoinNode j10 = ( JoinNode ) j9.getSinkPropagator().getSinks()[0]; // $p10
        JoinNode j11 = ( JoinNode ) j10.getSinkPropagator().getSinks()[0]; // $p11
        // Indexed joins get hash-table memories; non-indexed ones plain lists.
        SingleBetaConstraints c = ( SingleBetaConstraints ) j2.getRawConstraints();
        assertEquals( "$name", ((IndexableConstraint)c.getConstraint()).getFieldIndex().getDeclaration().getIdentifier() );
        assertTrue( c.isIndexed() );
        BetaMemory bm = ( BetaMemory ) wm.getNodeMemory( j2 );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleIndexHashTable);
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleIndexHashTable );
        c = ( SingleBetaConstraints ) j3.getRawConstraints();
        assertEquals( "name", ((IndexableConstraint)c.getConstraint()).getFieldIndex().getDeclaration().getIdentifier() );
        assertTrue( c.isIndexed() );
        bm = ( BetaMemory ) wm.getNodeMemory( j3 );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleIndexHashTable );
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleIndexHashTable );
        c = ( SingleBetaConstraints ) j4.getRawConstraints();
        assertEquals("$p1", c.getConstraint().getRequiredDeclarations()[0].getIdentifier());
        assertFalse( c.isIndexed() );
        bm = ( BetaMemory ) wm.getNodeMemory( j4 );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleList );
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleList );
        c = ( SingleBetaConstraints ) j5.getRawConstraints();
        assertEquals("name", ((IndexableConstraint)c.getConstraint()).getFieldIndex().getDeclaration().getIdentifier());
        assertTrue( c.isIndexed() );
        bm = ( BetaMemory ) wm.getNodeMemory( j5 );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleIndexHashTable );
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleIndexHashTable );
        // won't compile
        // c = ( SingleBetaConstraints ) j6.getRawConstraints();
        // assertEquals( "name", ((VariableConstraint)c.getConstraint()).getRequiredDeclarations()[0].getIdentifier() );
        // assertFalse( c.isIndexed() );
        // bm = ( BetaMemory ) wm.getNodeMemory( j6 );
        // assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleList );
        // assertTrue( bm.getRightTupleMemory() instanceof RightTupleList );
        c = ( SingleBetaConstraints ) j7.getRawConstraints();
        assertEquals("name", ((IndexableConstraint)c.getConstraint()).getFieldIndex().getDeclaration().getIdentifier());
        assertTrue( c.isIndexed() );
        bm = ( BetaMemory ) wm.getNodeMemory( j7 );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleIndexHashTable );
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleIndexHashTable );
        c = ( SingleBetaConstraints ) j8.getRawConstraints();
        assertEquals("name", ((IndexableConstraint)c.getConstraint()).getFieldIndex().getDeclaration().getIdentifier());
        assertTrue( c.isIndexed() );
        bm = ( BetaMemory ) wm.getNodeMemory( j8 );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleIndexHashTable );
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleIndexHashTable );
        c = ( SingleBetaConstraints ) j9.getRawConstraints();
        assertEquals("$p1", c.getConstraint().getRequiredDeclarations()[0].getIdentifier());
        assertFalse( c.isIndexed() );
        bm = ( BetaMemory ) wm.getNodeMemory( j9 );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleList );
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleList );
        c = ( SingleBetaConstraints ) j10.getRawConstraints();
        assertEquals("name", ((IndexableConstraint)c.getConstraint()).getFieldIndex().getDeclaration().getIdentifier());
        assertTrue( c.isIndexed() );
        bm = ( BetaMemory ) wm.getNodeMemory( j10 );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleIndexHashTable );
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleIndexHashTable );
        c = ( SingleBetaConstraints ) j11.getRawConstraints();
        assertEquals("$p1", c.getConstraint().getRequiredDeclarations()[0].getIdentifier());
        assertFalse( c.isIndexed() );
        bm = ( BetaMemory ) wm.getNodeMemory( j11 );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleList);
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleList );
    }
    @Test(timeout=10000)
    public void testIndexingOnQueryUnification() throws Exception {
        String str = "";
        str += "package org.drools.compiler.test \n";
        str += "import org.drools.compiler.Person \n";
        str += "query peeps( String $name, String $likes, String $street) \n";
        str += " $p : Person( $name := name, $likes := likes, $street := address.street ) \n";
        str += "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
        List<ObjectTypeNode> nodes = ((KnowledgeBaseImpl)kbase).getRete().getObjectTypeNodes();
        ObjectTypeNode node = null;
        for ( ObjectTypeNode n : nodes ) {
            if ( ((ClassObjectType)n.getObjectType()).getClassType() == DroolsQuery.class ) {
                node = n;
                break;
            }
        }
        InternalWorkingMemory wm = ((StatefulKnowledgeSessionImpl)kbase.newStatefulKnowledgeSession());
        AlphaNode alphanode = ( AlphaNode ) node.getSinkPropagator().getSinks()[0];
        LeftInputAdapterNode liaNode = (LeftInputAdapterNode) alphanode.getSinkPropagator().getSinks()[0];
        JoinNode j = ( JoinNode ) liaNode.getSinkPropagator().getSinks()[0]; // $p2
        TripleNonIndexSkipBetaConstraints c = ( TripleNonIndexSkipBetaConstraints ) j.getRawConstraints();
        //assertEquals( "$name", ((VariableConstraint)c.getConstraint()).getRequiredDeclarations()[0].getIdentifier() );
        assertTrue( c.isIndexed() );
        BetaMemory bm = ( BetaMemory ) wm.getNodeMemory( j );
        assertTrue( bm.getLeftTupleMemory() instanceof LeftTupleIndexHashTable );
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleIndexHashTable );
    }
    @Test(timeout=10000)
    public void testIndexingOnQueryUnificationWithNot() throws Exception {
        String str = "";
        str += "package org.drools.compiler.test \n";
        str += "import org.drools.compiler.Person \n";
        str += "query peeps( String $name, int $age ) \n";
        str += " not $p2 : Person( $name := name, age > $age ) \n";
        str += "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
        List<ObjectTypeNode> nodes = ((KnowledgeBaseImpl)kbase).getRete().getObjectTypeNodes();
        ObjectTypeNode node = null;
        for ( ObjectTypeNode n : nodes ) {
            if ( ((ClassObjectType)n.getObjectType()).getClassType() == DroolsQuery.class ) {
                node = n;
                break;
            }
        }
        StatefulKnowledgeSessionImpl wm = ((StatefulKnowledgeSessionImpl)kbase.newStatefulKnowledgeSession());
        AlphaNode alphanode = ( AlphaNode ) node.getSinkPropagator().getSinks()[0];
        LeftInputAdapterNode liaNode = (LeftInputAdapterNode) alphanode.getSinkPropagator().getSinks()[0];
        NotNode n = (NotNode) liaNode.getSinkPropagator().getSinks()[0];
        DoubleNonIndexSkipBetaConstraints c = (DoubleNonIndexSkipBetaConstraints) n.getRawConstraints();
        //assertEquals( "$name", ((VariableConstraint)c.getConstraint()).getRequiredDeclarations()[0].getIdentifier() );
        assertTrue( c.isIndexed() );
        BetaMemory bm = ( BetaMemory ) wm.getNodeMemory( n );
        System.out.println( bm.getLeftTupleMemory().getClass() );
        System.out.println( bm.getRightTupleMemory().getClass() );
        assertTrue(bm.getLeftTupleMemory() instanceof LeftTupleIndexHashTable);
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleIndexHashTable );
        // Counters for live-query callbacks (autoboxed; was new Integer(0)).
        final Map<String, Integer> map = new HashMap<String, Integer>();
        map.put("inserted", 0);
        map.put("deleted", 0);
        map.put("updated", 0);
        wm.openLiveQuery("peeps", new Object[] {Variable.v, 99 }, new ViewChangedEventListener() {
            @Override
            public void rowInserted(Row row) {
                System.out.println( "inserted" );
                Integer integer = map.get("inserted");
                map.put("inserted", integer.intValue() + 1 );
            }
            @Override
            public void rowDeleted(Row row) {
                System.out.println( "deleted" );
                Integer integer = map.get("deleted");
                map.put("deleted", integer.intValue() + 1 );
            }
            @Override
            public void rowUpdated(Row row) {
                System.out.println( "updated" );
                Integer integer = map.get("updated");
                map.put("updated", integer.intValue() + 1 );
            }
        });
        System.out.println( "inserted: " + map.get("inserted"));
        System.out.println( "deleted: " + map.get("deleted"));
        System.out.println( "updated: " + map.get("updated"));
        Map<String, InternalFactHandle> peeps = new HashMap<String, InternalFactHandle>();
        Person p = null;
        InternalFactHandle fh = null;
        int max = 3;
        // 1 matched, prior to any insertions
        assertEquals( 1, map.get("inserted").intValue() );
        assertEquals( 0, map.get("deleted").intValue() );
        assertEquals( 0, map.get("updated").intValue() );
        // x0 is the blocker
        for ( int i = 0; i < max; i++ ) {
            p = new Person( "x" + i, 100);
            fh = ( InternalFactHandle ) wm.insert( p );
            wm.fireAllRules();
            peeps.put(p.getName(), fh);
        }
        // insertions cause 1 deletion
        assertEquals( 1, map.get("inserted").intValue() );
        assertEquals( 1, map.get("deleted").intValue() );
        assertEquals( 0, map.get("updated").intValue() );
        // each x is blocker in turn up to x99
        for ( int i = 0; i < (max-1); i++ ) {
            fh = peeps.get("x" + i);
            p = (Person) fh.getObject();
            p.setAge( 90 );
            wm.update( fh, p );
            wm.fireAllRules();
            assertEquals( "i=" + i, 1, map.get("inserted").intValue() ); // make sure this doesn't change
        }
        // no change
        assertEquals( 1, map.get("inserted").intValue() );
        assertEquals( 1, map.get("deleted").intValue() );
        assertEquals( 0, map.get("updated").intValue() );
        // x99 is still the blocker, everything else is just added
        for ( int i = 0; i < (max-1); i++ ) {
            fh = peeps.get("x" + i);
            p = (Person) fh.getObject();
            p.setAge( 102 );
            wm.update( fh, p );
            wm.fireAllRules();
            assertEquals( "i=" + i, 1, map.get("inserted").intValue() ); // make sure this doesn't change
        }
        // no change
        assertEquals( 1, map.get("inserted").intValue() );
        assertEquals( 1, map.get("deleted").intValue() );
        assertEquals( 0, map.get("updated").intValue() );
        // x99 is still the blocker
        for ( int i = (max-2); i >= 0; i-- ) {
            fh = peeps.get("x" + i);
            p = (Person) fh.getObject();
            p.setAge( 90 );
            wm.update( fh, p );
            wm.fireAllRules();
            assertEquals( "i=" + i, 1, map.get("inserted").intValue() ); // make sure this doesn't change
        }
        // move x99, should no longer be a blocker, now it can increase
        fh = peeps.get("x" + (max-1));
        p = (Person) fh.getObject();
        p.setAge( 90 );
        wm.update( fh, p );
        wm.fireAllRules();
        assertEquals( 2, map.get("inserted").intValue() );
    }
    @Test(timeout=10000)
    public void testFullFastIteratorResume() throws Exception {
        String str = "";
        str += "package org.drools.compiler.test \n";
        str += "import org.drools.compiler.Person \n";
        str += "query peeps( String $name, int $age ) \n";
        str += " not $p2 : Person( $name := name, age > $age ) \n";
        str += "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString( str );
        List<ObjectTypeNode> nodes = ((KnowledgeBaseImpl)kbase).getRete().getObjectTypeNodes();
        ObjectTypeNode node = null;
        for ( ObjectTypeNode n : nodes ) {
            if ( ((ClassObjectType)n.getObjectType()).getClassType() == DroolsQuery.class ) {
                node = n;
                break;
            }
        }
        StatefulKnowledgeSessionImpl wm = ((StatefulKnowledgeSessionImpl)kbase.newStatefulKnowledgeSession());
        AlphaNode alphanode = ( AlphaNode ) node.getSinkPropagator().getSinks()[0];
        LeftInputAdapterNode liaNode = (LeftInputAdapterNode) alphanode.getSinkPropagator().getSinks()[0];
        NotNode n = (NotNode) liaNode.getSinkPropagator().getSinks()[0];
        DoubleNonIndexSkipBetaConstraints c = (DoubleNonIndexSkipBetaConstraints) n.getRawConstraints();
        //assertEquals( "$name", ((VariableConstraint)c.getConstraint()).getRequiredDeclarations()[0].getIdentifier() );
        assertTrue( c.isIndexed() );
        BetaMemory bm = ( BetaMemory ) wm.getNodeMemory( n );
        System.out.println( bm.getLeftTupleMemory().getClass() );
        System.out.println( bm.getRightTupleMemory().getClass() );
        assertTrue(bm.getLeftTupleMemory() instanceof LeftTupleIndexHashTable);
        assertTrue( bm.getRightTupleMemory() instanceof RightTupleIndexHashTable );
        // Counters (autoboxed; was new Integer(0)) — unused by the no-op listener.
        final Map<String, Integer> map = new HashMap<String, Integer>();
        map.put("inserted", 0);
        map.put("deleted", 0);
        map.put("updated", 0);
        wm.openLiveQuery("peeps", new Object[] {Variable.v, 99 }, new ViewChangedEventListener() {
            @Override
            public void rowInserted(Row row) {
            }
            @Override
            public void rowDeleted(Row row) {
            }
            @Override
            public void rowUpdated(Row row) {
            }
        });
        Map<String, InternalFactHandle> peeps = new HashMap<String, InternalFactHandle>();
        Person p = new Person( "x0", 100);
        InternalFactHandle fh = ( InternalFactHandle ) wm.insert( p );
        peeps.put(p.getName(), fh);
        for ( int i = 1; i < 100; i++ ) {
            p = new Person( "x" + i, 101);
            fh = ( InternalFactHandle ) wm.insert( p );
            wm.fireAllRules();
            peeps.put(p.getName(), fh);
        }
        List<RightTuple> list = new ArrayList<RightTuple>(100);
        FastIterator it = n.getRightIterator( bm.getRightTupleMemory() );
        for ( RightTuple rt =n.getFirstRightTuple(null, bm.getRightTupleMemory(), null, it); rt != null; rt = (RightTuple)it.next(rt) ) {
            list.add(rt);
        }
        assertEquals( 100, list.size() );
        // check we can resume from each entry in the list above.
        for ( int i = 0; i < 100; i++ ) {
            RightTuple rightTuple = list.get(i);
            it = n.getRightIterator( bm.getRightTupleMemory(), rightTuple ); // resumes from the current rightTuple
            int j = i + 1;
            for ( RightTuple rt = ( RightTuple ) it.next(rightTuple); rt != null; rt = (RightTuple)it.next(rt) ) {
                assertSame( list.get(j), rt);
                j++;
            }
        }
    }
    /** Finds the ObjectTypeNode for the given fact class, or null if absent. */
    public static ObjectTypeNode getObjectTypeNode(KnowledgeBase kbase, Class<?> nodeClass) {
        List<ObjectTypeNode> nodes = ((KnowledgeBaseImpl)kbase).getRete().getObjectTypeNodes();
        for ( ObjectTypeNode n : nodes ) {
            if ( ((ClassObjectType)n.getObjectType()).getClassType() == nodeClass ) {
                return n;
            }
        }
        return null;
    }
    @Test(timeout=10000)
    public void testRangeIndex() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1\n" +
                "when\n" +
                "   $s : String()" +
                "   exists Cheese( type > $s )\n" +
                "then\n" +
                "   System.out.println( $s );\n" +
                "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        ksession.insert( "cheddar" );
        ksession.insert( "gorgonzola" );
        ksession.insert( "stilton" );
        ksession.insert( new Cheese( "gorgonzola", 10 ) );
        assertEquals(1, ksession.fireAllRules());
        ksession.dispose();
    }
    @Test(timeout=10000)
    public void testRangeIndex2() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1\n" +
                "when\n" +
                "   $s : String()" +
                "   exists Cheese( type < $s )\n" +
                "then\n" +
                "   System.out.println( $s );\n" +
                "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        ksession.insert( "gorgonzola" );
        ksession.insert( new Cheese( "cheddar", 10 ) );
        ksession.insert( new Cheese( "gorgonzola", 10 ) );
        ksession.insert( new Cheese( "stilton", 10 ) );
        assertEquals(1, ksession.fireAllRules());
        ksession.dispose();
    }
    @Test(timeout=10000)
    public void testNotNode() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 salience 10\n" +
                "when\n" +
                "   Person( $age : age )" +
                "   not Cheese( price < $age )\n" +
                "then\n" +
                "   System.out.println( $age );\n" +
                "end\n" +
                "rule R2 salience 1\n" +
                "when\n" +
                "   $p : Person( age == 10 )" +
                "then\n" +
                "   modify($p) { setAge(15); }\n" +
                "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        ksession.insert( new Person( "mario", 10 ) );
        ksession.insert( new Cheese( "gorgonzola", 20 ) );
        assertEquals(3, ksession.fireAllRules());
        ksession.dispose();
    }
    @Test(timeout=10000)
    public void testNotNodeModifyRight() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 salience 10 when\n" +
                "   Person( $age : age )\n" +
                "   not Cheese( price < $age )\n" +
                "then\n" +
                "   System.out.println( $age );\n" +
                "end\n" +
                "rule R3 salience 5 when\n" +
                "   $c : Cheese( price == 8 )\n" +
                "then\n" +
                "   modify($c) { setPrice(15); }\n" +
                "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        ksession.insert( new Person( "A", 10 ) );
        ksession.insert( new Cheese( "C1", 20 ) );
        ksession.insert( new Cheese( "C2", 8 ) );
        assertEquals(2, ksession.fireAllRules());
        ksession.dispose();
    }
    @Test(timeout=10000)
    public void testRange() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 salience 10 when\n" +
                "   Person( $age : age, $doubleAge : doubleAge )\n" +
                "   not Cheese( this.price > $age && < $doubleAge )\n" +
                "then\n" +
                "   System.out.println( $age );\n" +
                "end\n" +
                "rule R3 salience 5 when\n" +
                "   $c : Cheese( price == 15 )\n" +
                "then\n" +
                "   System.out.println( \"modify\" );\n" +
                "   modify($c) { setPrice(8); }\n" +
                "end\n";
        KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        ksession.insert( new Person( "A", 10 ) );
        ksession.insert( new Cheese( "C1", 30 ) );
        ksession.insert( new Cheese( "C2", 15 ) );
        assertEquals(2, ksession.fireAllRules());
        ksession.dispose();
    }
    @Test(timeout=10000)
    public void testRange2() throws Exception {
        String rule = "package org.drools.compiler.test\n" +
                "declare A\n" +
                "    a: int\n" +
                "end\n" +
                "declare B\n" +
                "    b: int\n" +
                "end\n" +
                "declare C\n" +
                "    c: int\n" +
                "end\n" +
                "rule R1 when\n" +
                "   A( $a : a )\n" +
                "   B( $b : b )\n" +
                "   exists C( c > $a && < $b )\n" +
                "then\n" +
                "   System.out.println( $a + \", \" + $b );\n" +
                "end";
        KnowledgeBase kbase = loadKnowledgeBaseFromString( rule );
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        FactType aType = kbase.getFactType( "org.drools.compiler.test", "A" );
        FactType bType = kbase.getFactType( "org.drools.compiler.test", "B" );
        FactType cType = kbase.getFactType( "org.drools.compiler.test", "C" );
        Object a1 = aType.newInstance();
        aType.set( a1, "a", 5 );
        ksession.insert( a1 );
        Object a2 = aType.newInstance();
        aType.set( a2, "a", 11 );
        ksession.insert( a2 );
        Object b1 = bType.newInstance();
        bType.set( b1, "b", 10 );
        ksession.insert( b1 );
        Object b2 = bType.newInstance();
        bType.set( b2, "b", 6 );
        ksession.insert( b2 );
        Object c = cType.newInstance();
        cType.set( c, "c", 7 );
        ksession.insert( c );
        ksession.fireAllRules();
    }
}
| |
package no.uio.ifi.qure.relation;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
import java.util.Set;
import java.util.HashSet;
import java.util.Map;
import java.util.HashMap;
import no.uio.ifi.qure.Config;
import no.uio.ifi.qure.util.*;
import no.uio.ifi.qure.traversal.*;
import no.uio.ifi.qure.space.*;
public class Overlaps extends AtomicRelation {
private final Map<Integer, Integer> argRole;
private final int[] args;
public Overlaps(int r, int a) {
argRole= new HashMap<Integer, Integer>();
argRole.put(a, r);
args = new int[]{a};
}
public Overlaps(int r0, int r1, int a0, int a1) {
argRole= new HashMap<Integer, Integer>();
if (a0 == a1) {
argRole.put(a0, (r0 | r1));
} else {
argRole.put(a0, r0);
argRole.put(a1, r1);
}
args = new int[]{a0, a1};
}
public Overlaps(int[] rs, int[] as) {
argRole = new HashMap<Integer, Integer>();
for (int i = 0; i < as.length; i++) {
argRole.put(as[i], 0);
}
for (int i = 0; i < as.length; i++) {
argRole.put(as[i], argRole.get(as[i]) | rs[i]);
}
args = as;
}
public Overlaps(Map<Integer, Integer> argRole) {
this.argRole = argRole;
args = new int[argRole.keySet().size()];
int i = 0;
for (Integer k : argRole.keySet()) args[i++] = k;
}
public int getArg(int i) {
return args[i];
}
public boolean relatesArg(int arg) {
return argRole.containsKey(arg);
}
public String toBTSQL(String[] vals, Config config) { //TODO
if (getArity() == 2) {
return toBTSQL2(vals, config);
} else {
return null; // Base query on implied Overlaps of lower arity
}
}
private String makeBlockOverlapsWhere(String table0, String table1) {
String query = "";
query += " ((" + table1 + ".block >= (" + table0 + ".block & (" + table0 + ".block-1)) AND \n";
query += " " + table1 + ".block <= (" + table0 + ".block | (" + table0 + ".block-1))) OR \n";
query += " " + table1 + ".block = ((" + table0 + ".block & ~(V.n-1)) | V.n))";
return query;
}
private String toBTSQL2(String[] vals, Config config) {
String[] sfw = makeSelectFromWhereParts(config.btTableName, config.uriColumn, vals);
String from = sfw[1] + ",\n";
from += "(" + makeValuesFrom(config) + ") AS V(n)";
String query = "SELECT DISTINCT " + sfw[0] + "\n";
query += " FROM " + from + "\n";
query += " WHERE ";
if (!sfw[2].equals("")) query += sfw[2] + " AND \n";
for (int i = 0; i < args.length; i++) {
if (argRole.get(args[i]) != 0) {
query += "T" + args[i] + ".role & " + (argRole.get(args[i]) << 1) + " != 0 AND\n";
}
}
if (vals[0] != null) {
query += makeBlockOverlapsWhere("T" + args[1], "T" + args[0]);
} else {
query += makeBlockOverlapsWhere("T" + args[0], "T" + args[1]);
}
return query;
}
public String toGeoSQL(String[] vals, Config config, SpaceProvider sp) {
return sp.toSQL(this, vals, config);
}
public String toString() {
String res = "ov(";
String delim = "";
for (int arg : argRole.keySet()) {
res += delim + "<" + argRole.get(arg) + "," + arg + ">";
if (delim.equals("")) delim = ", ";
}
return res + ")";
}
public boolean isIntrinsic(Integer[] tuple) {
SID[] sidTuple = toSIDs(tuple);
if (getArity() < 2) return false;
Set<SID> sids = new HashSet<SID>();
for (int i = 0; i < sidTuple.length; i++) {
if (sidTuple[i] == null) continue;
for (int role : getStricter(new HashSet<Integer>(argRole.values()), sidTuple[i].getRole())) {
SID s = new SID(sidTuple[i].getID(), role);
if (sids.contains(s)) {
return true;
}
sids.add(s);
}
}
return false;
}
public Integer getArgRole(Integer arg) { return argRole.get(arg); }
public Set<Integer> getArguments() { return argRole.keySet(); }
public Set<Integer> getRoles() { return new HashSet<Integer>(argRole.values()); }
public Set<Map<Integer, Integer>> impliesNonEmpty(AtomicRelation r) {
if (!(r instanceof Overlaps)) {
return null;
} else {
if (getArity() < r.getArity()) return null;
Overlaps ovr = (Overlaps) r;
Set<Map<Integer, Integer>> unifiers = new HashSet<Map<Integer, Integer>>();
Map<Integer, Integer> unifier = new HashMap<Integer, Integer>();
Set<Integer> rArgs = new HashSet<Integer>(ovr.argRole.keySet());
Set<Integer> tArgs = new HashSet<Integer>(argRole.keySet());
unifyOverlaps(unifiers, unifier, tArgs, rArgs, ovr);
return (unifiers.isEmpty()) ? null : unifiers;
}
}
private boolean redundantUnifier(Map<Integer, Integer> unifier, Set<Map<Integer, Integer>> unifiers, AtomicRelation or) {
if (or.getArity() == 1) return false;
Set<SID> sids = new HashSet<SID>();
for (Integer val : unifier.values()) {
sids.add(new SID(val, or.getArgRole(val)));
}
Set<Integer> keys = unifier.keySet();
for (Map<Integer, Integer> other : unifiers) {
Set<Integer> oKeys = other.keySet();
if (keys.equals(oKeys)) {
Set<SID> oSids = new HashSet<SID>();
for (Integer oVal : other.values()) {
oSids.add(new SID(oVal, or.getArgRole(oVal)));
}
if (sids.equals(oSids)) return true;
}
}
return false;
}
private void unifyOverlaps(Set<Map<Integer, Integer>> unifiers, Map<Integer, Integer> unifier,
Set<Integer> tArgs, Set<Integer> rArgs, Overlaps r) {
if (rArgs.isEmpty()) { // Unifier found for all variables
if (!redundantUnifier(unifier, unifiers, r)) {
unifiers.add(unifier);
}
return;
}
// Pick some argument which we try to unify with next
Pair<Integer, Set<Integer>> oneArg = Utils.getSome(rArgs);
Integer rArg = oneArg.fst;
// Generate a set of all possible unifications for rArg
Set<Integer> possible = new HashSet<Integer>();
for (Integer tArg : tArgs) {
if (stricterRole(argRole.get(tArg), r.argRole.get(rArg))) {
possible.add(tArg);
}
}
// Then recusively try to unify rest of variables with each unification
for (Integer tArg : possible) {
Set<Integer> tArgsNew = new HashSet<Integer>(tArgs);
tArgsNew.remove(tArg);
Map<Integer, Integer> newUnifier = new HashMap<Integer, Integer>(unifier);
newUnifier.put(tArg, rArg);
unifyOverlaps(unifiers, newUnifier, tArgsNew, oneArg.snd, r);
}
}
@Override
public boolean equals(Object o) {
if (!(o instanceof Overlaps)) return false;
Overlaps oov = (Overlaps) o;
return argRole.equals(oov.argRole);
}
@Override
public int hashCode() {
int hc = 0;
for (Integer r : argRole.keySet()) {
hc += r + argRole.get(r).hashCode();
}
return hc;
}
public boolean eval(Space[] spaceArgs) {
Pair<Space, Set<Space>> sm = Utils.getSome(Utils.asSet(spaceArgs));
return sm.fst.overlaps(sm.snd);
}
public Set<AtomicRelation> getNormalizedAtomicRelations() {
Map<Integer, Integer> argNormMap = new HashMap<Integer, Integer>();
int i = 0;
for (Integer arg : argRole.keySet()) {
if (!argNormMap.containsKey(arg)) {
argNormMap.put(arg, i);
i++;
}
}
Map<Integer, Integer> normalizedArgRole = new HashMap<Integer, Integer>();
for (Integer arg : argRole.keySet()) {
normalizedArgRole.put(argNormMap.get(arg), argRole.get(arg));
}
Set<AtomicRelation> rels = new HashSet<AtomicRelation>();
rels.add(new Overlaps(normalizedArgRole));
return rels;
}
private Map<Integer, Set<SID>> getRoleToSID(Set<SID> tuple) {
Map<Integer, Set<SID>> roleToSID = new HashMap<Integer, Set<SID>>();
for (Integer role : getRoles()) {
Set<SID> sids = new HashSet<SID>();
for (SID s : tuple) {
if (s != null && strictnessRelated(s.getRole(), role)) {
sids.add(s);
}
}
roleToSID.put(role, sids);
}
return roleToSID;
}
public boolean compatible(Set<SID> tuple) {
if (tuple.size() != getArity()) return false;
Map<Integer, Set<SID>> roleToSID = getRoleToSID(tuple);
for (Integer pos : argRole.keySet()) {
Set<SID> possible = roleToSID.get(argRole.get(pos));
if (possible == null || possible.isEmpty()) {
return false;
} else {
possible = Utils.getSome(possible).snd; // Remove abitrary element from possible
}
}
return true;
}
private void generateAllOrderedTuples(Set<SID[]> tuples, Map<Integer, Set<SID>> roleToSID,
Set<Integer> remPos, SID[] tuple) {
if (remPos.isEmpty()) {
tuples.add(Arrays.copyOf(tuple, tuple.length));
} else {
Pair<Integer, Set<Integer>> somePos = Utils.getSome(remPos);
for (SID s : roleToSID.get(argRole.get(somePos.fst))) {
tuple[somePos.fst] = s;
generateAllOrderedTuples(tuples, roleToSID, somePos.snd, tuple);
}
}
}
/**
 * Enumerates every ordered assignment of the tuple's SIDs to this relation's
 * argument positions that respects the positions' roles.
 */
public Set<SID[]> generateAllOrderedTuples(Set<SID> tuple) {
    Set<SID[]> orderedTuples = new HashSet<SID[]>();
    generateAllOrderedTuples(orderedTuples, getRoleToSID(tuple), argRole.keySet(),
                             new SID[getArity()]);
    return orderedTuples;
}
/**
 * Evaluates this relation (which must be a unary role-relation) against every
 * known space: emits one single-column row per SID whose role matches the
 * relation's single role.
 */
public Table evalAll(SpaceProvider spaces) {
    // Must be a unary role-relation
    Table table = new Table(this);
    Integer role = Utils.unpackSingleton(argRole.values());
    for (SID sid : spaces.keySet()) {
        // Use equals(), not ==: if getRole() returns a boxed Integer, ==
        // compares references and silently fails outside the small-integer
        // cache. equals() is value-based for both int and Integer returns.
        if (role.equals(sid.getRole())) {
            Integer[] tuple = new Integer[1];
            tuple[0] = sid.getID();
            table.addTuple(tuple);
        }
    }
    return table;
}
/**
 * Filters the candidate tuples: keeps a tuple when the relation holds for it.
 * Unary relations only need a role/SID membership check; higher arities are
 * evaluated on the actual spaces.
 *
 * @param spaces   provider of spaces keyed by SID
 * @param possible candidate tuples to test
 */
public Table evalAll(SpaceProvider spaces, Table possible) {
    Table table = new Table(this);
    for (Integer[] tuple : possible.getTuples()) {
        if (getArity() == 1) {
            // Membership test is sufficient for a unary role-relation.
            if (spaces.keySet().contains(new SID(tuple[0], Utils.unpackSingleton(argRole.values())))) {
                table.addTuple(tuple);
            }
        } else if (eval(toSpaces(toSIDs(tuple), spaces))) {
            table.addTuple(tuple);
        }
    }
    return table;
}
}
| |
package com.netflix.governator;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.AbstractModule;
import com.google.inject.ProvisionException;
import com.google.inject.matcher.Matchers;
import com.google.inject.multibindings.Multibinder;
import com.google.inject.spi.ProvisionListener;
import com.netflix.governator.annotations.SuppressLifecycleUninitialized;
import com.netflix.governator.internal.GovernatorFeatureSet;
import com.netflix.governator.internal.PostConstructLifecycleActions;
import com.netflix.governator.internal.PreDestroyLifecycleActions;
import com.netflix.governator.spi.LifecycleListener;
/**
* Adds support for standard lifecycle annotations @PostConstruct and @PreDestroy to Guice.
*
* <code>
* public class MyService {
* {@literal @}PostConstruct
* public void init() {
* }
*
* {@literal @}PreDestroy
* public void shutdown() {
* }
* }
* </code>
*
* To use simply add LifecycleModule to guice when creating the injector
*
* See {@link LifecycleInjector} for different scenarios for shutting down the LifecycleManager.
*/
public final class LifecycleModule extends AbstractModule {
private static final Logger LOG = LoggerFactory.getLogger(LifecycleModule.class);
private LifecycleProvisionListener provisionListener = new LifecycleProvisionListener();
/**
* Holder of actions for a specific type.
*
* @author elandau
*/
static class TypeLifecycleActions {
final List<LifecycleAction> postConstructActions = new ArrayList<LifecycleAction>();
final List<LifecycleAction> preDestroyActions = new ArrayList<>();
}
@Singleton
@SuppressLifecycleUninitialized
static class LifecycleProvisionListener extends AbstractLifecycleListener implements ProvisionListener {
private final ConcurrentLinkedDeque<Runnable> shutdownActions = new ConcurrentLinkedDeque<Runnable>();
private final ConcurrentMap<Class<?>, TypeLifecycleActions> cache = new ConcurrentHashMap<>();
private Set<LifecycleFeature> features;
private final AtomicBoolean isShutdown = new AtomicBoolean();
private LifecycleManager manager;
private List<LifecycleListener> pendingLifecycleListeners = new ArrayList<>();
private boolean shutdownOnFailure = true;
@SuppressLifecycleUninitialized
@Singleton
static class OptionalArgs {
@com.google.inject.Inject(optional = true)
GovernatorFeatureSet governatorFeatures;
boolean hasShutdownOnFailure() {
return governatorFeatures == null ? true : governatorFeatures.get(GovernatorFeatures.SHUTDOWN_ON_ERROR);
}
}
@Inject
public static void initialize(
OptionalArgs args,
LifecycleManager manager,
LifecycleProvisionListener provisionListener,
Set<LifecycleFeature> features) {
provisionListener.manager = manager;
provisionListener.features = features;
provisionListener.shutdownOnFailure = args.hasShutdownOnFailure();
LOG.debug("LifecycleProvisionListener initialized {}", features);
for (LifecycleListener l : provisionListener.pendingLifecycleListeners) {
manager.addListener(l);
}
provisionListener.pendingLifecycleListeners.clear();
}
public TypeLifecycleActions getOrCreateActions(Class<?> type) {
TypeLifecycleActions actions = cache.get(type);
if (actions == null) {
actions = new TypeLifecycleActions();
// Ordered set of actions to perform before PostConstruct
for (LifecycleFeature feature : features) {
actions.postConstructActions.addAll(feature.getActionsForType(type));
}
// Finally, add @PostConstruct methods
actions.postConstructActions.addAll(PostConstructLifecycleActions.INSTANCE.getActionsForType(type));
// Determine @PreDestroy methods
actions.preDestroyActions.addAll(PreDestroyLifecycleActions.INSTANCE.getActionsForType(type));
TypeLifecycleActions existing = cache.putIfAbsent(type, actions);
if (existing != null) {
return existing;
}
}
return actions;
}
/**
* Invoke all shutdown actions
*/
@Override
public synchronized void onStopped(Throwable optionalFailureReason) {
if (shutdownOnFailure || optionalFailureReason == null) {
if (isShutdown.compareAndSet(false, true)) {
for (Runnable action : shutdownActions) {
action.run();
}
}
}
}
@Override
public String toString() {
return "LifecycleProvisionListener[]";
}
@Override
public <T> void onProvision(ProvisionInvocation<T> provision) {
final T injectee = provision.provision();
if(injectee == null) {
return;
}
if (features == null) {
if (!injectee.getClass().isAnnotationPresent(SuppressLifecycleUninitialized.class)) {
LOG.debug("LifecycleProvisionListener not initialized yet : {}", injectee.getClass());
}
if (injectee instanceof LifecycleListener) {
pendingLifecycleListeners.add((LifecycleListener)injectee);
}
// TODO: Add to PreDestroy list
return;
}
final TypeLifecycleActions actions = getOrCreateActions(injectee.getClass());
// Call all the LifecycleActions with PostConstruct methods being the last
for (LifecycleAction action : actions.postConstructActions) {
try {
action.call(injectee);
}
catch (Exception e) {
throw new ProvisionException("Failed to provision object of type " + injectee.getClass(), e);
}
}
if (injectee instanceof LifecycleListener) {
manager.addListener((LifecycleListener)injectee);
}
// Add any PreDestroy methods to the shutdown list of actions
if (!actions.preDestroyActions.isEmpty()) {
if (isShutdown.get() == false) {
shutdownActions.addFirst(new Runnable() {
@Override
public void run() {
for (LifecycleAction m : actions.preDestroyActions) {
try {
m.call(injectee);
}
catch (Exception e) {
LOG.error("Failed to call @PreDestroy method {} on {}", new Object[]{m, injectee.getClass().getName()}, e);
}
}
}
});
}
else {
LOG.warn("Already shutting down. Shutdown methods {} on {} will not be invoked", new Object[]{actions.preDestroyActions, injectee.getClass().getName()});
}
}
}
}
@Override
protected void configure() {
requestStaticInjection(LifecycleProvisionListener.class);
bind(LifecycleProvisionListener.class).toInstance(provisionListener);
bindListener(Matchers.any(), provisionListener);
Multibinder.newSetBinder(binder(), LifecycleFeature.class);
}
@Override
public boolean equals(Object obj) {
return getClass().equals(obj.getClass());
}
@Override
public int hashCode() {
return getClass().hashCode();
}
@Override
public String toString() {
return "LifecycleModule[]";
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Denis M. Kishenko
*/
package com.tom_roush.harmony.awt.geom;
import android.graphics.PointF;
import java.io.IOException;
import java.io.Serializable;
/**
 * Android port of {@code java.awt.geom.AffineTransform} using the 2x3 matrix
 * <pre>
 *   [ m00 m01 m02 ]
 *   [ m10 m11 m12 ]
 * </pre>
 * Points transform as x' = m00*x + m01*y + m02 and y' = m10*x + m11*y + m12.
 */
public class AffineTransform implements Cloneable, Serializable
{
    private static final long serialVersionUID = 1330973210523860834L;

    public static final int TYPE_IDENTITY = 0;
    public static final int TYPE_TRANSLATION = 1;
    public static final int TYPE_UNIFORM_SCALE = 2;
    public static final int TYPE_GENERAL_SCALE = 4;
    public static final int TYPE_QUADRANT_ROTATION = 8;
    public static final int TYPE_GENERAL_ROTATION = 16;
    public static final int TYPE_GENERAL_TRANSFORM = 32;
    public static final int TYPE_FLIP = 64;
    public static final int TYPE_MASK_SCALE = TYPE_UNIFORM_SCALE | TYPE_GENERAL_SCALE;
    public static final int TYPE_MASK_ROTATION = TYPE_QUADRANT_ROTATION | TYPE_GENERAL_ROTATION;

    /**
     * The <code>TYPE_UNKNOWN</code> is an initial type value; it makes
     * {@link #getType()} recompute the classification lazily.
     */
    static final int TYPE_UNKNOWN = -1;

    /**
     * The min value equivalent to zero. If absolute value less then ZERO it considered as zero.
     */
    static final double ZERO = 1E-10;

    /**
     * The values of transformation matrix
     */
    double m00;
    double m10;
    double m01;
    double m11;
    double m02;
    double m12;

    /**
     * The transformation <code>type</code>; cached, TYPE_UNKNOWN forces recomputation.
     */
    transient int type;

    /** Creates the identity transform. */
    public AffineTransform()
    {
        type = TYPE_IDENTITY;
        m00 = m11 = 1.0;
        m10 = m01 = m02 = m12 = 0.0;
    }

    /** Copy constructor. */
    public AffineTransform(AffineTransform t)
    {
        this.type = t.type;
        this.m00 = t.m00;
        this.m10 = t.m10;
        this.m01 = t.m01;
        this.m11 = t.m11;
        this.m02 = t.m02;
        this.m12 = t.m12;
    }

    /** Creates a transform from the six matrix entries (float). */
    public AffineTransform(float m00, float m10, float m01, float m11, float m02, float m12)
    {
        this.type = TYPE_UNKNOWN;
        this.m00 = m00;
        this.m10 = m10;
        this.m01 = m01;
        this.m11 = m11;
        this.m02 = m02;
        this.m12 = m12;
    }

    /** Creates a transform from the six matrix entries (double). */
    public AffineTransform(double m00, double m10, double m01, double m11, double m02, double m12)
    {
        this.type = TYPE_UNKNOWN;
        this.m00 = m00;
        this.m10 = m10;
        this.m01 = m01;
        this.m11 = m11;
        this.m02 = m02;
        this.m12 = m12;
    }

    /** Creates a transform from a flat array {m00, m10, m01, m11[, m02, m12]}. */
    public AffineTransform(float[] matrix)
    {
        this.type = TYPE_UNKNOWN;
        m00 = matrix[0];
        m10 = matrix[1];
        m01 = matrix[2];
        m11 = matrix[3];
        if (matrix.length > 4)
        {
            m02 = matrix[4];
            m12 = matrix[5];
        }
    }

    /** Creates a transform from a flat array {m00, m10, m01, m11[, m02, m12]}. */
    public AffineTransform(double[] matrix)
    {
        this.type = TYPE_UNKNOWN;
        m00 = matrix[0];
        m10 = matrix[1];
        m01 = matrix[2];
        m11 = matrix[3];
        if (matrix.length > 4)
        {
            m02 = matrix[4];
            m12 = matrix[5];
        }
    }

    /**
     * Creates an AffineTransform from an android.graphics.Matrix
     * (row-major 3x3; the affine part occupies values[0..5]).
     *
     * @param matrix the matrix to copy from
     */
    public AffineTransform(android.graphics.Matrix matrix)
    {
        // BUG FIX: type must be TYPE_UNKNOWN here. The field's default value
        // is 0 == TYPE_IDENTITY, so getType()/isIdentity() previously reported
        // identity for every Matrix-constructed transform.
        this.type = TYPE_UNKNOWN;
        float[] values = new float[9];
        matrix.getValues(values);
        m00 = values[0];
        m01 = values[1];
        m02 = values[2];
        m10 = values[3];
        m11 = values[4];
        m12 = values[5];
    }

    /*
     * Method returns type of affine transformation.
     *
     * Transform matrix is
     * m00 m01 m02
     * m10 m11 m12
     *
     * According analytic geometry new basis vectors are (m00, m01) and (m10, m11),
     * translation vector is (m02, m12). Original basis vectors are (1, 0) and (0, 1).
     * Type transformations classification:
     * TYPE_IDENTITY - new basis equals original one and zero translation
     * TYPE_TRANSLATION - translation vector isn't zero
     * TYPE_UNIFORM_SCALE - vectors length of new basis equals
     * TYPE_GENERAL_SCALE - vectors length of new basis doesn't equal
     * TYPE_FLIP - new basis vector orientation differ from original one
     * TYPE_QUADRANT_ROTATION - new basis is rotated by 90, 180, 270, or 360 degrees
     * TYPE_GENERAL_ROTATION - new basis is rotated by arbitrary angle
     * TYPE_GENERAL_TRANSFORM - transformation can't be inversed
     */
    public int getType()
    {
        if (type != TYPE_UNKNOWN)
        {
            return type;
        }
        // Local shadows the field intentionally: the classification is not
        // cached back, matching the original (Harmony) behavior.
        int type = 0;
        if (m00 * m01 + m10 * m11 != 0.0)
        {
            // Basis vectors are not orthogonal: general (shearing) transform.
            type |= TYPE_GENERAL_TRANSFORM;
            return type;
        }
        if (m02 != 0.0 || m12 != 0.0)
        {
            type |= TYPE_TRANSLATION;
        }
        else if (m00 == 1.0 && m11 == 1.0 && m01 == 0.0 && m10 == 0.0)
        {
            type = TYPE_IDENTITY;
            return type;
        }
        if (m00 * m11 - m01 * m10 < 0.0)
        {
            type |= TYPE_FLIP;
        }
        double dx = m00 * m00 + m10 * m10;
        double dy = m01 * m01 + m11 * m11;
        if (dx != dy)
        {
            type |= TYPE_GENERAL_SCALE;
        }
        else if (dx != 1.0)
        {
            type |= TYPE_UNIFORM_SCALE;
        }
        if ((m00 == 0.0 && m11 == 0.0) ||
            (m10 == 0.0 && m01 == 0.0 && (m00 < 0.0 || m11 < 0.0)))
        {
            type |= TYPE_QUADRANT_ROTATION;
        }
        else if (m01 != 0.0 || m10 != 0.0)
        {
            type |= TYPE_GENERAL_ROTATION;
        }
        return type;
    }

    public double getScaleX()
    {
        return m00;
    }

    public double getScaleY()
    {
        return m11;
    }

    public double getShearX()
    {
        return m01;
    }

    public double getShearY()
    {
        return m10;
    }

    public double getTranslateX()
    {
        return m02;
    }

    public double getTranslateY()
    {
        return m12;
    }

    /** Returns true when this transform is exactly the identity. */
    public boolean isIdentity()
    {
        return getType() == TYPE_IDENTITY;
    }

    /** Copies the matrix entries into {m00, m10, m01, m11[, m02, m12]}. */
    public void getMatrix(double[] matrix)
    {
        matrix[0] = m00;
        matrix[1] = m10;
        matrix[2] = m01;
        matrix[3] = m11;
        if (matrix.length > 4)
        {
            matrix[4] = m02;
            matrix[5] = m12;
        }
    }

    /** Returns the determinant m00*m11 - m01*m10 of the linear part. */
    public double getDeterminant()
    {
        return m00 * m11 - m01 * m10;
    }

    public void setTransform(double m00, double m10, double m01, double m11, double m02, double m12)
    {
        this.type = TYPE_UNKNOWN;
        this.m00 = m00;
        this.m10 = m10;
        this.m01 = m01;
        this.m11 = m11;
        this.m02 = m02;
        this.m12 = m12;
    }

    public void setTransform(AffineTransform t)
    {
        type = t.type;
        setTransform(t.m00, t.m10, t.m01, t.m11, t.m02, t.m12);
    }

    public void setToIdentity()
    {
        type = TYPE_IDENTITY;
        m00 = m11 = 1.0;
        m10 = m01 = m02 = m12 = 0.0;
    }

    public void setToTranslation(double mx, double my)
    {
        m00 = m11 = 1.0;
        m01 = m10 = 0.0;
        m02 = mx;
        m12 = my;
        if (mx == 0.0 && my == 0.0)
        {
            type = TYPE_IDENTITY;
        }
        else
        {
            type = TYPE_TRANSLATION;
        }
    }

    public void setToScale(double scx, double scy)
    {
        m00 = scx;
        m11 = scy;
        m10 = m01 = m02 = m12 = 0.0;
        if (scx != 1.0 || scy != 1.0)
        {
            type = TYPE_UNKNOWN;
        }
        else
        {
            type = TYPE_IDENTITY;
        }
    }

    public void setToShear(double shx, double shy)
    {
        m00 = m11 = 1.0;
        m02 = m12 = 0.0;
        m01 = shx;
        m10 = shy;
        if (shx != 0.0 || shy != 0.0)
        {
            type = TYPE_UNKNOWN;
        }
        else
        {
            type = TYPE_IDENTITY;
        }
    }

    /** Sets a counter-clockwise rotation by {@code angle} radians about the origin. */
    public void setToRotation(double angle)
    {
        double sin = Math.sin(angle);
        double cos = Math.cos(angle);
        // Snap near-quadrant angles to exact axis values to avoid drift.
        if (Math.abs(cos) < ZERO)
        {
            cos = 0.0;
            sin = sin > 0.0 ? 1.0 : -1.0;
        }
        else if (Math.abs(sin) < ZERO)
        {
            sin = 0.0;
            cos = cos > 0.0 ? 1.0 : -1.0;
        }
        m00 = m11 = cos;
        m01 = -sin;
        m10 = sin;
        m02 = m12 = 0.0;
        type = TYPE_UNKNOWN;
    }

    /** Sets a rotation by {@code angle} radians about the anchor point (px, py). */
    public void setToRotation(double angle, double px, double py)
    {
        setToRotation(angle);
        m02 = px * (1.0 - m00) + py * m10;
        m12 = py * (1.0 - m00) - px * m10;
        type = TYPE_UNKNOWN;
    }

    public static AffineTransform getTranslateInstance(double mx, double my)
    {
        AffineTransform t = new AffineTransform();
        t.setToTranslation(mx, my);
        return t;
    }

    public static AffineTransform getScaleInstance(double scx, double scY)
    {
        AffineTransform t = new AffineTransform();
        t.setToScale(scx, scY);
        return t;
    }

    public static AffineTransform getShearInstance(double shx, double shy)
    {
        AffineTransform m = new AffineTransform();
        m.setToShear(shx, shy);
        return m;
    }

    public static AffineTransform getRotateInstance(double angle)
    {
        AffineTransform t = new AffineTransform();
        t.setToRotation(angle);
        return t;
    }

    public static AffineTransform getRotateInstance(double angle, double x, double y)
    {
        AffineTransform t = new AffineTransform();
        t.setToRotation(angle, x, y);
        return t;
    }

    public void translate(double mx, double my)
    {
        concatenate(AffineTransform.getTranslateInstance(mx, my));
    }

    public void scale(double scx, double scy)
    {
        concatenate(AffineTransform.getScaleInstance(scx, scy));
    }

    public void shear(double shx, double shy)
    {
        concatenate(AffineTransform.getShearInstance(shx, shy));
    }

    public void rotate(double angle)
    {
        concatenate(AffineTransform.getRotateInstance(angle));
    }

    public void rotate(double angle, double px, double py)
    {
        concatenate(AffineTransform.getRotateInstance(angle, px, py));
    }

    /**
     * Multiply matrix of two AffineTransform objects
     *
     * @param t1 - the AffineTransform object is a multiplicand
     * @param t2 - the AffineTransform object is a multiplier
     * @return an AffineTransform object that is a result of t1 multiplied by matrix t2.
     */
    AffineTransform multiply(AffineTransform t1, AffineTransform t2)
    {
        // Constructor argument order is (m00, m10, m01, m11, m02, m12); the
        // previous comments mislabeled the second and third arguments.
        return new AffineTransform(
            t1.m00 * t2.m00 + t1.m10 * t2.m01, // m00
            t1.m00 * t2.m10 + t1.m10 * t2.m11, // m10
            t1.m01 * t2.m00 + t1.m11 * t2.m01, // m01
            t1.m01 * t2.m10 + t1.m11 * t2.m11, // m11
            t1.m02 * t2.m00 + t1.m12 * t2.m01 + t2.m02, // m02
            t1.m02 * t2.m10 + t1.m12 * t2.m11 + t2.m12);// m12
    }

    /** Appends {@code t} to this transform (this = this x t). */
    public void concatenate(AffineTransform t)
    {
        setTransform(multiply(t, this));
    }

    /** Prepends {@code t} to this transform (this = t x this). */
    public void preConcatenate(AffineTransform t)
    {
        setTransform(multiply(this, t));
    }

    /**
     * Returns the inverse transform.
     *
     * @throws NoninvertibleTransformException if the determinant is (near) zero
     */
    public AffineTransform createInverse() throws NoninvertibleTransformException
    {
        double det = getDeterminant();
        if (Math.abs(det) < ZERO)
        {
            throw new NoninvertibleTransformException("Determinant is zero");
        }
        return new AffineTransform(
            m11 / det, // m00
            -m10 / det, // m10
            -m01 / det, // m01
            m00 / det, // m11
            (m01 * m12 - m11 * m02) / det, // m02
            (m10 * m02 - m00 * m12) / det // m12
        );
    }

    /** Transforms {@code src} into {@code dst} (which must be non-null) and returns it. */
    public PointF transform(PointF src, PointF dst)
    {
        dst.set((float) (src.x * m00 + src.y * m01 + m02),
                (float) (src.x * m10 + src.y * m11 + m12));
        return dst;
    }

    /** Transforms {@code length} points; null destination slots get fresh PointF instances. */
    public void transform(PointF[] src, int srcOff, PointF[] dst, int dstOff, int length)
    {
        while (--length >= 0)
        {
            PointF srcPoint = src[srcOff++];
            PointF dstPoint = dst[dstOff];
            if (dstPoint == null)
            {
                dstPoint = new PointF();
            }
            dstPoint.set((float) (srcPoint.x * m00 + srcPoint.y * m01 + m02),
                         (float) (srcPoint.x * m10 + srcPoint.y * m11 + m12));
            dst[dstOff++] = dstPoint;
        }
    }

    /** Transforms (x, y) pairs in-place-safely, iterating backwards if the ranges overlap. */
    public void transform(double[] src, int srcOff, double[] dst, int dstOff, int length)
    {
        int step = 2;
        if (src == dst && srcOff < dstOff && dstOff < srcOff + length * 2)
        {
            srcOff = srcOff + length * 2 - 2;
            dstOff = dstOff + length * 2 - 2;
            step = -2;
        }
        while (--length >= 0)
        {
            double x = src[srcOff + 0];
            double y = src[srcOff + 1];
            dst[dstOff + 0] = x * m00 + y * m01 + m02;
            dst[dstOff + 1] = x * m10 + y * m11 + m12;
            srcOff += step;
            dstOff += step;
        }
    }

    /** Transforms (x, y) pairs in-place-safely, iterating backwards if the ranges overlap. */
    public void transform(float[] src, int srcOff, float[] dst, int dstOff, int length)
    {
        int step = 2;
        if (src == dst && srcOff < dstOff && dstOff < srcOff + length * 2)
        {
            srcOff = srcOff + length * 2 - 2;
            dstOff = dstOff + length * 2 - 2;
            step = -2;
        }
        while (--length >= 0)
        {
            float x = src[srcOff + 0];
            float y = src[srcOff + 1];
            dst[dstOff + 0] = (float) (x * m00 + y * m01 + m02);
            dst[dstOff + 1] = (float) (x * m10 + y * m11 + m12);
            srcOff += step;
            dstOff += step;
        }
    }

    public void transform(float[] src, int srcOff, double[] dst, int dstOff, int length)
    {
        while (--length >= 0)
        {
            float x = src[srcOff++];
            float y = src[srcOff++];
            dst[dstOff++] = x * m00 + y * m01 + m02;
            dst[dstOff++] = x * m10 + y * m11 + m12;
        }
    }

    public void transform(double[] src, int srcOff, float[] dst, int dstOff, int length)
    {
        while (--length >= 0)
        {
            double x = src[srcOff++];
            double y = src[srcOff++];
            dst[dstOff++] = (float) (x * m00 + y * m01 + m02);
            dst[dstOff++] = (float) (x * m10 + y * m11 + m12);
        }
    }

    /** Applies only the linear part (no translation); allocates dst when null. */
    public PointF deltaTransform(PointF src, PointF dst)
    {
        if (dst == null)
        {
            dst = new PointF();
        }
        dst.set((float) (src.x * m00 + src.y * m01), (float) (src.x * m10 + src.y * m11));
        return dst;
    }

    /** Applies only the linear part (no translation) to (x, y) pairs. */
    public void deltaTransform(double[] src, int srcOff, double[] dst, int dstOff, int length)
    {
        while (--length >= 0)
        {
            double x = src[srcOff++];
            double y = src[srcOff++];
            dst[dstOff++] = x * m00 + y * m01;
            dst[dstOff++] = x * m10 + y * m11;
        }
    }

    /** Applies the inverse transform to a point; allocates dst when null. */
    public PointF inverseTransform(PointF src, PointF dst) throws NoninvertibleTransformException
    {
        double det = getDeterminant();
        if (Math.abs(det) < ZERO)
        {
            throw new NoninvertibleTransformException("Determinant is zero");
        }
        if (dst == null)
        {
            dst = new PointF();
        }
        float x = (float) (src.x - m02);
        float y = (float) (src.y - m12);
        dst.set((float) ((x * m11 - y * m01) / det), (float) ((y * m00 - x * m10) / det));
        return dst;
    }

    /** Applies the inverse transform to (x, y) pairs. */
    public void inverseTransform(double[] src, int srcOff, double[] dst, int dstOff, int length)
        throws NoninvertibleTransformException
    {
        double det = getDeterminant();
        if (Math.abs(det) < ZERO)
        {
            throw new NoninvertibleTransformException("Determinant is zero");
        }
        while (--length >= 0)
        {
            double x = src[srcOff++] - m02;
            double y = src[srcOff++] - m12;
            dst[dstOff++] = (x * m11 - y * m01) / det;
            dst[dstOff++] = (y * m00 - x * m10) / det;
        }
    }

    // public Shape createTransformedShape(Shape src)
    // {
    // if (src == null)
    // {
    // return null;
    // }
    // if (src instanceof GeneralPath)
    // {
    // return ((GeneralPath) src).createTransformedShape(this);
    // }
    // PathIterator path = src.getPathIterator(this);
    // GeneralPath dst = new GeneralPath(path.getWindingRule());
    // dst.append(path, false);
    // return dst;
    // } TODO: PdfBox-Android

    @Override
    public String toString()
    {
        return getClass().getName() + "[[" + m00 + ", " + m01 + ", " + m02 + "], [" + m10 + ", " +
            m11 + ", " + m12 + "]]";
    }

    @Override
    public Object clone()
    {
        try
        {
            return super.clone();
        }
        catch (CloneNotSupportedException e)
        {
            throw new InternalError();
        }
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj == this)
        {
            return true;
        }
        if (obj instanceof AffineTransform)
        {
            AffineTransform t = (AffineTransform) obj;
            return
                m00 == t.m00 && m01 == t.m01 &&
                m02 == t.m02 && m10 == t.m10 &&
                m11 == t.m11 && m12 == t.m12;
        }
        return false;
    }

    /**
     * Value-based hash over the six matrix entries, added because equals() is
     * overridden: without it equal transforms could land in different hash
     * buckets, breaking HashMap/HashSet usage. Mirrors java.awt.geom.AffineTransform.
     */
    @Override
    public int hashCode()
    {
        long bits = Double.doubleToLongBits(m00);
        bits = bits * 31 + Double.doubleToLongBits(m01);
        bits = bits * 31 + Double.doubleToLongBits(m02);
        bits = bits * 31 + Double.doubleToLongBits(m10);
        bits = bits * 31 + Double.doubleToLongBits(m11);
        bits = bits * 31 + Double.doubleToLongBits(m12);
        return ((int) bits) ^ ((int) (bits >> 32));
    }

    /**
     * Write AffineTransform object to the output stream.
     *
     * @param stream - the output stream
     * @throws IOException - if there are I/O errors while writing to the output stream
     */
    private void writeObject(java.io.ObjectOutputStream stream) throws IOException
    {
        stream.defaultWriteObject();
    }

    /**
     * Read AffineTransform object from the input stream
     *
     * @param stream - the input stream
     * @throws IOException - if there are I/O errors while reading from the input stream
     * @throws ClassNotFoundException - if class could not be found
     */
    private void readObject(java.io.ObjectInputStream stream)
        throws IOException, ClassNotFoundException
    {
        stream.defaultReadObject();
        // type is transient; force lazy reclassification after deserialization.
        type = TYPE_UNKNOWN;
    }

    /**
     * Returns this AffineTransform as an android.graphics.Matrix
     *
     * @return the matrix
     */
    public android.graphics.Matrix toMatrix()
    {
        android.graphics.Matrix retval = new android.graphics.Matrix();
        retval.setValues(new float[]{
            (float) m00, (float) m01, (float) m02,
            (float) m10, (float) m11, (float) m12,
            0.0f, 0.0f, 1.0f
        });
        return retval;
    }

    /** Thrown when an inverse is requested for a singular transform. */
    public class NoninvertibleTransformException extends java.lang.Exception
    {
        private static final long serialVersionUID = 6137225240503990466L;

        public NoninvertibleTransformException(String s)
        {
            super(s);
        }
    }
}
| |
package jadx.core.dex.visitors;
import java.util.*;
import jadx.core.dex.attributes.*;
import jadx.core.dex.info.*;
import jadx.core.dex.instructions.*;
import jadx.core.dex.instructions.args.*;
import jadx.core.dex.nodes.*;
import jadx.core.dex.visitors.typeinference.*;
import jadx.core.utils.*;
import jadx.core.utils.exceptions.*;
/**
 * Inlines CONST instructions: replaces uses of a constant-assigned SSA
 * variable with the literal itself, removes the CONST instruction when every
 * use was replaced, and re-runs type inference on the affected instructions.
 */
public class ConstInlineVisitor extends AbstractVisitor {

    /**
     * Scans every block of the method, collects CONST instructions whose uses
     * could all be inlined, and removes them.
     */
    @Override
    public void visit(MethodNode mth) throws JadxException {
        if (mth.isNoCode()) {
            return;
        }
        List<InsnNode> toRemove = new ArrayList<InsnNode>();
        for (BlockNode block : mth.getBasicBlocks()) {
            // Buffer reused across blocks; removal is deferred so iteration
            // over block.getInstructions() is not disturbed.
            toRemove.clear();
            for (InsnNode insn : block.getInstructions()) {
                if (checkInsn(mth, insn)) {
                    toRemove.add(insn);
                }
            }
            InstructionRemover.removeAll(mth, block, toRemove);
        }
    }

    /**
     * Decides whether {@code insn} is an inlinable CONST and performs the
     * inline. Returns true when the instruction can be removed (all uses
     * replaced).
     */
    private static boolean checkInsn(MethodNode mth, InsnNode insn) {
        if (insn.getType() != InsnType.CONST || insn.contains(AFlag.DONT_INLINE)) {
            return false;
        }
        InsnArg arg = insn.getArg(0);
        if (!arg.isLiteral()) {
            return false;
        }
        long lit = ((LiteralArg) arg).getLiteral();
        SSAVar sVar = insn.getResult().getSVar();
        // A zero used as a null object must not become a bare literal in some
        // positions (see checkObjectInline); mark it to stay a named variable.
        if (lit == 0 && checkObjectInline(sVar)) {
            if (sVar.getUseCount() == 1) {
                InsnNode assignInsn = insn.getResult().getAssignInsn();
                if (assignInsn != null) {
                    assignInsn.add(AFlag.DONT_INLINE);
                }
            }
            return false;
        }
        ArgType resType = insn.getResult().getType();
        // make sure arg has correct type
        if (!arg.getType().isTypeKnown()) {
            arg.merge(mth.dex(), resType);
        }
        return replaceConst(mth, insn, lit);
    }

    /**
     * Don't inline null object if:
     * - used as instance arg in invoke instruction
     * - used in 'array.length'
     */
    private static boolean checkObjectInline(SSAVar sVar) {
        for (RegisterArg useArg : sVar.getUseList()) {
            InsnNode insn = useArg.getParentInsn();
            if (insn != null) {
                InsnType insnType = insn.getType();
                if (insnType == InsnType.INVOKE) {
                    InvokeNode inv = (InvokeNode) insn;
                    // Non-static invoke: arg(0) is the receiver.
                    if (inv.getInvokeType() != InvokeType.STATIC
                        && inv.getArg(0) == useArg) {
                        return true;
                    }
                } else if (insnType == InsnType.ARRAY_LENGTH) {
                    if (insn.getArg(0) == useArg) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Replaces every (non-PHI, non-MERGE) use of the CONST's result with a
     * literal arg, then fixes types around each replacement.
     *
     * @return true when every use was replaced, i.e. the CONST can be removed
     */
    private static boolean replaceConst(MethodNode mth, InsnNode constInsn, long literal) {
        SSAVar sVar = constInsn.getResult().getSVar();
        // Copy: replaceArg mutates the use list while we iterate.
        List<RegisterArg> use = new ArrayList<RegisterArg>(sVar.getUseList());
        int replaceCount = 0;
        for (RegisterArg arg : use) {
            InsnNode useInsn = arg.getParentInsn();
            if (useInsn == null
                || useInsn.getType() == InsnType.PHI
                || useInsn.getType() == InsnType.MERGE) {
                continue;
            }
            LiteralArg litArg;
            ArgType argType = arg.getType();
            // A non-zero literal can never be an object reference; narrow it
            // to a numeric type candidate.
            if (argType.isObject() && literal != 0) {
                argType = ArgType.NARROW_NUMBERS;
            }
            if (use.size() == 1 || arg.isTypeImmutable()) {
                // arg used only in one place
                litArg = InsnArg.lit(literal, argType);
            } else if (useInsn.getType() == InsnType.MOVE
                && !useInsn.getResult().getType().isTypeKnown()) {
                // save type for 'move' instructions (hard to find type in chains of 'move')
                litArg = InsnArg.lit(literal, argType);
            } else {
                // in most cases type not equal arg.getType()
                // just set unknown type and run type fixer
                litArg = InsnArg.lit(literal, ArgType.UNKNOWN);
            }
            if (useInsn.replaceArg(arg, litArg)) {
                fixTypes(mth, useInsn, litArg);
                replaceCount++;
                if (useInsn.getType() == InsnType.RETURN) {
                    useInsn.setSourceLine(constInsn.getSourceLine());
                }
                // Prefer a named constant field over a raw literal when the
                // enclosing class declares one with this value.
                FieldNode f = null;
                ArgType litArgType = litArg.getType();
                if (litArgType.isTypeKnown()) {
                    f = mth.getParentClass().getConstFieldByLiteralArg(litArg);
                } else if (litArgType.contains(PrimitiveType.INT)) {
                    f = mth.getParentClass().getConstField((int) literal, false);
                }
                if (f != null) {
                    litArg.wrapInstruction(new IndexInsnNode(InsnType.SGET, f.getFieldInfo(), 0));
                }
            }
        }
        return replaceCount == use.size();
    }

    /**
     * This is method similar to PostTypeInference.process method,
     * but contains some expensive operations needed only after constant inline.
     * Each case re-merges types between the freshly inlined literal and the
     * instruction's other operands/declared types.
     */
    private static void fixTypes(MethodNode mth, InsnNode insn, LiteralArg litArg) {
        DexNode dex = mth.dex();
        PostTypeInference.process(mth, insn);
        switch (insn.getType()) {
            case CONST:
                insn.getArg(0).merge(dex, insn.getResult());
                break;
            case MOVE:
                insn.getResult().merge(dex, insn.getArg(0));
                insn.getArg(0).merge(dex, insn.getResult());
                break;
            case IPUT:
            case SPUT:
                // Field type is authoritative for the stored value.
                IndexInsnNode node = (IndexInsnNode) insn;
                insn.getArg(0).merge(dex, ((FieldInfo) node.getIndex()).getType());
                break;
            case IF: {
                // Propagate the type of the non-literal side to the literal.
                InsnArg arg0 = insn.getArg(0);
                InsnArg arg1 = insn.getArg(1);
                if (arg0 == litArg) {
                    arg0.merge(dex, arg1);
                } else {
                    arg1.merge(dex, arg0);
                }
                break;
            }
            case CMP_G:
            case CMP_L:
                InsnArg arg0 = insn.getArg(0);
                InsnArg arg1 = insn.getArg(1);
                if (arg0 == litArg) {
                    arg0.merge(dex, arg1);
                } else {
                    arg1.merge(dex, arg0);
                }
                break;
            case RETURN:
                if (insn.getArgsCount() != 0) {
                    insn.getArg(0).merge(dex, mth.getReturnType());
                }
                break;
            case INVOKE:
                InvokeNode inv = (InvokeNode) insn;
                List<ArgType> types = inv.getCallMth().getArgumentsTypes();
                int count = insn.getArgsCount();
                // k == -1 means arg(0) is the instance receiver, whose type is
                // the enclosing class, not a declared parameter type.
                int k = types.size() == count ? 0 : -1;
                for (int i = 0; i < count; i++) {
                    InsnArg arg = insn.getArg(i);
                    if (!arg.getType().isTypeKnown()) {
                        ArgType type;
                        if (k >= 0) {
                            type = types.get(k);
                        } else {
                            type = mth.getParentClass().getClassInfo().getType();
                        }
                        arg.merge(dex, type);
                    }
                    k++;
                }
                break;
            case ARITH:
                litArg.merge(dex, insn.getResult());
                break;
            case APUT:
            case AGET:
                // arg(1) is the array index — always int.
                if (litArg == insn.getArg(1)) {
                    litArg.merge(dex, ArgType.INT);
                }
                break;
            case NEW_ARRAY:
                // arg(0) is the array length — always int.
                if (litArg == insn.getArg(0)) {
                    litArg.merge(dex, ArgType.INT);
                }
                break;
            default:
                break;
        }
    }
}
| |
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.background.stationary;
import boofcv.BoofTesting;
import boofcv.alg.background.BackgroundModelStationary;
import boofcv.alg.misc.GImageMiscOps;
import boofcv.alg.misc.ImageMiscOps;
import boofcv.core.image.GeneralizedImageOps;
import boofcv.struct.image.GrayU8;
import boofcv.struct.image.ImageBase;
import boofcv.struct.image.ImageType;
import boofcv.testing.BoofStandardJUnit;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* @author Peter Abeles
*/
public abstract class GenericBackgroundModelStationaryChecks extends BoofStandardJUnit {
Random rand = new Random(234);
int width = 60;
int height = 50;
protected List<ImageType> imageTypes = new ArrayList<>();
public abstract<T extends ImageBase<T>>
BackgroundModelStationary<T> create( ImageType<T> imageType );
/**
* Basic check were multiple images are feed into the algorithm and another image,
* which has a region which is clearly different is then segmented.
*/
/** Runs the segmentation sanity check once for every configured image type. */
@Test void basicCheck() {
    for (int i = 0; i < imageTypes.size(); i++) {
        basicCheck(imageTypes.get(i));
    }
}
/**
 * Trains the background model on 30 noisy frames, then presents a frame with
 * a bright rectangle: only pixels inside the rectangle should be segmented
 * as foreground (1), everything else as background (0).
 */
private <T extends ImageBase<T>> void basicCheck( ImageType<T> imageType ) {
    BackgroundModelStationary<T> alg = create(imageType);
    T frame = imageType.createImage(width,height);
    // Learn a background of mean 100 with small noise.
    for (int i = 0; i < 30; i++) {
        noise(100, 2, frame);
        alg.updateBackground(frame);
    }
    // Paint an obviously different region into an otherwise-typical frame.
    int x0 = 10, y0 = 12, x1 = 40, y1 = 38;
    noise(100,2,frame);
    GImageMiscOps.fillRectangle(frame, 200, x0, y0, x1 - x0, y1 - y0);
    GrayU8 segmented = new GrayU8(width,height);
    alg.segment(frame, segmented);
    // segmented.printBinary();
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            if( x >= x0 && x < x1 && y >= y0 && y < y1 ) {
                assertEquals(1,segmented.get(x,y));
            } else {
                assertEquals(0,segmented.get(x,y));
            }
        }
    }
}
/**
* Sees if reset discard the previous history in the background image
*/
/** Runs the reset-discards-history check once for every configured image type. */
@Test void reset() {
    for (int i = 0; i < imageTypes.size(); i++) {
        reset(imageTypes.get(i));
    }
}
/**
 * Verifies that reset() discards the learned background: after training on
 * value 100, resetting, and retraining on value 50, a frame of 50 must show
 * no motion, while a frame of 100 must show motion everywhere.
 */
private <T extends ImageBase<T>>
void reset( ImageType<T> imageType ) {
    T frame = imageType.createImage(width, height);
    BackgroundModelStationary<T> alg = create(frame.getImageType());
    GImageMiscOps.fill(frame,100);
    alg.updateBackground(frame);
    alg.reset();
    GImageMiscOps.fill(frame,50);
    alg.updateBackground(frame);
    GrayU8 segmented = new GrayU8(width,height);
    GrayU8 expected = new GrayU8(width,height);
    // there should be no change
    // if reset isn't the case then this will fail
    alg.segment(frame,segmented);
    BoofTesting.assertEquals(expected, segmented, 1e-8);
    GImageMiscOps.fill(frame, 100);
    ImageMiscOps.fill(expected, 1);
    // it should be all changed. really just a sanity check
    alg.segment(frame,segmented);
    BoofTesting.assertEquals(expected,segmented,1e-8);
}
/**
* The user tries to segment before specifying the background
*/
/** Runs the segment-before-training check once for every configured image type. */
@Test void segmentBeforeUpdateBackGround() {
    for (int i = 0; i < imageTypes.size(); i++) {
        segmentBeforeUpdateBackGround(imageTypes.get(i));
    }
}
/**
 * Segmenting before any background has been learned must label every pixel
 * with the configured "unknown" value (2 here), not 0 or 1.
 */
private <T extends ImageBase<T>>
void segmentBeforeUpdateBackGround( ImageType<T> imageType ) {
    T frame = imageType.createImage(width, height);
    BackgroundModelStationary<T> alg = create(frame.getImageType());
    alg.setUnknownValue(2);
    GrayU8 segmented = new GrayU8(width,height);
    GrayU8 expected = new GrayU8(width,height);
    ImageMiscOps.fill(expected, 2);
    alg.segment(frame, segmented);
    BoofTesting.assertEquals(expected, segmented, 1e-8);
}
@Test void checkSubImage() {
    // verify sub-image handling for each supported image type
    for (ImageType imageType : imageTypes) {
        checkSubImage(imageType);
    }
}
/**
 * Runs the same model on a regular image and on sub-images of the same data;
 * results must match, proving the implementation honors image stride/offset.
 */
private <T extends ImageBase<T>>
void checkSubImage( ImageType<T> imageType ) {
    T frame = imageType.createImage(width, height);
    GrayU8 segmented = new GrayU8(width,height);

    // baseline run on regular (non-sub) images
    checkSubImage_process(frame, segmented);
    GrayU8 expected = segmented.clone();

    // repeat with sub-image wrappers; output buffer cleared first
    frame = BoofTesting.createSubImageOf(frame);
    segmented = BoofTesting.createSubImageOf(segmented);
    ImageMiscOps.fill(segmented,0);
    checkSubImage_process(frame, segmented);
    GrayU8 found = segmented.clone();

    // see if both produce the same result
    BoofTesting.assertEquals(expected,found,1e-8);
}
/**
 * Shared worker for checkSubImage(): trains a fresh model on five noisy frames
 * and segments one more noisy frame. Reseeds {@code rand} so both invocations
 * see an identical noise sequence.
 */
private <T extends ImageBase<T>>
void checkSubImage_process( T frame, GrayU8 segmented)
{
    rand = new Random(2345); // deterministic noise across the two runs
    BackgroundModelStationary<T> alg = create(frame.getImageType());

    for (int i = 0; i < 5; i++) {
        noise(100, 30, frame);
        alg.updateBackground(frame);
    }

    noise(100, 30, frame);
    alg.segment(frame, segmented);
}
/**
* For each band in the image have all put one be filled with a constant uniform color.
* Alternate which band has motion in it.
*/
@Test public void checkBandsUsed() {
    // run the per-band motion check across every supported image type
    for (ImageType imageType : imageTypes) {
        checkBandsUsed(imageType);
    }
}
/**
 * For each band in turn: train the model with noise only in that band, then verify
 * that the training frame reports no motion and that shifting that band's mean
 * (100 -> 200) makes every pixel report motion. Confirms all bands are considered.
 */
private <T extends ImageBase<T>> void checkBandsUsed( ImageType<T> imageType ) {
    BackgroundModelStationary<T> alg = create(imageType);
    T frame = imageType.createImage(width,height);

    int numBands = imageType.getNumBands();
    for (int band = 0; band < numBands; band++) {
        alg.reset(); // fresh model for each band
        for (int i = 0; i < 30; i++) {
            noiseBand(100, 2, frame, band);
            alg.updateBackground(frame);
        }
        GrayU8 segmented = new GrayU8(width,height);

        // segment with the current frame. should be no motion
        alg.segment(frame, segmented);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                assertEquals(0,segmented.get(x,y));
            }
        }

        // now the whole image should report motion
        noiseBand(200, 2, frame, band);
        alg.segment(frame, segmented);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                assertEquals(1,segmented.get(x,y));
            }
        }
    }
}
/**
 * Fills every band of the image with the constant 10, except {@code band}, which
 * is filled per-pixel with a uniform sample from [mean - range, mean + range).
 *
 * @param mean  center of the noise distribution for the selected band
 * @param range half-width of the uniform noise interval
 * @param image image to write into (all pixels are overwritten)
 * @param band  index of the band that receives noise
 */
protected void noiseBand( double mean , double range , ImageBase image , int band ) {
    // Java-style array declaration (was C-style "double pixel[]")
    double[] pixel = new double[ image.getImageType().getNumBands() ];
    Arrays.fill(pixel, 10);

    for (int y = 0; y < image.height; y++) {
        for (int x = 0; x < image.width; x++) {
            // uniform sample in [mean - range, mean + range)
            pixel[band] = mean + rand.nextDouble()*2*range - range;
            GeneralizedImageOps.setM(image, x, y, pixel);
        }
    }
}
/**
 * Fills the entire image with {@code mean}, then adds uniform noise in
 * [-range, range] to every pixel.
 */
protected void noise( double mean , double range , ImageBase image ) {
    GImageMiscOps.fill(image, mean);
    GImageMiscOps.addUniform(image, rand, -range, range);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.tests.integration.functions.utils;
import com.google.gson.Gson;
import java.util.HashMap;
import java.util.Map;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.apache.pulsar.tests.integration.topologies.PulsarCluster;
/**
 * Builds pulsar-admin CLI command strings used by the function integration tests:
 * create, update, delete and trigger. Optional fields left {@code null} are simply
 * omitted from the generated command line.
 */
@Getter
@Setter
@ToString
public class CommandGenerator {

    /** Supported function runtimes. */
    public enum Runtime {
        JAVA,
        PYTHON,
    }

    private String functionName;
    private String tenant = "public";
    private String namespace = "default";
    private String functionClassName;
    private String sourceTopic;
    private String sourceTopicPattern;
    private Map<String, String> customSereSourceTopics;
    private String sinkTopic;
    private String logTopic;
    private String outputSerDe;
    private String processingGuarantees;
    private Runtime runtime;
    private Integer parallelism;
    private String adminUrl;
    private Integer windowLengthCount;
    private Long windowLengthDurationMs;
    private Integer slidingIntervalCount;
    private Long slidingIntervalDurationMs;
    private Map<String, String> userConfig = new HashMap<>();

    // locations of the example artifacts inside the test containers
    private static final String JAVAJAR = "/pulsar/examples/api-examples.jar";
    private static final String PYTHONBASE = "/pulsar/examples/python-examples/";

    /** Creates a generator for a Java function reading from a single input topic. */
    public static CommandGenerator createDefaultGenerator(String sourceTopic, String functionClassName) {
        CommandGenerator generator = new CommandGenerator();
        generator.setSourceTopic(sourceTopic);
        generator.setFunctionClassName(functionClassName);
        generator.setRuntime(Runtime.JAVA);
        return generator;
    }

    /** Creates a generator for a Java function reading from a topic pattern. */
    public static CommandGenerator createTopicPatternGenerator(String sourceTopicPattern, String functionClassName) {
        CommandGenerator generator = new CommandGenerator();
        generator.setSourceTopicPattern(sourceTopicPattern);
        generator.setFunctionClassName(functionClassName);
        generator.setRuntime(Runtime.JAVA);
        return generator;
    }

    /** Appends {@code " <flag> <value>"} when {@code value} is non-null. */
    private static void appendOption(StringBuilder sb, String flag, Object value) {
        if (value != null) {
            sb.append(" ").append(flag).append(" ").append(value);
        }
    }

    /** Appends {@code " <flag> '<json>'"} when the map is non-null and non-empty. */
    private static void appendJsonOption(StringBuilder sb, String flag, Map<String, String> value) {
        if (value != null && !value.isEmpty()) {
            sb.append(" ").append(flag).append(" '").append(new Gson().toJson(value)).append("'");
        }
    }

    /** Appends the runtime-specific code location: --jar for Java, --py for Python. */
    private void appendRuntimeOptions(StringBuilder sb, String codeFile) {
        if (runtime == Runtime.JAVA) {
            sb.append(" --jar ").append(JAVAJAR);
        } else if (codeFile != null) {
            sb.append(" --py ").append(PYTHONBASE).append(codeFile);
        } else {
            sb.append(" --py ").append(PYTHONBASE);
        }
    }

    public String generateCreateFunctionCommand() {
        return generateCreateFunctionCommand(null);
    }

    /**
     * Builds a "functions create" command from the configured fields.
     *
     * @param codeFile python file name appended to {@link #PYTHONBASE}; ignored for Java
     * @return the full command line string
     */
    public String generateCreateFunctionCommand(String codeFile) {
        StringBuilder commandBuilder = new StringBuilder(PulsarCluster.ADMIN_SCRIPT);
        if (adminUrl != null) {
            commandBuilder.append(" --admin-url ").append(adminUrl);
        }
        commandBuilder.append(" functions create");
        appendOption(commandBuilder, "--tenant", tenant);
        appendOption(commandBuilder, "--namespace", namespace);
        appendOption(commandBuilder, "--name", functionName);
        // className is treated as mandatory and appended unconditionally
        commandBuilder.append(" --className ").append(functionClassName);
        appendOption(commandBuilder, "--inputs", sourceTopic);
        appendOption(commandBuilder, "--topics-pattern", sourceTopicPattern);
        appendOption(commandBuilder, "--logTopic", logTopic);
        appendJsonOption(commandBuilder, "--customSerdeInputs", customSereSourceTopics);
        appendOption(commandBuilder, "--output", sinkTopic);
        appendOption(commandBuilder, "--outputSerdeClassName", outputSerDe);
        appendOption(commandBuilder, "--processingGuarantees", processingGuarantees);
        appendJsonOption(commandBuilder, "--userConfig", userConfig);
        appendOption(commandBuilder, "--parallelism", parallelism);
        appendOption(commandBuilder, "--windowLengthCount", windowLengthCount);
        appendOption(commandBuilder, "--windowLengthDurationMs", windowLengthDurationMs);
        appendOption(commandBuilder, "--slidingIntervalCount", slidingIntervalCount);
        appendOption(commandBuilder, "--slidingIntervalDurationMs", slidingIntervalDurationMs);
        appendRuntimeOptions(commandBuilder, codeFile);
        return commandBuilder.toString();
    }

    public String generateUpdateFunctionCommand() {
        return generateUpdateFunctionCommand(null);
    }

    /**
     * Builds a "functions update" command. Option order differs slightly from
     * create (historical) and topics-pattern is not supported here.
     * NOTE(review): unlike create, this hardcodes "/pulsar/bin/pulsar-admin" rather
     * than using PulsarCluster.ADMIN_SCRIPT — confirm the two are equivalent
     * before unifying.
     *
     * @param codeFile python file name appended to {@link #PYTHONBASE}; ignored for Java
     * @return the full command line string
     */
    public String generateUpdateFunctionCommand(String codeFile) {
        StringBuilder commandBuilder = new StringBuilder();
        if (adminUrl == null) {
            commandBuilder.append("/pulsar/bin/pulsar-admin functions update");
        } else {
            commandBuilder.append("/pulsar/bin/pulsar-admin");
            commandBuilder.append(" --admin-url ");
            commandBuilder.append(adminUrl);
            commandBuilder.append(" functions update");
        }
        appendOption(commandBuilder, "--tenant", tenant);
        appendOption(commandBuilder, "--namespace", namespace);
        appendOption(commandBuilder, "--name", functionName);
        commandBuilder.append(" --className ").append(functionClassName);
        appendOption(commandBuilder, "--inputs", sourceTopic);
        appendJsonOption(commandBuilder, "--customSerdeInputs", customSereSourceTopics);
        appendOption(commandBuilder, "--output", sinkTopic);
        appendOption(commandBuilder, "--logTopic", logTopic);
        appendOption(commandBuilder, "--outputSerdeClassName", outputSerDe);
        appendOption(commandBuilder, "--processingGuarantees", processingGuarantees);
        appendJsonOption(commandBuilder, "--userConfig", userConfig);
        appendOption(commandBuilder, "--parallelism", parallelism);
        appendOption(commandBuilder, "--windowLengthCount", windowLengthCount);
        appendOption(commandBuilder, "--windowLengthDurationMs", windowLengthDurationMs);
        appendOption(commandBuilder, "--slidingIntervalCount", slidingIntervalCount);
        appendOption(commandBuilder, "--slidingIntervalDurationMs", slidingIntervalDurationMs);
        appendRuntimeOptions(commandBuilder, codeFile);
        return commandBuilder.toString();
    }

    /** Builds a "functions delete" command for the configured function. */
    public String generateDeleteFunctionCommand() {
        StringBuilder commandBuilder = new StringBuilder("/pulsar/bin/pulsar-admin functions delete");
        appendOption(commandBuilder, "--tenant", tenant);
        appendOption(commandBuilder, "--namespace", namespace);
        appendOption(commandBuilder, "--name", functionName);
        return commandBuilder.toString();
    }

    /**
     * @deprecated misspelled; kept for source compatibility with existing callers.
     *             Use {@link #generateDeleteFunctionCommand()}.
     */
    @Deprecated
    public String genereateDeleteFunctionCommand() {
        return generateDeleteFunctionCommand();
    }

    /** Builds a "functions trigger" command with the given trigger value. */
    public String generateTriggerFunctionCommand(String triggerValue) {
        StringBuilder commandBuilder = new StringBuilder("/pulsar/bin/pulsar-admin functions trigger");
        appendOption(commandBuilder, "--tenant", tenant);
        appendOption(commandBuilder, "--namespace", namespace);
        appendOption(commandBuilder, "--name", functionName);
        // triggerValue is appended unconditionally, matching historical behavior
        commandBuilder.append(" --triggerValue ").append(triggerValue);
        return commandBuilder.toString();
    }
}
| |
package utils;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Models an ad space with a fixed number of display slots auctioned among a set of
 * bidders. Slot assignment is probabilistic: the chance that a bidder wins the next
 * open slot is its bid power divided by the total bid power of all bidders not yet
 * placed. The class enumerates slot permutations to compute exact per-slot display
 * probabilities, with progressively faster implementations.
 */
public class AdSpace {

    int totalSlots;                               // number of ad slots available
    public List<AdBidder> bidders;                // current bidders in this space
    List<PermuteWithProbability> probabilities;   // lazily computed permutation probabilities
    // other attributes to add

    /** Lazily computes and caches the permutation probabilities for the current bidders. */
    public List<PermuteWithProbability> getProbabilities() {
        if (probabilities == null) {
            probabilities = calcProbabilities(bidders, totalSlots);
        }
        return probabilities;
    }

    public AdSpace(int totalSlots, List<AdBidder> bidders) {
        super();
        this.totalSlots = totalSlots;
        this.bidders = bidders;
        // probabilities = calcProbabilities(bidders, totalSlots);
    }

    /**
     * get all the probs of a new bidder being displaying in all the slots
     *
     * @param newBidder the candidate bidder to add to the auction
     * @return per-slot probability that {@code newBidder} is shown in that slot
     * @deprecated this method consumes lots of memory. Please use
     *             getNewBidderChancesFast()
     */
    @Deprecated
    public double[] getNewBidderChances(AdBidder newBidder) {
        List<AdBidder> withNew = new ArrayList<AdBidder>(bidders);
        withNew.add(newBidder);
        List<PermuteWithProbability> perms = calcProbabilities(withNew, totalSlots);
        double[] results = new double[totalSlots];
        for (PermuteWithProbability perm : perms) {
            List<AdBidder> list = perm.permute;
            int size = list.size();
            for (int i = 0; i < size; i++) {
                AdBidder ab = list.get(i);
                if (ab.equals(newBidder)) {
                    // accumulate this permutation's probability into slot i
                    results[i] += perm.probability;
                }
            }
        }
        return results;
    }

    /**
     * Finds all non-null subsequences of a list. E.g.
     * <code>subsequences([1, 2, 3])</code> would be: [[1, 2, 3], [1, 3], [2,
     * 3], [1, 2], [1], [2], [3]]
     *
     * @param items the List of items
     * @return the subsequences from items
     */
    public static <T> Set<List<T>> subsequences(List<T> items) {
        Set<List<T>> ans = new HashSet<List<T>>();
        for (T h : items) {
            // extend every known subsequence with h, keep the originals too
            Set<List<T>> next = new HashSet<List<T>>();
            for (List<T> it : ans) {
                List<T> sublist = new ArrayList<T>(it);
                sublist.add(h);
                next.add(sublist);
            }
            next.addAll(ans);
            // h on its own is also a subsequence
            List<T> hlist = new ArrayList<T>();
            hlist.add(h);
            next.add(hlist);
            ans = next;
        }
        return ans;
    }

    /**
     * Gets subsequences of a specific length.
     *
     * @param items  source list
     * @param length the number of elements in each returned combination
     *               (capped at items.size())
     * @return all subsequences of exactly {@code length} elements
     */
    public static <T> Set<List<T>> subsequences(List<T> items, int length) {
        if (length > items.size())
            length = items.size();
        Set<List<T>> ans = new HashSet<List<T>>();
        for (T h : items) {
            Set<List<T>> next = new HashSet<List<T>>();
            for (List<T> it : ans) {
                // only grow subsequences still below the target length
                if (it.size() < length) {
                    List<T> sublist = new ArrayList<T>(it);
                    sublist.add(h);
                    next.add(sublist);
                }
            }
            next.addAll(ans);
            List<T> hlist = new ArrayList<T>();
            hlist.add(h);
            next.add(hlist);
            ans = next;
        }
        // removed too short ones
        Iterator<List<T>> it = ans.iterator();
        while (it.hasNext()) {
            List<T> o = it.next();
            if (o.size() != length)
                it.remove();
        }
        return ans;
    }

    /**
     * Computes the probability of every slot permutation by materializing all of
     * them in memory first. Very slow.
     *
     * @param candies    the competing bidders
     * @param totalSlots number of slots to fill
     * @return every permutation paired with its occurrence probability
     * @deprecated memory unsafe.
     */
    @Deprecated
    public static List<PermuteWithProbability> calcProbabilities(List<AdBidder> candies, int totalSlots) {
        Set<List<AdBidder>> permutes = new HashSet<List<AdBidder>>();
        long t = System.currentTimeMillis();
        // Set<List<AdBidder>> subsequences =
        // GroovyCollections.subsequences(candies, totalSlots);
        Set<List<AdBidder>> subsequences = subsequences(candies, totalSlots);
        System.out.println("subseq used(ms): " + (System.currentTimeMillis() - t));
        t = System.currentTimeMillis();

        // expand every length-totalSlots subsequence into all of its orderings
        Iterator<List<AdBidder>> it = subsequences.iterator();
        while (it.hasNext()) {
            List<AdBidder> lab = it.next();
            PermutationGenerator<AdBidder> permu = new PermutationGenerator<AdBidder>(lab);
            while (permu.hasNext()) {
                permutes.add(permu.next());
            }
        }
        System.out.println("calc all permutes used(ms): " + (System.currentTimeMillis() - t));
        System.out.println("total exposure combo: " + permutes.size());

        List<PermuteWithProbability> calc = new ArrayList<PermuteWithProbability>();
        it = permutes.iterator();
        while (it.hasNext()) {
            List<AdBidder> per = it.next();
            List<AdBidder> placed = new ArrayList<AdBidder>();
            double prob = 1;
            int persize = per.size();
            for (int i = 0; i < persize; i++) {
                AdBidder cur = per.get(i);
                // chance of winning this slot = own power / power of everyone not yet placed
                Iterator<AdBidder> caniter = candies.iterator();
                int total = 0;
                while (caniter.hasNext()) {
                    AdBidder can = caniter.next();
                    if (!placed.contains(can))
                        total += can.getBidPower();
                }
                prob *= (cur.getBidPower() + 0.0) / total;
                placed.add(cur);
            }
            calc.add(new PermuteWithProbability(per, prob));
        }
        return calc;
    }

    /** Per-slot display chances for a prospective new bidder (streaming variant). */
    public List<Double> getNewBidderChancesFast(AdBidder newBidder) {
        return testNewBidderChancesFast(newBidder, this.bidders, totalSlots);
    }

    /** Per-slot display chances for a prospective new bidder (byte-indexed variant). */
    public List<Double> getNewBidderChances3(AdBidder newBidder) {
        return testNewBidderChances3(newBidder, this.bidders, totalSlots);
    }

    /**
     * Like {@link #getNewBidderChances(AdBidder)} but accumulates probabilities while
     * streaming over permutations instead of materializing them all.
     *
     * @param newBidder  candidate bidder (appended to the auction)
     * @param candies    the existing bidders
     * @param totalSlots number of slots
     * @return per-slot probability that {@code newBidder} appears in that slot
     */
    public static List<Double> testNewBidderChancesFast(AdBidder newBidder, List<AdBidder> candies, int totalSlots) {
        List<AdBidder> allBidders = new ArrayList<AdBidder>(candies);
        allBidders.add(newBidder);
        long t = System.currentTimeMillis();
        Set<List<AdBidder>> subsequences = subsequences(allBidders, totalSlots);
        System.out.println(subsequences.size() + " subseqs used(ms): " + (System.currentTimeMillis() - t));
        t = System.currentTimeMillis();

        // each bidder with a list of chances in each open slot
        Map<AdBidder, List<Double>> bidderChances = new HashMap<AdBidder, List<Double>>();
        Iterator<List<AdBidder>> it = subsequences.iterator();
        BigInteger bi = new BigInteger("0"); // permutation counter (diagnostics only)
        while (it.hasNext()) {
            List<AdBidder> lab = it.next();
            PermutationGenerator<AdBidder> permu = new PermutationGenerator<AdBidder>(lab);
            while (permu.hasNext()) {
                bi = bi.add(BigInteger.ONE);
                List<AdBidder> per = permu.next();
                List<AdBidder> placed = new ArrayList<AdBidder>();
                double prob = 1;
                int persize = per.size();
                for (int i = 0; i < persize; i++) {
                    AdBidder cur = per.get(i);
                    Iterator<AdBidder> caniter = allBidders.iterator();
                    int total = 0;
                    while (caniter.hasNext()) {
                        AdBidder can = caniter.next();
                        if (!placed.contains(can))
                            total += can.getBidPower();
                    }
                    prob *= (cur.getBidPower() + 0.0) / total;
                    placed.add(cur);
                }
                // fold this permutation's probability into each participant's slot totals
                for (int i = 0; i < persize; i++) {
                    AdBidder cur = per.get(i);
                    List<Double> list = bidderChances.get(cur);
                    if (list == null) {
                        list = new ArrayList<Double>();
                        for (int j = 0; j < persize; j++)
                            list.add(0.0); // was deprecated "new Double(0)"
                        bidderChances.put(cur, list);
                    }
                    Double double1 = list.get(i);
                    if (double1 == null)
                        double1 = 0d;
                    list.set(i, double1 + prob);
                }
            }
        }
        System.out.println("total permuts: " + bi.toString());
        return bidderChances.get(newBidder);
    }

    /**
     * Fastest variant: bidders are mapped to byte ids so permutations are byte
     * arrays and probabilities live in primitive double arrays.
     * NOTE(review): relies on RotateMutator.mutate() producing all orderings of
     * the seed sequence — confirm against RotateMutator's contract.
     *
     * @param newBidder  candidate bidder (appended last, so its id is size - 1)
     * @param candies    the existing bidders
     * @param totalSlots number of slots
     * @return per-slot probability that {@code newBidder} appears in that slot
     */
    public static List<Double> testNewBidderChances3(AdBidder newBidder, List<AdBidder> candies, int totalSlots) {
        List<AdBidder> allBidders = new ArrayList<AdBidder>(candies);
        allBidders.add(newBidder);

        // index bidders by byte id so permutations can be stored compactly
        AdBidder[] bidderArray = new AdBidder[allBidders.size()];
        allBidders.toArray(bidderArray);
        Map<AdBidder, Byte> bidderIndexMap = new HashMap<AdBidder, Byte>();
        for (byte i = 0; i < allBidders.size(); i++) {
            AdBidder b = bidderArray[i];
            bidderIndexMap.put(b, i);
        }

        long t = System.currentTimeMillis();
        Set<List<AdBidder>> subsequences = subsequences(allBidders, totalSlots);
        System.out.println(subsequences.size() + " subseqs used(ms): " + (System.currentTimeMillis() - t));

        // dim 1: all bidders, dim 2: chances to appear in each slot
        double[][] bidderChances = new double[allBidders.size()][];
        Iterator<List<AdBidder>> it = subsequences.iterator();
        long bi = 0; // permutation counter (diagnostics only)
        while (it.hasNext()) {
            List<AdBidder> lab = it.next();
            // translate this subsequence into byte ids
            byte[] seqBytes = new byte[totalSlots];
            for (int k = 0; k < totalSlots; k++) {
                AdBidder der = lab.get(k);
                seqBytes[k] = bidderIndexMap.get(der);
            }
            RotateMutator mu = new RotateMutator(seqBytes);
            byte[][] mutates = mu.mutate();
            for (byte[] mut : mutates) {
                bi++;
                Set<Integer> placed = new HashSet<Integer>();
                double prob = 1;
                for (int i = 0; i < totalSlots; i++) {
                    byte b = mut[i];
                    AdBidder cur = bidderArray[b];
                    // total power of bidders not yet placed
                    double total = 0;
                    for (int l = 0; l < bidderArray.length; l++) {
                        if (!placed.contains(l))
                            total += bidderArray[l].getBidPower();
                    }
                    prob *= (cur.getBidPower() + 0.0) / total;
                    placed.add((int) b);
                }
                // now we got the probability of one mutation
                for (int i = 0; i < totalSlots; i++) {
                    byte b = mut[i]; // the id of AdBidder
                    double[] list = bidderChances[b];
                    // this is the probs for one bidder in all slots
                    if (list == null) {
                        // init the chance array
                        list = new double[totalSlots];
                        bidderChances[b] = list;
                    }
                    list[i] = list[i] + prob;
                }
            }
        }
        System.out.println("total permuts: " + bi);

        // the new bidder was appended last, so its id is size - 1
        double[] newBidderChancesInSlots = bidderChances[allBidders.size() - 1];
        List<Double> results = new ArrayList<Double>();
        for (double d : newBidderChancesInSlots) {
            results.add(d);
        }
        return results;
    }

    // public static double calcProbForElementInSlot(String ) {
    //
    // }

    /**
     * the probability of pick up a value from a set of value candidates,
     * assuming the value is in the set and all elements are integer strings.
     * NOTE(review): currently unused within this class.
     *
     * @param cur         the candidate whose selection probability is wanted
     * @param candiesLeft the remaining candidates
     * @return cur's bid power divided by the total remaining bid power
     */
    private static double getProb(AdBidder cur, HashSet<AdBidder> candiesLeft) {
        Iterator<AdBidder> i = candiesLeft.iterator();
        int total = 0;
        while (i.hasNext()) {
            AdBidder next = i.next();
            total += next.getBidPower();
        }
        return (cur.getBidPower() + 0.0) / total;
    }

    /**
     * experiment permute: returns the r-th permutation (factorial number system)
     * of the list. NOTE(review): only valid while fac(n) fits in an int (n <= 12).
     *
     * @param list the items to permute (not modified)
     * @param r    permutation index
     * @return the r-th permutation of list
     */
    public static <T> List<T> permute(List<T> list, int r) {
        int n = list.size();
        int f = fac(n);
        List<T> perm = new ArrayList<T>();
        list = new ArrayList<T>(list);
        for (list = new ArrayList<T>(list); n > 0; n--, r %= f) {
            f /= n;
            perm.add(list.remove(r / f));
        }
        return perm;
    }

    /**
     * Iterative factorial. NOTE(review): int result overflows for n > 12; callers
     * (permute) must keep inputs small.
     */
    public static int fac(int n) {
        int f = 1;
        for (; n > 0; f *= n--)
            ;
        return f;
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.tools.binary;
import com.intellij.diff.DiffContext;
import com.intellij.diff.actions.impl.FocusOppositePaneAction;
import com.intellij.diff.contents.DiffContent;
import com.intellij.diff.contents.FileContent;
import com.intellij.diff.requests.ContentDiffRequest;
import com.intellij.diff.requests.DiffRequest;
import com.intellij.diff.tools.holders.BinaryEditorHolder;
import com.intellij.diff.tools.util.DiffNotifications;
import com.intellij.diff.tools.util.StatusPanel;
import com.intellij.diff.tools.util.TransferableFileEditorStateSupport;
import com.intellij.diff.tools.util.side.TwosideDiffViewer;
import com.intellij.diff.util.DiffUtil;
import com.intellij.diff.util.Side;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.Separator;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.intellij.diff.util.DiffUtil.getDiffSettings;
/**
 * Two-pane diff viewer for binary content. Shows both files in their regular file
 * editors, compares raw bytes in the background, and offers "copy content to the
 * other side" actions.
 */
public class TwosideBinaryDiffViewer extends TwosideDiffViewer<BinaryEditorHolder> {
  // syncs transferable editor state (e.g. view settings) between the two panes
  @NotNull private final TransferableFileEditorStateSupport myTransferableStateSupport;
  @NotNull private final StatusPanel myStatusPanel;

  public TwosideBinaryDiffViewer(@NotNull DiffContext context, @NotNull DiffRequest request) {
    super(context, (ContentDiffRequest)request, BinaryEditorHolder.BinaryEditorHolderFactory.INSTANCE);

    myStatusPanel = new StatusPanel();

    new MyFocusOppositePaneAction().install(myPanel);

    // arrow buttons between the panes for copying one side's bytes over the other
    myContentPanel.setTopAction(new MyAcceptSideAction(Side.LEFT));
    myContentPanel.setBottomAction(new MyAcceptSideAction(Side.RIGHT));

    myTransferableStateSupport = new TransferableFileEditorStateSupport(getDiffSettings(context), getEditorHolders(), this);
  }

  @Override
  protected void processContextHints() {
    super.processContextHints();
    myTransferableStateSupport.processContextHints(myRequest, myContext);
  }

  @Override
  protected void updateContextHints() {
    super.updateContextHints();
    myTransferableStateSupport.updateContextHints(myRequest, myContext);
  }

  @Override
  protected List<AnAction> createToolbarActions() {
    List<AnAction> group = new ArrayList<>();

    group.add(new MyAcceptSideAction(Side.LEFT));
    group.add(new MyAcceptSideAction(Side.RIGHT));
    group.add(Separator.getInstance());
    group.add(myTransferableStateSupport.createToggleAction());

    group.addAll(super.createToolbarActions());
    return group;
  }

  //
  // Diff
  //

  @Override
  protected void onSlowRediff() {
    super.onSlowRediff();
    // show a busy indicator while the byte comparison is still running
    myStatusPanel.setBusy(true);
  }

  /**
   * Compares the two files byte-for-byte on a background thread and returns a
   * Runnable (executed later on the EDT) that installs the resulting notification:
   * "equal contents", an error banner, "too large", or nothing when the files differ.
   */
  @Override
  @NotNull
  protected Runnable performRediff(@NotNull final ProgressIndicator indicator) {
    try {
      indicator.checkCanceled();

      // only file-backed contents can be compared here
      List<DiffContent> contents = myRequest.getContents();
      if (!(contents.get(0) instanceof FileContent) || !(contents.get(1) instanceof FileContent)) {
        return applyNotification(null);
      }

      final VirtualFile file1 = ((FileContent)contents.get(0)).getFile();
      final VirtualFile file2 = ((FileContent)contents.get(1)).getFile();

      final JComponent notification = ReadAction.compute(() -> {
        if (!file1.isValid() || !file2.isValid()) {
          return DiffNotifications.createError();
        }

        if (FileUtilRt.isTooLarge(file1.getLength()) ||
            FileUtilRt.isTooLarge(file2.getLength())) {
          return DiffNotifications.createNotification("Files are too large to compare");
        }

        try {
          // we can't use getInputStream() here because we can't restore BOM marker
          // (getBom() can return null for binary files, while getInputStream() strips BOM for all files).
          // It can be made for files from VFS that implements FileSystemInterface though.
          byte[] bytes1 = file1.contentsToByteArray();
          byte[] bytes2 = file2.contentsToByteArray();
          return Arrays.equals(bytes1, bytes2) ? DiffNotifications.createEqualContents() : null;
        }
        catch (IOException e) {
          LOG.warn(e);
          return null;
        }
      });

      return applyNotification(notification);
    }
    catch (ProcessCanceledException e) {
      // cancellation must propagate untouched
      throw e;
    }
    catch (Throwable e) {
      LOG.error(e);
      return applyNotification(DiffNotifications.createError());
    }
  }

  // Wraps the notification (possibly null) into the EDT-side apply step.
  @NotNull
  private Runnable applyNotification(@Nullable final JComponent notification) {
    return () -> {
      // clear previous state before installing the new notification
      clearDiffPresentation();
      if (notification != null) myPanel.addNotification(notification);
    };
  }

  private void clearDiffPresentation() {
    myStatusPanel.setBusy(false);
    myPanel.resetNotifications();
  }

  //
  // Getters
  //

  @NotNull
  FileEditor getCurrentEditor() {
    return getCurrentEditorHolder().getEditor();
  }

  @NotNull
  @Override
  protected JComponent getStatusPanel() {
    return myStatusPanel;
  }

  //
  // Misc
  //

  public static boolean canShowRequest(@NotNull DiffContext context, @NotNull DiffRequest request) {
    return TwosideDiffViewer.canShowRequest(context, request, BinaryEditorHolder.BinaryEditorHolderFactory.INSTANCE);
  }

  //
  // Actions
  //

  /** Copies the bytes of one side's file over the other side's file. */
  private class MyAcceptSideAction extends DumbAwareAction {
    @NotNull private final Side myBaseSide; // the side whose content is copied FROM

    MyAcceptSideAction(@NotNull Side baseSide) {
      myBaseSide = baseSide;
      getTemplatePresentation().setText("Copy Content to " + baseSide.select("Right", "Left"));
      getTemplatePresentation().setIcon(baseSide.select(AllIcons.Vcs.Arrow_right, AllIcons.Vcs.Arrow_left));
      setShortcutSet(ActionManager.getInstance().getAction(baseSide.select("Diff.ApplyLeftSide", "Diff.ApplyRightSide")).getShortcutSet());
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      // enabled only when both sides are file-backed and the target is writable
      VirtualFile baseFile = getContentFile(myBaseSide);
      VirtualFile targetFile = getContentFile(myBaseSide.other());
      boolean enabled = baseFile != null && targetFile != null && targetFile.isWritable();
      e.getPresentation().setEnabledAndVisible(enabled);
    }

    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
      final VirtualFile baseFile = getContentFile(myBaseSide);
      final VirtualFile targetFile = getContentFile(myBaseSide.other());
      assert baseFile != null && targetFile != null; // guaranteed by update()

      try {
        // file modification must happen inside a write action
        WriteAction.run(() -> {
          targetFile.setBinaryContent(baseFile.contentsToByteArray());
        });
      }
      catch (IOException err) {
        LOG.warn(err);
        Messages.showErrorDialog(getProject(), err.getMessage(), "Can't Copy File");
      }
    }

    // Returns the side's backing file, or null if absent/invalid.
    @Nullable
    private VirtualFile getContentFile(@NotNull Side side) {
      DiffContent content = side.select(myRequest.getContents());
      VirtualFile file = content instanceof FileContent ? ((FileContent)content).getFile() : null;
      return file != null && file.isValid() ? file : null;
    }
  }

  /** Moves keyboard focus to the opposite editor pane. */
  private class MyFocusOppositePaneAction extends FocusOppositePaneAction {
    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
      setCurrentSide(getCurrentSide().other());
      DiffUtil.requestFocus(getProject(), getPreferredFocusedComponent());
    }
  }
}
| |
package org.motechproject.nms.testing.it.flwUpdate;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.entity.mime.content.FileBody;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.motechproject.mtraining.domain.ActivityRecord;
import org.motechproject.mtraining.domain.ActivityState;
import org.motechproject.mtraining.domain.Bookmark;
import org.motechproject.mtraining.repository.ActivityDataService;
import org.motechproject.mtraining.repository.BookmarkDataService;
import org.motechproject.nms.csv.domain.CsvAuditRecord;
import org.motechproject.nms.csv.exception.CsvImportDataException;
import org.motechproject.nms.csv.repository.CsvAuditRecordDataService;
import org.motechproject.nms.flw.domain.FlwJobStatus;
import org.motechproject.nms.flw.domain.FrontLineWorker;
import org.motechproject.nms.flw.repository.FrontLineWorkerDataService;
import org.motechproject.nms.flw.service.FrontLineWorkerService;
import org.motechproject.nms.flwUpdate.service.FrontLineWorkerUpdateImportService;
import org.motechproject.nms.mobileacademy.domain.CourseCompletionRecord;
import org.motechproject.nms.mobileacademy.repository.CourseCompletionRecordDataService;
import org.motechproject.nms.region.repository.CircleDataService;
import org.motechproject.nms.region.repository.DistrictDataService;
import org.motechproject.nms.region.repository.LanguageDataService;
import org.motechproject.nms.region.repository.StateDataService;
import org.motechproject.nms.region.service.DistrictService;
import org.motechproject.nms.region.service.LanguageService;
import org.motechproject.nms.testing.it.api.utils.RequestBuilder;
import org.motechproject.nms.testing.it.utils.RegionHelper;
import org.motechproject.nms.testing.service.TestingService;
import org.motechproject.testing.osgi.BasePaxIT;
import org.motechproject.testing.osgi.container.MotechNativeTestContainerFactory;
import org.motechproject.testing.osgi.http.SimpleHttpClient;
import org.motechproject.testing.utils.TestContext;
import org.ops4j.pax.exam.ExamFactory;
import org.ops4j.pax.exam.junit.PaxExam;
import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy;
import org.ops4j.pax.exam.spi.reactors.PerSuite;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import javax.inject.Inject;
import java.io.*;
import java.util.HashMap;
import java.util.List;
import static org.junit.Assert.*;
@RunWith(PaxExam.class)
@ExamReactorStrategy(PerSuite.class)
@ExamFactory(MotechNativeTestContainerFactory.class)
public class FrontLineWorkerUpdateImportServiceBundleIT extends BasePaxIT {
@Inject
CircleDataService circleDataService;
@Inject
DistrictDataService districtDataService;
@Inject
DistrictService districtService;
@Inject
StateDataService stateDataService;
@Inject
LanguageDataService languageDataService;
@Inject
LanguageService languageService;
@Inject
FrontLineWorkerDataService frontLineWorkerDataService;
@Inject
FrontLineWorkerService frontLineWorkerService;
@Inject
TestingService testingService;
@Inject
FrontLineWorkerUpdateImportService frontLineWorkerUpdateImportService;
@Inject
CsvAuditRecordDataService csvAuditRecordDataService;
@Inject
CourseCompletionRecordDataService courseCompletionRecordDataService;
@Inject
BookmarkDataService bookmarkDataService;
@Inject
ActivityDataService activityDataService;
@Inject
PlatformTransactionManager transactionManager;
private RegionHelper rh;
@Before
public void setUp() {
// Build the region helper against the injected region services, then start
// every test from a clean database seeded with the reference data
// (Hindi/Kannada languages, Delhi state and circle) the import tests expect.
rh = new RegionHelper(languageDataService, languageService, circleDataService, stateDataService,
districtDataService, districtService);
testingService.clearDatabase();
rh.hindiLanguage();
rh.kannadaLanguage();
rh.delhiState();
rh.delhiCircle();
}
// Test when state not provided
@Test(expected = CsvImportDataException.class)
public void testImportWhenStateNotPresent() throws Exception {
Reader reader = createLanguageReaderWithHeaders("72185,210302604211400029,9439986187,en,");
frontLineWorkerUpdateImportService.importLanguageData(reader);
}
// Test when state not in database
@Test(expected = CsvImportDataException.class)
public void testImportWhenStateNotInDatabase() throws Exception {
FrontLineWorker flw = new FrontLineWorker(9439986187L);
frontLineWorkerService.add(flw);
Reader reader = createLanguageReaderWithHeaders("72185,210302604211400029,9439986187,en,2");
frontLineWorkerUpdateImportService.importLanguageData(reader);
}
// Test when language not provided
@Test(expected = CsvImportDataException.class)
public void testImportWhenLanguageNotPresent() throws Exception {
Reader reader = createLanguageReaderWithHeaders("72185,210302604211400029,9439986187,,1");
frontLineWorkerUpdateImportService.importLanguageData(reader);
}
// Test when language not in database
@Test(expected = CsvImportDataException.class)
public void testImportWhenLanguageNotInDatabase() throws Exception {
FrontLineWorker flw = new FrontLineWorker(9439986187L);
frontLineWorkerService.add(flw);
Reader reader = createLanguageReaderWithHeaders("72185,210302604211400029,9439986187,en,1");
frontLineWorkerUpdateImportService.importLanguageData(reader);
}
// Test when only NMS Id found and FLW not in database
@Test(expected = CsvImportDataException.class)
public void testImportWhenFLWIdProvidedButNotInDatabase() throws Exception {
Reader reader = createLanguageReaderWithHeaders("72185,,,hi,1");
frontLineWorkerUpdateImportService.importLanguageData(reader);
}
// NMS_FT_553
// Test when only MCTS Id found and FLW not in database
@Test(expected = CsvImportDataException.class)
public void testImportWhenMCTSIdProvidedButNotInDatabase() throws Exception {
Reader reader = createLanguageReaderWithHeaders(",210302604211400029,,hi,1");
frontLineWorkerUpdateImportService.importLanguageData(reader);
}
// NMS_FT_554
// Test when only MSISDN found and FLW not in database
@Test(expected = CsvImportDataException.class)
public void testImportWhenMSISDProvidedButNotInDatabase() throws Exception {
Reader reader = createLanguageReaderWithHeaders(",,9439986187,hi,1");
frontLineWorkerUpdateImportService.importLanguageData(reader);
}
// Test NMS Id takes precedence over MCTS ID
@Test
public void testImportWhenNMSIdTakesPrecedenceOverMCTSId() throws Exception {
// FLW #1 matches the CSV row by its NMS flwId only.
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
flw.setLanguage(rh.kannadaLanguage());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
// FLW #2 matches the same row by its MCTS id only.
flw = new FrontLineWorker(2000000000L);
flw.setMctsFlwId("210302604211400029");
flw.setLanguage(rh.kannadaLanguage());
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
// The row carries both ids; the importer must resolve it via the NMS id.
Reader reader = createLanguageReaderWithHeaders("72185,210302604211400029,,hi,1");
frontLineWorkerUpdateImportService.importLanguageData(reader);
// Only the NMS-id match is updated to Hindi...
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertEquals(rh.hindiLanguage(), flw.getLanguage());
// ...while the MCTS-id match keeps its original language.
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertEquals(rh.kannadaLanguage(), flw.getLanguage());
}
// Test NMS Id takes precedence over MSISDN
@Test
public void testImportWhenNMSIdTakesPrecedenceOverMSIDN() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
flw.setLanguage(rh.kannadaLanguage());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(2000000000L);
flw.setLanguage(rh.kannadaLanguage());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
Reader reader = createLanguageReaderWithHeaders("72185,,2000000000,hi,1");
frontLineWorkerUpdateImportService.importLanguageData(reader);
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertEquals(rh.hindiLanguage(), flw.getLanguage());
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertEquals(rh.kannadaLanguage(), flw.getLanguage());
}
// Test MCTS Id takes precedence over MSISDN
@Test
public void testImportWhenMCTSIdTakesPrecedenceOverMSIDN() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setMctsFlwId("210302604211400029");
flw.setLanguage(rh.kannadaLanguage());
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(2000000000L);
flw.setLanguage(rh.kannadaLanguage());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
Reader reader = createLanguageReaderWithHeaders("72185,210302604211400029,2000000000,hi,1");
frontLineWorkerUpdateImportService.importLanguageData(reader);
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertEquals(rh.hindiLanguage(), flw.getLanguage());
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertEquals(rh.kannadaLanguage(), flw.getLanguage());
}
// Test MSISDN only
@Test
public void testImportWhenMSISDNOnly() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setLanguage(rh.kannadaLanguage());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(2000000000L);
flw.setLanguage(rh.kannadaLanguage());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
Reader reader = createLanguageReaderWithHeaders("72185,210302604211400029,1000000000,hi,1");
frontLineWorkerUpdateImportService.importLanguageData(reader);
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertEquals(rh.hindiLanguage(), flw.getLanguage());
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertEquals(rh.kannadaLanguage(), flw.getLanguage());
}
@Test
public void testImportFromSampleLanguageDataFile() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setLanguage(rh.kannadaLanguage());
flw.setFlwId("72185");
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(2000000000L);
flw.setLanguage(rh.kannadaLanguage());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
frontLineWorkerUpdateImportService.importLanguageData(read("csv/flw_language_update.csv"));
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertEquals(rh.hindiLanguage(), flw.getLanguage());
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertEquals(rh.hindiLanguage(), flw.getLanguage());
}
/************************************************************************************************************
MSISDN TESTS
***********************************************************************************************************/
// Test when new msisdn not provided
@Test(expected = CsvImportDataException.class)
public void testMsisdnImportWhenNewMsisdnNotPresent() throws Exception {
Reader reader = createMSISDNReaderWithHeaders("72185,210302604211400029,9439986187,,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
}
// Test when only NMS Id found and FLW not in database
@Test(expected = CsvImportDataException.class)
public void testMsisdnImportWhenFLWIdProvidedButNotInDatabase() throws Exception {
Reader reader = createMSISDNReaderWithHeaders("72185,,,9439986187,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
}
// Test when only MCTS Id found and FLW not in database
@Test(expected = CsvImportDataException.class)
public void testMsisdnImportWhenMCTSIdProvidedButNotInDatabase() throws Exception {
Reader reader = createMSISDNReaderWithHeaders(",210302604211400029,,9439986187,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
}
// Test when only MSISDN found and FLW not in database
@Test(expected = CsvImportDataException.class)
public void testMsisdnImportWhenMSISDNProvidedButNotInDatabase() throws Exception {
Reader reader = createMSISDNReaderWithHeaders(",,9439986187,9439986188,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
}
// Test NMS Id takes precedence over MCTS ID
@Test
public void testMsisdnImportWhenNMSIdTakesPrecedenceOverMCTSId() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(2000000000L);
flw.setMctsFlwId("210302604211400029");
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
Reader reader = createMSISDNReaderWithHeaders("72185,210302604211400029,,9439986187,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
flw = frontLineWorkerService.getByContactNumber(9439986187L);
assertNull(flw);
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertNull(flw);
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertNotNull(flw);
}
// Test NMS Id takes precedence over MSISDN
@Test
public void testMsisdnImportWhenNMSIdTakesPrecedenceOverMSIDN() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(2000000000L);
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
Reader reader = createMSISDNReaderWithHeaders("72185,,2000000000,9439986187,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
flw = frontLineWorkerService.getByContactNumber(9439986187L);
assertNotNull(flw);
assertEquals("72185", flw.getFlwId());
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertNull(flw);
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertNotNull(flw);
}
// Test MCTS Id takes precedence over MSISDN
@Test
public void testMsisdnImportWhenMCTSIdTakesPrecedenceOverMSIDN() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setMctsFlwId("210302604211400029");
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(2000000000L);
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
Reader reader = createMSISDNReaderWithHeaders("72185,210302604211400029,2000000000,9439986187,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
flw = frontLineWorkerService.getByContactNumber(9439986187L);
assertNotNull(flw);
assertEquals("210302604211400029", flw.getMctsFlwId());
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertNull(flw);
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertNotNull(flw);
}
// Test MSISDN only flw Update and Bookmark, Completion and Activity Record
@Test
public void testMsisdnImportWhenMSISDNOnly() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(2000000000L);
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
createMaRecords(1000000000L);
Reader reader = createMSISDNReaderWithHeaders("72185,210302604211400029,1000000000,9439986187,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
flw = frontLineWorkerService.getByContactNumber(9439986187L);
assertNotNull(flw);
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertNull(flw);
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertNotNull(flw);
assertMaRecords(1000000000L, 9439986187L);
}
@Test
public void testMsisdnImportFromSampleDataFile() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(2000000000L);
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
frontLineWorkerUpdateImportService.importMSISDNData(read("csv/flw_msisdn_update.csv"));
flw = frontLineWorkerService.getByContactNumber(9439986187L);
assertNotNull(flw);
flw = frontLineWorkerService.getByContactNumber(9439986188L);
assertNotNull(flw);
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertNull(flw);
flw = frontLineWorkerService.getByContactNumber(2000000000L);
assertNull(flw);
}
// Test new MSISDN larger than 10 digits
@Test
public void testMsisdnImportWhenNewMsisdnTooLong() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
Reader reader = createMSISDNReaderWithHeaders("72185,210302604211400029,1000000000,09439986187,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
flw = frontLineWorkerService.getByContactNumber(9439986187L);
assertNotNull(flw);
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertNull(flw);
}
// NMS_FT_557
// Test new MSISDN not a valid number
@Test(expected = CsvImportDataException.class)
public void testMsisdnImportWhenMSISDNProvidedButNotValid() throws Exception {
Reader reader = createMSISDNReaderWithHeaders(",,9439986187,AAAAAAAAAA,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
}
// NMS_FT_557
// Test new MSISDN less than 10 digits
@Test(expected = CsvImportDataException.class)
public void testMsisdnImportWhenMSISDNProvidedButTooShort() throws Exception {
Reader reader = createMSISDNReaderWithHeaders(",,9439986187,943998618,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
}
// NMS_FT_556
// Test new MSISDN associated with existing FLW
@Test(expected = CsvImportDataException.class)
public void testMsisdnImportWhenMSISDNProvidedButAlreadyInUse() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(9439986187L);
frontLineWorkerService.add(flw);
Reader reader = createMSISDNReaderWithHeaders(",,9439986187,1000000000,1");
frontLineWorkerUpdateImportService.importMSISDNData(reader);
}
// Test Ma Update when new MSISDN associated with existing FLW
// Fix: the original try/catch passed silently when NO exception was thrown;
// a fail() after the import call now makes the expected rejection mandatory.
@Test
public void testMaUpdateWhenMSISDNProvidedIsAlreadyInUse() throws Exception {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
frontLineWorkerService.add(flw);
flw = new FrontLineWorker(9439986187L);
frontLineWorkerService.add(flw);
createMaRecords(9439986187L);
createMaRecords(1000000000L);
// Baseline: 1 bookmark and 2 activity records for the target number.
assertBookmark("1000000000", 1);
assertActivity("1000000000", 2);
Reader reader = createMSISDNReaderWithHeaders(",,9439986187,1000000000,1");
try {
frontLineWorkerUpdateImportService.importMSISDNData(reader);
fail("Expected CsvImportDataException: new MSISDN already belongs to another FLW");
} catch (CsvImportDataException e) {
assertBookmark("1000000000", 1); // Records expected is 1 instead of 2 since update fails
assertActivity("1000000000", 2);
}
}
private Reader createMSISDNReaderWithHeaders(String... lines) {
    // Assemble an in-memory CSV document: the fixed MSISDN-update header row
    // followed by the supplied data rows, each terminated by a newline.
    final StringBuilder csv = new StringBuilder("NMS FLW-ID,MCTS FLW-ID,MSISDN,NEW MSISDN,STATE").append('\n');
    for (String row : lines) {
        csv.append(row).append('\n');
    }
    return new StringReader(csv.toString());
}
private Reader createLanguageReaderWithHeaders(String... lines) {
    // Assemble an in-memory CSV document: the fixed language-update header row
    // followed by the supplied data rows, each terminated by a newline.
    final StringBuilder csv = new StringBuilder("NMS FLW-ID,MCTS FLW-ID,MSISDN,LANGUAGE CODE,STATE").append('\n');
    for (String row : lines) {
        csv.append(row).append('\n');
    }
    return new StringReader(csv.toString());
}
// Opens a classpath resource (test CSV fixture) as a character stream.
// NOTE(review): getResourceAsStream returns null for a missing resource, which
// would surface here as a NullPointerException — relies on fixtures existing.
// NOTE(review): no explicit charset — uses the platform default; presumably
// the fixtures are ASCII, so this is benign. Confirm if non-ASCII data is added.
private Reader read(String resource) {
return new InputStreamReader(getClass().getClassLoader().getResourceAsStream(resource));
}
/**
* Imports a CSV file through the FLW-update HTTP endpoint as the admin user.
*
* @param option update mode: "msisdn" or "language"; blank/null posts to the
* generic /flwUpdate/import endpoint instead of /flwUpdate/update/{option}
* @param fileName CSV fixture name under src/test/resources/csv/
* @return the raw HTTP response (status is asserted by callers)
*/
private HttpResponse importCsvFileForFLWUpdate(String option,
String fileName)
throws InterruptedException, IOException {
HttpPost httpPost;
if (StringUtils.isBlank(option)) {
// update using import
httpPost = new HttpPost(String.format(
"http://localhost:%d/flwUpdate/import",
TestContext.getJettyPort()));
} else {
httpPost = new HttpPost(String.format(
"http://localhost:%d/flwUpdate/update/%s",
TestContext.getJettyPort(), option));
}
// Attach the CSV file as a browser-compatible multipart form field "csvFile".
MultipartEntityBuilder builder = MultipartEntityBuilder.create();
builder.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
builder.addPart(
"csvFile",
new FileBody(new File(String.format(
"src/test/resources/csv/%s", fileName))));
httpPost.setEntity(builder.build());
// Authenticate with the admin credentials expected by the endpoint.
HttpResponse response = SimpleHttpClient.httpRequestAndResponse(
httpPost, RequestBuilder.ADMIN_USERNAME,
RequestBuilder.ADMIN_PASSWORD);
return response;
}
/**
* To verify language is updated successfully when MCTS FLW ID is provided.
*/
@Test
public void verifyFT550() throws InterruptedException, IOException {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
flw.setMctsFlwId("210302604211400029");
flw.setLanguage(rh.kannadaLanguage());
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
assertEquals(
HttpStatus.SC_OK,
importCsvFileForFLWUpdate("language",
"flw_language_update_only_flwId.csv").getStatusLine()
.getStatusCode());
flw = frontLineWorkerService.getByFlwId("72185");
assertEquals(rh.hindiLanguage(), flw.getLanguage());
assertEquals(1, csvAuditRecordDataService.count());
assertEquals("Success", csvAuditRecordDataService.retrieveAll().get(0)
.getOutcome());
}
/**
* To verify language is updated successfully when MSISDN is provided.
*/
@Test
public void verifyFT551() throws InterruptedException, IOException {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
flw.setMctsFlwId("210302604211400029");
flw.setLanguage(rh.kannadaLanguage());
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
assertEquals(
HttpStatus.SC_OK,
importCsvFileForFLWUpdate("language",
"flw_language_update_only_MSISDN.csv").getStatusLine()
.getStatusCode());
flw = frontLineWorkerService.getByFlwId("72185");
assertEquals(rh.hindiLanguage(), flw.getLanguage());
assertEquals(1, csvAuditRecordDataService.count());
assertEquals("Success", csvAuditRecordDataService.retrieveAll().get(0)
.getOutcome());
}
/**
* To verify language updated is getting rejected when language provided is
* having invalid value.
*/
// TODO JIRA issue https://applab.atlassian.net/browse/NMS-252
@Test
public void verifyFT552() throws InterruptedException, IOException {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
flw.setMctsFlwId("210302604211400029");
flw.setLanguage(rh.kannadaLanguage());
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
frontLineWorkerService.add(flw);
assertEquals(
HttpStatus.SC_BAD_REQUEST,
importCsvFileForFLWUpdate("language",
"flw_language_update_lang_error.csv").getStatusLine()
.getStatusCode());
flw = frontLineWorkerService.getByFlwId("72185");
assertEquals(rh.kannadaLanguage(), flw.getLanguage());
assertEquals(1, csvAuditRecordDataService.count());
assertTrue(csvAuditRecordDataService.retrieveAll().get(0).getOutcome()
.contains("Failure"));
}
/**
* To verify MSISDN is updated successfully when MCTS FLW ID is provided.
*/
@Test
public void verifyFT555() throws InterruptedException, IOException {
FrontLineWorker flw = new FrontLineWorker(1000000000L);
flw.setFlwId("72185");
flw.setMctsFlwId("210302604211400029");
flw.setLanguage(rh.kannadaLanguage());
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
assertEquals(
HttpStatus.SC_OK,
importCsvFileForFLWUpdate("msisdn",
"flw_msisdn_update_only_flwId.csv").getStatusLine()
.getStatusCode());
flw = frontLineWorkerService.getByContactNumber(9439986187L);
assertNotNull(flw);
flw = frontLineWorkerService.getByContactNumber(1000000000L);
assertNull(flw);
assertEquals(1, csvAuditRecordDataService.count());
assertEquals("Success", csvAuditRecordDataService.retrieveAll().get(0)
.getOutcome());
}
/*
* To verify location is updated successfully when MCTS FLW ID is provided.
*/
// TODO https://applab.atlassian.net/browse/NMS-255
@Test
public void verifyFT558() throws InterruptedException, IOException {
TransactionStatus status = transactionManager.getTransaction(new DefaultTransactionDefinition());
// create FLW record having state as "Delhi" and district as "new delhi district"
FrontLineWorker flw = new FrontLineWorker("Aisha Bibi", 1234567899L);
flw.setMctsFlwId("10");
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setLanguage(rh.hindiLanguage());
flw.setJobStatus(FlwJobStatus.ACTIVE);
frontLineWorkerService.add(flw);
transactionManager.commit(status);
// update FLW district to "southDelhiDistrict"
rh.southDelhiDistrict();
HttpResponse response = importCsvFileForFLWUpdate(null,
"flw_FT_558.txt");
assertEquals(HttpStatus.SC_OK, response.getStatusLine()
.getStatusCode());
flw = frontLineWorkerService.getByContactNumber(1234567899L);
assertEquals(rh.southDelhiDistrict().getCode(), flw.getDistrict()
.getCode());
assertEquals(rh.delhiState().getCode(), flw.getState().getCode());
// Language should not be updated
assertEquals(rh.hindiLanguage().getCode(), flw.getLanguage().getCode());
// Assert audit trail log
CsvAuditRecord csvAuditRecord = csvAuditRecordDataService.retrieveAll()
.get(0);
assertEquals("/flwUpdate/import", csvAuditRecord.getEndpoint());
assertTrue(csvAuditRecord.getOutcome().contains("Success"));
assertEquals("flw_FT_558.txt", csvAuditRecord.getFile());
}
/*
* To verify location update is rejected when state provided is having
* invalid value.
*/
@Test
public void verifyFT560() throws InterruptedException, IOException {
// create FLW record
FrontLineWorker flw = new FrontLineWorker("Aisha Bibi", 1234567899L);
flw.setMctsFlwId("10");
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setLanguage(rh.hindiLanguage());
frontLineWorkerService.add(flw);
// update state to "State 10" which doesn't exist in DB
HttpResponse response = importCsvFileForFLWUpdate(null,
"flw_FT_560.txt");
assertEquals(HttpStatus.SC_BAD_REQUEST, response.getStatusLine()
.getStatusCode());
// Assert audit trail log
CsvAuditRecord csvAuditRecord = csvAuditRecordDataService.retrieveAll()
.get(0);
assertEquals("/flwUpdate/import", csvAuditRecord.getEndpoint());
assertTrue(csvAuditRecord.getOutcome().contains("Failure: "));
assertEquals("flw_FT_560.txt", csvAuditRecord.getFile());
}
/*
* To verify location update is rejected when District provided is having
* invalid value.
*/
@Test
public void verifyFT561() throws InterruptedException, IOException {
// create FLW record
FrontLineWorker flw = new FrontLineWorker("Aisha Bibi", 1234567899L);
flw.setMctsFlwId("10");
flw.setState(rh.delhiState());
flw.setDistrict(rh.newDelhiDistrict());
flw.setLanguage(rh.hindiLanguage());
frontLineWorkerService.add(flw);
// update FLW district to a value which doesn't exist in DB
HttpResponse response = importCsvFileForFLWUpdate(null,
"flw_FT_561.txt");
assertEquals(HttpStatus.SC_BAD_REQUEST, response.getStatusLine()
.getStatusCode());
// Assert audit trail log
CsvAuditRecord csvAuditRecord = csvAuditRecordDataService.retrieveAll()
.get(0);
assertEquals("/flwUpdate/import", csvAuditRecord.getEndpoint());
assertTrue(csvAuditRecord.getOutcome().contains("Failure: "));
assertEquals("flw_FT_561.txt", csvAuditRecord.getFile());
}
/**
* Creates the full set of MA (mobile-academy) records for one contact number:
* one bookmark, one course-completion record, and two activity records
* (STARTED and COMPLETED). Tests use this to verify that MA records follow
* the FLW when its MSISDN is updated.
*/
private void createMaRecords(Long contactNumber) {
bookmarkDataService.create(new Bookmark(contactNumber.toString(), "1", "1", "1", new HashMap<String, Object>()));
CourseCompletionRecord ccr = new CourseCompletionRecord(contactNumber, 35, "score", false);
courseCompletionRecordDataService.create(ccr);
// String externalId, String courseName, String chapterName, String lessonName, DateTime startTime, DateTime completionTime, ActivityState.STARTED);
ActivityRecord ar = new ActivityRecord(contactNumber.toString(), "1", "1", "1", null,null , ActivityState.STARTED);
activityDataService.create(ar);
ar = new ActivityRecord(contactNumber.toString(), "1", "1", "1", null,null , ActivityState.COMPLETED);
activityDataService.create(ar);
}
private void assertMaRecords(Long oldContactNumber, Long newContactNumber) {
    // After an MSISDN update every MA record must have migrated from the old
    // number to the new one: 1 bookmark and 2 activity records move over.
    final String previous = oldContactNumber.toString();
    final String current = newContactNumber.toString();
    assertBookmark(previous, 0);
    assertActivity(previous, 0);
    assertBookmark(current, 1);
    assertActivity(current, 2);
}
// Asserts the number of bookmark records stored for the given contact number.
// assertEquals reports expected vs. actual on failure, unlike the original
// assertTrue(size == expected), which failed without any diagnostic values.
private void assertBookmark(String contactNumber, int expected) {
List<Bookmark> bm = bookmarkDataService.findBookmarksForUser(contactNumber);
assertEquals(expected, bm.size());
}
// Asserts the number of activity records stored for the given contact number.
// assertEquals reports expected vs. actual on failure, unlike the original
// assertTrue(size == expected), which failed without any diagnostic values.
private void assertActivity(String contactNumber, int expected) {
List<ActivityRecord> ar = activityDataService.findRecordsForUser(contactNumber);
assertEquals(expected, ar.size());
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.java.decompiler.main.rels;
import org.jetbrains.java.decompiler.code.CodeConstants;
import org.jetbrains.java.decompiler.main.ClassesProcessor.ClassNode;
import org.jetbrains.java.decompiler.main.DecompilerContext;
import org.jetbrains.java.decompiler.main.collectors.CounterContainer;
import org.jetbrains.java.decompiler.main.collectors.VarNamesCollector;
import org.jetbrains.java.decompiler.main.extern.IFernflowerLogger;
import org.jetbrains.java.decompiler.main.extern.IFernflowerPreferences;
import org.jetbrains.java.decompiler.modules.decompiler.exps.*;
import org.jetbrains.java.decompiler.modules.decompiler.sforms.DirectGraph;
import org.jetbrains.java.decompiler.modules.decompiler.sforms.DirectNode;
import org.jetbrains.java.decompiler.modules.decompiler.stats.DoStatement;
import org.jetbrains.java.decompiler.modules.decompiler.stats.RootStatement;
import org.jetbrains.java.decompiler.modules.decompiler.stats.Statement;
import org.jetbrains.java.decompiler.modules.decompiler.vars.VarTypeProcessor;
import org.jetbrains.java.decompiler.modules.decompiler.vars.VarVersionPair;
import org.jetbrains.java.decompiler.struct.StructClass;
import org.jetbrains.java.decompiler.struct.StructField;
import org.jetbrains.java.decompiler.struct.StructMethod;
import org.jetbrains.java.decompiler.struct.attr.StructEnclosingMethodAttribute;
import org.jetbrains.java.decompiler.struct.attr.StructGeneralAttribute;
import org.jetbrains.java.decompiler.struct.gen.MethodDescriptor;
import org.jetbrains.java.decompiler.struct.gen.VarType;
import org.jetbrains.java.decompiler.util.InterpreterUtil;
import java.util.*;
import java.util.Map.Entry;
public class NestedClassProcessor {
/**
* Recursively post-processes the nested classes of {@code node}: hides the
* synthetic content method of lambda classes, ensures every local/member
* class has a simple name, propagates captured variables into lambdas and
* inner classes, and attaches local-class definitions to their enclosing
* methods. Finally recurses into each nested child.
*/
public void processClass(ClassNode root, ClassNode node) {
// hide synthetic lambda content methods
if (node.type == ClassNode.CLASS_LAMBDA && !node.lambdaInformation.is_method_reference) {
ClassNode node_content = DecompilerContext.getClassProcessor().getMapRootClasses().get(node.classStruct.qualifiedName);
if (node_content != null && node_content.getWrapper() != null) {
node_content.getWrapper().getHiddenMembers().add(node.lambdaInformation.content_method_key);
}
}
if (node.nested.isEmpty()) {
return;
}
if (node.type != ClassNode.CLASS_LAMBDA) {
computeLocalVarsAndDefinitions(node);
// for each local or anonymous class ensure not empty enclosing method
checkNotFoundClasses(root, node);
}
// Pass 1: assign synthetic names to local/member classes that have none.
int nameless = 0, synthetics = 0;
for (ClassNode child : node.nested) {
StructClass cl = child.classStruct;
// ensure not-empty class name
if ((child.type == ClassNode.CLASS_LOCAL || child.type == ClassNode.CLASS_MEMBER) && child.simpleName == null) {
if ((child.access & CodeConstants.ACC_SYNTHETIC) != 0 || cl.isSynthetic()) {
child.simpleName = "SyntheticClass_" + (++synthetics);
}
else {
String message = "Nameless local or member class " + cl.qualifiedName + "!";
DecompilerContext.getLogger().writeMessage(message, IFernflowerLogger.Severity.WARN);
child.simpleName = "NamelessClass_" + (++nameless);
}
}
}
// Pass 2: wire captured variables; non-static member, local and anonymous
// classes get outer locals inserted, lambdas get their vars renamed.
for (ClassNode child : node.nested) {
if (child.type == ClassNode.CLASS_LAMBDA) {
setLambdaVars(node, child);
}
else if (child.type != ClassNode.CLASS_MEMBER || (child.access & CodeConstants.ACC_STATIC) == 0) {
insertLocalVars(node, child);
if (child.type == ClassNode.CLASS_LOCAL && child.enclosingMethod != null) {
MethodWrapper enclosingMethodWrapper = node.getWrapper().getMethods().getWithKey(child.enclosingMethod);
if(enclosingMethodWrapper != null) { // e.g. in case of switch-on-enum. FIXME: some proper handling of multiple enclosing classes
setLocalClassDefinition(enclosingMethodWrapper, child);
}
}
}
}
// Pass 3: recurse into the children.
for (ClassNode child : node.nested) {
processClass(root, child);
}
}
/**
* Renames the parameters of a lambda's desugared content method so that
* captured variables keep the names they carry in the enclosing method, and
* remaining (declared) lambda parameters get names that do not collide with
* the enclosing scope. No-op for method references, which have no body.
*/
private static void setLambdaVars(ClassNode parent, ClassNode child) {
if (child.lambdaInformation.is_method_reference) { // method reference, no code and no parameters
return;
}
// content method = the synthetic method holding the lambda body;
// enclosingMethod = the method in which the lambda literal appears.
MethodWrapper method = parent.getWrapper().getMethods().getWithKey(child.lambdaInformation.content_method_key);
MethodWrapper enclosingMethod = parent.getWrapper().getMethods().getWithKey(child.enclosingMethod);
MethodDescriptor md_lambda = MethodDescriptor.parseDescriptor(child.lambdaInformation.method_descriptor);
MethodDescriptor md_content = MethodDescriptor.parseDescriptor(child.lambdaInformation.content_method_descriptor);
// The leading parameters of the content method are the captured variables;
// the trailing ones correspond to the functional interface's parameters.
int vars_count = md_content.params.length - md_lambda.params.length;
boolean is_static_lambda_content = child.lambdaInformation.is_content_method_static;
String parent_class_name = parent.getWrapper().getClassStruct().qualifiedName;
String lambda_class_name = child.simpleName;
VarType lambda_class_type = new VarType(lambda_class_name, true);
// this pointer
if (!is_static_lambda_content && DecompilerContext.getOption(IFernflowerPreferences.LAMBDA_TO_ANONYMOUS_CLASS)) {
method.varproc.getThisVars().put(new VarVersionPair(0, 0), parent_class_name);
method.varproc.setVarName(new VarVersionPair(0, 0), parent.simpleName + ".this");
}
Map<VarVersionPair, String> mapNewNames = new HashMap<>();
// Scan the enclosing method for the "new" expression that creates this
// lambda, and derive names for the content method's parameters from it.
enclosingMethod.getOrBuildGraph().iterateExprents(exprent -> {
List<Exprent> lst = exprent.getAllExprents(true);
lst.add(exprent);
for (Exprent expr : lst) {
if (expr.type == Exprent.EXPRENT_NEW) {
NewExprent new_expr = (NewExprent)expr;
VarNamesCollector enclosingCollector = new VarNamesCollector(enclosingMethod.varproc.getVarNames());
if (new_expr.isLambda() && lambda_class_type.equals(new_expr.getNewType())) {
InvocationExprent inv_dynamic = new_expr.getConstructor();
// For a non-static content method, index 0 is the receiver:
// skip it both in the argument list and in the local-slot index.
int param_index = is_static_lambda_content ? 0 : 1;
int varIndex = is_static_lambda_content ? 0 : 1;
for (int i = 0; i < md_content.params.length; ++i) {
VarVersionPair varVersion = new VarVersionPair(varIndex, 0);
if (i < vars_count) {
// Captured variable: reuse its name from the enclosing method.
Exprent param = inv_dynamic.getLstParameters().get(param_index + i);
if (param.type == Exprent.EXPRENT_VAR) {
mapNewNames.put(varVersion, enclosingMethod.varproc.getVarName(new VarVersionPair((VarExprent)param)));
}
}
else {
// Declared lambda parameter: pick a name free in the enclosing scope.
mapNewNames.put(varVersion, enclosingCollector.getFreeName(method.varproc.getVarName(varVersion)));
}
varIndex += md_content.params[i].stackSize; // long/double occupy two slots
}
}
}
}
return 0;
});
// update names of local variables
Set<String> setNewOuterNames = new HashSet<>(mapNewNames.values());
setNewOuterNames.removeAll(method.setOuterVarNames);
method.varproc.refreshVarNames(new VarNamesCollector(setNewOuterNames));
method.setOuterVarNames.addAll(setNewOuterNames);
for (Entry<VarVersionPair, String> entry : mapNewNames.entrySet()) {
method.varproc.setVarName(entry.getKey(), entry.getValue());
}
}
  private static void checkNotFoundClasses(ClassNode root, ClassNode node) {
    // Verifies local/anonymous nested classes whose enclosing method has not been
    // resolved: either recovers the method from the EnclosingMethod attribute,
    // re-attaches the class under another known enclosing class, or detaches it
    // entirely and logs a warning.
    // Iterate over a copy because children may be removed from node.nested below.
    List<ClassNode> copy = new ArrayList<>(node.nested);
    for (ClassNode child : copy) {
      if (child.classStruct.isSynthetic()) {
        continue;
      }
      if ((child.type == ClassNode.CLASS_LOCAL || child.type == ClassNode.CLASS_ANONYMOUS) && child.enclosingMethod == null) {
        Set<String> setEnclosing = child.enclosingClasses;
        if (!setEnclosing.isEmpty()) {
          // Try to resolve the enclosing method from the class-file attribute:
          // the attribute must name this class and a method that actually exists.
          StructEnclosingMethodAttribute attr = child.classStruct.getAttribute(StructGeneralAttribute.ATTRIBUTE_ENCLOSING_METHOD);
          if (attr != null &&
              attr.getMethodName() != null &&
              node.classStruct.qualifiedName.equals(attr.getClassName()) &&
              node.classStruct.getMethod(attr.getMethodName(), attr.getMethodDescriptor()) != null) {
            child.enclosingMethod = InterpreterUtil.makeUniqueKey(attr.getMethodName(), attr.getMethodDescriptor());
            continue;
          }
        }
        // No usable attribute: unlink from the current parent and try to re-insert
        // the class under one of its other recorded enclosing classes.
        node.nested.remove(child);
        child.parent = null;
        setEnclosing.remove(node.classStruct.qualifiedName);
        boolean hasEnclosing = !setEnclosing.isEmpty() && insertNestedClass(root, child);
        if (!hasEnclosing) {
          if (child.type == ClassNode.CLASS_ANONYMOUS) {
            String message = "Unreferenced anonymous class " + child.classStruct.qualifiedName + "!";
            DecompilerContext.getLogger().writeMessage(message, IFernflowerLogger.Severity.WARN);
          }
          else if (child.type == ClassNode.CLASS_LOCAL) {
            String message = "Unreferenced local class " + child.classStruct.qualifiedName + "!";
            DecompilerContext.getLogger().writeMessage(message, IFernflowerLogger.Severity.WARN);
          }
        }
      }
    }
  }
private static boolean insertNestedClass(ClassNode root, ClassNode child) {
Set<String> setEnclosing = child.enclosingClasses;
LinkedList<ClassNode> stack = new LinkedList<>();
stack.add(root);
while (!stack.isEmpty()) {
ClassNode node = stack.removeFirst();
if (setEnclosing.contains(node.classStruct.qualifiedName)) {
node.nested.add(child);
child.parent = node;
return true;
}
// note: ordered list
stack.addAll(node.nested);
}
return false;
}
  private static void computeLocalVarsAndDefinitions(ClassNode node) {
    // Determines, for every non-static nested class of 'node', which constructor
    // parameters feed synthetic capture fields, merges that information across all
    // constructors and instantiation sites, and stores the result on the nested
    // class node (mapFieldsToVars) and its constructor wrappers (synthParameters).

    // class name -> constructor descriptor -> var to field link
    Map<String, Map<String, List<VarFieldPair>>> mapVarMasks = new HashMap<>();

    int clTypes = 0;
    for (ClassNode nd : node.nested) {
      if (nd.type != ClassNode.CLASS_LAMBDA &&
          !nd.classStruct.isSynthetic() &&
          (nd.access & CodeConstants.ACC_STATIC) == 0 &&
          (nd.access & CodeConstants.ACC_INTERFACE) == 0) {
        clTypes |= nd.type;

        Map<String, List<VarFieldPair>> mask = getMaskLocalVars(nd.getWrapper());
        if (mask.isEmpty()) {
          String message = "Nested class " + nd.classStruct.qualifiedName + " has no constructor!";
          DecompilerContext.getLogger().writeMessage(message, IFernflowerLogger.Severity.WARN);
        }
        else {
          mapVarMasks.put(nd.classStruct.qualifiedName, mask);
        }
      }
    }

    // local var masks
    Map<String, Map<String, List<VarFieldPair>>> mapVarFieldPairs = new HashMap<>();

    if (clTypes != ClassNode.CLASS_MEMBER) {
      // iterate enclosing class
      for (MethodWrapper method : node.getWrapper().getMethods()) {
        if (method.root != null) { // neither abstract, nor native
          method.getOrBuildGraph().iterateExprents(exprent -> {
            List<Exprent> lst = exprent.getAllExprents(true);
            lst.add(exprent);

            for (Exprent expr : lst) {
              if (expr.type == Exprent.EXPRENT_NEW) {
                InvocationExprent constructor = ((NewExprent)expr).getConstructor();

                if (constructor != null && mapVarMasks.containsKey(constructor.getClassname())) { // non-static inner class constructor
                  String refClassName = constructor.getClassname();
                  ClassNode nestedClassNode = node.getClassNode(refClassName);

                  if (nestedClassNode.type != ClassNode.CLASS_MEMBER) {
                    List<VarFieldPair> mask = mapVarMasks.get(refClassName).get(constructor.getStringDescriptor());

                    if (!mapVarFieldPairs.containsKey(refClassName)) {
                      mapVarFieldPairs.put(refClassName, new HashMap<>());
                    }

                    // Pair each constructor argument that is a plain variable with
                    // the synthetic field it initializes (per the mask).
                    List<VarFieldPair> lstTemp = new ArrayList<>();

                    for (int i = 0; i < mask.size(); i++) {
                      Exprent param = constructor.getLstParameters().get(i);
                      VarFieldPair pair = null;

                      if (param.type == Exprent.EXPRENT_VAR && mask.get(i) != null) {
                        VarVersionPair varPair = new VarVersionPair((VarExprent)param);

                        // FIXME: flags of variables are wrong! Correct the entire functionality.
                        // if(method.varproc.getVarFinal(varPair) != VarTypeProcessor.VAR_NON_FINAL) {
                        pair = new VarFieldPair(mask.get(i).fieldKey, varPair);
                        // }
                      }

                      lstTemp.add(pair);
                    }

                    // Intersect with pairs collected from other instantiations of
                    // the same constructor: conflicting entries are nulled out.
                    List<VarFieldPair> pairMask = mapVarFieldPairs.get(refClassName).get(constructor.getStringDescriptor());
                    if (pairMask == null) {
                      pairMask = lstTemp;
                    }
                    else {
                      for (int i = 0; i < pairMask.size(); i++) {
                        if (!InterpreterUtil.equalObjects(pairMask.get(i), lstTemp.get(i))) {
                          pairMask.set(i, null);
                        }
                      }
                    }

                    mapVarFieldPairs.get(refClassName).put(constructor.getStringDescriptor(), pairMask);
                    // The method that instantiates the class becomes its enclosing method.
                    nestedClassNode.enclosingMethod =
                      InterpreterUtil.makeUniqueKey(method.methodStruct.getName(), method.methodStruct.getDescriptor());
                  }
                }
              }
            }

            return 0;
          });
        }
      }
    }

    // merge var masks
    for (Entry<String, Map<String, List<VarFieldPair>>> enclosing : mapVarMasks.entrySet()) {
      ClassNode nestedNode = node.getClassNode(enclosing.getKey());

      // intersection
      List<VarFieldPair> interPairMask = null;
      // merge referenced constructors
      if (mapVarFieldPairs.containsKey(enclosing.getKey())) {
        for (List<VarFieldPair> mask : mapVarFieldPairs.get(enclosing.getKey()).values()) {
          if (interPairMask == null) {
            interPairMask = new ArrayList<>(mask);
          }
          else {
            mergeListSignatures(interPairMask, mask, false);
          }
        }
      }

      List<VarFieldPair> interMask = null;
      // merge all constructors
      for (List<VarFieldPair> mask : enclosing.getValue().values()) {
        if (interMask == null) {
          interMask = new ArrayList<>(mask);
        }
        else {
          mergeListSignatures(interMask, mask, false);
        }
      }

      if (interPairMask == null) { // member or local and never instantiated
        interPairMask = interMask != null ? new ArrayList<>(interMask) : new ArrayList<>();

        // Keep at most the first non-null entry when the class was never instantiated.
        boolean found = false;

        for (int i = 0; i < interPairMask.size(); i++) {
          if (interPairMask.get(i) != null) {
            if (found) {
              interPairMask.set(i, null);
            }
            found = true;
          }
        }
      }

      mergeListSignatures(interPairMask, interMask, true);

      // Record which synthetic fields correspond to captured enclosing variables.
      for (VarFieldPair pair : interPairMask) {
        if (pair != null && !pair.fieldKey.isEmpty()) {
          nestedNode.mapFieldsToVars.put(pair.fieldKey, pair.varPair);
        }
      }

      // set resulting constructor signatures
      for (Entry<String, List<VarFieldPair>> entry : enclosing.getValue().entrySet()) {
        mergeListSignatures(entry.getValue(), interPairMask, false);

        List<VarVersionPair> mask = new ArrayList<>(entry.getValue().size());
        for (VarFieldPair pair : entry.getValue()) {
          mask.add(pair != null && !pair.fieldKey.isEmpty() ? pair.varPair : null);
        }
        nestedNode.getWrapper().getMethodWrapper(CodeConstants.INIT_NAME, entry.getKey()).synthParameters = mask;
      }
    }
  }
  private static void insertLocalVars(ClassNode parent, ClassNode child) {
    // Rewrites the methods of a nested class so that reads of synthetic capture
    // fields and synthetic constructor parameters become references to fresh
    // variables that carry the names/types of the captured enclosing variables.
    // Assignments that merely initialize the synthetic fields are dropped.

    // enclosing method, is null iff member class
    MethodWrapper enclosingMethod = parent.getWrapper().getMethods().getWithKey(child.enclosingMethod);

    // iterate all child methods
    for (MethodWrapper method : child.getWrapper().getMethods()) {
      if (method.root != null) { // neither abstract nor native
        Map<VarVersionPair, String> mapNewNames = new HashMap<>();  // local var names
        Map<VarVersionPair, VarType> mapNewTypes = new HashMap<>();  // local var types

        // Step 1: map synthetic constructor parameters (by their stack slot) to
        // fresh variables named/typed after the enclosing method's variables.
        Map<Integer, VarVersionPair> mapParamsToNewVars = new HashMap<>();
        if (method.synthParameters != null) {
          int index = 0, varIndex = 1;
          MethodDescriptor md = MethodDescriptor.parseDescriptor(method.methodStruct.getDescriptor());
          for (VarVersionPair pair : method.synthParameters) {
            if (pair != null) {
              VarVersionPair newVar = new VarVersionPair(method.counter.getCounterAndIncrement(CounterContainer.VAR_COUNTER), 0);
              mapParamsToNewVars.put(varIndex, newVar);

              String varName = null;
              VarType varType = null;

              if (child.type != ClassNode.CLASS_MEMBER) {
                varName = enclosingMethod.varproc.getVarName(pair);
                varType = enclosingMethod.varproc.getVarType(pair);

                // Captured variables must be (effectively) final in the output.
                enclosingMethod.varproc.setVarFinal(pair, VarTypeProcessor.VAR_EXPLICIT_FINAL);
              }

              if (pair.var == -1 || "this".equals(varName)) {
                if (parent.simpleName == null) {
                  // anonymous enclosing class, no access to this
                  varName = VarExprent.VAR_NAMELESS_ENCLOSURE;
                }
                else {
                  varName = parent.simpleName + ".this";
                }
                method.varproc.getThisVars().put(newVar, parent.classStruct.qualifiedName);
              }

              mapNewNames.put(newVar, varName);
              mapNewTypes.put(newVar, varType);
            }

            // Advance by the parameter's slot width (longs/doubles take two slots).
            varIndex += md.params[index++].stackSize;
          }
        }

        // Step 2: map synthetic capture fields of this class and every outer
        // class to fresh variables; hide this class's own synthetic fields.
        Map<String, VarVersionPair> mapFieldsToNewVars = new HashMap<>();
        for (ClassNode classNode = child; classNode != null; classNode = classNode.parent) {
          for (Entry<String, VarVersionPair> entry : classNode.mapFieldsToVars.entrySet()) {
            VarVersionPair newVar = new VarVersionPair(method.counter.getCounterAndIncrement(CounterContainer.VAR_COUNTER), 0);

            mapFieldsToNewVars.put(InterpreterUtil.makeUniqueKey(classNode.classStruct.qualifiedName, entry.getKey()), newVar);

            String varName = null;
            VarType varType = null;

            if (classNode.type != ClassNode.CLASS_MEMBER) {
              MethodWrapper enclosing_method = classNode.parent.getWrapper().getMethods().getWithKey(classNode.enclosingMethod);

              varName = enclosing_method.varproc.getVarName(entry.getValue());
              varType = enclosing_method.varproc.getVarType(entry.getValue());

              enclosing_method.varproc.setVarFinal(entry.getValue(), VarTypeProcessor.VAR_EXPLICIT_FINAL);
            }

            if (entry.getValue().var == -1 || "this".equals(varName)) {
              if (classNode.parent.simpleName == null) {
                // anonymous enclosing class, no access to this
                varName = VarExprent.VAR_NAMELESS_ENCLOSURE;
              }
              else {
                varName = classNode.parent.simpleName + ".this";
              }
              method.varproc.getThisVars().put(newVar, classNode.parent.classStruct.qualifiedName);
            }

            mapNewNames.put(newVar, varName);
            mapNewTypes.put(newVar, varType);

            // hide synthetic field
            if (classNode == child) { // fields higher up the chain were already handled with their classes
              StructField fd = child.classStruct.getFields().getWithKey(entry.getKey());
              child.getWrapper().getHiddenMembers().add(InterpreterUtil.makeUniqueKey(fd.getName(), fd.getDescriptor()));
            }
          }
        }

        // Step 3: make the imported names unique against names already present in
        // this method, then register names and types with the variable processor.
        Set<String> setNewOuterNames = new HashSet<>(mapNewNames.values());
        setNewOuterNames.removeAll(method.setOuterVarNames);

        method.varproc.refreshVarNames(new VarNamesCollector(setNewOuterNames));
        method.setOuterVarNames.addAll(setNewOuterNames);

        for (Entry<VarVersionPair, String> entry : mapNewNames.entrySet()) {
          VarVersionPair pair = entry.getKey();
          VarType type = mapNewTypes.get(pair);

          method.varproc.setVarName(pair, entry.getValue());
          if (type != null) {
            method.varproc.setVarType(pair, type);
          }
        }

        // Step 4: walk the expression graph, dropping synthetic-field
        // initializers and substituting the freshly created variables.
        method.getOrBuildGraph().iterateExprents(new DirectGraph.ExprentIterator() {
          @Override
          public int processExprent(Exprent exprent) {
            if (exprent.type == Exprent.EXPRENT_ASSIGNMENT) {
              AssignmentExprent assignExpr = (AssignmentExprent)exprent;
              if (assignExpr.getLeft().type == Exprent.EXPRENT_FIELD) {
                FieldExprent fExpr = (FieldExprent)assignExpr.getLeft();
                String qName = child.classStruct.qualifiedName;
                if (fExpr.getClassname().equals(qName) && // process this class only
                    mapFieldsToNewVars.containsKey(InterpreterUtil.makeUniqueKey(qName, fExpr.getName(), fExpr.getDescriptor().descriptorString))) {
                  // Drop the assignment that initializes the synthetic capture field.
                  return 2;
                }
              }
            }

            if (child.type == ClassNode.CLASS_ANONYMOUS &&
                CodeConstants.INIT_NAME.equals(method.methodStruct.getName()) &&
                exprent.type == Exprent.EXPRENT_INVOCATION) {
              InvocationExprent invokeExpr = (InvocationExprent)exprent;
              if (invokeExpr.getFunctype() == InvocationExprent.TYP_INIT) {
                // invocation of the super constructor in an anonymous class
                child.superInvocation = invokeExpr; // FIXME: save original names of parameters
                return 2;
              }
            }

            replaceExprent(exprent);

            return 0;
          }

          // Recursively replaces synthetic parameter vars and capture-field reads
          // with the new variables; returns the replacement, or null when the
          // passed exprent itself is kept (possibly with rewritten children).
          private Exprent replaceExprent(Exprent exprent) {
            if (exprent.type == Exprent.EXPRENT_VAR) {
              int varIndex = ((VarExprent)exprent).getIndex();
              if (mapParamsToNewVars.containsKey(varIndex)) {
                VarVersionPair newVar = mapParamsToNewVars.get(varIndex);
                method.varproc.getExternalVars().add(newVar);
                return new VarExprent(newVar.var, method.varproc.getVarType(newVar), method.varproc);
              }
            }
            else if (exprent.type == Exprent.EXPRENT_FIELD) {
              FieldExprent fExpr = (FieldExprent)exprent;
              String key = InterpreterUtil.makeUniqueKey(fExpr.getClassname(), fExpr.getName(), fExpr.getDescriptor().descriptorString);
              if (mapFieldsToNewVars.containsKey(key)) {
                //if(fExpr.getClassname().equals(child.classStruct.qualifiedName) &&
                //   mapFieldsToNewVars.containsKey(key)) {
                VarVersionPair newVar = mapFieldsToNewVars.get(key);
                method.varproc.getExternalVars().add(newVar);
                return new VarExprent(newVar.var, method.varproc.getVarType(newVar), method.varproc);
              }
            }

            // Restart the child scan after every substitution, because replacing a
            // child invalidates the list returned by getAllExprents().
            boolean replaced = true;
            while (replaced) {
              replaced = false;

              for (Exprent expr : exprent.getAllExprents()) {
                Exprent retExpr = replaceExprent(expr);
                if (retExpr != null) {
                  exprent.replaceExprent(expr, retExpr);
                  replaced = true;
                  break;
                }
              }
            }

            return null;
          }
        });
      }
    }
  }
  private static Map<String, List<VarFieldPair>> getMaskLocalVars(ClassWrapper wrapper) {
    // Builds, for each constructor descriptor of the class, a parameter mask:
    // per parameter, either a VarFieldPair naming the synthetic field that the
    // parameter initializes, or null when it initializes no such field.
    Map<String, List<VarFieldPair>> mapMasks = new HashMap<>();

    StructClass cl = wrapper.getClassStruct();

    // iterate over constructors
    for (StructMethod mt : cl.getMethods()) {
      if (CodeConstants.INIT_NAME.equals(mt.getName())) {
        MethodDescriptor md = MethodDescriptor.parseDescriptor(mt.getDescriptor());

        MethodWrapper method = wrapper.getMethodWrapper(CodeConstants.INIT_NAME, mt.getDescriptor());
        DirectGraph graph = method.getOrBuildGraph();

        if (graph != null) { // something gone wrong, should not be null
          List<VarFieldPair> fields = new ArrayList<>(md.params.length);

          // Slot 0 is 'this'; parameters start at slot 1.
          int varIndex = 1;
          for (int i = 0; i < md.params.length; i++) { // no static methods allowed
            String keyField = getEnclosingVarField(cl, method, graph, varIndex);
            // The placeholder var (-1, 0) is filled in later by mergeListSignatures.
            fields.add(keyField == null ? null : new VarFieldPair(keyField, new VarVersionPair(-1, 0))); // TODO: null?
            varIndex += md.params[i].stackSize;  // longs/doubles occupy two slots
          }

          mapMasks.put(mt.getDescriptor(), fields);
        }
      }
    }

    return mapMasks;
  }
  private static String getEnclosingVarField(StructClass cl, MethodWrapper method, DirectGraph graph, int index) {
    // For the constructor parameter in variable slot 'index', returns:
    //   - the unique key of the synthetic field it is assigned to in the
    //     constructor's entry block,
    //   - "" when no such assignment exists, or
    //   - null when the parameter is not final (cannot be a capture).
    String field = "";

    // parameter variable final
    if (method.varproc.getVarFinal(new VarVersionPair(index, 0)) == VarTypeProcessor.VAR_NON_FINAL) {
      return null;
    }

    boolean noSynthFlag = DecompilerContext.getOption(IFernflowerPreferences.SYNTHETIC_NOT_SET);

    // no loop at the begin
    DirectNode firstNode = graph.first;
    if (firstNode.preds.isEmpty()) {
      // assignment to a synthetic field?
      for (Exprent exprent : firstNode.exprents) {
        if (exprent.type == Exprent.EXPRENT_ASSIGNMENT) {
          AssignmentExprent assignExpr = (AssignmentExprent)exprent;
          if (assignExpr.getRight().type == Exprent.EXPRENT_VAR &&
              ((VarExprent)assignExpr.getRight()).getIndex() == index &&
              assignExpr.getLeft().type == Exprent.EXPRENT_FIELD) {
            FieldExprent left = (FieldExprent)assignExpr.getLeft();
            StructField fd = cl.getField(left.getName(), left.getDescriptor().descriptorString);
            // Accept only fields of this very class that are synthetic, or look
            // synthetic when the compiler did not set the synthetic flag.
            if (fd != null &&
                cl.qualifiedName.equals(left.getClassname()) &&
                (fd.isSynthetic() || noSynthFlag && possiblySyntheticField(fd))) {
              // local (== not inherited) field
              field = InterpreterUtil.makeUniqueKey(left.getName(), left.getDescriptor().descriptorString);
              break;
            }
          }
        }
      }
    }

    return field;
  }
private static boolean possiblySyntheticField(StructField fd) {
return fd.getName().contains("$") && fd.hasModifier(CodeConstants.ACC_FINAL) && fd.hasModifier(CodeConstants.ACC_PRIVATE);
}
  private static void mergeListSignatures(List<VarFieldPair> first, List<VarFieldPair> second, boolean both) {
    // Intersects two constructor signature masks in place, aligning entries from
    // the END of the lists (captured variables are appended last). Conflicting
    // entries become null in 'first' (and in 'second' as well when 'both' is set);
    // matching entries have their concrete varPair propagated across the lists.
    int i = 1;
    while (first.size() > i && second.size() > i) {
      VarFieldPair fObj = first.get(first.size() - i);
      VarFieldPair sObj = second.get(second.size() - i);

      if (!isEqual(both, fObj, sObj)) {
        first.set(first.size() - i, null);
        if (both) {
          second.set(second.size() - i, null);
        }
      }
      else if (fObj != null) {
        // Propagate a concrete variable into the side holding the (-1, 0) placeholder.
        if (fObj.varPair.var == -1) {
          fObj.varPair = sObj.varPair;
        }
        else {
          sObj.varPair = fObj.varPair;
        }
      }

      i++;
    }

    // Null out the surplus leading entries that have no counterpart in the other
    // list (index 0 is handled separately below).
    for (int j = 1; j <= first.size() - i; j++) {
      first.set(j, null);
    }
    if (both) {
      for (int j = 1; j <= second.size() - i; j++) {
        second.set(j, null);
      }
    }

    // first
    if (first.isEmpty()) {
      if (!second.isEmpty() && both) {
        second.set(0, null);
      }
    }
    else if (second.isEmpty()) {
      first.set(0, null);
    }
    else {
      VarFieldPair fObj = first.get(0);
      VarFieldPair sObj = second.get(0);

      if (!isEqual(both, fObj, sObj)) {
        first.set(0, null);
        if (both) {
          second.set(0, null);
        }
      }
      else if (fObj != null) {
        if (fObj.varPair.var == -1) {
          fObj.varPair = sObj.varPair;
        }
        else {
          sObj.varPair = fObj.varPair;
        }
      }
    }
  }
private static boolean isEqual(boolean both, VarFieldPair fObj, VarFieldPair sObj) {
boolean eq;
if (fObj == null || sObj == null) {
eq = (fObj == sObj);
}
else {
eq = true;
if (fObj.fieldKey.length() == 0) {
fObj.fieldKey = sObj.fieldKey;
}
else if (sObj.fieldKey.length() == 0) {
if (both) {
sObj.fieldKey = fObj.fieldKey;
}
}
else {
eq = fObj.fieldKey.equals(sObj.fieldKey);
}
}
return eq;
}
  private static void setLocalClassDefinition(MethodWrapper method, ClassNode node) {
    // Inserts a class-definition marker variable for a local class into the
    // statement that dominates all references to the class (or into the root
    // statement when the class is never referenced).
    RootStatement root = method.root;

    Set<Statement> setStats = new HashSet<>();
    VarType classType = new VarType(node.classStruct.qualifiedName, true);

    Statement statement = getDefStatement(root, classType, setStats);
    if (statement == null) {
      // unreferenced local class
      statement = root.getFirst();
    }

    Statement first = findFirstBlock(statement, setStats);

    // Choose the target list: basic-block exprents when available, otherwise the
    // statement's variable-definition list.
    List<Exprent> lst;
    if (first == null) {
      lst = statement.getVarDefinitions();
    }
    else if (first.getExprents() == null) {
      lst = first.getVarDefinitions();
    }
    else {
      lst = first.getExprents();
    }

    // Place the definition directly before the first expression using the class.
    int addIndex = 0;
    for (Exprent expr : lst) {
      if (searchForClass(expr, classType)) {
        break;
      }
      addIndex++;
    }

    VarExprent var = new VarExprent(method.counter.getCounterAndIncrement(CounterContainer.VAR_COUNTER), classType, method.varproc);
    var.setDefinition(true);
    var.setClassDef(true);

    lst.add(addIndex, var);
  }
  private static Statement findFirstBlock(Statement stat, Set<Statement> setStats) {
    // Descends into the statement tree — restricted to the statements collected
    // by getDefStatement — and returns the first statement that can receive a
    // class definition (labeled, has exprents, or is an opaque leaf). Returns
    // null when the search exhausts the collected set.
    LinkedList<Statement> stack = new LinkedList<>();
    stack.add(stat);

    while (!stack.isEmpty()) {
      Statement st = stack.remove(0);

      if (stack.isEmpty() || setStats.contains(st)) {
        if (st.isLabeled() && !stack.isEmpty() || st.getExprents() != null) {
          return st;
        }

        // Descend into this statement only: siblings are discarded.
        stack.clear();

        switch (st.type) {
          case Statement.TYPE_SEQUENCE:
            stack.addAll(0, st.getStats());
            break;
          case Statement.TYPE_IF:
          case Statement.TYPE_ROOT:
          case Statement.TYPE_SWITCH:
          case Statement.TYPE_SYNCRONIZED:
            stack.add(st.getFirst());
            break;
          default:
            return st;
        }
      }
    }

    return null;
  }
  private static Statement getDefStatement(Statement stat, VarType classType, Set<? super Statement> setStats) {
    // Finds the innermost statement that dominates every reference to the class
    // type within this subtree; statements on the path to that dominator are
    // collected into setStats. Returns null when the subtree has no reference.
    List<Exprent> lst = new ArrayList<>();
    Statement retStat = null;

    if (stat.getExprents() == null) {
      // Composite statement: recurse into children; if more than one child holds
      // a reference, this statement itself is the dominator.
      int counter = 0;

      for (Object obj : stat.getSequentialObjects()) {
        if (obj instanceof Statement) {
          Statement st = (Statement)obj;

          Statement stTemp = getDefStatement(st, classType, setStats);

          if (stTemp != null) {
            if (counter == 1) {
              retStat = stat;
              break;
            }
            retStat = stTemp;
            counter++;
          }

          if (st.type == Statement.TYPE_DO) {
            // Also scan loop header expressions (init/condition) of child loops.
            DoStatement dost = (DoStatement)st;

            lst.addAll(dost.getInitExprentList());
            lst.addAll(dost.getConditionExprentList());
          }
        }
        else if (obj instanceof Exprent) {
          lst.add((Exprent)obj);
        }
      }
    }
    else {
      lst = stat.getExprents();
    }

    // A reference in this statement's own expressions promotes it to dominator.
    if (retStat != stat) {
      for (Exprent exprent : lst) {
        if (exprent != null && searchForClass(exprent, classType)) {
          retStat = stat;
          break;
        }
      }
    }

    if (retStat != null) {
      setStats.add(stat);
    }

    return retStat;
  }
  private static boolean searchForClass(Exprent exprent, VarType classType) {
    // Returns true when the expression tree references the given class type in
    // any form: class literal, field or method owner, instantiation, or a
    // variable definition of that type (including arrays of it).
    List<Exprent> lst = exprent.getAllExprents(true);
    lst.add(exprent);

    String classname = classType.value;

    for (Exprent expr : lst) {
      boolean res = false;

      switch (expr.type) {
        case Exprent.EXPRENT_CONST:
          // class literal (Foo.class) or a constant of the class type itself
          ConstExprent constExpr = (ConstExprent)expr;
          res = (VarType.VARTYPE_CLASS.equals(constExpr.getConstType()) && classname.equals(constExpr.getValue()) ||
                 classType.equals(constExpr.getConstType()));
          break;
        case Exprent.EXPRENT_FIELD:
          res = classname.equals(((FieldExprent)expr).getClassname());
          break;
        case Exprent.EXPRENT_INVOCATION:
          res = classname.equals(((InvocationExprent)expr).getClassname());
          break;
        case Exprent.EXPRENT_NEW:
          VarType newType = expr.getExprType();
          res = newType.type == CodeConstants.TYPE_OBJECT && classname.equals(newType.value);
          break;
        case Exprent.EXPRENT_VAR:
          VarExprent varExpr = (VarExprent)expr;
          if (varExpr.isDefinition()) {
            VarType varType = varExpr.getVarType();
            if (classType.equals(varType) || (varType.arrayDim > 0 && classType.value.equals(varType.value))) {
              res = true;
            }
          }
      }

      if (res) {
        return true;
      }
    }

    return false;
  }
private static class VarFieldPair {
public String fieldKey;
public VarVersionPair varPair;
VarFieldPair(String field, VarVersionPair varPair) {
this.fieldKey = field;
this.varPair = varPair;
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (!(o instanceof VarFieldPair)) return false;
VarFieldPair pair = (VarFieldPair)o;
return fieldKey.equals(pair.fieldKey) && varPair.equals(pair.varPair);
}
@Override
public int hashCode() {
return fieldKey.hashCode() + varPair.hashCode();
}
}
}
| |
package com.marcouberti.naturegradientsradial;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.ComponentName;
import android.content.DialogInterface;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.support.wearable.companion.WatchFaceCompanion;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.RelativeLayout;
import android.widget.Spinner;
import android.widget.TextView;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.DataItem;
import com.google.android.gms.wearable.DataItemBuffer;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.DataMapItem;
import com.google.android.gms.wearable.Wearable;
/**
 * The phone-side config activity for {@code DigitalWatchFaceService}. Like the watch-side config
 * activity ({@code DigitalWatchFaceWearableConfigActivity}), allows for setting the background
 * color. Additionally, enables setting the color for hour, minute and second digits.
 */
public class NatureGradientsWatchFaceCompanionConfigActivity extends Activity
        implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener,
        ResultCallback<DataApi.DataItemResult>,DataApi.DataListener {
    private static final String TAG = "DigitalWatchFaceConfig";

    // TODO: use the shared constants (needs covering all the samples with Gradle build model)
    private GoogleApiClient mGoogleApiClient;               // connection to the Wearable Data/Message APIs
    private String mPeerId;                                 // node id of the paired watch, from the launch intent
    private Toolbar toolbar;
    protected RecyclerView recyclerView;                    // list of selectable gradients
    private GradientAdapter adapter;
    private RecyclerView.LayoutManager robotLayoutManager;
    CustomGradientView previewView;                         // live preview of the currently selected gradient

    /** Sets up the toolbar, the gradient preview view and the gradient picker list. */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_nature_gradient_watch_face_config);

        mPeerId = getIntent().getStringExtra(WatchFaceCompanion.EXTRA_PEER_ID);
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .addApi(Wearable.API)
                .build();

        // NOTE(review): 'name' is read from the intent but never used afterwards — confirm before removing.
        ComponentName name = getIntent().getParcelableExtra(
                WatchFaceCompanion.EXTRA_WATCH_FACE_COMPONENT);

        toolbar = (Toolbar)findViewById(R.id.toolbar);
        toolbar.setNavigationIcon(R.drawable.back);
        toolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
            }
        });

        /*
        toolbar.setOnMenuItemClickListener(new Toolbar.OnMenuItemClickListener() {
            @Override
            public boolean onMenuItemClick(MenuItem item) {
                if (item.getItemId() == R.id.action_refresh) {
                    if(dashboardFragment != null) {
                        ((DashboardFragment)dashboardFragment).reloadRobotList(true);
                    }
                }
                return true;
            }
        });
        */

        previewView = (CustomGradientView)findViewById(R.id.gradient);

        recyclerView = (RecyclerView) findViewById(R.id.recycler_view);
        recyclerView.setHasFixedSize(true);
        RecyclerView.ItemAnimator animator = new DefaultItemAnimator();
        animator.setSupportsChangeAnimations(false);//no animation for changes
        recyclerView.setItemAnimator(animator);
        robotLayoutManager = new LinearLayoutManager(this);
        //robotLayoutManager = new GridLayoutManager(getActivity(),2);
        recyclerView.setLayoutManager(robotLayoutManager);
        adapter = new GradientAdapter();
        recyclerView.setAdapter(adapter);
    }

    /** Connects the Google API client; onConnected() is invoked once ready. */
    @Override
    protected void onStart() {
        super.onStart();
        mGoogleApiClient.connect();
    }

    @Override
    protected void onStop() {
        if (mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
            mGoogleApiClient.disconnect();
        }
        super.onStop();
    }

    /**
     * Once connected, fetches the current watch face config DataItem from the peer
     * (delivered to {@link #onResult}) and registers for subsequent data changes.
     */
    @Override // GoogleApiClient.ConnectionCallbacks
    public void onConnected(Bundle connectionHint) {
        Log.d(TAG, "onConnected: " + connectionHint);

        if (mPeerId != null) {
            Uri.Builder builder = new Uri.Builder();
            Uri uri = builder.scheme("wear").path(NatureGradientsWatchFaceUtil.PATH_WITH_FEATURE).authority(mPeerId).build();
            Wearable.DataApi.getDataItem(mGoogleApiClient, uri).setResultCallback(this);
            Wearable.DataApi.addListener(mGoogleApiClient, this);
        } else {
            displayNoConnectedDeviceDialog();
        }
    }

    // NOTE(review): also disconnects here while onStop() disconnects as well;
    // disconnect() on an already-disconnected client is presumably harmless — confirm.
    @Override
    protected void onPause() {
        super.onPause();
        Wearable.DataApi.removeListener(mGoogleApiClient, this);
        mGoogleApiClient.disconnect();
    }

    // Reads the stored background-color config item and applies it to the UI.
    // NOTE(review): appears to be unused within this class — confirm callers before removing.
    private void updateConfigDataItemAndUiOnStartup() {
        Log.d(TAG, "updateConfigDataItemAndUiOnStartup...");
        PendingResult<DataItemBuffer> results = Wearable.DataApi.getDataItems(mGoogleApiClient);
        results.setResultCallback(new ResultCallback<DataItemBuffer>() {
            @Override
            public void onResult(DataItemBuffer dataItems) {
                if (dataItems.getCount() != 0) {
                    DataMapItem dataMapItem = DataMapItem.fromDataItem(dataItems.get(0));

                    // This should read the correct value.
                    int value = dataMapItem.getDataMap().getInt(NatureGradientsWatchFaceUtil.KEY_BACKGROUND_COLOR);
                    updateUiForKey(NatureGradientsWatchFaceUtil.KEY_BACKGROUND_COLOR, value);
                    Log.d(TAG, "aggiorno a startup background...");
                }

                dataItems.release();
            }
        });

        /*
        NatureGradientsWatchFaceUtil.fetchConfigDataMap(mGoogleApiClient,
                new NatureGradientsWatchFaceUtil.FetchConfigDataMapCallback() {
                    @Override
                    public void onConfigDataMapFetched(DataMap startupConfig) {
                        // If the DataItem hasn't been created yet or some keys are missing,
                        // use the default values.
                        NatureGradientsWatchFaceUtil.putConfigDataItem(mGoogleApiClient, startupConfig);

                        updateUiForConfigDataMap(startupConfig);
                    }
                }
        );
        */
    }

    /** Callback for the initial getDataItem() request made in onConnected(). */
    @Override // ResultCallback<DataApi.DataItemResult>
    public void onResult(DataApi.DataItemResult dataItemResult) {
        if (dataItemResult.getStatus().isSuccess() && dataItemResult.getDataItem() != null) {
            DataItem configDataItem = dataItemResult.getDataItem();
            DataMapItem dataMapItem = DataMapItem.fromDataItem(configDataItem);
            DataMap config = dataMapItem.getDataMap();
            Log.d(TAG,"startup setup UI...");
            updateUiForConfigDataMap(config);
            //setUpAllPickers(config);
        } else {
            // If DataItem with the current config can't be retrieved, select the default items on
            // each picker.
            //setUpAllPickers(null);
        }
    }

    @Override // GoogleApiClient.ConnectionCallbacks
    public void onConnectionSuspended(int cause) {
        Log.d(TAG, "onConnectionSuspended: " + cause);
    }

    @Override // GoogleApiClient.OnConnectionFailedListener
    public void onConnectionFailed(ConnectionResult result) {
        Log.d(TAG, "onConnectionFailed: " + result);
    }

    /** Shown when the activity was launched without a peer id (no watch paired). */
    private void displayNoConnectedDeviceDialog() {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        String messageText = getResources().getString(R.string.title_no_device_connected);
        String okText = getResources().getString(R.string.ok_no_device_connected);
        builder.setMessage(messageText)
                .setCancelable(false)
                .setPositiveButton(okText, new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                    }
                });
        AlertDialog alert = builder.create();
        alert.show();
    }

    /**
     * Sends the chosen color for {@code configKey} to the watch via the Message API.
     * No-op when there is no connected peer.
     */
    private void sendConfigUpdateMessage(String configKey, int color) {
        if (mPeerId != null) {
            DataMap config = new DataMap();
            config.putInt(configKey, color);
            byte[] rawData = config.toByteArray();
            Wearable.MessageApi.sendMessage(mGoogleApiClient, mPeerId, NatureGradientsWatchFaceUtil.PATH_WITH_FEATURE, rawData);

            if (Log.isLoggable(TAG, Log.DEBUG)) {
                Log.d(TAG, "Sent watch face config message: " + configKey + " -> "
                        + Integer.toHexString(color));
            }
        }
    }

    /** Mirrors config changes made on the watch back into this activity's UI. */
    @Override // DataApi.DataListener
    public void onDataChanged(DataEventBuffer dataEvents) {
        for (DataEvent dataEvent : dataEvents) {
            if (dataEvent.getType() != DataEvent.TYPE_CHANGED) {
                continue;
            }

            DataItem dataItem = dataEvent.getDataItem();
            if (!dataItem.getUri().getPath().equals(NatureGradientsWatchFaceUtil.PATH_WITH_FEATURE)) {
                continue;
            }

            DataMapItem dataMapItem = DataMapItem.fromDataItem(dataItem);
            DataMap config = dataMapItem.getDataMap();
            if (Log.isLoggable(TAG, Log.DEBUG)) {
                Log.d(TAG, "Config DataItem updated:" + config);
            }
            updateUiForConfigDataMap(config);
        }
    }

    /** Applies every recognized key in the config map to the UI. */
    private void updateUiForConfigDataMap(final DataMap config) {
        // NOTE(review): 'uiUpdated' is computed but never read afterwards.
        boolean uiUpdated = false;
        for (String configKey : config.keySet()) {
            if (!config.containsKey(configKey)) {
                continue;
            }
            int color = config.getInt(configKey);
            Log.d(TAG, "Found watch face config key: " + configKey + " -> "
                    + color);
            if (updateUiForKey(configKey, color)) {
                uiUpdated = true;
            }
        }
    }

    /**
     * Updates the color of a UI item according to the given {@code configKey}. Does nothing if
     * {@code configKey} isn't recognized.
     *
     * @return whether UI has been updated
     */
    private boolean updateUiForKey(String configKey, final int color) {
        if (configKey.equals(NatureGradientsWatchFaceUtil.KEY_BACKGROUND_COLOR)) {
            // Data callbacks may arrive off the main thread; repaint on the UI thread.
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    previewView.gradients = GradientsUtils.getGradients(getApplicationContext(), color);
                    previewView.invalidate();
                }
            });
        } else {
            Log.w(TAG, "Ignoring unknown config key: " + configKey);
            return false;
        }
        return true;
    }

    /** RecyclerView adapter listing the available gradient presets by name + preview. */
    public class GradientAdapter extends RecyclerView.Adapter<GradientAdapter.ViewHolder> {

        String[] gradients = getResources().getStringArray(R.array.gradients_face_array);

        // Provide a reference to the views for each data item
        // Complex data items may need more than one view per item, and
        // you provide access to all the views for a data item in a view holder
        public class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener{
            // each data item is just a string in this case
            public RelativeLayout itemContainer;
            public TextView nameText;
            public CustomGradientView gradientView;

            public ViewHolder(RelativeLayout v) {
                super(v);
                v.setOnClickListener(this);
                itemContainer = v;
                nameText = (TextView)v.findViewById(R.id.name);
                gradientView = (CustomGradientView)v.findViewById(R.id.gradient);
            }

            /** Applies the tapped gradient to the preview and pushes it to the watch. */
            @Override
            public void onClick(View view) {
                //position
                int itemPosition = getAdapterPosition();
                Log.d(TAG, "clicked position " + itemPosition);
                String gradientName = gradients[itemPosition];
                previewView.gradients = GradientsUtils.getGradients(getApplicationContext(),gradientName);
                previewView.invalidate();

                sendConfigUpdateMessage(NatureGradientsWatchFaceUtil.KEY_BACKGROUND_COLOR, GradientsUtils.getColorID(gradientName));
            }
        }

        // Provide a suitable constructor (depends on the kind of dataset)
        public GradientAdapter() {}

        // Create new views (invoked by the layout manager)
        @Override
        public ViewHolder onCreateViewHolder(ViewGroup parent,
                                             int viewType) {
            // create a new view
            View v = LayoutInflater.from(parent.getContext())
                    .inflate(R.layout.gradient_item_list, parent, false);
            // set the view's size, margins, paddings and layout parameters
            GradientAdapter.ViewHolder vh = new GradientAdapter.ViewHolder((RelativeLayout) v);
            return vh;
        }

        // Replace the contents of a view (invoked by the layout manager)
        @Override
        public void onBindViewHolder(GradientAdapter.ViewHolder vh, final int position) {
            // NOTE(review): this cast is redundant — 'vh' already has the target type.
            GradientAdapter.ViewHolder holder = (GradientAdapter.ViewHolder)vh;
            //defaults
            holder.nameText.setText(gradients[position]);
            holder.gradientView.gradients = GradientsUtils.getGradients(getApplicationContext(),gradients[position]);
        }

        // Return the size of your dataset (invoked by the layout manager)
        @Override
        public int getItemCount() {
            return gradients.length;
        }
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package controller;
import cart.ShoppingCart;
import entity.Category;
import entity.Product;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import javax.ejb.EJB;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import session.CategoryFacade;
import session.OrderManager;
import session.ProductFacade;
import validate.Validator;
/**
 * Front controller for the web store. GET requests render the category,
 * cart, and checkout pages; POST requests mutate the shopping cart and
 * place orders. Every request is forwarded to the matching JSP under
 * {@code /WEB-INF/view}.
 *
 * @author tgiunipero
 */
@WebServlet(name = "Controller",
            loadOnStartup = 1,
            urlPatterns = {"/category",
                           "/addToCart",
                           "/viewCart",
                           "/updateCart",
                           "/checkout",
                           "/purchase",
                           "/chooseLanguage"})
public class ControllerServlet extends HttpServlet {

    // Delivery surcharge, read from the "deliverySurcharge" context parameter.
    private String surcharge;

    @EJB
    private CategoryFacade categoryFacade;
    @EJB
    private ProductFacade productFacade;
    @EJB
    private OrderManager orderManager;

    /**
     * Caches configuration at startup: the delivery surcharge and the
     * category list (stored in application scope so every page can build
     * the navigation).
     */
    @Override
    public void init(ServletConfig servletConfig) throws ServletException {
        super.init(servletConfig);
        // initialize servlet with configuration information
        surcharge = servletConfig.getServletContext().getInitParameter("deliverySurcharge");
        // store category list in servlet context
        getServletContext().setAttribute("categories", categoryFacade.findAll());
    }

    /**
     * Handles the HTTP <code>GET</code> method (page navigation).
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {

        String userPath = request.getServletPath();
        HttpSession session = request.getSession();

        // if category page is requested
        if (userPath.equals("/category")) {
            // the query string carries the category id (e.g. /category?2)
            String categoryId = request.getQueryString();
            if (categoryId != null) {
                Category selectedCategory = categoryFacade.find(Short.parseShort(categoryId));
                session.setAttribute("selectedCategory", selectedCategory);
                Collection<Product> categoryProducts = selectedCategory.getProductCollection();
                session.setAttribute("categoryProducts", categoryProducts);
            }

        // if cart page is requested
        } else if (userPath.equals("/viewCart")) {
            String clear = request.getParameter("clear");
            if ((clear != null) && clear.equals("true")) {
                ShoppingCart cart = (ShoppingCart) session.getAttribute("cart");
                // guard: "/viewCart?clear=true" can arrive before any cart
                // exists in the session — previously an NPE
                if (cart != null) {
                    cart.clear();
                }
            }
            userPath = "/cart";

        // if checkout page is requested
        } else if (userPath.equals("/checkout")) {
            ShoppingCart cart = (ShoppingCart) session.getAttribute("cart");
            // guard: navigating straight to /checkout without a cart
            // previously caused a NullPointerException
            if (cart != null) {
                cart.calculateTotal(surcharge);
            }

        // if user switches language
        } else if (userPath.equals("/chooseLanguage")) {
            // TODO: Implement language request
        }

        // use RequestDispatcher to forward request internally
        String url = "/WEB-INF/view" + userPath + ".jsp";
        try {
            request.getRequestDispatcher(url).forward(request, response);
        } catch (Exception ex) {
            log("Failed to forward to " + url, ex);
        }
    }

    /**
     * Handles the HTTP <code>POST</code> method (cart mutation and purchase).
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {

        // ensures that user input is interpreted as 8-bit Unicode
        // (e.g., for Czech characters)
        request.setCharacterEncoding("UTF-8");

        String userPath = request.getServletPath();
        HttpSession session = request.getSession();
        ShoppingCart cart = (ShoppingCart) session.getAttribute("cart");
        Validator validator = new Validator();

        // if addToCart action is called
        if (userPath.equals("/addToCart")) {
            // if user is adding item to cart for first time,
            // create cart object and attach it to user session
            if (cart == null) {
                cart = new ShoppingCart();
                session.setAttribute("cart", cart);
            }
            String productId = request.getParameter("productId");
            // getParameter returns null when the field is missing entirely —
            // check before isEmpty() to avoid an NPE
            if (productId != null && !productId.isEmpty()) {
                Product product = productFacade.find(Integer.parseInt(productId));
                cart.addItem(product);
            }
            userPath = "/category";

        // if updateCart action is called
        } else if (userPath.equals("/updateCart")) {
            String productId = request.getParameter("productId");
            String quantity = request.getParameter("quantity");
            boolean invalidEntry = validator.validateQuantity(productId, quantity);
            // guard against an absent cart as well as invalid input
            if (!invalidEntry && cart != null) {
                Product product = productFacade.find(Integer.parseInt(productId));
                cart.update(product, quantity);
            }
            userPath = "/cart";

        // if purchase action is called
        } else if (userPath.equals("/purchase")) {
            if (cart != null) {
                // extract user data from request
                String name = request.getParameter("name");
                String email = request.getParameter("email");
                String phone = request.getParameter("phone");
                String address = request.getParameter("address");
                String cityRegion = request.getParameter("cityRegion");
                String ccNumber = request.getParameter("creditcard");

                // validate user data
                boolean validationErrorFlag =
                        validator.validateForm(name, email, phone, address, cityRegion, ccNumber, request);

                // if validation error found, return user to checkout
                if (validationErrorFlag) {
                    request.setAttribute("validationErrorFlag", validationErrorFlag);
                    userPath = "/checkout";

                // otherwise, save order to database
                } else {
                    int orderId = orderManager.placeOrder(name, email, phone, address, cityRegion, ccNumber, cart);

                    // if order processed successfully send user to confirmation page
                    if (orderId != 0) {
                        // dissociate shopping cart from session and end session
                        cart = null;
                        session.invalidate();
                        // place order details in request scope
                        Map orderMap = orderManager.getOrderDetails(orderId);
                        request.setAttribute("customer", orderMap.get("customer"));
                        request.setAttribute("products", orderMap.get("products"));
                        request.setAttribute("orderRecord", orderMap.get("orderRecord"));
                        request.setAttribute("orderedProducts", orderMap.get("orderedProducts"));
                        userPath = "/confirmation";

                    // otherwise, send back to checkout page and display error
                    } else {
                        userPath = "/checkout";
                        request.setAttribute("orderFailureFlag", true);
                    }
                }
            }
        }

        // use RequestDispatcher to forward request internally
        String url = "/WEB-INF/view" + userPath + ".jsp";
        try {
            request.getRequestDispatcher(url).forward(request, response);
        } catch (Exception ex) {
            log("Failed to forward to " + url, ex);
        }
    }
}
| |
/**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.transport.ws.bridge.filter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.filter.codec.ProtocolDecoder;
import org.apache.mina.filter.codec.ProtocolDecoderException;
import org.junit.Test;
import org.kaazing.gateway.transport.ws.WsBinaryMessage;
import org.kaazing.gateway.transport.ws.WsContinuationMessage;
import org.kaazing.gateway.transport.ws.WsMessage;
import org.kaazing.gateway.transport.ws.WsPingMessage;
import org.kaazing.gateway.transport.ws.WsPongMessage;
import org.kaazing.gateway.transport.ws.WsTextMessage;
import org.kaazing.mina.core.buffer.IoBufferAllocatorEx;
import org.kaazing.mina.core.buffer.IoBufferEx;
import org.kaazing.mina.filter.codec.ProtocolCodecSessionEx;
public class WsFrameDecoderTest {
private static final Charset UTF_8 = Charset.forName("UTF-8");
/** A ping frame with an empty payload must decode to an empty WsPingMessage. */
@Test
public void decodeZeroLengthPingFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 0);

    // 0x89 = FIN + ping opcode; 0x00 = zero-length payload
    IoBufferEx frame = allocator.wrap(allocator.allocate(2));
    frame.put((byte) 0x89);
    frame.put((byte) 0x00);
    frame.flip();

    decoder.decode(session, (IoBuffer) frame, session.getDecoderOutput());

    WsMessage decoded = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsPingMessage(allocator.wrap(allocator.allocate(0))), decoded);
    assertTrue(session.getDecoderOutputQueue().isEmpty());

    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(frame.hasRemaining());
}
/** Text frame + continuation-with-FIN + final text frame decode to three messages in order. */
@Test
public void decodeTextContinuationFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 200);

    int firstPayload = 125;
    int secondPayload = 2;
    int thirdPayload = 4;
    String first = createString('a', firstPayload);
    String second = createString('b', secondPayload);
    String third = createString('c', thirdPayload);

    IoBufferEx in = allocator.wrap(allocator.allocate(firstPayload + secondPayload + thirdPayload + 6))
            // text frame (FIN not set)
            .put((byte) 0x01)
            .put((byte) firstPayload)
            .putString(first, UTF_8.newEncoder())
            // continuation frame with FIN
            .put((byte) 0x80)
            .put((byte) secondPayload)
            .putString(second, UTF_8.newEncoder())
            // text frame with FIN
            .put((byte) 0x81)
            .put((byte) thirdPayload)
            .putString(third, UTF_8.newEncoder())
            .flip();

    decoder.decode(session, (IoBuffer) in, session.getDecoderOutput());

    assertEquals(new WsTextMessage(allocator.wrap(ByteBuffer.wrap(first.getBytes(UTF_8))), false),
            session.getDecoderOutputQueue().poll());
    assertEquals(new WsContinuationMessage(allocator.wrap(ByteBuffer.wrap(second.getBytes(UTF_8)))),
            session.getDecoderOutputQueue().poll());
    assertEquals(new WsTextMessage(allocator.wrap(ByteBuffer.wrap(third.getBytes(UTF_8)))),
            session.getDecoderOutputQueue().poll());
    assertTrue(session.getDecoderOutputQueue().isEmpty());

    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(in.hasRemaining());
}
/** Binary frame + continuation-with-FIN + final binary frame decode to three messages in order. */
@Test
public void decodeBinaryContinuationFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 200);

    int firstPayload = 125;
    int secondPayload = 2;
    int thirdPayload = 4;
    byte[] first = createString('a', firstPayload).getBytes("UTF-8");
    byte[] second = createString('b', secondPayload).getBytes("UTF-8");
    byte[] third = createString('c', thirdPayload).getBytes("UTF-8");

    IoBufferEx in = allocator.wrap(allocator.allocate(firstPayload + secondPayload + thirdPayload + 6))
            // binary frame (FIN not set)
            .put((byte) 0x02)
            .put((byte) firstPayload)
            .put(first)
            // continuation frame with FIN
            .put((byte) 0x80)
            .put((byte) secondPayload)
            .put(second)
            // binary frame with FIN
            .put((byte) 0x82)
            .put((byte) thirdPayload)
            .put(third)
            .flip();

    decoder.decode(session, (IoBuffer) in, session.getDecoderOutput());

    assertEquals(new WsBinaryMessage(allocator.wrap(ByteBuffer.wrap(first)), false),
            session.getDecoderOutputQueue().poll());
    assertEquals(new WsContinuationMessage(allocator.wrap(ByteBuffer.wrap(second))),
            session.getDecoderOutputQueue().poll());
    assertEquals(new WsBinaryMessage(allocator.wrap(ByteBuffer.wrap(third))),
            session.getDecoderOutputQueue().poll());
    assertTrue(session.getDecoderOutputQueue().isEmpty());

    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(in.hasRemaining());
}
/**
 * A continuation frame whose header and 100-byte payload arrive in three
 * separate network reads must be reassembled into a single continuation
 * message.
 */
@Test
public void decodeTextContinuationFrameWithFragmentedPayload() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 250);
    String textFramePayload = createString('a', 100);
    String continuationFramePayloadFirstFragment = createString('b', 50);
    String continuationFramePayloadSecondFragment = createString('b', 50);
    IoBufferEx in = allocator.wrap(allocator.allocate(102))
            // text frame (FIN not set)
            .put((byte) 0x01)
            .put((byte) 100)
            .putString(textFramePayload, UTF_8.newEncoder())
            .flip();
    IoBufferEx[] array = new IoBufferEx[] { allocator.wrap(allocator.allocate(2))
            // continuation frame fragment (opcode and payload length 0x64 = 100)
            .put((byte) 0x80)
            .put((byte) 0x64)
            .flip(),
            allocator.wrap(allocator.allocate(50))
            // continuation frame payload first fragment
            .putString(continuationFramePayloadFirstFragment, UTF_8.newEncoder())
            .flip(),
            allocator.wrap(allocator.allocate(50))
            // continuation frame payload second fragment
            .putString(continuationFramePayloadSecondFragment, UTF_8.newEncoder())
            .flip()
    };
    decoder.decode(session, (IoBuffer) in, session.getDecoderOutput());
    for (IoBufferEx buffer : array) {
        decoder.decode(session, (IoBuffer) buffer, session.getDecoderOutput());
    }
    WsMessage out1 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(allocator.wrap(ByteBuffer.wrap(textFramePayload.getBytes(UTF_8))), false), out1);
    WsMessage out2 = (WsMessage) session.getDecoderOutputQueue().poll();
    // both 50-byte fragments must have been merged into one 100-byte continuation message
    assertEquals(new WsContinuationMessage(allocator.wrap(ByteBuffer.wrap(createString('b', 100).getBytes(UTF_8)))), out2);
}
/**
 * Binary counterpart of the fragmented-payload test: the continuation
 * frame's 100-byte payload arrives split into two 50-byte reads and must
 * be reassembled into a single continuation message.
 */
@Test
public void decodeBinaryContinuationFrameWithFragmentedPayload() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 250);
    byte[] binaryFramePayload = createString('a', 100).getBytes();
    byte[] continuationFramePayload = createString('b', 100).getBytes();
    IoBufferEx in = allocator.wrap(allocator.allocate(102))
            // binary frame (FIN not set)
            .put((byte) 0x02)
            .put((byte) 100)
            .put(binaryFramePayload)
            .flip();
    IoBufferEx[] array = new IoBufferEx[] { allocator.wrap(allocator.allocate(2))
            // continuation frame fragment (opcode and payload length 0x64 = 100)
            .put((byte) 0x80)
            .put((byte) 0x64)
            .flip(),
            allocator.wrap(allocator.allocate(50))
            // continuation frame first fragment
            .put(continuationFramePayload, 0, 50)
            .flip(),
            allocator.wrap(allocator.allocate(50))
            // continuation frame second fragment
            .put(continuationFramePayload, 50, 50)
            .flip()
    };
    decoder.decode(session, (IoBuffer) in, session.getDecoderOutput());
    for (IoBufferEx buffer : array) {
        decoder.decode(session, (IoBuffer) buffer, session.getDecoderOutput());
    }
    WsMessage out1 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(allocator.wrap(ByteBuffer.wrap(binaryFramePayload)), false), out1);
    WsMessage out2 = (WsMessage) session.getDecoderOutputQueue().poll();
    // both 50-byte fragments must have been merged into one 100-byte continuation message
    assertEquals(new WsContinuationMessage(allocator.wrap(ByteBuffer.wrap(continuationFramePayload))), out2);
}
/**
 * Fail-fast size enforcement: with a 150-byte limit, a 100-byte text frame
 * followed by a continuation header announcing another 100 bytes must throw
 * as soon as the length is decoded, before any payload arrives.
 */
@Test(expected = ProtocolDecoderException.class)
public void decodeFragmentedContinuationFrameExceedingMaxMessageSize() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 150);
    String textFramePayload = createString('a', 100);
    IoBufferEx textFrameBuffer = allocator.wrap(allocator.allocate(102))
            // text frame (FIN not set)
            .put((byte) 0x01)
            .put((byte) 100)
            .putString(textFramePayload, UTF_8.newEncoder())
            .flip();
    // the decoder should fail fast when message size exceeds the max message size
    // without waiting for payload
    IoBufferEx continuationFrameBuffer = allocator.wrap(allocator.allocate(2))
            // continuation frame fragment (opcode and payload length 0x64 = 100)
            .put((byte) 0x80)
            .put((byte) 0x64)
            .flip();
    decoder.decode(session, (IoBuffer) textFrameBuffer, session.getDecoderOutput());
    // since the maximum message size is 150, the following statement will
    // cause the accumulated message size (200) to exceed it; the decoder
    // should fail fast once the payload length is decoded regardless of the
    // availability of payload
    decoder.decode(session, (IoBuffer) continuationFrameBuffer, session.getDecoderOutput());
}
/**
 * A ping control frame arriving between the fragments of a fragmented text
 * message must be emitted immediately without breaking reassembly of the
 * surrounding continuation sequence.
 */
@Test
public void pingInTextContinuationSequence() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 500);
    int firstPayload = 125;
    String first = createString('a', firstPayload);
    int secondPayload = 125;
    String second = createString('b', secondPayload);
    int thirdPayload = 4;
    String third = createString('c', thirdPayload);
    int fourthPayload = 6;
    byte[] fourth = createString('d', fourthPayload).getBytes("UTF-8");
    IoBufferEx in = allocator.wrap(allocator.allocate(firstPayload + secondPayload + 2 + thirdPayload + fourthPayload + 8))
            // text frame (FIN not set)
            .put((byte) 0x01)
            .put((byte) firstPayload)
            .putString(first, UTF_8.newEncoder())
            // continuation frame (FIN not set)
            .put((byte) 0x00)
            .put((byte) secondPayload)
            .putString(second, UTF_8.newEncoder())
            // ping frame
            .put((byte) 0x89)
            .put((byte) 0x00)
            // continuation frame with FIN
            .put((byte) 0x80)
            .put((byte) thirdPayload)
            .putString(third, UTF_8.newEncoder())
            // binary frame with FIN
            .put((byte) 0x82)
            .put((byte) fourthPayload)
            .put(fourth)
            .flip();
    decoder.decode(session, (IoBuffer) in, session.getDecoderOutput());
    WsMessage out1 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(allocator.wrap(ByteBuffer.wrap(first.getBytes(UTF_8))), false), out1);
    WsMessage out2 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsContinuationMessage(allocator.wrap(ByteBuffer.wrap(second.getBytes(UTF_8))), false), out2);
    // the interleaved ping is emitted between the two continuation fragments
    WsMessage out = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsPingMessage(allocator.wrap(allocator.allocate(0))), out);
    WsMessage out3 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsContinuationMessage(allocator.wrap(ByteBuffer.wrap(third.getBytes(UTF_8)))), out3);
    WsMessage out4 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(allocator.wrap(ByteBuffer.wrap(fourth))), out4);
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(in.hasRemaining());
}
/**
 * Builds the frame sequence used by testMaxMessageSize(): a fragmented text
 * message carrying 125 + 125 + 4 = 254 payload bytes in total, with a ping
 * interleaved between fragments and a 6-byte binary frame at the end.
 */
private IoBufferEx getMaxMessageSizeBuffer(IoBufferAllocatorEx<?> allocator) throws Exception {
    int firstPayload = 125;
    String first = createString('a', firstPayload);
    int secondPayload = 125;
    String second = createString('b', secondPayload);
    int thirdPayload = 4;
    String third = createString('c', thirdPayload);
    int fourthPayload = 6;
    byte[] fourth = createString('d', fourthPayload).getBytes("UTF-8");
    return allocator.wrap(allocator.allocate(firstPayload + secondPayload + 2 + thirdPayload + fourthPayload + 8))
            // text frame (FIN not set)
            .put((byte) 0x01).put((byte) firstPayload).putString(first, UTF_8.newEncoder())
            // continuation frame (FIN not set)
            .put((byte) 0x00).put((byte) secondPayload).putString(second, UTF_8.newEncoder())
            // ping frame
            .put((byte) 0x89).put((byte) 0x00)
            // continuation frame with FIN
            .put((byte) 0x80).put((byte) thirdPayload).putString(third, UTF_8.newEncoder())
            // binary frame
            .put((byte) 0x82).put((byte) fourthPayload).put(fourth)
            .flip();
}
/**
 * maxMessageSize applies to the accumulated size of a fragmented message:
 * the sequence from getMaxMessageSizeBuffer() totals 254 text-payload bytes,
 * so decoding must fail with a 253-byte limit and succeed at 254 and 255.
 */
@Test
public void testMaxMessageSize() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    try {
        IoBuffer in = (IoBuffer) getMaxMessageSizeBuffer(allocator);
        ProtocolDecoder decoder = new WsFrameDecoder(allocator, 253);
        decoder.decode(session, in, session.getDecoderOutput());
        fail("Expected throw exception as the message size > 253");
    } catch (ProtocolDecoderException e) {
        // expected: the accumulated message size (254) exceeds the 253-byte limit
    }
    // limit equal to the message size (254) must pass
    IoBuffer in = (IoBuffer) getMaxMessageSizeBuffer(allocator);
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 254);
    decoder.decode(session, in, session.getDecoderOutput());
    // limit above the message size (255) must pass as well
    in = (IoBuffer) getMaxMessageSizeBuffer(allocator);
    decoder = new WsFrameDecoder(allocator, 255);
    decoder.decode(session, in, session.getDecoderOutput());
}
/**
 * Counterpart of pingInTextContinuationSequence with byte[] payloads: a
 * ping frame interleaved between continuation fragments is emitted
 * immediately without breaking reassembly of the sequence.
 */
@Test
public void pingInBinaryContinuationSequence() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 400);
    int firstPayload = 125;
    byte[] first = createString('a', firstPayload).getBytes("UTF-8");
    int secondPayload = 125;
    byte[] second = createString('b', secondPayload).getBytes("UTF-8");
    int thirdPayload = 4;
    byte[] third = createString('c', thirdPayload).getBytes("UTF-8");
    int fourthPayload = 6;
    String fourth = createString('d', fourthPayload);
    IoBufferEx in = allocator.wrap(allocator.allocate(firstPayload + secondPayload + 2 + thirdPayload + fourthPayload + 8))
            // text frame (FIN not set)
            .put((byte) 0x01)
            .put((byte) firstPayload)
            .put(first)
            // continuation frame (FIN not set)
            .put((byte) 0x00)
            .put((byte) secondPayload)
            .put(second)
            // ping frame
            .put((byte) 0x89)
            .put((byte) 0x00)
            // continuation frame with FIN
            .put((byte) 0x80)
            .put((byte) thirdPayload)
            .put(third)
            // binary frame with FIN
            .put((byte) 0x82)
            .put((byte) fourthPayload)
            .putString(fourth, UTF_8.newEncoder())
            .flip();
    decoder.decode(session, (IoBuffer) in, session.getDecoderOutput());
    WsMessage out1 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(allocator.wrap(ByteBuffer.wrap(first)), false), out1);
    WsMessage out2 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsContinuationMessage(allocator.wrap(ByteBuffer.wrap(second)), false), out2);
    // the interleaved ping is emitted between the two continuation fragments
    WsMessage out = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsPingMessage(allocator.wrap(allocator.allocate(0))), out);
    WsMessage out3 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsContinuationMessage(allocator.wrap(ByteBuffer.wrap(third))), out3);
    WsMessage out4 = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(allocator.wrap(ByteBuffer.wrap(fourth.getBytes(UTF_8)))), out4);
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(in.hasRemaining());
}
/** A masked pong frame with an empty payload decodes to an empty WsPongMessage. */
@Test
public void decodeZeroLengthMaskedPongFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 0);

    // 0x8A = FIN + pong opcode; 0x80 = mask bit set, zero-length; then 4 mask-key bytes
    IoBufferEx frame = allocator.wrap(allocator.allocate(6));
    frame.put((byte) 0x8A);
    frame.put((byte) 0x80);
    frame.fill(4); // mask key
    frame.flip();

    decoder.decode(session, (IoBuffer) frame, session.getDecoderOutput());

    WsMessage decoded = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsPongMessage(allocator.wrap(allocator.allocate(0)).flip()), decoded);
    assertTrue(session.getDecoderOutputQueue().isEmpty());

    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(frame.hasRemaining());
}
/** A binary frame with an empty payload decodes to an empty WsBinaryMessage. */
@Test
public void decodeZeroLengthBinaryFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 0);

    // 0x82 = FIN + binary opcode; 0x00 = zero-length payload
    IoBufferEx frame = allocator.wrap(allocator.allocate(2));
    frame.put((byte) 0x82);
    frame.put((byte) 0x00);
    frame.flip();

    decoder.decode(session, (IoBuffer) frame, session.getDecoderOutput());

    WsMessage decoded = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(allocator.wrap(allocator.allocate(0))), decoded);
    assertTrue(session.getDecoderOutputQueue().isEmpty());

    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(frame.hasRemaining());
}
/** Decodes a binary frame that uses the 16-bit extended payload length (126 marker). */
@Test
public void decodeBinaryFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 0);

    // 0x82 = FIN + binary opcode; 126 selects a 2-byte length field (0x00C8 = 200)
    IoBufferEx frame = allocator.wrap(allocator.allocate(204));
    frame.put((byte) 0x82);
    frame.put((byte) 126);
    frame.put((byte) 0x00);
    frame.put((byte) 0xC8);
    frame.fill(200);
    frame.flip();

    decoder.decode(session, (IoBuffer) frame, session.getDecoderOutput());

    WsMessage decoded = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(allocator.wrap(allocator.allocate(200)).fill(200).flip()), decoded);
    assertTrue(session.getDecoderOutputQueue().isEmpty());

    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(frame.hasRemaining());
}
/**
 * A 200-byte binary frame split across two network reads must still decode
 * into a single 200-byte message, followed by the zero-length binary frame
 * that arrives at the end of the second read.
 */
@Test
public void decodeFragmentedBinaryFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 0);
    IoBufferEx[] array = new IoBufferEx[] { allocator.wrap(allocator.allocate(104))
            // header (FIN + binary, 16-bit length 200) plus the first 100 payload bytes
            .put((byte) 0x82)
            .put((byte) 126)
            .put((byte) 0x00)
            .put((byte) 0xC8)
            .fill(100)
            .flip(),
            allocator.wrap(allocator.allocate(102))
            // remaining 100 payload bytes plus a zero-length binary frame
            .fill(100)
            .put((byte) 0x82)
            .put((byte) 0x00)
            .flip(),
    };
    for (IoBufferEx in : array) {
        decoder.decode(session, (IoBuffer) in, session.getDecoderOutput());
    }
    WsMessage fragmented = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(allocator.wrap(allocator.allocate(200)).fill(200).flip()), fragmented);
    WsMessage empty = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(allocator.wrap(allocator.allocate(0))), empty);
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    for (IoBufferEx in : array) {
        assertFalse(in.hasRemaining());
    }
}
/** A text frame with an empty payload decodes to an empty WsTextMessage. */
@Test
public void decodeZeroLengthTextFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 0);

    // 0x81 = FIN + text opcode; 0x00 = zero-length payload
    IoBufferEx frame = allocator.wrap(allocator.allocate(2));
    frame.put((byte) 0x81);
    frame.put((byte) 0x00);
    frame.flip();

    decoder.decode(session, (IoBuffer) frame, session.getDecoderOutput());

    WsMessage decoded = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(allocator.wrap(allocator.allocate(0)).flip()), decoded);
    assertTrue(session.getDecoderOutputQueue().isEmpty());

    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(frame.hasRemaining());
}
/** A single FIN text frame decodes to one WsTextMessage carrying its UTF-8 payload. */
@Test
public void decodeTextFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 0);

    String payload = "Hello, world";
    // 0x81 = FIN + text opcode; 0x0C = 12-byte payload
    IoBufferEx frame = allocator.wrap(allocator.allocate(14));
    frame.put((byte) 0x81);
    frame.put((byte) 0x0C);
    frame.putString(payload, UTF_8.newEncoder());
    frame.flip();

    decoder.decode(session, (IoBuffer) frame, session.getDecoderOutput());

    WsMessage decoded = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(allocator.wrap(ByteBuffer.wrap(payload.getBytes(UTF_8)))), decoded);
    assertTrue(session.getDecoderOutputQueue().isEmpty());

    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(frame.hasRemaining());
}
/**
 * A 12-byte text frame split across two reads ("Hello" then ", world") must
 * decode into one text message, followed by the zero-length text frame from
 * the end of the second read.
 */
@Test
public void decodeFragmentedTextFrame() throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, 0);
    IoBufferEx[] array = new IoBufferEx[] { allocator.wrap(allocator.allocate(103))
            // header (FIN + text, length 12) plus the first 5 payload bytes
            .put((byte) 0x81)
            .put((byte) 0x0C)
            .putString("Hello", UTF_8.newEncoder())
            .flip(),
            allocator.wrap(allocator.allocate(102))
            // remaining 7 payload bytes plus a zero-length text frame
            .putString(", world", UTF_8.newEncoder())
            .put((byte) 0x81)
            .put((byte) 0x00)
            .flip(),
    };
    for (IoBufferEx in : array) {
        decoder.decode(session, (IoBuffer) in, session.getDecoderOutput());
    }
    WsMessage fragmented = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(allocator.wrap(ByteBuffer.wrap("Hello, world".getBytes(UTF_8)))), fragmented);
    WsMessage empty = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(allocator.wrap(allocator.allocate(0))), empty);
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    for (IoBufferEx in : array) {
        assertFalse(in.hasRemaining());
    }
}
// 200-byte frame with the limit exactly 200: must decode cleanly.
@Test
public void sizeLimitEqualledDecodeBinaryFrame() throws Exception {
    sizeLimitDecodeBinaryFrame(200);
}
// 200-byte frame with limit 199: the decoder must reject it.
@Test(expected=ProtocolDecoderException.class)
public void sizeLimitExceededDecodeBinaryFrame() throws Exception {
    sizeLimitDecodeBinaryFrame(199);
}
// A negative limit should be interpreted as unset (no limit), so the
// 200-byte frame must decode cleanly.
@Test
public void sizeLimitNegativeDecodeBinaryFrame() throws Exception {
    sizeLimitDecodeBinaryFrame(-1);
}
// Decodes a single 200-byte binary frame (16-bit extended length) under the
// given maximum message size.
private void sizeLimitDecodeBinaryFrame(int maxSize) throws Exception {
    ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
    ProtocolDecoder decoder = new WsFrameDecoder(allocator, maxSize);

    // 0x82 = FIN + binary opcode; 126 selects a 2-byte length field (0x00C8 = 200)
    IoBufferEx frame = allocator.wrap(allocator.allocate(204));
    frame.put((byte) 0x82);
    frame.put((byte) 126);
    frame.put((byte) 0x00);
    frame.put((byte) 0xC8);
    frame.fill(200);
    frame.flip();

    decoder.decode(session, (IoBuffer) frame, session.getDecoderOutput());

    WsMessage decoded = (WsMessage) session.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(allocator.wrap(allocator.allocate(200)).fill(200).flip()), decoded);
    assertTrue(session.getDecoderOutputQueue().isEmpty());

    decoder.finishDecode(session, session.getDecoderOutput());
    assertTrue(session.getDecoderOutputQueue().isEmpty());
    assertFalse(frame.hasRemaining());
}
// Boundary case: limit equals the total reassembled payload size (200 bytes) across both chunks.
@Test
public void sizeLimitEqualledDecodeFragmentedBinaryFrame() throws Exception {
sizeLimitDecodeFragmentedBinaryFrame(200);
}
// One byte under the reassembled payload size: must throw even though each individual chunk is small.
@Test(expected=ProtocolDecoderException.class)
public void sizeLimitExceededDecodeFragmentedBinaryFrame() throws Exception {
sizeLimitDecodeFragmentedBinaryFrame(199);
}
// Decode a 200-byte binary frame delivered in two network chunks, followed by an empty binary frame.
private void sizeLimitDecodeFragmentedBinaryFrame(int maxSize) throws Exception {
    ProtocolCodecSessionEx codecSession = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> bufferAllocator = codecSession.getBufferAllocator();
    ProtocolDecoder frameDecoder = new WsFrameDecoder(bufferAllocator, maxSize);

    // Chunk 1: frame header (binary, 16-bit length 0x00C8 == 200) plus the first 100 payload bytes.
    IoBufferEx firstChunk = bufferAllocator.wrap(bufferAllocator.allocate(104))
            .put((byte) 0x82)
            .put((byte) 126)
            .put((byte) 0x00)
            .put((byte) 0xC8)
            .fill(100)
            .flip();
    // Chunk 2: the remaining 100 payload bytes followed by an empty binary frame.
    IoBufferEx secondChunk = bufferAllocator.wrap(bufferAllocator.allocate(102))
            .fill(100)
            .put((byte) 0x82)
            .put((byte) 0x00)
            .flip();
    IoBufferEx[] chunks = new IoBufferEx[] { firstChunk, secondChunk };

    for (IoBufferEx chunk : chunks) {
        frameDecoder.decode(codecSession, (IoBuffer) chunk, codecSession.getDecoderOutput());
    }

    WsMessage reassembled = (WsMessage) codecSession.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(bufferAllocator.wrap(bufferAllocator.allocate(200)).fill(200).flip()), reassembled);
    WsMessage emptyFrame = (WsMessage) codecSession.getDecoderOutputQueue().poll();
    assertEquals(new WsBinaryMessage(bufferAllocator.wrap(bufferAllocator.allocate(0))), emptyFrame);
    assertTrue(codecSession.getDecoderOutputQueue().isEmpty());

    frameDecoder.finishDecode(codecSession, codecSession.getDecoderOutput());
    assertTrue(codecSession.getDecoderOutputQueue().isEmpty());
    for (IoBufferEx chunk : chunks) {
        assertFalse(chunk.hasRemaining());
    }
}
// One byte under the 12-byte text payload: decoding must throw ProtocolDecoderException.
@Test(expected=ProtocolDecoderException.class)
public void sizeLimitExceededDecodeTextFrame() throws Exception {
sizeLimitDecodeTextFrame(11);
}
// Boundary case: a limit of 12 exactly matches the text payload size, so decoding succeeds.
@Test
public void sizeLimitEqualledDecodeTextFrame() throws Exception {
sizeLimitDecodeTextFrame(12);
}
// Decode a single 12-byte text frame ("Hello, world") with the given size limit in force.
private void sizeLimitDecodeTextFrame(int maxSize) throws Exception {
    ProtocolCodecSessionEx codecSession = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> bufferAllocator = codecSession.getBufferAllocator();
    ProtocolDecoder frameDecoder = new WsFrameDecoder(bufferAllocator, maxSize);

    // Text frame (0x81) with a 12-byte UTF-8 payload.
    IoBufferEx networkData = bufferAllocator.wrap(bufferAllocator.allocate(14))
            .put((byte) 0x81)
            .put((byte) 12)
            .putString("Hello, world", UTF_8.newEncoder())
            .flip();

    frameDecoder.decode(codecSession, (IoBuffer) networkData, codecSession.getDecoderOutput());

    WsMessage decoded = (WsMessage) codecSession.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(bufferAllocator.wrap(ByteBuffer.wrap("Hello, world".getBytes(UTF_8)))), decoded);
    assertTrue(codecSession.getDecoderOutputQueue().isEmpty());

    frameDecoder.finishDecode(codecSession, codecSession.getDecoderOutput());
    assertTrue(codecSession.getDecoderOutputQueue().isEmpty());
    assertFalse(networkData.hasRemaining());
}
// Boundary case: limit equals the total reassembled text payload size (12 bytes) across both chunks.
@Test
public void sizeLimitEqualledDecodeFragmentedTextFrame() throws Exception {
sizeLimitDecodeFragmentedTextFrame(12);
}
// One byte under the reassembled text payload size: must throw even though each chunk is small.
@Test(expected=ProtocolDecoderException.class)
public void sizeLimitExceededDecodeFragmentedTextFrame() throws Exception {
sizeLimitDecodeFragmentedTextFrame(11);
}
// Decode a 12-byte text frame ("Hello, world") sent in two chunks, followed by an empty text frame.
private void sizeLimitDecodeFragmentedTextFrame(int maxSize) throws Exception {
    ProtocolCodecSessionEx codecSession = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> bufferAllocator = codecSession.getBufferAllocator();
    ProtocolDecoder frameDecoder = new WsFrameDecoder(bufferAllocator, maxSize);

    // Chunk 1: text frame header (payload length 0x0C == 12) plus "Hello".
    IoBufferEx firstChunk = bufferAllocator.wrap(bufferAllocator.allocate(103))
            .put((byte) 0x81)
            .put((byte) 0x0C)
            .putString("Hello", UTF_8.newEncoder())
            .flip();
    // Chunk 2: the remaining ", world" followed by an empty text frame.
    IoBufferEx secondChunk = bufferAllocator.wrap(bufferAllocator.allocate(102))
            .putString(", world", UTF_8.newEncoder())
            .put((byte) 0x81)
            .put((byte) 0x00)
            .flip();
    IoBufferEx[] chunks = new IoBufferEx[] { firstChunk, secondChunk };

    for (IoBufferEx chunk : chunks) {
        frameDecoder.decode(codecSession, (IoBuffer) chunk, codecSession.getDecoderOutput());
    }

    WsMessage reassembled = (WsMessage) codecSession.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(bufferAllocator.wrap(ByteBuffer.wrap("Hello, world".getBytes(UTF_8)))), reassembled);
    WsMessage emptyFrame = (WsMessage) codecSession.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(bufferAllocator.wrap(bufferAllocator.allocate(0)).flip()), emptyFrame);
    assertTrue(codecSession.getDecoderOutputQueue().isEmpty());

    frameDecoder.finishDecode(codecSession, codecSession.getDecoderOutput());
    assertTrue(codecSession.getDecoderOutputQueue().isEmpty());
    for (IoBufferEx chunk : chunks) {
        assertFalse(chunk.hasRemaining());
    }
}
// Boundary case: the limit (15) equals the larger of the two frames, so both decode successfully.
@Test
public void sizeLimitEqualledDecodeDoubleTextFrame() throws Exception {
sizeLimitDecodeDoubleTextFrame(15);
}
// One byte under the larger (15-byte) frame: decoding the second frame must throw.
@Test(expected=ProtocolDecoderException.class)
public void sizeLimitExceededDecodeDoubleTextFrame() throws Exception {
sizeLimitDecodeDoubleTextFrame(14);
}
// Decode a 12-byte text frame and a 15-byte text frame arriving in one go (a single network packet).
private void sizeLimitDecodeDoubleTextFrame(int maxSize) throws Exception {
    ProtocolCodecSessionEx codecSession = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> bufferAllocator = codecSession.getBufferAllocator();
    ProtocolDecoder frameDecoder = new WsFrameDecoder(bufferAllocator, maxSize);

    // Two complete text frames back to back: 2-byte header + 12-byte payload, then 2-byte header + 15-byte payload.
    IoBufferEx networkData = bufferAllocator.wrap(bufferAllocator.allocate(12 + 2 + 15 + 2))
            .put((byte) 0x81)
            .put((byte) 12)
            .putString("123456789012", UTF_8.newEncoder())
            .put((byte) 0x81)
            .put((byte) 15)
            .putString("123456789012345", UTF_8.newEncoder())
            .flip();

    frameDecoder.decode(codecSession, (IoBuffer) networkData, codecSession.getDecoderOutput());

    WsMessage firstMessage = (WsMessage) codecSession.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(bufferAllocator.wrap(ByteBuffer.wrap("123456789012".getBytes(UTF_8)))), firstMessage);
    WsMessage secondMessage = (WsMessage) codecSession.getDecoderOutputQueue().poll();
    assertEquals(new WsTextMessage(bufferAllocator.wrap(ByteBuffer.wrap("123456789012345".getBytes(UTF_8)))), secondMessage);
    assertTrue(codecSession.getDecoderOutputQueue().isEmpty());

    frameDecoder.finishDecode(codecSession, codecSession.getDecoderOutput());
    assertTrue(codecSession.getDecoderOutputQueue().isEmpty());
    assertFalse(networkData.hasRemaining());
}
// Make sure we fail early for large text messages: the decoder should throw as soon as it
// processes a network packet that exceeds the limit (the first packet in this case).
@Test(expected = ProtocolDecoderException.class)
public void sizeLimitDecodeTextFrameFailEarly1() throws Exception {
    ProtocolCodecSessionEx codecSession = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> bufferAllocator = codecSession.getBufferAllocator();
    ProtocolDecoder frameDecoder = new WsFrameDecoder(bufferAllocator, 20);

    // Build a 30-character payload of cycling digits, well past the 20-byte limit.
    final int dataSize = 30;
    StringBuilder payload = new StringBuilder(dataSize);
    for (int i = 0; i < dataSize; i++) {
        payload.append(i % 10);
    }
    IoBufferEx networkData = bufferAllocator.wrap(bufferAllocator.allocate(dataSize + 2))
            .put((byte) 0x81)
            .put((byte) 30)
            .putString(payload.toString(), UTF_8.newEncoder())
            .flip();

    // As soon as we sent part of a message that exceeds the limit it should throw the exception
    frameDecoder.decode(codecSession, (IoBuffer) networkData, codecSession.getDecoderOutput());
}
// Make sure we fail early for large text messages: we should fail as soon as we process
// a network packet that exceeds the limit (the second packet in this case).
@Test(expected = ProtocolDecoderException.class)
public void sizeLimitDecodeTextFrameFailEarly2() throws Exception {
    ProtocolCodecSessionEx codecSession = new ProtocolCodecSessionEx();
    IoBufferAllocatorEx<?> bufferAllocator = codecSession.getBufferAllocator();
    ProtocolDecoder frameDecoder = new WsFrameDecoder(bufferAllocator, 20);

    // Build a 30-character payload of cycling digits, well past the 20-byte limit.
    final int dataSize = 30;
    StringBuilder payload = new StringBuilder(dataSize);
    for (int i = 0; i < dataSize; i++) {
        payload.append(i % 10);
    }
    IoBufferEx networkData = bufferAllocator.wrap(bufferAllocator.allocate(dataSize + 2))
            .put((byte) 0x81)
            .put((byte) 30)
            .putString(payload.toString(), UTF_8.newEncoder())
            .flip();

    frameDecoder.decode(codecSession, (IoBuffer) networkData.getSlice(10), codecSession.getDecoderOutput());
    // Now if we send the next 12 bytes that should exceed the limit (first byte is control byte, doesn't count)
    frameDecoder.decode(codecSession, (IoBuffer) networkData.getSlice(12), codecSession.getDecoderOutput());
}
/*
* Use the below utility to decode a ws frame.
public static String decodeABinaryFrame(byte[] hexBytes) throws Exception {
ProtocolCodecSessionEx session = new ProtocolCodecSessionEx();
IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
ProtocolDecoder decoder = new WsFrameDecoder(allocator, 0);
IoBufferEx in = allocator.wrap(allocator.allocate(hexBytes.length))
.put(hexBytes)
.flip();
decoder.decode(session, (IoBuffer) in, session.getDecoderOutput());
WsMessage out = (WsMessage)session.getDecoderOutputQueue().poll();
return new String(out.getBytes().array());
}
public static void main(String[] strings) throws Exception
{
byte b[] = {(byte)0x82, (byte)0xCB, (byte)0xF5, (byte)0xC7, (byte)0x2C, (byte)0xDE, (byte)0xB2, (byte)0x82, (byte)0x78, (byte)0xFE, (byte)0xDA, (byte)0xA2, (byte)0x4F, (byte)0xB6, (byte)0x9A, (byte)0xE7
, (byte)0x64, (byte)0x8A, (byte)0xA1, (byte)0x97, (byte)0x03, (byte)0xEF, (byte)0xDB, (byte)0xF6, (byte)0x21, (byte)0xD4, (byte)0xA6, (byte)0xA2, (byte)0x4F, (byte)0xF3, (byte)0xA2, (byte)0xA2
, (byte)0x4E, (byte)0x8D, (byte)0x9A, (byte)0xA4, (byte)0x47, (byte)0xBB, (byte)0x81, (byte)0xEA, (byte)0x69, (byte)0xA6, (byte)0x81, (byte)0xA2, (byte)0x42, (byte)0xAD, (byte)0x9C, (byte)0xA8
, (byte)0x42, (byte)0xAD, (byte)0xCF, (byte)0xE7, (byte)0x54, (byte)0xF3, (byte)0x9E, (byte)0xA6, (byte)0x4D, (byte)0xA4, (byte)0x9C, (byte)0xA9, (byte)0x4B, (byte)0xF3, (byte)0x9D, (byte)0xB3
, (byte)0x58, (byte)0xAE, (byte)0xD8, (byte)0xB5, (byte)0x49, (byte)0xA8, (byte)0x94, (byte)0xAB, (byte)0x45, (byte)0xBA, (byte)0x94, (byte)0xB3, (byte)0x49, (byte)0xD3, (byte)0xFF, (byte)0xCA
, (byte)0x26};
String s = decodeABinaryFrame(b);
System.out.print(s);
}
*/
// Builds a string consisting of size copies of the character ch.
private static String createString(char ch, int size) {
    StringBuilder builder = new StringBuilder(size);
    for (int i = 0; i < size; i++) {
        builder.append(ch);
    }
    return builder.toString();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.blueprint.container;
import java.io.ByteArrayInputStream;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
import org.apache.aries.blueprint.container.BeanRecipe.UnwrapperedBeanHolder;
import org.apache.aries.blueprint.di.CollectionRecipe;
import org.apache.aries.blueprint.di.MapRecipe;
import org.apache.aries.blueprint.services.ExtendedBlueprintContainer;
import org.apache.aries.blueprint.utils.ReflectionUtils;
import org.osgi.service.blueprint.container.Converter;
import org.osgi.service.blueprint.container.ReifiedType;
import static org.apache.aries.blueprint.utils.ReflectionUtils.getRealCause;
/**
* Implementation of the Converter.
*
* This object contains all the registered Converters which can be registered
* by using {@link #registerConverter(Converter)}
* and unregistered using {@link #unregisterConverter(Converter)}.
*
* Each {@link org.osgi.service.blueprint.container.BlueprintContainer} has its own AggregateConverter
* used to register converters defined by the related blueprint bundle.
*
* @version $Rev$, $Date$
*/
public class AggregateConverter implements Converter {
/**
* Objects implementing this interface will bypass the default conversion rules
* and be called directly to transform into the expected type.
*/
public static interface Convertible {
Object convert(ReifiedType type) throws Exception;
}
private static class ConversionResult {
public final Converter converter;
public final Object value;
public ConversionResult(Converter converter, Object value) {
this.converter = converter;
this.value = value;
}
}
private ExtendedBlueprintContainer blueprintContainer;
private List<Converter> converters = new ArrayList<Converter>();
public AggregateConverter(ExtendedBlueprintContainer blueprintContainer) {
this.blueprintContainer = blueprintContainer;
}
public void registerConverter(Converter converter) {
converters.add(converter);
}
public void unregisterConverter(Converter converter) {
converters.remove(converter);
}
public boolean canConvert(Object fromValue, final ReifiedType toType) {
if (fromValue == null) {
return true;
} else if (fromValue instanceof UnwrapperedBeanHolder) {
fromValue = ((UnwrapperedBeanHolder) fromValue).unwrapperedBean;
}
if (isAssignable(fromValue, toType)) {
return true;
}
final Object toTest = fromValue;
boolean canConvert = false;
AccessControlContext acc = blueprintContainer.getAccessControlContext();
if (acc == null) {
canConvert = canConvertWithConverters(toTest, toType);
} else {
canConvert = AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
public Boolean run() {
return canConvertWithConverters(toTest, toType);
}
}, acc);
}
if (canConvert) {
return true;
}
// TODO implement better logic ?!
try {
convert(toTest, toType);
return true;
} catch (Exception e) {
return false;
}
}
public Object convert(Object fromValue, final ReifiedType type) throws Exception {
// Discard null values
if (fromValue == null) {
return null;
}
// First convert service proxies
if (fromValue instanceof Convertible) {
return ((Convertible) fromValue).convert(type);
} else if (fromValue instanceof UnwrapperedBeanHolder) {
UnwrapperedBeanHolder holder = (UnwrapperedBeanHolder) fromValue;
if (isAssignable(holder.unwrapperedBean, type)) {
return BeanRecipe.wrap(holder, type.getRawClass());
} else {
fromValue = BeanRecipe.wrap(holder, Object.class);
}
} else if (isAssignable(fromValue, type)) {
// If the object is an instance of the type, just return it
return fromValue;
}
final Object finalFromValue = fromValue;
ConversionResult result = null;
AccessControlContext acc = blueprintContainer.getAccessControlContext();
if (acc == null) {
result = convertWithConverters(fromValue, type);
} else {
result = AccessController.doPrivileged(new PrivilegedExceptionAction<ConversionResult>() {
public ConversionResult run() throws Exception {
return convertWithConverters(finalFromValue, type);
}
}, acc);
}
if (result == null) {
if (fromValue instanceof Number && Number.class.isAssignableFrom(unwrap(toClass(type)))) {
return convertToNumber((Number) fromValue, toClass(type));
} else if (fromValue instanceof String) {
return convertFromString((String) fromValue, toClass(type), blueprintContainer);
} else if (toClass(type).isArray() && (fromValue instanceof Collection || fromValue.getClass().isArray())) {
return convertToArray(fromValue, type);
} else if (Map.class.isAssignableFrom(toClass(type)) && (fromValue instanceof Map || fromValue instanceof Dictionary)) {
return convertToMap(fromValue, type);
} else if (Dictionary.class.isAssignableFrom(toClass(type)) && (fromValue instanceof Map || fromValue instanceof Dictionary)) {
return convertToDictionary(fromValue, type);
} else if (Collection.class.isAssignableFrom(toClass(type)) && (fromValue instanceof Collection || fromValue.getClass().isArray())) {
return convertToCollection(fromValue, type);
} else {
throw new Exception("Unable to convert value " + fromValue + " to type " + type);
}
}
return result.value;
}
private Converter selectMatchingConverter(Object source, ReifiedType type) {
for (Converter converter : converters) {
if (converter.canConvert(source, type)) {
return converter;
}
}
return null;
}
private boolean canConvertWithConverters(Object source, ReifiedType type) {
return selectMatchingConverter(source,type) != null;
}
private ConversionResult convertWithConverters(Object source, ReifiedType type) throws Exception {
Converter converter = selectMatchingConverter(source,type);
if (converter == null) return null;
Object value = converter.convert(source, type);
return new ConversionResult(converter,value);
}
public Object convertToNumber(Number value, Class toType) throws Exception {
toType = unwrap(toType);
if (AtomicInteger.class == toType) {
return new AtomicInteger((Integer) convertToNumber(value, Integer.class));
} else if (AtomicLong.class == toType) {
return new AtomicLong((Long) convertToNumber(value, Long.class));
} else if (Integer.class == toType) {
return value.intValue();
} else if (Short.class == toType) {
return value.shortValue();
} else if (Long.class == toType) {
return value.longValue();
} else if (Float.class == toType) {
return value.floatValue();
} else if (Double.class == toType) {
return value.doubleValue();
} else if (Byte.class == toType) {
return value.byteValue();
} else if (BigInteger.class == toType) {
return new BigInteger(value.toString());
} else if (BigDecimal.class == toType) {
return new BigDecimal(value.toString());
} else {
throw new Exception("Unable to convert number " + value + " to " + toType);
}
}
public Object convertFromString(String value, Class toType, Object loader) throws Exception {
toType = unwrap(toType);
if (ReifiedType.class == toType) {
try {
return GenericType.parse(value, loader);
} catch (ClassNotFoundException e) {
throw new Exception("Unable to convert", e);
}
} else if (Class.class == toType) {
try {
return GenericType.parse(value, loader).getRawClass();
} catch (ClassNotFoundException e) {
throw new Exception("Unable to convert", e);
}
} else if (Locale.class == toType) {
String[] tokens = value.split("_");
if (tokens.length == 1) {
return new Locale(tokens[0]);
} else if (tokens.length == 2) {
return new Locale(tokens[0], tokens[1]);
} else if (tokens.length == 3) {
return new Locale(tokens[0], tokens[1], tokens[2]);
} else {
throw new Exception("Invalid locale string:" + value);
}
} else if (Pattern.class == toType) {
return Pattern.compile(value);
} else if (Properties.class == toType) {
Properties props = new Properties();
ByteArrayInputStream in = new ByteArrayInputStream(value.getBytes("UTF8"));
props.load(in);
return props;
} else if (Boolean.class == toType) {
if ("yes".equalsIgnoreCase(value) || "true".equalsIgnoreCase(value) || "on".equalsIgnoreCase(value)) {
return Boolean.TRUE;
} else if ("no".equalsIgnoreCase(value) || "false".equalsIgnoreCase(value) || "off".equalsIgnoreCase(value)) {
return Boolean.FALSE;
} else {
throw new RuntimeException("Invalid boolean value: " + value);
}
} else if (Integer.class == toType) {
return Integer.valueOf(value);
} else if (Short.class == toType) {
return Short.valueOf(value);
} else if (Long.class == toType) {
return Long.valueOf(value);
} else if (Float.class == toType) {
return Float.valueOf(value);
} else if (Double.class == toType) {
return Double.valueOf(value);
} else if (Character.class == toType) {
if (value.length() == 6 && value.startsWith("\\u")) {
int code = Integer.parseInt(value.substring(2), 16);
return (char)code;
} else if (value.length() == 1) {
return value.charAt(0);
} else {
throw new Exception("Invalid value for character type: " + value);
}
} else if (Byte.class == toType) {
return Byte.valueOf(value);
} else if (Enum.class.isAssignableFrom(toType)) {
return Enum.valueOf((Class<Enum>) toType, value);
} else {
return createObject(value, toType);
}
}
private Object createObject(String value, Class type) throws Exception {
if (type.isInterface() || Modifier.isAbstract(type.getModifiers())) {
throw new Exception("Unable to convert value " + value + " to type " + type + ". Type " + type + " is an interface or an abstract class");
}
Constructor constructor = null;
try {
constructor = type.getConstructor(String.class);
} catch (NoSuchMethodException e) {
throw new RuntimeException("Unable to convert to " + type);
}
try {
return ReflectionUtils.newInstance(blueprintContainer.getAccessControlContext(), constructor, value);
} catch (Exception e) {
throw new Exception("Unable to convert ", getRealCause(e));
}
}
private Object convertToCollection(Object obj, ReifiedType type) throws Exception {
ReifiedType valueType = type.getActualTypeArgument(0);
Collection newCol = (Collection) ReflectionUtils.newInstance(blueprintContainer.getAccessControlContext(),
CollectionRecipe.getCollection(toClass(type)));
if (obj.getClass().isArray()) {
for (int i = 0; i < Array.getLength(obj); i++) {
try {
Object ov = Array.get(obj, i);
Object cv = convert(ov, valueType);
newCol.add(cv);
} catch (Exception t) {
throw new Exception("Unable to convert from " + obj + " to " + type + "(error converting array element)", t);
}
}
return newCol;
} else {
boolean converted = !toClass(type).isAssignableFrom(obj.getClass());
for (Object item : (Collection) obj) {
try {
Object cv = convert(item, valueType);
converted |= item != cv;
newCol.add(cv);
} catch (Exception t) {
throw new Exception("Unable to convert from " + obj + " to " + type + "(error converting collection entry)", t);
}
}
return converted ? newCol : obj;
}
}
private Object convertToDictionary(Object obj, ReifiedType type) throws Exception {
ReifiedType keyType = type.getActualTypeArgument(0);
ReifiedType valueType = type.getActualTypeArgument(1);
if (obj instanceof Dictionary) {
Dictionary newDic = new Hashtable();
Dictionary dic = (Dictionary) obj;
boolean converted = false;
for (Enumeration keyEnum = dic.keys(); keyEnum.hasMoreElements();) {
Object key = keyEnum.nextElement();
try {
Object nk = convert(key, keyType);
Object ov = dic.get(key);
Object nv = convert(ov, valueType);
newDic.put(nk, nv);
converted |= nk != key || nv != ov;
} catch (Exception t) {
throw new Exception("Unable to convert from " + obj + " to " + type + "(error converting map entry)", t);
}
}
return converted ? newDic : obj;
} else {
Dictionary newDic = new Hashtable();
for (Map.Entry e : ((Map<Object,Object>) obj).entrySet()) {
try {
newDic.put(convert(e.getKey(), keyType), convert(e.getValue(), valueType));
} catch (Exception t) {
throw new Exception("Unable to convert from " + obj + " to " + type + "(error converting map entry)", t);
}
}
return newDic;
}
}
private Object convertToMap(Object obj, ReifiedType type) throws Exception {
ReifiedType keyType = type.getActualTypeArgument(0);
ReifiedType valueType = type.getActualTypeArgument(1);
Map newMap = (Map) ReflectionUtils.newInstance(blueprintContainer.getAccessControlContext(),
MapRecipe.getMap(toClass(type)));
if (obj instanceof Dictionary) {
Dictionary dic = (Dictionary) obj;
for (Enumeration keyEnum = dic.keys(); keyEnum.hasMoreElements();) {
Object key = keyEnum.nextElement();
try {
newMap.put(convert(key, keyType), convert(dic.get(key), valueType));
} catch (Exception t) {
throw new Exception("Unable to convert from " + obj + " to " + type + "(error converting map entry)", t);
}
}
return newMap;
} else {
boolean converted = false;
for (Map.Entry e : ((Map<Object,Object>) obj).entrySet()) {
try {
Object nk = convert(e.getKey(), keyType);
Object nv = convert(e.getValue(), valueType);
converted |= nk != e.getKey() || nv != e.getValue();
newMap.put(nk, nv);
} catch (Exception t) {
throw new Exception("Unable to convert from " + obj + " to " + type + "(error converting map entry)", t);
}
}
return converted ? newMap : obj;
}
}
private Object convertToArray(Object obj, ReifiedType type) throws Exception {
if (obj instanceof Collection) {
obj = ((Collection) obj).toArray();
}
if (!obj.getClass().isArray()) {
throw new Exception("Unable to convert from " + obj + " to " + type);
}
ReifiedType componentType;
if (type.size() > 0) {
componentType = type.getActualTypeArgument(0);
} else {
componentType = new GenericType(type.getRawClass().getComponentType());
}
Object array = Array.newInstance(toClass(componentType), Array.getLength(obj));
boolean converted = array.getClass() != obj.getClass();
for (int i = 0; i < Array.getLength(obj); i++) {
try {
Object ov = Array.get(obj, i);
Object nv = convert(ov, componentType);
converted |= nv != ov;
Array.set(array, i, nv);
} catch (Exception t) {
throw new Exception("Unable to convert from " + obj + " to " + type + "(error converting array element)", t);
}
}
return converted ? array : obj;
}
public static boolean isAssignable(Object source, ReifiedType target) {
if (source == null) {
return true;
}
if (target.size() == 0) {
return unwrap(target.getRawClass()).isAssignableFrom(unwrap(source.getClass()));
} else {
return isTypeAssignable(new GenericType(source.getClass()), target);
}
}
public static boolean isTypeAssignable(ReifiedType from, ReifiedType to) {
if (from.equals(to)) {
return true;
}
Type t = from.getRawClass().getGenericSuperclass();
if (t != null && isTypeAssignable(new GenericType(t), to)) {
return true;
}
for (Type ti : from.getRawClass().getGenericInterfaces()) {
if (ti != null && isTypeAssignable(new GenericType(ti), to)) {
return true;
}
}
return false;
}
private static Class unwrap(Class c) {
Class u = primitives.get(c);
return u != null ? u : c;
}
private static final Map<Class, Class> primitives;
static {
primitives = new HashMap<Class, Class>();
primitives.put(byte.class, Byte.class);
primitives.put(short.class, Short.class);
primitives.put(char.class, Character.class);
primitives.put(int.class, Integer.class);
primitives.put(long.class, Long.class);
primitives.put(float.class, Float.class);
primitives.put(double.class, Double.class);
primitives.put(boolean.class, Boolean.class);
}
public Object convert(Object source, Type target) throws Exception {
return convert( source, new GenericType(target));
}
private Class toClass(ReifiedType type) {
return type.getRawClass();
}
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.test;
import android.app.Instrumentation;
import android.content.Context;
import android.os.PerformanceCollector.PerformanceResultsWriter;
import com.google.android.collect.Lists;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestListener;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.runner.BaseTestRunner;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
public class AndroidTestRunner extends BaseTestRunner {
private TestResult mTestResult;
private String mTestClassName;
private List<TestCase> mTestCases;
private Context mContext;
private boolean mSkipExecution = false;
private List<TestListener> mTestListeners = Lists.newArrayList();
private Instrumentation mInstrumentation;
private PerformanceResultsWriter mPerfWriter;
@SuppressWarnings("unchecked")
public void setTestClassName(String testClassName, String testMethodName) {
Class testClass = loadTestClass(testClassName);
if (shouldRunSingleTestMethod(testMethodName, testClass)) {
TestCase testCase = buildSingleTestMethod(testClass, testMethodName);
mTestCases = Lists.newArrayList(testCase);
mTestClassName = testClass.getSimpleName();
} else {
setTest(getTest(testClass), testClass);
}
}
public void setTest(Test test) {
setTest(test, test.getClass());
}
private void setTest(Test test, Class<? extends Test> testClass) {
mTestCases = (List<TestCase>) TestCaseUtil.getTests(test, true);
if (TestSuite.class.isAssignableFrom(testClass)) {
mTestClassName = TestCaseUtil.getTestName(test);
} else {
mTestClassName = testClass.getSimpleName();
}
}
public void clearTestListeners() {
mTestListeners.clear();
}
public void addTestListener(TestListener testListener) {
if (testListener != null) {
mTestListeners.add(testListener);
}
}
@SuppressWarnings("unchecked")
private Class<? extends Test> loadTestClass(String testClassName) {
try {
return (Class<? extends Test>) mContext.getClassLoader().loadClass(testClassName);
} catch (ClassNotFoundException e) {
runFailed("Could not find test class. Class: " + testClassName);
}
return null;
}
private TestCase buildSingleTestMethod(Class testClass, String testMethodName) {
try {
Constructor c = testClass.getConstructor();
return newSingleTestMethod(testClass, testMethodName, c);
} catch (NoSuchMethodException e) {
}
try {
Constructor c = testClass.getConstructor(String.class);
return newSingleTestMethod(testClass, testMethodName, c, testMethodName);
} catch (NoSuchMethodException e) {
}
return null;
}
private TestCase newSingleTestMethod(Class testClass, String testMethodName,
Constructor constructor, Object... args) {
try {
TestCase testCase = (TestCase) constructor.newInstance(args);
testCase.setName(testMethodName);
return testCase;
} catch (IllegalAccessException e) {
runFailed("Could not access test class. Class: " + testClass.getName());
} catch (InstantiationException e) {
runFailed("Could not instantiate test class. Class: " + testClass.getName());
} catch (IllegalArgumentException e) {
runFailed("Illegal argument passed to constructor. Class: " + testClass.getName());
} catch (InvocationTargetException e) {
runFailed("Constructor thew an exception. Class: " + testClass.getName());
}
return null;
}
private boolean shouldRunSingleTestMethod(String testMethodName,
Class<? extends Test> testClass) {
return testMethodName != null && TestCase.class.isAssignableFrom(testClass);
}
private Test getTest(Class clazz) {
if (TestSuiteProvider.class.isAssignableFrom(clazz)) {
try {
TestSuiteProvider testSuiteProvider =
(TestSuiteProvider) clazz.getConstructor().newInstance();
return testSuiteProvider.getTestSuite();
} catch (InstantiationException e) {
runFailed("Could not instantiate test suite provider. Class: " + clazz.getName());
} catch (IllegalAccessException e) {
runFailed("Illegal access of test suite provider. Class: " + clazz.getName());
} catch (InvocationTargetException e) {
runFailed("Invocation exception test suite provider. Class: " + clazz.getName());
} catch (NoSuchMethodException e) {
runFailed("No such method on test suite provider. Class: " + clazz.getName());
}
}
return getTest(clazz.getName());
}
protected TestResult createTestResult() {
if (mSkipExecution) {
return new NoExecTestResult();
}
return new TestResult();
}
void setSkipExecution(boolean skip) {
mSkipExecution = skip;
}
public List<TestCase> getTestCases() {
return mTestCases;
}
public String getTestClassName() {
return mTestClassName;
}
public TestResult getTestResult() {
return mTestResult;
}
public void runTest() {
runTest(createTestResult());
}
public void runTest(TestResult testResult) {
mTestResult = testResult;
for (TestListener testListener : mTestListeners) {
mTestResult.addListener(testListener);
}
Context testContext = mInstrumentation == null ? mContext : mInstrumentation.getContext();
for (TestCase testCase : mTestCases) {
setContextIfAndroidTestCase(testCase, mContext, testContext);
setInstrumentationIfInstrumentationTestCase(testCase, mInstrumentation);
setPerformanceWriterIfPerformanceCollectorTestCase(testCase, mPerfWriter);
testCase.run(mTestResult);
}
}
private void setContextIfAndroidTestCase(Test test, Context context, Context testContext) {
if (AndroidTestCase.class.isAssignableFrom(test.getClass())) {
((AndroidTestCase) test).setContext(context);
((AndroidTestCase) test).setTestContext(testContext);
}
}
    /** Sets the application context handed to each test case before it runs. */
    public void setContext(Context context) {
        mContext = context;
    }
private void setInstrumentationIfInstrumentationTestCase(
Test test, Instrumentation instrumentation) {
if (InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
((InstrumentationTestCase) test).injectInstrumentation(instrumentation);
}
}
private void setPerformanceWriterIfPerformanceCollectorTestCase(
Test test, PerformanceResultsWriter writer) {
if (PerformanceCollectorTestCase.class.isAssignableFrom(test.getClass())) {
((PerformanceCollectorTestCase) test).setPerformanceResultsWriter(writer);
}
}
    /** Sets the instrumentation handed to instrumentation-aware test cases. */
    public void setInstrumentation(Instrumentation instrumentation) {
        mInstrumentation = instrumentation;
    }

    /**
     * @deprecated Incorrect spelling,
     * use {@link #setInstrumentation(android.app.Instrumentation)} instead.
     */
    @Deprecated
    public void setInstrumentaiton(Instrumentation instrumentation) {
        setInstrumentation(instrumentation);
    }

    /**
     * Sets the writer that receives performance results from collector cases.
     * {@hide} Pending approval for public API.
     */
    public void setPerformanceResultsWriter(PerformanceResultsWriter writer) {
        mPerfWriter = writer;
    }
    /**
     * Loads the named suite class through the context's class loader so that
     * classes packaged with the application under test can be resolved.
     */
    @Override
    protected Class loadSuiteClass(String suiteClassName) throws ClassNotFoundException {
        return mContext.getClassLoader().loadClass(suiteClassName);
    }
    /** Hook invoked when a test starts; intentionally a no-op here. */
    public void testStarted(String testName) {
    }

    /** Hook invoked when a test ends; intentionally a no-op here. */
    public void testEnded(String testName) {
    }

    /** Hook invoked when a test fails; intentionally a no-op here. */
    public void testFailed(int status, Test test, Throwable t) {
    }

    /** Reports an unrecoverable runner problem by throwing a RuntimeException. */
    protected void runFailed(String message) {
        throw new RuntimeException(message);
    }
}
| |
package com.onebitmedia.pisolipet;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.BitmapFactory.Options;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.PorterDuff.Mode;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
/**
 * Static helpers for decoding, sampling, resizing, masking, cropping and
 * blurring {@link Bitmap}s.
 */
public class BitmapUtils {
    /**
     * Decodes the content behind {@code uri} into a Bitmap using default
     * decode options.
     *
     * @return the decoded bitmap, or null on failure
     */
    public static Bitmap decodeUri(Context context, Uri uri) {
        return decodeUri(context, uri, null);
    }

    /**
     * Decodes the content behind {@code uri} into a Bitmap.
     *
     * @param opts optional {@link Options} passed to BitmapFactory, may be null
     * @return the decoded bitmap, or null if the stream could not be opened
     *         or decoded
     */
    public static Bitmap decodeUri(Context context, Uri uri, Options opts) {
        Bitmap bm = null;
        InputStream is = null;
        try {
            is = FileUtils.openUri(context, uri);
            bm = BitmapFactory.decodeStream(is, null, opts);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } finally {
            // Close in finally so the stream is not leaked if decoding throws;
            // the null guard also avoids an NPE when opening failed.
            if (is != null) {
                try {
                    is.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return bm;
    }

    /**
     * Decodes the file behind {@code uri} with an inSampleSize chosen so the
     * result is at least {@code reqWidth} x {@code reqHeight}.
     */
    public static Bitmap decodeSampledBitmapFromUri(Context context, Uri uri, int reqWidth, int reqHeight) {
        File file = new File(FileUtils.getPath(context, uri));
        // First pass: decode only the bounds to learn the raw dimensions.
        final Options options = new Options();
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(file.getAbsolutePath(), options);
        // Pick the subsampling factor from the raw vs. requested sizes.
        options.inSampleSize = calculateInSampleSize(options, reqWidth,
                reqHeight);
        // Second pass: decode the actual pixels at the chosen sample size.
        options.inJustDecodeBounds = false;
        return BitmapFactory.decodeFile(file.getAbsolutePath(), options);
    }

    /**
     * Computes a BitmapFactory inSampleSize from decoded bounds.
     *
     * @param options bounds-decoded options (outWidth/outHeight populated)
     * @return the smaller of the width/height ratios, or 1 when the image is
     *         already within the requested size
     */
    public static int calculateInSampleSize(Options options, int reqWidth, int reqHeight) {
        // Raw height and width of image
        final int height = options.outHeight;
        final int width = options.outWidth;
        int inSampleSize = 1;
        if (height > reqHeight || width > reqWidth) {
            // Calculate ratios of height and width to requested height and
            // width
            final int heightRatio = Math.round((float) height
                    / (float) reqHeight);
            final int widthRatio = Math.round((float) width / (float) reqWidth);
            // Choose the smallest ratio as inSampleSize value, this will
            // guarantee a final image with both dimensions larger than or
            // equal to the requested height and width.
            // NOTE(review): the result is not forced to a power of two, which
            // Android's decoder rounds down internally — confirm acceptable.
            inSampleSize = heightRatio < widthRatio ? heightRatio : widthRatio;
        }
        return inSampleSize;
    }

    /**
     * Builds a bitmap whose RGB channels come from {@code bmpSource} and whose
     * alpha channel comes from {@code bmpMask}. The mask is resized to the
     * source dimensions when they differ.
     */
    public static Bitmap createBitmapWithAlphaMask(Bitmap bmpSource,
            Bitmap bmpMask) {
        int width = bmpSource.getWidth();
        int height = bmpSource.getHeight();
        int size = width * height;
        if (width != bmpMask.getWidth() || height != bmpMask.getHeight())
            bmpMask = resize(bmpMask, width, height);
        int[] result = new int[size];
        int[] mask = new int[size];
        bmpSource.getPixels(result, 0, width, 0, 0, width, height);
        bmpMask.getPixels(mask, 0, width, 0, 0, width, height);
        int alphaMask = 0xff000000;
        int colorMask = 0x00ffffff;
        // Combine the mask's alpha byte with the source's RGB bytes per pixel.
        for (int i = 0; i < size; i++) {
            result[i] = (mask[i] & alphaMask) | (result[i] & colorMask);
        }
        // ensuring the bitmap is mutable
        Bitmap bmpResult = Bitmap.createBitmap(width, height, Config.ARGB_8888);
        bmpResult.setPixels(result, 0, width, 0, 0, width, height);
        return bmpResult;
    }

    /**
     * Resizes {@code src} to {@code w2} x {@code h2} by sampling, for each
     * destination pixel, the source pixel at the floor of the scaled position
     * (nearest-neighbor style, no interpolation).
     */
    public static Bitmap resize(Bitmap src, int w2, int h2) {
        int w1 = src.getWidth();
        int h1 = src.getHeight();
        int[] pxSource = new int[w1 * h1];
        int[] pxResult = new int[w2 * h2];
        src.getPixels(pxSource, 0, w1, 0, 0, w1, h1);
        double x_ratio = w1 / (double) w2;
        double y_ratio = h1 / (double) h2;
        double px, py;
        for (int i = 0; i < h2; i++) {
            for (int j = 0; j < w2; j++) {
                px = Math.floor(j * x_ratio);
                py = Math.floor(i * y_ratio);
                pxResult[(i * w2) + j] = pxSource[(int) ((py * w1) + px)];
            }
        }
        return Bitmap.createBitmap(pxResult, w2, h2, Config.ARGB_8888);
    }

    /**
     * Renders any drawable into an ARGB_8888 bitmap. BitmapDrawables are
     * unwrapped directly; other drawables are drawn at their intrinsic size
     * (minimum 1x1 when the intrinsic size is undefined).
     */
    public static Bitmap drawableToBitmap(Drawable drawable) {
        if (drawable instanceof BitmapDrawable) {
            return ((BitmapDrawable) drawable).getBitmap();
        }
        int width = drawable.getIntrinsicWidth();
        width = width > 0 ? width : 1;
        int height = drawable.getIntrinsicHeight();
        height = height > 0 ? height : 1;
        Bitmap bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
        drawable.draw(canvas);
        return bitmap;
    }

    /**
     * Crops {@code bitmap} to a circle whose diameter is the smaller of the
     * bitmap's dimensions, using a SRC_IN xfermode so pixels outside the
     * circle become transparent.
     */
    public static Bitmap circleCrop(Bitmap bitmap) {
        int size = Math.min(bitmap.getWidth(), bitmap.getHeight());
        Bitmap output = Bitmap.createBitmap(size, size, Config.ARGB_8888);
        Canvas canvas = new Canvas(output);
        final int color = 0xff424242;
        final Paint paint = new Paint();
        final Rect rect = new Rect(0, 0, size, size);
        paint.setAntiAlias(true);
        canvas.drawARGB(0, 0, 0, 0);
        paint.setColor(color);
        // canvas.drawRoundRect(rectF, roundPx, roundPx, paint);
        canvas.drawCircle(size / 2, size / 2, size / 2, paint);
        paint.setXfermode(new PorterDuffXfermode(Mode.SRC_IN));
        canvas.drawBitmap(bitmap, rect, rect, paint);
        // Bitmap _bmp = Bitmap.createScaledBitmap(output, 60, 60, false);
        // return _bmp;
        return output;
    }

    /**
     * Applies a Stack Blur of the given radius to a copy of
     * {@code sentBitmap} and returns the blurred copy.
     *
     * @param radius blur radius in pixels; values below 1 return null
     * @return a new blurred bitmap, or null when the radius is invalid
     */
    public static Bitmap fastBlur(Bitmap sentBitmap, int radius) {
        // Stack Blur v1.0 from
        // http://www.quasimondo.com/StackBlurForCanvas/StackBlurDemo.html
        //
        // Java Author: Mario Klingemann <mario at quasimondo.com>
        // http://incubator.quasimondo.com
        // created February 29, 2004
        // Android port : Yahel Bouaziz <yahel at kayenko.com>
        // http://www.kayenko.com
        // ported april 5th, 2012
        // This is a compromise between Gaussian Blur and Box blur
        // It creates much better looking blurs than Box Blur, but is
        // 7x faster than my Gaussian Blur implementation.
        //
        // I called it Stack Blur because this describes best how this
        // filter works internally: it creates a kind of moving stack
        // of colors whilst scanning through the image. Thereby it
        // just has to add one new block of color to the right side
        // of the stack and remove the leftmost color. The remaining
        // colors on the topmost layer of the stack are either added on
        // or reduced by one, depending on if they are on the right or
        // on the left side of the stack.
        //
        // If you are using this algorithm in your code please add
        // the following line:
        //
        // Stack Blur Algorithm by Mario Klingemann <mario@quasimondo.com>

        // Validate the radius before copying so an invalid call does not
        // allocate a full-size bitmap copy for nothing.
        if (radius < 1) {
            return (null);
        }
        Bitmap bitmap = sentBitmap.copy(sentBitmap.getConfig(), true);
        int w = bitmap.getWidth();
        int h = bitmap.getHeight();
        int[] pix = new int[w * h];
        bitmap.getPixels(pix, 0, w, 0, 0, w, h);
        int wm = w - 1;
        int hm = h - 1;
        int wh = w * h;
        int div = radius + radius + 1;
        int r[] = new int[wh];
        int g[] = new int[wh];
        int b[] = new int[wh];
        int rsum, gsum, bsum, x, y, i, p, yp, yi, yw;
        int vmin[] = new int[Math.max(w, h)];
        int divsum = (div + 1) >> 1;
        divsum *= divsum;
        int dv[] = new int[256 * divsum];
        for (i = 0; i < 256 * divsum; i++) {
            dv[i] = (i / divsum);
        }
        yw = yi = 0;
        int[][] stack = new int[div][3];
        int stackpointer;
        int stackstart;
        int[] sir;
        int rbs;
        int r1 = radius + 1;
        int routsum, goutsum, boutsum;
        int rinsum, ginsum, binsum;
        // Horizontal pass: blur each row into the r/g/b working arrays.
        for (y = 0; y < h; y++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            for (i = -radius; i <= radius; i++) {
                p = pix[yi + Math.min(wm, Math.max(i, 0))];
                sir = stack[i + radius];
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);
                rbs = r1 - Math.abs(i);
                rsum += sir[0] * rbs;
                gsum += sir[1] * rbs;
                bsum += sir[2] * rbs;
                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }
            }
            stackpointer = radius;
            for (x = 0; x < w; x++) {
                r[yi] = dv[rsum];
                g[yi] = dv[gsum];
                b[yi] = dv[bsum];
                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;
                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];
                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];
                if (y == 0) {
                    vmin[x] = Math.min(x + radius + 1, wm);
                }
                p = pix[yw + vmin[x]];
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);
                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];
                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;
                stackpointer = (stackpointer + 1) % div;
                sir = stack[(stackpointer) % div];
                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];
                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];
                yi++;
            }
            yw += w;
        }
        // Vertical pass: blur each column and write the final pixels,
        // preserving the original alpha channel.
        for (x = 0; x < w; x++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            yp = -radius * w;
            for (i = -radius; i <= radius; i++) {
                yi = Math.max(0, yp) + x;
                sir = stack[i + radius];
                sir[0] = r[yi];
                sir[1] = g[yi];
                sir[2] = b[yi];
                rbs = r1 - Math.abs(i);
                rsum += r[yi] * rbs;
                gsum += g[yi] * rbs;
                bsum += b[yi] * rbs;
                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }
                if (i < hm) {
                    yp += w;
                }
            }
            yi = x;
            stackpointer = radius;
            for (y = 0; y < h; y++) {
                // Preserve alpha channel: ( 0xff000000 & pix[yi] )
                pix[yi] = (0xff000000 & pix[yi]) | (dv[rsum] << 16)
                        | (dv[gsum] << 8) | dv[bsum];
                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;
                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];
                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];
                if (x == 0) {
                    vmin[y] = Math.min(y + r1, hm) * w;
                }
                p = x + vmin[y];
                sir[0] = r[p];
                sir[1] = g[p];
                sir[2] = b[p];
                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];
                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;
                stackpointer = (stackpointer + 1) % div;
                sir = stack[stackpointer];
                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];
                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];
                yi += w;
            }
        }
        bitmap.setPixels(pix, 0, w, 0, 0, w, h);
        return (bitmap);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.parser.QueryParser;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.common.params.DisMaxParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.util.SolrPluginUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Query parser for dismax queries
* <p>
* <b>Note: This API is experimental and may change in non backward-compatible ways in the future</b>
*
*
*/
public class DisMaxQParser extends QParser {
    /**
     * A field we can't ever find in any schema, so we can safely tell DisjunctionMaxQueryParser to use it as our
     * defaultField, and map aliases from it to any field in our schema.
     */
    // Made final: this is a constant and should never be reassigned.
    private static final String IMPOSSIBLE_FIELD_NAME = "\uFFFC\uFFFC\uFFFC";

    /**
     * Applies the appropriate default rules for the "mm" param based on the
     * effective value of the "q.op" param
     *
     * @see QueryParsing#getQueryParserDefaultOperator
     * @see QueryParsing#OP
     * @see DisMaxParams#MM
     */
    public static String parseMinShouldMatch(final IndexSchema schema,
                                             final SolrParams params) {
        org.apache.solr.parser.QueryParser.Operator op = QueryParsing.getQueryParserDefaultOperator
                (schema, params.get(QueryParsing.OP));
        return params.get(DisMaxParams.MM,
                op.equals(QueryParser.Operator.AND) ? "100%" : "0%");
    }

    /**
     * Uses {@link SolrPluginUtils#parseFieldBoosts(String)} with the 'qf' parameter. Falls back to the 'df' parameter
     * or {@link org.apache.solr.schema.IndexSchema#getDefaultSearchFieldName()}.
     */
    public static Map<String, Float> parseQueryFields(final IndexSchema indexSchema, final SolrParams solrParams)
            throws SyntaxError {
        Map<String, Float> queryFields = SolrPluginUtils.parseFieldBoosts(solrParams.getParams(DisMaxParams.QF));
        if (queryFields.isEmpty()) {
            String df = QueryParsing.getDefaultField(indexSchema, solrParams.get(CommonParams.DF));
            if (df == null) {
                throw new SyntaxError("Neither "+DisMaxParams.QF+", "+CommonParams.DF +", nor the default search field are present.");
            }
            queryFields.put(df, 1.0f);
        }
        return queryFields;
    }

    public DisMaxQParser(String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
        super(qstr, localParams, params, req);
    }

    /** Query fields with boosts, parsed from the 'qf' (or 'df') parameter. */
    protected Map<String, Float> queryFields;
    /** The parsed main user query, null when only the alternate query applies. */
    protected Query parsedUserQuery;
    /** Raw 'bq' parameter values, kept for debug output. */
    protected String[] boostParams;
    /** Parsed boost queries from 'bq', or null when none were supplied. */
    protected List<Query> boostQueries;
    /** Parsed 'q.alt' query, used when the user query is blank. */
    protected Query altUserQuery;
    // Tracks whether parse() has run, so getHighlightQuery can parse lazily.
    private boolean parsed = false;

    /**
     * Parses the dismax request into a BooleanQuery combining the main (or
     * alternate) user query with any boost queries and boost functions.
     *
     * @return the assembled query, or null when there is neither a user query
     *         nor an alternate query
     */
    @Override
    public Query parse() throws SyntaxError {
        parsed = true;
        SolrParams solrParams = SolrParams.wrapDefaults(localParams, params);
        queryFields = parseQueryFields(req.getSchema(), solrParams);
        /* the main query we will execute. we disable the coord because
         * this query is an artificial construct
         */
        BooleanQuery query = new BooleanQuery(true);
        boolean notBlank = addMainQuery(query, solrParams);
        if (!notBlank)
            return null;
        addBoostQuery(query, solrParams);
        addBoostFunctions(query, solrParams);
        return query;
    }

    /**
     * Adds each non-empty 'bf' boost function to the query as an optional
     * (SHOULD) function-query clause, applying any per-function boost.
     */
    protected void addBoostFunctions(BooleanQuery query, SolrParams solrParams) throws SyntaxError {
        String[] boostFuncs = solrParams.getParams(DisMaxParams.BF);
        if (null != boostFuncs && 0 != boostFuncs.length) {
            for (String boostFunc : boostFuncs) {
                if (null == boostFunc || "".equals(boostFunc)) continue;
                Map<String, Float> ff = SolrPluginUtils.parseFieldBoosts(boostFunc);
                for (String f : ff.keySet()) {
                    Query fq = subQuery(f, FunctionQParserPlugin.NAME).getQuery();
                    Float b = ff.get(f);
                    if (null != b) {
                        fq.setBoost(b);
                    }
                    query.add(fq, BooleanClause.Occur.SHOULD);
                }
            }
        }
    }

    /**
     * Parses the 'bq' parameters and adds them to the query as optional
     * (SHOULD) clauses. A single unboosted BooleanQuery 'bq' is flattened
     * into its clauses for backwards compatibility.
     */
    protected void addBoostQuery(BooleanQuery query, SolrParams solrParams) throws SyntaxError {
        boostParams = solrParams.getParams(DisMaxParams.BQ);
        //List<Query> boostQueries = SolrPluginUtils.parseQueryStrings(req, boostParams);
        boostQueries = null;
        if (boostParams != null && boostParams.length > 0) {
            boostQueries = new ArrayList<>();
            for (String qs : boostParams) {
                if (qs.trim().length() == 0) continue;
                Query q = subQuery(qs, null).getQuery();
                boostQueries.add(q);
            }
        }
        if (null != boostQueries) {
            if (1 == boostQueries.size() && 1 == boostParams.length) {
                /* legacy logic */
                Query f = boostQueries.get(0);
                if (1.0f == f.getBoost() && f instanceof BooleanQuery) {
                    /* if the default boost was used, and we've got a BooleanQuery
                     * extract the subqueries out and use them directly
                     */
                    for (Object c : ((BooleanQuery) f).clauses()) {
                        query.add((BooleanClause) c);
                    }
                } else {
                    query.add(f, BooleanClause.Occur.SHOULD);
                }
            } else {
                for (Query f : boostQueries) {
                    query.add(f, BooleanClause.Occur.SHOULD);
                }
            }
        }
    }

    /** Adds the main query to the query argument. If it's blank then false is returned. */
    protected boolean addMainQuery(BooleanQuery query, SolrParams solrParams) throws SyntaxError {
        Map<String, Float> phraseFields = SolrPluginUtils.parseFieldBoosts(solrParams.getParams(DisMaxParams.PF));
        float tiebreaker = solrParams.getFloat(DisMaxParams.TIE, 0.0f);
        /* a parser for dealing with user input, which will convert
         * things to DisjunctionMaxQueries
         */
        SolrPluginUtils.DisjunctionMaxQueryParser up = getParser(queryFields, DisMaxParams.QS, solrParams, tiebreaker);
        /* for parsing sloppy phrases using DisjunctionMaxQueries */
        SolrPluginUtils.DisjunctionMaxQueryParser pp = getParser(phraseFields, DisMaxParams.PS, solrParams, tiebreaker);
        /* * * Main User Query * * */
        parsedUserQuery = null;
        String userQuery = getString();
        altUserQuery = null;
        if (userQuery == null || userQuery.trim().length() < 1) {
            // If no query is specified, we may have an alternate
            altUserQuery = getAlternateUserQuery(solrParams);
            if (altUserQuery == null)
                return false;
            query.add(altUserQuery, BooleanClause.Occur.MUST);
        } else {
            // There is a valid query string
            userQuery = SolrPluginUtils.partialEscape(SolrPluginUtils.stripUnbalancedQuotes(userQuery)).toString();
            userQuery = SolrPluginUtils.stripIllegalOperators(userQuery).toString();
            parsedUserQuery = getUserQuery(userQuery, up, solrParams);
            query.add(parsedUserQuery, BooleanClause.Occur.MUST);
            Query phrase = getPhraseQuery(userQuery, pp);
            if (null != phrase) {
                query.add(phrase, BooleanClause.Occur.SHOULD);
            }
        }
        return true;
    }

    /** Parses the 'q.alt' parameter, returning null when it is absent. */
    protected Query getAlternateUserQuery(SolrParams solrParams) throws SyntaxError {
        String altQ = solrParams.get(DisMaxParams.ALTQ);
        if (altQ != null) {
            QParser altQParser = subQuery(altQ, null);
            return altQParser.getQuery();
        } else {
            return null;
        }
    }

    /**
     * Builds the phrase-boost query for the user query, stripping any quotes
     * the user supplied and re-quoting the whole string.
     */
    protected Query getPhraseQuery(String userQuery, SolrPluginUtils.DisjunctionMaxQueryParser pp) throws SyntaxError {
        /* * * Add on Phrases for the Query * * */
        /* build up phrase boosting queries */
        /* if the userQuery already has some quotes, strip them out.
         * we've already done the phrases they asked for in the main
         * part of the query, this is to boost docs that may not have
         * matched those phrases but do match looser phrases.
         */
        String userPhraseQuery = userQuery.replace("\"", "");
        return pp.parse("\"" + userPhraseQuery + "\"");
    }

    /**
     * Parses the main user query and, when it is a BooleanQuery, flattens it
     * and applies the effective min-should-match setting.
     */
    protected Query getUserQuery(String userQuery, SolrPluginUtils.DisjunctionMaxQueryParser up, SolrParams solrParams)
            throws SyntaxError {
        String minShouldMatch = parseMinShouldMatch(req.getSchema(), solrParams);
        Query dis = up.parse(userQuery);
        Query query = dis;
        if (dis instanceof BooleanQuery) {
            BooleanQuery t = new BooleanQuery();
            SolrPluginUtils.flattenBooleanQuery(t, (BooleanQuery) dis);
            SolrPluginUtils.setMinShouldMatch(t, minShouldMatch);
            query = t;
        }
        return query;
    }

    /**
     * Creates a DisjunctionMaxQueryParser aliased onto the given fields, with
     * phrase slop taken from the named parameter (e.g. 'qs' or 'ps').
     */
    protected SolrPluginUtils.DisjunctionMaxQueryParser getParser(Map<String, Float> fields, String paramName,
                                                                 SolrParams solrParams, float tiebreaker) {
        int slop = solrParams.getInt(paramName, 0);
        SolrPluginUtils.DisjunctionMaxQueryParser parser = new SolrPluginUtils.DisjunctionMaxQueryParser(this,
                IMPOSSIBLE_FIELD_NAME);
        parser.addAlias(IMPOSSIBLE_FIELD_NAME, tiebreaker, fields);
        parser.setPhraseSlop(slop);
        return parser;
    }

    /** Highlighting defaults to the parsed query fields. */
    @Override
    public String[] getDefaultHighlightFields() {
        return queryFields.keySet().toArray(new String[queryFields.keySet().size()]);
    }

    /** Returns the main user query for highlighting, parsing lazily if needed. */
    @Override
    public Query getHighlightQuery() throws SyntaxError {
        if (!parsed)
            parse();
        return parsedUserQuery == null ? altUserQuery : parsedUserQuery;
    }

    /** Adds dismax-specific entries (alt query, boost queries/functions) to debug output. */
    @Override
    public void addDebugInfo(NamedList<Object> debugInfo) {
        super.addDebugInfo(debugInfo);
        debugInfo.add("altquerystring", altUserQuery);
        if (null != boostQueries) {
            debugInfo.add("boost_queries", boostParams);
            debugInfo.add("parsed_boost_queries",
                    QueryParsing.toString(boostQueries, req.getSchema()));
        }
        debugInfo.add("boostfuncs", req.getParams().getParams(DisMaxParams.BF));
    }
}
| |
/*
* FIME (Fast Infoset ME) software ("Software")
*
* Copyright, 2005 Sun Microsystems, Inc. All Rights Reserved.
*
* Software is licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations.
*
* Sun supports and benefits from the global community of open source
* developers, and thanks the community for its important contributions and
* open standards-based technology, which Sun has adopted into many of its
* products.
*
* Please note that portions of Software may be provided with notices and
* open source licenses from such communities and third parties that govern the
* use of those portions, and any licenses granted hereunder do not alter any
* rights and obligations you may have under such open source licenses,
* however, the disclaimer of warranty and limitation of liability provisions
* in this License will apply to all Software in this distribution.
*
* You acknowledge that the Software is not designed, licensed or intended
* for use in the design, construction, operation or maintenance of any nuclear
* facility.
*
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
*/
package com.sun.xml.fime.stax;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.NoSuchElementException;
import javax.xml.namespace.NamespaceContext;
import javax.xml.namespace.QName;
import javax.xml.stream.Location;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.apache.mirae.util.CharacterUtil;
import com.sun.xml.fime.Decoder;
import com.sun.xml.fime.DecoderStateTables;
import com.sun.xml.fime.EncodingConstants;
import com.sun.xml.fime.QualifiedName;
import com.sun.xml.fime.algorithm.BuiltInEncodingAlgorithmFactory;
import com.sun.xml.fime.jvnet.EncodingAlgorithm;
import com.sun.xml.fime.jvnet.EncodingAlgorithmException;
import com.sun.xml.fime.jvnet.EncodingAlgorithmIndexes;
import com.sun.xml.fime.jvnet.FastInfosetException;
import com.sun.xml.fime.util.CharArray;
import com.sun.xml.fime.util.CharArrayString;
import com.sun.xml.fime.util.DuplicateAttributeVerifier;
import com.sun.xml.fime.util.EventLocation;
import com.sun.xml.fime.util.MessageCenter;
public class StAXDocumentParser extends Decoder implements XMLStreamReader {
    // Internal parser states driving the event loop in next().
    protected static final int INTERNAL_STATE_START_DOCUMENT = 0;
    protected static final int INTERNAL_STATE_START_ELEMENT_TERMINATE = 1;
    protected static final int INTERNAL_STATE_SINGLE_TERMINATE_ELEMENT_WITH_NAMESPACES = 2;
    protected static final int INTERNAL_STATE_DOUBLE_TERMINATE_ELEMENT = 3;
    protected static final int INTERNAL_STATE_END_DOCUMENT = 4;
    protected static final int INTERNAL_STATE_VOID = -1;
    // Current internal state, one of the INTERNAL_STATE_* constants.
    protected int _internalState;
    /**
     * Current event
     */
    protected int _eventType;
    /**
     * Stack of qualified names and namespaces
     */
    protected QualifiedName[] _qNameStack = new QualifiedName[32];
    protected int[] _namespaceAIIsStartStack = new int[32];
    protected int[] _namespaceAIIsEndStack = new int[32];
    // Top-of-stack index; -1 means the stack is empty.
    protected int _stackCount = -1;
    // Namespace attribute information items declared on open elements.
    protected String[] _namespaceAIIsPrefix = new String[32];
    protected String[] _namespaceAIIsNamespaceName = new String[32];
    protected int[] _namespaceAIIsPrefixIndex = new int[32];
    protected int _namespaceAIIsIndex;
    /**
     * Namespaces associated with START_ELEMENT or END_ELEMENT
     */
    protected int _currentNamespaceAIIsStart;
    protected int _currentNamespaceAIIsEnd;
    /**
     * Qualified name associated with START_ELEMENT or END_ELEMENT.
     */
    protected QualifiedName _qualifiedName;
    /**
     * List of attributes
     */
    protected AttributesHolder _attributes = new AttributesHolder();
    protected boolean _clearAttributes = false;
    /**
     * Characters associated with event.
     */
    protected char[] _characters;
    protected int _charactersOffset;
    protected int _charactersLength;
    // Encoding-algorithm data for the current event.
    protected String _algorithmURI;
    protected int _algorithmId;
    protected byte[] _algorithmData;
    protected int _algorithmDataOffset;
    protected int _algorithmDataLength;
    /**
     * State for processing instruction
     */
    protected String _piTarget;
    protected String _piData;
    // Namespace context exposed through the XMLStreamReader API.
    protected NamespaceContextImpl _nsContext = new NamespaceContextImpl();
    protected String _characterEncodingScheme;
    // Optional manager supplying reader properties; may be null.
    protected StAXManager _manager;
    /** Creates a parser with no input; an input stream must be set before use. */
    public StAXDocumentParser() {
        reset();
    }

    /** Creates a parser reading from the given input stream. */
    public StAXDocumentParser(InputStream s) {
        this();
        setInputStream(s);
    }

    /** Creates a parser reading from the given stream with an explicit manager. */
    public StAXDocumentParser(InputStream s, StAXManager manager) {
        this(s);
        _manager = manager;
    }

    /** Sets the input stream and resets parser state for a new document. */
    public void setInputStream(InputStream s) {
        super.setInputStream(s);
        reset();
    }
    /**
     * Resets the parser so it can be reused for a new document. If a previous
     * parse stopped mid-document, outstanding namespace scopes are popped and
     * the element/character state is cleared first.
     */
    public void reset() {
        super.reset();
        if (_internalState != INTERNAL_STATE_START_DOCUMENT &&
                _internalState != INTERNAL_STATE_END_DOCUMENT) {
            // Unwind namespace declarations left over from an interrupted parse.
            for (int i = _namespaceAIIsIndex - 1; i >= 0; i--) {
                _prefixTable.popScopeWithPrefixEntry(_namespaceAIIsPrefixIndex[i]);
            }
            _stackCount = -1;
            _namespaceAIIsIndex = 0;
            _characters = null;
            _algorithmData = null;
        }
        _characterEncodingScheme = "UTF-8";
        _eventType = START_DOCUMENT;
        _internalState = INTERNAL_STATE_START_DOCUMENT;
    }
    /**
     * Resets the parser after an error: clears the prefix table and duplicate
     * attribute verifier completely, then restores start-of-document state.
     */
    protected void resetOnError() {
        super.reset();
        // NOTE(review): guards on _v but clears _prefixTable — confirm that a
        // non-null vocabulary implies the prefix table is populated.
        if (_v != null) {
            _prefixTable.clearCompletely();
        }
        _duplicateAttributeVerifier.clear();
        _stackCount = -1;
        _namespaceAIIsIndex = 0;
        _characters = null;
        _algorithmData = null;
        _eventType = START_DOCUMENT;
        _internalState = INTERNAL_STATE_START_DOCUMENT;
    }
// -- XMLStreamReader Interface -------------------------------------------
public Object getProperty(java.lang.String name)
throws java.lang.IllegalArgumentException {
if (_manager != null) {
return _manager.getProperty(name);
}
return null;
}
public int next() throws XMLStreamException {
try {
if (_internalState != INTERNAL_STATE_VOID) {
switch (_internalState) {
case INTERNAL_STATE_START_DOCUMENT:
decodeHeader();
processDII();
_internalState = INTERNAL_STATE_VOID;
break;
case INTERNAL_STATE_START_ELEMENT_TERMINATE:
if (_currentNamespaceAIIsEnd > 0) {
for (int i = _currentNamespaceAIIsEnd - 1; i >= _currentNamespaceAIIsStart; i--) {
_prefixTable.popScopeWithPrefixEntry(_namespaceAIIsPrefixIndex[i]);
}
_namespaceAIIsIndex = _currentNamespaceAIIsStart;
}
// Pop information off the stack
popStack();
_internalState = INTERNAL_STATE_VOID;
return _eventType = END_ELEMENT;
case INTERNAL_STATE_SINGLE_TERMINATE_ELEMENT_WITH_NAMESPACES:
// Undeclare namespaces
for (int i = _currentNamespaceAIIsEnd - 1; i >= _currentNamespaceAIIsStart; i--) {
_prefixTable.popScopeWithPrefixEntry(_namespaceAIIsPrefixIndex[i]);
}
_namespaceAIIsIndex = _currentNamespaceAIIsStart;
_internalState = INTERNAL_STATE_VOID;
break;
case INTERNAL_STATE_DOUBLE_TERMINATE_ELEMENT:
// Undeclare namespaces
if (_currentNamespaceAIIsEnd > 0) {
for (int i = _currentNamespaceAIIsEnd - 1; i >= _currentNamespaceAIIsStart; i--) {
_prefixTable.popScopeWithPrefixEntry(_namespaceAIIsPrefixIndex[i]);
}
_namespaceAIIsIndex = _currentNamespaceAIIsStart;
}
if (_stackCount == -1) {
_internalState = INTERNAL_STATE_END_DOCUMENT;
return _eventType = END_DOCUMENT;
}
// Pop information off the stack
popStack();
_internalState = (_currentNamespaceAIIsEnd > 0) ?
INTERNAL_STATE_SINGLE_TERMINATE_ELEMENT_WITH_NAMESPACES :
INTERNAL_STATE_VOID;
return _eventType = END_ELEMENT;
case INTERNAL_STATE_END_DOCUMENT:
throw new NoSuchElementException(MessageCenter.getString("message.noMoreEvents"));
}
}
// Reset internal state
_characters = null;
_algorithmData = null;
_currentNamespaceAIIsEnd = 0;
// Process information item
final int b = read();
switch(DecoderStateTables.EII[b]) {
case DecoderStateTables.EII_NO_AIIS_INDEX_SMALL:
processEII(_elementNameTable._array[b], false);
return _eventType;
case DecoderStateTables.EII_AIIS_INDEX_SMALL:
processEII(_elementNameTable._array[b & EncodingConstants.INTEGER_3RD_BIT_SMALL_MASK], true);
return _eventType;
case DecoderStateTables.EII_INDEX_MEDIUM:
processEII(processEIIIndexMedium(b), (b & EncodingConstants.ELEMENT_ATTRIBUTE_FLAG) > 0);
return _eventType;
case DecoderStateTables.EII_INDEX_LARGE:
processEII(processEIIIndexLarge(b), (b & EncodingConstants.ELEMENT_ATTRIBUTE_FLAG) > 0);
return _eventType;
case DecoderStateTables.EII_LITERAL:
{
final QualifiedName qn = processLiteralQualifiedName(
b & EncodingConstants.LITERAL_QNAME_PREFIX_NAMESPACE_NAME_MASK);
_elementNameTable.add(qn);
processEII(qn, (b & EncodingConstants.ELEMENT_ATTRIBUTE_FLAG) > 0);
return _eventType;
}
case DecoderStateTables.EII_NAMESPACES:
processEIIWithNamespaces((b & EncodingConstants.ELEMENT_ATTRIBUTE_FLAG) > 0);
return _eventType;
case DecoderStateTables.CII_UTF8_SMALL_LENGTH:
_octetBufferLength = (b & EncodingConstants.OCTET_STRING_LENGTH_7TH_BIT_SMALL_MASK)
+ 1;
decodeUtf8StringAsCharBuffer();
if ((b & EncodingConstants.CHARACTER_CHUNK_ADD_TO_TABLE_FLAG) > 0) {
_characterContentChunkTable.add(_charBuffer, _charBufferLength);
}
_characters = _charBuffer;
_charactersOffset = 0;
_charactersLength = _charBufferLength;
return _eventType = CHARACTERS;
case DecoderStateTables.CII_UTF8_MEDIUM_LENGTH:
_octetBufferLength = read() + EncodingConstants.OCTET_STRING_LENGTH_7TH_BIT_SMALL_LIMIT;
decodeUtf8StringAsCharBuffer();
if ((b & EncodingConstants.CHARACTER_CHUNK_ADD_TO_TABLE_FLAG) > 0) {
_characterContentChunkTable.add(_charBuffer, _charBufferLength);
}
_characters = _charBuffer;
_charactersOffset = 0;
_charactersLength = _charBufferLength;
return _eventType = CHARACTERS;
case DecoderStateTables.CII_UTF8_LARGE_LENGTH:
_octetBufferLength = ((read() << 24) |
(read() << 16) |
(read() << 8) |
read())
+ EncodingConstants.OCTET_STRING_LENGTH_7TH_BIT_MEDIUM_LIMIT;
decodeUtf8StringAsCharBuffer();
if ((b & EncodingConstants.CHARACTER_CHUNK_ADD_TO_TABLE_FLAG) > 0) {
_characterContentChunkTable.add(_charBuffer, _charBufferLength);
}
_characters = _charBuffer;
_charactersOffset = 0;
_charactersLength = _charBufferLength;
return _eventType = CHARACTERS;
case DecoderStateTables.CII_UTF16_SMALL_LENGTH:
_octetBufferLength = (b & EncodingConstants.OCTET_STRING_LENGTH_7TH_BIT_SMALL_MASK)
+ 1;
decodeUtf16StringAsCharBuffer();
if ((b & EncodingConstants.CHARACTER_CHUNK_ADD_TO_TABLE_FLAG) > 0) {
_characterContentChunkTable.add(_charBuffer, _charBufferLength);
}
_characters = _charBuffer;
_charactersOffset = 0;
_charactersLength = _charBufferLength;
return _eventType = CHARACTERS;
case DecoderStateTables.CII_UTF16_MEDIUM_LENGTH:
_octetBufferLength = read() + EncodingConstants.OCTET_STRING_LENGTH_7TH_BIT_SMALL_LIMIT;
decodeUtf16StringAsCharBuffer();
if ((b & EncodingConstants.CHARACTER_CHUNK_ADD_TO_TABLE_FLAG) > 0) {
_characterContentChunkTable.add(_charBuffer, _charBufferLength);
}
_characters = _charBuffer;
_charactersOffset = 0;
_charactersLength = _charBufferLength;
return _eventType = CHARACTERS;
case DecoderStateTables.CII_UTF16_LARGE_LENGTH:
_octetBufferLength = ((read() << 24) |
(read() << 16) |
(read() << 8) |
read())
+ EncodingConstants.OCTET_STRING_LENGTH_7TH_BIT_MEDIUM_LIMIT;
decodeUtf16StringAsCharBuffer();
if ((b & EncodingConstants.CHARACTER_CHUNK_ADD_TO_TABLE_FLAG) > 0) {
_characterContentChunkTable.add(_charBuffer, _charBufferLength);
}
_characters = _charBuffer;
_charactersOffset = 0;
_charactersLength = _charBufferLength;
return _eventType = CHARACTERS;
case DecoderStateTables.CII_RA:
{
final boolean addToTable = (_b & EncodingConstants.CHARACTER_CHUNK_ADD_TO_TABLE_FLAG) > 0;
_identifier = (b & 0x02) << 6;
final int b2 = read();
_identifier |= (b2 & 0xFC) >> 2;
decodeOctetsOnSeventhBitOfNonIdentifyingStringOnThirdBit(b2);
decodeRestrictedAlphabetAsCharBuffer();
if (addToTable) {
_characterContentChunkTable.add(_charBuffer, _charBufferLength);
}
_characters = _charBuffer;
_charactersOffset = 0;
_charactersLength = _charBufferLength;
return _eventType = CHARACTERS;
}
case DecoderStateTables.CII_EA:
{
if ((b & EncodingConstants.NISTRING_ADD_TO_TABLE_FLAG) > 0) {
throw new EncodingAlgorithmException(MessageCenter.getString("message.addToTableNotSupported"));
}
// Decode encoding algorithm integer
_algorithmId = (b & 0x02) << 6;
final int b2 = read();
_algorithmId |= (b2 & 0xFC) >> 2;
decodeOctetsOnSeventhBitOfNonIdentifyingStringOnThirdBit(b2);
processCIIEncodingAlgorithm();
return _eventType = CHARACTERS;
}
case DecoderStateTables.CII_INDEX_SMALL:
{
final int index = b & EncodingConstants.INTEGER_4TH_BIT_SMALL_MASK;
_characters = _characterContentChunkTable._array;
_charactersOffset = _characterContentChunkTable._offset[index];
_charactersLength = _characterContentChunkTable._length[index];
return _eventType = CHARACTERS;
}
case DecoderStateTables.CII_INDEX_MEDIUM:
{
final int index = (((b & EncodingConstants.INTEGER_4TH_BIT_MEDIUM_MASK) << 8) | read())
+ EncodingConstants.INTEGER_4TH_BIT_SMALL_LIMIT;
_characters = _characterContentChunkTable._array;
_charactersOffset = _characterContentChunkTable._offset[index];
_charactersLength = _characterContentChunkTable._length[index];
return _eventType = CHARACTERS;
}
case DecoderStateTables.CII_INDEX_LARGE:
{
final int index = (((b & EncodingConstants.INTEGER_4TH_BIT_LARGE_MASK) << 16) |
(read() << 8) |
read())
+ EncodingConstants.INTEGER_4TH_BIT_MEDIUM_LIMIT;
_characters = _characterContentChunkTable._array;
_charactersOffset = _characterContentChunkTable._offset[index];
_charactersLength = _characterContentChunkTable._length[index];
return _eventType = CHARACTERS;
}
case DecoderStateTables.CII_INDEX_LARGE_LARGE:
{
final int index = ((read() << 16) |
(read() << 8) |
read())
+ EncodingConstants.INTEGER_4TH_BIT_LARGE_LIMIT;
_characters = _characterContentChunkTable._array;
_charactersOffset = _characterContentChunkTable._offset[index];
_charactersLength = _characterContentChunkTable._length[index];
return _eventType = CHARACTERS;
}
case DecoderStateTables.COMMENT_II:
processCommentII();
return _eventType;
case DecoderStateTables.PROCESSING_INSTRUCTION_II:
processProcessingII();
return _eventType;
case DecoderStateTables.UNEXPANDED_ENTITY_REFERENCE_II:
{
/*
* TODO
* How does StAX report such events?
*/
return _eventType;
}
case DecoderStateTables.TERMINATOR_DOUBLE:
if (_stackCount != -1) {
// Pop information off the stack
popStack();
_internalState = INTERNAL_STATE_DOUBLE_TERMINATE_ELEMENT;
return _eventType = END_ELEMENT;
}
_internalState = INTERNAL_STATE_END_DOCUMENT;
return _eventType = END_DOCUMENT;
case DecoderStateTables.TERMINATOR_SINGLE:
if (_stackCount != -1) {
// Pop information off the stack
popStack();
if (_currentNamespaceAIIsEnd > 0) {
_internalState = INTERNAL_STATE_SINGLE_TERMINATE_ELEMENT_WITH_NAMESPACES;
}
return _eventType = END_ELEMENT;
}
_internalState = INTERNAL_STATE_END_DOCUMENT;
return _eventType = END_DOCUMENT;
default:
throw new FastInfosetException(MessageCenter.getString("message.IllegalStateDecodingEII"));
}
} catch (IOException e) {
resetOnError();
e.printStackTrace();
throw new XMLStreamException(e);
} catch (FastInfosetException e) {
resetOnError();
e.printStackTrace();
throw new XMLStreamException(e);
} catch (RuntimeException e) {
resetOnError();
e.printStackTrace();
throw e;
}
}
/**
 * Pops the enclosing element's state off the internal element stack,
 * restoring its qualified name and the start/end indexes of its
 * namespace declarations as the "current" element state.
 */
private final void popStack() {
    // Pop information off the stack
    _qualifiedName = _qNameStack[_stackCount];
    _currentNamespaceAIIsStart = _namespaceAIIsStartStack[_stackCount];
    _currentNamespaceAIIsEnd = _namespaceAIIsEndStack[_stackCount];
    // Clear the slot before decrementing so the popped QualifiedName
    // does not linger in the array and can be garbage collected.
    _qNameStack[_stackCount--] = null;
}
/**
 * Verifies that the current event matches the given type, namespace URI
 * and local name. A {@code null} namespaceURI or localName is not
 * checked for equality.
 *
 * @param type the expected event type
 * @param namespaceURI the expected namespace URI, may be null
 * @param localName the expected local name, may be null
 * @throws XMLStreamException if any non-null expectation is not met
 */
public final void require(int type, String namespaceURI, String localName)
        throws XMLStreamException {
    if (_eventType != type) {
        throw new XMLStreamException(MessageCenter.getString("message.eventTypeNotMatch", new Object[]{getEventTypeString(type)}));
    }
    if (namespaceURI != null && !namespaceURI.equals(getNamespaceURI())) {
        throw new XMLStreamException(MessageCenter.getString("message.namespaceURINotMatch", new Object[]{namespaceURI}));
    }
    if (localName != null && !localName.equals(getLocalName())) {
        throw new XMLStreamException(MessageCenter.getString("message.localNameNotMatch", new Object[]{localName}));
    }
}
/**
 * Reads the content of a text-only element.
 * Precondition: the current event is START_ELEMENT.
 * Postcondition: the current event is the corresponding END_ELEMENT.
 *
 * @return the accumulated text content of the element
 * @throws XMLStreamException if the current event is not START_ELEMENT
 *         or a non-text child element is encountered
 */
public final String getElementText() throws XMLStreamException {
    if (getEventType() != START_ELEMENT) {
        throw new XMLStreamException(
                MessageCenter.getString("message.mustBeOnSTARTELEMENT"), getLocation());
    }
    // Consume the START_ELEMENT, then accumulate text up to END_ELEMENT.
    next();
    return getElementText(true);
}
/**
 * Accumulates the character content of the current element up to its
 * matching END_ELEMENT, skipping comments and processing instructions.
 *
 * @param startElementRead flag indicating the START_ELEMENT has already
 *        been consumed; must be true
 * @return the concatenated text content
 * @throws XMLStreamException if startElementRead is false, a child
 *         element is found, or the document ends prematurely
 */
public final String getElementText(boolean startElementRead) throws XMLStreamException {
    if (!startElementRead) {
        throw new XMLStreamException(
                MessageCenter.getString("message.mustBeOnSTARTELEMENT"), getLocation());
    }
    int eventType = getEventType();
    // StringBuilder: the buffer is method-local, so the synchronized
    // StringBuffer previously used here bought nothing.
    final StringBuilder content = new StringBuilder();
    while (eventType != END_ELEMENT) {
        if (eventType == CHARACTERS
                || eventType == CDATA
                || eventType == SPACE
                || eventType == ENTITY_REFERENCE) {
            content.append(getText());
        } else if (eventType == PROCESSING_INSTRUCTION
                || eventType == COMMENT) {
            // Skip PIs and comments embedded within text content.
        } else if (eventType == END_DOCUMENT) {
            throw new XMLStreamException(MessageCenter.getString("message.unexpectedEOF"));
        } else if (eventType == START_ELEMENT) {
            throw new XMLStreamException(
                    MessageCenter.getString("message.getElementTextExpectTextOnly"), getLocation());
        } else {
            throw new XMLStreamException(
                    MessageCenter.getString("message.unexpectedEventType") + getEventTypeString(eventType), getLocation());
        }
        eventType = next();
    }
    return content.toString();
}
/**
 * Skips white space, COMMENT and PROCESSING_INSTRUCTION events until a
 * START_ELEMENT or END_ELEMENT is reached. Use when processing
 * element-only content, since ignorable whitespace cannot be recognized
 * without a DTD.
 *
 * @return the event type of the element event reached
 * @throws XMLStreamException if a non-skippable, non-element event is
 *         encountered
 */
public final int nextTag() throws XMLStreamException {
    // Advance once, then let the shared implementation skip ignorable events.
    next();
    return nextTag(true);
}
/**
 * Variant of {@link #nextTag()} for callers (such as the event reader's
 * peek()) that may already have consumed the current tag; when
 * {@code currentTagRead} is true the cursor is not advanced before the
 * skip loop.
 *
 * @param currentTagRead true if the current event should be examined
 *        before advancing
 * @return the event type of the element event reached
 * @throws XMLStreamException if a non-skippable, non-element event is
 *         encountered
 */
public final int nextTag(boolean currentTagRead) throws XMLStreamException {
    int eventType = currentTagRead ? getEventType() : next();
    // Skip everything element-only content may legitimately contain.
    while ((eventType == CHARACTERS && isWhiteSpace())
            || (eventType == CDATA && isWhiteSpace())
            || eventType == SPACE
            || eventType == PROCESSING_INSTRUCTION
            || eventType == COMMENT) {
        eventType = next();
    }
    if (eventType != START_ELEMENT && eventType != END_ELEMENT) {
        throw new XMLStreamException(MessageCenter.getString("message.expectedStartOrEnd"), getLocation());
    }
    return eventType;
}
/** @return true until END_DOCUMENT has been reached. */
public final boolean hasNext() throws XMLStreamException {
    return _eventType != END_DOCUMENT;
}

/** Releases parser resources; this implementation holds none. */
public void close() throws XMLStreamException {
    // Nothing to release.
}
/**
 * Resolves a prefix against the namespace declarations currently in
 * scope.
 *
 * @param prefix the prefix to resolve; must not be null
 * @return the bound namespace URI, or null if the prefix is unbound
 * @throws IllegalArgumentException if prefix is null
 */
public final String getNamespaceURI(String prefix) {
    // Reject a null prefix unconditionally, as the XMLStreamReader
    // contract requires; the original only threw when the lookup also
    // happened to return null.
    if (prefix == null) {
        throw new IllegalArgumentException(MessageCenter.getString("message.nullPrefix"));
    }
    return getNamespaceDecl(prefix); // null means unbound
}
/** @return true if the current event is START_ELEMENT. */
public final boolean isStartElement() {
    return _eventType == START_ELEMENT;
}

/** @return true if the current event is END_ELEMENT. */
public final boolean isEndElement() {
    return _eventType == END_ELEMENT;
}

/** @return true if the current event is CHARACTERS. */
public final boolean isCharacters() {
    return _eventType == CHARACTERS;
}
/**
 * Returns true if the cursor points to a character data event that
 * consists entirely of white space. Callers should cache the result and
 * avoid calling this repeatedly for the same event.
 *
 * @return true if the current text event is all whitespace, false
 *         otherwise (including for non-text events)
 */
public final boolean isWhiteSpace() {
    if (isCharacters() || _eventType == CDATA) {
        final char[] ch = this.getTextCharacters();
        final int start = this.getTextStart();
        final int length = this.getTextLength();
        // Bug fix: the text occupies [start, start + length). The previous
        // upper bound of 'length' scanned the wrong range whenever the
        // text did not begin at offset 0 (e.g. table-indexed chunks whose
        // offset into the shared buffer is non-zero).
        final int end = start + length;
        for (int i = start; i < end; i++) {
            if (!CharacterUtil.isWhitespace(ch[i])) {
                return false;
            }
        }
        return true;
    }
    return false;
}
/**
 * Returns the value of the attribute with the given namespace and local
 * name on the current START_ELEMENT. Per the XMLStreamReader contract,
 * a null namespaceURI means the namespace is not checked for equality.
 *
 * @param namespaceURI the attribute's namespace, or null to match any
 * @param localName the attribute's local name
 * @return the attribute value, or null if no such attribute exists
 * @throws IllegalStateException if the current event is not START_ELEMENT
 */
public final String getAttributeValue(String namespaceURI, String localName) {
    if (_eventType != START_ELEMENT) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetAttributeValue"));
    }
    // Search for the attribute in _attributes. Compare with equals():
    // the caller's strings need not be the same interned instances held
    // by the attribute table, so the previous == comparison could miss.
    for (int i = 0; i < _attributes.getLength(); i++) {
        if (_attributes.getLocalName(i).equals(localName)
                && (namespaceURI == null || namespaceURI.equals(_attributes.getURI(i)))) {
            return _attributes.getValue(i);
        }
    }
    return null;
}
/**
 * Throws unless the current event is START_ELEMENT; shared guard for
 * all indexed attribute accessors below.
 */
private void requireStartElementForAttributes() {
    if (_eventType != START_ELEMENT) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetAttributeValue"));
    }
}

/** @return the number of attributes of the current START_ELEMENT. */
public final int getAttributeCount() {
    requireStartElementForAttributes();
    return _attributes.getLength();
}

/** @return the qualified name of the attribute at the given index. */
public final javax.xml.namespace.QName getAttributeName(int index) {
    requireStartElementForAttributes();
    return _attributes.getQualifiedName(index).getQName();
}

/** @return the namespace URI of the attribute at the given index. */
public final String getAttributeNamespace(int index) {
    requireStartElementForAttributes();
    return _attributes.getURI(index);
}

/** @return the local name of the attribute at the given index. */
public final String getAttributeLocalName(int index) {
    requireStartElementForAttributes();
    return _attributes.getLocalName(index);
}

/** @return the prefix of the attribute at the given index. */
public final String getAttributePrefix(int index) {
    requireStartElementForAttributes();
    return _attributes.getPrefix(index);
}

/** @return the type of the attribute at the given index. */
public final String getAttributeType(int index) {
    requireStartElementForAttributes();
    return _attributes.getType(index);
}

/** @return the value of the attribute at the given index. */
public final String getAttributeValue(int index) {
    requireStartElementForAttributes();
    return _attributes.getValue(index);
}

/**
 * @return false always: this is a non-validating parser, so defaulted
 *         attributes are never reported.
 */
public final boolean isAttributeSpecified(int index) {
    return false;
}
/** @return the number of namespace declarations on the current element event. */
public final int getNamespaceCount() {
    if (_eventType != START_ELEMENT && _eventType != END_ELEMENT) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetNamespaceCount"));
    }
    return (_currentNamespaceAIIsEnd > 0)
            ? _currentNamespaceAIIsEnd - _currentNamespaceAIIsStart
            : 0;
}

/** @return the prefix declared at the given index on the current element event. */
public final String getNamespacePrefix(int index) {
    if (_eventType != START_ELEMENT && _eventType != END_ELEMENT) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetNamespacePrefix"));
    }
    return _namespaceAIIsPrefix[_currentNamespaceAIIsStart + index];
}

/** @return the namespace URI declared at the given index on the current element event. */
public final String getNamespaceURI(int index) {
    if (_eventType != START_ELEMENT && _eventType != END_ELEMENT) {
        // NOTE(review): reuses the "...GetNamespacePrefix" message key,
        // exactly as the original did; kept byte-identical.
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetNamespacePrefix"));
    }
    return _namespaceAIIsNamespaceName[_currentNamespaceAIIsStart + index];
}
/** @return the namespace context tracking the declarations in scope. */
public final NamespaceContext getNamespaceContext() {
    return _nsContext;
}

/** @return the type of the current event. */
public final int getEventType() {
    return _eventType;
}
/**
 * @return the character data of the current event as a new String;
 *         materializes pending encoding-algorithm data first if needed
 */
public final String getText() {
    if (_characters == null) {
        // Convert algorithm data to characters, or fail if there is none.
        checkTextState();
    }
    return new String(_characters, _charactersOffset, _charactersLength);
}
/** @return the internal character buffer backing the current text event. */
public final char[] getTextCharacters() {
    if (_characters == null) {
        checkTextState(); // materialize from algorithm data, or fail
    }
    return _characters;
}

/** @return the offset of the text within {@link #getTextCharacters()}. */
public final int getTextStart() {
    if (_characters == null) {
        checkTextState();
    }
    return _charactersOffset;
}

/** @return the length of the text within {@link #getTextCharacters()}. */
public final int getTextLength() {
    if (_characters == null) {
        checkTextState();
    }
    return _charactersLength;
}
/**
 * Copies a region of the current text into the caller's buffer.
 * NOTE(review): sourceStart indexes the raw internal buffer, not a
 * position relative to getTextStart() — matches the original behavior.
 *
 * @return the number of characters copied (always {@code length})
 * @throws XMLStreamException if the requested region is out of bounds
 */
public final int getTextCharacters(int sourceStart, char[] target,
        int targetStart, int length) throws XMLStreamException {
    if (_characters == null) {
        checkTextState();
    }
    try {
        System.arraycopy(_characters, sourceStart, target, targetStart, length);
    } catch (IndexOutOfBoundsException e) {
        throw new XMLStreamException(e);
    }
    return length;
}
/**
 * Ensures character data is available for the current event, converting
 * pending encoding-algorithm data to characters if possible.
 *
 * @throws IllegalStateException if there is no algorithm data to
 *         convert, or if the conversion fails (with the failure attached
 *         as the cause)
 */
protected final void checkTextState() {
    if (_algorithmData == null) {
        throw new IllegalStateException(MessageCenter.getString("message.InvalidStateForText"));
    }
    try {
        convertEncodingAlgorithmDataToCharacters();
    } catch (Exception e) {
        // Preserve the conversion failure as the cause instead of
        // silently discarding it as the original did.
        throw new IllegalStateException(MessageCenter.getString("message.InvalidStateForText"), e);
    }
}
/** @return the character encoding scheme declared in the document, if any. */
public final String getEncoding() {
    return _characterEncodingScheme;
}

/** @return true if the current event carries text. */
public final boolean hasText() {
    return _characters != null;
}

/**
 * @return a nil location; this parser does not track positions.
 *         (A real location would have to be captured in next().)
 */
public final Location getLocation() {
    return EventLocation.getNilLocation();
}
/** @return the QName of the current element event. */
public final QName getName() {
    if (_eventType != START_ELEMENT && _eventType != END_ELEMENT) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetName"));
    }
    return _qualifiedName.getQName();
}

/** @return the local name of the current element event. */
public final String getLocalName() {
    if (_eventType != START_ELEMENT && _eventType != END_ELEMENT) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetLocalName"));
    }
    return _qualifiedName.localName;
}
/** @return true if the current event has a name (element events only). */
public final boolean hasName() {
    return _eventType == START_ELEMENT || _eventType == END_ELEMENT;
}

/** @return the namespace URI of the current element event. */
public final String getNamespaceURI() {
    if (_eventType != START_ELEMENT && _eventType != END_ELEMENT) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetNamespaceURI"));
    }
    return _qualifiedName.namespaceName;
}

/** @return the prefix of the current element event. */
public final String getPrefix() {
    if (_eventType != START_ELEMENT && _eventType != END_ELEMENT) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetPrefix"));
    }
    return _qualifiedName.prefix;
}
/** @return null; the XML declaration version is not reported by this parser. */
public final String getVersion() {
    return null;
}

/** @return false; the standalone flag is not reported by this parser. */
public final boolean isStandalone() {
    return false;
}

/** @return false; no standalone declaration is ever recorded. */
public final boolean standaloneSet() {
    return false;
}

/** @return null; see getEncoding() for the document's declared scheme. */
public final String getCharacterEncodingScheme() {
    return null;
}
/** @return the target of the current PROCESSING_INSTRUCTION event. */
public final String getPITarget() {
    if (_eventType != PROCESSING_INSTRUCTION) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetPITarget"));
    }
    return _piTarget;
}

/** @return the data of the current PROCESSING_INSTRUCTION event. */
public final String getPIData() {
    if (_eventType != PROCESSING_INSTRUCTION) {
        throw new IllegalStateException(MessageCenter.getString("message.invalidCallingGetPIData"));
    }
    return _piData;
}
/** @return the URI of the encoding algorithm for the current text event. */
public final String getTextAlgorithmURI() {
    return _algorithmURI;
}

/** @return the identifier of the encoding algorithm. */
public final int getTextAlgorithmIndex() {
    return _algorithmId;
}

/** @return the raw algorithm octets (shared internal buffer; do not modify). */
public final byte[] getTextAlgorithmBytes() {
    return _algorithmData;
}

/** @return a private copy of the algorithm octets, or null if none. */
public final byte[] getTextAlgorithmBytesClone() {
    if (_algorithmData == null) {
        return null;
    }
    final byte[] copy = new byte[_algorithmDataLength];
    System.arraycopy(_algorithmData, _algorithmDataOffset, copy, 0, _algorithmDataLength);
    return copy;
}

/** @return the offset of the octets within {@link #getTextAlgorithmBytes()}. */
public final int getTextAlgorithmStart() {
    return _algorithmDataOffset;
}

/** @return the number of algorithm octets. */
public final int getTextAlgorithmLength() {
    return _algorithmDataLength;
}
/**
 * Copies a region of the algorithm octets into the caller's buffer.
 *
 * @return the number of bytes copied (always {@code length})
 * @throws XMLStreamException if the requested region is out of bounds
 */
public final int getTextAlgorithmBytes(int sourceStart, byte[] target,
        int targetStart, int length) throws XMLStreamException {
    try {
        System.arraycopy(_algorithmData, sourceStart, target, targetStart, length);
    } catch (IndexOutOfBoundsException e) {
        throw new XMLStreamException(e);
    }
    return length;
}
//
/**
 * Processes the Document Information Item header, decoding the optional
 * properties when the flags octet is non-zero.
 */
protected final void processDII() throws FastInfosetException, IOException {
    final int flags = read();
    if (flags > 0) {
        processDIIOptionalProperties(flags);
    }
}
/**
 * Decodes the optional DII properties signalled by the flags octet:
 * additional data, initial vocabulary, notations, unparsed entities and
 * the character encoding scheme. The flags must be consumed in this
 * order, which matches the order the properties appear on the wire.
 * Standalone and version flags are not currently reported (see the
 * commented-out handling at the end).
 *
 * @param b the DII flags octet (non-zero)
 */
protected final void processDIIOptionalProperties(int b) throws FastInfosetException, IOException {
    // Optimize for the most common case: only an initial vocabulary.
    if (b == EncodingConstants.DOCUMENT_INITIAL_VOCABULARY_FLAG) {
        decodeInitialVocabulary();
        return;
    }
    if ((b & EncodingConstants.DOCUMENT_ADDITIONAL_DATA_FLAG) > 0) {
        decodeAdditionalData();
        /*
         * TODO
         * how to report the additional data?
         */
    }
    if ((b & EncodingConstants.DOCUMENT_INITIAL_VOCABULARY_FLAG) > 0) {
        decodeInitialVocabulary();
    }
    if ((b & EncodingConstants.DOCUMENT_NOTATIONS_FLAG) > 0) {
        // Notations are decoded but not reported to any handler here.
        decodeNotations();
        /*
        try {
            _dtdHandler.notationDecl(name, public_identifier, system_identifier);
        } catch (SAXException e) {
            throw new IOException("NotationsDeclarationII");
        }
        */
    }
    if ((b & EncodingConstants.DOCUMENT_UNPARSED_ENTITIES_FLAG) > 0) {
        // Unparsed entities are decoded but not reported to any handler here.
        decodeUnparsedEntities();
        /*
        try {
            _dtdHandler.unparsedEntityDecl(name, public_identifier, system_identifier, notation_name);
        } catch (SAXException e) {
            throw new IOException("UnparsedEntitiesII");
        }
        */
    }
    if ((b & EncodingConstants.DOCUMENT_CHARACTER_ENCODING_SCHEME) > 0) {
        _characterEncodingScheme = decodeCharacterEncodingScheme();
    }
    //        if ((b & EncodingConstants.DOCUMENT_STANDALONE_FLAG) > 0) {
    //            boolean standalone = (read() > 0) ? true : false ;
    //            /*
    //             * TODO
    //             * how to report the standalone flag?
    //             */
    //        }
    //
    //        if ((b & EncodingConstants.DOCUMENT_VERSION_FLAG) > 0) {
    //            String version = decodeVersion();
    //            /*
    //             * TODO
    //             * how to report the standalone flag?
    //             */
    //        }
}
/**
 * Doubles the capacity of the three parallel namespace-AII arrays,
 * preserving the first {@code _namespaceAIIsIndex} entries of each.
 */
protected final void resizeNamespaceAIIs() {
    final int count = _namespaceAIIsIndex;
    final int newCapacity = count * 2;

    final String[] prefixes = new String[newCapacity];
    System.arraycopy(_namespaceAIIsPrefix, 0, prefixes, 0, count);
    _namespaceAIIsPrefix = prefixes;

    final String[] namespaceNames = new String[newCapacity];
    System.arraycopy(_namespaceAIIsNamespaceName, 0, namespaceNames, 0, count);
    _namespaceAIIsNamespaceName = namespaceNames;

    final int[] prefixIndexes = new int[newCapacity];
    System.arraycopy(_namespaceAIIsPrefixIndex, 0, prefixIndexes, 0, count);
    _namespaceAIIsPrefixIndex = prefixIndexes;
}
/**
 * Decodes the namespace attributes preceding an element information
 * item, pushing each declaration onto the prefix table and recording it
 * in the namespace-AII arrays, then decodes the element's qualified
 * name and delegates to {@link #processEII}.
 *
 * @param hasAttributes true if attribute information items follow the
 *        element name
 * @throws FastInfosetException if the namespace attributes are not
 *         correctly terminated or the element name cannot be decoded
 */
protected final void processEIIWithNamespaces(boolean hasAttributes) throws FastInfosetException, IOException {
    // Roll the declaration-id generation, clearing on wrap-around.
    if (++_prefixTable._declarationId == Integer.MAX_VALUE) {
        _prefixTable.clearDeclarationIds();
    }
    _currentNamespaceAIIsStart = _namespaceAIIsIndex;
    String prefix = "", namespaceName = "";
    int b = read();
    while ((b & EncodingConstants.NAMESPACE_ATTRIBUTE_MASK) == EncodingConstants.NAMESPACE_ATTRIBUTE) {
        if (_namespaceAIIsIndex == _namespaceAIIsPrefix.length) {
            resizeNamespaceAIIs();
        }
        // The two low bits select which of prefix/namespace are present.
        switch (b & EncodingConstants.NAMESPACE_ATTRIBUTE_PREFIX_NAME_MASK) {
            // no prefix, no namespace
            // Undeclaration of default namespace
            case 0:
                prefix = namespaceName =
                        _namespaceAIIsPrefix[_namespaceAIIsIndex] =
                        _namespaceAIIsNamespaceName[_namespaceAIIsIndex] = "";

                _namespaceNameIndex = _prefixIndex = _namespaceAIIsPrefixIndex[_namespaceAIIsIndex++] = -1;
                break;
            // no prefix, namespace
            // Declaration of default namespace
            case 1:
                prefix = _namespaceAIIsPrefix[_namespaceAIIsIndex] = "";
                namespaceName = _namespaceAIIsNamespaceName[_namespaceAIIsIndex] =
                        decodeIdentifyingNonEmptyStringOnFirstBitAsNamespaceName(false);

                _prefixIndex = _namespaceAIIsPrefixIndex[_namespaceAIIsIndex++] = -1;
                break;
            // prefix, no namespace
            // Undeclaration of namespace
            case 2:
                prefix = _namespaceAIIsPrefix[_namespaceAIIsIndex] =
                        decodeIdentifyingNonEmptyStringOnFirstBitAsPrefix(false);
                namespaceName = _namespaceAIIsNamespaceName[_namespaceAIIsIndex] = "";

                _namespaceNameIndex = -1;
                _namespaceAIIsPrefixIndex[_namespaceAIIsIndex++] = _prefixIndex;
                break;
            // prefix, namespace
            // Declaration of prefixed namespace
            case 3:
                prefix = _namespaceAIIsPrefix[_namespaceAIIsIndex] =
                        decodeIdentifyingNonEmptyStringOnFirstBitAsPrefix(true);
                namespaceName = _namespaceAIIsNamespaceName[_namespaceAIIsIndex] =
                        decodeIdentifyingNonEmptyStringOnFirstBitAsNamespaceName(true);

                _namespaceAIIsPrefixIndex[_namespaceAIIsIndex++] = _prefixIndex;
                break;
        }

        // Push namespace declarations onto the stack
        _prefixTable.pushScopeWithPrefixEntry(prefix, namespaceName, _prefixIndex, _namespaceNameIndex);

        b = read();
    }
    // The namespace attributes must end with a single terminator octet.
    if (b != EncodingConstants.TERMINATOR) {
        throw new FastInfosetException(MessageCenter.getString("message.EIInamespaceNameNotTerminatedCorrectly"));
    }
    _currentNamespaceAIIsEnd = _namespaceAIIsIndex;

    // Decode the element's qualified name, which follows the terminator.
    b = read();
    switch(DecoderStateTables.EII[b]) {
        case DecoderStateTables.EII_NO_AIIS_INDEX_SMALL:
            processEII(_elementNameTable._array[b], hasAttributes);
            break;
        case DecoderStateTables.EII_INDEX_MEDIUM:
            processEII(processEIIIndexMedium(b), hasAttributes);
            break;
        case DecoderStateTables.EII_INDEX_LARGE:
            processEII(processEIIIndexLarge(b), hasAttributes);
            break;
        case DecoderStateTables.EII_LITERAL:
        {
            // Literal qualified name: decode it and index it for reuse.
            final QualifiedName qn = processLiteralQualifiedName(
                    b & EncodingConstants.LITERAL_QNAME_PREFIX_NAMESPACE_NAME_MASK);
            _elementNameTable.add(qn);
            processEII(qn, hasAttributes);
            break;
        }
        default:
            throw new FastInfosetException(MessageCenter.getString("message.IllegalStateDecodingEIIAfterAIIs"));
    }
}
/**
 * Establishes a START_ELEMENT event for the given qualified name,
 * decoding attributes if present and saving the element state on the
 * stack so the matching END_ELEMENT can restore it via popStack().
 *
 * @param name the element's qualified name
 * @param hasAttributes true if attribute information items follow
 * @throws FastInfosetException if the name's prefix is not bound to its
 *         namespace in the current scope
 */
protected final void processEII(QualifiedName name, boolean hasAttributes) throws FastInfosetException, IOException {
    if (_prefixTable._currentInScope[name.prefixIndex] != name.namespaceNameIndex) {
        throw new FastInfosetException(MessageCenter.getString("message.qnameOfEIINotInScope"));
    }

    _eventType = START_ELEMENT;
    _qualifiedName = name;

    // Lazily clear the attributes left over from the previous element.
    if (_clearAttributes) {
        _attributes.clear();
        _clearAttributes = false;
    }

    if (hasAttributes) {
        processAIIs();
    }

    // Push element holder onto the stack, growing the three parallel
    // stacks together when full.
    _stackCount++;
    if (_stackCount == _qNameStack.length) {
        QualifiedName[] qNameStack = new QualifiedName[_qNameStack.length * 2];
        System.arraycopy(_qNameStack, 0, qNameStack, 0, _qNameStack.length);
        _qNameStack = qNameStack;

        int[] namespaceAIIsStartStack = new int[_namespaceAIIsStartStack.length * 2];
        System.arraycopy(_namespaceAIIsStartStack, 0, namespaceAIIsStartStack, 0, _namespaceAIIsStartStack.length);
        _namespaceAIIsStartStack = namespaceAIIsStartStack;

        int[] namespaceAIIsEndStack = new int[_namespaceAIIsEndStack.length * 2];
        System.arraycopy(_namespaceAIIsEndStack, 0, namespaceAIIsEndStack, 0, _namespaceAIIsEndStack.length);
        _namespaceAIIsEndStack = namespaceAIIsEndStack;
    }
    _qNameStack[_stackCount] = _qualifiedName;
    _namespaceAIIsStartStack[_stackCount] = _currentNamespaceAIIsStart;
    _namespaceAIIsEndStack[_stackCount] = _currentNamespaceAIIsEnd;
}
/**
 * Decodes the attribute information items (AIIs) of the current element
 * into {@code _attributes}, resolving names against the attribute-name
 * table and values against the attribute-value table, until a
 * terminator is read.
 *
 * @throws FastInfosetException if a name or value cannot be decoded, a
 *         qualified name is out of namespace scope, or a duplicate
 *         attribute is detected
 */
protected final void processAIIs() throws FastInfosetException, IOException {
    QualifiedName name;
    int b;
    String value;

    // Roll the duplicate-attribute iteration counter, clearing the
    // verifier when it wraps around.
    if (++_duplicateAttributeVerifier._currentIteration == Integer.MAX_VALUE) {
        _duplicateAttributeVerifier.clear();
    }

    // Mark the attribute holder for lazy clearing before the next element.
    _clearAttributes = true;

    boolean terminate = false;
    do {
        // AII qualified name
        b = read();
        switch (DecoderStateTables.AII[b]) {
            case DecoderStateTables.AII_INDEX_SMALL:
                name = _attributeNameTable._array[b];
                break;
            case DecoderStateTables.AII_INDEX_MEDIUM:
            {
                final int i = (((b & EncodingConstants.INTEGER_2ND_BIT_MEDIUM_MASK) << 8) | read())
                    + EncodingConstants.INTEGER_2ND_BIT_SMALL_LIMIT;
                name = _attributeNameTable._array[i];
                break;
            }
            case DecoderStateTables.AII_INDEX_LARGE:
            {
                final int i = (((b & EncodingConstants.INTEGER_2ND_BIT_LARGE_MASK) << 16) | (read() << 8) | read())
                    + EncodingConstants.INTEGER_2ND_BIT_MEDIUM_LIMIT;
                name = _attributeNameTable._array[i];
                break;
            }
            case DecoderStateTables.AII_LITERAL:
                // Literal qualified name: decode it and index it for reuse.
                name = processLiteralQualifiedName(
                        b & EncodingConstants.LITERAL_QNAME_PREFIX_NAMESPACE_NAME_MASK);
                name.createAttributeValues(DuplicateAttributeVerifier.MAP_SIZE);
                _attributeNameTable.add(name);
                break;
            case DecoderStateTables.AII_TERMINATOR_DOUBLE:
                _internalState = INTERNAL_STATE_START_ELEMENT_TERMINATE;
                // intentional fall-through: a double terminator also ends the AIIs
            case DecoderStateTables.AII_TERMINATOR_SINGLE:
                terminate = true;
                // AIIs have finished break out of loop
                continue;
            default:
                throw new FastInfosetException(MessageCenter.getString("message.decodingAIIs"));
        }

        // [normalized value] of AII
        // prefixIndex 0 means "no prefix" and needs no scope check.
        if (name.prefixIndex > 0 && _prefixTable._currentInScope[name.prefixIndex] != name.namespaceNameIndex) {
            throw new FastInfosetException(MessageCenter.getString("message.AIIqNameNotInScope"));
        }

        _duplicateAttributeVerifier.checkForDuplicateAttribute(name.attributeHash, name.attributeId);

        // Decode the attribute value as a non-identifying string.
        b = read();
        switch(DecoderStateTables.NISTRING[b]) {
            case DecoderStateTables.NISTRING_UTF8_SMALL_LENGTH:
                _octetBufferLength = (b & EncodingConstants.OCTET_STRING_LENGTH_5TH_BIT_SMALL_MASK) + 1;
                value = decodeUtf8StringAsString();
                if ((b & EncodingConstants.NISTRING_ADD_TO_TABLE_FLAG) > 0) {
                    _attributeValueTable.add(value);
                }

                _attributes.addAttribute(name, value);
                break;
            case DecoderStateTables.NISTRING_UTF8_MEDIUM_LENGTH:
                _octetBufferLength = read() + EncodingConstants.OCTET_STRING_LENGTH_5TH_BIT_SMALL_LIMIT;
                value = decodeUtf8StringAsString();
                if ((b & EncodingConstants.NISTRING_ADD_TO_TABLE_FLAG) > 0) {
                    _attributeValueTable.add(value);
                }

                _attributes.addAttribute(name, value);
                break;
            case DecoderStateTables.NISTRING_UTF8_LARGE_LENGTH:
                // Length is a 4-octet big-endian integer plus the medium limit.
                _octetBufferLength = ((read() << 24) |
                        (read() << 16) |
                        (read() << 8) |
                        read())
                        + EncodingConstants.OCTET_STRING_LENGTH_5TH_BIT_MEDIUM_LIMIT;
                value = decodeUtf8StringAsString();
                if ((b & EncodingConstants.NISTRING_ADD_TO_TABLE_FLAG) > 0) {
                    _attributeValueTable.add(value);
                }

                _attributes.addAttribute(name, value);
                break;
            case DecoderStateTables.NISTRING_UTF16_SMALL_LENGTH:
                _octetBufferLength = (b & EncodingConstants.OCTET_STRING_LENGTH_5TH_BIT_SMALL_MASK) + 1;
                value = decodeUtf16StringAsString();
                if ((b & EncodingConstants.NISTRING_ADD_TO_TABLE_FLAG) > 0) {
                    _attributeValueTable.add(value);
                }

                _attributes.addAttribute(name, value);
                break;
            case DecoderStateTables.NISTRING_UTF16_MEDIUM_LENGTH:
                _octetBufferLength = read() + EncodingConstants.OCTET_STRING_LENGTH_5TH_BIT_SMALL_LIMIT;
                value = decodeUtf16StringAsString();
                if ((b & EncodingConstants.NISTRING_ADD_TO_TABLE_FLAG) > 0) {
                    _attributeValueTable.add(value);
                }

                _attributes.addAttribute(name, value);
                break;
            case DecoderStateTables.NISTRING_UTF16_LARGE_LENGTH:
                // Length is a 4-octet big-endian integer plus the medium limit.
                _octetBufferLength = ((read() << 24) |
                        (read() << 16) |
                        (read() << 8) |
                        read())
                        + EncodingConstants.OCTET_STRING_LENGTH_5TH_BIT_MEDIUM_LIMIT;
                value = decodeUtf16StringAsString();
                if ((b & EncodingConstants.NISTRING_ADD_TO_TABLE_FLAG) > 0) {
                    _attributeValueTable.add(value);
                }

                _attributes.addAttribute(name, value);
                break;
            case DecoderStateTables.NISTRING_RA:
            {
                // Capture the flag before b is overwritten below.
                final boolean addToTable = (b & EncodingConstants.NISTRING_ADD_TO_TABLE_FLAG) > 0;
                // Decode resitricted alphabet integer
                _identifier = (b & 0x0F) << 4;
                b = read();
                _identifier |= (b & 0xF0) >> 4;

                decodeOctetsOnFifthBitOfNonIdentifyingStringOnFirstBit(b);

                value = decodeRestrictedAlphabetAsString();
                if (addToTable) {
                    _attributeValueTable.add(value);
                }

                _attributes.addAttribute(name, value);
                break;
            }
            case DecoderStateTables.NISTRING_EA:
            {
                if ((b & EncodingConstants.NISTRING_ADD_TO_TABLE_FLAG) > 0) {
                    throw new EncodingAlgorithmException(MessageCenter.getString("message.addToTableNotSupported"));
                }

                // Decode encoding algorithm integer
                _identifier = (b & 0x0F) << 4;
                b = read();
                _identifier |= (b & 0xF0) >> 4;

                decodeOctetsOnFifthBitOfNonIdentifyingStringOnFirstBit(b);
                processAIIEncodingAlgorithm(name);
                break;
            }
            case DecoderStateTables.NISTRING_INDEX_SMALL:
                _attributes.addAttribute(name,
                        _attributeValueTable._array[b & EncodingConstants.INTEGER_2ND_BIT_SMALL_MASK]);
                break;
            case DecoderStateTables.NISTRING_INDEX_MEDIUM:
            {
                final int index = (((b & EncodingConstants.INTEGER_2ND_BIT_MEDIUM_MASK) << 8) | read())
                    + EncodingConstants.INTEGER_2ND_BIT_SMALL_LIMIT;

                _attributes.addAttribute(name,
                        _attributeValueTable._array[index]);
                break;
            }
            case DecoderStateTables.NISTRING_INDEX_LARGE:
            {
                final int index = (((b & EncodingConstants.INTEGER_2ND_BIT_LARGE_MASK) << 16) | (read() << 8) | read())
                    + EncodingConstants.INTEGER_2ND_BIT_MEDIUM_LIMIT;

                _attributes.addAttribute(name,
                        _attributeValueTable._array[index]);
                break;
            }
            case DecoderStateTables.NISTRING_EMPTY:
                _attributes.addAttribute(name, "");
                break;
            default:
                throw new FastInfosetException(MessageCenter.getString("message.decodingAIIValue"));
        }

    } while (!terminate);

    // Reset duplication attribute verfifier
    _duplicateAttributeVerifier._poolCurrent = _duplicateAttributeVerifier._poolHead;
}
/**
 * Decodes a medium-range element-name index (one further octet follows)
 * and resolves it in the element name table.
 */
protected final QualifiedName processEIIIndexMedium(int b) throws FastInfosetException, IOException {
    final int index = (((b & EncodingConstants.INTEGER_3RD_BIT_MEDIUM_MASK) << 8) | read())
            + EncodingConstants.INTEGER_3RD_BIT_SMALL_LIMIT;
    return _elementNameTable._array[index];
}
/**
 * Decodes a "large" or "large large" element-name index and resolves it
 * in the element name table; the INTEGER_3RD_BIT_LARGE_LARGE_FLAG bit
 * of the first octet distinguishes the two forms.
 */
protected final QualifiedName processEIIIndexLarge(int b) throws FastInfosetException, IOException {
    int i;
    if ((b & EncodingConstants.INTEGER_3RD_BIT_LARGE_LARGE_FLAG) == 0x20) {
        // EII large index
        i = (((b & EncodingConstants.INTEGER_3RD_BIT_LARGE_MASK) << 16) | (read() << 8) | read())
            + EncodingConstants.INTEGER_3RD_BIT_MEDIUM_LIMIT;
    } else {
        // EII large large index
        i = (((read() & EncodingConstants.INTEGER_3RD_BIT_LARGE_LARGE_MASK) << 16) | (read() << 8) | read())
            + EncodingConstants.INTEGER_3RD_BIT_LARGE_LIMIT;
    }
    return _elementNameTable._array[i];
}
/**
 * Decodes a literal qualified name. The presence of the prefix and the
 * namespace name is given by the two low bits of {@code state}:
 * 0 = neither, 1 = namespace only, 3 = prefix and namespace;
 * 2 (a prefix without a namespace name) is invalid.
 *
 * @param state the two-bit prefix/namespace presence mask
 * @return the decoded qualified name
 * @throws FastInfosetException if state is 2 or out of range
 */
protected final QualifiedName processLiteralQualifiedName(int state) throws FastInfosetException, IOException {
    switch (state) {
        // no prefix, no namespace
        case 0:
            return new QualifiedName(
                    "",
                    "",
                    decodeIdentifyingNonEmptyStringOnFirstBit(_v.localName),
                    "",
                    0,
                    -1,
                    -1,
                    _identifier);
        // no prefix, namespace
        case 1:
            return new QualifiedName(
                    "",
                    decodeIdentifyingNonEmptyStringIndexOnFirstBitAsNamespaceName(false),
                    decodeIdentifyingNonEmptyStringOnFirstBit(_v.localName),
                    "",
                    0,
                    -1,
                    _namespaceNameIndex,
                    _identifier);
        // prefix, no namespace
        case 2:
            throw new FastInfosetException(MessageCenter.getString("message.qNameMissingNamespaceName"));
        // prefix, namespace
        case 3:
            return new QualifiedName(
                    decodeIdentifyingNonEmptyStringIndexOnFirstBitAsPrefix(true),
                    decodeIdentifyingNonEmptyStringIndexOnFirstBitAsNamespaceName(true),
                    decodeIdentifyingNonEmptyStringOnFirstBit(_v.localName),
                    "",
                    0,
                    _prefixIndex,
                    _namespaceNameIndex,
                    _identifier);
        default:
            throw new FastInfosetException(MessageCenter.getString("message.decodingEII"));
    }
}
/**
 * Decodes a comment information item into the character fields
 * (_characters/_charactersOffset/_charactersLength) and sets the event
 * type to COMMENT.
 *
 * @throws FastInfosetException if the comment uses an encoding
 *         algorithm, which is not supported for comments
 */
protected final void processCommentII() throws FastInfosetException, IOException {
    _eventType = COMMENT;

    switch(decodeNonIdentifyingStringOnFirstBit()) {
        case NISTRING_STRING:
            // Newly decoded string; optionally index it for later references.
            if (_addToTable) {
                _v.otherString.add(new CharArray(_charBuffer, 0, _charBufferLength, true));
            }

            _characters = _charBuffer;
            _charactersOffset = 0;
            _charactersLength = _charBufferLength;
            break;
        case NISTRING_ENCODING_ALGORITHM:
            throw new FastInfosetException(MessageCenter.getString("message.commentIIAlgorithmNotSupported"));
        case NISTRING_INDEX:
            // Previously indexed string; point at the table entry directly.
            final CharArray ca = _v.otherString.get(_integer);

            _characters = ca.ch;
            _charactersOffset = ca.start;
            _charactersLength = ca.length;
            break;
        case NISTRING_EMPTY_STRING:
            _characters = _charBuffer;
            _charactersOffset = 0;
            _charactersLength = 0;
            break;
    }
}
/**
 * Decodes a processing instruction information item into _piTarget and
 * _piData and sets the event type to PROCESSING_INSTRUCTION.
 *
 * @throws FastInfosetException if the PI data uses an encoding
 *         algorithm, which is not permitted for PIs
 */
protected final void processProcessingII() throws FastInfosetException, IOException {
    _eventType = PROCESSING_INSTRUCTION;

    _piTarget = decodeIdentifyingNonEmptyStringOnFirstBit(_v.otherNCName);

    switch(decodeNonIdentifyingStringOnFirstBit()) {
        case NISTRING_STRING:
            _piData = new String(_charBuffer, 0, _charBufferLength);
            // Optionally index the data string for later references.
            if (_addToTable) {
                _v.otherString.add(new CharArrayString(_piData));
            }
            break;
        case NISTRING_ENCODING_ALGORITHM:
            throw new FastInfosetException(MessageCenter.getString("message.processingIIWithEncodingAlgorithm"));
        case NISTRING_INDEX:
            // Previously indexed string.
            _piData = _v.otherString.get(_integer).toString();
            break;
        case NISTRING_EMPTY_STRING:
            _piData = "";
            break;
    }
}
protected final void processCIIEncodingAlgorithm() throws FastInfosetException, IOException {
_algorithmData = _octetBuffer;
_algorithmDataOffset = _octetBufferStart;
_algorithmDataLength = _octetBufferLength;
if (_algorithmId >= EncodingConstants.ENCODING_ALGORITHM_APPLICATION_START) {
_algorithmURI = _v.encodingAlgorithm.get(_algorithmId - EncodingConstants.ENCODING_ALGORITHM_APPLICATION_START);
if (_algorithmURI == null) {
throw new EncodingAlgorithmException(MessageCenter.getString("message.URINotPresent", new Object[]{new Integer(_identifier)}));
}
} else if (_algorithmId > EncodingConstants.ENCODING_ALGORITHM_BUILTIN_END) {
// Reserved built-in algorithms for future use
// TODO should use sax property to decide if event will be
// reported, allows for support through handler if required.
throw new EncodingAlgorithmException(MessageCenter.getString("message.identifiers10to31Reserved"));
}
}
protected final void processAIIEncodingAlgorithm(QualifiedName name) throws FastInfosetException, IOException {
String URI = null;
if (_identifier >= EncodingConstants.ENCODING_ALGORITHM_APPLICATION_START) {
URI = _v.encodingAlgorithm.get(_identifier - EncodingConstants.ENCODING_ALGORITHM_APPLICATION_START);
if (URI == null) {
throw new EncodingAlgorithmException(MessageCenter.getString("message.URINotPresent", new Object[]{new Integer(_identifier)}));
}
} else if (_identifier >= EncodingConstants.ENCODING_ALGORITHM_BUILTIN_END) {
if (_identifier == EncodingAlgorithmIndexes.CDATA) {
throw new EncodingAlgorithmException(MessageCenter.getString("message.CDATAAlgorithmNotSupported"));
}
// Reserved built-in algorithms for future use
// TODO should use sax property to decide if event will be
// reported, allows for support through handler if required.
throw new EncodingAlgorithmException(MessageCenter.getString("message.identifiers10to31Reserved"));
}
final byte[] data = new byte[_octetBufferLength];
System.arraycopy(_octetBuffer, _octetBufferStart, data, 0, _octetBufferLength);
_attributes.addAttributeWithAlgorithmData(name, URI, _identifier, data);
}
    /**
     * Converts the current algorithm-encoded data into a character form and
     * publishes it via {@code _characters}/{@code _charactersOffset}/
     * {@code _charactersLength}.
     * <p>
     * Built-in algorithms are converted through
     * {@code BuiltInEncodingAlgorithmFactory.table}; application-defined
     * algorithms through the decoder registered for {@code _algorithmURI}.
     *
     * @throws EncodingAlgorithmException if an application algorithm has no
     *         registered decoder.
     */
    protected final void convertEncodingAlgorithmDataToCharacters() throws FastInfosetException, IOException {
        StringBuffer buffer = new StringBuffer();
        if (_algorithmId < EncodingConstants.ENCODING_ALGORITHM_BUILTIN_END) {
            Object array = BuiltInEncodingAlgorithmFactory.table[_algorithmId].
                    decodeFromBytes(_algorithmData, _algorithmDataOffset, _algorithmDataLength);
            BuiltInEncodingAlgorithmFactory.table[_algorithmId].convertToCharacters(array, buffer);
        } else if (_algorithmId == EncodingAlgorithmIndexes.CDATA) {
            // NOTE(review): if EncodingAlgorithmIndexes.CDATA is below
            // ENCODING_ALGORITHM_BUILTIN_END this branch is shadowed by the one
            // above and unreachable — TODO confirm against the constants.
            // Rewind the octet buffer so the CDATA octets are re-read as UTF-8.
            _octetBufferOffset -= _octetBufferLength;
            decodeUtf8StringIntoCharBuffer();
            _characters = _charBuffer;
            _charactersOffset = 0;
            _charactersLength = _charBufferLength;
            return;
        } else if (_algorithmId >= EncodingConstants.ENCODING_ALGORITHM_APPLICATION_START) {
            final EncodingAlgorithm ea = (EncodingAlgorithm)_registeredEncodingAlgorithms.get(_algorithmURI);
            if (ea != null) {
                final Object data = ea.decodeFromBytes(_octetBuffer, _octetBufferStart, _octetBufferLength);
                ea.convertToCharacters(data, buffer);
            } else {
                throw new EncodingAlgorithmException(
                        MessageCenter.getString("message.algorithmDataCannotBeReported"));
            }
        }
        // Copy the converted text out of the temporary buffer.
        _characters = new char[buffer.length()];
        buffer.getChars(0, buffer.length(), _characters, 0);
        _charactersOffset = 0;
        _charactersLength = _characters.length;
    }
    /**
     * {@link NamespaceContext} view over the decoder's prefix table; every
     * method delegates directly to {@code _prefixTable}.
     */
    protected class NamespaceContextImpl implements NamespaceContext {
        /** Returns the namespace URI bound to {@code prefix}, per the prefix table. */
        public final String getNamespaceURI(String prefix) {
            return _prefixTable.getNamespaceFromPrefix(prefix);
        }
        /** Returns a prefix bound to {@code namespaceURI}, per the prefix table. */
        public final String getPrefix(String namespaceURI) {
            return _prefixTable.getPrefixFromNamespace(namespaceURI);
        }
        /** Returns all prefixes bound to {@code namespaceURI}, per the prefix table. */
        public final Enumeration getPrefixes(String namespaceURI) {
            return _prefixTable.getPrefixesFromNamespace(namespaceURI);
        }
    }
    /** Returns the namespace URI currently bound to {@code prefix} in the prefix table. */
    public final String getNamespaceDecl(String prefix) {
        return _prefixTable.getNamespaceFromPrefix(prefix);
    }
    /** Alias for {@link #getNamespaceDecl(String)}. */
    public final String getURI(String prefix) {
        return getNamespaceDecl(prefix);
    }
    /** Returns all prefixes known to the prefix table. */
    public final Enumeration getPrefixes() {
        return _prefixTable.getPrefixes();
    }
    /** Exposes the holder carrying the current element's attributes. */
    public final AttributesHolder getAttributesHolder() {
        return _attributes;
    }
    /** Stores the given StAX manager in {@code _manager}. */
    public final void setManager(StAXManager manager) {
        _manager = manager;
    }
final static String getEventTypeString(int eventType) {
switch (eventType){
case START_ELEMENT:
return "START_ELEMENT";
case END_ELEMENT:
return "END_ELEMENT";
case PROCESSING_INSTRUCTION:
return "PROCESSING_INSTRUCTION";
case CHARACTERS:
return "CHARACTERS";
case COMMENT:
return "COMMENT";
case START_DOCUMENT:
return "START_DOCUMENT";
case END_DOCUMENT:
return "END_DOCUMENT";
case ENTITY_REFERENCE:
return "ENTITY_REFERENCE";
case ATTRIBUTE:
return "ATTRIBUTE";
case DTD:
return "DTD";
case CDATA:
return "CDATA";
}
return "UNKNOWN_EVENT_TYPE";
}
}
| |
//----------------------------------------------------------------------------
// Copyright (C) 2004 Yasser EL-Manzalawy.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
// Contact the Author:
// Yasser EL-Manzalawy
// e-mail: ymelmanz@yahoo.com
//----------------------------------------------------------------------------
/*
* Project: GraphPlan 1.0
* Class: graphplan.TOperator
*
* Date: 2004-03-31
*
* Author: Yasser EL-Manzalawy
* Email: ymelmanz@yahoo.com
*/
package graphplan;
import java.util.*;
import java.io.*;
import parser.*;
/**
* Class TOperator.
* Represents an operator template.
* @author Yasser EL-Manzalawy
*/
public class TOperator {

    // ------------------------------------------------------------------------
    // --- fields ---
    // ------------------------------------------------------------------------

    /** Operator head: name, parameter variables and their types. */
    private TOpHead head;

    /** Conditions on the parameters ({@code Expression}s); may be null. */
    private Vector cond;

    /** Precondition literals; may be null. */
    private Vector pre;

    /** Add effects; may be null. */
    private Vector add;

    /** Del effects; may be null. */
    private Vector del;

    /**
     * All ground parameter substitutions, each a space-separated string.
     * 1. no arguments --> allUnifiers and validUnifiers are null
     * 2. arguments and cond == null --> allUnifiers = validUnifiers
     * 3. arguments and cond != null --> allUnifiers >= validUnifiers
     */
    private Vector allUnifiers;

    /** Subset of allUnifiers satisfying the parameter conditions. */
    private Vector validUnifiers;

    // ------------------------------------------------------------------------
    // --- constructor ---
    // ------------------------------------------------------------------------

    /**
     * Creates a new instance of TOperator.
     *
     * @param h the operator head (name and typed parameters).
     * @param c the parameter conditions; may be null.
     * @param p the precondition; may be null.
     * @param a the add effects; may be null.
     * @param d the del effects; may be null.
     */
    public TOperator(TOpHead h, Vector c, Vector p, Vector a, Vector d) {
        head = h;
        // any of those vectors could be null
        cond = c;
        pre = p;
        add = a;
        del = d;
    }

    // ------------------------------------------------------------------------
    // --- methods ---
    // ------------------------------------------------------------------------

    /**
     * Uses the parameter types and the object set to enumerate every possible
     * unifier into {@link #allUnifiers}, then filters them through the
     * parameter conditions (when present) into {@link #validUnifiers}.
     * Exits the VM if some parameter type has no objects at all.
     *
     * @param os the set of typed objects of the planning problem.
     */
    public void getPossibleUnifiers(ObjectSet os) {
        Vector types = head.getTypes();
        if (types == null) // no arguments
            return;
        allUnifiers = new Vector();
        int len = types.size();
        for (int i = 0; i < len; i++) {
            String type = (String) types.elementAt(i);
            Vector para = os.getObjects(type);
            if (para.isEmpty()) // no objects found: the operator can never apply
            {
                System.out.print("Error in operator: " + head.getName());
                System.out.print("No object of the type: " + type);
                // NOTE(review): exits with status 0 on an error path; status 1
                // would be conventional, kept as-is to preserve behavior.
                System.exit(0);
            }
            join(para);
        }
        // Now remove irrelevant unifiers.
        if (cond == null)
            validUnifiers = allUnifiers;
        else {
            len = allUnifiers.size();
            Vector vars = head.getVars();
            validUnifiers = new Vector();
            for (int i = 0; i < len; i++) {
                Unifier un = new Unifier(vars, (String) allUnifiers.elementAt(i));
                if (evaluateCond(un))
                    validUnifiers.addElement(allUnifiers.elementAt(i));
            }
        }
    }

    /**
     * For each valid unifier, evaluates the instantiated precondition against
     * the given conjunction (the initial state or the previous proposition
     * layer) and collects an Action for every unifier that applies.
     * If the operator is not applicable the returned Vector has ZERO size.
     *
     * @param thePre the precondition used to determine applicability of each action.
     * @return a vector of all applicable actions.
     */
    public Vector generateActions(Conjunction thePre) {
        Vector actions = new Vector();
        Vector vars = head.getVars();
        // note: there must be at least one variable
        // or there is no pre, add, del == no operator
        int len = validUnifiers.size();
        for (int i = 0; i < len; i++) {
            String s = (String) validUnifiers.elementAt(i);
            Unifier un = new Unifier(vars, s);
            String apre = G.substitute(pre, un); // apre may be null
            Conjunction cnj = new Conjunction(apre);
            if (thePre.contains(cnj)) // pre-condition is satisfied
            {
                // Instantiate effects and head, then build the action.
                String adel = G.substitute(del, un);
                String aadd = G.substitute(add, un);
                String ahead = G.substitute(head, un);
                actions.addElement(new Action(ahead, apre, aadd, adel));
            }
        }
        return actions;
    }

    /**
     * For testing: prints all and valid unifiers.
     */
    public void show() {
        if (allUnifiers == null) {
            // Fixed: previously fell through after the warning and printed
            // "null" twice.
            System.out.println("Please Call getPossibleUnifiers() first");
            return;
        }
        System.out.println(allUnifiers);
        System.out.println(validUnifiers);
    }

    /**
     * Evaluates the parameter conditions under the given unifier.
     *
     * @param un the unifier binding parameter variables to objects.
     * @return true iff every condition in {@link #cond} evaluates to true.
     */
    private boolean evaluateCond(Unifier un) {
        int len = cond.size();
        for (int i = 0; i < len; i++) {
            Expression e = (Expression) cond.elementAt(i);
            // Simplified from "== true ? continue : return false".
            if (!e.evaluate(un))
                return false;
        }
        return true;
    }

    /**
     * Cross-joins {@link #allUnifiers} with {@code second} and keeps the
     * result at {@link #allUnifiers}: each result element is an old unifier,
     * a space, and one element of {@code second}.
     *
     * @param second the objects for the next parameter position.
     */
    private void join(Vector second) {
        int len = allUnifiers.size();
        if (len == 0) {
            // First parameter: seed with the objects themselves.
            allUnifiers.addAll(second);
            return;
        }
        Vector temp = new Vector(allUnifiers);
        allUnifiers.clear();
        for (int i = 0; i < len; i++) {
            String s1 = (String) temp.elementAt(i);
            int len2 = second.size();
            for (int j = 0; j < len2; j++) {
                // Fixed: dropped the useless "new String()" allocation that was
                // immediately overwritten.
                allUnifiers.addElement(s1 + " " + (String) second.elementAt(j));
            }
        }
    }

    // ------------------------------------------------------------------------
    // --- static method ---
    // ------------------------------------------------------------------------

    /**
     * Unit testing method: parses a single operator from "input.txt" and
     * prints its components.
     *
     * @param args The args array.
     */
    public static void main(String[] args) {
        GParser p = null;
        try {
            p = new GParser(new java.io.FileInputStream("input.txt"));
        } catch (java.io.FileNotFoundException e) {
            System.out.println("GParser Version 0.1: input.txt not found.");
            return;
        }
        TOperator v = null;
        try {
            v = p.Operator();
        } catch (ParseException ex) {
            System.out.println("Error Parsing Proposition");
            ex.printStackTrace();
            return;
        }
        System.out.println(v.head);
        if (v.cond != null)
            System.out.println(v.cond);
        if (v.pre != null)
            System.out.println(v.pre);
        if (v.add != null)
            System.out.println(v.add);
        if (v.del != null)
            System.out.println(v.del);
    }
} // end TOperator
| |
package org.jetbrains.plugins.scala.debugger.filters;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import com.intellij.uiDesigner.core.Spacer;
import org.jetbrains.annotations.Nls;
import org.jetbrains.plugins.scala.ScalaBundle;
import org.jetbrains.plugins.scala.icons.Icons;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ResourceBundle;
/**
* @author ilyas
*/
public class ScalaDebuggerSettingsConfigurable implements Configurable {
    // Form-bound components; instantiated by the generated $$$setupUI$$$().
    private JCheckBox myIgnoreScalaMethods;
    private JPanel myPanel;
    private JLabel startIndexLabel;
    private JSpinner myStartIndexSpinner;
    private JSpinner myEndIndexSpinner;
    private JLabel endIndexLabel;
    private JCheckBox friendlyDisplayOfScalaCheckBox;
    private JCheckBox dontShowRuntimeRefs;
    private JCheckBox doNotExpandStreamsCheckBox;
    private JCheckBox showOuterVariables;
    // Fixed: removed the private field "isModified" — it was never read or
    // written anywhere in this class and only shadowed the isModified() method.
    private final ScalaDebuggerSettings mySettings;

    /**
     * Builds the configurable and seeds the checkboxes from the settings bean.
     *
     * @param settings the persisted Scala debugger settings backing this panel.
     */
    public ScalaDebuggerSettingsConfigurable(final ScalaDebuggerSettings settings) {
        mySettings = settings;
        // DEBUG_DISABLE_SPECIFIC_SCALA_METHODS is a nullable Boolean; null
        // means "enabled" (see also reset()).
        final Boolean flag = settings.DEBUG_DISABLE_SPECIFIC_SCALA_METHODS;
        myIgnoreScalaMethods.setSelected(flag == null || flag.booleanValue());
        friendlyDisplayOfScalaCheckBox.setSelected(settings.FRIENDLY_COLLECTION_DISPLAY_ENABLED);
        doNotExpandStreamsCheckBox.setSelected(settings.DO_NOT_DISPLAY_STREAMS);
        dontShowRuntimeRefs.setSelected(settings.DONT_SHOW_RUNTIME_REFS);
        friendlyDisplayOfScalaCheckBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // The collection sub-controls only make sense while friendly
                // display is enabled.
                final boolean collectionsSettingsEnabled = friendlyDisplayOfScalaCheckBox.isSelected();
                myStartIndexSpinner.setEnabled(collectionsSettingsEnabled);
                myEndIndexSpinner.setEnabled(collectionsSettingsEnabled);
                doNotExpandStreamsCheckBox.setEnabled(collectionsSettingsEnabled);
            }
        });
        showOuterVariables.setSelected(settings.SHOW_VARIABLES_FROM_OUTER_SCOPES);
    }

    /** Display name shown in the settings tree. */
    @Nls
    public String getDisplayName() {
        return ScalaBundle.message("scala.debug.caption");
    }

    /** Icon shown next to the display name. */
    public Icon getIcon() {
        return Icons.SCALA_SMALL_LOGO;
    }

    /** No dedicated help topic. */
    public String getHelpTopic() {
        return null;
    }

    /**
     * Initializes the spinner models (minimum 1, no maximum) and returns the
     * form's root panel.
     */
    public JComponent createComponent() {
        myStartIndexSpinner.setModel(new SpinnerNumberModel(1, 1, null, 1));
        myEndIndexSpinner.setModel(new SpinnerNumberModel(1, 1, null, 1));
        myStartIndexSpinner.setValue(mySettings.COLLECTION_START_INDEX);
        myEndIndexSpinner.setValue(mySettings.COLLECTION_END_INDEX);
        return myPanel;
    }

    /**
     * Reports whether any UI control differs from the persisted settings.
     */
    public boolean isModified() {
        // Fixed: JSpinner.getValue() returns Object and the index settings are
        // boxed, so the previous "!=" compared references and could report a
        // change even when the values were equal. Compare with equals() instead.
        // Fixed: the nullable Boolean flag is now read with the same
        // null-means-enabled convention as the constructor, instead of
        // unboxing (which threw NPE when the flag was null).
        final boolean ignoreMethods = mySettings.DEBUG_DISABLE_SPECIFIC_SCALA_METHODS == null ||
                mySettings.DEBUG_DISABLE_SPECIFIC_SCALA_METHODS.booleanValue();
        return !mySettings.COLLECTION_START_INDEX.equals(myStartIndexSpinner.getValue()) ||
                !mySettings.COLLECTION_END_INDEX.equals(myEndIndexSpinner.getValue()) ||
                mySettings.FRIENDLY_COLLECTION_DISPLAY_ENABLED != friendlyDisplayOfScalaCheckBox.isSelected() ||
                mySettings.DONT_SHOW_RUNTIME_REFS != dontShowRuntimeRefs.isSelected() ||
                ignoreMethods != myIgnoreScalaMethods.isSelected() ||
                mySettings.DO_NOT_DISPLAY_STREAMS != doNotExpandStreamsCheckBox.isSelected() ||
                mySettings.SHOW_VARIABLES_FROM_OUTER_SCOPES != showOuterVariables.isSelected();
    }

    /** Copies every UI control's state back into the settings bean. */
    public void apply() throws ConfigurationException {
        mySettings.FRIENDLY_COLLECTION_DISPLAY_ENABLED = friendlyDisplayOfScalaCheckBox.isSelected();
        mySettings.DONT_SHOW_RUNTIME_REFS = dontShowRuntimeRefs.isSelected();
        mySettings.DEBUG_DISABLE_SPECIFIC_SCALA_METHODS = myIgnoreScalaMethods.isSelected();
        mySettings.COLLECTION_START_INDEX = (Integer) myStartIndexSpinner.getValue();
        mySettings.COLLECTION_END_INDEX = (Integer) myEndIndexSpinner.getValue();
        mySettings.DO_NOT_DISPLAY_STREAMS = doNotExpandStreamsCheckBox.isSelected();
        mySettings.SHOW_VARIABLES_FROM_OUTER_SCOPES = showOuterVariables.isSelected();
    }

    /**
     * Re-reads the "ignore Scala methods" flag from the settings.
     * NOTE(review): the other controls are not reset here — confirm whether
     * that is intentional before extending.
     */
    public void reset() {
        final Boolean flag = mySettings.DEBUG_DISABLE_SPECIFIC_SCALA_METHODS;
        myIgnoreScalaMethods.setSelected(flag == null || flag.booleanValue());
    }

    public void disposeUIResources() {
    }

    {
        // GUI initializer generated by IntelliJ IDEA GUI Designer
        // >>> IMPORTANT!! <<<
        // DO NOT EDIT OR ADD ANY CODE HERE!
        $$$setupUI$$$();
    }

    /**
     * Method generated by IntelliJ IDEA GUI Designer
     * >>> IMPORTANT!! <<<
     * DO NOT edit this method OR call it in your code!
     *
     * @noinspection ALL
     */
    private void $$$setupUI$$$() {
        myPanel = new JPanel();
        myPanel.setLayout(new GridLayoutManager(7, 3, new Insets(0, 0, 0, 0), -1, -1));
        myIgnoreScalaMethods = new JCheckBox();
        myIgnoreScalaMethods.setSelected(false);
        this.$$$loadButtonText$$$(myIgnoreScalaMethods, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("scala.debug.disable.specific.methods"));
        myPanel.add(myIgnoreScalaMethods, new GridConstraints(0, 0, 1, 2, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        final Spacer spacer1 = new Spacer();
        myPanel.add(spacer1, new GridConstraints(6, 0, 1, 2, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_VERTICAL, 1, GridConstraints.SIZEPOLICY_WANT_GROW, null, null, null, 0, false));
        friendlyDisplayOfScalaCheckBox = new JCheckBox();
        this.$$$loadButtonText$$$(friendlyDisplayOfScalaCheckBox, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("friendly.collection.display.enabled"));
        myPanel.add(friendlyDisplayOfScalaCheckBox, new GridConstraints(2, 0, 1, 2, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        final JPanel panel1 = new JPanel();
        panel1.setLayout(new GridLayoutManager(2, 2, new Insets(0, 0, 0, 0), -1, -1));
        myPanel.add(panel1, new GridConstraints(4, 1, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
        startIndexLabel = new JLabel();
        this.$$$loadLabelText$$$(startIndexLabel, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("friendly.collection.debug.start.index"));
        panel1.add(startIndexLabel, new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        myStartIndexSpinner = new JSpinner();
        panel1.add(myStartIndexSpinner, new GridConstraints(0, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        endIndexLabel = new JLabel();
        this.$$$loadLabelText$$$(endIndexLabel, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("friendly.collection.debug.end.index"));
        panel1.add(endIndexLabel, new GridConstraints(1, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        myEndIndexSpinner = new JSpinner();
        panel1.add(myEndIndexSpinner, new GridConstraints(1, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        doNotExpandStreamsCheckBox = new JCheckBox();
        this.$$$loadButtonText$$$(doNotExpandStreamsCheckBox, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("friendly.collection.do.not.display.streams"));
        myPanel.add(doNotExpandStreamsCheckBox, new GridConstraints(3, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        final Spacer spacer2 = new Spacer();
        myPanel.add(spacer2, new GridConstraints(4, 2, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, 1, null, null, null, 0, false));
        final Spacer spacer3 = new Spacer();
        myPanel.add(spacer3, new GridConstraints(3, 2, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, 1, null, null, null, 0, false));
        final JLabel label1 = new JLabel();
        label1.setText("");
        myPanel.add(label1, new GridConstraints(3, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        final JLabel label2 = new JLabel();
        label2.setText(" ");
        myPanel.add(label2, new GridConstraints(4, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        dontShowRuntimeRefs = new JCheckBox();
        this.$$$loadButtonText$$$(dontShowRuntimeRefs, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("dont.show.runtime.refs"));
        myPanel.add(dontShowRuntimeRefs, new GridConstraints(1, 0, 1, 2, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        showOuterVariables = new JCheckBox();
        this.$$$loadButtonText$$$(showOuterVariables, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("show.variables.from.outer.scopes.in.variables.view"));
        myPanel.add(showOuterVariables, new GridConstraints(5, 0, 1, 2, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
    }

    /**
     * @noinspection ALL
     */
    private void $$$loadLabelText$$$(JLabel component, String text) {
        StringBuffer result = new StringBuffer();
        boolean haveMnemonic = false;
        char mnemonic = '\0';
        int mnemonicIndex = -1;
        for (int i = 0; i < text.length(); i++) {
            if (text.charAt(i) == '&') {
                i++;
                if (i == text.length()) break;
                if (!haveMnemonic && text.charAt(i) != '&') {
                    haveMnemonic = true;
                    mnemonic = text.charAt(i);
                    mnemonicIndex = result.length();
                }
            }
            result.append(text.charAt(i));
        }
        component.setText(result.toString());
        if (haveMnemonic) {
            component.setDisplayedMnemonic(mnemonic);
            component.setDisplayedMnemonicIndex(mnemonicIndex);
        }
    }

    /**
     * @noinspection ALL
     */
    private void $$$loadButtonText$$$(AbstractButton component, String text) {
        StringBuffer result = new StringBuffer();
        boolean haveMnemonic = false;
        char mnemonic = '\0';
        int mnemonicIndex = -1;
        for (int i = 0; i < text.length(); i++) {
            if (text.charAt(i) == '&') {
                i++;
                if (i == text.length()) break;
                if (!haveMnemonic && text.charAt(i) != '&') {
                    haveMnemonic = true;
                    mnemonic = text.charAt(i);
                    mnemonicIndex = result.length();
                }
            }
            result.append(text.charAt(i));
        }
        component.setText(result.toString());
        if (haveMnemonic) {
            component.setMnemonic(mnemonic);
            component.setDisplayedMnemonicIndex(mnemonicIndex);
        }
    }

    /**
     * @noinspection ALL
     */
    public JComponent $$$getRootComponent$$$() {
        return myPanel;
    }
}
| |
package tec.uom.client.fitbit.model.food;
import javax.measure.Quantity;
import javax.measure.quantity.Energy;
import javax.measure.quantity.Mass;
/**
* User: Anakar Parida
* Date: May 6, 2015
* Time: 2:53:29 PM
*/
public class NutritionalValuesEntry {
    // Mutable bean holding one food entry's nutrition facts. Energy values use
    // Quantity<Energy>; every other nutrient uses Quantity<Mass>. Any field may
    // be null when the source data does not report that nutrient.
    private Quantity<Energy> calories;
    private Quantity<Energy> caloriesFromFat;
    private Quantity<Mass> totalFat;
    private Quantity<Mass> saturatedFat;
    private Quantity<Mass> transFat;
    private Quantity<Mass> cholesterol;
    private Quantity<Mass> sodium;
    private Quantity<Mass> totalCarbohydrate;
    private Quantity<Mass> dietaryFiber;
    private Quantity<Mass> sugars;
    private Quantity<Mass> protein;
    private Quantity<Mass> vitaminA;
    private Quantity<Mass> vitaminC;
    private Quantity<Mass> iron;
    private Quantity<Mass> calcium;
    private Quantity<Mass> potassium;
    private Quantity<Mass> thiamin;
    private Quantity<Mass> riboflavin;
    private Quantity<Mass> niacin;
    private Quantity<Mass> vitaminD;
    private Quantity<Mass> vitaminE;
    private Quantity<Mass> vitaminB6;
    private Quantity<Mass> folicAcid;
    private Quantity<Mass> vitaminB12;
    private Quantity<Mass> phosphorus;
    private Quantity<Mass> iodine;
    private Quantity<Mass> magnesium;
    private Quantity<Mass> zinc;
    private Quantity<Mass> copper;
    private Quantity<Mass> biotin;
    private Quantity<Mass> pantothenicAcid;
    /** Creates an empty entry; populate it through the setters. */
    public NutritionalValuesEntry() {
    }
    /**
     * Creates a fully populated entry; every argument is assigned to the
     * field of the same name.
     * NOTE(review): 31 positional parameters are easy to mis-order at call
     * sites — a builder would be safer; kept as-is for compatibility.
     */
    public NutritionalValuesEntry(Quantity<Energy> calories,
            Quantity<Energy> caloriesFromFat, Quantity<Mass> totalFat,
            Quantity<Mass> saturatedFat, Quantity<Mass> transFat,
            Quantity<Mass> cholesterol, Quantity<Mass> sodium,
            Quantity<Mass> totalCarbohydrate, Quantity<Mass> dietaryFiber,
            Quantity<Mass> sugars, Quantity<Mass> protein,
            Quantity<Mass> vitaminA, Quantity<Mass> vitaminC,
            Quantity<Mass> iron, Quantity<Mass> calcium,
            Quantity<Mass> potassium, Quantity<Mass> thiamin,
            Quantity<Mass> riboflavin, Quantity<Mass> niacin,
            Quantity<Mass> vitaminD, Quantity<Mass> vitaminE,
            Quantity<Mass> vitaminB6, Quantity<Mass> folicAcid,
            Quantity<Mass> vitaminB12, Quantity<Mass> phosphorus,
            Quantity<Mass> iodine, Quantity<Mass> magnesium,
            Quantity<Mass> zinc, Quantity<Mass> copper, Quantity<Mass> biotin,
            Quantity<Mass> pantothenicAcid) {
        this.calories = calories;
        this.caloriesFromFat = caloriesFromFat;
        this.totalFat = totalFat;
        this.saturatedFat = saturatedFat;
        this.transFat = transFat;
        this.cholesterol = cholesterol;
        this.sodium = sodium;
        this.totalCarbohydrate = totalCarbohydrate;
        this.dietaryFiber = dietaryFiber;
        this.sugars = sugars;
        this.protein = protein;
        this.vitaminA = vitaminA;
        this.vitaminC = vitaminC;
        this.iron = iron;
        this.calcium = calcium;
        this.potassium = potassium;
        this.thiamin = thiamin;
        this.riboflavin = riboflavin;
        this.niacin = niacin;
        this.vitaminD = vitaminD;
        this.vitaminE = vitaminE;
        this.vitaminB6 = vitaminB6;
        this.folicAcid = folicAcid;
        this.vitaminB12 = vitaminB12;
        this.phosphorus = phosphorus;
        this.iodine = iodine;
        this.magnesium = magnesium;
        this.zinc = zinc;
        this.copper = copper;
        this.biotin = biotin;
        this.pantothenicAcid = pantothenicAcid;
    }
    // --- Standard accessors: each getter returns the field of the same name
    // (possibly null) and each setter overwrites it. ---
    public Quantity<Energy> getCalories() {
        return calories;
    }
    public void setCalories(Quantity<Energy> calories) {
        this.calories = calories;
    }
    public Quantity<Energy> getCaloriesFromFat() {
        return caloriesFromFat;
    }
    public void setCaloriesFromFat(Quantity<Energy> caloriesFromFat) {
        this.caloriesFromFat = caloriesFromFat;
    }
    public Quantity<Mass> getTotalFat() {
        return totalFat;
    }
    public void setTotalFat(Quantity<Mass> totalFat) {
        this.totalFat = totalFat;
    }
    public Quantity<Mass> getSaturatedFat() {
        return saturatedFat;
    }
    public void setSaturatedFat(Quantity<Mass> saturatedFat) {
        this.saturatedFat = saturatedFat;
    }
    public Quantity<Mass> getTransFat() {
        return transFat;
    }
    public void setTransFat(Quantity<Mass> transFat) {
        this.transFat = transFat;
    }
    public Quantity<Mass> getCholesterol() {
        return cholesterol;
    }
    public void setCholesterol(Quantity<Mass> cholesterol) {
        this.cholesterol = cholesterol;
    }
    public Quantity<Mass> getSodium() {
        return sodium;
    }
    public void setSodium(Quantity<Mass> sodium) {
        this.sodium = sodium;
    }
    public Quantity<Mass> getTotalCarbohydrate() {
        return totalCarbohydrate;
    }
    public void setTotalCarbohydrate(Quantity<Mass> totalCarbohydrate) {
        this.totalCarbohydrate = totalCarbohydrate;
    }
    public Quantity<Mass> getDietaryFiber() {
        return dietaryFiber;
    }
    public void setDietaryFiber(Quantity<Mass> dietaryFiber) {
        this.dietaryFiber = dietaryFiber;
    }
    public Quantity<Mass> getSugars() {
        return sugars;
    }
    public void setSugars(Quantity<Mass> sugars) {
        this.sugars = sugars;
    }
    public Quantity<Mass> getProtein() {
        return protein;
    }
    public void setProtein(Quantity<Mass> protein) {
        this.protein = protein;
    }
    public Quantity<Mass> getVitaminA() {
        return vitaminA;
    }
    public void setVitaminA(Quantity<Mass> vitaminA) {
        this.vitaminA = vitaminA;
    }
    public Quantity<Mass> getVitaminC() {
        return vitaminC;
    }
    public void setVitaminC(Quantity<Mass> vitaminC) {
        this.vitaminC = vitaminC;
    }
    public Quantity<Mass> getIron() {
        return iron;
    }
    public void setIron(Quantity<Mass> iron) {
        this.iron = iron;
    }
    public Quantity<Mass> getCalcium() {
        return calcium;
    }
    public void setCalcium(Quantity<Mass> calcium) {
        this.calcium = calcium;
    }
    public Quantity<Mass> getPotassium() {
        return potassium;
    }
    public void setPotassium(Quantity<Mass> potassium) {
        this.potassium = potassium;
    }
    public Quantity<Mass> getThiamin() {
        return thiamin;
    }
    public void setThiamin(Quantity<Mass> thiamin) {
        this.thiamin = thiamin;
    }
    public Quantity<Mass> getRiboflavin() {
        return riboflavin;
    }
    public void setRiboflavin(Quantity<Mass> riboflavin) {
        this.riboflavin = riboflavin;
    }
    public Quantity<Mass> getNiacin() {
        return niacin;
    }
    public void setNiacin(Quantity<Mass> niacin) {
        this.niacin = niacin;
    }
    public Quantity<Mass> getVitaminD() {
        return vitaminD;
    }
    public void setVitaminD(Quantity<Mass> vitaminD) {
        this.vitaminD = vitaminD;
    }
    public Quantity<Mass> getVitaminE() {
        return vitaminE;
    }
    public void setVitaminE(Quantity<Mass> vitaminE) {
        this.vitaminE = vitaminE;
    }
    public Quantity<Mass> getVitaminB6() {
        return vitaminB6;
    }
    public void setVitaminB6(Quantity<Mass> vitaminB6) {
        this.vitaminB6 = vitaminB6;
    }
    public Quantity<Mass> getFolicAcid() {
        return folicAcid;
    }
    public void setFolicAcid(Quantity<Mass> folicAcid) {
        this.folicAcid = folicAcid;
    }
    public Quantity<Mass> getVitaminB12() {
        return vitaminB12;
    }
    public void setVitaminB12(Quantity<Mass> vitaminB12) {
        this.vitaminB12 = vitaminB12;
    }
    public Quantity<Mass> getPhosphorus() {
        return phosphorus;
    }
    public void setPhosphorus(Quantity<Mass> phosphorus) {
        this.phosphorus = phosphorus;
    }
    public Quantity<Mass> getIodine() {
        return iodine;
    }
    public void setIodine(Quantity<Mass> iodine) {
        this.iodine = iodine;
    }
    public Quantity<Mass> getMagnesium() {
        return magnesium;
    }
    public void setMagnesium(Quantity<Mass> magnesium) {
        this.magnesium = magnesium;
    }
    public Quantity<Mass> getZinc() {
        return zinc;
    }
    public void setZinc(Quantity<Mass> zinc) {
        this.zinc = zinc;
    }
    public Quantity<Mass> getCopper() {
        return copper;
    }
    public void setCopper(Quantity<Mass> copper) {
        this.copper = copper;
    }
    public Quantity<Mass> getBiotin() {
        return biotin;
    }
    public void setBiotin(Quantity<Mass> biotin) {
        this.biotin = biotin;
    }
    public Quantity<Mass> getPantothenicAcid() {
        return pantothenicAcid;
    }
    public void setPantothenicAcid(Quantity<Mass> pantothenicAcid) {
        this.pantothenicAcid = pantothenicAcid;
    }
}
| |
/*
* The Gemma project
*
* Copyright (c) 2012 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.gemma.persistence.service.analysis.expression.diff;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import net.sf.ehcache.config.*;
import net.sf.ehcache.config.PersistenceConfiguration.Strategy;
import net.sf.ehcache.config.TimeoutBehaviorConfiguration.TimeoutBehaviorType;
import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.ehcache.EhCacheManagerFactoryBean;
import org.springframework.stereotype.Component;
import ubic.gemma.model.analysis.expression.diff.DiffExprGeneSearchResult;
import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionValueObject;
import ubic.gemma.model.analysis.expression.diff.ExpressionAnalysisResultSet;
import ubic.gemma.persistence.util.Settings;
import java.io.Serializable;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
/**
* Cache for data from differential expression result queries.
*
* @author Paul
*/
@Component
public class DifferentialExpressionResultCacheImpl implements DifferentialExpressionResultCache, InitializingBean {
private static final String CACHE_NAME_BASE = "DiffExResultCache";
private static final int CACHE_DEFAULT_MAX_ELEMENTS = 1000000;
private static final int CACHE_DEFAULT_TIME_TO_LIVE = 10000;
private static final int CACHE_DEFAULT_TIME_TO_IDLE = 10000;
private static final boolean CACHE_DEFAULT_ETERNAL = true;
private static final boolean CACHE_DEFAULT_OVERFLOW_TO_DISK = false;
private static final String TOP_HIT_CACHE_NAME_BASE = "TopDiffExResultCache";
@Autowired
private EhCacheManagerFactoryBean cacheManagerFactory;
private Boolean enabled = true;
private Cache cache;
private Cache topHitsCache;
@Override
public void addToCache( DiffExprGeneSearchResult diffExForCache ) {
Long r = diffExForCache.getResultSetId();
Long g = diffExForCache.getGeneId();
cache.put( new Element( new CacheKey( r, g ), diffExForCache ) );
}
@Override
public void addToCache( Collection<DiffExprGeneSearchResult> diffExForCache ) {
for ( DiffExprGeneSearchResult d : diffExForCache ) {
this.addToCache( d );
}
}
@Override
public void clearCache() {
cache.removeAll();
topHitsCache.removeAll();
}
@Override
public void clearCache( Long resultSetId ) {
for ( Object o : cache.getKeys() ) {
CacheKey k = ( CacheKey ) o;
if ( k.resultSetId.equals( resultSetId ) ) {
cache.remove( k );
}
}
}
@Override
public void clearTopHitCache( Long resultSetId ) {
this.topHitsCache.remove( resultSetId );
}
@Override
public Collection<DiffExprGeneSearchResult> get( Long resultSet, Collection<Long> genes ) {
assert cache != null;
Collection<DiffExprGeneSearchResult> results = new HashSet<>();
for ( Long g : genes ) {
Element element = cache.get( new CacheKey( resultSet, g ) );
if ( element != null ) {
results.add( ( DiffExprGeneSearchResult ) element.getObjectValue() );
}
}
return results;
}
@Override
public DiffExprGeneSearchResult get( Long resultSet, Long g ) {
assert cache != null;
Element element = cache.get( new CacheKey( resultSet, g ) );
if ( element == null )
return null;
return ( DiffExprGeneSearchResult ) element.getObjectValue();
}
@Override
public Boolean isEnabled() {
return enabled;
}
@Override
public void setEnabled( Boolean enabled ) {
this.enabled = enabled;
}
@Override
public void addToTopHitsCache( ExpressionAnalysisResultSet resultSet,
List<DifferentialExpressionValueObject> items ) {
this.topHitsCache.put( new Element( resultSet.getId(), items ) );
}
@SuppressWarnings("unchecked")
@Override
public List<DifferentialExpressionValueObject> getTopHits( ExpressionAnalysisResultSet resultSet ) {
Element element = this.topHitsCache.get( resultSet );
if ( element == null )
return null;
return ( List<DifferentialExpressionValueObject> ) element.getObjectValue();
}
@Override
public void afterPropertiesSet() {
CacheManager cacheManager = cacheManagerFactory.getObject();
int maxElements = Settings.getInt( "gemma.cache.diffex.maxelements",
DifferentialExpressionResultCacheImpl.CACHE_DEFAULT_MAX_ELEMENTS );
int timeToLive = Settings.getInt( "gemma.cache.diffex.timetolive",
DifferentialExpressionResultCacheImpl.CACHE_DEFAULT_TIME_TO_LIVE );
int timeToIdle = Settings.getInt( "gemma.cache.diffex.timetoidle",
DifferentialExpressionResultCacheImpl.CACHE_DEFAULT_TIME_TO_IDLE );
boolean eternal = Settings.getBoolean( "gemma.cache.diffex.eternal",
DifferentialExpressionResultCacheImpl.CACHE_DEFAULT_ETERNAL ) && timeToLive == 0;
boolean terracottaEnabled = Settings.getBoolean( "gemma.cache.clustered", true );
boolean overFlowToDisk = Settings.getBoolean( "gemma.cache.diffex.usedisk",
DifferentialExpressionResultCacheImpl.CACHE_DEFAULT_OVERFLOW_TO_DISK );
boolean diskPersistent = Settings.getBoolean( "gemma.cache.diskpersistent", false ) && !terracottaEnabled;
if ( !cacheManager.cacheExists( DifferentialExpressionResultCacheImpl.CACHE_NAME_BASE ) ) {
/*
* See TerracottaConfiguration.
*/
int diskExpiryThreadIntervalSeconds = 600;
int maxElementsOnDisk = 10000;
boolean terracottaCoherentReads = false;
boolean clearOnFlush = false;
if ( terracottaEnabled ) {
CacheConfiguration config = new CacheConfiguration(
DifferentialExpressionResultCacheImpl.CACHE_NAME_BASE, maxElements );
config.setStatistics( false );
config.setMemoryStoreEvictionPolicy( MemoryStoreEvictionPolicy.LRU.toString() );
config.addPersistence( new PersistenceConfiguration().strategy( Strategy.NONE ) );
config.setEternal( eternal );
config.setTimeToIdleSeconds( timeToIdle );
config.setMaxElementsOnDisk( maxElementsOnDisk );
config.addTerracotta( new TerracottaConfiguration() );
//noinspection ConstantConditions // Better readability
config.getTerracottaConfiguration().setCoherentReads( terracottaCoherentReads );
//noinspection ConstantConditions // Better readability
config.clearOnFlush( clearOnFlush );
config.setTimeToLiveSeconds( timeToLive );
config.getTerracottaConfiguration().setClustered( true );
config.getTerracottaConfiguration().setValueMode( "SERIALIZATION" );
NonstopConfiguration nonstopConfiguration = new NonstopConfiguration();
TimeoutBehaviorConfiguration tobc = new TimeoutBehaviorConfiguration();
tobc.setType( TimeoutBehaviorType.NOOP.getTypeName() );
nonstopConfiguration.addTimeoutBehavior( tobc );
config.getTerracottaConfiguration().addNonstop( nonstopConfiguration );
this.cache = new Cache( config );
this.topHitsCache = new Cache( config );
this.topHitsCache.setName( DifferentialExpressionResultCacheImpl.TOP_HIT_CACHE_NAME_BASE );
} else {
this.cache = new Cache( DifferentialExpressionResultCacheImpl.CACHE_NAME_BASE, maxElements,
MemoryStoreEvictionPolicy.LRU, overFlowToDisk, null, eternal, timeToLive, timeToIdle,
diskPersistent, diskExpiryThreadIntervalSeconds, null );
this.topHitsCache = new Cache( DifferentialExpressionResultCacheImpl.TOP_HIT_CACHE_NAME_BASE,
maxElements, MemoryStoreEvictionPolicy.LRU, overFlowToDisk, null, eternal, timeToLive,
timeToIdle, diskPersistent, diskExpiryThreadIntervalSeconds, null );
}
cacheManager.addCache( cache );
cacheManager.addCache( topHitsCache );
}
}
}
class CacheKey implements Serializable {
private static final long serialVersionUID = 1453661277282349121L;
final Long resultSetId;
private final Long geneId;
CacheKey( Long resultSetId, Long geneId ) {
this.resultSetId = resultSetId;
this.geneId = geneId;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ( ( resultSetId == null ) ? 0 : resultSetId.hashCode() );
result = prime * result + ( ( geneId == null ) ? 0 : geneId.hashCode() );
return result;
}
@Override
public boolean equals( Object obj ) {
if ( this == obj )
return true;
if ( obj == null )
return false;
if ( this.getClass() != obj.getClass() )
return false;
CacheKey other = ( CacheKey ) obj;
if ( resultSetId == null ) {
if ( other.resultSetId != null )
return false;
} else if ( !resultSetId.equals( other.resultSetId ) )
return false;
if ( geneId == null ) {
return other.geneId == null;
} else
return geneId.equals( other.geneId );
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToChildBlockJoinQuery;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
public abstract class SortBuilder<T extends SortBuilder<T>> implements NamedWriteable, ToXContentObject, Rewriteable<SortBuilder<?>> {

    // default direction used when the request does not specify one
    protected SortOrder order = SortOrder.ASC;

    // parse fields common to more than one SortBuilder
    public static final ParseField ORDER_FIELD = new ParseField("order");
    public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter");
    public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path");

    // registry of specially-named sort parsers (script/geo/score); any other
    // element name falls through to FieldSortBuilder (see parseCompoundSortField)
    private static final Map<String, Parser<?>> PARSERS;
    static {
        Map<String, Parser<?>> parsers = new HashMap<>();
        parsers.put(ScriptSortBuilder.NAME, ScriptSortBuilder::fromXContent);
        parsers.put(GeoDistanceSortBuilder.NAME, GeoDistanceSortBuilder::fromXContent);
        parsers.put(GeoDistanceSortBuilder.ALTERNATIVE_NAME, GeoDistanceSortBuilder::fromXContent);
        parsers.put(ScoreSortBuilder.NAME, ScoreSortBuilder::fromXContent);
        // FieldSortBuilder gets involved if the user specifies a name that isn't one of these.
        PARSERS = unmodifiableMap(parsers);
    }

    /**
     * Create a @link {@link SortFieldAndFormat} from this builder.
     */
    public abstract SortFieldAndFormat build(QueryShardContext context) throws IOException;

    /**
     * Set the order of sorting.
     *
     * @param order the sort direction; must not be null
     * @return this builder, for chaining
     */
    @SuppressWarnings("unchecked")
    public T order(SortOrder order) {
        Objects.requireNonNull(order, "sort order cannot be null.");
        this.order = order;
        return (T) this;
    }

    /**
     * Return the {@link SortOrder} used for this {@link SortBuilder}.
     */
    public SortOrder order() {
        return this.order;
    }

    /**
     * Parses the "sort" section of a search request. Accepts a single string
     * (a field name or "_score"), a single object, or an array mixing both forms.
     *
     * @throws IllegalArgumentException on any other token type
     */
    public static List<SortBuilder<?>> fromXContent(XContentParser parser) throws IOException {
        List<SortBuilder<?>> sortFields = new ArrayList<>(2);
        XContentParser.Token token = parser.currentToken();
        if (token == XContentParser.Token.START_ARRAY) {
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                if (token == XContentParser.Token.START_OBJECT) {
                    parseCompoundSortField(parser, sortFields);
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    String fieldName = parser.text();
                    sortFields.add(fieldOrScoreSort(fieldName));
                } else {
                    throw new IllegalArgumentException("malformed sort format, "
                        + "within the sort array, an object, or an actual string are allowed");
                }
            }
        } else if (token == XContentParser.Token.VALUE_STRING) {
            String fieldName = parser.text();
            sortFields.add(fieldOrScoreSort(fieldName));
        } else if (token == XContentParser.Token.START_OBJECT) {
            parseCompoundSortField(parser, sortFields);
        } else {
            throw new IllegalArgumentException("malformed sort format, either start with array, object, or an actual string");
        }
        return sortFields;
    }

    // "_score" maps to a score sort; any other name is treated as a document field
    private static SortBuilder<?> fieldOrScoreSort(String fieldName) {
        if (fieldName.equals(ScoreSortBuilder.NAME)) {
            return new ScoreSortBuilder();
        } else {
            return new FieldSortBuilder(fieldName);
        }
    }

    /**
     * Parses one object of the sort section. Each entry is either
     * {@code "field": "asc|desc"} or {@code "name": { ... }}, where the name
     * selects a registered parser from PARSERS or falls back to a field sort.
     */
    private static void parseCompoundSortField(XContentParser parser, List<SortBuilder<?>> sortFields)
        throws IOException {
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                String fieldName = parser.currentName();
                token = parser.nextToken();
                if (token == XContentParser.Token.VALUE_STRING) {
                    // shorthand form: the value is just the sort order
                    SortOrder order = SortOrder.fromString(parser.text());
                    sortFields.add(fieldOrScoreSort(fieldName).order(order));
                } else {
                    if (PARSERS.containsKey(fieldName)) {
                        sortFields.add(PARSERS.get(fieldName).fromXContent(parser, fieldName));
                    } else {
                        sortFields.add(FieldSortBuilder.fromXContent(parser, fieldName));
                    }
                }
            }
        }
    }

    /**
     * Builds the Lucene {@link Sort} plus per-field doc value formats for the
     * given builders.
     *
     * @return empty when no sorting is needed, i.e. the only sort is on
     *         non-reversed score (Lucene's default ordering)
     */
    public static Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sortBuilders, QueryShardContext context) throws IOException {
        List<SortField> sortFields = new ArrayList<>(sortBuilders.size());
        List<DocValueFormat> sortFormats = new ArrayList<>(sortBuilders.size());
        for (SortBuilder<?> builder : sortBuilders) {
            SortFieldAndFormat sf = builder.build(context);
            sortFields.add(sf.field);
            sortFormats.add(sf.format);
        }
        if (!sortFields.isEmpty()) {
            // optimize if we just sort on score non reversed, we don't really
            // need sorting
            boolean sort;
            if (sortFields.size() > 1) {
                sort = true;
            } else {
                SortField sortField = sortFields.get(0);
                if (sortField.getType() == SortField.Type.SCORE && !sortField.getReverse()) {
                    sort = false;
                } else {
                    sort = true;
                }
            }
            if (sort) {
                return Optional.of(new SortAndFormats(
                    new Sort(sortFields.toArray(new SortField[sortFields.size()])),
                    sortFormats.toArray(new DocValueFormat[sortFormats.size()])));
            }
        }
        return Optional.empty();
    }

    // legacy entry point for single-level nested sorting (nested_path/nested_filter)
    protected static Nested resolveNested(QueryShardContext context, String nestedPath, QueryBuilder nestedFilter) throws IOException {
        NestedSortBuilder nestedSortBuilder = new NestedSortBuilder(nestedPath);
        nestedSortBuilder.setFilter(nestedFilter);
        return resolveNested(context, nestedSortBuilder);
    }

    protected static Nested resolveNested(QueryShardContext context, NestedSortBuilder nestedSort) throws IOException {
        return resolveNested(context, nestedSort, null);
    }

    /**
     * Recursively resolves a (possibly multi-level) nested sort into a
     * {@link Nested} holding the parent bitset filter and child query of the
     * innermost level. The {@code nested} argument carries the result of the
     * previous (outer) level, or null at the top of the recursion.
     * NOTE: the nextLevel/previousLevel calls on the nested scope are strictly
     * paired; statement order here is load-bearing.
     */
    private static Nested resolveNested(QueryShardContext context, NestedSortBuilder nestedSort, Nested nested) throws IOException {
        if (nestedSort == null || nestedSort.getPath() == null) {
            return null;
        }
        String nestedPath = nestedSort.getPath();
        QueryBuilder nestedFilter = nestedSort.getFilter();
        NestedSortBuilder nestedNestedSort = nestedSort.getNestedSort();
        // verify our nested path
        ObjectMapper nestedObjectMapper = context.getObjectMapper(nestedPath);
        if (nestedObjectMapper == null) {
            throw new QueryShardException(context, "[nested] failed to find nested object under path [" + nestedPath + "]");
        }
        if (!nestedObjectMapper.nested().isNested()) {
            throw new QueryShardException(context, "[nested] nested object under path [" + nestedPath + "] is not of nested type");
        }
        // get our parent query which will determines our parent documents
        Query parentQuery;
        ObjectMapper objectMapper = context.nestedScope().getObjectMapper();
        if (objectMapper == null) {
            parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated());
        } else {
            parentQuery = objectMapper.nestedTypeFilter();
        }
        // get our child query, potentially applying a users filter
        Query childQuery;
        try {
            context.nestedScope().nextLevel(nestedObjectMapper);
            if (nestedFilter != null) {
                assert nestedFilter == Rewriteable.rewrite(nestedFilter, context) : "nested filter is not rewritten";
                if (nested == null) {
                    // this is for back-compat, original single level nested sorting never applied a nested type filter
                    childQuery = nestedFilter.toFilter(context);
                } else {
                    childQuery = Queries.filtered(nestedObjectMapper.nestedTypeFilter(), nestedFilter.toFilter(context));
                }
            } else {
                childQuery = nestedObjectMapper.nestedTypeFilter();
            }
        } finally {
            context.nestedScope().previousLevel();
        }
        // apply filters from the previous nested level
        if (nested != null) {
            parentQuery = Queries.filtered(parentQuery,
                new ToParentBlockJoinQuery(nested.getInnerQuery(), nested.getRootFilter(), ScoreMode.None));
            if (objectMapper != null) {
                childQuery = Queries.filtered(childQuery,
                    new ToChildBlockJoinQuery(nested.getInnerQuery(), context.bitsetFilter(objectMapper.nestedTypeFilter())));
            }
        }
        // wrap up our parent and child and either process the next level of nesting or return
        final Nested innerNested = new Nested(context.bitsetFilter(parentQuery), childQuery);
        if (nestedNestedSort != null) {
            try {
                context.nestedScope().nextLevel(nestedObjectMapper);
                return resolveNested(context, nestedNestedSort, innerNested);
            } finally {
                context.nestedScope().previousLevel();
            }
        } else {
            return innerNested;
        }
    }

    /**
     * Parses the nested_filter sub-query, converting any parse failure into a
     * {@link ParsingException} pointing at the current token location.
     */
    protected static QueryBuilder parseNestedFilter(XContentParser parser) {
        try {
            return parseInnerQueryBuilder(parser);
        } catch (Exception e) {
            throw new ParsingException(parser.getTokenLocation(), "Expected " + NESTED_FILTER_FIELD.getPreferredName() + " element.", e);
        }
    }

    // signature shared by the per-name sort parsers registered in PARSERS
    @FunctionalInterface
    private interface Parser<T extends SortBuilder<?>> {
        T fromXContent(XContentParser parser, String elementName) throws IOException;
    }

    @Override
    public String toString() {
        return Strings.toString(this, true, true);
    }
}
| |
/*
* Copyright (c) 2009 Concurrent, Inc.
*
* This work has been released into the public domain
* by the copyright holder. This applies worldwide.
*
* In case this is not legally possible:
* The copyright holder grants any entity the right
* to use this work for any purpose, without any
* conditions, unless such conditions are required by law.
*/
package parallelai.spyglass.hbase;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.RecordReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.twitter.elephantbird.mapred.input.DeprecatedInputFormatValueCopier;
import com.twitter.elephantbird.mapred.input.DeprecatedInputFormatWrapper;
import cascading.flow.FlowProcess;
import cascading.scheme.Scheme;
import cascading.scheme.SinkCall;
import cascading.scheme.SourceCall;
import cascading.tap.Tap;
import cascading.tuple.Fields;
import cascading.tuple.Tuple;
import cascading.tuple.TupleEntry;
import cascading.util.Util;
/**
* It provides the wiring between Fields and Columns and Column families
* In effect to write to cf:column
*
* data:name data:surname address: street
* name1 surname1 address1
*
* We will initialize the HBaseSource with
* ("data","data","data")
* ("name","surname","address")
* Data:
* ("name1","surname1","address1")
* ...
*
* The HBaseRawScheme class is a {@link Scheme} subclass. It is used in conjunction
* with the {@HBaseRawTap} to allow for the reading and writing of data
* to and from a HBase cluster.
*
* @see HBaseRawTap
*/
@SuppressWarnings({ "rawtypes", "deprecation" })
public class HBaseRawScheme extends Scheme<JobConf, RecordReader, OutputCollector, Object[], Object[]> {

    private static final long serialVersionUID = 6248976486883281356L;

    /** Field LOG */
    private static final Logger LOG = LoggerFactory.getLogger(HBaseRawScheme.class);

    /** Tuple field carrying the HBase row key as an ImmutableBytesWritable. */
    public final Fields RowKeyField = new Fields("rowkey");
    /** Tuple field carrying the whole HBase Result when sourcing. */
    public final Fields RowField = new Fields("row");

    /** Column families; used as defaults for sunk fields without a "family:" prefix. */
    private String[] familyNames;
    /** When true (default), null tuple values are still written as empty cells. */
    private boolean writeNulls = true;

    /**
     * Constructor HBaseScheme creates a new HBaseScheme instance.
     *
     * @param familyName of type String
     */
    public HBaseRawScheme(String familyName) {
        this(new String[] { familyName });
    }

    public HBaseRawScheme(String[] familyNames) {
        this.familyNames = familyNames;
        setSourceFields();
    }

    public HBaseRawScheme(String familyName, boolean writeNulls) {
        this(new String[] { familyName }, writeNulls);
    }

    public HBaseRawScheme(String[] familyNames, boolean writeNulls) {
        this.familyNames = familyNames;
        this.writeNulls = writeNulls;
        setSourceFields();
    }

    // the source side always emits exactly (rowkey, row)
    private void setSourceFields() {
        Fields sourceFields = Fields.join(RowKeyField, RowField);
        setSourceFields(sourceFields);
    }

    /**
     * Method getFamilyNames returns the set of familyNames of this HBaseScheme
     * object (duplicates removed; order not preserved).
     *
     * @return the familyNames (type String[]) of this HBaseScheme object.
     */
    public String[] getFamilyNames() {
        HashSet<String> familyNameSet = new HashSet<String>();
        if (familyNames != null) {
            familyNameSet.addAll(Arrays.asList(familyNames));
        }
        return familyNameSet.toArray(new String[0]);
    }

    @Override
    public void sourcePrepare(FlowProcess<JobConf> flowProcess, SourceCall<Object[], RecordReader> sourceCall) {
        // reusable key/value holders for the record reader, kept in the call context
        Object[] pair = new Object[] { sourceCall.getInput().createKey(), sourceCall.getInput().createValue() };
        sourceCall.setContext(pair);
    }

    @Override
    public void sourceCleanup(FlowProcess<JobConf> flowProcess, SourceCall<Object[], RecordReader> sourceCall) {
        sourceCall.setContext(null);
    }

    /**
     * Reads the next (rowkey, Result) pair into the incoming tuple entry.
     *
     * @return false when the input is exhausted, true otherwise (null records
     *         are skipped but still return true so iteration continues)
     */
    @SuppressWarnings("unchecked")
    @Override
    public boolean source(FlowProcess<JobConf> flowProcess, SourceCall<Object[], RecordReader> sourceCall)
            throws IOException {
        Tuple result = new Tuple();
        Object key = sourceCall.getContext()[0];
        Object value = sourceCall.getContext()[1];
        boolean hasNext = sourceCall.getInput().next(key, value);
        if (!hasNext) {
            return false;
        }
        // Skip nulls
        if (key == null || value == null) {
            return true;
        }
        ImmutableBytesWritable keyWritable = (ImmutableBytesWritable) key;
        Result row = (Result) value;
        result.add(keyWritable);
        result.add(row);
        sourceCall.getIncomingEntry().setTuple(result);
        return true;
    }

    /**
     * Writes one tuple as an HBase Put. The "rowkey" field becomes the row key;
     * every other field is written as a cell, its column family taken from the
     * "family:qualifier" field name or, failing that, from {@link #familyNames}.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void sink(FlowProcess<JobConf> flowProcess, SinkCall<Object[], OutputCollector> sinkCall) throws IOException {
        TupleEntry tupleEntry = sinkCall.getOutgoingEntry();
        OutputCollector outputCollector = sinkCall.getOutput();
        Tuple key = tupleEntry.selectTuple(RowKeyField);
        Object okey = key.getObject(0);
        ImmutableBytesWritable keyBytes = getBytes(okey);
        Put put = new Put(keyBytes.get());
        Fields outFields = tupleEntry.getFields().subtract(RowKeyField);
        if (null != outFields) {
            TupleEntry values = tupleEntry.selectEntry(outFields);
            for (int n = 0; n < values.getFields().size(); n++) {
                Object o = values.get(n);
                ImmutableBytesWritable valueBytes = getBytes(o);
                Comparable field = outFields.get(n);
                ColumnName cn = parseColumn((String) field);
                if (null == cn.family) {
                    // no explicit family in the field name: map positionally onto
                    // familyNames, reusing the last family for any overflow fields
                    if (n >= familyNames.length)
                        cn.family = familyNames[familyNames.length - 1];
                    else
                        cn.family = familyNames[n];
                }
                if (null != o || writeNulls)
                    put.add(Bytes.toBytes(cn.family), Bytes.toBytes(cn.name), valueBytes.get());
            }
        }
        outputCollector.collect(null, put);
    }

    /**
     * Converts a tuple value to bytes; nulls become empty byte arrays.
     *
     * @throws IllegalArgumentException for unsupported value types
     */
    private ImmutableBytesWritable getBytes(Object obj) {
        if (null == obj)
            return new ImmutableBytesWritable(new byte[0]);
        if (obj instanceof ImmutableBytesWritable)
            return (ImmutableBytesWritable) obj;
        else if (obj instanceof String)
            return new ImmutableBytesWritable(Bytes.toBytes((String) obj));
        else if (obj instanceof Long)
            return new ImmutableBytesWritable(Bytes.toBytes((Long) obj));
        else if (obj instanceof Integer)
            return new ImmutableBytesWritable(Bytes.toBytes((Integer) obj));
        else if (obj instanceof Short)
            return new ImmutableBytesWritable(Bytes.toBytes((Short) obj));
        else if (obj instanceof Boolean)
            return new ImmutableBytesWritable(Bytes.toBytes((Boolean) obj));
        else if (obj instanceof Double)
            return new ImmutableBytesWritable(Bytes.toBytes((Double) obj));
        else
            throw new IllegalArgumentException("cannot convert object to ImmutableBytesWritable, class="
                    + obj.getClass().getName());
    }

    // splits "family:qualifier"; a name without ':' (or starting with it) gets no family
    private ColumnName parseColumn(String column) {
        ColumnName ret = new ColumnName();
        int pos = column.indexOf(":");
        if (pos > 0) {
            ret.name = column.substring(pos + 1);
            ret.family = column.substring(0, pos);
        } else {
            ret.name = column;
        }
        return ret;
    }

    /**
     * Holder for a parsed "family:qualifier" column name. Declared static so
     * instances carry no hidden reference to the (serializable) enclosing scheme.
     */
    private static class ColumnName {
        String family;
        String name;

        ColumnName() {
        }
    }

    @Override
    public void sinkConfInit(FlowProcess<JobConf> process, Tap<JobConf, RecordReader, OutputCollector> tap, JobConf conf) {
        conf.setOutputFormat(TableOutputFormat.class);
        conf.setOutputKeyClass(ImmutableBytesWritable.class);
        conf.setOutputValueClass(Put.class);
    }

    @Override
    public void sourceConfInit(FlowProcess<JobConf> process, Tap<JobConf, RecordReader, OutputCollector> tap,
            JobConf conf) {
        // bridge the new-API TableInputFormat into the old mapred API
        DeprecatedInputFormatWrapper.setInputFormat(org.apache.hadoop.hbase.mapreduce.TableInputFormat.class, conf,
                ValueCopier.class);
        if (null != familyNames) {
            String columns = Util.join(this.familyNames, " ");
            LOG.debug("sourcing from column families: {}", columns);
            conf.set(org.apache.hadoop.hbase.mapreduce.TableInputFormat.SCAN_COLUMNS, columns);
        }
    }

    @Override
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        }
        if (object == null || getClass() != object.getClass()) {
            return false;
        }
        if (!super.equals(object)) {
            return false;
        }
        HBaseRawScheme that = (HBaseRawScheme) object;
        if (!Arrays.equals(familyNames, that.familyNames)) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (familyNames != null ? Arrays.hashCode(familyNames) : 0);
        return result;
    }

    /** Copies record-reader values for the deprecated-API wrapper. */
    public static class ValueCopier implements DeprecatedInputFormatValueCopier<Result> {

        public ValueCopier() {
        }

        public void copyValue(Result oldValue, Result newValue) {
            if (null != oldValue && null != newValue) {
                oldValue.copyFrom(newValue);
            }
        }
    }
}
| |
/*
* Copyright 2016 Code Above Lab LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codeabovelab.dm.cluman.cluster.registry;
import com.codeabovelab.dm.cluman.cluster.docker.model.ContainerConfig;
import com.codeabovelab.dm.cluman.cluster.registry.data.*;
import com.codeabovelab.dm.cluman.cluster.registry.model.RegistryAdapter;
import com.codeabovelab.dm.cluman.cluster.registry.model.RegistryConfig;
import com.codeabovelab.dm.cluman.cluster.registry.model.RegistryCredentials;
import com.codeabovelab.dm.cluman.model.*;
import com.codeabovelab.dm.common.utils.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.*;
import org.springframework.util.Assert;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.HttpStatusCodeException;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.util.UriComponentsBuilder;
import java.net.ConnectException;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.function.Consumer;
import static org.springframework.web.util.UriComponentsBuilder.newInstance;
/**
*/
@Slf4j
abstract class AbstractV2RegistryService implements RegistryService {
    /** Adapter supplying the registry URL, REST template and configuration. */
    private final RegistryAdapter adapter;
    // NOTE(review): this cache is keyed by a raw String[]; arrays use identity
    // equals/hashCode, so each new String[]{name, id} is a distinct key. As
    // written the cache can never hit and, being non-expiring, grows without
    // bound — consider a List<String> or a small value key. TODO confirm & fix.
    private final LoadingCache<String[], ImageDescriptor> descriptorCache;
    // Optional sink for registry events; may be null (see fireEvent).
    private Consumer<RegistryEvent> eventConsumer;

    AbstractV2RegistryService(RegistryAdapter adapter) {
        this.adapter = adapter;
        // we use non expired cache, because imageId is descriptor hash, and it cannot be modified
        this.descriptorCache = CacheBuilder.<String[], ImageDescriptor>newBuilder()
          .build(new CacheLoader<String[], ImageDescriptor>() {
              @Override
              public ImageDescriptor load(String[] key) throws Exception {
                  // key is expected to be {imageName, imageId}
                  Assert.isTrue(key.length == 2, "key array must have tho items");
                  return getDescriptor(key[0], key[1]);
              }
          });
    }
public Consumer<RegistryEvent> getEventConsumer() {
return eventConsumer;
}
public void setEventConsumer(Consumer<RegistryEvent> eventConsumer) {
this.eventConsumer = eventConsumer;
}
@Override
public ImageCatalog getCatalog() {
try {
ImageCatalog catalog = getRestTemplate().getForObject(makeBaseUrl().path("_catalog").build().toUri(), ImageCatalog.class);
online();
return catalog;
} catch (Exception e) {
checkOnline(e);
log.error("Can't fetch catalog from {}", getConfig(), e);
return null;
}
}
    // marks the registry as reachable by clearing any stored error message
    private void online() {
        toggleOnline(null);
    }
/**
* Do analysing of exception for connection errors, which is mean offline status
*
* @param e
*/
private void checkOnline(Exception e) {
ConnectException conn = Throwables.find(e, ConnectException.class);
if (conn != null) {
toggleOnline(conn.toString());
}
}
@Override
public boolean checkHealth() {
String error = null;
RegistryConfig config = getConfig();
try {
getRestTemplate().getForObject(makeBaseUrl().build().toUri(), String.class);
} catch (Exception e) {
log.error("Can't fetch catalog from {}", config, e);
error = e.getMessage();
}
toggleOnline(error);
return error == null;
}
private void toggleOnline(String error) {
RegistryConfig config = getConfig();
String oldMessage = config.getErrorMessage();
boolean online = error == null;
if (!Objects.equals(oldMessage, error)) {
//error is changed, so we need to send event
fireEvent(RegistryEvent.builder()
.action(online ? StandardActions.ONLINE : StandardActions.OFFLINE)
.severity(online ? Severity.INFO : Severity.ERROR)
.message(error));
}
config.setErrorMessage(error);
}
void fireEvent(RegistryEvent.Builder reb) {
if (eventConsumer == null) {
return;
}
reb.setName(getConfig().getName());
if (reb.getSeverity() == null) {
reb.setSeverity(Severity.INFO);
}
eventConsumer.accept(reb.build());
}
private UriComponentsBuilder makeBaseUrl() {
try {
return newInstance().uri(new URI(adapter.getUrl())).path("/v2/");
} catch (URISyntaxException e) {
throw Throwables.asRuntime(e);
}
}
@Override
public Tags getTags(String name) {
try {
Tags tags = getRestTemplate().getForObject(forName(name).path("/tags/list").build().toUri(),
Tags.class);
online();
List<String> tagList = tags.getTags();
if (tagList != null) {
tagList.sort(ImageNameComparator.getTagsComparator());
}
return tags;
} catch (Exception e) {
checkOnline(e);
log.error("Can't fetch tags for {} from {}", name, getConfig(), e);
return null;
}
}
private UriComponentsBuilder forName(String name) {
UriComponentsBuilder ucb = makeBaseUrl();
String processed = adapter.adaptNameForUrl(toRelative(name));
return ucb.path(processed);
}
@Override
public String toRelative(String name) {
String registryName = getConfig().getName();
int len = registryName.length();
if(name.length() > len + 1 && name.startsWith(registryName) && name.charAt(len) == '/') {
// remove registry name + slash
return name.substring(registryName.length() + 1);
}
return name;
}
//DELETE /v2/<name>/manifests/<reference>
/**
* @param name
* @param reference must be digest!!!
*/
@Override
public void deleteTag(String name, String reference) {
getRestTemplate().delete(forName(name).path("/manifests/").path(reference).build().toUri());
}
//"{protocol}://{host}:{port}/v2/{name}/manifests/{reference}
private Manifest getManifest(String name, String reference) {
HttpHeaders headers = new HttpHeaders();
headers.setAccept(Arrays.asList(
new MediaType("application", "vnd.docker.distribution.manifest.v2+json"),
new MediaType("application", "vnd.docker.distribution.manifest.v2+prettyjws")
));
HttpEntity entity = new HttpEntity<>(headers);
URI uri = forName(name).path("/manifests/").path(reference).build().toUri();
try {
ResponseEntity<Manifest> exchange = getRestTemplate().exchange(uri, HttpMethod.GET, entity, Manifest.class);
return exchange.getBody();
} catch (HttpClientErrorException e) {
if (e.getStatusCode() == HttpStatus.NOT_FOUND) {
return null;
}
log.error("can't fetch manifest from {} by {}", uri, e.getMessage());
throw e;
}
}
//{protocol}://{host}:{port}/v2/{name}/blobs/{digest}
/**
 * Fetch a blob from the registry and deserialize it into the given type.
 *
 * @param name   repository name
 * @param digest blob digest (e.g. "sha256:...")
 * @param type   target class for the response body
 * @return the deserialized blob content
 */
private <T> T getBlob(String name, String digest, Class<T> type) {
    return getRestTemplate().getForObject(forName(name).path("/blobs/").path(digest).build().toUri(), type);
}
/**
 * Resolve an image descriptor from a full image name
 * (name possibly including registry prefix, plus tag).
 */
public ImageDescriptor getImage(String fullImageName) {
    ImageName parsed = ImageName.parse(fullImageName);
    return getImage(parsed.getName(), parsed.getTag());
}
/**
 * Resolve an image descriptor for the given repository and reference.
 *
 * @param name      repository name
 * @param reference tag name or digest
 * @return the descriptor, or {@code null} when no image exists for the reference
 */
@Override
public ImageDescriptor getImage(String name, String reference) {
    String imageId = getImageId(name, reference);
    if (imageId == null) {
        // tag or digest not present in this registry
        return null;
    }
    try {
        // descriptors are cached under the (name, imageId) pair
        return this.descriptorCache.get(new String[]{name, imageId});
    } catch (ExecutionException e) {
        // unwrap the cache loader's exception before rethrowing
        throw Throwables.asRuntime(e.getCause());
    }
}
/**
 * Load the image config blob and convert it into an immutable descriptor.
 * <p>
 * NOTE(review): assumes the config blob always carries a container config —
 * a null {@code getContainerConfig()} would raise an NPE at
 * {@code cc.getLabels()}; confirm this invariant for all supported manifests.
 */
private ImageDescriptor getDescriptor(String name, String imageId) {
    ImageData imageData = getBlob(name, imageId, ImageData.class);
    ContainerConfig cc = imageData.getContainerConfig();
    return ImageDescriptorImpl.builder()
      .id(imageId)
      .containerConfig(cc)
      .created(imageData.getCreated())
      .labels(cc.getLabels())
      .build();
}
/**
 * Give image id (config blob digest) for the specified tag.
 *
 * @param name      repository name
 * @param reference tag name or digest
 * @return the config digest, or {@code null} when the tag is unknown
 *         or the manifest has an outdated (schema v1) format
 */
protected String getImageId(String name, String reference) {
    Manifest manifest = getManifest(name, reference);
    if (manifest == null) {
        // happens when no image carries this tag
        return null;
    }
    Manifest.Entry configEntry = manifest.getConfig();
    if (configEntry == null) {
        // schema v1 manifests have no config entry
        log.warn("Manifest has outdated version for {}: {}", name, reference);
        return null;
    }
    return configEntry.getDigest();
}
/**
 * Search the registry; implementation is registry-specific
 * (different registry types expose different search APIs).
 */
@Override
public abstract SearchResult search(String searchTerm, int page, int count);
/** Configuration of this registry, delegated to the adapter. */
public RegistryConfig getConfig() {
    return adapter.getConfig();
}
/** REST client used for all registry HTTP calls, delegated to the adapter. */
RestTemplate getRestTemplate() {
    return adapter.getRestTemplate();
}
/**
 * Log an HTTP error status from the registry in a readable form.
 * The response body is deliberately not included because it may contain HTML.
 */
protected void processStatusCodeException(HttpStatusCodeException e) {
    String message;
    try {
        message = MessageFormat.format("Response from server: {0} {1}",
          e.getStatusCode().value(),
          e.getStatusText());
        //we do not read error body, because it contains html code in some cases
    } catch (Exception ex) {
        // formatting failed - fall back to the bare status text
        message = e.getStatusText();
    }
    log.error("Error from server: {}", message, e);
}
/** Credentials used against this registry, delegated to the adapter. */
@Override
public RegistryCredentials getCredentials() {
    return adapter.getCredentials();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.termvectors;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocumentRequest;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.VersionType;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
/**
 * Request returning the term vector (doc frequency, positions, offsets) for a
 * document.
 * <p/>
 * Note, the {@link #index()}, {@link #type(String)} and {@link #id(String)} are
 * required.
 */
public class TermVectorsRequest extends SingleShardOperationRequest<TermVectorsRequest> implements DocumentRequest<TermVectorsRequest> {

    private String type;

    private String id;

    // artificial document source; mutually exclusive with id when parsed from REST
    private BytesReference doc;

    private String routing;

    private VersionType versionType = VersionType.INTERNAL;

    private long version = Versions.MATCH_ANY;

    protected String preference;

    // source of generated ids for artificial documents (used for routing only)
    private static final AtomicInteger randomInt = new AtomicInteger(0);

    // TODO: change to String[]
    private Set<String> selectedFields;

    // tri-state: null means "use the default" (realtime = true)
    Boolean realtime;

    private Map<String, String> perFieldAnalyzer;

    private EnumSet<Flag> flagsEnum = EnumSet.of(Flag.Positions, Flag.Offsets, Flag.Payloads,
            Flag.FieldStatistics);

    long startTime;

    public TermVectorsRequest() {
    }

    /**
     * Constructs a new term vector request for a document that will be fetch
     * from the provided index. Use {@link #type(String)} and
     * {@link #id(String)} to specify the document to load.
     */
    public TermVectorsRequest(String index, String type, String id) {
        super(index);
        this.id = id;
        this.type = type;
    }

    /**
     * Copy constructor: duplicates every request setting of {@code other},
     * including the artificial document (if any).
     */
    public TermVectorsRequest(TermVectorsRequest other) {
        super(other.index());
        this.id = other.id();
        this.type = other.type();
        // BUGFIX: check the SOURCE request's doc - this.doc is always null at
        // this point, so the original check meant the artificial document was
        // never copied.
        if (other.doc() != null) {
            this.doc = other.doc().copyBytesArray();
        }
        this.flagsEnum = other.getFlags().clone();
        this.preference = other.preference();
        this.routing = other.routing();
        if (other.selectedFields != null) {
            this.selectedFields = new HashSet<>(other.selectedFields);
        }
        if (other.perFieldAnalyzer != null) {
            this.perFieldAnalyzer = new HashMap<>(other.perFieldAnalyzer);
        }
        this.realtime = other.realtime();
        this.version = other.version();
        this.versionType = VersionType.fromValue(other.versionType().getValue());
        this.startTime = other.startTime();
    }

    /**
     * Constructs a request from a multi-get item, copying index, type, id,
     * selected fields and routing.
     */
    public TermVectorsRequest(MultiGetRequest.Item item) {
        super(item.index());
        this.id = item.id();
        this.type = item.type();
        this.selectedFields(item.fields());
        this.routing(item.routing());
    }

    /**
     * Returns the set of currently enabled term-vector flags.
     */
    public EnumSet<Flag> getFlags() {
        return flagsEnum;
    }

    /**
     * Sets the type of document to get the term vector for.
     */
    public TermVectorsRequest type(String type) {
        this.type = type;
        return this;
    }

    /**
     * Returns the type of document to get the term vector for.
     */
    @Override
    public String type() {
        return type;
    }

    /**
     * Returns the id of document the term vector is requested for.
     */
    @Override
    public String id() {
        return id;
    }

    /**
     * Sets the id of document the term vector is requested for.
     */
    public TermVectorsRequest id(String id) {
        this.id = id;
        return this;
    }

    /**
     * Returns the artificial document from which term vectors are requested for.
     */
    public BytesReference doc() {
        return doc;
    }

    /**
     * Sets an artificial document from which term vectors are requested for.
     */
    public TermVectorsRequest doc(XContentBuilder documentBuilder) {
        return this.doc(documentBuilder.bytes(), true);
    }

    /**
     * Sets an artificial document from which term vectors are requested for.
     */
    public TermVectorsRequest doc(BytesReference doc, boolean generateRandomId) {
        // assign a random id to this artificial document, for routing
        if (generateRandomId) {
            this.id(String.valueOf(randomInt.getAndAdd(1)));
        }
        this.doc = doc;
        return this;
    }

    /**
     * @return The routing for this request.
     */
    @Override
    public String routing() {
        return routing;
    }

    @Override
    public TermVectorsRequest routing(String routing) {
        this.routing = routing;
        return this;
    }

    /**
     * Sets the parent id of this document. Will simply set the routing to this
     * value, as it is only used for routing with delete requests.
     */
    public TermVectorsRequest parent(String parent) {
        if (routing == null) {
            routing = parent;
        }
        return this;
    }

    /**
     * Returns the preference used to select the shard to execute on.
     */
    public String preference() {
        return this.preference;
    }

    /**
     * Sets the preference to execute the search. Defaults to randomize across
     * shards. Can be set to <tt>_local</tt> to prefer local shards,
     * <tt>_primary</tt> to execute only on primary shards, or a custom value,
     * which guarantees that the same order will be used across different
     * requests.
     */
    public TermVectorsRequest preference(String preference) {
        this.preference = preference;
        return this;
    }

    /**
     * Return the start and stop offsets for each term if they were stored or
     * skip offsets.
     */
    public TermVectorsRequest offsets(boolean offsets) {
        setFlag(Flag.Offsets, offsets);
        return this;
    }

    /**
     * @return <code>true</code> if term offsets should be returned. Otherwise
     * <code>false</code>
     */
    public boolean offsets() {
        return flagsEnum.contains(Flag.Offsets);
    }

    /**
     * Return the positions for each term if stored or skip.
     */
    public TermVectorsRequest positions(boolean positions) {
        setFlag(Flag.Positions, positions);
        return this;
    }

    /**
     * @return Returns if the positions for each term should be returned if
     * stored or skip.
     */
    public boolean positions() {
        return flagsEnum.contains(Flag.Positions);
    }

    /**
     * @return <code>true</code> if term payloads should be returned. Otherwise
     * <code>false</code>
     */
    public boolean payloads() {
        return flagsEnum.contains(Flag.Payloads);
    }

    /**
     * Return the payloads for each term or skip.
     */
    public TermVectorsRequest payloads(boolean payloads) {
        setFlag(Flag.Payloads, payloads);
        return this;
    }

    /**
     * @return <code>true</code> if term statistics should be returned.
     * Otherwise <code>false</code>
     */
    public boolean termStatistics() {
        return flagsEnum.contains(Flag.TermStatistics);
    }

    /**
     * Return the term statistics for each term in the shard or skip.
     */
    public TermVectorsRequest termStatistics(boolean termStatistics) {
        setFlag(Flag.TermStatistics, termStatistics);
        return this;
    }

    /**
     * @return <code>true</code> if field statistics should be returned.
     * Otherwise <code>false</code>
     */
    public boolean fieldStatistics() {
        return flagsEnum.contains(Flag.FieldStatistics);
    }

    /**
     * Return the field statistics for each term in the shard or skip.
     */
    public TermVectorsRequest fieldStatistics(boolean fieldStatistics) {
        setFlag(Flag.FieldStatistics, fieldStatistics);
        return this;
    }

    /**
     * @return <code>true</code> if distributed frequencies should be returned. Otherwise
     * <code>false</code>
     */
    public boolean dfs() {
        return flagsEnum.contains(Flag.Dfs);
    }

    /**
     * Use distributed frequencies instead of shard statistics.
     */
    public TermVectorsRequest dfs(boolean dfs) {
        setFlag(Flag.Dfs, dfs);
        return this;
    }

    /**
     * Return only term vectors for special selected fields. Returns for term
     * vectors for all fields if selectedFields == null
     */
    public Set<String> selectedFields() {
        return selectedFields;
    }

    /**
     * Return only term vectors for special selected fields. Returns the term
     * vectors for all fields if selectedFields == null
     */
    public TermVectorsRequest selectedFields(String... fields) {
        selectedFields = fields != null && fields.length != 0 ? Sets.newHashSet(fields) : null;
        return this;
    }

    /**
     * Return whether term vectors should be generated real-time (default to true).
     */
    public boolean realtime() {
        return this.realtime == null ? true : this.realtime;
    }

    /**
     * Choose whether term vectors be generated real-time.
     */
    public TermVectorsRequest realtime(Boolean realtime) {
        this.realtime = realtime;
        return this;
    }

    /**
     * Return the overridden analyzers at each field.
     */
    public Map<String, String> perFieldAnalyzer() {
        return perFieldAnalyzer;
    }

    /**
     * Override the analyzer used at each field when generating term vectors.
     */
    public TermVectorsRequest perFieldAnalyzer(Map<String, String> perFieldAnalyzer) {
        this.perFieldAnalyzer = perFieldAnalyzer != null && perFieldAnalyzer.size() != 0 ? Maps.newHashMap(perFieldAnalyzer) : null;
        return this;
    }

    /**
     * Returns the version of the document to operate on.
     */
    public long version() {
        return version;
    }

    /**
     * Sets the version of the document to operate on.
     */
    public TermVectorsRequest version(long version) {
        this.version = version;
        return this;
    }

    /**
     * Returns the version type used for the version check.
     */
    public VersionType versionType() {
        return versionType;
    }

    /**
     * Sets the version type used for the version check.
     */
    public TermVectorsRequest versionType(VersionType versionType) {
        this.versionType = versionType;
        return this;
    }

    // Enable or disable a single flag in the flag set.
    private void setFlag(Flag flag, boolean set) {
        if (set && !flagsEnum.contains(flag)) {
            flagsEnum.add(flag);
        } else if (!set) {
            flagsEnum.remove(flag);
            assert (!flagsEnum.contains(flag));
        }
    }

    /**
     * Returns the time the request processing was started at.
     */
    public long startTime() {
        return this.startTime;
    }

    /**
     * Validates the request: a type is always required, and either an id or
     * an artificial document must be given.
     */
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = super.validate();
        if (type == null) {
            validationException = ValidateActions.addValidationError("type is missing", validationException);
        }
        if (id == null && doc == null) {
            validationException = ValidateActions.addValidationError("id or doc is missing", validationException);
        }
        return validationException;
    }

    /**
     * Deserializes a request from the given stream.
     */
    public static TermVectorsRequest readTermVectorsRequest(StreamInput in) throws IOException {
        TermVectorsRequest termVectorsRequest = new TermVectorsRequest();
        termVectorsRequest.readFrom(in);
        return termVectorsRequest;
    }

    // NOTE: the read order below must stay in sync with writeTo().
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        type = in.readString();
        id = in.readString();
        if (in.readBoolean()) {
            doc = in.readBytesReference();
        }
        routing = in.readOptionalString();
        preference = in.readOptionalString();
        long flags = in.readVLong();
        flagsEnum.clear();
        for (Flag flag : Flag.values()) {
            if ((flags & (1 << flag.ordinal())) != 0) {
                flagsEnum.add(flag);
            }
        }
        int numSelectedFields = in.readVInt();
        if (numSelectedFields > 0) {
            selectedFields = new HashSet<>();
            for (int i = 0; i < numSelectedFields; i++) {
                selectedFields.add(in.readString());
            }
        }
        if (in.readBoolean()) {
            perFieldAnalyzer = readPerFieldAnalyzer(in.readMap());
        }
        realtime = in.readBoolean();
        versionType = VersionType.fromValue(in.readByte());
        version = in.readLong();
    }

    // NOTE: the write order below must stay in sync with readFrom().
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeString(type);
        out.writeString(id);
        out.writeBoolean(doc != null);
        if (doc != null) {
            out.writeBytesReference(doc);
        }
        out.writeOptionalString(routing);
        out.writeOptionalString(preference);
        long longFlags = 0;
        for (Flag flag : flagsEnum) {
            longFlags |= (1 << flag.ordinal());
        }
        out.writeVLong(longFlags);
        if (selectedFields != null) {
            out.writeVInt(selectedFields.size());
            for (String selectedField : selectedFields) {
                out.writeString(selectedField);
            }
        } else {
            out.writeVInt(0);
        }
        out.writeBoolean(perFieldAnalyzer != null);
        if (perFieldAnalyzer != null) {
            out.writeGenericValue(perFieldAnalyzer);
        }
        out.writeBoolean(realtime());
        out.writeByte(versionType.getValue());
        out.writeLong(version);
    }

    public static enum Flag {
        // Do not change the order of these flags we use
        // the ordinal for encoding! Only append to the end!
        Positions, Offsets, Payloads, FieldStatistics, TermStatistics, Dfs
    }

    /**
     * populates a request object (pre-populated with defaults) based on a parser.
     */
    public static void parseRequest(TermVectorsRequest termVectorsRequest, XContentParser parser) throws IOException {
        XContentParser.Token token;
        String currentFieldName = null;
        List<String> fields = new ArrayList<>();
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (currentFieldName != null) {
                if (currentFieldName.equals("fields")) {
                    if (token == XContentParser.Token.START_ARRAY) {
                        while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                            fields.add(parser.text());
                        }
                    } else {
                        throw new ElasticsearchParseException(
                                "The parameter fields must be given as an array! Use syntax : \"fields\" : [\"field1\", \"field2\",...]");
                    }
                } else if (currentFieldName.equals("offsets")) {
                    termVectorsRequest.offsets(parser.booleanValue());
                } else if (currentFieldName.equals("positions")) {
                    termVectorsRequest.positions(parser.booleanValue());
                } else if (currentFieldName.equals("payloads")) {
                    termVectorsRequest.payloads(parser.booleanValue());
                } else if (currentFieldName.equals("term_statistics") || currentFieldName.equals("termStatistics")) {
                    termVectorsRequest.termStatistics(parser.booleanValue());
                } else if (currentFieldName.equals("field_statistics") || currentFieldName.equals("fieldStatistics")) {
                    termVectorsRequest.fieldStatistics(parser.booleanValue());
                } else if (currentFieldName.equals("dfs")) {
                    termVectorsRequest.dfs(parser.booleanValue());
                } else if (currentFieldName.equals("per_field_analyzer") || currentFieldName.equals("perFieldAnalyzer")) {
                    termVectorsRequest.perFieldAnalyzer(readPerFieldAnalyzer(parser.map()));
                } else if ("_index".equals(currentFieldName)) { // the following is important for multi request parsing.
                    termVectorsRequest.index = parser.text();
                } else if ("_type".equals(currentFieldName)) {
                    termVectorsRequest.type = parser.text();
                } else if ("_id".equals(currentFieldName)) {
                    if (termVectorsRequest.doc != null) {
                        throw new ElasticsearchParseException("Either \"id\" or \"doc\" can be specified, but not both!");
                    }
                    termVectorsRequest.id = parser.text();
                } else if ("doc".equals(currentFieldName)) {
                    if (termVectorsRequest.id != null) {
                        throw new ElasticsearchParseException("Either \"id\" or \"doc\" can be specified, but not both!");
                    }
                    termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser));
                } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
                    termVectorsRequest.routing = parser.text();
                } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
                    termVectorsRequest.version = parser.longValue();
                } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
                    termVectorsRequest.versionType = VersionType.fromString(parser.text());
                } else {
                    throw new ElasticsearchParseException("The parameter " + currentFieldName
                            + " is not valid for term vector request!");
                }
            }
        }
        if (fields.size() > 0) {
            String[] fieldsAsArray = new String[fields.size()];
            termVectorsRequest.selectedFields(fields.toArray(fieldsAsArray));
        }
    }

    /**
     * Converts a generic map into a String-to-String analyzer map, rejecting
     * any non-String analyzer value.
     */
    private static Map<String, String> readPerFieldAnalyzer(Map<String, Object> map) {
        Map<String, String> mapStrStr = new HashMap<>();
        for (Map.Entry<String, Object> e : map.entrySet()) {
            if (e.getValue() instanceof String) {
                mapStrStr.put(e.getKey(), (String) e.getValue());
            } else {
                throw new ElasticsearchException(
                        "The analyzer at " + e.getKey() + " should be of type String, but got a " + e.getValue().getClass() + "!");
            }
        }
        return mapStrStr;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq;
import javax.jms.BytesMessage;
import javax.jms.DeliveryMode;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.jms.Topic;
import javax.management.MBeanServer;
import javax.management.MBeanServerInvocationHandler;
import javax.management.ObjectName;
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.Test;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.management.ObjectNameBuilder;
import org.apache.activemq.artemis.api.core.management.QueueControl;
import org.apache.activemq.artemis.core.server.RoutingType;
import org.apache.activemq.broker.artemiswrapper.ArtemisBrokerWrapper;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.ActiveMQQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test cases used to test the JMS message consumer.
*/
public class JMSConsumerTest extends JmsTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(JMSConsumerTest.class);

// combination-test parameters, populated by the initCombosFor* methods
public ActiveMQDestination destination;
public int deliveryMode;
public int prefetch;
public int ackMode;
public byte destinationType;
public boolean durableConsumer;
/** Builds the combination test suite for this class. */
public static Test suite() {
    return suite(JMSConsumerTest.class);
}
/** Allows running the suite from the command line. */
public static void main(String[] args) {
    junit.textui.TestRunner.run(suite());
}
/**
 * Parameter combinations for {@link #testMessageListenerWithConsumerCanBeStopped()}:
 * both delivery modes across all four destination types.
 */
public void initCombosForTestMessageListenerWithConsumerCanBeStopped() {
    // autoboxing replaces the explicit valueOf() wrapping
    addCombinationValues("deliveryMode", new Object[]{DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("destinationType", new Object[]{ActiveMQDestination.QUEUE_TYPE, ActiveMQDestination.TOPIC_TYPE, ActiveMQDestination.TEMP_QUEUE_TYPE, ActiveMQDestination.TEMP_TOPIC_TYPE});
}
/**
 * Verifies that stopping an ActiveMQMessageConsumer pauses listener dispatch
 * and that starting it again resumes delivery of pending messages.
 */
public void testMessageListenerWithConsumerCanBeStopped() throws Exception {
    final AtomicInteger counter = new AtomicInteger(0);
    final CountDownLatch done1 = new CountDownLatch(1);
    final CountDownLatch done2 = new CountDownLatch(1);
    // Receive a message with the JMS API
    connection.start();
    Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
    destination = createDestination(session, destinationType);
    ActiveMQMessageConsumer consumer = (ActiveMQMessageConsumer) session.createConsumer(destination);
    consumer.setMessageListener(new MessageListener() {
        @Override
        public void onMessage(Message m) {
            // latches signal the arrival of the 1st and 2nd message
            counter.incrementAndGet();
            if (counter.get() == 1) {
                done1.countDown();
            }
            if (counter.get() == 2) {
                done2.countDown();
            }
        }
    });
    // Send a first message to make sure that the consumer dispatcher is
    // running
    sendMessages(session, destination, 1);
    assertTrue(done1.await(1, TimeUnit.SECONDS));
    assertEquals(1, counter.get());
    // Stop the consumer.
    consumer.stop();
    // Send a message, but should not get delivered.
    sendMessages(session, destination, 1);
    assertFalse(done2.await(1, TimeUnit.SECONDS));
    assertEquals(1, counter.get());
    // Start the consumer, and the message should now get delivered.
    consumer.start();
    assertTrue(done2.await(1, TimeUnit.SECONDS));
    assertEquals(2, counter.get());
}
/**
 * Verifies that a consumer can be closed from a separate thread while its
 * message listener is concurrently acknowledging messages, without any
 * uncaught exceptions.
 */
public void testMessageListenerWithConsumerCanBeStoppedConcurently() throws Exception {
    final AtomicInteger counter = new AtomicInteger(0);
    final CountDownLatch closeDone = new CountDownLatch(1);
    connection.start();
    Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
    destination = createDestination(session, ActiveMQDestination.QUEUE_TYPE);
    // preload the queue
    sendMessages(session, destination, 2000);
    final ActiveMQMessageConsumer consumer = (ActiveMQMessageConsumer) session.createConsumer(destination);
    // collect failures from any thread so the test can assert on them at the end
    final Map<Thread, Throwable> exceptions = Collections.synchronizedMap(new HashMap<Thread, Throwable>());
    Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(Thread t, Throwable e) {
            LOG.error("Uncaught exception:", e);
            exceptions.put(t, e);
        }
    });
    // task run off the listener thread: acks periodically and closes the
    // consumer once enough messages were seen
    final class AckAndClose implements Runnable {
        private final Message message;
        public AckAndClose(Message m) {
            this.message = m;
        }
        @Override
        public void run() {
            try {
                int count = counter.incrementAndGet();
                if (count == 590) {
                    // close in a separate thread is ok by jms
                    consumer.close();
                    closeDone.countDown();
                }
                if (count % 200 == 0) {
                    // ensure there are some outstanding messages
                    // ack every 200
                    message.acknowledge();
                }
            } catch (Exception e) {
                LOG.error("Exception on close or ack:", e);
                exceptions.put(Thread.currentThread(), e);
            }
        }
    }
    final ExecutorService executor = Executors.newCachedThreadPool();
    consumer.setMessageListener(new MessageListener() {
        @Override
        public void onMessage(Message m) {
            // ack and close eventually in separate thread
            executor.execute(new AckAndClose(m));
        }
    });
    assertTrue(closeDone.await(20, TimeUnit.SECONDS));
    // await possible exceptions
    Thread.sleep(1000);
    assertTrue("no exceptions: " + exceptions, exceptions.isEmpty());
}
/**
 * Parameter combinations for {@link #testMutiReceiveWithPrefetch1()}:
 * both delivery modes, three ack modes, all four destination types.
 */
public void initCombosForTestMutiReceiveWithPrefetch1() {
    // autoboxing replaces the explicit valueOf() wrapping
    addCombinationValues("deliveryMode", new Object[]{DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("ackMode", new Object[]{Session.AUTO_ACKNOWLEDGE, Session.DUPS_OK_ACKNOWLEDGE, Session.CLIENT_ACKNOWLEDGE});
    addCombinationValues("destinationType", new Object[]{ActiveMQDestination.QUEUE_TYPE, ActiveMQDestination.TOPIC_TYPE, ActiveMQDestination.TEMP_QUEUE_TYPE, ActiveMQDestination.TEMP_TOPIC_TYPE});
}
/**
 * Verifies that with a prefetch of 1 a consumer can still receive multiple
 * messages sequentially, across all ack modes.
 */
public void testMutiReceiveWithPrefetch1() throws Exception {
    // Set prefetch to 1
    connection.getPrefetchPolicy().setAll(1);
    connection.start();
    // Use all the ack modes
    Session session = connection.createSession(false, ackMode);
    destination = createDestination(session, destinationType);
    MessageConsumer consumer = session.createConsumer(destination);
    // Send the messages
    sendMessages(session, destination, 4);
    // Make sure 4 messages were delivered.
    Message message = null;
    for (int i = 0; i < 4; i++) {
        message = consumer.receive(1000);
        assertNotNull(message);
    }
    assertNull(consumer.receiveNoWait());
    // ack the last message, which acks all preceding ones in CLIENT_ACKNOWLEDGE
    message.acknowledge();
}
/**
 * Parameter combinations for {@link #testDurableConsumerSelectorChange()}:
 * both delivery modes, topics only (durable subscriptions require topics).
 */
public void initCombosForTestDurableConsumerSelectorChange() {
    // autoboxing replaces the explicit valueOf() wrapping
    addCombinationValues("deliveryMode", new Object[]{DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("destinationType", new Object[]{ActiveMQDestination.TOPIC_TYPE});
}
/**
 * Verifies that re-creating a durable subscriber with a different selector
 * replaces the old subscription: messages not matching the new selector are
 * skipped.
 */
public void testDurableConsumerSelectorChange() throws Exception {
    // Receive a message with the JMS API
    connection.setClientID("test");
    connection.start();
    Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
    destination = createDestination(session, destinationType);
    MessageProducer producer = session.createProducer(destination);
    producer.setDeliveryMode(deliveryMode);
    MessageConsumer consumer = session.createDurableSubscriber((Topic) destination, "test", "color='red'", false);
    // Send the messages
    TextMessage message = session.createTextMessage("1st");
    message.setStringProperty("color", "red");
    producer.send(message);
    Message m = consumer.receive(1000);
    assertNotNull(m);
    assertEquals("1st", ((TextMessage) m).getText());
    // Change the subscription.
    consumer.close();
    consumer = session.createDurableSubscriber((Topic) destination, "test", "color='blue'", false);
    message = session.createTextMessage("2nd");
    message.setStringProperty("color", "red");
    producer.send(message);
    message = session.createTextMessage("3rd");
    message.setStringProperty("color", "blue");
    producer.send(message);
    // Selector should skip the 2nd message.
    m = consumer.receive(1000);
    assertNotNull(m);
    assertEquals("3rd", ((TextMessage) m).getText());
    assertNull(consumer.receiveNoWait());
}
/**
 * Parameter combinations for {@link #testSendReceiveBytesMessage()}:
 * both delivery modes across all four destination types.
 */
public void initCombosForTestSendReceiveBytesMessage() {
    // autoboxing replaces the explicit valueOf() wrapping
    addCombinationValues("deliveryMode", new Object[]{DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("destinationType", new Object[]{ActiveMQDestination.QUEUE_TYPE, ActiveMQDestination.TOPIC_TYPE, ActiveMQDestination.TEMP_QUEUE_TYPE, ActiveMQDestination.TEMP_TOPIC_TYPE});
}
/**
 * Verifies that a BytesMessage round-trips its payload intact and is
 * delivered exactly once.
 */
public void testSendReceiveBytesMessage() throws Exception {
    // Receive a message with the JMS API
    connection.start();
    Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
    destination = createDestination(session, destinationType);
    MessageConsumer consumer = session.createConsumer(destination);
    MessageProducer producer = session.createProducer(destination);
    BytesMessage message = session.createBytesMessage();
    message.writeBoolean(true);
    message.writeBoolean(false);
    producer.send(message);
    // Make sure only 1 message was delivered.
    BytesMessage m = (BytesMessage) consumer.receive(1000);
    assertNotNull(m);
    assertTrue(m.readBoolean());
    assertFalse(m.readBoolean());
    assertNull(consumer.receiveNoWait());
}
/**
 * Parameter combinations for {@link #testSetMessageListenerAfterStart()}:
 * both delivery modes across all four destination types.
 */
public void initCombosForTestSetMessageListenerAfterStart() {
    // autoboxing replaces the explicit valueOf() wrapping
    addCombinationValues("deliveryMode", new Object[]{DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("destinationType", new Object[]{ActiveMQDestination.QUEUE_TYPE, ActiveMQDestination.TOPIC_TYPE, ActiveMQDestination.TEMP_QUEUE_TYPE, ActiveMQDestination.TEMP_TOPIC_TYPE});
}
/**
 * Verifies that messages sent before a listener is registered are still
 * dispatched to it once it is set, and that no duplicates are delivered.
 */
public void testSetMessageListenerAfterStart() throws Exception {
    final AtomicInteger counter = new AtomicInteger(0);
    final CountDownLatch done = new CountDownLatch(1);
    // Receive a message with the JMS API
    connection.start();
    Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
    destination = createDestination(session, destinationType);
    MessageConsumer consumer = session.createConsumer(destination);
    // Send the messages
    sendMessages(session, destination, 4);
    // See if the message get sent to the listener
    consumer.setMessageListener(new MessageListener() {
        @Override
        public void onMessage(Message m) {
            counter.incrementAndGet();
            if (counter.get() == 4) {
                done.countDown();
            }
        }
    });
    assertTrue(done.await(1000, TimeUnit.MILLISECONDS));
    // small grace period to catch unexpected extra deliveries
    Thread.sleep(200);
    // Make sure only 4 messages were delivered.
    assertEquals(4, counter.get());
}
/**
 * Parameter combinations for {@link #testPassMessageListenerIntoCreateConsumer()}:
 * queue and topic destinations.
 */
public void initCombosForTestPassMessageListenerIntoCreateConsumer() {
    // autoboxing replaces the explicit valueOf() wrapping
    addCombinationValues("destinationType", new Object[]{ActiveMQDestination.QUEUE_TYPE, ActiveMQDestination.TOPIC_TYPE});
}
/**
 * Verifies the ActiveMQ extension that accepts a MessageListener directly in
 * createConsumer(): all messages reach the listener, with no duplicates.
 */
public void testPassMessageListenerIntoCreateConsumer() throws Exception {
    final AtomicInteger counter = new AtomicInteger(0);
    final CountDownLatch done = new CountDownLatch(1);
    // Receive a message with the JMS API
    connection.start();
    ActiveMQSession session = (ActiveMQSession) connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
    destination = createDestination(session, destinationType);
    MessageConsumer consumer = session.createConsumer(destination, new MessageListener() {
        @Override
        public void onMessage(Message m) {
            counter.incrementAndGet();
            if (counter.get() == 4) {
                done.countDown();
            }
        }
    });
    assertNotNull(consumer);
    // Send the messages
    sendMessages(session, destination, 4);
    assertTrue(done.await(1000, TimeUnit.MILLISECONDS));
    // small grace period to catch unexpected extra deliveries
    Thread.sleep(200);
    // Make sure only 4 messages were delivered.
    assertEquals(4, counter.get());
}
/**
 * Parameter combinations for the close-unacked-with-prefetch-1 test:
 * both delivery modes, CLIENT_ACKNOWLEDGE only, queues only.
 */
public void initCombosForTestMessageListenerOnMessageCloseUnackedWithPrefetch1StayInQueue() {
    // autoboxing replaces the explicit valueOf() wrapping
    addCombinationValues("deliveryMode", new Object[]{DeliveryMode.NON_PERSISTENT, DeliveryMode.PERSISTENT});
    addCombinationValues("ackMode", new Object[]{Session.CLIENT_ACKNOWLEDGE});
    addCombinationValues("destinationType", new Object[]{ActiveMQDestination.QUEUE_TYPE});
}
   /**
    * With prefetch 1 and CLIENT_ACKNOWLEDGE, a message that was dispatched to a
    * listener but whose connection was closed before the acknowledge took
    * effect must stay on the queue and be redelivered on a fresh connection.
    * The final count of 5 = 2 first deliveries + the redelivered message 2 +
    * messages 3 and 4.
    */
   public void testMessageListenerOnMessageCloseUnackedWithPrefetch1StayInQueue() throws Exception {
      final AtomicInteger counter = new AtomicInteger(0);
      final CountDownLatch sendDone = new CountDownLatch(1);
      final CountDownLatch got2Done = new CountDownLatch(1);
      // Set prefetch to 1
      connection.getPrefetchPolicy().setAll(1);
      // This test case does not work if optimized message dispatch is used as
      // the main thread send block until the consumer receives the
      // message. This test depends on thread decoupling so that the main
      // thread can stop the consumer thread.
      connection.setOptimizedMessageDispatch(false);
      connection.start();
      // Use all the ack modes
      Session session = connection.createSession(false, ackMode);
      destination = createDestination(session, destinationType);
      MessageConsumer consumer = session.createConsumer(destination);
      consumer.setMessageListener(new MessageListener() {
         @Override
         public void onMessage(Message m) {
            try {
               TextMessage tm = (TextMessage) m;
               LOG.info("Got in first listener: " + tm.getText());
               assertEquals("" + counter.get(), tm.getText());
               counter.incrementAndGet();
               if (counter.get() == 2) {
                  // Close the connection from inside onMessage BEFORE the
                  // acknowledge() below runs, leaving message 2 unacked.
                  sendDone.await();
                  connection.close();
                  got2Done.countDown();
               }
               tm.acknowledge();
            } catch (Throwable e) {
               e.printStackTrace();
            }
         }
      });
      // Send the messages
      sendMessages(session, destination, 4);
      sendDone.countDown();
      // Wait for first 2 messages to arrive.
      assertTrue(got2Done.await(100000, TimeUnit.MILLISECONDS));
      // Re-start connection.
      connection = (ActiveMQConnection) factory.createConnection();
      connections.add(connection);
      connection.getPrefetchPolicy().setAll(1);
      connection.start();
      // Pickup the remaining messages.
      final CountDownLatch done2 = new CountDownLatch(1);
      session = connection.createSession(false, ackMode);
      consumer = session.createConsumer(destination);
      consumer.setMessageListener(new MessageListener() {
         @Override
         public void onMessage(Message m) {
            try {
               TextMessage tm = (TextMessage) m;
               LOG.info("Got in second listener: " + tm.getText());
               // order is not guaranteed as the connection is started before the listener is set.
               // assertEquals("" + counter.get(), tm.getText());
               counter.incrementAndGet();
               if (counter.get() == 4) {
                  done2.countDown();
               }
            } catch (Throwable e) {
               LOG.error("unexpected ex onMessage: ", e);
            }
         }
      });
      assertTrue(done2.await(1000, TimeUnit.MILLISECONDS));
      Thread.sleep(200);
      // assert msg 2 was redelivered as close() from onMessages() will only ack in auto_ack and dups_ok mode
      assertEquals(5, counter.get());
   }
public void initCombosForTestMessageListenerAutoAckOnCloseWithPrefetch1() {
addCombinationValues("deliveryMode", new Object[]{Integer.valueOf(DeliveryMode.NON_PERSISTENT), Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("ackMode", new Object[]{Integer.valueOf(Session.AUTO_ACKNOWLEDGE), Integer.valueOf(Session.CLIENT_ACKNOWLEDGE)});
addCombinationValues("destinationType", new Object[]{Byte.valueOf(ActiveMQDestination.QUEUE_TYPE)});
}
   /**
    * Messages are acknowledged inside onMessage BEFORE the connection is
    * closed, so nothing should be redelivered: exactly 4 deliveries total
    * across the two connections.
    */
   public void testMessageListenerAutoAckOnCloseWithPrefetch1() throws Exception {
      final AtomicInteger counter = new AtomicInteger(0);
      final CountDownLatch sendDone = new CountDownLatch(1);
      final CountDownLatch got2Done = new CountDownLatch(1);
      // Set prefetch to 1
      connection.getPrefetchPolicy().setAll(1);
      // This test case does not work if optimized message dispatch is used as
      // the main thread send block until the consumer receives the
      // message. This test depends on thread decoupling so that the main
      // thread can stop the consumer thread.
      connection.setOptimizedMessageDispatch(false);
      connection.start();
      // Use all the ack modes
      Session session = connection.createSession(false, ackMode);
      destination = createDestination(session, destinationType);
      MessageConsumer consumer = session.createConsumer(destination);
      consumer.setMessageListener(new MessageListener() {
         @Override
         public void onMessage(Message m) {
            try {
               TextMessage tm = (TextMessage) m;
               LOG.info("Got in first listener: " + tm.getText());
               assertEquals("" + counter.get(), tm.getText());
               counter.incrementAndGet();
               // Ack first, then close on the second message, so message 2 is
               // acknowledged before the connection goes away.
               m.acknowledge();
               if (counter.get() == 2) {
                  sendDone.await();
                  connection.close();
                  got2Done.countDown();
               }
            } catch (Throwable e) {
               e.printStackTrace();
            }
         }
      });
      // Send the messages
      sendMessages(session, destination, 4);
      sendDone.countDown();
      // Wait for first 2 messages to arrive.
      assertTrue(got2Done.await(100000, TimeUnit.MILLISECONDS));
      // Re-start connection.
      connection = (ActiveMQConnection) factory.createConnection();
      connections.add(connection);
      connection.getPrefetchPolicy().setAll(1);
      connection.start();
      // Pickup the remaining messages.
      final CountDownLatch done2 = new CountDownLatch(1);
      session = connection.createSession(false, ackMode);
      consumer = session.createConsumer(destination);
      consumer.setMessageListener(new MessageListener() {
         @Override
         public void onMessage(Message m) {
            try {
               TextMessage tm = (TextMessage) m;
               LOG.info("Got in second listener: " + tm.getText());
               counter.incrementAndGet();
               if (counter.get() == 4) {
                  done2.countDown();
               }
            } catch (Throwable e) {
               LOG.error("unexpected ex onMessage: ", e);
            }
         }
      });
      assertTrue(done2.await(1000, TimeUnit.MILLISECONDS));
      Thread.sleep(200);
      // close from onMessage with Auto_ack will ack
      // Make sure only 4 messages were delivered.
      assertEquals(4, counter.get());
   }
public void initCombosForTestMessageListenerWithConsumerWithPrefetch1() {
addCombinationValues("deliveryMode", new Object[]{Integer.valueOf(DeliveryMode.NON_PERSISTENT), Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType", new Object[]{Byte.valueOf(ActiveMQDestination.QUEUE_TYPE), Byte.valueOf(ActiveMQDestination.TOPIC_TYPE), Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE), Byte.valueOf(ActiveMQDestination.TEMP_TOPIC_TYPE)});
}
public void testMessageListenerWithConsumerWithPrefetch1() throws Exception {
final AtomicInteger counter = new AtomicInteger(0);
final CountDownLatch done = new CountDownLatch(1);
// Receive a message with the JMS API
connection.getPrefetchPolicy().setAll(1);
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
destination = createDestination(session, destinationType);
MessageConsumer consumer = session.createConsumer(destination);
consumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message m) {
counter.incrementAndGet();
if (counter.get() == 4) {
done.countDown();
}
}
});
// Send the messages
sendMessages(session, destination, 4);
assertTrue(done.await(1000, TimeUnit.MILLISECONDS));
Thread.sleep(200);
// Make sure only 4 messages were delivered.
assertEquals(4, counter.get());
}
public void initCombosForTestMessageListenerWithConsumer() {
addCombinationValues("deliveryMode", new Object[]{Integer.valueOf(DeliveryMode.NON_PERSISTENT), Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType", new Object[]{Byte.valueOf(ActiveMQDestination.QUEUE_TYPE), Byte.valueOf(ActiveMQDestination.TOPIC_TYPE), Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE), Byte.valueOf(ActiveMQDestination.TEMP_TOPIC_TYPE)});
}
public void testMessageListenerWithConsumer() throws Exception {
final AtomicInteger counter = new AtomicInteger(0);
final CountDownLatch done = new CountDownLatch(1);
// Receive a message with the JMS API
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
destination = createDestination(session, destinationType);
MessageConsumer consumer = session.createConsumer(destination);
consumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message m) {
counter.incrementAndGet();
if (counter.get() == 4) {
done.countDown();
}
}
});
// Send the messages
sendMessages(session, destination, 4);
assertTrue(done.await(1000, TimeUnit.MILLISECONDS));
Thread.sleep(200);
// Make sure only 4 messages were delivered.
assertEquals(4, counter.get());
}
public void initCombosForTestUnackedWithPrefetch1StayInQueue() {
addCombinationValues("deliveryMode", new Object[]{Integer.valueOf(DeliveryMode.NON_PERSISTENT), Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("ackMode", new Object[]{Integer.valueOf(Session.AUTO_ACKNOWLEDGE), Integer.valueOf(Session.DUPS_OK_ACKNOWLEDGE), Integer.valueOf(Session.CLIENT_ACKNOWLEDGE)});
addCombinationValues("destinationType", new Object[]{Byte.valueOf(ActiveMQDestination.QUEUE_TYPE)});
}
public void testUnackedWithPrefetch1StayInQueue() throws Exception {
// Set prefetch to 1
connection.getPrefetchPolicy().setAll(1);
connection.start();
// Use all the ack modes
Session session = connection.createSession(false, ackMode);
destination = createDestination(session, destinationType);
MessageConsumer consumer = session.createConsumer(destination);
// Send the messages
sendMessages(session, destination, 4);
// Only pick up the first 2 messages.
Message message = null;
for (int i = 0; i < 2; i++) {
message = consumer.receive(1000);
assertNotNull(message);
}
message.acknowledge();
connection.close();
connection = (ActiveMQConnection) factory.createConnection();
connections.add(connection);
connection.getPrefetchPolicy().setAll(1);
connection.start();
// Use all the ack modes
session = connection.createSession(false, ackMode);
consumer = session.createConsumer(destination);
// Pickup the rest of the messages.
for (int i = 0; i < 2; i++) {
message = consumer.receive(1000);
assertNotNull(message);
}
message.acknowledge();
assertNull(consumer.receiveNoWait());
}
public void initCombosForTestPrefetch1MessageNotDispatched() {
addCombinationValues("deliveryMode", new Object[]{Integer.valueOf(DeliveryMode.NON_PERSISTENT), Integer.valueOf(DeliveryMode.PERSISTENT)});
}
public void testPrefetch1MessageNotDispatched() throws Exception {
// Set prefetch to 1
connection.getPrefetchPolicy().setAll(1);
connection.start();
Session session = connection.createSession(true, 0);
destination = new ActiveMQQueue("TEST");
MessageConsumer consumer = session.createConsumer(destination);
// Send 2 messages to the destination.
sendMessages(session, destination, 2);
session.commit();
// The prefetch should fill up with 1 message.
// Since prefetch is still full, the 2nd message should get dispatched
// to another consumer.. lets create the 2nd consumer test that it does
// make sure it does.
ActiveMQConnection connection2 = (ActiveMQConnection) factory.createConnection();
connection2.start();
connections.add(connection2);
Session session2 = connection2.createSession(true, 0);
MessageConsumer consumer2 = session2.createConsumer(destination);
// Pick up the first message.
Message message1 = consumer.receive(1000);
assertNotNull(message1);
// Pick up the 2nd messages.
Message message2 = consumer2.receive(5000);
assertNotNull(message2);
session.commit();
session2.commit();
assertNull(consumer.receiveNoWait());
}
public void initCombosForTestDontStart() {
addCombinationValues("deliveryMode", new Object[]{Integer.valueOf(DeliveryMode.NON_PERSISTENT)});
addCombinationValues("destinationType", new Object[]{Byte.valueOf(ActiveMQDestination.QUEUE_TYPE), Byte.valueOf(ActiveMQDestination.TOPIC_TYPE)});
}
public void testDontStart() throws Exception {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
destination = createDestination(session, destinationType);
MessageConsumer consumer = session.createConsumer(destination);
// Send the messages
sendMessages(session, destination, 1);
// Make sure no messages were delivered.
assertNull(consumer.receive(1000));
}
public void initCombosForTestStartAfterSend() {
addCombinationValues("deliveryMode", new Object[]{Integer.valueOf(DeliveryMode.NON_PERSISTENT)});
addCombinationValues("destinationType", new Object[]{Byte.valueOf(ActiveMQDestination.QUEUE_TYPE), Byte.valueOf(ActiveMQDestination.TOPIC_TYPE)});
}
public void testStartAfterSend() throws Exception {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
destination = createDestination(session, destinationType);
MessageConsumer consumer = session.createConsumer(destination);
// Send the messages
sendMessages(session, destination, 1);
// Start the conncection after the message was sent.
connection.start();
// Make sure only 1 message was delivered.
assertNotNull(consumer.receive(1000));
assertNull(consumer.receiveNoWait());
}
public void initCombosForTestReceiveMessageWithConsumer() {
addCombinationValues("deliveryMode", new Object[]{Integer.valueOf(DeliveryMode.NON_PERSISTENT), Integer.valueOf(DeliveryMode.PERSISTENT)});
addCombinationValues("destinationType", new Object[]{Byte.valueOf(ActiveMQDestination.QUEUE_TYPE), Byte.valueOf(ActiveMQDestination.TOPIC_TYPE), Byte.valueOf(ActiveMQDestination.TEMP_QUEUE_TYPE), Byte.valueOf(ActiveMQDestination.TEMP_TOPIC_TYPE)});
}
public void testReceiveMessageWithConsumer() throws Exception {
// Receive a message with the JMS API
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
destination = createDestination(session, destinationType);
MessageConsumer consumer = session.createConsumer(destination);
// Send the messages
sendMessages(session, destination, 1);
// Make sure only 1 message was delivered.
Message m = consumer.receive(1000);
assertNotNull(m);
assertEquals("0", ((TextMessage) m).getText());
assertNull(consumer.receiveNoWait());
}
public void testDupsOkConsumer() throws Exception {
// Receive a message with the JMS API
connection.start();
Session session = connection.createSession(false, Session.DUPS_OK_ACKNOWLEDGE);
destination = createDestination(session, ActiveMQDestination.QUEUE_TYPE);
MessageConsumer consumer = session.createConsumer(destination);
// Send the messages
sendMessages(session, destination, 4);
// Make sure only 4 message are delivered.
for (int i = 0; i < 4; i++) {
Message m = consumer.receive(1000);
assertNotNull(m);
}
assertNull(consumer.receive(1000));
// Close out the consumer.. no other messages should be left on the queue.
consumer.close();
consumer = session.createConsumer(destination);
assertNull(consumer.receive(1000));
}
public void testRedispatchOfUncommittedTx() throws Exception {
connection.start();
Session session = connection.createSession(true, Session.SESSION_TRANSACTED);
destination = createDestination(session, ActiveMQDestination.QUEUE_TYPE);
sendMessages(connection, destination, 2);
MessageConsumer consumer = session.createConsumer(destination);
assertNotNull(consumer.receive(1000));
assertNotNull(consumer.receive(1000));
// install another consumer while message dispatch is unacked/uncommitted
Session redispatchSession = connection.createSession(true, Session.SESSION_TRANSACTED);
MessageConsumer redispatchConsumer = redispatchSession.createConsumer(destination);
// no commit so will auto rollback and get re-dispatched to redisptachConsumer
session.close();
Message msg = redispatchConsumer.receive(1000);
assertNotNull(msg);
assertTrue("redelivered flag set", msg.getJMSRedelivered());
assertEquals(2, msg.getLongProperty("JMSXDeliveryCount"));
msg = redispatchConsumer.receive(1000);
assertNotNull(msg);
assertTrue(msg.getJMSRedelivered());
assertEquals(2, msg.getLongProperty("JMSXDeliveryCount"));
redispatchSession.commit();
assertNull(redispatchConsumer.receive(500));
redispatchSession.close();
}
public void testRedispatchOfRolledbackTx() throws Exception {
connection.start();
Session session = connection.createSession(true, Session.SESSION_TRANSACTED);
destination = createDestination(session, ActiveMQDestination.QUEUE_TYPE);
sendMessages(connection, destination, 2);
MessageConsumer consumer = session.createConsumer(destination);
assertNotNull(consumer.receive(1000));
assertNotNull(consumer.receive(1000));
// install another consumer while message dispatch is unacked/uncommitted
Session redispatchSession = connection.createSession(true, Session.SESSION_TRANSACTED);
MessageConsumer redispatchConsumer = redispatchSession.createConsumer(destination);
session.rollback();
session.close();
Message msg = redispatchConsumer.receive(1000);
assertNotNull(msg);
assertTrue(msg.getJMSRedelivered());
assertEquals(2, msg.getLongProperty("JMSXDeliveryCount"));
msg = redispatchConsumer.receive(1000);
assertNotNull(msg);
assertTrue(msg.getJMSRedelivered());
assertEquals(2, msg.getLongProperty("JMSXDeliveryCount"));
redispatchSession.commit();
assertNull(redispatchConsumer.receive(500));
redispatchSession.close();
}
public void initCombosForTestAckOfExpired() {
addCombinationValues("destinationType", new Object[]{Byte.valueOf(ActiveMQDestination.QUEUE_TYPE)});
}
   /**
    * Sends a batch of messages with a short TTL, lets them expire, then sends
    * a non-expiring batch. Verifies that only the live messages are received,
    * that acknowledging them also accounts for the expired ones in the
    * consumer stats, and that the broker-side queue counters agree.
    */
   public void testAckOfExpired() throws Exception {
      // sendAcksAsync=false so acks are on the wire before stats are checked.
      ActiveMQConnectionFactory fact = new ActiveMQConnectionFactory("vm://localhost?jms.prefetchPolicy.all=4&jms.sendAcksAsync=false");
      connection = fact.createActiveMQConnection();
      connection.start();
      Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      destination = (ActiveMQDestination) (destinationType == ActiveMQDestination.QUEUE_TYPE ? session.createQueue("test") : session.createTopic("test"));
      MessageConsumer consumer = session.createConsumer(destination);
      connection.setStatsEnabled(true);
      Session sendSession = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
      MessageProducer producer = sendSession.createProducer(destination);
      // First batch: 1s TTL so these expire before consumption.
      producer.setTimeToLive(1000);
      final int count = 4;
      for (int i = 0; i < count; i++) {
         TextMessage message = sendSession.createTextMessage("" + i);
         producer.send(message);
      }
      // let first bunch in queue expire
      Thread.sleep(2000);
      // Second batch: TTL 0 means the messages never expire.
      producer.setTimeToLive(0);
      for (int i = 0; i < count; i++) {
         TextMessage message = sendSession.createTextMessage("no expiry" + i);
         producer.send(message);
      }
      ActiveMQMessageConsumer amqConsumer = (ActiveMQMessageConsumer) consumer;
      for (int i = 0; i < count; i++) {
         TextMessage msg = (TextMessage) amqConsumer.receive();
         assertNotNull(msg);
         assertTrue("message has \"no expiry\" text: " + msg.getText(), msg.getText().contains("no expiry"));
         // force an ack when there are expired messages
         amqConsumer.acknowledge();
      }
      assertEquals("consumer has expiredMessages", count, amqConsumer.getConsumerStats().getExpiredMessageCount().getCount());
      // Broker-side view: nothing in flight, 8 messages added in total (4 expired + 4 live).
      QueueControl view = createQueueControl(destination.getPhysicalName());
      assertEquals("Wrong inFlightCount: " + view.getDeliveringCount(), 0, view.getDeliveringCount());
      assertEquals("Wrong dispatch count: " + view.getMessagesAdded(), 8, view.getMessagesAdded());
   }
private QueueControl createQueueControl(String destName) throws Exception {
ArtemisBrokerWrapper wrapper = (ArtemisBrokerWrapper) broker.getBroker();
MBeanServer beanServer = wrapper.getMbeanServer();
SimpleString address = new SimpleString(destName);
ObjectName objName = ObjectNameBuilder.DEFAULT.getQueueObjectName(address, address, RoutingType.ANYCAST);
return MBeanServerInvocationHandler.newProxyInstance(beanServer, objName, QueueControl.class, false);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util.jsse;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.Security;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import org.apache.camel.converter.CollectionConverter;
import org.owasp.encoder.Encode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A representation of configuration options for creating and loading a
* {@link KeyStore} instance.
*/
public class KeyStoreParameters extends JsseParameters {

    private static final Logger LOG = LoggerFactory.getLogger(KeyStoreParameters.class);

    /**
     * The optional type of the key store to load. See Appendix A in the
     * <a href="http://download.oracle.com/javase/6/docs/technotes/guides/security/StandardNames.html#KeyStore">
     * Java Cryptography Architecture Standard Algorithm Name Documentation</a> for more information on standard names.
     */
    protected String type;

    /**
     * The optional password for reading/opening/verifying the key store.
     */
    protected String password;

    /**
     * The optional provider identifier for instantiating the key store.
     */
    protected String provider;

    /**
     * The optional file path, class path resource, or URL of the resource
     * used to load the key store.
     */
    protected String resource;

    /**
     * @see #setType(String)
     */
    public String getType() {
        return type;
    }

    /**
     * Sets the type of the key store to create and load. See Appendix A in the
     * <a href="http://download.oracle.com/javase/6/docs/technotes/guides/security/StandardNames.html#KeyStore"
     * >Java Cryptography Architecture Standard Algorithm Name
     * Documentation</a> for more information on standard names.
     *
     * @param value the key store type identifier (may be {@code null})
     */
    public void setType(String value) {
        this.type = value;
    }

    /**
     * @see #getPassword()
     */
    public String getPassword() {
        return password;
    }

    /**
     * Set the optional password for reading/opening/verifying the key store.
     *
     * @param value the password value (may be {@code null})
     */
    public void setPassword(String value) {
        this.password = value;
    }

    /**
     * @see #setProvider(String)
     */
    public String getProvider() {
        return provider;
    }

    /**
     * Sets the optional provider identifier for instantiating the key store.
     *
     * @param value the provider identifier (may be {@code null})
     *
     * @see Security#getProviders()
     */
    public void setProvider(String value) {
        this.provider = value;
    }

    /**
     * @see #getResource()
     */
    public String getResource() {
        return resource;
    }

    /**
     * Sets the optional file path, class path resource, or URL of the resource
     * used to load the key store.
     *
     * @param value the resource (may be {@code null})
     */
    public void setResource(String value) {
        this.resource = value;
    }

    /**
     * Creates a {@link KeyStoreParameters} instance based off of the configuration state
     * of this instance. If {@link #getType()} returns {@code null}, the default
     * key store type is loaded, otherwise the type will be of that specified.
     * <p/>
     * The created instance will always be loaded, but if the type requires an
     * input stream and {@link #getResource()} returns {@code null}, the
     * instance will be empty. The loading of the resource, if not {@code null},
     * is attempted by treating the resource as a file path, a class path
     * resource, and a URL in that order. An exception is thrown if the resource
     * cannot be resolved to readable input stream using any of the above
     * methods.
     *
     * @return a configured and loaded key store
     * @throws GeneralSecurityException if there is an error creating an instance
     * with the given configuration
     * @throws IOException if there is an error resolving the configured
     * resource to an input stream
     */
    public KeyStore createKeyStore() throws GeneralSecurityException, IOException {
        LOG.trace("Creating KeyStore instance from KeyStoreParameters [{}].", this);
        String ksType = this.parsePropertyValue(this.type);
        if (ksType == null) {
            ksType = KeyStore.getDefaultType();
        }
        char[] ksPassword = null;
        if (this.password != null) {
            ksPassword = this.parsePropertyValue(this.password).toCharArray();
        }
        KeyStore ks;
        if (this.provider == null) {
            ks = KeyStore.getInstance(ksType);
        } else {
            ks = KeyStore.getInstance(ksType, this.parsePropertyValue(this.provider));
        }
        if (this.resource == null) {
            // No resource configured: initialize an empty key store.
            ks.load(null, ksPassword);
        } else {
            // Fix: close the resolved stream after loading; the previous
            // implementation leaked it. try-with-resources is a no-op if the
            // resolved stream is null.
            try (InputStream is = this.resolveResource(this.parsePropertyValue(this.resource))) {
                ks.load(is, ksPassword);
            }
        }
        if (LOG.isDebugEnabled()) {
            List<String> aliases = new LinkedList<String>();
            Enumeration<String> aliasEnum = ks.aliases();
            while (aliasEnum.hasMoreElements()) {
                aliases.add(aliasEnum.nextElement());
            }
            LOG.debug("KeyStore [{}], initialized from [{}], is using provider [{}], has type [{}], and contains aliases {}.",
                      new Object[] {ks, Encode.forJava(this.toString()), Encode.forJava(ks.getProvider().toString()), Encode.forJava(ks.getType()), Encode.forJava(Arrays.toString(aliases.toArray()))});
        }
        return ks;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("KeyStoreParameters [type=");
        builder.append(type);
        builder.append(", password=");
        builder.append("********");
        builder.append(", provider=");
        builder.append(provider);
        builder.append(", resource=");
        builder.append(resource);
        builder.append(", getContext()=");
        builder.append(getCamelContext());
        builder.append("]");
        return builder.toString();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode.metrics;
import static org.apache.hadoop.metrics2.impl.MsInfo.ProcessName;
import static org.apache.hadoop.metrics2.impl.MsInfo.SessionId;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
import org.apache.hadoop.metrics2.lib.MutableQuantiles;
import org.apache.hadoop.metrics2.lib.MutableRate;
import org.apache.hadoop.metrics2.source.JvmMetrics;
/**
* This class is for maintaining the various NameNode activity statistics
* and publishing them through the metrics interfaces.
*/
@Metrics(name="NameNodeActivity", about="NameNode metrics", context="dfs")
public class NameNodeMetrics {
  // Registry holding every metric defined in this class, tagged with the
  // process name and session id in the constructor.
  final MetricsRegistry registry = new MetricsRegistry("namenode");

  // Per-operation counters. Fields annotated with a bare @Metric derive their
  // metric name from the field name; the String argument, where present,
  // supplies the metric description.
  @Metric MutableCounterLong createFileOps;
  @Metric MutableCounterLong filesCreated;
  @Metric MutableCounterLong filesAppended;
  @Metric MutableCounterLong getBlockLocations;
  @Metric MutableCounterLong filesRenamed;
  @Metric MutableCounterLong filesTruncated;
  @Metric MutableCounterLong getListingOps;
  @Metric MutableCounterLong deleteFileOps;
  @Metric("Number of files/dirs deleted by delete or rename operations")
  MutableCounterLong filesDeleted;
  @Metric MutableCounterLong fileInfoOps;
  @Metric MutableCounterLong addBlockOps;
  @Metric MutableCounterLong getAdditionalDatanodeOps;
  @Metric MutableCounterLong createSymlinkOps;
  @Metric MutableCounterLong getLinkTargetOps;
  @Metric MutableCounterLong filesInGetListingOps;
  @Metric ("Number of successful re-replications")
  MutableCounterLong successfulReReplications;
  @Metric ("Number of times we failed to schedule a block re-replication.")
  MutableCounterLong numTimesReReplicationNotScheduled;
  @Metric("Number of timed out block re-replications")
  MutableCounterLong timeoutReReplications;
  // Snapshot-related operation counters.
  @Metric("Number of allowSnapshot operations")
  MutableCounterLong allowSnapshotOps;
  @Metric("Number of disallowSnapshot operations")
  MutableCounterLong disallowSnapshotOps;
  @Metric("Number of createSnapshot operations")
  MutableCounterLong createSnapshotOps;
  @Metric("Number of deleteSnapshot operations")
  MutableCounterLong deleteSnapshotOps;
  @Metric("Number of renameSnapshot operations")
  MutableCounterLong renameSnapshotOps;
  @Metric("Number of listSnapshottableDirectory operations")
  MutableCounterLong listSnapshottableDirOps;
  @Metric("Number of snapshotDiffReport operations")
  MutableCounterLong snapshotDiffReportOps;
  // Block-report processing counters/gauges.
  @Metric("Number of blockReceivedAndDeleted calls")
  MutableCounterLong blockReceivedAndDeletedOps;
  @Metric("Number of blockReports and blockReceivedAndDeleted queued")
  MutableGaugeInt blockOpsQueued;
  @Metric("Number of blockReports and blockReceivedAndDeleted batch processed")
  MutableCounterLong blockOpsBatched;
@Metric("Number of file system operations")
public long totalFileOps(){
return
getBlockLocations.value() +
createFileOps.value() +
filesAppended.value() +
addBlockOps.value() +
getAdditionalDatanodeOps.value() +
filesRenamed.value() +
filesTruncated.value() +
deleteFileOps.value() +
getListingOps.value() +
fileInfoOps.value() +
getLinkTargetOps.value() +
createSnapshotOps.value() +
deleteSnapshotOps.value() +
allowSnapshotOps.value() +
disallowSnapshotOps.value() +
renameSnapshotOps.value() +
listSnapshottableDirOps.value() +
createSymlinkOps.value() +
snapshotDiffReportOps.value();
}
  // Rate (count + average time) metrics. Each MutableQuantiles array holds one
  // percentile sampler per configured interval; see the constructor.
  @Metric("Journal transactions") MutableRate transactions;
  @Metric("Journal syncs") MutableRate syncs;
  final MutableQuantiles[] syncsQuantiles;
  @Metric("Journal transactions batched in sync")
  MutableCounterLong transactionsBatchedInSync;
  @Metric("Number of blockReports from individual storages")
  MutableRate storageBlockReport;
  final MutableQuantiles[] storageBlockReportQuantiles;
  @Metric("Cache report") MutableRate cacheReport;
  final MutableQuantiles[] cacheReportQuantiles;
  @Metric("Generate EDEK time") private MutableRate generateEDEKTime;
  private final MutableQuantiles[] generateEDEKTimeQuantiles;
  @Metric("Warm-up EDEK time") private MutableRate warmUpEDEKTime;
  private final MutableQuantiles[] warmUpEDEKTimeQuantiles;
  @Metric("Resource check time") private MutableRate resourceCheckTime;
  private final MutableQuantiles[] resourceCheckTimeQuantiles;
  // Startup timing gauges.
  @Metric("Duration in SafeMode at startup in msec")
  MutableGaugeInt safeModeTime;
  @Metric("Time loading FS Image at startup in msec")
  MutableGaugeInt fsImageLoadTime;
  // Image/edits transfer servlet timings.
  @Metric("GetImageServlet getEdit")
  MutableRate getEdit;
  @Metric("GetImageServlet getImage")
  MutableRate getImage;
  @Metric("GetImageServlet putImage")
  MutableRate putImage;
  // Supplied via the constructor; exposed through getJvmMetrics().
  JvmMetrics jvmMetrics = null;
NameNodeMetrics(String processName, String sessionId, int[] intervals,
final JvmMetrics jvmMetrics) {
this.jvmMetrics = jvmMetrics;
registry.tag(ProcessName, processName).tag(SessionId, sessionId);
final int len = intervals.length;
syncsQuantiles = new MutableQuantiles[len];
storageBlockReportQuantiles = new MutableQuantiles[len];
cacheReportQuantiles = new MutableQuantiles[len];
generateEDEKTimeQuantiles = new MutableQuantiles[len];
warmUpEDEKTimeQuantiles = new MutableQuantiles[len];
resourceCheckTimeQuantiles = new MutableQuantiles[len];
for (int i = 0; i < len; i++) {
int interval = intervals[i];
syncsQuantiles[i] = registry.newQuantiles(
"syncs" + interval + "s",
"Journal syncs", "ops", "latency", interval);
storageBlockReportQuantiles[i] = registry.newQuantiles(
"storageBlockReport" + interval + "s",
"Storage block report", "ops", "latency", interval);
cacheReportQuantiles[i] = registry.newQuantiles(
"cacheReport" + interval + "s",
"Cache report", "ops", "latency", interval);
generateEDEKTimeQuantiles[i] = registry.newQuantiles(
"generateEDEKTime" + interval + "s",
"Generate EDEK time", "ops", "latency", interval);
warmUpEDEKTimeQuantiles[i] = registry.newQuantiles(
"warmupEDEKTime" + interval + "s",
"Warm up EDEK time", "ops", "latency", interval);
resourceCheckTimeQuantiles[i] = registry.newQuantiles(
"resourceCheckTime" + interval + "s",
"resource check time", "ops", "latency", interval);
}
}
/**
 * Builds and registers a NameNodeMetrics source for the given role with the
 * default metrics system, along with its JVM metrics companion.
 *
 * @param conf configuration supplying the session id and percentile intervals
 * @param r    the NameNode role whose string form becomes the process name
 * @return the registered metrics instance
 */
public static NameNodeMetrics create(Configuration conf, NamenodeRole r) {
  final String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  final String processName = r.toString();
  final MetricsSystem metricsSystem = DefaultMetricsSystem.instance();
  final JvmMetrics jvm = JvmMetrics.create(processName, sessionId, metricsSystem);
  // Percentile measurement is off by default: with no intervals configured,
  // getInts returns an empty array and no quantile trackers are created.
  final int[] percentileIntervals =
      conf.getInts(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY);
  final NameNodeMetrics metrics =
      new NameNodeMetrics(processName, sessionId, percentileIntervals, jvm);
  return metricsSystem.register(metrics);
}
/** @return the JVM metrics source created alongside this instance. */
public JvmMetrics getJvmMetrics() {
return jvmMetrics;
}
/**
 * Shuts down the process-wide default metrics system. Note this stops ALL
 * registered sources in the process, not just the NameNode metrics.
 */
public void shutdown() {
DefaultMetricsSystem.shutdown();
}
// ---------------------------------------------------------------------------
// Simple counter/gauge updates. Each method delegates to the corresponding
// @Metric-annotated mutable field declared on this class; the metrics system
// snapshots these asynchronously, so every call here is cheap and lock-free.
// ---------------------------------------------------------------------------
public void incrGetBlockLocations() {
getBlockLocations.incr();
}
public void incrFilesCreated() {
filesCreated.incr();
}
public void incrCreateFileOps() {
createFileOps.incr();
}
public void incrFilesAppended() {
filesAppended.incr();
}
public void incrAddBlockOps() {
addBlockOps.incr();
}
public void incrGetAdditionalDatanodeOps() {
getAdditionalDatanodeOps.incr();
}
public void incrFilesRenamed() {
filesRenamed.incr();
}
public void incrFilesTruncated() {
filesTruncated.incr();
}
// delta: number of files/inodes removed by a single delete operation.
public void incrFilesDeleted(long delta) {
filesDeleted.incr(delta);
}
public void incrDeleteFileOps() {
deleteFileOps.incr();
}
public void incrGetListingOps() {
getListingOps.incr();
}
// delta: number of entries returned by one getListing call.
public void incrFilesInGetListingOps(int delta) {
filesInGetListingOps.incr(delta);
}
public void incrFileInfoOps() {
fileInfoOps.incr();
}
public void incrCreateSymlinkOps() {
createSymlinkOps.incr();
}
public void incrGetLinkTargetOps() {
getLinkTargetOps.incr();
}
public void incrAllowSnapshotOps() {
allowSnapshotOps.incr();
}
public void incrDisAllowSnapshotOps() {
disallowSnapshotOps.incr();
}
public void incrCreateSnapshotOps() {
createSnapshotOps.incr();
}
public void incrDeleteSnapshotOps() {
deleteSnapshotOps.incr();
}
public void incrRenameSnapshotOps() {
renameSnapshotOps.incr();
}
public void incrListSnapshottableDirOps() {
listSnapshottableDirOps.incr();
}
public void incrSnapshotDiffReportOps() {
snapshotDiffReportOps.incr();
}
public void incrBlockReceivedAndDeletedOps() {
blockReceivedAndDeletedOps.incr();
}
// Gauge (absolute value), not a counter: records the current queue depth.
public void setBlockOpsQueued(int size) {
blockOpsQueued.set(size);
}
// count: how many block ops were processed together in one batch.
public void addBlockOpsBatched(int count) {
blockOpsBatched.incr(count);
}
// latency: elapsed time of one edit-log transaction, folded into the rate.
public void addTransaction(long latency) {
transactions.add(latency);
}
public void incrTransactionsBatchedInSync(long count) {
transactionsBatchedInSync.incr(count);
}
public void incSuccessfulReReplications() {
successfulReReplications.incr();
}
public void incNumTimesReReplicationNotScheduled() {
numTimesReReplicationNotScheduled.incr();
}
public void incTimeoutReReplications() {
timeoutReReplications.incr();
}
/**
 * Records one journal sync latency, updating both the aggregate rate and
 * every configured percentile window.
 */
public void addSync(long elapsed) {
syncs.add(elapsed);
for (MutableQuantiles q : syncsQuantiles) {
q.add(elapsed);
}
}
// NOTE(review): narrowing cast to int truncates for values over ~24.8 days
// of milliseconds — presumably acceptable for a startup load time; confirm.
public void setFsImageLoadTime(long elapsed) {
fsImageLoadTime.set((int) elapsed);
}
/** Records one per-storage block report latency (rate + percentiles). */
public void addStorageBlockReport(long latency) {
storageBlockReport.add(latency);
for (MutableQuantiles q : storageBlockReportQuantiles) {
q.add(latency);
}
}
/** Records one cache report latency (rate + percentiles). */
public void addCacheBlockReport(long latency) {
cacheReport.add(latency);
for (MutableQuantiles q : cacheReportQuantiles) {
q.add(latency);
}
}
// NOTE(review): same long-to-int narrowing as setFsImageLoadTime — confirm.
public void setSafeModeTime(long elapsed) {
safeModeTime.set((int) elapsed);
}
// The three GetImageServlet rates below have no percentile companions.
public void addGetEdit(long latency) {
getEdit.add(latency);
}
public void addGetImage(long latency) {
getImage.add(latency);
}
public void addPutImage(long latency) {
putImage.add(latency);
}
/** Records one EDEK generation latency (rate + percentiles). */
public void addGenerateEDEKTime(long latency) {
generateEDEKTime.add(latency);
for (MutableQuantiles q : generateEDEKTimeQuantiles) {
q.add(latency);
}
}
/** Records one EDEK warm-up latency (rate + percentiles). */
public void addWarmUpEDEKTime(long latency) {
warmUpEDEKTime.add(latency);
for (MutableQuantiles q : warmUpEDEKTimeQuantiles) {
q.add(latency);
}
}
/** Records one NameNode resource-check latency (rate + percentiles). */
public void addResourceCheckTime(long latency) {
resourceCheckTime.add(latency);
for (MutableQuantiles q : resourceCheckTimeQuantiles) {
q.add(latency);
}
}
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.mvel.compiler.oopath;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.assertj.core.api.Assertions;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.kiesession.rulebase.InternalKnowledgeBase;
import org.drools.core.reteoo.BetaMemory;
import org.drools.core.reteoo.EntryPointNode;
import org.drools.core.reteoo.LeftInputAdapterNode;
import org.drools.core.reteoo.LeftTuple;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.reteoo.ReactiveFromNode;
import org.drools.core.reteoo.TupleMemory;
import org.drools.core.util.Iterator;
import org.drools.mvel.compiler.oopath.model.Adult;
import org.drools.mvel.compiler.oopath.model.Child;
import org.drools.mvel.compiler.oopath.model.Disease;
import org.drools.mvel.compiler.oopath.model.Group;
import org.drools.mvel.compiler.oopath.model.Man;
import org.drools.mvel.compiler.oopath.model.School;
import org.drools.mvel.compiler.oopath.model.Toy;
import org.drools.mvel.compiler.oopath.model.Woman;
import org.drools.mvel.integrationtests.SerializationHelper;
import org.drools.testcoverage.common.util.KieBaseTestConfiguration;
import org.drools.testcoverage.common.util.KieBaseUtil;
import org.drools.testcoverage.common.util.KieUtil;
import org.drools.testcoverage.common.util.TestParametersUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.api.KieBase;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.Message;
import org.kie.api.runtime.KieSession;
import static org.drools.mvel.compiler.oopath.model.BodyMeasurement.CHEST;
import static org.drools.mvel.compiler.oopath.model.BodyMeasurement.RIGHT_FOREARM;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@RunWith(Parameterized.class)
public class OOPathReactiveTest {
private final KieBaseTestConfiguration kieBaseTestConfiguration;
public OOPathReactiveTest(final KieBaseTestConfiguration kieBaseTestConfiguration) {
this.kieBaseTestConfiguration = kieBaseTestConfiguration;
}
@Parameterized.Parameters(name = "KieBase type={0}")
public static Collection<Object[]> getParameters() {
return TestParametersUtil.getKieBaseCloudConfigurations(true);
}
@Test
// Reactivity on an OOPath rooted directly at the pattern (left input adapter):
// mutating a traversed Child so it newly satisfies the [age > 10] filter must
// re-fire the rule for that child's toys only.
public void testReactiveOnLia() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
"  Man( $toy: /wife/children[age > 10]/toys )\n" +
"then\n" +
"  list.add( $toy.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final List<String> list = new ArrayList<>();
ksession.setGlobal( "list", list );
// Charles (12) passes the age filter; Debbie (10) does not yet.
final Woman alice = new Woman( "Alice", 38 );
final Man bob = new Man( "Bob", 40 );
bob.setWife( alice );
final Child charlie = new Child( "Charles", 12 );
final Child debbie = new Child( "Debbie", 10 );
alice.addChild( charlie );
alice.addChild( debbie );
charlie.addToy( new Toy( "car" ) );
charlie.addToy( new Toy( "ball" ) );
debbie.addToy( new Toy( "doll" ) );
ksession.insert( bob );
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("car", "ball");
list.clear();
// Raising Debbie's age to 11 makes her pass the filter; only her toy fires.
debbie.setAge( 11 );
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("doll");
}
@Test
// Verifies that reactive OOPath DELETES propagate: when a traversed Child
// stops matching the [age > 10] filter, its tuple must be retracted from the
// ReactiveFromNode's beta memory, not just skipped on the next evaluation.
public void testReactiveDeleteOnLia() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
"  Man( $toy: /wife/children[age > 10]/toys )\n" +
"then\n" +
"  list.add( $toy.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
// Walk the Rete network: OTN(Man) -> LIA -> /wife -> /children -> /toys;
// the third ReactiveFromNode's beta memory holds the matched Child tuples.
final EntryPointNode epn = ( (InternalKnowledgeBase) ksession.getKieBase() ).getRete().getEntryPointNodes().values().iterator().next();
final ObjectTypeNode otn = epn.getObjectTypeNodes().get( new ClassObjectType(Man.class) );
final LeftInputAdapterNode lian = (LeftInputAdapterNode)otn.getObjectSinkPropagator().getSinks()[0];
final ReactiveFromNode from1 = (ReactiveFromNode)lian.getSinkPropagator().getSinks()[0];
final ReactiveFromNode from2 = (ReactiveFromNode)from1.getSinkPropagator().getSinks()[0];
final ReactiveFromNode from3 = (ReactiveFromNode)from2.getSinkPropagator().getSinks()[0];
final BetaMemory betaMemory = ( (InternalWorkingMemory) ksession ).getNodeMemory(from3).getBetaMemory();
final List<String> list = new ArrayList<>();
ksession.setGlobal( "list", list );
// Both children (12 and 11) initially pass the age filter.
final Woman alice = new Woman( "Alice", 38 );
final Man bob = new Man( "Bob", 40 );
bob.setWife( alice );
final Child charlie = new Child( "Charles", 12 );
final Child debbie = new Child( "Debbie", 11 );
alice.addChild( charlie );
alice.addChild( debbie );
charlie.addToy( new Toy( "car" ) );
charlie.addToy( new Toy( "ball" ) );
debbie.addToy( new Toy( "doll" ) );
ksession.insert( bob );
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("car", "ball", "doll");
final TupleMemory tupleMemory = betaMemory.getLeftTupleMemory();
Assertions.assertThat(betaMemory.getLeftTupleMemory().size()).isEqualTo(2);
Iterator<LeftTuple> it = tupleMemory.iterator();
for ( LeftTuple next = it.next(); next != null; next = it.next() ) {
final Object obj = next.getFactHandle().getObject();
Assertions.assertThat(obj == charlie || obj == debbie).isTrue();
}
list.clear();
// Dropping Debbie below the threshold must retract her tuple: no new
// activations, and the beta memory shrinks to just Charles.
debbie.setAge( 10 );
ksession.fireAllRules();
Assertions.assertThat(list).isEmpty();
Assertions.assertThat(betaMemory.getLeftTupleMemory().size()).isEqualTo(1);
it = tupleMemory.iterator();
for ( LeftTuple next = it.next(); next != null; next = it.next() ) {
final Object obj = next.getFactHandle().getObject();
Assertions.assertThat(obj == charlie).isTrue();
}
}
@Test
public void testRemoveFromReactiveListBasic() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"\n" +
"rule R2 when\n" +
" School( $child: /children[age >= 13 && age < 20] )\n" +
"then\n" +
" System.out.println( $child );\n" +
" insertLogical( $child );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final Child charlie = new Child( "Charles", 15 );
final Child debbie = new Child( "Debbie", 19 );
final School school = new School( "Da Vinci" );
school.addChild( charlie );
ksession.insert( school );
ksession.fireAllRules();
assertTrue(ksession.getObjects().contains(charlie));
assertFalse(ksession.getObjects().contains(debbie));
school.addChild( debbie );
ksession.fireAllRules();
assertTrue(ksession.getObjects().contains(charlie));
assertTrue(ksession.getObjects().contains(debbie));
school.getChildren().remove( debbie );
ksession.fireAllRules();
assertTrue(ksession.getObjects().contains(charlie));
assertFalse(ksession.getObjects().contains(debbie));
school.addChild( debbie );
ksession.fireAllRules();
assertTrue(ksession.getObjects().contains(charlie));
assertTrue(ksession.getObjects().contains(debbie));
debbie.setAge( 20 );
ksession.fireAllRules();
assertTrue(ksession.getObjects().contains(charlie));
assertFalse(ksession.getObjects().contains(debbie));
}
@Test
public void testRemoveFromReactiveListExtended() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"\n" +
"rule R2 when\n" +
" Group( $id: name, $p: /members[age >= 20] )\n" +
"then\n" +
" System.out.println( $id + \".\" + $p.getName() );\n" +
" insertLogical( $id + \".\" + $p.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final Adult ada = new Adult("Ada", 19);
final Adult bea = new Adult("Bea", 19);
final Group x = new Group("X");
final Group y = new Group("Y");
x.addPerson(ada);
x.addPerson(bea);
y.addPerson(ada);
y.addPerson(bea);
ksession.insert( x );
ksession.insert( y );
ksession.fireAllRules();
assertFalse (factsCollection(ksession).contains("X.Ada"));
assertFalse (factsCollection(ksession).contains("X.Bea"));
assertFalse (factsCollection(ksession).contains("Y.Ada"));
assertFalse (factsCollection(ksession).contains("Y.Bea"));
ada.setAge( 20 );
ksession.fireAllRules();
ksession.getObjects().forEach(System.out::println);
assertTrue (factsCollection(ksession).contains("X.Ada"));
assertFalse (factsCollection(ksession).contains("X.Bea"));
assertTrue (factsCollection(ksession).contains("Y.Ada"));
assertFalse (factsCollection(ksession).contains("Y.Bea"));
y.removePerson(bea);
bea.setAge( 20 );
ksession.fireAllRules();
assertTrue (factsCollection(ksession).contains("X.Ada"));
assertTrue (factsCollection(ksession).contains("X.Bea"));
assertTrue (factsCollection(ksession).contains("Y.Ada"));
assertFalse (factsCollection(ksession).contains("Y.Bea"));
}
@Test
public void testRemoveFromAndAddToReactiveSet() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"\n" +
"rule R when\n" +
" School( $disease: /children/diseases )\n" +
"then\n" +
" insertLogical( $disease );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final Disease flu = new Disease("flu");
final Disease asthma = new Disease("asthma");
final Disease diabetes = new Disease("diabetes");
final Child charlie = new Child("Charles", 15);
charlie.addDisease(flu);
charlie.addDisease(asthma);
final Child debbie = new Child("Debbie", 19);
debbie.addDisease(diabetes);
final School school = new School("Da Vinci");
school.addChild(charlie);
school.addChild(debbie);
ksession.insert(school);
ksession.fireAllRules();
assertTrue(ksession.getObjects().contains(flu));
assertTrue(ksession.getObjects().contains(asthma));
assertTrue(ksession.getObjects().contains(diabetes));
charlie.getDiseases().remove(flu);
ksession.fireAllRules();
assertFalse(ksession.getObjects().contains(flu));
assertTrue(ksession.getObjects().contains(asthma));
assertTrue(ksession.getObjects().contains(diabetes));
charlie.getDiseases().remove(asthma);
ksession.fireAllRules();
assertFalse(ksession.getObjects().contains(flu));
assertFalse(ksession.getObjects().contains(asthma));
assertTrue(ksession.getObjects().contains(diabetes));
debbie.getDiseases().remove(diabetes);
ksession.fireAllRules();
assertFalse(ksession.getObjects().contains(flu));
assertFalse(ksession.getObjects().contains(asthma));
assertFalse(ksession.getObjects().contains(diabetes));
charlie.addDisease(flu);
ksession.fireAllRules();
assertTrue(ksession.getObjects().contains(flu));
assertFalse(ksession.getObjects().contains(asthma));
assertFalse(ksession.getObjects().contains(diabetes));
charlie.addDisease(asthma);
debbie.addDisease(diabetes);
ksession.fireAllRules();
assertTrue(ksession.getObjects().contains(flu));
assertTrue(ksession.getObjects().contains(asthma));
assertTrue(ksession.getObjects().contains(diabetes));
}
/**
* Same test as above but with serialization.
*/
@Test
public void testRemoveFromReactiveListExtendedWithSerialization() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"\n" +
"rule R2 when\n" +
" Group( $id: name, $p: /members[age >= 20] )\n" +
"then\n" +
" System.out.println( $id + \".\" + $p.getName() );\n" +
" insertLogical( $id + \".\" + $p.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
try {
ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession( ksession, true, false );
} catch ( Exception e ) {
e.printStackTrace();
fail( e.getMessage() );
}
final Adult ada = new Adult("Ada", 19);
final Adult bea = new Adult("Bea", 19);
final Group x = new Group("X");
final Group y = new Group("Y");
x.addPerson(ada);
x.addPerson(bea);
y.addPerson(ada);
y.addPerson(bea);
ksession.insert( x );
ksession.insert( y );
ksession.fireAllRules();
assertFalse (factsCollection(ksession).contains("X.Ada"));
assertFalse (factsCollection(ksession).contains("X.Bea"));
assertFalse (factsCollection(ksession).contains("Y.Ada"));
assertFalse (factsCollection(ksession).contains("Y.Bea"));
ada.setAge( 20 );
ksession.fireAllRules();
ksession.getObjects().forEach(System.out::println);
assertTrue (factsCollection(ksession).contains("X.Ada"));
assertFalse (factsCollection(ksession).contains("X.Bea"));
assertTrue (factsCollection(ksession).contains("Y.Ada"));
assertFalse (factsCollection(ksession).contains("Y.Bea"));
y.removePerson(bea);
bea.setAge( 20 );
ksession.fireAllRules();
assertTrue (factsCollection(ksession).contains("X.Ada"));
assertTrue (factsCollection(ksession).contains("X.Bea"));
assertTrue (factsCollection(ksession).contains("Y.Ada"));
assertFalse (factsCollection(ksession).contains("Y.Bea"));
}
@Test
public void testReactiveOnBeta() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
" $i : Integer()\n" +
" Man( $toy: /wife/children[age > $i]?/toys )\n" +
"then\n" +
" list.add( $toy.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final List<String> list = new ArrayList<>();
ksession.setGlobal( "list", list );
final Woman alice = new Woman( "Alice", 38 );
final Man bob = new Man( "Bob", 40 );
bob.setWife( alice );
final Child charlie = new Child( "Charles", 12 );
final Child debbie = new Child( "Debbie", 10 );
alice.addChild( charlie );
alice.addChild( debbie );
charlie.addToy( new Toy( "car" ) );
charlie.addToy( new Toy( "ball" ) );
debbie.addToy( new Toy( "doll" ) );
ksession.insert( 10 );
ksession.insert( bob );
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("car", "ball");
list.clear();
debbie.setAge( 11 );
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("doll");
}
@Test
public void testReactive2Rules() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List toyList\n" +
"global java.util.List teenagers\n" +
"\n" +
"rule R1 when\n" +
" $i : Integer()\n" +
" Man( $toy: /wife/children[age >= $i]/toys )\n" +
"then\n" +
" toyList.add( $toy.getName() );\n" +
"end\n" +
"rule R2 when\n" +
" School( $child: /children[age >= 13] )\n" +
"then\n" +
" teenagers.add( $child.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final List<String> toyList = new ArrayList<>();
ksession.setGlobal( "toyList", toyList );
final List<String> teenagers = new ArrayList<>();
ksession.setGlobal( "teenagers", teenagers );
final Woman alice = new Woman( "Alice", 38 );
final Man bob = new Man( "Bob", 40 );
bob.setWife( alice );
final Child charlie = new Child( "Charles", 15 );
final Child debbie = new Child( "Debbie", 12 );
alice.addChild( charlie );
alice.addChild( debbie );
charlie.addToy( new Toy( "car" ) );
charlie.addToy( new Toy( "ball" ) );
debbie.addToy( new Toy( "doll" ) );
final School school = new School( "Da Vinci" );
school.addChild( charlie );
school.addChild( debbie );
ksession.insert( 13 );
ksession.insert( bob );
ksession.insert( school );
ksession.fireAllRules();
Assertions.assertThat(toyList).containsExactlyInAnyOrder("car", "ball");
Assertions.assertThat(teenagers).containsExactlyInAnyOrder("Charles");
toyList.clear();
debbie.setAge( 13 );
ksession.fireAllRules();
Assertions.assertThat(toyList).containsExactlyInAnyOrder("doll");
Assertions.assertThat(teenagers).containsExactlyInAnyOrder("Charles", "Debbie");
}
@Test
public void testReactiveList() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
" Man( $toy: /wife/children[age > 10]/toys )\n" +
"then\n" +
" list.add( $toy.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final List<String> list = new ArrayList<>();
ksession.setGlobal( "list", list );
final Woman alice = new Woman( "Alice", 38 );
final Man bob = new Man( "Bob", 40 );
bob.setWife( alice );
final Child charlie = new Child( "Charles", 12 );
final Child debbie = new Child( "Debbie", 10 );
alice.addChild( charlie );
alice.addChild( debbie );
charlie.addToy( new Toy( "car" ) );
charlie.addToy( new Toy( "ball" ) );
debbie.addToy( new Toy( "doll" ) );
ksession.insert( bob );
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("car", "ball");
list.clear();
charlie.addToy( new Toy( "gun" ) );
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("gun");
}
@Test
public void testReactiveSet() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
" Man( $disease: /wife/children[age > 10]/diseases )\n" +
"then\n" +
" list.add( $disease.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final List<String> list = new ArrayList<>();
ksession.setGlobal("list", list);
final Woman alice = new Woman("Alice", 38);
final Man bob = new Man("Bob", 40);
bob.setWife(alice);
final Child charlie = new Child("Charles", 12);
final Child debbie = new Child("Debbie", 10);
alice.addChild(charlie);
alice.addChild(debbie);
charlie.addDisease(new Disease("flu"));
charlie.addDisease(new Disease("asthma"));
debbie.addDisease(new Disease("diabetes"));
ksession.insert(bob);
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("flu", "asthma");
list.clear();
charlie.addDisease(new Disease("epilepsy"));
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("epilepsy");
}
@Test
public void testReactiveMap() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
" Man( $bodyMeasurement: /wife/bodyMeasurementsMap/entrySet )\n" +
"then\n" +
" list.add( $bodyMeasurement.getValue() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final List<Integer> list = new ArrayList<>();
ksession.setGlobal("list", list);
final Man bob = new Man("Bob", 40);
final Woman alice = new Woman("Alice", 38);
alice.putBodyMeasurement(CHEST, 80);
bob.setWife(alice);
ksession.insert(bob);
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder(80);
list.clear();
alice.putBodyMeasurement(RIGHT_FOREARM, 38);
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder(38, 80);
}
@Test
public void testNonReactivePart() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
" Man( $toy: /wife/children[age > 10]?/toys )\n" +
"then\n" +
" list.add( $toy.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final List<String> list = new ArrayList<>();
ksession.setGlobal( "list", list );
final Woman alice = new Woman( "Alice", 38 );
final Man bob = new Man( "Bob", 40 );
bob.setWife( alice );
final Child charlie = new Child( "Charles", 12 );
final Child debbie = new Child( "Debbie", 10 );
alice.addChild( charlie );
alice.addChild( debbie );
charlie.addToy( new Toy( "car" ) );
charlie.addToy( new Toy( "ball" ) );
debbie.addToy( new Toy( "doll" ) );
ksession.insert( bob );
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("car", "ball");
list.clear();
charlie.addToy( new Toy( "robot" ) );
ksession.fireAllRules();
Assertions.assertThat(list).isEmpty();
}
@Test
public void testAllNonReactiveAfterNonReactivePart() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
" Man( $toy: ?/wife/children[age > 10]/toys )\n" +
"then\n" +
" list.add( $toy.getName() );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final List<String> list = new ArrayList<>();
ksession.setGlobal( "list", list );
final Woman alice = new Woman( "Alice", 38 );
final Man bob = new Man( "Bob", 40 );
bob.setWife( alice );
final Child charlie = new Child( "Charles", 12 );
final Child debbie = new Child( "Debbie", 10 );
alice.addChild( charlie );
alice.addChild( debbie );
charlie.addToy( new Toy( "car" ) );
charlie.addToy( new Toy( "ball" ) );
debbie.addToy( new Toy( "doll" ) );
ksession.insert( bob );
ksession.fireAllRules();
Assertions.assertThat(list).containsExactlyInAnyOrder("car", "ball");
list.clear();
charlie.addToy( new Toy( "robot" ) );
ksession.fireAllRules();
Assertions.assertThat(list).isEmpty();
}
@Test
public void testInvalidDoubleNonReactivePart() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
" Man( $toy: /wife?/children[age > 10]?/toys )\n" +
"then\n" +
" list.add( $toy.getName() );\n" +
"end\n";
KieBuilder kieBuilder = KieUtil.getKieBuilderFromDrls(kieBaseTestConfiguration, false, drl);
assertTrue(kieBuilder.getResults().hasMessages(Message.Level.ERROR));
}
@Test
public void testSingleFireOnReactiveChange() {
// DROOLS-1302
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"global java.util.List list\n" +
"\n" +
"rule R when\n" +
" Man( $toy: /wife/children[age > 10]/toys )\n" +
"then\n" +
" list.add( $toy );\n" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final List<String> list = new ArrayList<>();
ksession.setGlobal( "list", list );
final Woman alice = new Woman( "Alice", 38 );
final Man bob = new Man( "Bob", 40 );
bob.setWife( alice );
ksession.insert( bob );
ksession.fireAllRules();
list.clear();
final Child eleonor = new Child( "Eleonor", 10 );
alice.addChild( eleonor );
final Toy toy = new Toy( "eleonor toy 1" );
eleonor.addToy( toy );
eleonor.setAge(11);
ksession.fireAllRules();
Assertions.assertThat(list).hasSize(1);
list.clear();
toy.setName( "eleonor toy 2" );
ksession.fireAllRules();
Assertions.assertThat(list).hasSize(1);
}
@Test
public void testReactivitySettingAttributeInDrl() {
final String drl =
"import org.drools.mvel.compiler.oopath.model.*;\n" +
"\n" +
"rule R when\n" +
" Man( $child: /wife/children[age >= 10] )\n" +
"then\n" +
"end\n" +
"rule R2 when\n" +
" Man( $child: /wife/children[age < 10] )\n" +
"then\n" +
"$child.setAge(12);" +
"end\n";
KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
KieSession ksession = kbase.newKieSession();
final Man bob = new Man("Bob", 40);
final Woman alice = new Woman("Alice", 38);
final Child charlie = new Child("Charles", 9);
final Child debbie = new Child("Debbie", 8);
bob.setWife(alice);
alice.addChild(charlie);
alice.addChild(debbie);
ksession.insert(bob);
Assertions.assertThat(ksession.fireAllRules()).isEqualTo(4);
}
/**
 * Snapshots all facts currently in the session into a plain list so the
 * assertions above can use {@code contains} on logically-inserted facts.
 */
private List<?> factsCollection(KieSession ksession) {
// Copy constructor replaces the create-then-addAll two-step.
return new ArrayList<>(ksession.getObjects());
}
}
| |
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.dac.daemon;
import static com.dremio.config.DremioConfig.WEB_AUTH_TYPE;
import static com.dremio.service.reflection.ReflectionServiceImpl.LOCAL_TASK_LEADER_NAME;
import static com.dremio.service.users.SystemUser.SYSTEM_USERNAME;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.IOException;
import java.net.URI;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.function.Consumer;
import javax.inject.Provider;
import org.apache.arrow.memory.BufferAllocator;
import org.projectnessie.client.api.NessieApiV1;
import org.projectnessie.client.http.HttpClientBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.dremio.common.config.SabotConfig;
import com.dremio.common.nodes.NodeProvider;
import com.dremio.common.scanner.persistence.ScanResult;
import com.dremio.config.DremioConfig;
import com.dremio.context.RequestContext;
import com.dremio.context.TenantContext;
import com.dremio.context.UserContext;
import com.dremio.dac.daemon.DACDaemon.ClusterMode;
import com.dremio.dac.homefiles.HomeFileTool;
import com.dremio.dac.server.APIServer;
import com.dremio.dac.server.BufferAllocatorFactory;
import com.dremio.dac.server.DACConfig;
import com.dremio.dac.server.DremioServer;
import com.dremio.dac.server.DremioServlet;
import com.dremio.dac.server.LivenessService;
import com.dremio.dac.server.RestServerV2;
import com.dremio.dac.server.WebServer;
import com.dremio.dac.service.admin.KVStoreReportService;
import com.dremio.dac.service.catalog.CatalogServiceHelper;
import com.dremio.dac.service.collaboration.CollaborationHelper;
import com.dremio.dac.service.datasets.DACViewCreatorFactory;
import com.dremio.dac.service.datasets.DatasetVersionMutator;
import com.dremio.dac.service.exec.MasterElectionService;
import com.dremio.dac.service.exec.MasterStatusListener;
import com.dremio.dac.service.exec.MasterlessStatusListener;
import com.dremio.dac.service.flight.CoordinatorFlightProducer;
import com.dremio.dac.service.flight.FlightCloseableBindableService;
import com.dremio.dac.service.reflection.ReflectionServiceHelper;
import com.dremio.dac.service.search.SearchService;
import com.dremio.dac.service.search.SearchServiceImpl;
import com.dremio.dac.service.search.SearchServiceInvoker;
import com.dremio.dac.service.source.SourceService;
import com.dremio.dac.service.sysflight.SysFlightTablesProvider.JobsTable;
import com.dremio.dac.service.sysflight.SysFlightTablesProvider.MaterializationsTable;
import com.dremio.dac.service.sysflight.SysFlightTablesProvider.ReflectionDependenciesTable;
import com.dremio.dac.service.sysflight.SysFlightTablesProvider.ReflectionsTable;
import com.dremio.dac.service.users.UserServiceHelper;
import com.dremio.dac.support.BasicQueryLogBundleService;
import com.dremio.dac.support.BasicSupportService;
import com.dremio.dac.support.CoordinatorLogService;
import com.dremio.dac.support.QueryLogBundleService;
import com.dremio.dac.support.SupportService;
import com.dremio.datastore.adapter.LegacyKVStoreProviderAdapter;
import com.dremio.datastore.api.KVStoreProvider;
import com.dremio.datastore.api.LegacyIndexedStore;
import com.dremio.datastore.api.LegacyKVStoreProvider;
import com.dremio.datastore.transientstore.TransientStoreProvider;
import com.dremio.edition.EditionProvider;
import com.dremio.edition.EditionProviderImpl;
import com.dremio.exec.ExecConstants;
import com.dremio.exec.catalog.CatalogServiceImpl;
import com.dremio.exec.catalog.CatalogServiceSynchronizer;
import com.dremio.exec.catalog.ConnectionReader;
import com.dremio.exec.catalog.DatasetCatalogServiceImpl;
import com.dremio.exec.catalog.InformationSchemaServiceImpl;
import com.dremio.exec.catalog.MetadataRefreshInfoBroadcaster;
import com.dremio.exec.catalog.ViewCreatorFactory;
import com.dremio.exec.enginemanagement.proto.EngineManagementProtos.EngineId;
import com.dremio.exec.enginemanagement.proto.EngineManagementProtos.SubEngineId;
import com.dremio.exec.maestro.GlobalKeysService;
import com.dremio.exec.maestro.MaestroForwarder;
import com.dremio.exec.maestro.MaestroService;
import com.dremio.exec.maestro.MaestroServiceImpl;
import com.dremio.exec.maestro.NoOpMaestroForwarder;
import com.dremio.exec.planner.cost.DremioRelMetadataQuery;
import com.dremio.exec.planner.cost.RelMetadataQuerySupplier;
import com.dremio.exec.planner.observer.QueryObserverFactory;
import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint;
import com.dremio.exec.rpc.RpcConstants;
import com.dremio.exec.rpc.ssl.SSLConfigurator;
import com.dremio.exec.server.BootStrapContext;
import com.dremio.exec.server.ContextService;
import com.dremio.exec.server.JobResultInfoProvider;
import com.dremio.exec.server.MaterializationDescriptorProvider;
import com.dremio.exec.server.NodeRegistration;
import com.dremio.exec.server.SabotContext;
import com.dremio.exec.server.SimpleJobRunner;
import com.dremio.exec.server.SysFlightChannelProvider;
import com.dremio.exec.server.options.DefaultOptionManager;
import com.dremio.exec.server.options.OptionChangeBroadcaster;
import com.dremio.exec.server.options.OptionManagerWrapper;
import com.dremio.exec.server.options.OptionNotificationService;
import com.dremio.exec.server.options.OptionValidatorListingImpl;
import com.dremio.exec.server.options.ProjectOptionManager;
import com.dremio.exec.server.options.SessionOptionManager;
import com.dremio.exec.server.options.SessionOptionManagerImpl;
import com.dremio.exec.server.options.SystemOptionManager;
import com.dremio.exec.service.executor.ExecutorService;
import com.dremio.exec.service.executor.ExecutorServiceProductClientFactory;
import com.dremio.exec.service.jobresults.JobResultsSoftwareClientFactory;
import com.dremio.exec.service.jobtelemetry.JobTelemetrySoftwareClientFactory;
import com.dremio.exec.service.maestro.MaestroGrpcServerFacade;
import com.dremio.exec.service.maestro.MaestroSoftwareClientFactory;
import com.dremio.exec.store.CatalogService;
import com.dremio.exec.store.JobResultsStoreConfig;
import com.dremio.exec.store.dfs.FileSystemPlugin;
import com.dremio.exec.store.dfs.PDFSService;
import com.dremio.exec.store.dfs.PDFSService.PDFSMode;
import com.dremio.exec.store.sys.SystemTablePluginConfigProvider;
import com.dremio.exec.store.sys.accel.AccelerationListManager;
import com.dremio.exec.store.sys.accel.AccelerationManager;
import com.dremio.exec.store.sys.statistics.StatisticsAdministrationService;
import com.dremio.exec.store.sys.statistics.StatisticsListManager;
import com.dremio.exec.store.sys.statistics.StatisticsService;
import com.dremio.exec.work.WorkStats;
import com.dremio.exec.work.protector.ActiveQueryListService;
import com.dremio.exec.work.protector.ForemenTool;
import com.dremio.exec.work.protector.ForemenWorkManager;
import com.dremio.exec.work.protector.UserWorker;
import com.dremio.exec.work.rpc.CoordToExecTunnelCreator;
import com.dremio.exec.work.rpc.CoordTunnelCreator;
import com.dremio.exec.work.user.LocalQueryExecutor;
import com.dremio.options.OptionManager;
import com.dremio.options.OptionValidatorListing;
import com.dremio.plugins.sysflight.SysFlightPluginConfigProvider;
import com.dremio.provision.service.ProvisioningService;
import com.dremio.provision.service.ProvisioningServiceImpl;
import com.dremio.resource.ClusterResourceInformation;
import com.dremio.resource.GroupResourceInformation;
import com.dremio.resource.QueryCancelTool;
import com.dremio.resource.ResourceAllocator;
import com.dremio.resource.RuleBasedEngineSelector;
import com.dremio.resource.basic.BasicResourceAllocator;
import com.dremio.sabot.exec.CancelQueryContext;
import com.dremio.sabot.exec.CoordinatorHeapClawBackStrategy;
import com.dremio.sabot.exec.ExecToCoordTunnelCreator;
import com.dremio.sabot.exec.FragmentWorkManager;
import com.dremio.sabot.exec.HeapMonitorManager;
import com.dremio.sabot.exec.TaskPoolInitializer;
import com.dremio.sabot.exec.WorkloadTicketDepot;
import com.dremio.sabot.exec.WorkloadTicketDepotService;
import com.dremio.sabot.exec.context.ContextInformationFactory;
import com.dremio.sabot.op.common.spill.SpillServiceOptionsImpl;
import com.dremio.sabot.rpc.CoordExecService;
import com.dremio.sabot.rpc.ExecToCoordResultsHandler;
import com.dremio.sabot.rpc.ExecToCoordStatusHandler;
import com.dremio.sabot.rpc.user.UserServer;
import com.dremio.sabot.task.TaskPool;
import com.dremio.security.CredentialsService;
import com.dremio.service.InitializerRegistry;
import com.dremio.service.SingletonRegistry;
import com.dremio.service.acceleration.ReflectionDescriptionServiceGrpc;
import com.dremio.service.acceleration.ReflectionDescriptionServiceGrpc.ReflectionDescriptionServiceStub;
import com.dremio.service.accelerator.AccelerationListManagerImpl;
import com.dremio.service.accelerator.AccelerationListServiceImpl;
import com.dremio.service.catalog.DatasetCatalogServiceGrpc;
import com.dremio.service.catalog.DatasetCatalogServiceGrpc.DatasetCatalogServiceBlockingStub;
import com.dremio.service.catalog.InformationSchemaServiceGrpc;
import com.dremio.service.catalog.InformationSchemaServiceGrpc.InformationSchemaServiceBlockingStub;
import com.dremio.service.commandpool.CommandPool;
import com.dremio.service.commandpool.CommandPoolFactory;
import com.dremio.service.conduit.ConduitUtils;
import com.dremio.service.conduit.client.ConduitProvider;
import com.dremio.service.conduit.client.ConduitProviderImpl;
import com.dremio.service.conduit.server.ConduitInProcessChannelProvider;
import com.dremio.service.conduit.server.ConduitServer;
import com.dremio.service.conduit.server.ConduitServiceRegistry;
import com.dremio.service.conduit.server.ConduitServiceRegistryImpl;
import com.dremio.service.coordinator.ClusterCoordinator;
import com.dremio.service.coordinator.ClusterElectionManager;
import com.dremio.service.coordinator.ClusterServiceSetManager;
import com.dremio.service.coordinator.DremioAssumeRoleCredentialsProviderV1;
import com.dremio.service.coordinator.DremioAssumeRoleCredentialsProviderV2;
import com.dremio.service.coordinator.ExecutorSetService;
import com.dremio.service.coordinator.LocalExecutorSetService;
import com.dremio.service.coordinator.NoOpClusterCoordinator;
import com.dremio.service.coordinator.ProjectConfig;
import com.dremio.service.coordinator.ProjectConfigImpl;
import com.dremio.service.coordinator.ProjectConfigStore;
import com.dremio.service.coordinator.ProjectRoleInitializer;
import com.dremio.service.coordinator.SoftwareAssumeRoleCredentialsProvider;
import com.dremio.service.coordinator.SoftwareCoordinatorModeInfo;
import com.dremio.service.coordinator.local.LocalClusterCoordinator;
import com.dremio.service.coordinator.zk.ZKClusterCoordinator;
import com.dremio.service.execselector.ExecutorSelectionService;
import com.dremio.service.execselector.ExecutorSelectionServiceImpl;
import com.dremio.service.execselector.ExecutorSelectorFactory;
import com.dremio.service.execselector.ExecutorSelectorFactoryImpl;
import com.dremio.service.execselector.ExecutorSelectorProvider;
import com.dremio.service.executor.ExecutorServiceClientFactory;
import com.dremio.service.flight.DremioFlightService;
import com.dremio.service.grpc.GrpcChannelBuilderFactory;
import com.dremio.service.grpc.GrpcServerBuilderFactory;
import com.dremio.service.grpc.MultiTenantGrpcServerBuilderFactory;
import com.dremio.service.grpc.SingleTenantGrpcChannelBuilderFactory;
import com.dremio.service.job.ChronicleGrpc;
import com.dremio.service.job.proto.JobId;
import com.dremio.service.job.proto.JobResult;
import com.dremio.service.jobresults.client.JobResultsClientFactory;
import com.dremio.service.jobresults.server.JobResultsGrpcServerFacade;
import com.dremio.service.jobs.Chronicle;
import com.dremio.service.jobs.HybridJobsService;
import com.dremio.service.jobs.JobResultsStore;
import com.dremio.service.jobs.JobsFlightProducer;
import com.dremio.service.jobs.JobsService;
import com.dremio.service.jobs.JobsServiceAdapter;
import com.dremio.service.jobs.LocalJobsService;
import com.dremio.service.jobtelemetry.JobTelemetryClient;
import com.dremio.service.jobtelemetry.client.JobTelemetryExecutorClientFactory;
import com.dremio.service.jobtelemetry.server.LocalJobTelemetryServer;
import com.dremio.service.listing.DatasetListingInvoker;
import com.dremio.service.listing.DatasetListingService;
import com.dremio.service.listing.DatasetListingServiceImpl;
import com.dremio.service.maestroservice.MaestroClientFactory;
import com.dremio.service.namespace.NamespaceService;
import com.dremio.service.namespace.NamespaceServiceImpl;
import com.dremio.service.namespace.SplitOrphansCleanerService;
import com.dremio.service.nessie.NessieApiV1Unsupported;
import com.dremio.service.nessie.NessieService;
import com.dremio.service.nessieapi.ContentsApiGrpc;
import com.dremio.service.nessieapi.TreeApiGrpc;
import com.dremio.service.reflection.AccelerationManagerImpl;
import com.dremio.service.reflection.ExecutorOnlyReflectionService;
import com.dremio.service.reflection.ReflectionAdministrationService;
import com.dremio.service.reflection.ReflectionService;
import com.dremio.service.reflection.ReflectionServiceImpl;
import com.dremio.service.reflection.ReflectionStatusService;
import com.dremio.service.reflection.ReflectionStatusServiceImpl;
import com.dremio.service.scheduler.LocalSchedulerService;
import com.dremio.service.scheduler.SchedulerService;
import com.dremio.service.spill.SpillService;
import com.dremio.service.spill.SpillServiceImpl;
import com.dremio.service.statistics.StatisticsListManagerImpl;
import com.dremio.service.statistics.StatisticsServiceImpl;
import com.dremio.service.sysflight.SysFlightDataProvider;
import com.dremio.service.sysflight.SysFlightProducer;
import com.dremio.service.sysflight.SystemTableManager;
import com.dremio.service.sysflight.SystemTableManagerImpl;
import com.dremio.service.tokens.TokenManager;
import com.dremio.service.tokens.TokenManagerImpl;
import com.dremio.service.users.SimpleUserService;
import com.dremio.service.users.UserService;
import com.dremio.service.usersessions.UserSessionService;
import com.dremio.service.usersessions.UserSessionServiceImpl;
import com.dremio.services.fabric.FabricServiceImpl;
import com.dremio.services.fabric.api.FabricService;
import com.dremio.ssl.SSLEngineFactory;
import com.dremio.telemetry.utils.GrpcTracerFacade;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.Maps;
import com.google.inject.util.Providers;
import io.opentracing.Tracer;
/**
 * DAC module that sets up the Dremio daemon: binds and wires all coordinator/executor services.
 */
public class DACDaemonModule implements DACModule {
private static final Logger logger = LoggerFactory.getLogger(DACDaemonModule.class);
// Name of the internal storage plugin that backs the job results store
// (used when configuring the JobResultsStoreConfig for LocalJobsService).
public static final String JOBS_STORAGEPLUGIN_NAME = "__jobResultsStore";
// Name of the user-visible "$scratch" storage plugin.
public static final String SCRATCH_STORAGEPLUGIN_NAME = "$scratch";
public DACDaemonModule() {}
@Override
public void bootstrap(final Runnable shutdownHook, final SingletonRegistry bootstrapRegistry,
    ScanResult scanResult, DACConfig dacConfig, boolean isMaster) {
  final DremioConfig dremioConfig = dacConfig.getConfig();
  final boolean useEmbeddedZk = dremioConfig.getBoolean(DremioConfig.EMBEDDED_MASTER_ZK_ENABLED_BOOL);
  final BootStrapContext bootstrapContext =
      new BootStrapContext(dremioConfig, scanResult, bootstrapRegistry);
  boolean masterless = dremioConfig.isMasterlessEnabled();

  bootstrapRegistry.bindSelf(bootstrapContext);
  bootstrapRegistry.bind(BufferAllocator.class, bootstrapContext.getAllocator());

  // The cluster coordinator is brought up before all other services so that
  // non-master nodes can poll for master status.
  if (dacConfig.getClusterMode() == ClusterMode.LOCAL) {
    bootstrapRegistry.bind(ClusterCoordinator.class, new LocalClusterCoordinator());
  } else if (dremioConfig.getBoolean(DremioConfig.NO_OP_CLUSTER_COORDINATOR_ENABLED)) {
    // No-op coordination is only valid on non-master nodes and implies masterless mode.
    masterless = true;
    Preconditions.checkState(!isMaster);
    bootstrapRegistry.bind(ClusterCoordinator.class, new NoOpClusterCoordinator());
  } else {
    // ZKClusterCoordinator has a runtime dependency on ZooKeeper: when this master
    // node hosts the embedded ZooKeeper, start it first; with auto ports, resolve
    // the actual port lazily through the registry.
    final Provider<Integer> zkPortProvider;
    if (isMaster && useEmbeddedZk) {
      final ZkServer zkServer = new ZkServer(
          dremioConfig.getString(DremioConfig.EMBEDDED_MASTER_ZK_ENABLED_PATH_STRING),
          dremioConfig.getInt(DremioConfig.EMBEDDED_MASTER_ZK_ENABLED_PORT_INT),
          dacConfig.autoPort);
      bootstrapRegistry.bindSelf(zkServer);
      zkPortProvider = dacConfig.autoPort
          ? () -> bootstrapRegistry.lookup(ZkServer.class).getPort()
          : null;
    } else {
      zkPortProvider = null;
    }

    final ZKClusterCoordinator zkCoordinator;
    try {
      zkCoordinator = new ZKClusterCoordinator(dremioConfig.getSabotConfig(), zkPortProvider);
    } catch (IOException e) {
      throw new RuntimeException("Cannot instantiate the ZooKeeper cluster coordinator", e);
    }
    bootstrapRegistry.bind(ClusterCoordinator.class, zkCoordinator);
  }

  // Master election runs on the master node unless explicitly disabled for debugging.
  if (isMaster && !dremioConfig.getBoolean(DremioConfig.DEBUG_DISABLE_MASTER_ELECTION_SERVICE_BOOL)) {
    bootstrapRegistry.bindSelf(
        new MasterElectionService(bootstrapRegistry.provider(ClusterCoordinator.class)));
  }

  final Provider<ClusterServiceSetManager> serviceSetManagerProvider =
      () -> bootstrapRegistry.provider(ClusterCoordinator.class).get();
  final MasterStatusListener statusListener = masterless
      ? new MasterlessStatusListener(serviceSetManagerProvider, isMaster)
      : new MasterStatusListener(serviceSetManagerProvider, dremioConfig.getSabotConfig(), isMaster);

  // Start tracking master status.
  bootstrapRegistry.bind(MasterStatusListener.class, statusListener);
  bootstrapRegistry.bindProvider(EngineId.class, Providers.of(null));
  bootstrapRegistry.bindProvider(SubEngineId.class, Providers.of(null));

  // Default request context: system user within the default service tenant.
  bootstrapRegistry.bind(RequestContext.class,
      RequestContext.empty()
          .with(TenantContext.CTX_KEY, TenantContext.DEFAULT_SERVICE_CONTEXT)
          .with(UserContext.CTX_KEY, UserContext.SYSTEM_USER_CONTEXT));
}
@Override
public void build(final SingletonRegistry bootstrapRegistry, final SingletonRegistry registry, ScanResult scanResult,
DACConfig dacConfig, boolean isMaster){
final DremioConfig config = dacConfig.getConfig();
final SabotConfig sabotConfig = config.getSabotConfig();
final BootStrapContext bootstrap = bootstrapRegistry.lookup(BootStrapContext.class);
final boolean isMasterless = config.isMasterlessEnabled();
final boolean isCoordinator = config.getBoolean(DremioConfig.ENABLE_COORDINATOR_BOOL);
final boolean isExecutor = config.getBoolean(DremioConfig.ENABLE_EXECUTOR_BOOL);
final boolean isDistributedCoordinator = isMasterless && isCoordinator;
final boolean isDistributedMaster = isDistributedCoordinator || isMaster;
final Provider<NodeEndpoint> masterEndpoint = new Provider<NodeEndpoint>() {
private final Provider<MasterStatusListener> masterStatusListener =
registry.provider(MasterStatusListener.class);
@Override
public NodeEndpoint get() {
return masterStatusListener.get().getMasterNode();
}
};
final Provider<SabotContext> sabotContextProvider = registry.provider(SabotContext.class);
final Provider<NodeEndpoint> selfEndpoint = () -> sabotContextProvider.get().getEndpoint();
registry.bind(java.util.concurrent.ExecutorService.class, bootstrap.getExecutor());
final BufferAllocatorFactory bufferAllocatorFactory = new BufferAllocatorFactory(bootstrap.getAllocator(), "WebServer");
registry.bindSelf(bufferAllocatorFactory);
EnumSet<ClusterCoordinator.Role> roles = EnumSet.noneOf(ClusterCoordinator.Role.class);
if (isMaster) {
roles.add(ClusterCoordinator.Role.MASTER);
}
if (isCoordinator) {
roles.add(ClusterCoordinator.Role.COORDINATOR);
}
if (isExecutor) {
roles.add(ClusterCoordinator.Role.EXECUTOR);
}
registry.bindSelf(config);
registry.bind(ConnectionReader.class, ConnectionReader.of(scanResult, sabotConfig));
// register default providers.
registry.bind(MaterializationDescriptorProvider.class, MaterializationDescriptorProvider.EMPTY);
registry.bind(QueryObserverFactory.class, QueryObserverFactory.DEFAULT);
// copy bootstrap bindings to the main registry.
bootstrapRegistry.copyBindings(registry);
registry.bind(GrpcChannelBuilderFactory.class,
new SingleTenantGrpcChannelBuilderFactory(
registry.lookup(Tracer.class),
registry.provider(RequestContext.class),
() -> { return Maps.newHashMap();}
)
);
registry.bind(GrpcServerBuilderFactory.class, new MultiTenantGrpcServerBuilderFactory(registry.lookup(Tracer.class)));
final String fabricAddress = getFabricAddress();
registry.bind(
FabricService.class,
new FabricServiceImpl(
fabricAddress,
dacConfig.localPort,
dacConfig.autoPort,
sabotConfig.getInt(ExecConstants.BIT_SERVER_RPC_THREADS),
bootstrap.getAllocator(),
config.getBytes(DremioConfig.FABRIC_MEMORY_RESERVATION),
Long.MAX_VALUE,
sabotConfig.getInt(RpcConstants.BIT_RPC_TIMEOUT),
bootstrap.getExecutor()
));
final Optional<SSLEngineFactory> conduitSslEngineFactory;
try {
final SSLConfigurator conduitSslConfigurator =
new SSLConfigurator(config, ConduitUtils.CONDUIT_SSL_PREFIX, "conduit");
conduitSslEngineFactory = SSLEngineFactory.create(
conduitSslConfigurator.getSSLConfig(false, fabricAddress));
} catch (Exception e) {
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
final ConduitServiceRegistry conduitServiceRegistry = new ConduitServiceRegistryImpl();
registry.bind(ConduitServiceRegistry.class, conduitServiceRegistry);
final String inProcessServerName = UUID.randomUUID().toString();
registry.bind(ConduitServer.class,
new ConduitServer(
registry.provider(ConduitServiceRegistry.class),
config.getInt(DremioConfig.CONDUIT_PORT_INT),
conduitSslEngineFactory,
inProcessServerName
)
);
// should be after conduit server
final ConduitInProcessChannelProvider conduitInProcessChannelProvider = new ConduitInProcessChannelProvider(inProcessServerName);
registry.bind(ConduitInProcessChannelProvider.class, conduitInProcessChannelProvider);
// for masterless case, this defaults to the local conduit server.
final Provider<NodeEndpoint> conduitEndpoint;
if (!isMasterless) {
conduitEndpoint = masterEndpoint;
} else {
conduitEndpoint = selfEndpoint;
}
final ConduitProviderImpl conduitProvider = new ConduitProviderImpl(
conduitEndpoint,
conduitSslEngineFactory
);
registry.bind(ConduitProvider.class, conduitProvider);
registry.bind(ConduitProviderImpl.class, conduitProvider); // this bind manages lifecycle
registry.bindProvider(InformationSchemaServiceBlockingStub.class,
() -> InformationSchemaServiceGrpc.newBlockingStub(conduitProvider.getOrCreateChannelToMaster()));
registry.bindProvider(DatasetCatalogServiceBlockingStub.class,
() -> DatasetCatalogServiceGrpc.newBlockingStub(conduitProvider.getOrCreateChannelToMaster()));
registry.bindProvider(TreeApiGrpc.TreeApiBlockingStub.class,
() -> TreeApiGrpc.newBlockingStub(conduitProvider.getOrCreateChannelToMaster()));
registry.bindProvider(ContentsApiGrpc.ContentsApiBlockingStub.class,
() -> ContentsApiGrpc.newBlockingStub(conduitProvider.getOrCreateChannelToMaster()));
registry.bindProvider(NessieApiV1.class, () -> getNessieClientInstance(config));
registry.bind(
KVStoreProvider.class,
KVStoreProviderHelper.newKVStoreProvider(
dacConfig,
bootstrap,
registry.provider(FabricService.class),
masterEndpoint,
bootstrapRegistry.lookup(Tracer.class)
)
);
// should be after the kv store
registry.bind(ProjectRoleInitializer.class, new ProjectRoleInitializer() {
@Override
public void start() throws Exception {
// NO-OP
}
@Override
public void close() throws Exception {
// NO-OP
}
});
registry.bind(
LegacyKVStoreProvider.class,
new LegacyKVStoreProviderAdapter(
registry.provider(KVStoreProvider.class).get())
);
registry.bind(
ViewCreatorFactory.class,
new DACViewCreatorFactory(
registry.provider(InitializerRegistry.class),
registry.provider(LegacyKVStoreProvider.class),
registry.provider(JobsService.class),
registry.provider(NamespaceService.Factory.class),
registry.provider(CatalogService.class),
registry.provider(ContextService.class),
() -> bootstrap.getAllocator()
)
);
com.dremio.services.credentials.CredentialsService credentialsService = com.dremio.services.credentials.CredentialsService.newInstance(config, scanResult);
registry.bind(com.dremio.services.credentials.CredentialsService.class, credentialsService);
// RPC Endpoints.
if (isCoordinator) {
registry.bindSelf(
new UserServer(
config,
registry.provider(java.util.concurrent.ExecutorService.class),
registry.provider(BufferAllocator.class),
registry.provider(UserService.class),
registry.provider(NodeEndpoint.class),
registry.provider(UserWorker.class),
dacConfig.autoPort,
bootstrapRegistry.lookup(Tracer.class),
registry.provider(OptionValidatorListing.class)
)
);
}
// Context Service.
final ContextService contextService = new ContextService(
bootstrap,
registry.provider(ClusterCoordinator.class),
registry.provider(GroupResourceInformation.class),
registry.provider(WorkStats.class),
registry.provider(LegacyKVStoreProvider.class),
registry.provider(FabricService.class),
registry.provider(ConduitServer.class),
registry.provider(UserServer.class),
registry.provider(MaterializationDescriptorProvider.class),
registry.provider(QueryObserverFactory.class),
registry.provider(AccelerationManager.class),
registry.provider(AccelerationListManager.class),
registry.provider(NamespaceService.Factory.class),
registry.provider(DatasetListingService.class),
registry.provider(UserService.class),
registry.provider(CatalogService.class),
registry.provider(ConduitProvider.class),
registry.provider(InformationSchemaServiceBlockingStub.class),
registry.provider(ViewCreatorFactory.class),
registry.provider(SpillService.class),
registry.provider(ConnectionReader.class),
registry.provider(CredentialsService.class),
registry.provider(JobResultInfoProvider.class),
registry.provider(OptionManager.class),
registry.provider(SystemOptionManager.class),
bootstrapRegistry.provider(EngineId.class),
bootstrapRegistry.provider(SubEngineId.class),
registry.provider(OptionValidatorListing.class),
roles,
() -> new SoftwareCoordinatorModeInfo(),
registry.provider(TreeApiGrpc.TreeApiBlockingStub.class),
registry.provider(ContentsApiGrpc.ContentsApiBlockingStub.class),
registry.provider(NessieApiV1.class),
registry.provider(StatisticsService.class),
registry.provider(StatisticsAdministrationService.Factory.class),
registry.provider(StatisticsListManager.class),
registry.provider(RelMetadataQuerySupplier.class),
registry.provider(SimpleJobRunner.class),
registry.provider(DatasetCatalogServiceBlockingStub.class),
registry.provider(GlobalKeysService.class),
registry.provider(com.dremio.services.credentials.CredentialsService.class),
registry.provider(ConduitInProcessChannelProvider.class),
registry.provider(SysFlightChannelProvider.class));
registry.bind(SysFlightChannelProvider.class, SysFlightChannelProvider.NO_OP);
registry.bind(ContextService.class, contextService);
registry.bindProvider(SabotContext.class, contextService::get);
registry.bindProvider(NodeEndpoint.class, contextService::getEndpoint);
registry.bindProvider(GlobalKeysService.class, () -> GlobalKeysService.NO_OP);
final Provider<NodeEndpoint> currentEndPoint =
() -> registry.provider(SabotContext.class).get().getEndpoint();
final Provider<ClusterServiceSetManager> clusterServiceSetManagerProvider =
() -> registry.provider(ClusterCoordinator.class).get();
final Provider<ClusterElectionManager> clusterElectionManagerProvider =
() -> registry.provider(ClusterCoordinator.class).get();
final boolean isInternalUGS = setupUserService(registry, dacConfig.getConfig(),
registry.provider(SabotContext.class), isMaster, isCoordinator);
registry.bind(NamespaceService.Factory.class, NamespaceServiceImpl.Factory.class);
final DatasetListingService localListing;
if (isDistributedMaster) {
localListing = new DatasetListingServiceImpl(registry.provider(NamespaceService.Factory.class));
} else {
localListing = DatasetListingService.UNSUPPORTED;
}
final Provider<NodeEndpoint> searchEndPoint = () -> {
// will return master endpoint if it's masterful mode
Optional<NodeEndpoint> serviceEndPoint =
registry.provider(SabotContext.class).get().getServiceLeader(SearchServiceImpl.LOCAL_TASK_LEADER_NAME);
return serviceEndPoint.orElse(null);
};
// this is the delegate service for localListing (calls start/close internally)
registry.bind(DatasetListingService.class,
new DatasetListingInvoker(
isDistributedMaster,
searchEndPoint,
registry.provider(FabricService.class),
bootstrap.getAllocator(),
localListing));
registry.bindSelf(new CoordExecService(
bootstrap.getConfig(),
bootstrap.getAllocator(),
registry.provider(FabricService.class),
registry.provider(ExecutorService.class),
registry.provider(ExecToCoordResultsHandler.class),
registry.provider(ExecToCoordStatusHandler.class),
registry.provider(NodeEndpoint.class),
registry.provider(JobTelemetryClient.class)
));
registry.bind(HomeFileTool.HostNameProvider.class, config::getThisNode);
registry.bindSelf(HomeFileTool.class);
registry.bindSelf(CredentialsService.class);
// Periodic task scheduler service
registry.bind(SchedulerService.class, new LocalSchedulerService(
config.getInt(DremioConfig.SCHEDULER_SERVICE_THREAD_COUNT),
clusterServiceSetManagerProvider, clusterElectionManagerProvider, currentEndPoint,
isDistributedCoordinator));
final OptionChangeBroadcaster systemOptionChangeBroadcaster =
new OptionChangeBroadcaster(
registry.provider(ConduitProvider.class),
() -> registry.provider(ClusterCoordinator.class)
.get()
.getServiceSet(ClusterCoordinator.Role.COORDINATOR)
.getAvailableEndpoints(),
currentEndPoint);
final OptionValidatorListing optionValidatorListing = new OptionValidatorListingImpl(scanResult);
final DefaultOptionManager defaultOptionManager = new DefaultOptionManager(optionValidatorListing);
registry.bindProvider(SessionOptionManager.class, () -> new SessionOptionManagerImpl(optionValidatorListing));
registry.bindProvider(UserSessionService.class, () -> new UserSessionServiceImpl(registry.provider(OptionManager.class), registry.provider(SessionOptionManager.class), registry.provider(TransientStoreProvider.class)));
final SystemOptionManager systemOptionManager;
if (isCoordinator) {
systemOptionManager = new SystemOptionManager(optionValidatorListing,
bootstrap.getLpPersistance(),
registry.provider(LegacyKVStoreProvider.class),
registry.provider(SchedulerService.class),
systemOptionChangeBroadcaster,
!isCoordinator);
conduitServiceRegistry.registerService(new OptionNotificationService(registry.provider(SystemOptionManager.class)));
} else {
systemOptionManager = new SystemOptionManager(optionValidatorListing,
bootstrap.getLpPersistance(),
registry.provider(LegacyKVStoreProvider.class),
!isCoordinator);
}
final OptionManagerWrapper optionManagerWrapper = OptionManagerWrapper.Builder.newBuilder()
.withOptionValidatorProvider(optionValidatorListing)
.withOptionManager(defaultOptionManager)
.withOptionManager(systemOptionManager)
.build();
final ProjectOptionManagerWrapper projectOptionManagerWrapper = new ProjectOptionManagerWrapper(systemOptionManager, defaultOptionManager);
// SystemOptionManager must be bound because it must be #start'ed
registry.bind(SystemOptionManager.class, systemOptionManager);
registry.bind(OptionValidatorListing.class, optionValidatorListing);
registry.bind(OptionManager.class, optionManagerWrapper);
registry.bind(ProjectOptionManager.class, projectOptionManagerWrapper);
if (isDistributedMaster) {
// Companion service to clean split orphans
registry.bind(SplitOrphansCleanerService.class, new SplitOrphansCleanerService(
registry.provider(SchedulerService.class),
registry.provider(NamespaceService.Factory.class),
registry.provider(OptionManager.class)));
}
final Provider<Iterable<NodeEndpoint>> executorsProvider = () -> sabotContextProvider.get().getExecutors();
if(isExecutor) {
registry.bind(SpillService.class, new SpillServiceImpl(
config,
new SpillServiceOptionsImpl(registry.provider(OptionManager.class)),
registry.provider(SchedulerService.class),
selfEndpoint,
(isMasterless) ? null : executorsProvider
)
);
}
registry.bind(GroupResourceInformation.class,
new ClusterResourceInformation(registry.provider(ClusterCoordinator.class)));
// PDFS depends on fabric.
registry.bind(PDFSService.class, new PDFSService(
registry.provider(FabricService.class),
selfEndpoint,
isCoordinator ? executorsProvider : () -> Collections.singleton(selfEndpoint.get()),
bootstrapRegistry.lookup(Tracer.class),
sabotConfig,
bootstrap.getAllocator(),
isExecutor ? PDFSMode.DATA : PDFSMode.CLIENT
));
registry.bindSelf(new SystemTablePluginConfigProvider());
registry.bind(SysFlightPluginConfigProvider.class, new SysFlightPluginConfigProvider(registry.provider(NodeEndpoint.class)));
final MetadataRefreshInfoBroadcaster metadataRefreshInfoBroadcaster =
new MetadataRefreshInfoBroadcaster(
registry.provider(ConduitProvider.class),
() -> registry.provider(ClusterCoordinator.class)
.get()
.getServiceSet(ClusterCoordinator.Role.COORDINATOR)
.getAvailableEndpoints(),
() -> registry.provider(SabotContext.class).get().getEndpoint());
registry.bindSelf(metadataRefreshInfoBroadcaster);
if (isCoordinator) {
registry.bind(ProjectConfigStore.class, ProjectConfigStore.NO_OP);
registry.bind(ProjectConfig.class, new ProjectConfigImpl(registry.provider(DremioConfig.class),
registry.provider(ProjectConfigStore.class)));
// register a no-op assume role provider
final SoftwareAssumeRoleCredentialsProvider softwareAssumeRoleCredentialsProvider = new SoftwareAssumeRoleCredentialsProvider();
DremioAssumeRoleCredentialsProviderV2.setAssumeRoleProvider(() -> {
return softwareAssumeRoleCredentialsProvider;
});
DremioAssumeRoleCredentialsProviderV1.setAssumeRoleProvider(() -> {
return softwareAssumeRoleCredentialsProvider;
});
}
registry.bind(CatalogService.class, new CatalogServiceImpl(
registry.provider(SabotContext.class),
registry.provider(SchedulerService.class),
registry.provider(SystemTablePluginConfigProvider.class),
registry.provider(SysFlightPluginConfigProvider.class),
registry.provider(FabricService.class),
registry.provider(ConnectionReader.class),
registry.provider(BufferAllocator.class),
registry.provider(LegacyKVStoreProvider.class),
registry.provider(DatasetListingService.class),
registry.provider(OptionManager.class),
() -> metadataRefreshInfoBroadcaster,
config,
roles
));
conduitServiceRegistry.registerService(new InformationSchemaServiceImpl(registry.provider(CatalogService.class),
bootstrap::getExecutor));
if (isCoordinator) {
conduitServiceRegistry.registerService(new CatalogServiceSynchronizer(registry.provider(CatalogService.class)));
conduitServiceRegistry.registerService(new DatasetCatalogServiceImpl(
registry.provider(CatalogService.class), registry.provider(NamespaceService.Factory.class)));
}
// Run initializers only on coordinator.
if (isCoordinator) {
registry.bindSelf(new InitializerRegistry(bootstrap.getClasspathScan(), registry.getBindingProvider()));
}
registry.bind(CommandPool.class, CommandPoolFactory.INSTANCE.newPool(config, bootstrapRegistry.lookup(Tracer.class)));
final Provider<NamespaceService> namespaceServiceProvider = () -> sabotContextProvider.get().getNamespaceService(SYSTEM_USERNAME);
if (isCoordinator) {
registry.bind(KVStoreReportService.class,
new KVStoreReportService(registry.provider(LegacyKVStoreProvider.class),
namespaceServiceProvider,
registry.provider(java.util.concurrent.ExecutorService.class)));
}
registry.bind(JobTelemetryClient.class,
new JobTelemetryClient(registry.lookup(GrpcChannelBuilderFactory.class),
registry.provider(NodeEndpoint.class)));
LocalJobsService localJobsService = null;
if (isCoordinator) {
Provider<JobResultsStoreConfig> jobResultsStoreConfigProvider = getJobResultsStoreConfigProvider(registry);
Provider<LegacyKVStoreProvider> kvStoreProviderProvider = registry.provider(LegacyKVStoreProvider.class);
BufferAllocator allocator = getChildBufferAllocator(bootstrap.getAllocator());
Provider<JobResultsStore> jobResultsStoreProvider = getJobResultsStoreProvider(jobResultsStoreConfigProvider,
kvStoreProviderProvider,
allocator);
localJobsService = new LocalJobsService(
kvStoreProviderProvider,
allocator,
jobResultsStoreConfigProvider,
jobResultsStoreProvider,
registry.provider(LocalQueryExecutor.class),
registry.provider(CoordTunnelCreator.class),
registry.provider(ForemenTool.class),
registry.provider(NodeEndpoint.class),
() -> sabotContextProvider.get().getCoordinators(),
namespaceServiceProvider,
registry.provider(OptionManager.class),
registry.provider(AccelerationManager.class),
registry.provider(SchedulerService.class),
registry.provider(CommandPool.class),
registry.provider(JobTelemetryClient.class),
LocalJobsService.createJobResultLogger(),
isDistributedMaster,
registry.provider(ConduitProvider.class)
);
registry.bind(LocalJobsService.class, localJobsService);
registry.bind(SimpleJobRunner.class, localJobsService);
registry.replaceProvider(QueryObserverFactory.class, localJobsService::getQueryObserverFactory);
HybridJobsService hybridJobsService = new HybridJobsService(
// for now, provide the coordinator service set
registry.lookup(GrpcChannelBuilderFactory.class),
() -> bootstrap.getAllocator(), registry.provider(NodeEndpoint.class), registry.lookup(ConduitProvider.class));
registry.bind(JobsService.class, hybridJobsService);
registry.bind(HybridJobsService.class, hybridJobsService);
registry.bind(JobResultInfoProvider.class, localJobsService);
} else {
registry.bind(JobResultInfoProvider.class, JobResultInfoProvider.NOOP);
}
if (isCoordinator) {
// put provisioning service before resource allocator
final Provider<OptionManager> optionsProvider = () -> sabotContextProvider.get().getOptionManager();
final Provider<ClusterCoordinator> coordProvider = registry.provider(ClusterCoordinator.class);
final NodeProvider executionNodeProvider = new NodeProvider() {
@Override
public Collection<NodeEndpoint> getNodes() {
return coordProvider.get().getServiceSet(ClusterCoordinator.Role.EXECUTOR).getAvailableEndpoints();
}
};
EditionProvider editionProvider = new EditionProviderImpl();
registry.bind(EditionProvider.class, editionProvider);
registry.bind(ProvisioningService.class, new ProvisioningServiceImpl(
config,
registry.provider(LegacyKVStoreProvider.class),
executionNodeProvider,
bootstrap.getClasspathScan(),
optionsProvider,
registry.provider(EditionProvider.class)
));
}
registry.bind(ResourceAllocator.class, new BasicResourceAllocator(registry.provider(ClusterCoordinator
.class), registry.provider(GroupResourceInformation.class)));
if (isCoordinator){
final Provider<OptionManager> optionManagerProvider = () -> sabotContextProvider.get().getOptionManager();
registry.bind(ExecutorSelectorFactory.class, new ExecutorSelectorFactoryImpl());
ExecutorSelectorProvider executorSelectorProvider = new ExecutorSelectorProvider();
registry.bind(ExecutorSelectorProvider.class, executorSelectorProvider);
registry.bind(ExecutorSetService.class,
new LocalExecutorSetService(registry.provider(ClusterCoordinator.class),
optionManagerProvider));
registry.bind(ExecutorSelectionService.class,
new ExecutorSelectionServiceImpl(
registry.provider(ExecutorSetService.class),
optionManagerProvider,
registry.provider(ExecutorSelectorFactory.class),
executorSelectorProvider
)
);
CoordToExecTunnelCreator tunnelCreator = new CoordToExecTunnelCreator(registry.provider
(FabricService.class));
registry.bind(ExecutorServiceClientFactory.class, new ExecutorServiceProductClientFactory
(tunnelCreator));
registry.bind(MaestroForwarder.class, new NoOpMaestroForwarder());
final MaestroService maestroServiceImpl = new MaestroServiceImpl(
registry.provider(ExecutorSetService.class),
registry.provider(FabricService.class),
registry.provider(SabotContext.class),
registry.provider(ResourceAllocator.class),
registry.provider(CommandPool.class),
registry.provider(ExecutorSelectionService.class),
registry.provider(ExecutorServiceClientFactory.class),
registry.provider(JobTelemetryClient.class),
registry.provider(MaestroForwarder.class)
);
registry.bind(MaestroService.class, maestroServiceImpl);
registry.bindProvider(ExecToCoordStatusHandler.class, maestroServiceImpl::getExecStatusHandler);
registry.bind(RuleBasedEngineSelector.class, RuleBasedEngineSelector.NO_OP);
final ForemenWorkManager foremenWorkManager = new ForemenWorkManager(
registry.provider(FabricService.class),
registry.provider(SabotContext.class),
registry.provider(CommandPool.class),
registry.provider(MaestroService.class),
registry.provider(JobTelemetryClient.class),
registry.provider(MaestroForwarder.class),
bootstrapRegistry.lookup(Tracer.class),
registry.provider(RuleBasedEngineSelector.class));
registry.bindSelf(foremenWorkManager);
registry.bindProvider(ExecToCoordResultsHandler.class, foremenWorkManager::getExecToCoordResultsHandler);
registry.replaceProvider(ForemenTool.class, foremenWorkManager::getForemenTool);
registry.replaceProvider(CoordTunnelCreator.class, foremenWorkManager::getCoordTunnelCreator);
registry.replaceProvider(QueryCancelTool.class, foremenWorkManager::getQueryCancelTool);
// accept enduser rpc requests (replaces noop implementation).
registry.bindProvider(UserWorker.class, foremenWorkManager::getUserWorker);
// accept local query execution requests.
registry.bindProvider(LocalQueryExecutor.class, foremenWorkManager::getLocalQueryExecutor);
} else {
registry.bind(ForemenTool.class, ForemenTool.NO_OP);
registry.bind(QueryCancelTool.class, QueryCancelTool.NO_OP);
}
TaskPoolInitializer taskPoolInitializer = null;
if(isExecutor){
registry.bindSelf(new ContextInformationFactory());
taskPoolInitializer = new TaskPoolInitializer(
registry.provider(OptionManager.class),
config);
registry.bindSelf(taskPoolInitializer);
registry.bindProvider(TaskPool.class, taskPoolInitializer::getTaskPool);
final WorkloadTicketDepotService workloadTicketDepotService = new WorkloadTicketDepotService(
registry.provider(BufferAllocator.class),
registry.provider(TaskPool.class),
registry.provider(DremioConfig.class)
);
registry.bindSelf(workloadTicketDepotService);
registry.bindProvider(WorkloadTicketDepot.class, workloadTicketDepotService::getTicketDepot);
ExecToCoordTunnelCreator execToCoordTunnelCreator =
new ExecToCoordTunnelCreator(registry.provider(FabricService.class));
registry.bind(MaestroClientFactory.class,
new MaestroSoftwareClientFactory(execToCoordTunnelCreator));
registry.bind(JobTelemetryExecutorClientFactory.class,
new JobTelemetrySoftwareClientFactory(execToCoordTunnelCreator));
registry.bind(JobResultsClientFactory.class,
new JobResultsSoftwareClientFactory(execToCoordTunnelCreator));
final FragmentWorkManager fragmentWorkManager = new FragmentWorkManager(bootstrap,
registry.provider(NodeEndpoint.class),
registry.provider(SabotContext.class),
registry.provider(FabricService.class),
registry.provider(CatalogService.class),
registry.provider(ContextInformationFactory.class),
registry.provider(WorkloadTicketDepot.class),
registry.provider(TaskPool.class),
registry.provider(MaestroClientFactory.class),
registry.provider(JobTelemetryExecutorClientFactory.class),
registry.provider(JobResultsClientFactory.class));
registry.bindSelf(fragmentWorkManager);
registry.bindProvider(WorkStats.class, fragmentWorkManager::getWorkStats);
registry.bindProvider(ExecutorService.class, fragmentWorkManager::getExecutorService);
} else {
registry.bind(WorkStats.class, WorkStats.NO_OP);
}
registry.bind(AccelerationManager.class, AccelerationManager.NO_OP);
registry.bind(StatisticsService.class, StatisticsService.NO_OP);
registry.bind(RelMetadataQuerySupplier.class, DremioRelMetadataQuery.QUERY_SUPPLIER);
if (isCoordinator) {
final ReflectionServiceImpl reflectionService = new ReflectionServiceImpl(
sabotConfig,
registry.provider(LegacyKVStoreProvider.class),
registry.provider(SchedulerService.class),
registry.provider(JobsService.class),
registry.provider(CatalogService.class),
registry.provider(SabotContext.class),
registry.provider(ReflectionStatusService.class),
bootstrap.getExecutor(),
registry.provider(ForemenWorkManager.class),
isDistributedMaster,
bootstrap.getAllocator());
registry.bind(ReflectionService.class, reflectionService);
registry.bind(ReflectionAdministrationService.Factory.class, (context) -> reflectionService);
registry.replaceProvider(MaterializationDescriptorProvider.class, reflectionService::getMaterializationDescriptor);
registry.replace(AccelerationManager.class, new AccelerationManagerImpl(
registry.provider(ReflectionService.class),
registry.provider(ReflectionAdministrationService.Factory.class),
namespaceServiceProvider));
final StatisticsServiceImpl statisticsService = new StatisticsServiceImpl(
registry.provider(LegacyKVStoreProvider.class),
registry.provider(SchedulerService.class),
registry.provider(JobsService.class),
namespaceServiceProvider,
registry.provider(BufferAllocator.class),
registry.provider(SabotContext.class));
registry.replace(StatisticsService.class, statisticsService);
registry.bind(StatisticsAdministrationService.Factory.class, (context) -> statisticsService);
registry.replace(RelMetadataQuerySupplier.class, DremioRelMetadataQuery.getSupplier(statisticsService));
final Provider<Collection<NodeEndpoint>> nodeEndpointsProvider = () -> sabotContextProvider.get().getExecutors();
registry.bind(ReflectionStatusService.class, new ReflectionStatusServiceImpl(
nodeEndpointsProvider,
namespaceServiceProvider,
registry.provider(CatalogService.class),
registry.provider(LegacyKVStoreProvider.class),
reflectionService.getCacheViewerProvider()
));
} else {
registry.bind(ReflectionService.class, new ExecutorOnlyReflectionService());
registry.bind(ReflectionStatusService.class, ReflectionStatusService.NOOP);
}
final Provider<Optional<NodeEndpoint>> serviceLeaderProvider = () -> sabotContextProvider.get().getServiceLeader(LOCAL_TASK_LEADER_NAME);
if(isCoordinator){
conduitServiceRegistry.registerService(new AccelerationListServiceImpl(registry.provider(ReflectionStatusService.class),
registry.provider(ReflectionService.class),
registry.provider(LegacyKVStoreProvider.class),
bootstrap::getExecutor));
}
final AccelerationListManagerImpl accelerationListManager = new AccelerationListManagerImpl(
registry.provider(LegacyKVStoreProvider.class),
registry.provider(ReflectionStatusService.class),
registry.provider(ReflectionService.class),
() -> config,
isMaster,
isCoordinator,
serviceLeaderProvider,
registry.provider(ConduitProvider.class));
registry.bind(AccelerationListManager.class, accelerationListManager);
final StatisticsListManager statisticsListManager = new StatisticsListManagerImpl(
registry.provider(StatisticsService.class),
serviceLeaderProvider,
registry.provider(FabricService.class),
registry.provider(BufferAllocator.class),
() -> config,
isMaster,
isCoordinator);
registry.bind(StatisticsListManager.class, statisticsListManager);
final Provider<OptionManager> optionsProvider = () -> sabotContextProvider.get().getOptionManager();
if(isCoordinator) {
registry.bindSelf(new ServerHealthMonitor(registry.provider(MasterStatusListener.class)));
conduitServiceRegistry.registerService(new CoordinatorLogService(
sabotContextProvider,
registry.provider(SupportService.class)
));
}
registry.bind(SupportService.class, new BasicSupportService(
dacConfig,
registry.provider(LegacyKVStoreProvider.class),
registry.provider(JobsService.class),
registry.provider(UserService.class),
registry.provider(ClusterCoordinator.class),
registry.provider(OptionManager.class),
namespaceServiceProvider,
registry.provider(CatalogService.class),
registry.provider(FabricService.class),
bootstrap.getAllocator()));
registry.bind(QueryLogBundleService.class, new BasicQueryLogBundleService(
bootstrap.getDremioConfig(),
bootstrap.getClasspathScan(),
sabotContextProvider,
registry.provider(ClusterCoordinator.class),
registry.provider(ProjectOptionManager.class),
registry.provider(SupportService.class),
registry.provider(JobsService.class),
registry.provider(ProvisioningService.class),
registry.provider(ConduitProvider.class),
() -> registry.provider(ClusterCoordinator.class)
.get()
.getServiceSet(ClusterCoordinator.Role.COORDINATOR)
.getAvailableEndpoints()
));
registry.bindSelf(new NodeRegistration(
registry.provider(NodeEndpoint.class),
registry.provider(FragmentWorkManager.class),
registry.provider(ForemenWorkManager.class),
registry.provider(ClusterCoordinator.class),
registry.provider(DremioConfig.class)
));
if(isCoordinator){
registry.bind(SampleDataPopulatorService.class,
new SampleDataPopulatorService(
registry.provider(SabotContext.class),
registry.provider(LegacyKVStoreProvider.class),
registry.provider(UserService.class),
registry.provider(InitializerRegistry.class),
registry.provider(JobsService.class),
registry.provider(CatalogService.class),
registry.provider(ConnectionReader.class),
optionsProvider,
dacConfig.prepopulate,
dacConfig.addDefaultUser));
// search
final SearchService searchService;
if (isDistributedMaster) {
searchService = new SearchServiceImpl(
namespaceServiceProvider,
registry.provider(OptionManager.class),
registry.provider(LegacyKVStoreProvider.class),
registry.provider(SchedulerService.class),
bootstrap.getExecutor()
);
} else {
searchService = SearchService.UNSUPPORTED;
}
final Provider<Optional<NodeEndpoint>> taskLeaderProvider =
() -> sabotContextProvider.get().getServiceLeader(SearchServiceImpl.LOCAL_TASK_LEADER_NAME);
registry.bind(SearchService.class, new SearchServiceInvoker(
isDistributedMaster,
registry.provider(NodeEndpoint.class),
taskLeaderProvider,
registry.provider(FabricService.class),
bootstrap.getAllocator(),
searchService
));
registry.bind(RestServerV2.class, new RestServerV2(bootstrap.getClasspathScan()));
registry.bind(APIServer.class, new APIServer(bootstrap.getClasspathScan()));
registry.bind(DremioServlet.class, new DremioServlet(dacConfig.getConfig(),
registry.provider(ServerHealthMonitor.class),
optionsProvider,
registry.provider(SupportService.class)
));
}
LivenessService livenessService = new LivenessService(config);
registry.bind(LivenessService.class, livenessService);
if (taskPoolInitializer != null) {
livenessService.addHealthMonitor(taskPoolInitializer);
}
registry.bindSelf(SourceService.class);
registry.bindSelf(DatasetVersionMutator.class);
registry.bind(NamespaceService.class, NamespaceServiceImpl.class);
registry.bindSelf(NamespaceServiceImpl.class);
registry.bindSelf(ReflectionServiceHelper.class);
registry.bindSelf(CatalogServiceHelper.class);
registry.bindSelf(CollaborationHelper.class);
registry.bindSelf(UserServiceHelper.class);
registry.bind(FirstLoginSetupService.class, OSSFirstLoginSetupService.NOOP_INSTANCE);
if (isCoordinator && config.getBoolean(DremioConfig.JOBS_ENABLED_BOOL)) {
registry.bindSelf(new LocalJobTelemetryServer(
registry.lookup(GrpcServerBuilderFactory.class),
registry.provider(LegacyKVStoreProvider.class),
currentEndPoint,
bootstrapRegistry.lookup(GrpcTracerFacade.class))
);
registerJobsServices(conduitServiceRegistry, registry, bootstrap);
}
if (isExecutor) {
registry.bindSelf(new ExprCachePrewarmService(sabotContextProvider, optionsProvider, bootstrap.getAllocator()));
}
if (isCoordinator && config.getBoolean(DremioConfig.FLIGHT_SERVICE_ENABLED_BOOLEAN)) {
registry.bindSelf(new DremioFlightService(
registry.provider(DremioConfig.class),
registry.provider(BufferAllocator.class),
registry.provider(UserService.class),
registry.provider(UserWorker.class),
registry.provider(SabotContext.class),
registry.provider(TokenManager.class),
registry.provider(OptionManager.class)));
}
if (isCoordinator && config.getBoolean(DremioConfig.NESSIE_SERVICE_ENABLED_BOOLEAN)) {
final boolean inMemoryBackend = config.getBoolean(DremioConfig.NESSIE_SERVICE_IN_MEMORY_BOOLEAN);
final int defaultKvStoreMaxCommitRetries = config.getInt(DremioConfig.NESSIE_SERVICE_KVSTORE_MAX_COMMIT_RETRIES);
final NessieService nessieService = new NessieService(
registry.provider(KVStoreProvider.class),
registry.provider(OptionManager.class),
inMemoryBackend,
defaultKvStoreMaxCommitRetries
);
nessieService.getGrpcServices().forEach(conduitServiceRegistry::registerService);
registry.bindSelf(nessieService);
}
if(isCoordinator) {
SystemTableManager systemTableManager = new SystemTableManagerImpl(
getSystemTableAllocator(bootstrap),
() -> getSysFlightTableProviders(conduitProvider));
registry.bind(SystemTableManager.class, systemTableManager);
}
registerHeapMonitorManager(registry, isCoordinator);
registerActiveQueryListService(registry, isCoordinator, isDistributedMaster, conduitServiceRegistry);
// NOTE : Should be last after all other services
// used as health check to know when to start serving traffic.
if (isCoordinator) {
// if we have at least one user registered, disable firstTimeApi and checkNoUser
// but for userGroupService is not started yet so we cannot check for now
registry.bind(WebServer.class, new WebServer(registry,
dacConfig,
registry.provider(RestServerV2.class),
registry.provider(APIServer.class),
registry.provider(DremioServer.class),
new DremioBinder(registry),
"ui",
isInternalUGS));
registry.bind(TokenManager.class, new TokenManagerImpl(
registry.provider(LegacyKVStoreProvider.class),
registry.provider(SchedulerService.class),
registry.provider(OptionManager.class),
isDistributedMaster,
config));
}
}
protected BufferAllocator getSystemTableAllocator(final BootStrapContext bootstrap) {
return bootstrap.getAllocator().newChildAllocator("sysflight-producer", 0, Long.MAX_VALUE);
}
private void registerActiveQueryListService(SingletonRegistry registry, boolean isCoordinator,
boolean isDistributedMaster, ConduitServiceRegistry conduitServiceRegistry) {
if (isCoordinator) {
ActiveQueryListService activeQueryListService = new ActiveQueryListService(
registry.provider(SchedulerService.class),
registry.provider(ExecutorServiceClientFactory.class),
registry.provider(NodeEndpoint.class),
registry.provider(ExecutorSetService.class),
registry.provider(MaestroService.class),
registry.provider(SabotContext.class),
registry.provider(ConduitProvider.class),
registry.provider(OptionManager.class),
isDistributedMaster);
registry.bindSelf(activeQueryListService);
conduitServiceRegistry.registerService(activeQueryListService);
}
}
protected String getFabricAddress() {
// Fabric
final String fabricAddress;
try {
fabricAddress = FabricServiceImpl.getAddress(false);
} catch (UnknownHostException e) {
throw new RuntimeException("Cannot get local address", e);
}
return fabricAddress;
}
// Registering heap monitor manager as a service,
// so that it can be closed cleanly when shutdown
private void registerHeapMonitorManager(SingletonRegistry registry, boolean isCoordinator){
if (isCoordinator) {
Consumer<CancelQueryContext> cancelConsumer =
(cancelQueryContext) -> registry.provider(ForemenWorkManager.class).get()
.cancel(cancelQueryContext);
logger.info("Registering heap monitor manager in coordinator as a service.");
registry.bindSelf(new HeapMonitorManager(() -> registry.provider(SabotContext.class).get().getOptionManager(),
new CoordinatorHeapClawBackStrategy(cancelConsumer),
ClusterCoordinator.Role.COORDINATOR));
}
}
protected BufferAllocator getChildBufferAllocator(BufferAllocator allocator) {
return checkNotNull(allocator).newChildAllocator("jobs-service",
0,
Long.MAX_VALUE);
}
private void registerJobsServices(final ConduitServiceRegistry conduitServiceRegistry,
final SingletonRegistry registry, final BootStrapContext bootstrap) {
// 1. job adapter
conduitServiceRegistry.registerService(new JobsServiceAdapter(registry.provider(LocalJobsService.class)));
// 2. chronicle
conduitServiceRegistry.registerService(new Chronicle(registry.provider(LocalJobsService.class)));
// 3. jobs, sys flight producers registered together as CoordinatorFlightProducer, as individual binding is masking one of them
final BufferAllocator coordFlightAllocator = bootstrap.getAllocator().newChildAllocator(CoordinatorFlightProducer.class.getName(), 0, Long.MAX_VALUE);
final JobsFlightProducer jobsFlightProducer = new JobsFlightProducer(registry.provider(LocalJobsService.class), coordFlightAllocator);
final SysFlightProducer sysFlightProducer = new SysFlightProducer(registry.provider(SystemTableManager.class));
final CoordinatorFlightProducer coordFlightProducer = new CoordinatorFlightProducer(jobsFlightProducer, sysFlightProducer);
conduitServiceRegistry.registerService(new FlightCloseableBindableService(coordFlightAllocator, coordFlightProducer, null, null));
//4. MaestroGrpcServerFacade
conduitServiceRegistry.registerService(new MaestroGrpcServerFacade(registry.provider(ExecToCoordStatusHandler.class)));
//5. jobresults
final BufferAllocator jobResultsAllocator = bootstrap.getAllocator().newChildAllocator("JobResultsGrpcServer", 0, Long.MAX_VALUE);
conduitServiceRegistry.registerService(new JobResultsGrpcServerFacade(registry.provider(ExecToCoordResultsHandler.class), jobResultsAllocator, registry.provider(MaestroForwarder.class)));
}
protected LegacyIndexedStore<JobId, JobResult> getLegacyIndexedStore(Provider<LegacyKVStoreProvider> kvStoreProviderProvider) {
return kvStoreProviderProvider.get().getStore(LocalJobsService.JobsStoreCreator.class);
}
protected Provider<JobResultsStore> getJobResultsStoreProvider(Provider<JobResultsStoreConfig> jobResultsStoreConfigProvider,
Provider<LegacyKVStoreProvider> kvStoreProviderProvider,
BufferAllocator allocator) {
return () -> {
try {
return new JobResultsStore(jobResultsStoreConfigProvider.get(),
getLegacyIndexedStore(kvStoreProviderProvider),
allocator);
} catch (IOException e) {
throw new RuntimeException(e);
}
};
}
protected Provider<JobResultsStoreConfig> getJobResultsStoreConfigProvider(SingletonRegistry registry) {
return () -> {
try {
final CatalogService storagePluginRegistry = registry.provider(CatalogService.class).get();
final FileSystemPlugin<?> plugin = storagePluginRegistry.getSource(JOBS_STORAGEPLUGIN_NAME);
return new JobResultsStoreConfig(plugin.getName(), plugin.getConfig().getPath(), plugin.getSystemUserFS());
} catch (Exception e) {
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
};
}
/**
* Set up the {@link UserService} in registry according to the config.
* @return True if the internal user management is used.
*/
protected boolean setupUserService(
final SingletonRegistry registry,
final DremioConfig config,
final Provider<SabotContext> sabotContext,
boolean isMaster,
boolean isCoordinator){
if (!isCoordinator) {
registry.bind(UserService.class, new ExecutorUserService());
return false;
}
final String authType = config.getString(WEB_AUTH_TYPE);
if ("internal".equals(authType)) {
final SimpleUserService simpleUserService = new SimpleUserService(registry.provider(LegacyKVStoreProvider.class), isMaster);
registry.bindProvider(UserService.class, () -> simpleUserService);
registry.bindSelf(simpleUserService);
logger.info("Internal user/group service is configured.");
return true;
}
logger.error("Unknown value '{}' set for {}. Accepted values are ['internal', 'ldap']", authType, WEB_AUTH_TYPE);
throw new RuntimeException(
String.format("Unknown auth type '%s' set in config path '%s'", authType, WEB_AUTH_TYPE));
}
protected Map<SystemTableManager.TABLES, SysFlightDataProvider> getSysFlightTableProviders(ConduitProvider conduitProvider) {
Map<SystemTableManager.TABLES, SysFlightDataProvider> tablesMap = Maps.newHashMap();
ReflectionDescriptionServiceStub reflectionsStub = ReflectionDescriptionServiceGrpc.newStub(conduitProvider.getOrCreateChannelToMaster());
tablesMap.put(SystemTableManager.TABLES.JOBS, new JobsTable(() -> ChronicleGrpc.newStub(conduitProvider.getOrCreateChannelToMaster())));
tablesMap.put(SystemTableManager.TABLES.REFLECTIONS, new ReflectionsTable(() -> reflectionsStub));
tablesMap.put(SystemTableManager.TABLES.MATERIALIZATIONS, new MaterializationsTable(() -> reflectionsStub));
tablesMap.put(SystemTableManager.TABLES.REFLECTION_DEPENDENCIES, new ReflectionDependenciesTable(() -> reflectionsStub));
return tablesMap;
}
protected NessieApiV1 getNessieClientInstance(DremioConfig config) {
String endpoint = config.getString(DremioConfig.NESSIE_SERVICE_REMOTE_URI);
if (endpoint == null || endpoint.isEmpty()) {
return new NessieApiV1Unsupported();
}
return HttpClientBuilder.builder().withUri(URI.create(endpoint)).build(NessieApiV1.class);
}
}
| |
package kbasesearchengine.test.main;
import static kbasesearchengine.test.common.TestCommon.set;
import static kbasesearchengine.test.events.handler.WorkspaceEventHandlerTest.wsTuple;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import kbasesearchengine.GetObjectsInput;
import kbasesearchengine.GetObjectsOutput;
import kbasesearchengine.ObjectData;
import kbasesearchengine.SearchObjectsInput;
import kbasesearchengine.SearchObjectsOutput;
import kbasesearchengine.SearchTypesInput;
import kbasesearchengine.SearchTypesOutput;
import kbasesearchengine.TypeDescriptor;
import kbasesearchengine.authorization.TemporaryAuth2Client;
import kbasesearchengine.events.handler.WorkspaceEventHandler;
import kbasesearchengine.main.NarrativeInfoDecorator;
import kbasesearchengine.main.SearchInterface;
import kbasesearchengine.test.common.TestCommon;
import us.kbase.common.service.JsonClientException;
import us.kbase.common.service.Tuple5;
public class NarrativeInfoDecoratorTest {
@Test
public void constructFail() {
final WorkspaceEventHandler weh = mock(WorkspaceEventHandler.class);
final SearchInterface si = mock(SearchInterface.class);
final TemporaryAuth2Client auth = mock(TemporaryAuth2Client.class);
failConstruct(null, weh, auth, "t", new NullPointerException("searchInterface"));
failConstruct(si, null, auth, "t", new NullPointerException("wsHandler"));
failConstruct(si, weh, null, "t", new NullPointerException("authClient"));
failConstruct(si, weh, auth, null,
new IllegalArgumentException("token cannot be null or whitespace only"));
failConstruct(si, weh, auth, " \n \t",
new IllegalArgumentException("token cannot be null or whitespace only"));
}
private void failConstruct(
final SearchInterface search,
final WorkspaceEventHandler weh,
final TemporaryAuth2Client cli,
final String token,
final Exception expected) {
try {
new NarrativeInfoDecorator(search, weh, cli, token);
fail("expected exception");
} catch (Exception got) {
TestCommon.assertExceptionCorrect(got, expected);
}
}
/* Note that the implementation is the same for GetObjects and SearchObjects and so it
* seems pointless to repeat the same tests for GetObjects. As such, only a very simple
* test is run to ensure that objects are decorated.
*
* If the implementation of GetObjects changes be sure to update the tests.
*/
@Test
public void searchObjectsDecorateWithNoData() throws Exception {
final WorkspaceEventHandler weh = mock(WorkspaceEventHandler.class);
final SearchInterface search = mock(SearchInterface.class);
final TemporaryAuth2Client auth = mock(TemporaryAuth2Client.class);
final NarrativeInfoDecorator nid = new NarrativeInfoDecorator(search, weh, auth, "token");
/* since a) the generated input class has no hashcode or equals and
* b) the argument is just a straight pass through, we just use an identity match
* for mockito to recognize the argument
*/
final SearchObjectsInput dummyInput = new SearchObjectsInput();
when(search.searchObjects(dummyInput, "user")).thenReturn(new SearchObjectsOutput()
.withObjects(Collections.emptyList())
.withAccessGroupNarrativeInfo(null));
when(auth.getUserDisplayNames("token", set())).thenReturn(Collections.emptyMap());
final SearchObjectsOutput res = nid.searchObjects(dummyInput, "user");
assertThat("incorrect object data", res.getObjects(), is(Collections.emptyList()));
assertThat("incorrect narr info", res.getAccessGroupNarrativeInfo(),
is(Collections.emptyMap()));
}
    /* Decoration when the upstream search result carries a null narrative info
     * map: every workspace id parsed from the "WS:" guids should be looked up
     * and added, non-workspace guids skipped, and an owner missing from the
     * auth service results should yield a null display name. */
    @Test
    public void searchObjectsDecorateWithNullInfo() throws Exception {
        // also tests the case where a username in the workspace is, for some reason, not
        // found in the auth service results
        final WorkspaceEventHandler weh = mock(WorkspaceEventHandler.class);
        final SearchInterface search = mock(SearchInterface.class);
        final TemporaryAuth2Client auth = mock(TemporaryAuth2Client.class);
        final NarrativeInfoDecorator nid = new NarrativeInfoDecorator(search, weh, auth, "tok");
        /* since a) the generated input class has no hashcode or equals and
         * b) the argument is just a straight pass through, we just use an identity match
         * for mockito to recognize the argument
         */
        final SearchObjectsInput dummyInput = new SearchObjectsInput();
        // guid format is <storage>:<workspace id>/<object id>/<version>;
        // only "WS:" guids should trigger workspace lookups
        final List<ObjectData> objectdata = Arrays.asList(
                new ObjectData().withGuid("WS:65/2/7"),
                new ObjectData().withGuid("WS:42/7/21"),
                new ObjectData().withGuid("WS:1/61/1"),
                new ObjectData().withGuid("FS:6/22/3"), // expect skip
                new ObjectData().withGuid("WS:2/345/1000"));
        when(search.searchObjects(dummyInput, "user")).thenReturn(new SearchObjectsOutput()
                .withObjects(objectdata)
                .withAccessGroupNarrativeInfo(null));
        // no narrative info at all
        when(weh.getWorkspaceInfo(65)).thenReturn(wsTuple(
                65, "name1", "owner1", "2018-02-08T21:55:45Z", 0, "r", "n", "unlocked",
                Collections.emptyMap()));
        // only narrative id
        when(weh.getWorkspaceInfo(1)).thenReturn(wsTuple(
                1, "name2", "owner2", "2018-02-08T21:55:57Z", 0, "r", "n", "unlocked",
                ImmutableMap.of("narrative", "2")));
        // only narrative name
        when(weh.getWorkspaceInfo(2)).thenReturn(wsTuple(
                2, "name3", "owner3", "2018-02-08T21:55:45.678Z", 0, "r", "n", "unlocked",
                ImmutableMap.of("narrative_nice_name", "myhorridnarrative")));
        // full narrative info
        when(weh.getWorkspaceInfo(42)).thenReturn(wsTuple(
                42, "name4", "owner4", "2018-02-08T21:55:50.678Z", 0, "r", "n", "unlocked",
                ImmutableMap.of("narrative", "3", "narrative_nice_name", "mylovelynarrative")));
        when(auth.getUserDisplayNames("tok", set("owner1", "owner2", "owner3", "owner4")))
                .thenReturn(ImmutableMap.of(
                        "owner1", "disp1",
                        "owner2", "disp2",
                        "owner4", "disp4")); // missing 3
        final SearchObjectsOutput res = nid.searchObjects(dummyInput, "user");
        // expected epoch-millis values correspond to the ISO-8601 workspace
        // modification dates mocked above; narrative name/id are only set when
        // both workspace metadata keys were present (workspace 42)
        final Map<Long, Tuple5<String, Long, Long, String, String>> expected = ImmutableMap.of(
                65L, narrInfo(null, null, 1518126945000L, "owner1", "disp1"),
                1L, narrInfo(null, null, 1518126957000L, "owner2", "disp2"),
                2L, narrInfo(null, null, 1518126945678L, "owner3", null),
                42L, narrInfo("mylovelynarrative", 3L, 1518126950678L, "owner4", "disp4"));
        assertThat("incorrect object data", res.getObjects(), is(objectdata));
        compare(res.getAccessGroupNarrativeInfo(), expected);
    }
@Test
public void searchObjectsDecorateWithPreexistingInfo() throws Exception {
final WorkspaceEventHandler weh = mock(WorkspaceEventHandler.class);
final SearchInterface search = mock(SearchInterface.class);
final TemporaryAuth2Client auth = mock(TemporaryAuth2Client.class);
final NarrativeInfoDecorator nid = new NarrativeInfoDecorator(search, weh, auth, "toke");
/* since a) the generated input class has no hashcode or equals and
* b) the argument is just a straight pass through, we just use an identity match
* for mockito to recognize the argument
*/
final SearchObjectsInput dummyInput = new SearchObjectsInput();
final List<ObjectData> objectdata = Arrays.asList(
new ObjectData().withGuid("WS:65/2/7"),
new ObjectData().withGuid("WS:2/345/1000"));
when(search.searchObjects(dummyInput, "user")).thenReturn(new SearchObjectsOutput()
.withObjects(objectdata)
.withAccessGroupNarrativeInfo(ImmutableMap.of(
// expect overwrite
65L, narrInfo("narrname", 1L, 10000L, "owner", "Herbert J. Kornfeld"),
32L, narrInfo("narrname6", 2L, 20000L, "owner6", "Herbert K. Kornfeld"))));
// no narrative info at all
when(weh.getWorkspaceInfo(65)).thenReturn(wsTuple(
65, "name1", "owner1", "2018-02-08T21:55:45Z", 0, "r", "n", "unlocked",
Collections.emptyMap()));
// only narrative name
when(weh.getWorkspaceInfo(2)).thenReturn(wsTuple(
2, "name3", "owner3", "2018-02-08T21:55:45.678Z", 0, "r", "n", "unlocked",
ImmutableMap.of("narrative_nice_name", "myhorridnarrative")));
when(auth.getUserDisplayNames("toke", set("owner1", "owner3"))).thenReturn(ImmutableMap.of(
"owner1", "Gengulphus P. Twistleton", "owner3", "Fred"));
final SearchObjectsOutput res = nid.searchObjects(dummyInput, "user");
final Map<Long, Tuple5<String, Long, Long, String, String>> expected = ImmutableMap.of(
65L, narrInfo(null, null, 1518126945000L, "owner1", "Gengulphus P. Twistleton"),
2L, narrInfo(null, null, 1518126945678L, "owner3", "Fred"),
32L, narrInfo("narrname6", 2L, 20000L, "owner6", "Herbert K. Kornfeld"));
assertThat("incorrect object data", res.getObjects(), is(objectdata));
compare(res.getAccessGroupNarrativeInfo(), expected);
}
@Test
public void searchObjectsDecorateFail() throws Exception {
failSearchObjects(new IOException("beer on router"),
new IOException("Failed retrieving workspace info: beer on router"));
failSearchObjects(new JsonClientException("workspace is turned off"),
new JsonClientException(
"Failed retrieving workspace info: workspace is turned off"));
}
private void failSearchObjects(
final Exception toThrow,
final Exception expected)
throws Exception {
final WorkspaceEventHandler weh = mock(WorkspaceEventHandler.class);
final SearchInterface search = mock(SearchInterface.class);
final TemporaryAuth2Client auth = mock(TemporaryAuth2Client.class);
final NarrativeInfoDecorator nid = new NarrativeInfoDecorator(search, weh, auth, "tok");
/* since a) the generated input class has no hashcode or equals and
* b) the argument is just a straight pass through, we just use an identity match
* for mockito to recognize the argument
*/
final SearchObjectsInput dummyInput = new SearchObjectsInput();
final List<ObjectData> objectdata = Arrays.asList(new ObjectData().withGuid("WS:65/2/7"));
when(search.searchObjects(dummyInput, "user")).thenReturn(new SearchObjectsOutput()
.withObjects(objectdata));
when(weh.getWorkspaceInfo(65)).thenThrow(toThrow);
try {
nid.searchObjects(dummyInput, "user");
fail("expected exception");
} catch (Exception got) {
TestCommon.assertExceptionCorrect(got, expected);
}
}
@Test
public void getObjectsSimpleTest() throws Exception {
final WorkspaceEventHandler weh = mock(WorkspaceEventHandler.class);
final SearchInterface search = mock(SearchInterface.class);
final TemporaryAuth2Client auth = mock(TemporaryAuth2Client.class);
final NarrativeInfoDecorator nid = new NarrativeInfoDecorator(search, weh, auth, "token");
/* since a) the generated input class has no hashcode or equals and
* b) the argument is just a straight pass through, we just use an identity match
* for mockito to recognize the argument
*/
final GetObjectsInput dummyInput = new GetObjectsInput();
final List<ObjectData> objectdata = Arrays.asList(new ObjectData().withGuid("WS:42/7/21"));
when(search.getObjects(dummyInput, "user")).thenReturn(new GetObjectsOutput()
.withObjects(objectdata)
.withAccessGroupNarrativeInfo(null));
// full narrative info
when(weh.getWorkspaceInfo(42)).thenReturn(wsTuple(
42, "name4", "owner4", "2018-02-08T21:55:50.678Z", 0, "r", "n", "unlocked",
ImmutableMap.of("narrative", "3", "narrative_nice_name", "mylovelynarrative")));
when(auth.getUserDisplayNames("token", set("owner4"))).thenReturn(ImmutableMap.of(
"owner4", "foo"));
final GetObjectsOutput res = nid.getObjects(dummyInput, "user");
final Map<Long, Tuple5<String, Long, Long, String, String>> expected = new HashMap<>();
expected.put(42L, narrInfo("mylovelynarrative", 3L, 1518126950678L, "owner4", "foo"));
assertThat("incorrect object data", res.getObjects(), is(objectdata));
compare(res.getAccessGroupNarrativeInfo(), expected);
}
@Test
public void searchTypes() throws Exception {
// this is just a passthrough
final WorkspaceEventHandler weh = mock(WorkspaceEventHandler.class);
final SearchInterface search = mock(SearchInterface.class);
final TemporaryAuth2Client auth = mock(TemporaryAuth2Client.class);
final NarrativeInfoDecorator nid = new NarrativeInfoDecorator(search, weh, auth, "token");
/* since a) the generated input and output classes have no hashcode or equals and
* b) the method is just a straight pass through, we just use an identity match
* for mockito to recognize the classes
*/
final SearchTypesInput dummyInput = new SearchTypesInput();
final SearchTypesOutput dummyOutput = new SearchTypesOutput();
when(search.searchTypes(dummyInput, "user")).thenReturn(dummyOutput);
assertThat("incorrect output", nid.searchTypes(dummyInput, "user"), is(dummyOutput));
}
@Test
public void listTypes() throws Exception {
// this is just a passthrough
final WorkspaceEventHandler weh = mock(WorkspaceEventHandler.class);
final SearchInterface search = mock(SearchInterface.class);
final TemporaryAuth2Client auth = mock(TemporaryAuth2Client.class);
final NarrativeInfoDecorator nid = new NarrativeInfoDecorator(search, weh, auth, "tok");
/* since a) the generated output class has no hashcode or equals and
* b) the method is just a straight pass through, we just use an identity match
* for mockito to recognize the class
*/
final TypeDescriptor dummyOutput = new TypeDescriptor();
when(search.listTypes("type")).thenReturn(ImmutableMap.of("type", dummyOutput));
assertThat("incorrect output", nid.listTypes("type"), is(ImmutableMap.of(
"type", dummyOutput)));
}
public static void compare(
final Map<Long, Tuple5<String, Long, Long, String, String>> got,
final Map<Long, Tuple5<String, Long, Long, String, String>> expected) {
assertThat("incorrect map keys", got.keySet(), is(expected.keySet()));
for (final Long key: got.keySet()) {
compare(got.get(key), expected.get(key));
}
}
    /**
     * Assert that each element of a narrative info tuple matches the expected
     * tuple. Elements are, in order: narrative name, narrative id, save epoch
     * (ms), owner user name, and owner display name.
     */
    public static void compare(
            final Tuple5<String, Long, Long, String, String> got,
            final Tuple5<String, Long, Long, String, String> expected) {
        assertThat("incorrect narrative name", got.getE1(), is(expected.getE1()));
        assertThat("incorrect narrative id", got.getE2(), is(expected.getE2()));
        assertThat("incorrect epoch", got.getE3(), is(expected.getE3()));
        assertThat("incorrect owner", got.getE4(), is(expected.getE4()));
        assertThat("incorrect display name", got.getE5(), is(expected.getE5()));
    }
public static Tuple5<String, Long, Long, String, String> narrInfo(
final String narrativeName,
final Long narrativeId,
final Long epoch,
final String owner,
final String displayName) {
return new Tuple5<String, Long, Long, String, String>()
.withE1(narrativeName)
.withE2(narrativeId)
.withE3(epoch)
.withE4(owner)
.withE5(displayName);
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.appservice.v2019_08_01.implementation;
import org.joda.time.DateTime;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.azure.management.appservice.v2019_08_01.ProxyOnlyResource;
/**
 * Process Thread Information.
 *
 * Serialization contract: field layout is flattened into the "properties"
 * envelope via {@link JsonFlatten} and the per-field {@code JsonProperty}
 * paths; do not rename fields or alter the annotations.
 */
@JsonFlatten
public class ProcessThreadInfoInner extends ProxyOnlyResource {
    /**
     * Thread identifier (read-only; server-populated).
     *
     * NOTE(review): the generated documentation described this field as
     * "Site extension ID", which looks like a copy-paste error in the source
     * API spec — confirm the exact semantics against the service definition.
     */
    @JsonProperty(value = "properties.identifier", access = JsonProperty.Access.WRITE_ONLY)
    private Integer identifier;

    /** HRef URI. */
    @JsonProperty(value = "properties.href")
    private String href;

    /** Process URI. */
    @JsonProperty(value = "properties.process")
    private String process;

    /** Start address. */
    @JsonProperty(value = "properties.start_address")
    private String startAddress;

    /** Current thread priority. */
    @JsonProperty(value = "properties.current_priority")
    private Integer currentPriority;

    /** Thread priority level. */
    @JsonProperty(value = "properties.priority_level")
    private String priorityLevel;

    /** Base priority. */
    @JsonProperty(value = "properties.base_priority")
    private Integer basePriority;

    /** Start time. */
    @JsonProperty(value = "properties.start_time")
    private DateTime startTime;

    /** Total processor time. */
    @JsonProperty(value = "properties.total_processor_time")
    private String totalProcessorTime;

    /** User processor time. */
    @JsonProperty(value = "properties.user_processor_time")
    private String userProcessorTime;

    /** Thread state. */
    @JsonProperty(value = "properties.state")
    private String state;

    /** Wait reason. */
    @JsonProperty(value = "properties.wait_reason")
    private String waitReason;

    /**
     * Get the thread identifier. This field is read-only on the service side,
     * so no setter is generated.
     *
     * @return the identifier value
     */
    public Integer identifier() {
        return this.identifier;
    }

    /**
     * Get hRef URI.
     *
     * @return the href value
     */
    public String href() {
        return this.href;
    }

    /**
     * Set hRef URI.
     *
     * @param href the href value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withHref(String href) {
        this.href = href;
        return this;
    }

    /**
     * Get process URI.
     *
     * @return the process value
     */
    public String process() {
        return this.process;
    }

    /**
     * Set process URI.
     *
     * @param process the process value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withProcess(String process) {
        this.process = process;
        return this;
    }

    /**
     * Get start address.
     *
     * @return the startAddress value
     */
    public String startAddress() {
        return this.startAddress;
    }

    /**
     * Set start address.
     *
     * @param startAddress the startAddress value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withStartAddress(String startAddress) {
        this.startAddress = startAddress;
        return this;
    }

    /**
     * Get current thread priority.
     *
     * @return the currentPriority value
     */
    public Integer currentPriority() {
        return this.currentPriority;
    }

    /**
     * Set current thread priority.
     *
     * @param currentPriority the currentPriority value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withCurrentPriority(Integer currentPriority) {
        this.currentPriority = currentPriority;
        return this;
    }

    /**
     * Get thread priority level.
     *
     * @return the priorityLevel value
     */
    public String priorityLevel() {
        return this.priorityLevel;
    }

    /**
     * Set thread priority level.
     *
     * @param priorityLevel the priorityLevel value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withPriorityLevel(String priorityLevel) {
        this.priorityLevel = priorityLevel;
        return this;
    }

    /**
     * Get base priority.
     *
     * @return the basePriority value
     */
    public Integer basePriority() {
        return this.basePriority;
    }

    /**
     * Set base priority.
     *
     * @param basePriority the basePriority value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withBasePriority(Integer basePriority) {
        this.basePriority = basePriority;
        return this;
    }

    /**
     * Get start time.
     *
     * @return the startTime value
     */
    public DateTime startTime() {
        return this.startTime;
    }

    /**
     * Set start time.
     *
     * @param startTime the startTime value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withStartTime(DateTime startTime) {
        this.startTime = startTime;
        return this;
    }

    /**
     * Get total processor time.
     *
     * @return the totalProcessorTime value
     */
    public String totalProcessorTime() {
        return this.totalProcessorTime;
    }

    /**
     * Set total processor time.
     *
     * @param totalProcessorTime the totalProcessorTime value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withTotalProcessorTime(String totalProcessorTime) {
        this.totalProcessorTime = totalProcessorTime;
        return this;
    }

    /**
     * Get user processor time.
     *
     * @return the userProcessorTime value
     */
    public String userProcessorTime() {
        return this.userProcessorTime;
    }

    /**
     * Set user processor time.
     *
     * @param userProcessorTime the userProcessorTime value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withUserProcessorTime(String userProcessorTime) {
        this.userProcessorTime = userProcessorTime;
        return this;
    }

    /**
     * Get thread state.
     *
     * @return the state value
     */
    public String state() {
        return this.state;
    }

    /**
     * Set thread state.
     *
     * @param state the state value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withState(String state) {
        this.state = state;
        return this;
    }

    /**
     * Get wait reason.
     *
     * @return the waitReason value
     */
    public String waitReason() {
        return this.waitReason;
    }

    /**
     * Set wait reason.
     *
     * @param waitReason the waitReason value to set
     * @return the ProcessThreadInfoInner object itself.
     */
    public ProcessThreadInfoInner withWaitReason(String waitReason) {
        this.waitReason = waitReason;
        return this;
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.opsworks.model;
import java.io.Serializable;
import java.util.Objects;
/**
 * <p>
 * Describes an Elastic Load Balancing instance.
 * </p>
 */
public class ElasticLoadBalancer implements Serializable, Cloneable {

    /** The Elastic Load Balancing instance's name. */
    private String elasticLoadBalancerName;

    /** The instance's AWS region. */
    private String region;

    /** The instance's public DNS name. */
    private String dnsName;

    /** The ID of the stack that the instance is associated with. */
    private String stackId;

    /** The ID of the layer that the instance is attached to. */
    private String layerId;

    /** The VPC ID. */
    private String vpcId;

    /** A list of Availability Zones. Lazily initialized by the getter. */
    private com.amazonaws.internal.SdkInternalList<String> availabilityZones;

    /** A list of subnet IDs, if the stack is running in a VPC. Lazily initialized. */
    private com.amazonaws.internal.SdkInternalList<String> subnetIds;

    /**
     * A list of the EC2 instances that the Elastic Load Balancing instance is
     * managing traffic for. Lazily initialized by the getter.
     */
    private com.amazonaws.internal.SdkInternalList<String> ec2InstanceIds;

    /**
     * @param elasticLoadBalancerName
     *        The Elastic Load Balancing instance's name.
     */
    public void setElasticLoadBalancerName(String elasticLoadBalancerName) {
        this.elasticLoadBalancerName = elasticLoadBalancerName;
    }

    /**
     * @return The Elastic Load Balancing instance's name.
     */
    public String getElasticLoadBalancerName() {
        return this.elasticLoadBalancerName;
    }

    /**
     * @param elasticLoadBalancerName
     *        The Elastic Load Balancing instance's name.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withElasticLoadBalancerName(
            String elasticLoadBalancerName) {
        setElasticLoadBalancerName(elasticLoadBalancerName);
        return this;
    }

    /**
     * @param region
     *        The instance's AWS region.
     */
    public void setRegion(String region) {
        this.region = region;
    }

    /**
     * @return The instance's AWS region.
     */
    public String getRegion() {
        return this.region;
    }

    /**
     * @param region
     *        The instance's AWS region.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withRegion(String region) {
        setRegion(region);
        return this;
    }

    /**
     * @param dnsName
     *        The instance's public DNS name.
     */
    public void setDnsName(String dnsName) {
        this.dnsName = dnsName;
    }

    /**
     * @return The instance's public DNS name.
     */
    public String getDnsName() {
        return this.dnsName;
    }

    /**
     * @param dnsName
     *        The instance's public DNS name.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withDnsName(String dnsName) {
        setDnsName(dnsName);
        return this;
    }

    /**
     * @param stackId
     *        The ID of the stack that the instance is associated with.
     */
    public void setStackId(String stackId) {
        this.stackId = stackId;
    }

    /**
     * @return The ID of the stack that the instance is associated with.
     */
    public String getStackId() {
        return this.stackId;
    }

    /**
     * @param stackId
     *        The ID of the stack that the instance is associated with.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withStackId(String stackId) {
        setStackId(stackId);
        return this;
    }

    /**
     * @param layerId
     *        The ID of the layer that the instance is attached to.
     */
    public void setLayerId(String layerId) {
        this.layerId = layerId;
    }

    /**
     * @return The ID of the layer that the instance is attached to.
     */
    public String getLayerId() {
        return this.layerId;
    }

    /**
     * @param layerId
     *        The ID of the layer that the instance is attached to.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withLayerId(String layerId) {
        setLayerId(layerId);
        return this;
    }

    /**
     * @param vpcId
     *        The VPC ID.
     */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /**
     * @return The VPC ID.
     */
    public String getVpcId() {
        return this.vpcId;
    }

    /**
     * @param vpcId
     *        The VPC ID.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withVpcId(String vpcId) {
        setVpcId(vpcId);
        return this;
    }

    /**
     * Returns the Availability Zones, initializing the backing list to an empty
     * one on first access (so this never returns null).
     *
     * @return A list of Availability Zones.
     */
    public java.util.List<String> getAvailabilityZones() {
        if (availabilityZones == null) {
            availabilityZones = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return availabilityZones;
    }

    /**
     * @param availabilityZones
     *        A list of Availability Zones; null clears the list.
     */
    public void setAvailabilityZones(
            java.util.Collection<String> availabilityZones) {
        if (availabilityZones == null) {
            this.availabilityZones = null;
            return;
        }
        this.availabilityZones = new com.amazonaws.internal.SdkInternalList<String>(
                availabilityZones);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setAvailabilityZones(java.util.Collection)} or
     * {@link #withAvailabilityZones(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param availabilityZones
     *        A list of Availability Zones.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withAvailabilityZones(
            String... availabilityZones) {
        if (this.availabilityZones == null) {
            setAvailabilityZones(new com.amazonaws.internal.SdkInternalList<String>(
                    availabilityZones.length));
        }
        for (String ele : availabilityZones) {
            this.availabilityZones.add(ele);
        }
        return this;
    }

    /**
     * @param availabilityZones
     *        A list of Availability Zones.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withAvailabilityZones(
            java.util.Collection<String> availabilityZones) {
        setAvailabilityZones(availabilityZones);
        return this;
    }

    /**
     * Returns the subnet IDs, initializing the backing list to an empty one on
     * first access (so this never returns null).
     *
     * @return A list of subnet IDs, if the stack is running in a VPC.
     */
    public java.util.List<String> getSubnetIds() {
        if (subnetIds == null) {
            subnetIds = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return subnetIds;
    }

    /**
     * @param subnetIds
     *        A list of subnet IDs, if the stack is running in a VPC; null
     *        clears the list.
     */
    public void setSubnetIds(java.util.Collection<String> subnetIds) {
        if (subnetIds == null) {
            this.subnetIds = null;
            return;
        }
        this.subnetIds = new com.amazonaws.internal.SdkInternalList<String>(
                subnetIds);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setSubnetIds(java.util.Collection)} or
     * {@link #withSubnetIds(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param subnetIds
     *        A list of subnet IDs, if the stack is running in a VPC.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withSubnetIds(String... subnetIds) {
        if (this.subnetIds == null) {
            setSubnetIds(new com.amazonaws.internal.SdkInternalList<String>(
                    subnetIds.length));
        }
        for (String ele : subnetIds) {
            this.subnetIds.add(ele);
        }
        return this;
    }

    /**
     * @param subnetIds
     *        A list of subnet IDs, if the stack is running in a VPC.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withSubnetIds(
            java.util.Collection<String> subnetIds) {
        setSubnetIds(subnetIds);
        return this;
    }

    /**
     * Returns the managed EC2 instance IDs, initializing the backing list to an
     * empty one on first access (so this never returns null).
     *
     * @return A list of the EC2 instances that the Elastic Load Balancing
     *         instance is managing traffic for.
     */
    public java.util.List<String> getEc2InstanceIds() {
        if (ec2InstanceIds == null) {
            ec2InstanceIds = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return ec2InstanceIds;
    }

    /**
     * @param ec2InstanceIds
     *        A list of the EC2 instances that the Elastic Load Balancing
     *        instance is managing traffic for; null clears the list.
     */
    public void setEc2InstanceIds(java.util.Collection<String> ec2InstanceIds) {
        if (ec2InstanceIds == null) {
            this.ec2InstanceIds = null;
            return;
        }
        this.ec2InstanceIds = new com.amazonaws.internal.SdkInternalList<String>(
                ec2InstanceIds);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setEc2InstanceIds(java.util.Collection)} or
     * {@link #withEc2InstanceIds(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param ec2InstanceIds
     *        A list of the EC2 instances that the Elastic Load Balancing
     *        instance is managing traffic for.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withEc2InstanceIds(String... ec2InstanceIds) {
        if (this.ec2InstanceIds == null) {
            setEc2InstanceIds(new com.amazonaws.internal.SdkInternalList<String>(
                    ec2InstanceIds.length));
        }
        for (String ele : ec2InstanceIds) {
            this.ec2InstanceIds.add(ele);
        }
        return this;
    }

    /**
     * @param ec2InstanceIds
     *        A list of the EC2 instances that the Elastic Load Balancing
     *        instance is managing traffic for.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ElasticLoadBalancer withEc2InstanceIds(
            java.util.Collection<String> ec2InstanceIds) {
        setEc2InstanceIds(ec2InstanceIds);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. The output format is unchanged from the generated version.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getElasticLoadBalancerName() != null)
            sb.append("ElasticLoadBalancerName: "
                    + getElasticLoadBalancerName() + ",");
        if (getRegion() != null)
            sb.append("Region: " + getRegion() + ",");
        if (getDnsName() != null)
            sb.append("DnsName: " + getDnsName() + ",");
        if (getStackId() != null)
            sb.append("StackId: " + getStackId() + ",");
        if (getLayerId() != null)
            sb.append("LayerId: " + getLayerId() + ",");
        if (getVpcId() != null)
            sb.append("VpcId: " + getVpcId() + ",");
        if (getAvailabilityZones() != null)
            sb.append("AvailabilityZones: " + getAvailabilityZones() + ",");
        if (getSubnetIds() != null)
            sb.append("SubnetIds: " + getSubnetIds() + ",");
        if (getEc2InstanceIds() != null)
            sb.append("Ec2InstanceIds: " + getEc2InstanceIds());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, covering the generated null check.
        if (!(obj instanceof ElasticLoadBalancer))
            return false;
        ElasticLoadBalancer other = (ElasticLoadBalancer) obj;
        // Comparison goes through the getters, as in the generated code, so
        // null list fields are normalized to empty lists on both sides.
        return Objects.equals(getElasticLoadBalancerName(),
                        other.getElasticLoadBalancerName())
                && Objects.equals(getRegion(), other.getRegion())
                && Objects.equals(getDnsName(), other.getDnsName())
                && Objects.equals(getStackId(), other.getStackId())
                && Objects.equals(getLayerId(), other.getLayerId())
                && Objects.equals(getVpcId(), other.getVpcId())
                && Objects.equals(getAvailabilityZones(),
                        other.getAvailabilityZones())
                && Objects.equals(getSubnetIds(), other.getSubnetIds())
                && Objects.equals(getEc2InstanceIds(), other.getEc2InstanceIds());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the identical 31-based accumulation (seed 1,
        // null -> 0) as the previous hand-rolled chain, in the same field
        // order, so hash values are unchanged.
        return Objects.hash(getElasticLoadBalancerName(), getRegion(),
                getDnsName(), getStackId(), getLayerId(), getVpcId(),
                getAvailabilityZones(), getSubnetIds(), getEc2InstanceIds());
    }

    @Override
    public ElasticLoadBalancer clone() {
        try {
            return (ElasticLoadBalancer) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright (C) 2014-2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.data.management.trash;
import java.io.IOException;
import java.util.List;
import java.util.Properties;
import java.util.regex.Pattern;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.UserGroupInformation;
import org.joda.time.DateTime;
import org.joda.time.DateTimeUtils;
import org.joda.time.DateTimeZone;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.testng.internal.collections.Pair;
import com.google.common.collect.Lists;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code Trash}: bootstrap of the trash directory, {@code $USER} token
 * substitution in the configured location, moving paths to the trash, snapshot creation
 * and snapshot purging.
 *
 * All file system interaction goes through the mocked {@code FileSystem} exposed by
 * {@link TrashTestBase}; no real I/O is performed.
 */
public class TrashTest {

  @Test
  public void testCreateTrash() throws IOException {
    // Construction with default properties must not throw.
    new TrashTestBase(new Properties());
  }

  /**
   * Exercises the trash bootstrap logic: a missing identifier file in an empty trash
   * directory creates the file; a non-empty trash directory without the identifier file
   * is rejected; a missing trash directory is created (owner-only permissions) together
   * with the identifier file.
   */
  @Test
  public void testCreationCases() throws IOException {
    // If trash ident file doesn't exist, but trash is empty, create the ident file.
    TrashTestBase trash = new TrashTestBase(new Properties(), true, false, true);
    verify(trash.fs).createNewFile(new Path(trash.trash.getTrashLocation(), Trash.TRASH_IDENTIFIER_FILE));

    // If trash ident file doesn't exist, and trash is not empty, construction must fail.
    try {
      new TrashTestBase(new Properties(), true, false, false);
      Assert.fail("Expected IOException for a non-empty trash directory without an identifier file");
    } catch (IOException expected) {
      // Expected: an existing non-empty directory without the identifier file is rejected.
    }

    // If trash doesn't exist, create it with owner-only permissions plus the ident file.
    trash = new TrashTestBase(new Properties(), false, false, true);
    verify(trash.fs).mkdirs(trash.trash.getTrashLocation(), new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
    verify(trash.fs).createNewFile(new Path(trash.trash.getTrashLocation(), Trash.TRASH_IDENTIFIER_FILE));
  }

  /**
   * The {@code $USER} token in the configured trash location must be replaced with the
   * current Hadoop user name.
   */
  @Test
  public void testUserReplacement() throws IOException {
    Properties properties = new Properties();
    properties.setProperty(Trash.TRASH_LOCATION_KEY, "/trash/$USER/dir");
    Path expectedTrashPath = new Path("/trash/" + UserGroupInformation.getCurrentUser().getUserName() + "/dir");
    TrashTestBase trash = new TrashTestBase(properties);
    Assert.assertEquals(trash.trash.getTrashLocation(), expectedTrashPath);
  }

  /**
   * Moving a path whose destination does not yet exist should create the destination's
   * parent directory and rename the path under the trash location.
   */
  @Test
  public void testMoveToTrash() throws IOException {
    TrashTestBase trash = new TrashTestBase(new Properties());
    Path pathToDelete = new Path("/path/to/delete");
    final List<Pair<Path, Path>> movedPaths = Lists.newArrayList();

    when(trash.fs.exists(any(Path.class))).thenReturn(false);
    when(trash.fs.rename(any(Path.class), any(Path.class))).thenAnswer(recordRenames(movedPaths));

    Assert.assertTrue(trash.trash.moveToTrash(pathToDelete));
    // The destination did not exist, so exactly one mkdirs call is expected.
    verify(trash.fs, times(1)).mkdirs(any(Path.class));
    Assert.assertEquals(movedPaths.size(), 1);
    Assert.assertEquals(movedPaths.get(0).first(), pathToDelete);
    Assert.assertTrue(movedPaths.get(0).second().toString().endsWith(pathToDelete.toString()));
    Assert.assertEquals(movedPaths.get(0).second().getParent().getParent().getParent(), trash.trash.getTrashLocation());
  }

  /**
   * Moving a path whose destination already exists should not create any directory and
   * should disambiguate the target name with a numeric suffix.
   */
  @Test
  public void testMoveToTrashExistingFile() throws IOException {
    TrashTestBase trash = new TrashTestBase(new Properties());
    String fileName = "delete";
    Path pathToDelete = new Path("/path/to", fileName);
    // Conflicting names are expected to be suffixed with "_<digits>".
    Pattern expectedNamePattern = Pattern.compile("^" + fileName + "_[0-9]+$");
    final List<Pair<Path, Path>> movedPaths = Lists.newArrayList();

    when(trash.fs.exists(any(Path.class))).thenReturn(true);
    when(trash.fs.rename(any(Path.class), any(Path.class))).thenAnswer(recordRenames(movedPaths));

    Assert.assertTrue(trash.trash.moveToTrash(pathToDelete));
    // The destination already existed, so no directory may be created.
    verify(trash.fs, times(0)).mkdirs(any(Path.class));
    Assert.assertEquals(movedPaths.size(), 1);
    Assert.assertEquals(movedPaths.get(0).first(), pathToDelete);
    Assert.assertTrue(movedPaths.get(0).second().getParent().toString().endsWith(pathToDelete.getParent().toString()));
    Assert.assertTrue(expectedNamePattern.matcher(movedPaths.get(0).second().getName()).matches());
    Assert.assertEquals(movedPaths.get(0).second().getParent().getParent().getParent(), trash.trash.getTrashLocation());
  }

  /**
   * Creating a snapshot should create one snapshot directory named after the (frozen)
   * current UTC time and move the existing trash content into it.
   */
  @Test
  public void testCreateSnapshot() throws IOException {
    TrashTestBase trash = new TrashTestBase(new Properties());
    Path pathInTrash = new Path(trash.trash.getTrashLocation(), "dirInTrash");
    // Freeze the clock so the generated snapshot name is deterministic.
    DateTimeUtils.setCurrentMillisFixed(new DateTime(2015, 7, 15, 10, 0).getMillis());
    try {
      final List<Path> createdDirs = Lists.newArrayList();
      final List<Pair<Path, Path>> movedPaths = Lists.newArrayList();

      when(trash.fs.listStatus(eq(trash.trash.getTrashLocation()), any(PathFilter.class))).
          thenReturn(Lists.newArrayList(new FileStatus(0, true, 0, 0, 0, pathInTrash)).toArray(new FileStatus[]{}));
      when(trash.fs.exists(any(Path.class))).thenReturn(false);
      when(trash.fs.mkdirs(any(Path.class), any(FsPermission.class))).thenAnswer(recordFirstPath(createdDirs));
      when(trash.fs.rename(any(Path.class), any(Path.class))).thenAnswer(recordRenames(movedPaths));

      trash.trash.createTrashSnapshot();

      Assert.assertEquals(createdDirs.size(), 1);
      Path createdDir = createdDirs.get(0);
      // The snapshot directory name encodes the (frozen) current time in UTC.
      Assert.assertEquals(Trash.TRASH_SNAPSHOT_NAME_FORMATTER.parseDateTime(createdDir.getName()),
          new DateTime().withZone(DateTimeZone.UTC));
      Assert.assertEquals(movedPaths.size(), 1);
      Assert.assertEquals(movedPaths.get(0).first(), pathInTrash);
      Assert.assertEquals(movedPaths.get(0).second().getName(), pathInTrash.getName());
      Assert.assertEquals(movedPaths.get(0).second().getParent(), createdDir);
    } finally {
      // Always restore the real clock, even if an assertion failed above.
      DateTimeUtils.setCurrentMillisSystem();
    }
  }

  /**
   * Purging should consult the configured cleanup policy ({@link TestCleanupPolicy}) and
   * delete only the snapshots the policy marks for removal (here: the day-old snapshot).
   */
  @Test
  public void testPurgeSnapshots() throws IOException {
    Properties properties = new Properties();
    properties.setProperty(Trash.SNAPSHOT_CLEANUP_POLICY_CLASS_KEY, TestCleanupPolicy.class.getCanonicalName());
    TrashTestBase trash = new TrashTestBase(properties);
    // Freeze the clock so the snapshot ages are deterministic.
    DateTimeUtils.setCurrentMillisFixed(new DateTime(2015, 7, 15, 10, 0).withZone(DateTimeZone.UTC).getMillis());
    try {
      final List<Path> deletedPaths = Lists.newArrayList();
      Path snapshot1 = new Path(trash.trash.getTrashLocation(), Trash.TRASH_SNAPSHOT_NAME_FORMATTER.print(new DateTime()));
      Path snapshot2 = new Path(trash.trash.getTrashLocation(),
          Trash.TRASH_SNAPSHOT_NAME_FORMATTER.print(new DateTime().minusDays(1)));

      when(trash.fs.listStatus(any(Path.class), any(PathFilter.class))).
          thenReturn(
              Lists.newArrayList(
                  new FileStatus(0, true, 0, 0, 0, snapshot1),
                  new FileStatus(0, true, 0, 0, 0, snapshot2))
              .toArray(new FileStatus[]{}));
      when(trash.fs.delete(any(Path.class), anyBoolean())).thenAnswer(recordFirstPath(deletedPaths));

      trash.trash.purgeTrashSnapshots();

      // Only the older snapshot is eligible for deletion.
      Assert.assertEquals(deletedPaths.size(), 1);
      Assert.assertEquals(deletedPaths.get(0), snapshot2);
    } finally {
      // Always restore the real clock, even if an assertion failed above.
      DateTimeUtils.setCurrentMillisSystem();
    }
  }

  /**
   * @param movedPaths list that receives every (source, destination) pair
   * @return a Mockito {@link Answer} recording the two {@link Path} arguments of a mocked
   *         {@code rename} call and reporting success
   */
  private Answer<Boolean> recordRenames(final List<Pair<Path, Path>> movedPaths) {
    return new Answer<Boolean>() {
      @Override
      public Boolean answer(InvocationOnMock invocation) throws Throwable {
        Object[] args = invocation.getArguments();
        movedPaths.add(new Pair<Path, Path>((Path) args[0], (Path) args[1]));
        return true;
      }
    };
  }

  /**
   * @param paths list that receives the first argument of each mocked call
   * @return a Mockito {@link Answer} recording the first {@link Path} argument of a mocked
   *         single-path call ({@code mkdirs}, {@code delete}) and reporting success
   */
  private Answer<Boolean> recordFirstPath(final List<Path> paths) {
    return new Answer<Boolean>() {
      @Override
      public Boolean answer(InvocationOnMock invocation) throws Throwable {
        paths.add((Path) invocation.getArguments()[0]);
        return true;
      }
    };
  }
}
| |
package au.com.agic.apptesting.utils.impl;
import au.com.agic.apptesting.constants.Constants;
import au.com.agic.apptesting.exception.DriverException;
import au.com.agic.apptesting.utils.ProxyDetails;
import au.com.agic.apptesting.utils.SystemPropertyUtils;
import au.com.agic.apptesting.utils.WebDriverFactory;
import com.google.gson.JsonObject;
import io.vavr.control.Try;
import org.apache.commons.lang3.StringUtils;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.Proxy;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.edge.EdgeDriver;
import org.openqa.selenium.edge.EdgeOptions;
import org.openqa.selenium.firefox.*;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.ie.InternetExplorerOptions;
import org.openqa.selenium.opera.OperaDriver;
import org.openqa.selenium.opera.OperaOptions;
import org.openqa.selenium.phantomjs.PhantomJSDriver;
import org.openqa.selenium.phantomjs.PhantomJSDriverService;
import org.openqa.selenium.remote.CapabilityType;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.safari.SafariDriver;
import org.openqa.selenium.safari.SafariOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.validation.constraints.NotNull;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import static au.com.agic.apptesting.constants.Constants.PHANTOMJS_LOGGING_LEVEL_SYSTEM_PROPERTY;
import static au.com.agic.apptesting.constants.Constants.PHANTON_JS_USER_AGENT_SYSTEM_PROPERTY;
/**
* An implementation of the web driver factory
*/
@Component
public class WebDriverFactoryImpl implements WebDriverFactory {

    private static final Logger LOGGER = LoggerFactory.getLogger(WebDriverFactoryImpl.class);

    // Fixed window size applied to PhantomJS sessions (see buildPhantomJS()).
    private static final int PHANTOM_JS_SCREEN_WIDTH = 1280;
    private static final int PHANTOM_JS_SCREEN_HEIGHT = 1024;
    // Page load timeout, in seconds, applied to PhantomJS sessions.
    private static final int PHANTOMJS_TIMEOUTS = 60;

    private static final SystemPropertyUtils SYSTEM_PROPERTY_UTILS = new SystemPropertyUtilsImpl();

    /**
     * Creates the WebDriver for the browser named by the test-destination system property;
     * unrecognised browser names fall through to a default Chrome instance.
     *
     * Note that we exit the application here if the driver could not be created. This is because an exception
     * during the creation of a driver can leave the binary running in the background with no way to clean it up
     * (this was observed in the Gecko Driver). To allow long running systems a chance to catch this resource leak,
     * we return with a specific error code.
     *
     * @param proxies The list of proxies that are used when configuring the web driver
     * @param tempFiles maintains a list of temp files that are deleted once Iridium is closed
     * @return The web driver for the given browser
     */
    @Override
    public WebDriver createWebDriver(
        @NotNull final List<ProxyDetails<?>> proxies,
        @NotNull final List<File> tempFiles) {
        final String browser = SYSTEM_PROPERTY_UTILS.getProperty(
            Constants.TEST_DESTINATION_SYSTEM_PROPERTY);

        /*
            Configure the proxy settings
        */
        final DesiredCapabilities capabilities = new DesiredCapabilities();

        /*
            Don't worry about ssl issues
        */
        capabilities.setCapability(CapabilityType.ACCEPT_SSL_CERTS, true);
        capabilities.setCapability(CapabilityType.ACCEPT_INSECURE_CERTS, true);

        /*
            Don't block popups
        */
        capabilities.setCapability("disable-popup-blocking", true);

        /*
            Find the proxy that the browser should point to
        */
        final Optional<ProxyDetails<?>> mainProxy = proxies.stream()
            .filter(ProxyDetails::isMainProxy)
            .findFirst();

        /*
            Add that proxy as a capability for browsers other than Firefox and Marionette.
            There is a bug in the geckodriver that prevents us from using capabilities for
            the proxy: https://github.com/mozilla/geckodriver/issues/669
        */
        if (!Constants.MARIONETTE.equalsIgnoreCase(browser)
            && !Constants.FIREFOX.equalsIgnoreCase(browser)
            && !Constants.FIREFOXHEADLESS.equalsIgnoreCase(browser)) {
            mainProxy
                .map(myMainProxy -> {
                    // Route HTTP, FTP and SSL traffic through the same local proxy port.
                    final Proxy proxy = new Proxy();
                    proxy.setProxyType(Proxy.ProxyType.MANUAL);
                    proxy.setHttpProxy("localhost:" + myMainProxy.getPort());
                    proxy.setFtpProxy("localhost:" + myMainProxy.getPort());
                    proxy.setSslProxy("localhost:" + myMainProxy.getPort());
                    return proxy;
                })
                .ifPresent(proxy -> capabilities.setCapability("proxy", proxy));
        }

        // Dispatch on the requested browser. Each branch either returns a driver or
        // terminates the JVM via exitWithError() if construction failed.
        if (Constants.MARIONETTE.equalsIgnoreCase(browser)) {
            return buildFirefox(browser, mainProxy, capabilities, false, false);
        }

        if (Constants.FIREFOX.equalsIgnoreCase(browser)) {
            return buildFirefox(browser, mainProxy, capabilities, false, false);
        }

        if (Constants.FIREFOXHEADLESS.equalsIgnoreCase(browser)) {
            return buildFirefox(browser, mainProxy, capabilities, false, true);
        }

        if (Constants.SAFARI.equalsIgnoreCase(browser)) {
            return Try.of(() -> new SafariDriver(new SafariOptions(capabilities)))
                .onFailure(ex -> exitWithError(browser, ex))
                .getOrElseThrow(ex -> new RuntimeException(ex));
        }

        if (Constants.OPERA.equalsIgnoreCase(browser)) {
            // Optionally point Opera at a custom binary before merging the shared capabilities.
            return Try.of(() -> new OperaOptions())
                .peek(opts -> SYSTEM_PROPERTY_UTILS.getPropertyAsOptional(Constants.OPERA_BIN_LOCATION_SYSTEM_PROPERTY)
                    .ifPresent(val -> opts.setBinary(val)))
                .peek(opts -> opts.merge(capabilities))
                .mapTry(opts -> new OperaDriver(opts))
                .onFailure(ex -> exitWithError(browser, ex))
                .getOrElseThrow(ex -> new RuntimeException(ex));
        }

        if (Constants.IE.equalsIgnoreCase(browser)) {
            // Note: "caps" below is the same object as "capabilities", so the mutation
            // also applies to the object handed to InternetExplorerOptions.
            return Try.of(() -> capabilities)
                /*
                    IE doesn't support this option.
                    org.openqa.selenium.SessionNotCreatedException: Unable to match capability set 0: acceptInsecureCerts was 'true',
                    but the IE driver does not allow bypassing insecure (self-signed) SSL certificates
                    Build info: version: 'unknown', revision: 'unknown', time: 'unknown'
                    System info: host: 'DESKTOP-JVNRAAG', ip: '172.19.255.145', os.name: 'Windows 10', os.arch: 'amd64', os.version: '10.0', java.version: '9'
                    Driver info: driver.version: InternetExplorerDriver
                */
                .andThenTry(caps -> caps.setCapability("acceptInsecureCerts", false))
                .mapTry(caps -> new InternetExplorerDriver(new InternetExplorerOptions(capabilities)))
                .onFailure(ex -> exitWithError(browser, ex))
                .getOrElseThrow(ex -> new RuntimeException(ex));
        }

        if (Constants.EDGE.equalsIgnoreCase(browser)) {
            return Try.of(() -> new EdgeDriver(new EdgeOptions().merge(capabilities)))
                .onFailure(ex -> exitWithError(browser, ex))
                .getOrElseThrow(ex -> new RuntimeException(ex));
        }

        if (Constants.CHROME_HEADLESS.equalsIgnoreCase(browser)) {
            return buildChrome(browser, mainProxy, capabilities, false, true, false);
        }

        if (Constants.CHROME_HEADLESS_SECURE.equalsIgnoreCase(browser)) {
            return buildChrome(browser, mainProxy, capabilities, true, true, false);
        }

        if (Constants.CHROME_SECURE.equalsIgnoreCase(browser)) {
            return buildChrome(browser, mainProxy, capabilities, true, false, false);
        }

        if (Constants.CHROME_FULLSCREEN.equalsIgnoreCase(browser)) {
            return buildChrome(browser, mainProxy, capabilities, false, false, true);
        }

        if (Constants.CHROME_SECURE_FULLSCREEN.equalsIgnoreCase(browser)) {
            return buildChrome(browser, mainProxy, capabilities, true, false, true);
        }

        if (Constants.PHANTOMJS.equalsIgnoreCase(browser)) {
            return buildPhantomJS(browser, capabilities, tempFiles);
        }

        // Default: a plain Chrome instance.
        return buildChrome(browser, mainProxy, capabilities, false, false, false);
    }

    /**
     * Logs the driver-creation failure and terminates the JVM with a dedicated exit code
     * so external supervisors can detect the leaked-driver condition (see class comment).
     */
    private void exitWithError(final String browser, final Throwable ex) {
        LOGGER.error("WEBAPPTESTER-BUG-0010: Failed to create the " + browser + " WebDriver", ex);
        System.exit(Constants.WEB_DRIVER_FAILURE_EXIT_CODE);
    }

    /**
     * Builds the base ChromeOptions shared by all Chrome variants: disables the built-in
     * credential/password manager prompts and optionally points at a custom Chrome binary.
     */
    private ChromeOptions buildChromeOptions() {
        final ChromeOptions options = new ChromeOptions();
        final Map<String, Object> prefs = new HashMap<String, Object>();
        prefs.put("credentials_enable_service", false);
        prefs.put("password_manager_enabled", false);
        options.setExperimentalOption("prefs", prefs);
        SYSTEM_PROPERTY_UTILS.getPropertyAsOptional(Constants.CHROME_EXECUTABLE_LOCATION_SYSTEM_PROPERTY)
            .ifPresent(options::setBinary);
        return options;
    }

    /**
     * Adds command line switches that lock down file system access, media capture,
     * sync/translate and various other features for the "secure" Chrome variants.
     */
    private void buildSecureChromeOptions(final ChromeOptions options) {
        options.addArguments("disable-file-system");
        options.addArguments("use-file-for-fake-audio-capture");
        options.addArguments("use-file-for-fake-video-capture");
        options.addArguments("use-fake-device-for-media-stream");
        options.addArguments("use-fake-ui-for-media-stream");
        options.addArguments("disable-sync");
        options.addArguments("disable-tab-for-desktop-share");
        options.addArguments("disable-translate");
        options.addArguments("disable-voice-input");
        options.addArguments("disable-volume-adjust-sound");
        options.addArguments("disable-wake-on-wifi");
    }

    /**
     * Creates a Chrome driver from the shared capabilities.
     *
     * @param browser      browser name, used only for error reporting
     * @param mainProxy    the main proxy (NOTE(review): currently unused here — the proxy
     *                     was already applied to {@code capabilities} by the caller; confirm)
     * @param capabilities shared capabilities to merge into the Chrome options
     * @param secure       apply the lock-down switches from buildSecureChromeOptions()
     * @param headless     run Chrome headless (with the workaround switches below)
     * @param fullscreen   start Chrome in kiosk (fullscreen) mode
     */
    private WebDriver buildChrome(
        final String browser,
        final Optional<ProxyDetails<?>> mainProxy,
        final DesiredCapabilities capabilities,
        final boolean secure,
        final boolean headless,
        final boolean fullscreen) {

        /*
            These options are documented at:
            https://developers.google.com/web/updates/2017/04/headless-chrome
        */
        final ChromeOptions options = buildChromeOptions();
        if (secure) {
            buildSecureChromeOptions(options);
        }

        /*
            There are some issues using Chrome headless
            https://bugs.chromium.org/p/chromium/issues/detail?id=721739
        */
        if (headless) {
            options.addArguments("headless");
            options.addArguments("disable-gpu");
            options.addArguments("no-sandbox");
            options.addArguments("allow-running-insecure-content");
            options.addArguments("ignore-certificate-errors");
            options.addArguments("window-size=1920,1080");
        }

        if (fullscreen) {
            options.addArguments("kiosk");
        }

        // Stash the options in the capability map, then merge everything into a fresh
        // ChromeOptions instance for the driver constructor.
        capabilities.setCapability(ChromeOptions.CAPABILITY, options);

        return Try.of(() -> new ChromeDriver(new ChromeOptions().merge(capabilities)))
            .onFailure(ex -> exitWithError(browser, ex))
            .getOrElseThrow(ex -> new RuntimeException(ex));
    }

    /**
     * @return The binary used to run firefox if it was set via the FIREFOX_BINARY system property,
     * or null if the FIREFOX_BINARY system property was not defined
     */
    private FirefoxBinary getFirefoxBinary() {
        final String firefoxBinary = SYSTEM_PROPERTY_UTILS.getProperty(Constants.FIREFOX_BINARY);
        if (firefoxBinary != null) {
            return new FirefoxBinary(new File(firefoxBinary));
        }

        return new FirefoxBinary();
    }

    /**
     * Creates a Firefox driver. The proxy is configured via a raw JSON capability rather
     * than the usual Proxy capability to work around a geckodriver limitation (link below).
     *
     * @param browser      browser name, used only for error reporting
     * @param mainProxy    main proxy applied to the capabilities / profile, if present
     * @param capabilities shared capabilities merged into the Firefox options
     * @param setProfile   build or load a Firefox profile (system-property selected)
     * @param headless     pass --headless to the Firefox binary
     */
    private WebDriver buildFirefox(
        final String browser,
        final Optional<ProxyDetails<?>> mainProxy,
        final DesiredCapabilities capabilities,
        final boolean setProfile,
        final boolean headless) {

        final FirefoxOptions options = new FirefoxOptions().merge(capabilities);

        /*
            https://github.com/lightbody/browsermob-proxy/issues/676
        */
        mainProxy.ifPresent(proxy -> {
            JsonObject json = new JsonObject();
            json.addProperty("proxyType", "manual");
            json.addProperty("httpProxy", "localhost");
            json.addProperty("httpProxyPort", proxy.getPort());
            json.addProperty("ftpProxy", "localhost");
            json.addProperty("ftpProxyPort", proxy.getPort());
            json.addProperty("sslProxy", "localhost");
            json.addProperty("sslProxyPort", proxy.getPort());
            json.addProperty("socksProxy", "localhost");
            json.addProperty("socksProxyPort", proxy.getPort());
            json.addProperty("socksVersion", 5);
            capabilities.setCapability(CapabilityType.PROXY, json);
        });

        /*
            Override the firefox binary
        */
        final FirefoxBinary firefoxBinary = getFirefoxBinary();

        if (headless) {
            firefoxBinary.addCommandLineOptions("--headless");
        }

        options.setBinary(firefoxBinary);

        final String firefoxProfile = SYSTEM_PROPERTY_UTILS.getProperty(
            Constants.FIREFOX_PROFILE_SYSTEM_PROPERTY);

        /*
            If we have not specified a profile via the system properties, go ahead
            and create one here.
        */
        if (setProfile) {
            if (StringUtils.isBlank(firefoxProfile)) {
                final FirefoxProfile profile = new FirefoxProfile();

                /*
                    This is required for the CI unit tests to pass with firefox
                */
                profile.setAcceptUntrustedCertificates(true);

                /*
                    Set the proxy
                */
                mainProxy.ifPresent(proxyDetails -> {
                    profile.setPreference("network.proxy.type", 1);
                    profile.setPreference("network.proxy.http", "localhost");
                    profile.setPreference("network.proxy.http_port", proxyDetails.getPort());
                    profile.setPreference("network.proxy.ssl", "localhost");
                    profile.setPreference("network.proxy.ssl_port", proxyDetails.getPort());
                    profile.setPreference("network.proxy.no_proxies_on", "");
                });

                options.setProfile(profile);
            } else {
                // Load the named profile from the local Firefox profiles.ini.
                final ProfilesIni profileLoader = new ProfilesIni();
                final FirefoxProfile profile = profileLoader.getProfile(firefoxProfile);
                options.setProfile(profile);
            }
        }

        return Try.of(() -> new FirefoxDriver(options))
            .onFailure(ex -> exitWithError(browser, ex))
            .getOrElseThrow(ex -> new RuntimeException(ex));
    }

    /**
     * Creates a PhantomJS driver with temp files for cookies, local storage and logging
     * (registered in {@code tempFiles} for cleanup on shutdown), a fixed window size and
     * a generous page load timeout.
     *
     * @throws DriverException if the temp files/directories could not be created
     */
    private WebDriver buildPhantomJS(
        final String browser,
        final DesiredCapabilities capabilities,
        final List<File> tempFiles) {
        try {
            /*
                PhantomJS will often report a lot of unnecessary errors, so by default
                we turn logging off. But you can override this behaviour with a
                system property.
            */
            final String loggingLevel = StringUtils.defaultIfBlank(
                SYSTEM_PROPERTY_UTILS.getProperty(PHANTOMJS_LOGGING_LEVEL_SYSTEM_PROPERTY),
                Constants.DEFAULT_PHANTOM_JS_LOGGING_LEVEL
            );

            /*
                Create a temp file for cookies and local session
            */
            final Path cookies = Files.createTempFile("phantomjs-cookies", ".txt");
            final Path session = Files.createTempDirectory("phantomjs-session");
            final Path log = Files.createTempFile("phantomjs", ".log");

            // Register for cleanup when Iridium shuts down.
            tempFiles.add(cookies.toFile());
            tempFiles.add(session.toFile());
            tempFiles.add(log.toFile());

            /*
                We need to ignore ssl errors
                https://vaadin.com/forum#!/thread/9200596
            */
            final String[] cliArgs = {
                "--ignore-ssl-errors=true",
                "--webdriver-loglevel=" + loggingLevel,
                "--local-storage-path=" + session.toString(),
                "--cookies-file=" + cookies.toString(),
                "--webdriver-logfile=" + log.toString()};
            capabilities.setCapability(PhantomJSDriverService.PHANTOMJS_CLI_ARGS, cliArgs);

            /*
                Configure a custom user agent
            */
            final String userAgent = SYSTEM_PROPERTY_UTILS.getPropertyEmptyAsNull(
                PHANTON_JS_USER_AGENT_SYSTEM_PROPERTY);

            if (StringUtils.isNotBlank(userAgent)) {
                capabilities.setCapability("phantomjs.page.settings.userAgent", userAgent);
            }

            return Try.of(() -> new PhantomJSDriver(capabilities))
                .andThenTry(driver -> {
                    /*
                        This is required by PhantomJS
                        https://github.com/angular/protractor/issues/585
                    */
                    driver.manage().window().setSize(
                        new Dimension(PHANTOM_JS_SCREEN_WIDTH, PHANTOM_JS_SCREEN_HEIGHT));

                    /*
                        Give the dev servers a large timeout
                    */
                    driver.manage().timeouts()
                        .pageLoadTimeout(PHANTOMJS_TIMEOUTS, TimeUnit.SECONDS);
                })
                .onFailure(ex -> exitWithError(browser, ex))
                .getOrElseThrow(ex -> new RuntimeException(ex));
        } catch (final IOException ex) {
            throw new DriverException("Could not create temp folder or file for PhantomJS cookies and session", ex);
        }
    }
}
| |
/*
*
* Copyright 2016 Big Data Curation Lab, University of Toronto,
* Patricia Arocena,
* Boris Glavic,
* Renee J. Miller
*
* This software also contains code derived from STBenchmark as described in
* with the permission of the authors:
*
* Bogdan Alexe, Wang-Chiew Tan, Yannis Velegrakis
*
* This code was originally described in:
*
* STBenchmark: Towards a Benchmark for Mapping Systems
* Alexe, Bogdan and Tan, Wang-Chiew and Velegrakis, Yannis
* PVLDB: Proceedings of the VLDB Endowment archive
* 2008, vol. 1, no. 1, pp. 230-244
*
* The copyright of the ToxGene (included as a jar file: toxgene.jar) belongs to
* Denilson Barbosa. The iBench distribution contains this jar file with the
* permission of the author of ToxGene
* (http://www.cs.toronto.edu/tox/toxgene/index.html)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package tresc.benchmark.schemaGen;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.log4j.Logger;
import org.vagabond.benchmark.model.TrampModelFactory;
import org.vagabond.util.CollectionUtils;
import org.vagabond.xmlmodel.AttrDefType;
import org.vagabond.xmlmodel.RelationType;
import smark.support.MappingScenario;
import tresc.benchmark.Configuration;
import tresc.benchmark.Constants;
import tresc.benchmark.Constants.ParameterName;
import vtools.dataModel.expression.Rule;
//MN this class generates random source inclusion dependencies - 3 April 2014
//MN how to output random regular inclusion dependencies? - 12 April 2014
//MN supports only one-attribute-to-one-attribute inclusion dependencies, checks only two-way cyclic paths, and does not allow self-referring inclusion dependencies
//MN added isCircularInclusionDependencyFK - 28 May 2014
//MN FIXED random foreign key generator to correctly set foreign keys when primaryKeySize>1 - 24 June 2014
//MN ToDo: fix random regular inclusion dependency generator to support primaryKeySize>1 - 24 June 2014
//MN at the moment, we don't have the feature - 24 June 2014
public class SourceInclusionDependencyGenerator implements ScenarioGenerator {
//MN this attribute has been considered so that we could inject random source inclusion dependencies into mappings - 14 April 2014
ArrayList<String> sids;
/**
 * Value holder for one inclusion dependency (from-relation attributes are contained in
 * to-relation attributes). {@link #toString()} doubles as the canonical de-duplication
 * key used when checking whether a dependency already exists.
 */
private class InclusionDependency{
    // Name of the referencing (left-hand side) relation.
    public String fromRelName;
    // Attributes of the referencing relation, in positional order.
    public String[] fromRelAttr;
    // Name of the referenced (right-hand side) relation.
    public String toRelName;
    // Attributes of the referenced relation, in positional order.
    public String[] toRelAttr;
    // Lazily built canonical key; see toString().
    public String toString = null;

    public InclusionDependency(String fromRelName, String[] fromRelAttr, String toRelName, String[] toRelAttr){
        this.fromRelName = fromRelName;
        // Defensive copies so later mutation of the caller's arrays cannot corrupt this object.
        this.fromRelAttr = Arrays.copyOf(fromRelAttr, fromRelAttr.length);
        this.toRelName = toRelName;
        this.toRelAttr = Arrays.copyOf(toRelAttr, toRelAttr.length);
    }

    @Override
    public String toString () {
        if (toString == null) {
            // BUG FIX: join attribute names with a ',' separator. The previous plain
            // concatenation made e.g. ["a","bc"] and ["ab","c"] produce identical keys,
            // so distinct dependencies could wrongly be treated as duplicates.
            StringBuilder key = new StringBuilder();
            key.append(fromRelName).append('|');
            appendAttrs(key, this.fromRelAttr);
            key.append('|').append(toRelName).append('|');
            appendAttrs(key, this.toRelAttr);
            toString = key.toString();
        }
        return toString;
    }

    // Appends the attribute names separated by ',' so array boundaries stay unambiguous.
    private void appendAttrs(StringBuilder sb, String[] attrs) {
        for (int i = 0; i < attrs.length; i++) {
            if (i > 0)
                sb.append(',');
            sb.append(attrs[i]);
        }
    }
}
static Logger log = Logger.getLogger(SourceInclusionDependencyGenerator.class);
//MN attempts to generate random source inclusion dependencies
@Override
public void generateScenario(MappingScenario scenario,
        Configuration configuration) throws Exception {
    if (log.isDebugEnabled())
        log.debug("Attempting to Generate Random Source Inclusion Dependencies");
    // Fresh collector for this run; exposed afterwards via getRandomSourceIDs().
    sids = new ArrayList<String> ();
    // Tracks every inclusion dependency created so far, used for de-duplication.
    Map<String, Map<String, InclusionDependency>> createdIds =
            new HashMap<String, Map<String, InclusionDependency>> ();
    Random rand = configuration.getRandomGenerator();
    // Only generate anything when a positive percentage of relations should get IDs.
    if (configuration.getParam(ParameterName.SourceInclusionDependencyPerc) > 0)
        generateRandomIDs(scenario, configuration, rand, createdIds);
}
/**
 * Returns the random source inclusion dependencies collected during the last
 * {@code generateScenario} run, so they can be injected into mappings.
 * NOTE(review): this exposes the internal mutable list directly — callers can modify
 * the generator's state; confirm whether a defensive copy is wanted.
 */
public ArrayList<String> getRandomSourceIDs(){
    return sids;
}
/**
 * Generates the configured number of random source inclusion dependencies, split into
 * regular inclusion dependencies and foreign keys.
 */
public void generateRandomIDs (MappingScenario scenario, Configuration configuration, Random _generator,
        Map<String, Map<String, InclusionDependency>> ids) throws Exception{
    // Turn the configured percentages into absolute counts. Example: with 10 source
    // relations, 80% inclusion-dependency percentage and 30% foreign-key percentage,
    // floor(0.8 * 10) = 8 IDs are produced, of which floor(0.3 * 8) = 2 are foreign keys.
    int relationCount = scenario.getDoc().getSchema(true).getRelationArray().length;
    double idFraction = configuration.getParam(Constants.ParameterName.SourceInclusionDependencyPerc) / 100.0;
    int totalIds = (int) Math.floor(idFraction * relationCount);
    double fkFraction = configuration.getParam(Constants.ParameterName.SourceInclusionDependencyFKPerc) / 100.0;
    int fkIds = (int) Math.floor(fkFraction * totalIds);

    // First the plain (non-FK) inclusion dependencies ...
    if (totalIds - fkIds > 0) {
        if (log.isDebugEnabled())
            log.debug("Generating Random Source Regular Inclusion Dependencies: ");
        generateRandomRegularInclusionDependency(totalIds, fkIds, scenario, configuration, _generator, ids);
    }

    // ... then the foreign keys.
    if (fkIds > 0) {
        if (log.isDebugEnabled())
            log.debug("Generating Random Source Foreign Keys: ");
        generateRandomForeignKey(fkIds, scenario, configuration, _generator, ids);
    }
}
//MN generates random source foreign keys
private void generateRandomForeignKey(int numIDFKs, MappingScenario scenario, Configuration configuration, Random _generator,
Map<String, Map<String, InclusionDependency>> ids) throws Exception {
Set<String[]> notPossible = new HashSet<String[]> ();
boolean sourceCircluarFKs = configuration.getParam(Constants.ParameterName.SourceCircularFK) != 0;
for(int i=0; i<numIDFKs; i++) {
RelationType[] rels = scenario.getDoc().getSchema(true).getRelationArray();
boolean done = false;
int max_tries = 10;
while(max_tries-- > 0 && !done) {
int fromRelIndex = _generator.nextInt(rels.length-1);
int toRelIndex = _generator.nextInt(rels.length-1);
// we already know it is not possible to add FKs between these two relation then continue
if (notPossible.contains(new String[] { rels[fromRelIndex].getName(), rels[toRelIndex].getName()}))
continue;
// not same relations
if (fromRelIndex == toRelIndex)
continue;
// to relation has to have a PK
if (!rels[toRelIndex].isSetPrimaryKey())
continue;
// from relation has to have more attributes than to relation PK
if (rels[toRelIndex].getPrimaryKey().sizeOfAttrArray() > rels[fromRelIndex].getAttrArray().length)
continue;
// get to PK attribute information
String[] toPKAttrs = scenario.getDoc().getPK(rels[toRelIndex].getName(), true);
AttrDefType[] toAttrs = rels[toRelIndex].getAttrArray();
String[] toRelAttrType = new String[toPKAttrs.length];
for(int j=0; j<toPKAttrs.length; j++)
for(int index=0; index<toAttrs.length; index++)
if(toAttrs[index].getName().equals(toPKAttrs[j])){
toRelAttrType[j] = toAttrs[index].getDataType();
break;
}
// determine from relation attributes that could be used for FK
String[] fromAttrs = new String [toPKAttrs.length];
int[] fromAttrsPos = new int [toPKAttrs.length];
String[] fromRelAttrType = new String [toPKAttrs.length];
int max_triesAttr = 10;
boolean doneAttr = false;
AttrDefType[] allFromAttrs = rels[fromRelIndex].getAttrArray();
while(max_triesAttr-- > 0 && !doneAttr) {
// create list of attr pos to pick from
ArrayList<Integer> allAttrPos = new ArrayList<Integer> ();
for(int j = 0; j < allFromAttrs.length; j++)
allAttrPos.add(j);
// determine random set of from attributes
for(int j=0; j<toPKAttrs.length; j++){
int fromRelAttrIndex = _generator.nextInt(allAttrPos.size());
allAttrPos.remove(fromRelAttrIndex);
fromAttrs[j] = allFromAttrs[fromRelAttrIndex].getName();
fromAttrsPos[j] = fromRelAttrIndex;
fromRelAttrType[j] = allFromAttrs[fromRelAttrIndex].getDataType();
}
// check that attribute types are the same for the picked from attributes and the to PK attributes
boolean failed = false;
for(int typeIndx=0; typeIndx<toPKAttrs.length; typeIndx++)
if(!fromRelAttrType[typeIndx].equals(toRelAttrType[typeIndx])) {
failed = true;
break;
}
// if we reach here we found matching attributes
doneAttr = !failed;
}
// if FK already exists then fail
if(existsID(ids, rels[fromRelIndex], rels[toRelIndex], fromAttrs, toPKAttrs))
continue;
// if it would create a circular inclusion dependency and the user has requested that no such dependencies should exist then fail
if(sourceCircluarFKs &&
isCircularInclusionDependencyFK(scenario, ids, rels[fromRelIndex].getName(),
rels[toRelIndex].getName(), fromAttrs, toPKAttrs))
continue;
// add inclusion dependency
if (addInclusionDependency (ids, rels[fromRelIndex].getName(), rels[toRelIndex].getName(),
fromAttrs, toPKAttrs, true)) {
TrampModelFactory fac = scenario.getDocFac();
fac.addForeignKey(rels[fromRelIndex].getName(), fromAttrs,
rels[toRelIndex].getName(), toPKAttrs, true);
if (log.isDebugEnabled()) {
log.debug("--------- GENERATING NEW RANDOM SOURCE FOREIGN KEY---------");
log.debug("fromRelName: " + rels[fromRelIndex].getName());
log.debug("toRelName: " + rels[toRelIndex].getName());
String temp1 = fromAttrs[0];
for(int k=1; k<fromAttrs.length; k++)
temp1 += fromAttrs[k];
log.debug("fromRelAttrName: " + temp1);
String temp2 = toPKAttrs[0];
for(int k=1; k<toPKAttrs.length; k++)
temp2 += toPKAttrs[k];
log.debug("toRelAttrName: " + temp2);
}
done = true;
}
}
log.debug("attempt to create FK - " + ((done) ? "SUCCESS" : "FAILED"));
}
//
// while(!done && (max_tries>0)){
// //we roll dice to choose from and to relations for generating foreign keys
// int fromRelIndex = _generator.nextInt(rels.length-1);
// int toRelIndex = -1;
//
// int max_triesIn = 10;
// boolean doneIn = false;
// while(max_triesIn>0 && ! doneIn){
// toRelIndex = _generator.nextInt(rels.length-1);
// //self-referring inclusion dependencies are not allowed -
// //note that I considered isSetPrimaryKey for generating foreign key
// //MN size of foreign key should be equal to size of primary key;
// //MN so, I added the new condition to if clause - 24 June 2014
// if((toRelIndex != fromRelIndex) && (rels[toRelIndex].isSetPrimaryKey()) &&
// rels[toRelIndex].getPrimaryKey().sizeOfAttrArray()<=rels[fromRelIndex].getAttrArray().length)
// doneIn=true;
// else
// max_triesIn--;
// }
//
// //it would be better to print it in log that we could not generate random regular inclusion dependencies
// if(!doneIn)
// break;
//
// //we roll dice to choose from rel attr and to rel attr for foreign key
//
// //MN toRelAttrs - 24 June 2014
// String[] toPKAttrs = scenario.getDoc().getPK(rels[toRelIndex].getName(), true);
//
// //int toRelAttrIndex = 0;
// //if(toPKAttrs.length > 1)
// //toRelAttrIndex = _generator.nextInt(toPKAttrs.length-1);
//
// //int fromRelAttrIndex = -1;
// //MN checking that both from and to attributes have the same type
//
// //MN get toRelAttrsTypes - 24 June 2014
// AttrDefType[] toAttrs = rels[toRelIndex].getAttrArray();
// String[] toRelAttrType = new String[toPKAttrs.length];
// for(int j=0; j<toPKAttrs.length; j++)
// for(int index=0; index<toAttrs.length; index++)
// if(toAttrs[index].getName().equals(toPKAttrs[j])){
// toRelAttrType[j] = toAttrs[index].getDataType();
// break;
// }
//
// //MN *****I have toRelAttrs and their types - 24 June 2014
// //int[] fromAttrPos = CollectionUtils.createSequence(0, rels[fromRelIndex].sizeOfAttrArray());
// //String[] fromNonKeyAttrs = (rels[fromRelIndex].isSetPrimaryKey()) ? getNonKeyAttributes(rels[fromRelIndex], scenario)
// // : scenario.getDoc().getAttrNames(rels[fromRelIndex].getName(),fromAttrPos, true);
//
// String[] fromAttrs = new String [toPKAttrs.length];
// int[] fromAttrsPos = new int [toPKAttrs.length];
//
// int max_triesAttr = 10;
// boolean doneAttr = false;
// while(max_triesAttr>0 && !doneAttr){
//
// for(int j=0; j<toPKAttrs.length; j++){
// int fromRelAttrIndex = _generator.nextInt(rels[fromRelIndex].sizeOfAttrArray()-1);
// boolean same =false;
// for(int g=0; g<j; g++)
// if(fromAttrsPos[g] == fromRelAttrIndex)
// same=true;
// if(!same){
// fromAttrs[j] = rels[fromRelIndex].getAttrArray(fromRelAttrIndex).getName();
// fromAttrsPos[j] = fromRelAttrIndex;
// }
// else{
// j--;
// }
// }
//
// //MN get fromRelAttrsTypes - 24 June 2014
// AttrDefType[] fromAttrsDef = rels[fromRelIndex].getAttrArray();
// String[] fromRelAttrType = new String [toPKAttrs.length];
// for(int j=0; j<toPKAttrs.length; j++) {
// for(int index=0; index<fromAttrsDef.length; index++)
// if(fromAttrsDef[index].toString().substring(fromAttrsDef[index].toString().indexOf("<Name>") + 6,
// fromAttrsDef[index].toString().indexOf("</Name>")).equals(fromAttrs[j])) {
// fromRelAttrType[j] = fromAttrsDef[index].toString().substring(fromAttrsDef[index].toString().indexOf("<DataType>") + 10,
// fromAttrsDef[index].toString().indexOf("</DataType>"));
// break;
// }
// }
// //String type = scenario.getSource().getSubElement(fromRelIndex).getSubElement(fromRelAttrIndex).getType().toString();
// doneAttr=true;
// for(int typeIndx=0; typeIndx<toPKAttrs.length; typeIndx++)
// if(!fromRelAttrType[typeIndx].equals(toRelAttrType[typeIndx]))
// doneAttr = false;
// //if(fromRelAttrType.equals(toRelAttrType))
// //doneAttr = true;
// //else
// if(!doneAttr)
// max_triesAttr --;
// }
//
// if(!doneAttr)
// break;
//
// //TODO fail early, check before
// if(existsID(ids, rels[fromRelIndex], rels[toRelIndex], fromAttrs, toPKAttrs)){
// max_tries--;
// }
// else{
// if(sourceCircluarFKs &&
// isCircularInclusionDependencyFK(scenario, ids, rels[fromRelIndex].getName(),
// rels[toRelIndex].getName(), fromAttrs, toPKAttrs)){
// max_tries--;
// }
// else{
// //add foreign key
// if (addInclusionDependency (ids, rels[fromRelIndex].getName(), rels[toRelIndex].getName(),
// fromAttrs, toPKAttrs, true)) {
//
// //MN BEGIN 24 August 2014 - corrected the way foreign keys are stored
// //for(int count=0; count<fromAttrs.length; count++){
// TrampModelFactory fac = scenario.getDocFac();
// fac.addForeignKey(rels[fromRelIndex].getName(), fromAttrs,
// rels[toRelIndex].getName(), toPKAttrs, true);
// //}
// //MN END
// if (log.isDebugEnabled()) {
// log.debug("--------- GENERATING NEW RANDOM SOURCE FOREIGN KEY---------");
// log.debug("fromRelName: " + rels[fromRelIndex].getName());
// log.debug("toRelName: " + rels[toRelIndex].getName());
// String temp1 = fromAttrs[0];
// for(int k=1; k<fromAttrs.length; k++)
// temp1 += fromAttrs[k];
// log.debug("fromRelAttrName: " + temp1);
// String temp2 = toPKAttrs[0];
// for(int k=1; k<toPKAttrs.length; k++)
// temp2 += toPKAttrs[k];
// log.debug("toRelAttrName: " + temp2);
// }
// done = true;
// }
// else
// max_tries --;
// }
// }
// }
}
/**
 * Generate random regular (non-foreign-key) inclusion dependencies.
 * <p>
 * NOTE(review): this method is intentionally a no-op. The entire previous
 * implementation was commented out in the original source; that dead code has
 * been removed (it remains available in version-control history). The
 * signature is kept unchanged so existing callers continue to compile.
 *
 * @param numIDs        total number of inclusion dependencies requested
 * @param numIDFKs      how many of those are foreign keys
 * @param scenario      the mapping scenario the dependencies would be added to
 * @param configuration generator configuration (e.g. circularity switches)
 * @param _generator    random source a future implementation would use
 * @param ids           index of already-created inclusion dependencies, keyed
 *                      by fromRel + fromAttrs + toRel + toAttrs
 * @throws Exception never thrown by the current (empty) implementation
 */
private void generateRandomRegularInclusionDependency(int numIDs, int numIDFKs, MappingScenario scenario,
        Configuration configuration, Random _generator,
        Map<String, Map<String, InclusionDependency>> ids) throws Exception{
    // Implementation disabled: regular (non-FK) inclusion dependencies are
    // currently not generated randomly.
}
/**
 * Checks whether adding a foreign key {@code from -> to} over the given
 * attribute lists would create a circular inclusion dependency, considering
 * both the randomly generated FKs indexed in {@code ids} and the FKs created
 * by mapping primitives.
 * <p>
 * MN 28 May 2014; fixed to support primaryKeySize&gt;1 - 24 June 2014.
 *
 * @param scenario mapping scenario whose source schema holds the primitive FKs
 * @param ids      index of random inclusion dependencies, keyed by
 *                 fromRel + fromAttrs + toRel + toAttrs (no separators)
 * @param from     name of the referencing relation of the candidate FK
 * @param to       name of the referenced relation of the candidate FK
 * @param fromAttr attributes of the candidate FK on the {@code from} side
 * @param toAttr   attributes of the candidate FK on the {@code to} side
 * @return true if the reverse dependency {@code to -> from} already exists
 */
private boolean isCircularInclusionDependencyFK (MappingScenario scenario, Map<String, Map<String,
        InclusionDependency>> ids, String from, String to, String[] fromAttr, String[] toAttr)
{
    // Key format mirrors addInclusionDependency: relation name followed by the
    // concatenated attribute names, for both sides.
    StringBuilder fromKey = new StringBuilder(fromAttr[0]);
    for (int indx = 1; indx < fromAttr.length; indx++)
        fromKey.append(fromAttr[indx]);
    StringBuilder toKey = new StringBuilder(toAttr[0]);
    for (int indx = 1; indx < toAttr.length; indx++)
        toKey.append(toAttr[indx]);
    // The reverse dependency (to -> from) already exists among the random FKs.
    if (ids.containsKey(to + toKey + from + fromKey))
        return true;
    // MN - 28 May 2014 - also check circularity against FKs generated by
    // mapping primitives.
    // TODO before starting, index the FKs created by the primitives in ids
    ArrayList<Rule> fks = scenario.getSource().getForeignKeyConstraints();
    for (int i = 0; i < fks.size(); i++) {
        Rule fk = fks.get(i);
        if (fk.getLeftTerms().toString().contains(to)
                && fk.getRightTerms().toString().contains(from)) {
            // BUGFIX: the original returned false as soon as one attribute of
            // a candidate FK did not match (aborting the scan of the remaining
            // FKs) and never returned true when every attribute matched, so
            // circularity against primitive FKs was never detected.
            String conditions = fk.getRightConditions().toString();
            boolean allMatch = true;
            for (int g = 0; g < fromAttr.length; g++) {
                if (!(conditions.contains(fromAttr[g]) && conditions.contains(toAttr[g]))) {
                    allMatch = false;
                    break;
                }
            }
            if (allMatch)
                return true;
        }
    }
    return false;
}
// private boolean isCircularInclusionDependency (Map<String, Map<String, InclusionDependency>> ids, String from, String to, String fromAttr, String toAttr)
// {
// for(int i=0; i<ids.size(); i++)
// {
// if (ids.containsKey(to + toAttr + from + fromAttr))
// return true;
// }
//
// return false;
// }
/**
 * Checks whether an inclusion dependency with the same from-relation,
 * from-attributes, to-relation, and to-attributes is already indexed.
 * MN modified the code to support primaryKeySize&gt;1.
 */
private boolean existsID (Map<String, Map<String, InclusionDependency>> ids, RelationType from, RelationType to,
        String[] fromAttr, String[] toAttr)
{
    // Build the lookup key exactly the way addInclusionDependency does:
    // fromRel + concatenated from-attrs + toRel + concatenated to-attrs.
    StringBuilder key = new StringBuilder(from.getName());
    key.append(fromAttr[0]);
    for (int indx = 1; indx < fromAttr.length; indx++)
        key.append(fromAttr[indx]);
    key.append(to.getName());
    key.append(toAttr[0]);
    for (int indx = 1; indx < toAttr.length; indx++)
        key.append(toAttr[indx]);
    return ids.containsKey(key.toString());
}
/**
 * Registers a new inclusion dependency under its relation/attribute key.
 * MN fixed the method to support primaryKeySize&gt;1 - 24 June 2014.
 *
 * @return true if the dependency was newly added, false if an identical one
 *         was already present under the same key
 */
private boolean addInclusionDependency (Map<String, Map<String,InclusionDependency>> ids, String fromRelName, String toRelName,
        String[] fromRelAttrName, String[] toRelAttrName, boolean foreignKey){
    final InclusionDependency id = new InclusionDependency (fromRelName, fromRelAttrName,
            toRelName, toRelAttrName);
    // Key layout: fromRel + from-attrs + toRel + to-attrs (no separators),
    // matching existsID and isCircularInclusionDependencyFK.
    StringBuilder keyBuilder = new StringBuilder(fromRelName);
    keyBuilder.append(fromRelAttrName[0]);
    for (int indx = 1; indx < fromRelAttrName.length; indx++)
        keyBuilder.append(fromRelAttrName[indx]);
    keyBuilder.append(toRelName);
    keyBuilder.append(toRelAttrName[0]);
    for (int indx = 1; indx < toRelAttrName.length; indx++)
        keyBuilder.append(toRelAttrName[indx]);
    final String key = keyBuilder.toString();
    Map<String, InclusionDependency> relMap = ids.get(key);
    if (relMap == null) {
        relMap = new HashMap<String, InclusionDependency> ();
        ids.put(key, relMap);
        //MN add random source regular inclusion dependency to sids - 14 April 2014
        if (!foreignKey)
            sids.add(id.toString());
    }
    if (relMap.get(id.toString()) != null)
        return false;
    relMap.put(id.toString(), id);
    return true;
}
/**
 * Get all of the non-key attributes of a relation.
 *
 * @param r
 *            The relation to get the attributes from
 * @param scenario
 *            The mapping scenario
 *
 * @return An array of the names of all of the non-key attributes
 * @throws Exception
 */
static String[] getNonKeyAttributes(RelationType r, MappingScenario scenario) throws Exception
{
    // Positions occupied by the primary key.
    int[] pkPos = scenario.getDoc().getPKPos(r.getName(), true);
    // Positions of every attribute: 0 .. n-1.
    int numAttrs = r.getAttrArray().length;
    int[] attrPos = new int[numAttrs];
    for (int pos = 0; pos < numAttrs; pos++)
        attrPos[pos] = pos;
    // Drop the PK positions, then resolve the survivors to attribute names.
    int[] nonKeyPos = stripOutPKPositions(numAttrs, pkPos, attrPos);
    return scenario.getDoc().getAttrNames(r.getName(), nonKeyPos, true);
}
/**
 * Takes an array of all attribute positions and removes the ones that are
 * associated with the primary key.
 *
 * @param numAttr
 *            The amount of attributes
 * @param pkPos
 *            The positions associated with the primary key
 * @param attrPos
 *            The positions of all attributes in the relation
 *
 * @return An array with all of the positions not associated with the
 *         primary key
 */
static int[] stripOutPKPositions(int numAttr, int[] pkPos, int[] attrPos)
{
    int[] nonKeyPos = new int[numAttr - pkPos.length];
    int lastAdded = 0;
    for (int pos : attrPos)
    {
        // Primitive boolean flag (the original used boxed Boolean, causing
        // needless autoboxing on every iteration).
        boolean isPk = false;
        for (int pkPosition : pkPos)
        {
            if (pos == pkPosition)
            {
                isPk = true;
                break; // no need to scan the remaining PK positions
            }
        }
        if (!isPk)
            nonKeyPos[lastAdded++] = pos;
    }
    return nonKeyPos;
}
}
| |
package android.bluetooth;
/*
* #%L
* Matos
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2010 - 2014 Orange SA
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
@com.francetelecom.rd.stubs.annotation.ClassDone(0)
/**
 * Compile-time stub of Android's {@code android.bluetooth.BluetoothClass}.
 * <p>
 * Generated skeleton (see the {@code @ClassDone} annotation): the constant
 * values mirror the Bluetooth Class-of-Device/Service assigned numbers, while
 * every method body simply returns a fixed default value and carries no real
 * behavior. Do not hand-edit the logic; this class exists only so code under
 * analysis can link against the Android API surface.
 */
@com.francetelecom.rd.stubs.annotation.ClassDone(0)
public final class BluetoothClass
  implements android.os.Parcelable
{
  // Classes

  /** Service-class bit masks (each constant is a distinct bit). */
  public static final class Service
  {
    // Fields
    public static final int LIMITED_DISCOVERABILITY = 8192;
    public static final int POSITIONING = 65536;
    public static final int NETWORKING = 131072;
    public static final int RENDER = 262144;
    public static final int CAPTURE = 524288;
    public static final int OBJECT_TRANSFER = 1048576;
    public static final int AUDIO = 2097152;
    public static final int TELEPHONY = 4194304;
    public static final int INFORMATION = 8388608;
    // Constructors
    public Service(){
    }
  }

  /** Device-class constants (major class + minor class combined). */
  public static class Device
  {
    // Classes

    /** Major device-class values only (minor bits zeroed). */
    public static class Major
    {
      // Fields
      public static final int MISC = 0;
      public static final int COMPUTER = 256;
      public static final int PHONE = 512;
      public static final int NETWORKING = 768;
      public static final int AUDIO_VIDEO = 1024;
      public static final int PERIPHERAL = 1280;
      public static final int IMAGING = 1536;
      public static final int WEARABLE = 1792;
      public static final int TOY = 2048;
      public static final int HEALTH = 2304;
      public static final int UNCATEGORIZED = 7936;
      // Constructors
      public Major(){
      }
    }
    // Fields
    public static final int COMPUTER_UNCATEGORIZED = 256;
    public static final int COMPUTER_DESKTOP = 260;
    public static final int COMPUTER_SERVER = 264;
    public static final int COMPUTER_LAPTOP = 268;
    public static final int COMPUTER_HANDHELD_PC_PDA = 272;
    public static final int COMPUTER_PALM_SIZE_PC_PDA = 276;
    public static final int COMPUTER_WEARABLE = 280;
    public static final int PHONE_UNCATEGORIZED = 512;
    public static final int PHONE_CELLULAR = 516;
    public static final int PHONE_CORDLESS = 520;
    public static final int PHONE_SMART = 524;
    public static final int PHONE_MODEM_OR_GATEWAY = 528;
    public static final int PHONE_ISDN = 532;
    public static final int AUDIO_VIDEO_UNCATEGORIZED = 1024;
    public static final int AUDIO_VIDEO_WEARABLE_HEADSET = 1028;
    public static final int AUDIO_VIDEO_HANDSFREE = 1032;
    public static final int AUDIO_VIDEO_MICROPHONE = 1040;
    public static final int AUDIO_VIDEO_LOUDSPEAKER = 1044;
    public static final int AUDIO_VIDEO_HEADPHONES = 1048;
    public static final int AUDIO_VIDEO_PORTABLE_AUDIO = 1052;
    public static final int AUDIO_VIDEO_CAR_AUDIO = 1056;
    public static final int AUDIO_VIDEO_SET_TOP_BOX = 1060;
    public static final int AUDIO_VIDEO_HIFI_AUDIO = 1064;
    public static final int AUDIO_VIDEO_VCR = 1068;
    public static final int AUDIO_VIDEO_VIDEO_CAMERA = 1072;
    public static final int AUDIO_VIDEO_CAMCORDER = 1076;
    public static final int AUDIO_VIDEO_VIDEO_MONITOR = 1080;
    public static final int AUDIO_VIDEO_VIDEO_DISPLAY_AND_LOUDSPEAKER = 1084;
    public static final int AUDIO_VIDEO_VIDEO_CONFERENCING = 1088;
    public static final int AUDIO_VIDEO_VIDEO_GAMING_TOY = 1096;
    public static final int WEARABLE_UNCATEGORIZED = 1792;
    public static final int WEARABLE_WRIST_WATCH = 1796;
    public static final int WEARABLE_PAGER = 1800;
    public static final int WEARABLE_JACKET = 1804;
    public static final int WEARABLE_HELMET = 1808;
    public static final int WEARABLE_GLASSES = 1812;
    public static final int TOY_UNCATEGORIZED = 2048;
    public static final int TOY_ROBOT = 2052;
    public static final int TOY_VEHICLE = 2056;
    public static final int TOY_DOLL_ACTION_FIGURE = 2060;
    public static final int TOY_CONTROLLER = 2064;
    public static final int TOY_GAME = 2068;
    public static final int HEALTH_UNCATEGORIZED = 2304;
    public static final int HEALTH_BLOOD_PRESSURE = 2308;
    public static final int HEALTH_THERMOMETER = 2312;
    public static final int HEALTH_WEIGHING = 2316;
    public static final int HEALTH_GLUCOSE = 2320;
    public static final int HEALTH_PULSE_OXIMETER = 2324;
    public static final int HEALTH_PULSE_RATE = 2328;
    public static final int HEALTH_DATA_DISPLAY = 2332;
    public static final int PERIPHERAL_NON_KEYBOARD_NON_POINTING = 1280;
    public static final int PERIPHERAL_KEYBOARD = 1344;
    public static final int PERIPHERAL_POINTING = 1408;
    public static final int PERIPHERAL_KEYBOARD_POINTING = 1472;
    // Constructors
    public Device(){
    }
  }
  // Fields
  public static final int ERROR = -16777216;
  public static final android.os.Parcelable.Creator<BluetoothClass> CREATOR = null; // stub: no real Parcelable support
  public static final int PROFILE_HEADSET = 0;
  public static final int PROFILE_A2DP = 1;
  public static final int PROFILE_OPP = 2;
  public static final int PROFILE_HID = 3;
  public static final int PROFILE_PANU = 4;
  public static final int PROFILE_NAP = 5;
  // Constructors
  public BluetoothClass(int arg1){
  }
  // Methods — all stubbed to fixed default values.
  public boolean equals(java.lang.Object arg1){
    return false;
  }
  public java.lang.String toString(){
    return (java.lang.String) null;
  }
  public int hashCode(){
    return 0;
  }
  public void writeToParcel(android.os.Parcel arg1, int arg2){
  }
  public int describeContents(){
    return 0;
  }
  public boolean hasService(int arg1){
    return false;
  }
  public int getMajorDeviceClass(){
    return 0;
  }
  public int getDeviceClass(){
    return 0;
  }
  public boolean doesClassMatch(int arg1){
    return false;
  }
}
| |
/**************************************************************************************
Copyright 2015 Applied Research Associates, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the License
at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under
the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
**************************************************************************************/
/**
*
*/
package mil.tatrc.physiology.utilities;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Collections;
import java.util.List;
/**
* @author abray
*
*/
public class DoubleUtils implements Serializable
{
public static void main(String[] args)
{
System.out.println(truncate(0.004999999999999999,2));
double err=percentTolerance(0.0349066,0.035,1E-20);
System.out.println(err);
if(err<0.001)
System.out.println("equal");
else
System.err.println("not equal");
DoubleUtils opts = new DoubleUtils();
//opts.truncateMantissasBy(9);
opts.matchFromMantissa();
if(DoubleUtils.equals(-0.000792888, -0.000792891,opts))
System.out.println("equal");
else
System.err.println("not equal");
}
public enum EqualMode {PercentDifference,PercentTolerance,AbsoluteError}
protected EqualMode mode;
protected double equalLimit;
protected double epsilon;
public enum MantissaOption { None,MatchFrom, MatchTo, ManualMantissa }
protected MantissaOption mantissaOption;
protected int mantissaLength;
/** Two approaches to checking for less than and less than or equal to.
*
* THE REASON FOR TWO MODES: Depending on the equalLimit the user chooses,
* values can be reported to be less than (and/or
* equal to) when in reality they are numerically
* larger when using the "EqualMode" option.
* This can lead to unexpected behavior if the
* user is not aware.
* For this reason, the "Strict" option will
* provide numerically reliable results.
*
* For checking for less than:
* "Equal Mode" Approach - Ensures the value is (1) truly less than (<) and
* (2) the values are not 'equal' as determined
* using the DoubleUtils equals method (hence
* using the equalLimit and Equalmode defined in
* the DoubleUtils object)
* "Strict" Approach - Ensures the value is truly less than (<). Does not
* check to see if values are equal, only that they
* are less than.
*
* For checking for less than or equal to:
* "Equal Mode" Approach - Ensures the value is (1) truly less than (<) or
* (2) the values are 'equal' as determined
* using the DoubleUtils equals method (hence
* using the equalLimit and Equalmode defined in
* the DoubleUtils object)
* "Strict" Approach - Checks if the value is greater than (>) only. If
* value is greater than, then the return values
* indicates the value is not less than or equal to.
*/
public enum LessThanOption { EqualMode, Strict }
protected LessThanOption lessThanOption;
protected double lastError;
public DoubleUtils()
{
this.mode = EqualMode.PercentDifference;
this.equalLimit = 0.5;
this.epsilon = 1E-20;
this.mantissaOption = MantissaOption.None;
this.mantissaLength=-1;
this.lessThanOption = LessThanOption.Strict;
}
public DoubleUtils(DoubleUtils du)
{
this.mode=du.mode;
this.equalLimit=du.equalLimit;
this.epsilon=du.epsilon;
this.mantissaOption = du.mantissaOption;
this.mantissaLength = du.mantissaLength;
this.lessThanOption = du.lessThanOption;
}
public DoubleUtils(EqualMode mode)
{
this();
if(mode==null)
{
Log.warn("Mode not provided, using defaults");
return;
}
this.mode=mode;
if(EqualMode.PercentDifference.equals(mode))
this.equalLimit=0.5;
else if(EqualMode.PercentTolerance.equals(mode))
this.equalLimit=0.5;
else if(EqualMode.AbsoluteError.equals(mode))
this.equalLimit=0.1;
this.mantissaOption = MantissaOption.None;
this.mantissaLength=-1;
this.lessThanOption = LessThanOption.Strict;
}
public DoubleUtils(EqualMode mode, double limit)
{
this();
if(mode==null)
{
Log.warn("Mode not provided, using defaults");
return;
}
this.mode=mode;
this.equalLimit=limit;
this.mantissaOption = MantissaOption.None;
this.mantissaLength=-1;
this.lessThanOption = LessThanOption.Strict;
}
public void setPercentDifference(double limit)
{
this.mode=EqualMode.PercentDifference;
this.equalLimit=limit;
}
public void setPercentTolerance(double limit)
{
this.mode=EqualMode.PercentTolerance;
this.equalLimit=limit;
this.epsilon=1E-20;
}
/**
*
* @param limit -
* @param epsilon
*/
public void setPercentTolerance(double limit, double epsilon)
{
this.mode=EqualMode.PercentTolerance;
this.equalLimit=limit;
this.epsilon=epsilon;
if(epsilon == 0.0)
Log.warn("Epsilon has been set to 0.0. Unexpected results may occur.");
if(epsilon < 0.0)
Log.warn("Epsilon has been set to a negative value (" + epsilon + "). Unexpected results may occur.");
}
public void setAbsoluteError(double limit)
{
this.mode=EqualMode.AbsoluteError;
this.equalLimit=limit;
}
public void setLessThanOption(LessThanOption lessThanOption)
{
this.lessThanOption = lessThanOption;
}
public EqualMode getMode(){return this.mode;}
public void setMode(EqualMode m){this.mode=m;}
public double getLimit(){return this.equalLimit;}
public void setLimit(double limit){this.equalLimit=limit;}
public LessThanOption getLessThanOption(){return this.lessThanOption;}
/**
* The to value mantissa will be truncated
* to the same length as the from value mantissa
*/
public void matchFromMantissa()
{
this.mantissaOption=MantissaOption.MatchFrom;
}
/**
* The from value mantissa will be truncated
* to the same length as the to value mantissa
*/
public void matchToMantissa()
{
this.mantissaOption=MantissaOption.MatchTo;
}
/**
* Both mantissas will be truncated to this length
* before equal test
*/
public void truncateMantissasTo(int mantissaLength)
{
this.mantissaOption=MantissaOption.ManualMantissa;
if(mantissaLength < 0)
{
Log.warn("Mantissa Length is less than zero (" + mantissaLength + "). Using absolute value of manitssa length instead");
this.mantissaLength=Math.abs(mantissaLength);
}
else
this.mantissaLength=mantissaLength;
}
/**
* Mantissa will not be affected
*/
public void clearMantissaOption()
{
this.mantissaOption=MantissaOption.None;
}
public void set(DoubleUtils to)
{
this.mode=to.mode;
this.equalLimit=to.equalLimit;
this.epsilon=to.epsilon;
this.mantissaOption = to.mantissaOption;
this.mantissaLength = to.mantissaLength;
this.lessThanOption = to.lessThanOption;
}
public double getLastError(){return lastError;}
public static double truncate(double d, int mantissaLength)
{
if(Double.isNaN(d))
return d;
int mantissaLengthcopy = mantissaLength;
if(mantissaLengthcopy < 0)
{
Log.warn("Mantissa Length is less than zero (" + mantissaLengthcopy + "). Using absolute value of manitssa length instead");
mantissaLengthcopy = Math.abs(mantissaLengthcopy);
}
double new_d;
BigDecimal bd = new BigDecimal(d);
bd = bd.setScale(mantissaLengthcopy, BigDecimal.ROUND_HALF_UP);
new_d = bd.doubleValue();
//String s = StringUtils.toString(new_d,mantissaLength);
//double e = Double.parseDouble(s);
return new_d;
}
/**
* Truncate via BigDecimal and you can specify the round mode
* @param d
* @param mantissaLength
* @param bigDecimalRoundMode
* @return
*/
public static double truncate(double d, int mantissaLength, int bigDecimalRoundMode)
{
if(Double.isNaN(d))
return d;
int mantissaLengthcopy = mantissaLength;
if(mantissaLengthcopy < 0)
{
Log.warn("Mantissa Length is less than zero (" + mantissaLengthcopy + "). Using absolute value of manitssa length instead");
mantissaLengthcopy = Math.abs(mantissaLengthcopy);
}
BigDecimal bd = new BigDecimal(d);
bd = bd.setScale(mantissaLengthcopy, bigDecimalRoundMode);
return bd.doubleValue();
}
public static boolean equals(double fValue, double tValue)
{
return equals(fValue, tValue, null);
}
// Tolerance is a percentage, not a decimal value
public static boolean equals(double fValue, double tValue, DoubleUtils opts)
{
return equals(fValue,"",tValue,"", opts);
}
public static boolean equals(double fValue, String fUnit, double tValue, String tUnit)
{
return equals(fValue,fUnit,tValue,tUnit,null);
}
public static boolean equals(double fValue, String fUnit, double tValue, String tUnit, DoubleUtils opts)
{
if(!validDoubles(fValue,tValue))
return false;
if(fValue==0&&tValue==0)
return true;
if(Double.isNaN(fValue)&&Double.isNaN(tValue))
return true;
double tmp=tValue;
if(!tUnit.equals(fUnit))
tmp = UnitConverter.convert(tValue, tUnit, fUnit);
if(opts!=null)
{
if(MantissaOption.MatchFrom.equals(opts.mantissaOption))
{
tmp=truncate(tmp,mantissaLength(fValue));
}
else if(MantissaOption.MatchTo.equals(opts.mantissaOption))
{
tmp=truncate(tmp,mantissaLength(tValue));
fValue=truncate(fValue,mantissaLength(tValue));
}
else if(MantissaOption.ManualMantissa.equals(opts.mantissaOption))
{
fValue=truncate(fValue, opts.mantissaLength);
tmp=truncate(tmp, opts.mantissaLength);
}
}
boolean equals;
if(opts==null || EqualMode.PercentDifference.equals(opts.mode))
{
if(opts!=null)
{
opts.setLastError(percentDifference(fValue,tmp));
// Note: percentDifference returns the absolute value of the difference,
// but it can also return negative infinity in some case. Hence, we
// need the absolute value on the check for equality below.
equals = Math.abs(opts.lastError) <= opts.equalLimit;
}
else
{
equals= Math.abs(percentDifference(fValue,tmp)) <= 0.5;// Default
}
}
else if(EqualMode.PercentTolerance.equals(opts.mode))
{
opts.setLastError(percentTolerance(fValue, tmp, opts.epsilon));
// Percent tolerance can (correctly) be negative. However, when
// comparing to a limit, we should use the absolute value
equals = Math.abs(opts.lastError) <= opts.equalLimit;
}
else
{
opts.setLastError(Math.abs(fValue - tmp));
equals = opts.lastError <= opts.equalLimit;
}
return equals;
}
protected void setLastError(double err){lastError=err;}
public static double percentTolerance(double expected, double calculated, double epsilon)
{
if (expected == 0.0)
{
if (calculated == 0.0)
return 0;
else
{
if (Math.abs(calculated) > epsilon)
return Double.NaN;
else
return 0;
}
}
return (calculated - expected) / expected * 100.0;
}
public static double percentDifference(double expected, double calculated)
{
if(calculated==0&&expected==0)
return 0;
double difference = (calculated - expected);
double average = (calculated + expected) / 2.0;
if(average == 0.0)
{
if(difference > 0.0)
return Double.POSITIVE_INFINITY;
else
return Double.NEGATIVE_INFINITY;
}
else
return (difference / average) * 100.0;
}
public static boolean lessThan(double value, double limit)
{
return lessThan(value,"",limit,"",null);
}
public static boolean lessThan(double value, String vUnit, double limit, String lUnit)
{
return lessThan(value,vUnit,limit,lUnit,null);
}
public static boolean lessThan(double value, double limit, DoubleUtils opts)
{
return lessThan(value,"",limit,"",opts);
}
public static boolean lessThan(double value, String vUnit, double limit, String lUnit, DoubleUtils opts)
{
if(!validDoubles(value,limit))
return false;
double tmp=value;
if(!vUnit.equals(lUnit))
tmp = UnitConverter.convert(value, vUnit, lUnit);
if(LessThanOption.Strict.equals(opts.lessThanOption))
{
if(tmp<limit)
return true;
}
else
{
if(tmp<limit && !equals(tmp,limit,opts))
return true;
}
return false;
}
public static boolean lessThanEqualTo(double value, double limit)
{
return lessThanEqualTo(value,"",limit,"",null);
}
public static boolean lessThanEqualTo(double value, String vUnit, double limit, String lUnit)
{
return lessThanEqualTo(value,vUnit,limit,lUnit,null);
}
public static boolean lessThanEqualTo(double value, double limit, DoubleUtils opts)
{
return lessThanEqualTo(value,"",limit,"",opts);
}
public static boolean lessThanEqualTo(double value, String vUnit, double limit, String lUnit, DoubleUtils opts)
{
if(!validDoubles(value,limit))
return false;
double tmp=value;
if(!vUnit.equals(lUnit))
tmp = UnitConverter.convert(value, vUnit, lUnit);
if(null == opts)
{
// In this case, always assume you want a "strict" check
if(tmp>limit)
return false;
else
return true;
}
else
{
if(LessThanOption.Strict.equals(opts.lessThanOption))
{
if(tmp>limit)
return false;
else
return true;
}
else
{
if(tmp<limit || equals(tmp,limit,opts))
return true;
}
}
return false;
}
/**
 * Number of digits after the decimal point in the shortest decimal
 * representation of {@code d} (e.g. 1.25 -> 2, 3.0 -> 1).
 *
 * NOTE(review): for large magnitudes BigDecimal.valueOf uses scientific
 * notation and scale() can be negative (e.g. 1.0E10 -> scale -9); Math.abs then
 * reports the exponent width rather than a count of fractional digits — confirm
 * that is the intended behavior for such inputs.
 *
 * @param d the value to inspect
 * @return the absolute scale of the canonical BigDecimal representation
 */
public static int mantissaLength(double d)
{
    final BigDecimal decimal = BigDecimal.valueOf(d);
    return Math.abs(decimal.scale());
}
/**
 * Tests whether {@code lowerLimit <= value <= upperLimit}, inclusive on both ends.
 * Logs a warning (but still evaluates) when the bounds are reversed or identical.
 *
 * @param lowerLimit inclusive lower bound
 * @param value      the value to test
 * @param upperLimit inclusive upper bound
 * @return true when value lies within the closed interval
 */
public static boolean inRange(double lowerLimit, double value, double upperLimit)
{
    if (upperLimit < lowerLimit) {
        Log.warn("While determining if value is in range, upper bound is less than lower bound. Unexpected results may occur.");
    } else if (upperLimit == lowerLimit) {
        Log.warn("While determining if value is in range, upper bound is equal to lower bound. Unexpected results may occur.");
    }
    return lessThanEqualTo(lowerLimit, value) && lessThanEqualTo(value, upperLimit);
}
/**
 * Largest element of the list.
 * Returns -Double.MAX_VALUE for an empty list. NaN entries never win the
 * {@code >} comparison and are therefore skipped (deliberately NOT Math.max,
 * which would propagate NaN).
 *
 * @param list the values to scan
 * @return the maximum element, or -Double.MAX_VALUE when the list is empty
 */
public static double getMax(List<Double> list)
{
    double best = -Double.MAX_VALUE;
    for (final double candidate : list) {
        best = (candidate > best) ? candidate : best;
    }
    return best;
}
/**
 * Largest element of the array.
 * Returns -Double.MAX_VALUE for an empty array. NaN entries never win the
 * {@code >} comparison and are therefore skipped (deliberately NOT Math.max,
 * which would propagate NaN).
 *
 * @param ary the values to scan
 * @return the maximum element, or -Double.MAX_VALUE when the array is empty
 */
public static double getMax(double[] ary)
{
    double best = -Double.MAX_VALUE;
    for (final double candidate : ary) {
        best = (candidate > best) ? candidate : best;
    }
    return best;
}
/**
 * Smallest element of the list.
 * Returns Double.MAX_VALUE for an empty list. NaN entries never win the
 * {@code <} comparison and are therefore skipped.
 *
 * @param list the values to scan
 * @return the minimum element, or Double.MAX_VALUE when the list is empty
 */
public static double getMin(List<Double> list)
{
    double best = Double.MAX_VALUE;
    for (final double candidate : list) {
        best = (candidate < best) ? candidate : best;
    }
    return best;
}
/**
 * Smallest element of the array.
 * Returns Double.MAX_VALUE for an empty array. NaN entries never win the
 * {@code <} comparison and are therefore skipped.
 *
 * @param ary the values to scan
 * @return the minimum element, or Double.MAX_VALUE when the array is empty
 */
public static double getMin(double[] ary)
{
    double best = Double.MAX_VALUE;
    for (final double candidate : ary) {
        best = (candidate < best) ? candidate : best;
    }
    return best;
}
/**
 * Arithmetic mean of the list.
 * An empty list yields NaN (0.0 / 0 in double arithmetic) — unchanged behavior.
 *
 * @param values the values to average
 * @return the mean of the values
 */
public static double getAverage(List<Double> values)
{
    double total = 0;
    for (final double v : values) {
        total += v;
    }
    return total / values.size();
}
/**
 * Arithmetic mean of the array.
 * An empty array yields NaN (0.0 / 0 in double arithmetic) — unchanged behavior.
 *
 * @param ary the values to average
 * @return the mean of the values
 */
public static double getAverage(double[] ary)
{
    double total = 0;
    for (final double v : ary) {
        total += v;
    }
    return total / ary.length;
}
/**
 * Population standard deviation of the list (divides by N, not N-1).
 *
 * @param values the sample values
 * @return sqrt(mean of squared deviations from the mean)
 */
public static double getStandardDeviation(List<Double> values)
{
    // Mean, inlined (same computation as getAverage) to keep this block self-contained.
    double total = 0;
    for (final double v : values) {
        total += v;
    }
    final double mean = total / values.size();
    double squaredError = 0;
    for (final double v : values) {
        final double deviation = v - mean;
        squaredError += deviation * deviation;
    }
    return Math.sqrt(squaredError / values.size());
}
/**
 * A pair of doubles is comparable only when both agree on "specialness":
 * rejects pairs where exactly one side is NaN, or exactly one side is infinite.
 *
 * @param a first value
 * @param b second value
 * @return false when the NaN-ness or infinity-ness of the two values differs
 */
private static boolean validDoubles(double a, double b)
{
    // XOR is equivalent to the original (x && !y) || (y && !x) formulation.
    final boolean nanMismatch = Double.isNaN(a) ^ Double.isNaN(b);
    final boolean infinityMismatch = Double.isInfinite(a) ^ Double.isInfinite(b);
    return !(nanMismatch || infinityMismatch);
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.workdocs.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request model for the Amazon WorkDocs CreateUser operation.
 *
 * NOTE: this class is generated by the AWS SDK code generator (see the
 * {@code @Generated} annotation) — do not hand-edit; regenerate instead.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workdocs-2016-05-01/CreateUser" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateUserRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The ID of the organization.
     * </p>
     */
    private String organizationId;
    /**
     * <p>
     * The login name of the user.
     * </p>
     */
    private String username;
    /**
     * <p>
     * The email address of the user.
     * </p>
     */
    private String emailAddress;
    /**
     * <p>
     * The given name of the user.
     * </p>
     */
    private String givenName;
    /**
     * <p>
     * The surname of the user.
     * </p>
     */
    private String surname;
    /**
     * <p>
     * The password of the user.
     * </p>
     */
    private String password;
    /**
     * <p>
     * The time zone ID of the user.
     * </p>
     */
    private String timeZoneId;
    /**
     * <p>
     * The amount of storage for the user.
     * </p>
     */
    private StorageRuleType storageRule;
    /**
     * <p>
     * Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the API.
     * </p>
     */
    private String authenticationToken;

    /**
     * <p>
     * The ID of the organization.
     * </p>
     *
     * @param organizationId
     *        The ID of the organization.
     */
    public void setOrganizationId(String organizationId) {
        this.organizationId = organizationId;
    }

    /**
     * <p>
     * The ID of the organization.
     * </p>
     *
     * @return The ID of the organization.
     */
    public String getOrganizationId() {
        return this.organizationId;
    }

    /**
     * <p>
     * The ID of the organization.
     * </p>
     *
     * @param organizationId
     *        The ID of the organization.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateUserRequest withOrganizationId(String organizationId) {
        setOrganizationId(organizationId);
        return this;
    }

    /**
     * <p>
     * The login name of the user.
     * </p>
     *
     * @param username
     *        The login name of the user.
     */
    public void setUsername(String username) {
        this.username = username;
    }

    /**
     * <p>
     * The login name of the user.
     * </p>
     *
     * @return The login name of the user.
     */
    public String getUsername() {
        return this.username;
    }

    /**
     * <p>
     * The login name of the user.
     * </p>
     *
     * @param username
     *        The login name of the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateUserRequest withUsername(String username) {
        setUsername(username);
        return this;
    }

    /**
     * <p>
     * The email address of the user.
     * </p>
     *
     * @param emailAddress
     *        The email address of the user.
     */
    public void setEmailAddress(String emailAddress) {
        this.emailAddress = emailAddress;
    }

    /**
     * <p>
     * The email address of the user.
     * </p>
     *
     * @return The email address of the user.
     */
    public String getEmailAddress() {
        return this.emailAddress;
    }

    /**
     * <p>
     * The email address of the user.
     * </p>
     *
     * @param emailAddress
     *        The email address of the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateUserRequest withEmailAddress(String emailAddress) {
        setEmailAddress(emailAddress);
        return this;
    }

    /**
     * <p>
     * The given name of the user.
     * </p>
     *
     * @param givenName
     *        The given name of the user.
     */
    public void setGivenName(String givenName) {
        this.givenName = givenName;
    }

    /**
     * <p>
     * The given name of the user.
     * </p>
     *
     * @return The given name of the user.
     */
    public String getGivenName() {
        return this.givenName;
    }

    /**
     * <p>
     * The given name of the user.
     * </p>
     *
     * @param givenName
     *        The given name of the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateUserRequest withGivenName(String givenName) {
        setGivenName(givenName);
        return this;
    }

    /**
     * <p>
     * The surname of the user.
     * </p>
     *
     * @param surname
     *        The surname of the user.
     */
    public void setSurname(String surname) {
        this.surname = surname;
    }

    /**
     * <p>
     * The surname of the user.
     * </p>
     *
     * @return The surname of the user.
     */
    public String getSurname() {
        return this.surname;
    }

    /**
     * <p>
     * The surname of the user.
     * </p>
     *
     * @param surname
     *        The surname of the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateUserRequest withSurname(String surname) {
        setSurname(surname);
        return this;
    }

    /**
     * <p>
     * The password of the user.
     * </p>
     *
     * @param password
     *        The password of the user.
     */
    public void setPassword(String password) {
        this.password = password;
    }

    /**
     * <p>
     * The password of the user.
     * </p>
     *
     * @return The password of the user.
     */
    public String getPassword() {
        return this.password;
    }

    /**
     * <p>
     * The password of the user.
     * </p>
     *
     * @param password
     *        The password of the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateUserRequest withPassword(String password) {
        setPassword(password);
        return this;
    }

    /**
     * <p>
     * The time zone ID of the user.
     * </p>
     *
     * @param timeZoneId
     *        The time zone ID of the user.
     */
    public void setTimeZoneId(String timeZoneId) {
        this.timeZoneId = timeZoneId;
    }

    /**
     * <p>
     * The time zone ID of the user.
     * </p>
     *
     * @return The time zone ID of the user.
     */
    public String getTimeZoneId() {
        return this.timeZoneId;
    }

    /**
     * <p>
     * The time zone ID of the user.
     * </p>
     *
     * @param timeZoneId
     *        The time zone ID of the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateUserRequest withTimeZoneId(String timeZoneId) {
        setTimeZoneId(timeZoneId);
        return this;
    }

    /**
     * <p>
     * The amount of storage for the user.
     * </p>
     *
     * @param storageRule
     *        The amount of storage for the user.
     */
    public void setStorageRule(StorageRuleType storageRule) {
        this.storageRule = storageRule;
    }

    /**
     * <p>
     * The amount of storage for the user.
     * </p>
     *
     * @return The amount of storage for the user.
     */
    public StorageRuleType getStorageRule() {
        return this.storageRule;
    }

    /**
     * <p>
     * The amount of storage for the user.
     * </p>
     *
     * @param storageRule
     *        The amount of storage for the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateUserRequest withStorageRule(StorageRuleType storageRule) {
        setStorageRule(storageRule);
        return this;
    }

    /**
     * <p>
     * Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the API.
     * </p>
     *
     * @param authenticationToken
     *        Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the
     *        API.
     */
    public void setAuthenticationToken(String authenticationToken) {
        this.authenticationToken = authenticationToken;
    }

    /**
     * <p>
     * Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the API.
     * </p>
     *
     * @return Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the
     *         API.
     */
    public String getAuthenticationToken() {
        return this.authenticationToken;
    }

    /**
     * <p>
     * Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the API.
     * </p>
     *
     * @param authenticationToken
     *        Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the
     *        API.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateUserRequest withAuthenticationToken(String authenticationToken) {
        setAuthenticationToken(authenticationToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getOrganizationId() != null)
            sb.append("OrganizationId: ").append(getOrganizationId()).append(",");
        if (getUsername() != null)
            sb.append("Username: ").append(getUsername()).append(",");
        if (getEmailAddress() != null)
            sb.append("EmailAddress: ").append(getEmailAddress()).append(",");
        if (getGivenName() != null)
            sb.append("GivenName: ").append(getGivenName()).append(",");
        if (getSurname() != null)
            sb.append("Surname: ").append(getSurname()).append(",");
        // Password and authentication token are intentionally redacted.
        if (getPassword() != null)
            sb.append("Password: ").append("***Sensitive Data Redacted***").append(",");
        if (getTimeZoneId() != null)
            sb.append("TimeZoneId: ").append(getTimeZoneId()).append(",");
        if (getStorageRule() != null)
            sb.append("StorageRule: ").append(getStorageRule()).append(",");
        if (getAuthenticationToken() != null)
            sb.append("AuthenticationToken: ").append("***Sensitive Data Redacted***");
        sb.append("}");
        return sb.toString();
    }

    // Generated equality: for each field, the XOR test rejects pairs where exactly
    // one side is null; non-null values are then compared with equals().
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof CreateUserRequest == false)
            return false;
        CreateUserRequest other = (CreateUserRequest) obj;
        if (other.getOrganizationId() == null ^ this.getOrganizationId() == null)
            return false;
        if (other.getOrganizationId() != null && other.getOrganizationId().equals(this.getOrganizationId()) == false)
            return false;
        if (other.getUsername() == null ^ this.getUsername() == null)
            return false;
        if (other.getUsername() != null && other.getUsername().equals(this.getUsername()) == false)
            return false;
        if (other.getEmailAddress() == null ^ this.getEmailAddress() == null)
            return false;
        if (other.getEmailAddress() != null && other.getEmailAddress().equals(this.getEmailAddress()) == false)
            return false;
        if (other.getGivenName() == null ^ this.getGivenName() == null)
            return false;
        if (other.getGivenName() != null && other.getGivenName().equals(this.getGivenName()) == false)
            return false;
        if (other.getSurname() == null ^ this.getSurname() == null)
            return false;
        if (other.getSurname() != null && other.getSurname().equals(this.getSurname()) == false)
            return false;
        if (other.getPassword() == null ^ this.getPassword() == null)
            return false;
        if (other.getPassword() != null && other.getPassword().equals(this.getPassword()) == false)
            return false;
        if (other.getTimeZoneId() == null ^ this.getTimeZoneId() == null)
            return false;
        if (other.getTimeZoneId() != null && other.getTimeZoneId().equals(this.getTimeZoneId()) == false)
            return false;
        if (other.getStorageRule() == null ^ this.getStorageRule() == null)
            return false;
        if (other.getStorageRule() != null && other.getStorageRule().equals(this.getStorageRule()) == false)
            return false;
        if (other.getAuthenticationToken() == null ^ this.getAuthenticationToken() == null)
            return false;
        if (other.getAuthenticationToken() != null && other.getAuthenticationToken().equals(this.getAuthenticationToken()) == false)
            return false;
        return true;
    }

    // Generated hash: standard 31-based accumulation over all fields (null -> 0),
    // consistent with equals() above.
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getOrganizationId() == null) ? 0 : getOrganizationId().hashCode());
        hashCode = prime * hashCode + ((getUsername() == null) ? 0 : getUsername().hashCode());
        hashCode = prime * hashCode + ((getEmailAddress() == null) ? 0 : getEmailAddress().hashCode());
        hashCode = prime * hashCode + ((getGivenName() == null) ? 0 : getGivenName().hashCode());
        hashCode = prime * hashCode + ((getSurname() == null) ? 0 : getSurname().hashCode());
        hashCode = prime * hashCode + ((getPassword() == null) ? 0 : getPassword().hashCode());
        hashCode = prime * hashCode + ((getTimeZoneId() == null) ? 0 : getTimeZoneId().hashCode());
        hashCode = prime * hashCode + ((getStorageRule() == null) ? 0 : getStorageRule().hashCode());
        hashCode = prime * hashCode + ((getAuthenticationToken() == null) ? 0 : getAuthenticationToken().hashCode());
        return hashCode;
    }

    @Override
    public CreateUserRequest clone() {
        return (CreateUserRequest) super.clone();
    }

}
| |
package virtuozo.interfaces;
import virtuozo.infra.StyleChooser;
import com.google.gwt.dom.client.SpanElement;
/**
 * FontAwesome (v4.x class names) icon set for virtuozo UI components.
 * Each constant carries the icon's CSS class (e.g. "fa-user"). A few spinner
 * entries (CIRCLE_O_NOTCH, COG, REFRESH, SPINNER) additionally embed the
 * "fa-spin" animation class directly in their key.
 */
public enum FontAwesome implements Icon {
  EYEDROPPER( "fa-eyedropper"),
  FUTBOL_O( "fa-futbol-o"),
  NEWSPAPER_O( "fa-newspaper-o"),
  PAINT_BRUSH( "fa-paint-brush"),
  PLUG( "fa-plug"),
  SOCCER_BALL_O( "fa-soccer-ball-o"),
  TOGGLE_OFF( "fa-toggle-off"),
  TOGGLE_ON( "fa-toggle-on"),
  TRASH( "fa-trash"),
  TTY( "fa-tty"),
  WIFI( "fa-wifi"),
  ADJUST( "fa-adjust"),
  ANCHOR( "fa-anchor"),
  ARCHIVE( "fa-archive"),
  ASTERISK( "fa-asterisk"),
  AT( "fa-at"),
  AUTOMOBILE( "fa-automobile"),
  BAN( "fa-ban"),
  BANK( "fa-bank"),
  BARCODE( "fa-barcode"),
  BARS( "fa-bars"),
  BEER( "fa-beer"),
  BELL( "fa-bell"),
  BELL_O( "fa-bell-o"),
  BELL_SLASH( "fa-bell-slash"),
  BELL_SLASH_O( "fa-bell-slash-o"),
  BICYCLE( "fa-bicycle"),
  BINOCULARS( "fa-binoculars"),
  BIRTHDAY_CAKE( "fa-birthday-cake"),
  BOLT( "fa-bolt"),
  BOMB( "fa-bomb"),
  BOOK( "fa-book"),
  BOOKMARK( "fa-bookmark"),
  BOOKMARK_O( "fa-bookmark-o"),
  BRIEFCASE( "fa-briefcase"),
  BUG( "fa-bug"),
  BUILDING( "fa-building"),
  BUILDING_O( "fa-building-o"),
  BULLHORN( "fa-bullhorn"),
  BULLSEYE( "fa-bullseye"),
  BUS( "fa-bus"),
  CAB( "fa-cab"),
  CALCULATOR( "fa-calculator"),
  CALENDAR( "fa-calendar"),
  CALENDAR_O( "fa-calendar-o"),
  CAMERA( "fa-camera"),
  CAMERA_RETRO( "fa-camera-retro"),
  CAR( "fa-car"),
  CC( "fa-cc"),
  CERTIFICATE( "fa-certificate"),
  CHECK( "fa-check"),
  CHECK_CIRCLE( "fa-check-circle"),
  CHECK_CIRCLE_O( "fa-check-circle-o"),
  CHILD( "fa-child"),
  CIRCLE_THIN( "fa-circle-thin"),
  CLOCK_O( "fa-clock-o"),
  CLOSE( "fa-close"),
  CLOUD( "fa-cloud"),
  CLOUD_DOWNLOAD( "fa-cloud-download"),
  CLOUD_UPLOAD( "fa-cloud-upload"),
  CODE( "fa-code"),
  CODE_FORK( "fa-code-fork"),
  COFFEE( "fa-coffee"),
  COGS( "fa-cogs"),
  COMMENT( "fa-comment"),
  COMMENT_O( "fa-comment-o"),
  COMMENTS( "fa-comments"),
  COMMENTS_O( "fa-comments-o"),
  COMPASS( "fa-compass"),
  COPYRIGHT( "fa-copyright"),
  CROP( "fa-crop"),
  CROSSHAIRS( "fa-crosshairs"),
  CUBE( "fa-cube"),
  CUBES( "fa-cubes"),
  CUTLERY( "fa-cutlery"),
  DASHBOARD( "fa-dashboard"),
  DATABASE( "fa-database"),
  DESKTOP( "fa-desktop"),
  DOWNLOAD( "fa-download"),
  EDIT( "fa-edit"),
  ELLIPSIS_H( "fa-ellipsis-h"),
  ELLIPSIS_V( "fa-ellipsis-v"),
  ENVELOPE( "fa-envelope"),
  ENVELOPE_O( "fa-envelope-o"),
  ENVELOPE_SQUARE( "fa-envelope-square"),
  EXCHANGE( "fa-exchange"),
  EXCLAMATION( "fa-exclamation"),
  EXCLAMATION_CIRCLE( "fa-exclamation-circle"),
  EXCLAMATION_TRIANGLE( "fa-exclamation-triangle"),
  EXTERNAL_LINK( "fa-external-link"),
  EXTERNAL_LINK_SQUARE( "fa-external-link-square"),
  EYE( "fa-eye"),
  EYE_SLASH( "fa-eye-slash"),
  FAX( "fa-fax"),
  FEMALE( "fa-female"),
  FIGHTER_JET( "fa-fighter-jet"),
  FILM( "fa-film"),
  FILTER( "fa-filter"),
  FIRE( "fa-fire"),
  FIRE_EXTINGUISHER( "fa-fire-extinguisher"),
  FLAG( "fa-flag"),
  FLAG_CHECKERED( "fa-flag-checkered"),
  FLAG_O( "fa-flag-o"),
  FLASH( "fa-flash"),
  FLASK( "fa-flask"),
  FOLDER( "fa-folder"),
  FOLDER_O( "fa-folder-o"),
  FOLDER_OPEN( "fa-folder-open"),
  FOLDER_OPEN_O( "fa-folder-open-o"),
  FROWN_O( "fa-frown-o"),
  GAMEPAD( "fa-gamepad"),
  GAVEL( "fa-gavel"),
  GEARS( "fa-gears"),
  GIFT( "fa-gift"),
  GLASS( "fa-glass"),
  GLOBE( "fa-globe"),
  GRADUATION_CAP( "fa-graduation-cap"),
  GROUP( "fa-group"),
  HDD_O( "fa-hdd-o"),
  HEADPHONES( "fa-headphones"),
  HEART( "fa-heart"),
  HEART_O( "fa-heart-o"),
  HISTORY( "fa-history"),
  HOME( "fa-home"),
  IMAGE( "fa-image"),
  INBOX( "fa-inbox"),
  INFO( "fa-info"),
  INFO_CIRCLE( "fa-info-circle"),
  INSTITUTION( "fa-institution"),
  KEY( "fa-key"),
  KEYBOARD_O( "fa-keyboard-o"),
  LANGUAGE( "fa-language"),
  LAPTOP( "fa-laptop"),
  LEAF( "fa-leaf"),
  LEGAL( "fa-legal"),
  LEMON_O( "fa-lemon-o"),
  LEVEL_DOWN( "fa-level-down"),
  LEVEL_UP( "fa-level-up"),
  LIFE_BUOY( "fa-life-buoy"),
  LIFE_RING( "fa-life-ring"),
  LIFE_SAVER( "fa-life-saver"),
  LIGHTBULB_O( "fa-lightbulb-o"),
  LOCATION_ARROW( "fa-location-arrow"),
  LOCK( "fa-lock"),
  MAGIC( "fa-magic"),
  MAGNET( "fa-magnet"),
  MAIL_FORWARD( "fa-mail-forward"),
  MAIL_REPLY( "fa-mail-reply"),
  MAIL_REPLY_ALL( "fa-mail-reply-all"),
  MALE( "fa-male"),
  MAP_MARKER( "fa-map-marker"),
  MEH_O( "fa-meh-o"),
  MICROPHONE( "fa-microphone"),
  MICROPHONE_SLASH( "fa-microphone-slash"),
  MINUS( "fa-minus"),
  MINUS_CIRCLE( "fa-minus-circle"),
  MOBILE( "fa-mobile"),
  MOBILE_PHONE( "fa-mobile-phone"),
  MOON_O( "fa-moon-o"),
  MORTAR_BOARD( "fa-mortar-board"),
  MUSIC( "fa-music"),
  NAVICON( "fa-navicon"),
  PAPER_PLANE( "fa-paper-plane"),
  PAPER_PLANE_O( "fa-paper-plane-o"),
  PAW( "fa-paw"),
  PENCIL( "fa-pencil"),
  PENCIL_SQUARE( "fa-pencil-square"),
  PENCIL_SQUARE_O( "fa-pencil-square-o"),
  PHONE( "fa-phone"),
  PHONE_SQUARE( "fa-phone-square"),
  PHOTO( "fa-photo"),
  PICTURE_O( "fa-picture-o"),
  PLANE( "fa-plane"),
  PLUS( "fa-plus"),
  PLUS_CIRCLE( "fa-plus-circle"),
  POWER_OFF( "fa-power-off"),
  PRINT( "fa-print"),
  PUZZLE_PIECE( "fa-puzzle-piece"),
  QRCODE( "fa-qrcode"),
  QUESTION( "fa-question"),
  QUESTION_CIRCLE( "fa-question-circle"),
  QUOTE_LEFT( "fa-quote-left"),
  QUOTE_RIGHT( "fa-quote-right"),
  RANDOM( "fa-random"),
  RECYCLE( "fa-recycle"),
  REMOVE( "fa-remove"),
  REORDER( "fa-reorder"),
  REPLY( "fa-reply"),
  REPLY_ALL( "fa-reply-all"),
  RETWEET( "fa-retweet"),
  ROAD( "fa-road"),
  ROCKET( "fa-rocket"),
  RSS( "fa-rss"),
  RSS_SQUARE( "fa-rss-square"),
  SEARCH( "fa-search"),
  SEARCH_MINUS( "fa-search-minus"),
  SEARCH_PLUS( "fa-search-plus"),
  SEND( "fa-send"),
  SEND_O( "fa-send-o"),
  SHARE( "fa-share"),
  SHARE_SQUARE( "fa-share-square"),
  SHARE_SQUARE_O( "fa-share-square-o"),
  SHIELD( "fa-shield"),
  SHOPPING_CART( "fa-shopping-cart"),
  SIGN_IN( "fa-sign-in"),
  SIGN_OUT( "fa-sign-out"),
  SIGNAL( "fa-signal"),
  SITEMAP( "fa-sitemap"),
  SLIDERS( "fa-sliders"),
  SMILE_O( "fa-smile-o"),
  SORT( "fa-sort"),
  SORT_ALPHA_ASC( "fa-sort-alpha-asc"),
  SORT_ALPHA_DESC( "fa-sort-alpha-desc"),
  SORT_AMOUNT_ASC( "fa-sort-amount-asc"),
  SORT_AMOUNT_DESC( "fa-sort-amount-desc"),
  SORT_ASC( "fa-sort-asc"),
  SORT_DESC( "fa-sort-desc"),
  SORT_DOWN( "fa-sort-down"),
  SORT_NUMERIC_ASC( "fa-sort-numeric-asc"),
  SORT_NUMERIC_DESC( "fa-sort-numeric-desc"),
  SORT_UP( "fa-sort-up"),
  SPACE_SHUTTLE( "fa-space-shuttle"),
  SPOON( "fa-spoon"),
  STAR( "fa-star"),
  STAR_HALF( "fa-star-half"),
  STAR_HALF_EMPTY( "fa-star-half-empty"),
  STAR_HALF_FULL( "fa-star-half-full"),
  STAR_HALF_O( "fa-star-half-o"),
  STAR_O( "fa-star-o"),
  SUITCASE( "fa-suitcase"),
  SUN_O( "fa-sun-o"),
  SUPPORT( "fa-support"),
  TABLET( "fa-tablet"),
  TACHOMETER( "fa-tachometer"),
  TAG( "fa-tag"),
  TAGS( "fa-tags"),
  TASKS( "fa-tasks"),
  TAXI( "fa-taxi"),
  TERMINAL( "fa-terminal"),
  THUMB_TACK( "fa-thumb-tack"),
  THUMBS_DOWN( "fa-thumbs-down"),
  THUMBS_O_DOWN( "fa-thumbs-o-down"),
  THUMBS_O_UP( "fa-thumbs-o-up"),
  THUMBS_UP( "fa-thumbs-up"),
  TICKET( "fa-ticket"),
  TIMES( "fa-times"),
  TIMES_CIRCLE( "fa-times-circle"),
  TIMES_CIRCLE_O( "fa-times-circle-o"),
  TINT( "fa-tint"),
  TRASH_O( "fa-trash-o"),
  TREE( "fa-tree"),
  TROPHY( "fa-trophy"),
  TRUCK( "fa-truck"),
  UMBRELLA( "fa-umbrella"),
  UNIVERSITY( "fa-university"),
  UNLOCK( "fa-unlock"),
  UNLOCK_ALT( "fa-unlock-alt"),
  UNSORTED( "fa-unsorted"),
  UPLOAD( "fa-upload"),
  USER( "fa-user"),
  USERS( "fa-users"),
  VIDEO_CAMERA( "fa-video-camera"),
  VOLUME_DOWN( "fa-volume-down"),
  VOLUME_OFF( "fa-volume-off"),
  VOLUME_UP( "fa-volume-up"),
  WARNING( "fa-warning"),
  WRENCH( "fa-wrench"),
  FILE( "fa-file"),
  FILE_ARCHIVE_O( "fa-file-archive-o"),
  FILE_AUDIO_O( "fa-file-audio-o"),
  FILE_CODE_O( "fa-file-code-o"),
  FILE_EXCEL_O( "fa-file-excel-o"),
  FILE_IMAGE_O( "fa-file-image-o"),
  FILE_MOVIE_O( "fa-file-movie-o"),
  FILE_O( "fa-file-o"),
  FILE_PDF_O( "fa-file-pdf-o"),
  FILE_PHOTO_O( "fa-file-photo-o"),
  FILE_PICTURE_O( "fa-file-picture-o"),
  FILE_POWERPOINT_O( "fa-file-powerpoint-o"),
  FILE_SOUND_O( "fa-file-sound-o"),
  FILE_TEXT( "fa-file-text"),
  FILE_TEXT_O( "fa-file-text-o"),
  FILE_VIDEO_O( "fa-file-video-o"),
  FILE_WORD_O( "fa-file-word-o"),
  FILE_ZIP_O( "fa-file-zip-o"),
  // Spinner icons: the animation class is baked into the key.
  CIRCLE_O_NOTCH( "fa-circle-o-notch fa-spin"),
  COG( "fa-cog fa-spin"),
  GEAR( "fa-gear"),
  REFRESH( "fa-refresh fa-spin"),
  SPINNER( "fa-spinner fa-spin"),
  CHECK_SQUARE( "fa-check-square"),
  CHECK_SQUARE_O( "fa-check-square-o"),
  CIRCLE( "fa-circle"),
  CIRCLE_O( "fa-circle-o"),
  DOT_CIRCLE_O( "fa-dot-circle-o"),
  MINUS_SQUARE( "fa-minus-square"),
  MINUS_SQUARE_O( "fa-minus-square-o"),
  PLUS_SQUARE_O( "fa-plus-square-o"),
  SQUARE( "fa-square"),
  SQUARE_O( "fa-square-o"),
  CREDIT_CARD( "fa-credit-card"),
  AREA_CHART( "fa-area-chart"),
  BAR_CHART( "fa-bar-chart"),
  BAR_CHART_O( "fa-bar-chart-o"),
  LINE_CHART( "fa-line-chart"),
  PIE_CHART( "fa-pie-chart"),
  CNY( "fa-cny"),
  DOLLAR( "fa-dollar"),
  EUR( "fa-eur"),
  EURO( "fa-euro"),
  GBP( "fa-gbp"),
  ILS( "fa-ils"),
  INR( "fa-inr"),
  JPY( "fa-jpy"),
  KRW( "fa-krw"),
  MONEY( "fa-money"),
  RMB( "fa-rmb"),
  ROUBLE( "fa-rouble"),
  RUB( "fa-rub"),
  RUBLE( "fa-ruble"),
  RUPEE( "fa-rupee"),
  SHEKEL( "fa-shekel"),
  SHEQEL( "fa-sheqel"),
  TRY( "fa-try"),
  TURKISH_LIRA( "fa-turkish-lira"),
  USD( "fa-usd"),
  WON( "fa-won"),
  YEN( "fa-yen"),
  ALIGN_CENTER( "fa-align-center"),
  ALIGN_JUSTIFY( "fa-align-justify"),
  ALIGN_LEFT( "fa-align-left"),
  ALIGN_RIGHT( "fa-align-right"),
  BOLD( "fa-bold"),
  CHAIN( "fa-chain"),
  CHAIN_BROKEN( "fa-chain-broken"),
  CLIPBOARD( "fa-clipboard"),
  COLUMNS( "fa-columns"),
  COPY( "fa-copy"),
  CUT( "fa-cut"),
  DEDENT( "fa-dedent"),
  ERASER( "fa-eraser"),
  FILES_O( "fa-files-o"),
  FLOPPY_O( "fa-floppy-o"),
  FONT( "fa-font"),
  HEADER( "fa-header"),
  INDENT( "fa-indent"),
  ITALIC( "fa-italic"),
  LINK( "fa-link"),
  LIST( "fa-list"),
  LIST_ALT( "fa-list-alt"),
  LIST_OL( "fa-list-ol"),
  LIST_UL( "fa-list-ul"),
  OUTDENT( "fa-outdent"),
  PAPERCLIP( "fa-paperclip"),
  PARAGRAPH( "fa-paragraph"),
  PASTE( "fa-paste"),
  REPEAT( "fa-repeat"),
  ROTATE_LEFT( "fa-rotate-left"),
  ROTATE_RIGHT( "fa-rotate-right"),
  SAVE( "fa-save"),
  SCISSORS( "fa-scissors"),
  STRIKETHROUGH( "fa-strikethrough"),
  SUBSCRIPT( "fa-subscript"),
  SUPERSCRIPT( "fa-superscript"),
  TABLE( "fa-table"),
  TEXT_HEIGHT( "fa-text-height"),
  TEXT_WIDTH( "fa-text-width"),
  TH( "fa-th"),
  TH_LARGE( "fa-th-large"),
  TH_LIST( "fa-th-list"),
  UNDERLINE( "fa-underline"),
  UNDO( "fa-undo"),
  UNLINK( "fa-unlink"),
  ANGLE_DOUBLE_DOWN( "fa-angle-double-down"),
  ANGLE_DOUBLE_LEFT( "fa-angle-double-left"),
  ANGLE_DOUBLE_RIGHT( "fa-angle-double-right"),
  ANGLE_DOUBLE_UP( "fa-angle-double-up"),
  ANGLE_DOWN( "fa-angle-down"),
  ANGLE_LEFT( "fa-angle-left"),
  ANGLE_RIGHT( "fa-angle-right"),
  ANGLE_UP( "fa-angle-up"),
  ARROW_CIRCLE_DOWN( "fa-arrow-circle-down"),
  ARROW_CIRCLE_LEFT( "fa-arrow-circle-left"),
  ARROW_CIRCLE_O_DOWN( "fa-arrow-circle-o-down"),
  ARROW_CIRCLE_O_LEFT( "fa-arrow-circle-o-left"),
  ARROW_CIRCLE_O_RIGHT( "fa-arrow-circle-o-right"),
  ARROW_CIRCLE_O_UP( "fa-arrow-circle-o-up"),
  ARROW_CIRCLE_RIGHT( "fa-arrow-circle-right"),
  ARROW_CIRCLE_UP( "fa-arrow-circle-up"),
  ARROW_DOWN( "fa-arrow-down"),
  ARROW_LEFT( "fa-arrow-left"),
  ARROW_RIGHT( "fa-arrow-right"),
  ARROW_UP( "fa-arrow-up"),
  ARROWS( "fa-arrows"),
  ARROWS_H( "fa-arrows-h"),
  ARROWS_V( "fa-arrows-v"),
  CARET_DOWN( "fa-caret-down"),
  CARET_LEFT( "fa-caret-left"),
  CARET_RIGHT( "fa-caret-right"),
  CARET_SQUARE_O_DOWN( "fa-caret-square-o-down"),
  CARET_SQUARE_O_LEFT( "fa-caret-square-o-left"),
  CARET_SQUARE_O_RIGHT( "fa-caret-square-o-right"),
  CARET_SQUARE_O_UP( "fa-caret-square-o-up"),
  CARET_UP( "fa-caret-up"),
  CHEVRON_CIRCLE_DOWN( "fa-chevron-circle-down"),
  CHEVRON_CIRCLE_LEFT( "fa-chevron-circle-left"),
  CHEVRON_CIRCLE_RIGHT( "fa-chevron-circle-right"),
  CHEVRON_CIRCLE_UP( "fa-chevron-circle-up"),
  CHEVRON_DOWN( "fa-chevron-down"),
  CHEVRON_LEFT( "fa-chevron-left"),
  CHEVRON_RIGHT( "fa-chevron-right"),
  CHEVRON_UP( "fa-chevron-up"),
  HAND_O_DOWN( "fa-hand-o-down"),
  HAND_O_LEFT( "fa-hand-o-left"),
  HAND_O_RIGHT( "fa-hand-o-right"),
  HAND_O_UP( "fa-hand-o-up"),
  LONG_ARROW_DOWN( "fa-long-arrow-down"),
  LONG_ARROW_LEFT( "fa-long-arrow-left"),
  LONG_ARROW_RIGHT( "fa-long-arrow-right"),
  LONG_ARROW_UP( "fa-long-arrow-up"),
  TOGGLE_DOWN( "fa-toggle-down"),
  TOGGLE_LEFT( "fa-toggle-left"),
  TOGGLE_RIGHT( "fa-toggle-right"),
  TOGGLE_UP( "fa-toggle-up"),
  ARROWS_ALT( "fa-arrows-alt"),
  BACKWARD( "fa-backward"),
  COMPRESS( "fa-compress"),
  EJECT( "fa-eject"),
  EXPAND( "fa-expand"),
  FAST_BACKWARD( "fa-fast-backward"),
  FAST_FORWARD( "fa-fast-forward"),
  FORWARD( "fa-forward"),
  PAUSE( "fa-pause"),
  PLAY( "fa-play"),
  PLAY_CIRCLE( "fa-play-circle"),
  PLAY_CIRCLE_O( "fa-play-circle-o"),
  STEP_BACKWARD( "fa-step-backward"),
  STEP_FORWARD( "fa-step-forward"),
  STOP( "fa-stop"),
  ADN( "fa-adn"),
  ANDROID( "fa-android"),
  ANGELLIST( "fa-angellist"),
  APPLE( "fa-apple"),
  BEHANCE( "fa-behance"),
  BEHANCE_SQUARE( "fa-behance-square"),
  BITBUCKET( "fa-bitbucket"),
  BITBUCKET_SQUARE( "fa-bitbucket-square"),
  BITCOIN( "fa-bitcoin"),
  BTC( "fa-btc"),
  CC_AMEX( "fa-cc-amex"),
  CC_DISCOVER( "fa-cc-discover"),
  CC_MASTERCARD( "fa-cc-mastercard"),
  CC_PAYPAL( "fa-cc-paypal"),
  CC_STRIPE( "fa-cc-stripe"),
  CC_VISA( "fa-cc-visa"),
  CODEPEN( "fa-codepen"),
  CSS3( "fa-css3"),
  DELICIOUS( "fa-delicious"),
  DEVIANTART( "fa-deviantart"),
  DIGG( "fa-digg"),
  DRIBBBLE( "fa-dribbble"),
  DROPBOX( "fa-dropbox"),
  DRUPAL( "fa-drupal"),
  EMPIRE( "fa-empire"),
  FACEBOOK( "fa-facebook"),
  FACEBOOK_SQUARE( "fa-facebook-square"),
  FLICKR( "fa-flickr"),
  FOURSQUARE( "fa-foursquare"),
  GE( "fa-ge"),
  GIT( "fa-git"),
  GIT_SQUARE( "fa-git-square"),
  GITHUB( "fa-github"),
  GITHUB_ALT( "fa-github-alt"),
  GITHUB_SQUARE( "fa-github-square"),
  GITTIP( "fa-gittip"),
  GOOGLE( "fa-google"),
  GOOGLE_PLUS( "fa-google-plus"),
  GOOGLE_PLUS_SQUARE( "fa-google-plus-square"),
  GOOGLE_WALLET( "fa-google-wallet"),
  HACKER_NEWS( "fa-hacker-news"),
  HTML5( "fa-html5"),
  INSTAGRAM( "fa-instagram"),
  IOXHOST( "fa-ioxhost"),
  JOOMLA( "fa-joomla"),
  JSFIDDLE( "fa-jsfiddle"),
  LASTFM( "fa-lastfm"),
  LASTFM_SQUARE( "fa-lastfm-square"),
  LINKEDIN( "fa-linkedin"),
  LINKEDIN_SQUARE( "fa-linkedin-square"),
  LINUX( "fa-linux"),
  MAXCDN( "fa-maxcdn"),
  MEANPATH( "fa-meanpath"),
  OPENID( "fa-openid"),
  PAGELINES( "fa-pagelines"),
  PAYPAL( "fa-paypal"),
  PIED_PIPER( "fa-pied-piper"),
  PIED_PIPER_ALT( "fa-pied-piper-alt"),
  PINTEREST( "fa-pinterest"),
  PINTEREST_SQUARE( "fa-pinterest-square"),
  QQ( "fa-qq"),
  RA( "fa-ra"),
  REBEL( "fa-rebel"),
  REDDIT( "fa-reddit"),
  REDDIT_SQUARE( "fa-reddit-square"),
  RENREN( "fa-renren"),
  SHARE_ALT( "fa-share-alt"),
  SHARE_ALT_SQUARE( "fa-share-alt-square"),
  SKYPE( "fa-skype"),
  SLACK( "fa-slack"),
  SLIDESHARE( "fa-slideshare"),
  SOUNDCLOUD( "fa-soundcloud"),
  SPOTIFY( "fa-spotify"),
  STACK_EXCHANGE( "fa-stack-exchange"),
  STACK_OVERFLOW( "fa-stack-overflow"),
  STEAM( "fa-steam"),
  STEAM_SQUARE( "fa-steam-square"),
  STUMBLEUPON( "fa-stumbleupon"),
  STUMBLEUPON_CIRCLE( "fa-stumbleupon-circle"),
  TENCENT_WEIBO( "fa-tencent-weibo"),
  TRELLO( "fa-trello"),
  TUMBLR( "fa-tumblr"),
  TUMBLR_SQUARE( "fa-tumblr-square"),
  TWITCH( "fa-twitch"),
  TWITTER( "fa-twitter"),
  TWITTER_SQUARE( "fa-twitter-square"),
  VIMEO_SQUARE( "fa-vimeo-square"),
  VINE( "fa-vine"),
  VK( "fa-vk"),
  WECHAT( "fa-wechat"),
  WEIBO( "fa-weibo"),
  WEIXIN( "fa-weixin"),
  WINDOWS( "fa-windows"),
  WORDPRESS( "fa-wordpress"),
  XING( "fa-xing"),
  XING_SQUARE( "fa-xing-square"),
  YAHOO( "fa-yahoo"),
  YELP( "fa-yelp"),
  YOUTUBE( "fa-youtube"),
  YOUTUBE_PLAY( "fa-youtube-play"),
  YOUTUBE_SQUARE( "fa-youtube-square"),
  AMBULANCE( "fa-ambulance"),
  H_SQUARE( "fa-h-square"),
  HOSPITAL_O( "fa-hospital-o"),
  MEDKIT( "fa-medkit"),
  PLUS_SQUARE( "fa-plus-square"),
  STETHOSCOPE( "fa-stethoscope"),
  USER_MD( "fa-user-md"),
  WHEELCHAIR( "fa-wheelchair");

  /** The FontAwesome CSS class for this icon (e.g. "fa-user"). */
  private String key;

  /** Base marker class required on every element that renders a FontAwesome icon. */
  private static final String prefix = "fa";

  private FontAwesome(String key) {
    this.key = key;
  }

  /** @return this icon's CSS class (without the "fa" marker class). */
  public String key() {
    return key;
  }

  /** Creates a fresh span component styled with this icon's classes. */
  public UIComponent asComponent(){
    Tag<SpanElement> icon = Tag.asSpan();
    this.update(icon);
    return icon;
  }

  /** Attaches this icon to the given component via the Icons helper. */
  public void attachTo(UIComponent component) {
    Icons.attachTo(component, this);
  }

  @Override
  public void update(UIComponent component) {
    // Replaces the component's css with the base "fa" marker, then adds this
    // icon's class. NOTE(review): Styles.LARGE ("fa-lg") is always appended —
    // confirm that forcing the large size for every icon is intended.
    component.asComponent().css().set(prefix).append(this.key).append(Styles.LARGE);
  }

  @Override
  public boolean is(UIComponent component) {
    // NOTE(review): checks only the generic "fa" marker class, so this returns
    // true when ANY FontAwesome icon is attached to the component, not
    // specifically this one — confirm whether the key should also be checked.
    return component.asComponent().css().contains(prefix);
  }

  @Override
  public String toString() {
    // Full class attribute value, e.g. "fa fa-user".
    return prefix + " " + this.key;
  }

  /**
   * FontAwesome utility/modifier CSS classes (size, rotation, flip, spin...).
   * The StyleChooser ensures the modifiers are mutually managed on a component.
   */
  public static class Styles extends CssClass {
    private Styles(String name) {
      super(name);
    }

    @Override
    protected StyleChooser chooser() {
      return STYLES;
    }

    public static final Styles LARGE = new Styles("fa-lg");
    public static final Styles TWICE_LARGE = new Styles("fa-2x");
    public static final Styles THREE_TIMES_LARGE = new Styles("fa-3x");
    public static final Styles FOUR_TIMES_LARGE = new Styles("fa-4x");
    public static final Styles FIVE_TIMES_LARGE = new Styles("fa-5x");
    public static final Styles FIXED = new Styles("fa-fw");
    public static final Styles BORDERED = new Styles("fa-border");
    public static final Styles SPINNING = new Styles("fa-spin");
    public static final Styles ROTATE_90 = new Styles("fa-rotate-90");
    public static final Styles ROTATE_180 = new Styles("fa-rotate-180");
    public static final Styles ROTATE_270 = new Styles("fa-rotate-270");
    public static final Styles FLIP_HORIZONTAL = new Styles("fa-flip-horizontal");
    public static final Styles FLIP_VERTICAL = new Styles("fa-flip-vertical");
    public static final Styles INVERSE = new Styles("fa-inverse");
    private static final StyleChooser STYLES = new StyleChooser(LARGE, TWICE_LARGE, THREE_TIMES_LARGE, FOUR_TIMES_LARGE, FIVE_TIMES_LARGE, FIXED, BORDERED, SPINNING, ROTATE_90, ROTATE_180, ROTATE_270, FLIP_HORIZONTAL, FLIP_VERTICAL, INVERSE);
  }
}
| |
//============================================================================
//
// Copyright (C) 2006-2022 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
//============================================================================
package org.talend.components.snowflake.tsnowflakeoutput;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.talend.components.api.component.PropertyPathConnector;
import org.talend.components.common.tableaction.TableAction;
import org.talend.components.snowflake.SnowflakeDbTypeProperties;
import org.talend.components.snowflake.tsnowflakeoutput.TSnowflakeOutputProperties.OutputAction;
import org.talend.daikon.avro.AvroUtils;
import org.talend.daikon.avro.SchemaConstants;
import org.talend.daikon.properties.presentation.Form;
import org.talend.daikon.properties.property.Property;
import org.talend.daikon.properties.property.SchemaProperty;
/**
* Unit tests for {@link TSnowflakeOutputProperties} class
*/
public class TSnowflakeOutputPropertiesTest {
// Avro schema property key used by the Studio to carry the Talend DI type of a column.
private static final String TALEND6_COLUMN_TALEND_TYPE = "di.column.talendType";
TSnowflakeOutputProperties outputProperties;
// Fresh, fully initialized properties instance before every test.
@Before
public void reset() {
outputProperties = new TSnowflakeOutputProperties("output");
outputProperties.init();
}
/**
 * Widget visibility must react to output-action and table-action changes:
 * the upsert key column stays hidden while useSchemaKeysForUpsert is on,
 * and the personal-DB-type widgets only appear for a CREATE table action.
 */
@Test
public void testLayoutOnOutputActionChange() {
boolean isOutputActionPropertyVisible;
boolean isUpsertKeyColumnVisible;
boolean isUpsertKeyColumnVisibleWhenOutputActionIsUpsert;
boolean isTableActionProperyVisible;
Form main = outputProperties.getForm(Form.MAIN);
Form advanced = outputProperties.getForm(Form.ADVANCED);
// Snapshot of the default (INSERT / NONE) layout.
isOutputActionPropertyVisible = main.getWidget(outputProperties.outputAction).isVisible();
isUpsertKeyColumnVisible = main.getWidget(outputProperties.upsertKeyColumn).isVisible();
isTableActionProperyVisible = main.getWidget(outputProperties.tableAction).isVisible();
boolean usePersonalDBTypePropertyVisible = advanced.getWidget(outputProperties.usePersonalDBType).isVisible();
boolean isDbTypePropertyVisible = advanced.getWidget(outputProperties.dbtypeTable).isVisible();
// Switch to UPSERT + CREATE and re-run the after-triggers.
outputProperties.outputAction.setValue(OutputAction.UPSERT);
outputProperties.afterOutputAction();
outputProperties.tableAction.setValue(TableAction.TableActionEnum.CREATE);
outputProperties.afterTableAction();
boolean usePersonalDBTypePropertyVisibleIfCreateAction = advanced.getWidget(outputProperties.usePersonalDBType).isVisible();
boolean isDbTypePropertyVisibleIfCreateAction = advanced.getWidget(outputProperties.dbtypeTable).isVisible();
boolean isUseSchemaKeysVisible = advanced.getWidget(outputProperties.useSchemaKeysForUpsert).isVisible();
isUpsertKeyColumnVisibleWhenOutputActionIsUpsert = main.getWidget(outputProperties.upsertKeyColumn).isVisible();
assertTrue(isOutputActionPropertyVisible);
assertFalse(isUpsertKeyColumnVisible);
// Still hidden because useSchemaKeysForUpsert defaults to true (see testDefaultValue).
assertFalse(isUpsertKeyColumnVisibleWhenOutputActionIsUpsert);
assertTrue(isUseSchemaKeysVisible);
assertTrue(isTableActionProperyVisible);
assertFalse(usePersonalDBTypePropertyVisible);
assertFalse(isDbTypePropertyVisible);
// usePersonalDBType becomes visible for CREATE, but the dbtype table stays
// hidden until usePersonalDBType itself is checked.
assertTrue(usePersonalDBTypePropertyVisibleIfCreateAction);
assertFalse(isDbTypePropertyVisibleIfCreateAction);
}
/**
 * Verifies the documented defaults: INSERT action, uppercase conversion on,
 * NONE table action, empty personal DB-type table, schema keys used for upsert.
 */
@Test
public void testDefaultValue() {
OutputAction defaultValueOutputAction;
boolean defaultConvertColumnsAndTableToUppercase;
TableAction.TableActionEnum tableAction;
boolean useSchemaDatePattern;
defaultValueOutputAction = outputProperties.outputAction.getValue();
defaultConvertColumnsAndTableToUppercase = outputProperties.convertColumnsAndTableToUppercase.getValue();
tableAction = outputProperties.tableAction.getValue();
useSchemaDatePattern = outputProperties.useSchemaDatePattern.getValue();
boolean defaultUsePersonalDBType = outputProperties.usePersonalDBType.getValue();
List<String> defaultDBTypeColumns = outputProperties.dbtypeTable.column.getValue();
List<String> defaultDBTypeType = outputProperties.dbtypeTable.dbtype.getValue();
assertEquals(defaultValueOutputAction, OutputAction.INSERT);
assertTrue(defaultConvertColumnsAndTableToUppercase);
assertEquals(TableAction.TableActionEnum.NONE, tableAction);
assertFalse(defaultUsePersonalDBType);
assertEquals(Collections.emptyList(), defaultDBTypeColumns);
assertEquals(Collections.emptyList(), defaultDBTypeType);
assertFalse(useSchemaDatePattern);
assertTrue(outputProperties.useSchemaKeysForUpsert.getValue());
}
/**
 * The widgets that drive layout refreshes must have their after-triggers
 * registered (callAfter), otherwise the UI never reacts to value changes.
 */
@Test
public void testTriggers() {
Form main = outputProperties.getForm(Form.MAIN);
Form advanced = outputProperties.getForm(Form.ADVANCED);
boolean isOutputActionCalledAfter = main.getWidget(outputProperties.outputAction).isCallAfter();
boolean isUsePersonalDBTypeAfter = advanced.getWidget(outputProperties.usePersonalDBType).isCallAfter();
assertTrue(isOutputActionCalledAfter);
assertTrue(isUsePersonalDBTypeAfter);
}
/** With an input connection the only connector exposed is the reject one. */
@Test
public void testGetAllSchemaPropertiesConnectors() {
Set<PropertyPathConnector> schemaPropertyForInputConnection;
schemaPropertyForInputConnection = outputProperties.getAllSchemaPropertiesConnectors(true);
assertEquals(1, schemaPropertyForInputConnection.size());
assertTrue(schemaPropertyForInputConnection.contains(outputProperties.REJECT_CONNECTOR));
}
/** Without an input connection a single flow connector is exposed. */
@Test
public void testGetAllSchemaPropertiesConnectorsForOutputConnection() {
Set<PropertyPathConnector> schemaPropertyForOutputConnection;
schemaPropertyForOutputConnection = outputProperties.getAllSchemaPropertiesConnectors(false);
assertEquals(1, schemaPropertyForOutputConnection.size());
// NOTE(review): an earlier author suspected the production method returns
// MAIN_CONNECTOR where FLOW_CONNECTOR was intended — this assertion pins the
// current behavior; confirm intent against getAllSchemaPropertiesConnectors(false).
assertTrue(schemaPropertyForOutputConnection.contains(outputProperties.FLOW_CONNECTOR));
}
/**
 * getFieldNames must return every field of the runtime schema, in declaration
 * order, regardless of logical types, db-length props or nullable unions.
 */
@Test
public void testGetFieldNames() {
Schema runtimeSchema;
Property<Schema> schemaProperty;
List<String> propertyFieldNames;
List<String> expectedPropertyFieldNames;
runtimeSchema = SchemaBuilder.builder().record("Record").fields() //
.name("logicalTime").type(AvroUtils._logicalTime()).noDefault() //
.name("logicalDate").type(AvroUtils._logicalDate()).noDefault() //
.name("logicalTimestamp").type(AvroUtils._logicalTimestamp()).noDefault() //
.name("id").type().intType().noDefault() //
.name("name").type().stringType().noDefault() //
.name("age").type().intType().noDefault() //
.name("valid").type().booleanType().noDefault() //
.name("address").type().stringType().noDefault() //
.name("comment").prop(SchemaConstants.TALEND_COLUMN_DB_LENGTH, "255").type().stringType().noDefault() //
.name("createdDate").prop(TALEND6_COLUMN_TALEND_TYPE, "id_Date") //
.prop(SchemaConstants.TALEND_COLUMN_PATTERN, "yyyy-MM-dd'T'HH:mm:ss'000Z'").type().nullable().longType() //
.noDefault() //
.endRecord(); //
schemaProperty = new SchemaProperty("schema");
schemaProperty.setValue(runtimeSchema);
expectedPropertyFieldNames = new ArrayList<>();
expectedPropertyFieldNames.add("logicalTime");
expectedPropertyFieldNames.add("logicalDate");
expectedPropertyFieldNames.add("logicalTimestamp");
expectedPropertyFieldNames.add("id");
expectedPropertyFieldNames.add("name");
expectedPropertyFieldNames.add("age");
expectedPropertyFieldNames.add("valid");
expectedPropertyFieldNames.add("address");
expectedPropertyFieldNames.add("comment");
expectedPropertyFieldNames.add("createdDate");
propertyFieldNames = outputProperties.getFieldNames(schemaProperty);
assertEquals(propertyFieldNames, expectedPropertyFieldNames);
}
/** An empty record yields an empty field-name list (no NPE, no phantom fields). */
@Test
public void testGetFieldsOfEmptySchema() {
Schema emptySchema;
Property<Schema> emptySchemaProperty;
List<String> emptyPropertyFieldNames;
emptySchema = SchemaBuilder.builder().record("EmptyRecord").fields().endRecord();
emptySchemaProperty = new SchemaProperty("Empty schema");
emptySchemaProperty.setValue(emptySchema);
emptyPropertyFieldNames = outputProperties.getFieldNames(emptySchemaProperty);
assertTrue(emptyPropertyFieldNames.isEmpty());
}
/** afterTableName repopulates upsertKeyColumn's possible values from the schema. */
@Test
public void testAfterTableName() throws Exception {
Schema schema = SchemaBuilder.builder().record("Record").fields() //
.requiredInt("id")
.requiredString("name")
.requiredInt("age")
.endRecord();
outputProperties.setupProperties();
outputProperties.table.main.schema.setValue(schema);
Assert.assertTrue(outputProperties.upsertKeyColumn.getPossibleValues().isEmpty());
outputProperties.table.afterTableName();
Assert.assertEquals(3, outputProperties.upsertKeyColumn.getPossibleValues().size());
}
/** With no schema set, afterTableName must leave the possible values empty (and not throw). */
@Test
public void testAfterTableWithNotSetSchema() throws Exception {
outputProperties.setupProperties();
Assert.assertTrue(outputProperties.upsertKeyColumn.getPossibleValues().isEmpty());
outputProperties.table.afterTableName();
Assert.assertTrue(outputProperties.upsertKeyColumn.getPossibleValues().isEmpty());
}
/**
 * afterSchema must rebuild the reject schema (original field plus the
 * added reject metadata columns — 9 in total for a 1-field input) and
 * offer the schema columns in the personal DB-type table.
 */
@Test
public void testAfterSchema() {
Schema schema = SchemaBuilder.builder().record("Record").fields() //
.requiredInt("id").endRecord();
outputProperties.setupProperties();
outputProperties.table.main.schema.setValue(schema);
Assert.assertTrue(outputProperties.schemaReject.schema.getValue().getFields().isEmpty());
outputProperties.table.schemaListener.afterSchema();
Assert.assertEquals(9, outputProperties.schemaReject.schema.getValue().getFields().size());
Assert.assertEquals(1, outputProperties.dbtypeTable.column.getPossibleValues().size());
Assert.assertEquals("id", outputProperties.dbtypeTable.column.getPossibleValues().get(0));
}
/** Unchecking useSchemaKeysForUpsert reveals the manual upsert key column widget. */
@Test
public void testAfterUseSchemaDefinedKeysForUpsert() {
outputProperties.outputAction.setValue(OutputAction.UPSERT);
assertFalse(outputProperties.getForm(Form.MAIN).getWidget(outputProperties.upsertKeyColumn).isVisible());
outputProperties.useSchemaKeysForUpsert.setValue(false);
outputProperties.afterUseSchemaKeysForUpsert();
assertTrue(outputProperties.getForm(Form.MAIN).getWidget(outputProperties.upsertKeyColumn).isVisible());
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/enums/served_asset_field_type.proto
package com.google.ads.googleads.v9.enums;
/**
* <pre>
* Container for enum describing possible asset field types.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum}
*/
// NOTE(review): protoc-generated wrapper message whose only content is the nested
// enum — do not hand-edit; regenerate from served_asset_field_type.proto instead.
public final class ServedAssetFieldTypeEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum)
ServedAssetFieldTypeEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Use ServedAssetFieldTypeEnum.newBuilder() to construct.
private ServedAssetFieldTypeEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ServedAssetFieldTypeEnum() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ServedAssetFieldTypeEnum();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Parsing constructor: the message declares no fields, so everything read from
// the wire is preserved in unknownFields (tag 0 = end of stream).
private ServedAssetFieldTypeEnum(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.enums.ServedAssetFieldTypeProto.internal_static_google_ads_googleads_v9_enums_ServedAssetFieldTypeEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.enums.ServedAssetFieldTypeProto.internal_static_google_ads_googleads_v9_enums_ServedAssetFieldTypeEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.class, com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.Builder.class);
}
/**
 * <pre>
 * The possible asset field types.
 * </pre>
 *
 * Protobuf enum {@code google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.ServedAssetFieldType}
 */
public enum ServedAssetFieldType
implements com.google.protobuf.ProtocolMessageEnum {
/**
 * <pre>
 * No value has been specified.
 * </pre>
 *
 * <code>UNSPECIFIED = 0;</code>
 */
UNSPECIFIED(0),
/**
 * <pre>
 * The received value is not known in this version.
 * This is a response-only value.
 * </pre>
 *
 * <code>UNKNOWN = 1;</code>
 */
UNKNOWN(1),
/**
 * <pre>
 * The asset is used in headline 1.
 * </pre>
 *
 * <code>HEADLINE_1 = 2;</code>
 */
HEADLINE_1(2),
/**
 * <pre>
 * The asset is used in headline 2.
 * </pre>
 *
 * <code>HEADLINE_2 = 3;</code>
 */
HEADLINE_2(3),
/**
 * <pre>
 * The asset is used in headline 3.
 * </pre>
 *
 * <code>HEADLINE_3 = 4;</code>
 */
HEADLINE_3(4),
/**
 * <pre>
 * The asset is used in description 1.
 * </pre>
 *
 * <code>DESCRIPTION_1 = 5;</code>
 */
DESCRIPTION_1(5),
/**
 * <pre>
 * The asset is used in description 2.
 * </pre>
 *
 * <code>DESCRIPTION_2 = 6;</code>
 */
DESCRIPTION_2(6),
// Sentinel for wire values newer than this generated code; has no number.
UNRECOGNIZED(-1),
;
/**
 * <pre>
 * No value has been specified.
 * </pre>
 *
 * <code>UNSPECIFIED = 0;</code>
 */
public static final int UNSPECIFIED_VALUE = 0;
/**
 * <pre>
 * The received value is not known in this version.
 * This is a response-only value.
 * </pre>
 *
 * <code>UNKNOWN = 1;</code>
 */
public static final int UNKNOWN_VALUE = 1;
/**
 * <pre>
 * The asset is used in headline 1.
 * </pre>
 *
 * <code>HEADLINE_1 = 2;</code>
 */
public static final int HEADLINE_1_VALUE = 2;
/**
 * <pre>
 * The asset is used in headline 2.
 * </pre>
 *
 * <code>HEADLINE_2 = 3;</code>
 */
public static final int HEADLINE_2_VALUE = 3;
/**
 * <pre>
 * The asset is used in headline 3.
 * </pre>
 *
 * <code>HEADLINE_3 = 4;</code>
 */
public static final int HEADLINE_3_VALUE = 4;
/**
 * <pre>
 * The asset is used in description 1.
 * </pre>
 *
 * <code>DESCRIPTION_1 = 5;</code>
 */
public static final int DESCRIPTION_1_VALUE = 5;
/**
 * <pre>
 * The asset is used in description 2.
 * </pre>
 *
 * <code>DESCRIPTION_2 = 6;</code>
 */
public static final int DESCRIPTION_2_VALUE = 6;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static ServedAssetFieldType valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 */
public static ServedAssetFieldType forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 2: return HEADLINE_1;
case 3: return HEADLINE_2;
case 4: return HEADLINE_3;
case 5: return DESCRIPTION_1;
case 6: return DESCRIPTION_2;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<ServedAssetFieldType>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
ServedAssetFieldType> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<ServedAssetFieldType>() {
public ServedAssetFieldType findValueByNumber(int number) {
return ServedAssetFieldType.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.getDescriptor().getEnumTypes().get(0);
}
private static final ServedAssetFieldType[] VALUES = values();
public static ServedAssetFieldType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private ServedAssetFieldType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.ServedAssetFieldType)
}
private byte memoizedIsInitialized = -1;
// No required fields in this message, so initialization always succeeds.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum)) {
return super.equals(obj);
}
com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum other = (com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * <pre>
 * Container for enum describing possible asset field types.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum}
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum)
com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.enums.ServedAssetFieldTypeProto.internal_static_google_ads_googleads_v9_enums_ServedAssetFieldTypeEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.enums.ServedAssetFieldTypeProto.internal_static_google_ads_googleads_v9_enums_ServedAssetFieldTypeEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.class, com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v9.enums.ServedAssetFieldTypeProto.internal_static_google_ads_googleads_v9_enums_ServedAssetFieldTypeEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum build() {
com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum buildPartial() {
com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum result = new com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum) {
return mergeFrom((com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum other) {
if (other == com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum)
private static final com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum();
}
public static com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ServedAssetFieldTypeEnum>
PARSER = new com.google.protobuf.AbstractParser<ServedAssetFieldTypeEnum>() {
@java.lang.Override
public ServedAssetFieldTypeEnum parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ServedAssetFieldTypeEnum(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<ServedAssetFieldTypeEnum> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ServedAssetFieldTypeEnum> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v9.enums.ServedAssetFieldTypeEnum getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
package org.andengine.entity.text;
import java.util.ArrayList;
import org.andengine.engine.camera.Camera;
import org.andengine.entity.shape.Shape;
import org.andengine.entity.text.exception.OutOfCharactersException;
import org.andengine.entity.text.vbo.HighPerformanceTextVertexBufferObject;
import org.andengine.entity.text.vbo.ITextVertexBufferObject;
import org.andengine.opengl.font.FontUtils;
import org.andengine.opengl.font.IFont;
import org.andengine.opengl.shader.PositionColorTextureCoordinatesShaderProgram;
import org.andengine.opengl.shader.ShaderProgram;
import org.andengine.opengl.shader.constants.ShaderProgramConstants;
import org.andengine.opengl.util.GLState;
import org.andengine.opengl.vbo.DrawType;
import org.andengine.opengl.vbo.VertexBufferObjectManager;
import org.andengine.opengl.vbo.attribute.VertexBufferObjectAttributes;
import org.andengine.opengl.vbo.attribute.VertexBufferObjectAttributesBuilder;
import org.andengine.util.adt.align.HorizontalAlign;
import org.andengine.util.adt.data.constants.DataConstants;
import org.andengine.util.adt.list.FloatArrayList;
import org.andengine.util.adt.list.IFloatList;
import android.opengl.GLES20;
/**
* TODO Try Degenerate Triangles?
*
* (c) 2010 Nicolas Gramlich
* (c) 2011 Zynga Inc.
*
* @author Nicolas Gramlich
* @since 10:54:59 - 03.04.2010
*/
public class Text extends Shape {
// ===========================================================
// Constants
// ===========================================================
public static final float LEADING_DEFAULT = 0;
// Per-vertex float layout: [x, y, packedColor, u, v] — see
// VERTEXBUFFEROBJECTATTRIBUTES_DEFAULT below for the matching GL attribute spec.
public static final int VERTEX_INDEX_X = 0;
public static final int VERTEX_INDEX_Y = Text.VERTEX_INDEX_X + 1;
public static final int COLOR_INDEX = Text.VERTEX_INDEX_Y + 1;
public static final int TEXTURECOORDINATES_INDEX_U = Text.COLOR_INDEX + 1;
public static final int TEXTURECOORDINATES_INDEX_V = Text.TEXTURECOORDINATES_INDEX_U + 1;
public static final int VERTEX_SIZE = 2 + 1 + 2;
// 6 vertices per glyph — presumably two independent triangles per quad
// (the class TODO mentions degenerate triangles as an alternative).
public static final int VERTICES_PER_LETTER = 6;
public static final int LETTER_SIZE = Text.VERTEX_SIZE * Text.VERTICES_PER_LETTER;
public static final int VERTEX_STRIDE = Text.VERTEX_SIZE * DataConstants.BYTES_PER_FLOAT;
// Position: 2 floats; color: 4 normalized unsigned bytes packed into one float
// slot; texture coordinates: 2 floats.
public static final VertexBufferObjectAttributes VERTEXBUFFEROBJECTATTRIBUTES_DEFAULT = new VertexBufferObjectAttributesBuilder(3)
.add(ShaderProgramConstants.ATTRIBUTE_POSITION_LOCATION, ShaderProgramConstants.ATTRIBUTE_POSITION, 2, GLES20.GL_FLOAT, false)
.add(ShaderProgramConstants.ATTRIBUTE_COLOR_LOCATION, ShaderProgramConstants.ATTRIBUTE_COLOR, 4, GLES20.GL_UNSIGNED_BYTE, true)
.add(ShaderProgramConstants.ATTRIBUTE_TEXTURECOORDINATES_LOCATION, ShaderProgramConstants.ATTRIBUTE_TEXTURECOORDINATES, 2, GLES20.GL_FLOAT, false)
.build();
// ===========================================================
// Fields
// ===========================================================
protected final IFont mFont;
protected float mLineWidthMaximum;
protected float mLineAlignmentWidth;
protected TextOptions mTextOptions;
// Capacity (in characters) the vertex buffer was sized for; exceeding it
// raises OutOfCharactersException (see imports).
protected final int mCharactersMaximum;
protected int mCharactersToDraw;
protected int mVertexCountToDraw;
protected final int mVertexCount;
protected final ITextVertexBufferObject mTextVertexBufferObject;
protected CharSequence mText;
protected ArrayList<CharSequence> mLines = new ArrayList<CharSequence>(1);
protected IFloatList mLineWidths = new FloatArrayList(1);
// ===========================================================
// Constructors
// ===========================================================
// The constructors below form a telescoping chain: each overload fills in
// defaults (DrawType.STATIC, a fresh TextOptions, pText.length() as character
// capacity, a HighPerformanceTextVertexBufferObject, or the default shader
// program) and delegates onward to the full constructor at the bottom.
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final VertexBufferObjectManager pVertexBufferObjectManager) {
    this(pX, pY, pFont, pText, pVertexBufferObjectManager, DrawType.STATIC);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final VertexBufferObjectManager pVertexBufferObjectManager, final ShaderProgram pShaderProgram) {
    this(pX, pY, pFont, pText, pVertexBufferObjectManager, DrawType.STATIC, pShaderProgram);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final VertexBufferObjectManager pVertexBufferObjectManager, final DrawType pDrawType) {
    this(pX, pY, pFont, pText, new TextOptions(), pVertexBufferObjectManager, pDrawType);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final VertexBufferObjectManager pVertexBufferObjectManager, final DrawType pDrawType, final ShaderProgram pShaderProgram) {
    this(pX, pY, pFont, pText, new TextOptions(), pVertexBufferObjectManager, pDrawType, pShaderProgram);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final TextOptions pTextOptions, final VertexBufferObjectManager pVertexBufferObjectManager) {
    this(pX, pY, pFont, pText, pTextOptions, pVertexBufferObjectManager, DrawType.STATIC);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final TextOptions pTextOptions, final VertexBufferObjectManager pVertexBufferObjectManager, final ShaderProgram pShaderProgram) {
    this(pX, pY, pFont, pText, pTextOptions, pVertexBufferObjectManager, DrawType.STATIC, pShaderProgram);
}
// These two default the character capacity to the initial text's length, so
// the Text cannot later be set to a longer string without an exception.
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final TextOptions pTextOptions, final VertexBufferObjectManager pVertexBufferObjectManager, final DrawType pDrawType) {
    this(pX, pY, pFont, pText, pText.length(), pTextOptions, pVertexBufferObjectManager, pDrawType);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final TextOptions pTextOptions, final VertexBufferObjectManager pVertexBufferObjectManager, final DrawType pDrawType, final ShaderProgram pShaderProgram) {
    this(pX, pY, pFont, pText, pText.length(), pTextOptions, pVertexBufferObjectManager, pDrawType, pShaderProgram);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final VertexBufferObjectManager pVertexBufferObjectManager) {
    this(pX, pY, pFont, pText, pCharactersMaximum, pVertexBufferObjectManager, DrawType.STATIC);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final VertexBufferObjectManager pVertexBufferObjectManager, final ShaderProgram pShaderProgram) {
    this(pX, pY, pFont, pText, pCharactersMaximum, pVertexBufferObjectManager, DrawType.STATIC, pShaderProgram);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final VertexBufferObjectManager pVertexBufferObjectManager, final DrawType pDrawType) {
    this(pX, pY, pFont, pText, pCharactersMaximum, new TextOptions(), pVertexBufferObjectManager, pDrawType);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final VertexBufferObjectManager pVertexBufferObjectManager, final DrawType pDrawType, final ShaderProgram pShaderProgram) {
    this(pX, pY, pFont, pText, pCharactersMaximum, new TextOptions(), pVertexBufferObjectManager, pDrawType, pShaderProgram);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final TextOptions pTextOptions, final VertexBufferObjectManager pVertexBufferObjectManager) {
    this(pX, pY, pFont, pText, pCharactersMaximum, pTextOptions, pVertexBufferObjectManager, DrawType.STATIC);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final TextOptions pTextOptions, final VertexBufferObjectManager pVertexBufferObjectManager, final ShaderProgram pShaderProgram) {
    this(pX, pY, pFont, pText, pCharactersMaximum, pTextOptions, pVertexBufferObjectManager, DrawType.STATIC, pShaderProgram);
}
// Allocate a buffer sized for pCharactersMaximum letters (LETTER_SIZE floats each).
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final TextOptions pTextOptions, final VertexBufferObjectManager pVertexBufferObjectManager, final DrawType pDrawType) {
    this(pX, pY, pFont, pText, pCharactersMaximum, pTextOptions, new HighPerformanceTextVertexBufferObject(pVertexBufferObjectManager, Text.LETTER_SIZE * pCharactersMaximum, pDrawType, true, Text.VERTEXBUFFEROBJECTATTRIBUTES_DEFAULT));
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final TextOptions pTextOptions, final VertexBufferObjectManager pVertexBufferObjectManager, final DrawType pDrawType, final ShaderProgram pShaderProgram) {
    this(pX, pY, pFont, pText, pCharactersMaximum, pTextOptions, new HighPerformanceTextVertexBufferObject(pVertexBufferObjectManager, Text.LETTER_SIZE * pCharactersMaximum, pDrawType, true, Text.VERTEXBUFFEROBJECTATTRIBUTES_DEFAULT), pShaderProgram);
}
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final TextOptions pTextOptions, final ITextVertexBufferObject pTextVertexBufferObject) {
    this(pX, pY, pFont, pText, pCharactersMaximum, pTextOptions, pTextVertexBufferObject, PositionColorTextureCoordinatesShaderProgram.getInstance());
}
// Full constructor: all other overloads eventually land here.
public Text(final float pX, final float pY, final IFont pFont, final CharSequence pText, final int pCharactersMaximum, final TextOptions pTextOptions, final ITextVertexBufferObject pTextVertexBufferObject, final ShaderProgram pShaderProgram) {
    super(pX, pY, pShaderProgram);
    this.mFont = pFont;
    this.mTextOptions = pTextOptions;
    this.mCharactersMaximum = pCharactersMaximum;
    this.mVertexCount = Text.VERTICES_PER_LETTER * this.mCharactersMaximum;
    this.mTextVertexBufferObject = pTextVertexBufferObject;
    this.onUpdateColor();
    // setText is declared to throw OutOfCharactersException; a too-long pText
    // can therefore abort construction.
    this.setText(pText);
    this.setBlendingEnabled(true);
    this.initBlendFunction(this.mFont.getTexture());
}
// ===========================================================
// Getter & Setter
// ===========================================================
/** @return the font this text is rendered with (immutable after construction). */
public IFont getFont() {
    return this.mFont;
}
/** @return the character capacity this {@link Text} was constructed with. */
public int getCharactersMaximum() {
    return this.mCharactersMaximum;
}
/** @return the text most recently passed to {@link #setText(CharSequence)}. */
public CharSequence getText() {
    return this.mText;
}
/**
 * Replaces the displayed text: splits it into lines (honoring the current
 * {@link AutoWrap} mode), measures every line, and resizes this entity to
 * the resulting text block.
 *
 * @param pText the new text to display
 * @throws OutOfCharactersException leaves this {@link Text} object in an undefined state, until {@link #setText(CharSequence)} is called again and no {@link OutOfCharactersException} is thrown.
 */
public void setText(final CharSequence pText) throws OutOfCharactersException {
    this.mText = pText;
    final IFont font = this.mFont;

    this.mLines.clear();
    this.mLineWidths.clear();

    final boolean noWrap = (this.mTextOptions.mAutoWrap == AutoWrap.NONE);
    if (noWrap) {
        // TODO Add whitespace-trimming.
        this.mLines = FontUtils.splitLines(this.mText, this.mLines);
    } else {
        this.mLines = FontUtils.splitLines(this.mFont, this.mText, this.mLines, this.mTextOptions.mAutoWrap, this.mTextOptions.mAutoWrapWidth);
    }

    // Measure each line, remembering the widest.
    final int lineCount = this.mLines.size();
    float widestLine = 0;
    for (int lineIndex = 0; lineIndex < lineCount; lineIndex++) {
        final float measuredWidth = FontUtils.measureText(font, this.mLines.get(lineIndex));
        widestLine = Math.max(widestLine, measuredWidth);
        this.mLineWidths.add(measuredWidth);
    }
    this.mLineWidthMaximum = widestLine;

    // Without auto-wrap, lines align against the widest line; with auto-wrap,
    // against the fixed wrap width.
    this.mLineAlignmentWidth = noWrap ? this.mLineWidthMaximum : this.mTextOptions.mAutoWrapWidth;

    final float blockHeight = lineCount * font.getLineHeight() + (lineCount - 1) * this.mTextOptions.mLeading;
    this.setSize(this.mLineAlignmentWidth, blockHeight);
}
/** @return the live list of lines produced by the last {@link #setText(CharSequence)}; not a copy. */
public ArrayList<CharSequence> getLines() {
    return this.mLines;
}
/** @return the measured width of each line, parallel to {@link #getLines()}. */
public IFloatList getLineWidths() {
    return this.mLineWidths;
}
public float getLineAlignmentWidth() {
    return this.mLineAlignmentWidth;
}
public float getLineWidthMaximum() {
    return this.mLineWidthMaximum;
}
public float getLeading() {
    return this.mTextOptions.mLeading;
}
/** Sets the extra vertical space between lines and re-lays-out the text. */
public void setLeading(final float pLeading) {
    this.mTextOptions.mLeading = pLeading;
    this.invalidateText();
}
public HorizontalAlign getHorizontalAlign() {
    return this.mTextOptions.mHorizontalAlign;
}
public void setHorizontalAlign(final HorizontalAlign pHorizontalAlign) {
    this.mTextOptions.mHorizontalAlign = pHorizontalAlign;
    this.invalidateText();
}
public AutoWrap getAutoWrap() {
    return this.mTextOptions.mAutoWrap;
}
public void setAutoWrap(final AutoWrap pAutoWrap) {
    this.mTextOptions.mAutoWrap = pAutoWrap;
    this.invalidateText();
}
public float getAutoWrapWidth() {
    return this.mTextOptions.mAutoWrapWidth;
}
public void setAutoWrapWidth(final float pAutoWrapWidth) {
    this.mTextOptions.mAutoWrapWidth = pAutoWrapWidth;
    this.invalidateText();
}
public TextOptions getTextOptions() {
    return this.mTextOptions;
}
// NOTE(review): unlike the setters above, this does not call invalidateText(),
// so the new options take effect only on the next setText/invalidateText --
// presumably intentional, but worth confirming.
public void setTextOptions(final TextOptions pTextOptions) {
    this.mTextOptions = pTextOptions;
}
/**
 * Limits rendering to the first {@code pCharactersToDraw} characters.
 *
 * @param pCharactersToDraw how many characters to draw
 * @throws OutOfCharactersException if the requested count exceeds the capacity this {@link Text} was constructed with
 */
public void setCharactersToDraw(final int pCharactersToDraw) {
    if (pCharactersToDraw > this.mCharactersMaximum) {
        throw new OutOfCharactersException("Characters: maximum: '" + this.mCharactersMaximum + "' required: '" + pCharactersToDraw + "'.");
    }
    this.mVertexCountToDraw = Text.VERTICES_PER_LETTER * pCharactersToDraw;
    this.mCharactersToDraw = pCharactersToDraw;
}
// ===========================================================
// Methods for/from SuperClass/Interfaces
// ===========================================================
@Override
public ITextVertexBufferObject getVertexBufferObject() {
    return this.mTextVertexBufferObject;
}
/** Binds the font texture and this text's vertex buffer before drawing. */
@Override
protected void preDraw(final GLState pGLState, final Camera pCamera) {
    super.preDraw(pGLState, pCamera);
    this.mFont.getTexture().bind(pGLState);
    this.mTextVertexBufferObject.bind(pGLState, this.mShaderProgram);
}
/** Issues the actual draw call for the currently visible vertex count. */
@Override
protected void draw(final GLState pGLState, final Camera pCamera) {
    this.mTextVertexBufferObject.draw(GLES20.GL_TRIANGLES, this.mVertexCountToDraw);
}
/** Unbinds the vertex buffer; mirror of {@link #preDraw}. */
@Override
protected void postDraw(final GLState pGLState, final Camera pCamera) {
    this.mTextVertexBufferObject.unbind(pGLState, this.mShaderProgram);
    super.postDraw(pGLState, pCamera);
}
@Override
protected void onUpdateColor() {
    this.mTextVertexBufferObject.onUpdateColor(this);
}
@Override
protected void onUpdateVertices() {
    this.mTextVertexBufferObject.onUpdateVertices(this);
}
// ===========================================================
// Methods
// ===========================================================
/**
 * Re-runs the line splitting, measuring and resizing performed by
 * {@link #setText(CharSequence)} using the current text; called after a
 * layout-affecting option (leading, alignment, wrapping) changes.
 */
public void invalidateText() {
    this.setText(this.mText);
}
// ===========================================================
// Inner and Anonymous Classes
// ===========================================================
}
| |
package org.apache.cordova.speech;
import java.util.ArrayList;
import org.apache.cordova.PermissionHelper;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import android.content.pm.PackageManager;
import android.util.Log;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.Manifest;
/**
* Style and such borrowed from the TTS and PhoneListener plugins
*/
/**
 * Cordova plugin bridging the W3C Web Speech API to Android's
 * {@link SpeechRecognizer}. All recognizer interaction is marshalled onto the
 * main looper because SpeechRecognizer must be used from the main thread.
 */
public class SpeechRecognition extends CordovaPlugin {
    private static final String LOG_TAG = SpeechRecognition.class.getSimpleName();

    public static final String ACTION_INIT = "init";
    public static final String ACTION_SPEECH_RECOGNIZE_START = "start";
    public static final String ACTION_SPEECH_RECOGNIZE_STOP = "stop";
    public static final String ACTION_SPEECH_RECOGNIZE_ABORT = "abort";
    public static final String NOT_PRESENT_MESSAGE = "Speech recognition is not present or enabled";

    // Callback kept open (keepCallback) so multiple events can be streamed to JS.
    private CallbackContext speechRecognizerCallbackContext;
    private boolean recognizerPresent = false;
    private SpeechRecognizer recognizer;
    private boolean aborted = false;
    private boolean listening = false;
    private String lang;

    private static String [] permissions = { Manifest.permission.RECORD_AUDIO };
    private static int RECORD_AUDIO = 0;

    /** Asks the user for the RECORD_AUDIO runtime permission. */
    protected void getMicPermission()
    {
        PermissionHelper.requestPermission(this, RECORD_AUDIO, permissions[RECORD_AUDIO]);
    }

    /** Starts recognition immediately if the mic permission is held, otherwise prompts for it. */
    private void promptForMic()
    {
        if(PermissionHelper.hasPermission(this, permissions[RECORD_AUDIO])) {
            this.startRecognition();
        }
        else
        {
            getMicPermission();
        }
    }

    /**
     * Permission-dialog result: any denial aborts with an error event;
     * full grant proceeds to recognition.
     */
    public void onRequestPermissionResult(int requestCode, String[] permissions,
                                          int[] grantResults) throws JSONException
    {
        for(int r:grantResults)
        {
            if(r == PackageManager.PERMISSION_DENIED)
            {
                fireErrorEvent();
                fireEvent("end");
                return;
            }
        }
        promptForMic();
    }

    /**
     * Dispatches the JS-side actions (init/start/stop/abort).
     *
     * @return false only for unknown actions, per the CordovaPlugin contract
     */
    @Override
    public boolean execute(String action, JSONArray args, CallbackContext callbackContext) {
        if (ACTION_INIT.equals(action)) {
            if (DoInit()) {
                callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK));
                // SpeechRecognizer must be created on the main thread.
                Handler loopHandler = new Handler(Looper.getMainLooper());
                loopHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        recognizer = SpeechRecognizer.createSpeechRecognizer(cordova.getActivity().getBaseContext());
                        recognizer.setRecognitionListener(new SpeechRecognitionListner());
                    }
                });
            } else {
                callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, NOT_PRESENT_MESSAGE));
            }
        }
        else if (ACTION_SPEECH_RECOGNIZE_START.equals(action)) {
            if (!recognizerPresent) {
                callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, NOT_PRESENT_MESSAGE));
                // Bug fix: previously fell through and tried to start the
                // (null) recognizer after reporting the error.
                return true;
            }
            this.lang = args.optString(0, "en");
            this.speechRecognizerCallbackContext = callbackContext;
            this.promptForMic();
        }
        else if (ACTION_SPEECH_RECOGNIZE_STOP.equals(action)) {
            stop(false);
        }
        else if (ACTION_SPEECH_RECOGNIZE_ABORT.equals(action)) {
            stop(true);
        }
        else {
            // Was an unused local before; log so the failure is diagnosable.
            Log.e(LOG_TAG, "Unknown action: " + action);
            return false;
        }
        return true;
    }

    /** Builds the recognition intent and starts listening on the main thread. */
    private void startRecognition() {
        final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,"voice.recognition.test");
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE,lang);
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS,5);
        Handler loopHandler = new Handler(Looper.getMainLooper());
        loopHandler.post(new Runnable() {
            @Override
            public void run() {
                recognizer.startListening(intent);
            }
        });
        // NO_RESULT + keepCallback keeps the JS callback channel open for events.
        PluginResult res = new PluginResult(PluginResult.Status.NO_RESULT);
        res.setKeepCallback(true);
        this.speechRecognizerCallbackContext.sendPluginResult(res);
    }

    /**
     * Stops listening on the main thread.
     *
     * @param abort true when triggered by the abort action rather than stop
     */
    private void stop(boolean abort) {
        this.aborted = abort;
        Handler loopHandler = new Handler(Looper.getMainLooper());
        loopHandler.post(new Runnable() {
            @Override
            public void run() {
                recognizer.stopListening();
            }
        });
    }

    /**
     * Initialize the speech recognizer by checking if one exists.
     */
    private boolean DoInit() {
        this.recognizerPresent = SpeechRecognizer.isRecognitionAvailable(this.cordova.getActivity().getBaseContext());
        return this.recognizerPresent;
    }

    /**
     * Sends a Web-Speech-style "result" event, one single-alternative entry
     * per transcript, with confidences attached when available.
     */
    private void fireRecognitionEvent(ArrayList<String> transcripts, float[] confidences) {
        JSONObject event = new JSONObject();
        JSONArray results = new JSONArray();
        try {
            for(int i=0; i<transcripts.size(); i++) {
                JSONArray alternatives = new JSONArray();
                JSONObject result = new JSONObject();
                result.put("transcript", transcripts.get(i));
                result.put("final", true);
                // Guard the index too: the confidence array is not guaranteed
                // to be as long as the transcript list.
                if (confidences != null && i < confidences.length) {
                    result.put("confidence", confidences[i]);
                }
                alternatives.put(result);
                results.put(alternatives);
            }
            event.put("type", "result");
            event.put("emma", null);
            event.put("interpretation", null);
            event.put("results", results);
        } catch (JSONException e) {
            // this will never happen: all keys are non-null strings
        }
        PluginResult pr = new PluginResult(PluginResult.Status.OK, event);
        pr.setKeepCallback(true);
        this.speechRecognizerCallbackContext.sendPluginResult(pr);
    }

    /** Sends a simple typed event (start, end, audiostart, ...) to the JS side. */
    private void fireEvent(String type) {
        JSONObject event = new JSONObject();
        try {
            event.put("type",type);
        } catch (JSONException e) {
            // this will never happen
        }
        PluginResult pr = new PluginResult(PluginResult.Status.OK, event);
        pr.setKeepCallback(true);
        this.speechRecognizerCallbackContext.sendPluginResult(pr);
    }

    /** Sends an error event to the JS side. */
    private void fireErrorEvent() {
        JSONObject event = new JSONObject();
        try {
            event.put("type","error");
        } catch (JSONException e) {
            // this will never happen
        }
        PluginResult pr = new PluginResult(PluginResult.Status.ERROR, event);
        pr.setKeepCallback(true);
        this.speechRecognizerCallbackContext.sendPluginResult(pr);
    }

    // Name kept as-is ("Listner") to avoid breaking any external references;
    // consider renaming to SpeechRecognitionListener in a separate change.
    class SpeechRecognitionListner implements RecognitionListener {
        @Override
        public void onBeginningOfSpeech() {
            Log.d(LOG_TAG, "begin speech");
            fireEvent("start");
            fireEvent("audiostart");
            fireEvent("soundstart");
            fireEvent("speechstart");
        }
        @Override
        public void onBufferReceived(byte[] buffer) {
            Log.d(LOG_TAG, "buffer received");
        }
        @Override
        public void onEndOfSpeech() {
            Log.d(LOG_TAG, "end speech");
            fireEvent("speechend");
            fireEvent("soundend");
            fireEvent("audioend");
            fireEvent("end");
        }
        @Override
        public void onError(int error) {
            Log.d(LOG_TAG, "error speech "+error);
            // Permission errors must always be surfaced, even if we never
            // reached the listening state.
            if (listening || error == SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS) {
                fireErrorEvent();
                fireEvent("end");
            }
            listening = false;
        }
        @Override
        public void onEvent(int eventType, Bundle params) {
            Log.d(LOG_TAG, "event speech");
        }
        @Override
        public void onPartialResults(Bundle partialResults) {
            Log.d(LOG_TAG, "partial results");
        }
        @Override
        public void onReadyForSpeech(Bundle params) {
            Log.d(LOG_TAG, "ready for speech");
            listening = true;
        }
        @Override
        public void onResults(Bundle results) {
            Log.d(LOG_TAG, "onResults " + results);
            ArrayList<String> transcript = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            float[] confidence = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
            // Bug fix: the bundle list can be null -- previously this NPE'd.
            if (transcript != null && transcript.size() > 0) {
                Log.d(LOG_TAG, "fire recognition event");
                fireRecognitionEvent(transcript, confidence);
            } else {
                Log.d(LOG_TAG, "fire no match event");
                fireEvent("nomatch");
            }
            listening = false;
        }
        @Override
        public void onRmsChanged(float rmsdB) {
            Log.d(LOG_TAG, "rms changed");
        }
    }
}
| |
package org.jenkinsci.plugins.githubreleaseperformer;
import hudson.Extension;
import hudson.Launcher;
import hudson.Util;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.Builder;
import net.sf.json.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.jenkinsci.plugins.tokenmacro.TokenMacro;
import org.kohsuke.github.GHContent;
import org.kohsuke.github.GHReleaseBuilder;
import org.kohsuke.github.GHRepository;
import org.kohsuke.github.GitHub;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.StaplerRequest;
import java.io.*;
/**
 * Jenkins build step that creates a GitHub release from a tag, using the top
 * section of a release-notes file (everything above a separator line) as the
 * release body, and then rewrites the file with a fresh "Next release"
 * template for the following cycle.
 */
public class GitHubReleasePerformer extends Builder {
    private final String apiUrl;
    private final String tag;
    private final String branch;
    private final String releaseNotesFile;
    private final String user;
    private final String password;
    private final String owner;
    private final String repository;

    private GitHub github;
    private GHRepository ghRepository;

    // Marks the end of the current release's notes inside the notes file.
    private static final String RELEASE_NOTES_SEPARATOR = "###################";
    private static final String RELEASE_NOTES_TEMPLATE =
            "Next release\n============\n\n### New features\n\n* [JIRA-TICKET](https://jira.ontotext.com/browse/): Some feature\n\n### Improvements\n\n* [JIRA-TICKET](https://jira.ontotext.com/browse/): Some improvement\n\n### Bug fixes\n\n* [JIRA-TICKET](https://jira.ontotext.com/browse/): Some bug fix";

    @SuppressWarnings("unused")
    @DataBoundConstructor
    public GitHubReleasePerformer(String apiUrl, String user, String password, String tag, String branch, String releaseNotesFile,
                                  String owner,
                                  String repository) {
        this.apiUrl = apiUrl;
        this.tag = Util.fixEmptyAndTrim(tag);
        this.branch = Util.fixEmptyAndTrim(branch);
        this.releaseNotesFile = Util.fixEmptyAndTrim(releaseNotesFile);
        this.user = user;
        this.password = password;
        this.owner = Util.fixEmptyAndTrim(owner);
        this.repository = Util.fixEmptyAndTrim(repository);
    }

    /**
     * Expands token macros, connects to GitHub (Enterprise when apiUrl is
     * set), creates the release and rolls the release-notes file forward.
     * Always returns true: a failed GitHub release must not fail the build.
     */
    @Override
    public boolean perform(AbstractBuild<?, ?> build, Launcher launcher,
                           BuildListener listener) {
        String resolvedUser = user;
        String resolvedPassword = password;
        String resolvedRepository = repository;
        String resolvedOwner = owner;
        String resolvedTag = tag;
        String resolvedBranch = branch;
        String resolvedReleaseNotesFile = releaseNotesFile;
        try {
            resolvedUser = TokenMacro.expandAll(build, listener, user);
            resolvedPassword = TokenMacro.expandAll(build, listener, password);
            resolvedRepository = TokenMacro.expandAll(build, listener, repository);
            resolvedOwner = TokenMacro.expandAll(build, listener, owner);
            resolvedTag = TokenMacro.expandAll(build, listener, tag);
            resolvedBranch = TokenMacro.expandAll(build, listener, branch);
            resolvedReleaseNotesFile = TokenMacro.expandAll(build, listener, releaseNotesFile);
        } catch (Exception e) {
            // Fall back to the unexpanded values resolved above.
            listener.error("Unable to resolve macro [%s]", e.getMessage());
        }
        try {
            if (!StringUtils.isBlank(apiUrl)) {
                github = GitHub.connectToEnterprise(apiUrl, resolvedUser, resolvedPassword);
            } else {
                github = GitHub.connectUsingPassword(resolvedUser, resolvedPassword);
            }
            ghRepository = github.getRepository(resolvedOwner + "/" + resolvedRepository);
            GHContent currentReleaseNotes = getReleaseNotesFile(resolvedReleaseNotesFile, resolvedBranch, listener);
            boolean result =
                    createRelease(currentReleaseNotes, resolvedTag, resolvedReleaseNotesFile, resolvedBranch, listener);
            if (result) {
                writeNewReleaseNotes(currentReleaseNotes, resolvedTag, resolvedReleaseNotesFile, resolvedBranch, listener);
            }
        } catch (IOException e) {
            listener.error("Unable to connect to repository [%s], [%s]", apiUrl + "/" + resolvedOwner + "/" + resolvedRepository,
                    e.getMessage());
        }
        // Release must not fail if GitHub release fails
        return true;
    }

    /**
     * Fetches the release-notes file from the given branch.
     *
     * @return the file content, or null when the file does not exist (a new
     *         one is created later in that case)
     */
    private GHContent getReleaseNotesFile(String releaseNotesFile, String branch, BuildListener listener) {
        GHContent result = null;
        try {
            result = ghRepository.getFileContent(releaseNotesFile, branch);
        } catch (IOException e) {
            listener.error("Unable to find file [%s], [%s]", releaseNotesFile, e.getMessage());
        }
        return result;
    }

    /**
     * Creates the GitHub release, using everything above the separator line in
     * the notes file (with "Next release" replaced by the tag) as the body.
     *
     * @return true when the release was created successfully
     */
    private boolean createRelease(GHContent releaseNotes, String tag, String releaseNotesFile, String branch,
                                  BuildListener listener) {
        boolean result = true;
        StringBuilder stringBuilder = null;
        boolean separatorFound = false;
        if (releaseNotes != null) {
            // try-with-resources: the original leaked both the content stream
            // and the reader on every build.
            try (BufferedReader bufferedReader =
                         new BufferedReader(new InputStreamReader(releaseNotes.read(), "UTF-8"))) {
                stringBuilder = new StringBuilder();
                String line;
                while ((line = bufferedReader.readLine()) != null) {
                    // Accept the canonical separator or any "###..." rule line.
                    if (line.trim().equals(RELEASE_NOTES_SEPARATOR) || line.trim().matches("^###[#]+$")) {
                        separatorFound = true;
                        break;
                    }
                    stringBuilder.append(line).append("\n");
                }
            } catch (IOException e) {
                listener.error("Unable to read release notes, [%s]", e.getMessage());
            }
        }
        GHReleaseBuilder releaseBuilder = new GHReleaseBuilder(ghRepository, tag);
        if (stringBuilder != null && separatorFound) {
            releaseBuilder.body(stringBuilder.toString().replace("Next release", tag));
        }
        releaseBuilder.commitish(branch);
        releaseBuilder.draft(false);
        releaseBuilder.name(tag);
        releaseBuilder.prerelease(false);
        try {
            releaseBuilder.create();
            String message = String.format("Release created successfully [%s]", tag);
            listener.getLogger().println(message);
        } catch (IOException e) {
            listener.error("Unable to create release, [%s]", e.getMessage());
            result = false;
        }
        return result;
    }

    /**
     * Rewrites the release-notes file: the template for the next release on
     * top, then the separator, then the previous content with "Next release"
     * replaced by the tag. Creates the file from the template when it did not
     * exist.
     */
    private void writeNewReleaseNotes(GHContent currentReleaseNotes, String tag, String releaseNotesFile, String branch,
                                      BuildListener listener) {
        if (currentReleaseNotes != null) {
            // try-with-resources: the original never closed the content stream.
            try (InputStream inputStream = currentReleaseNotes.read()) {
                StringWriter writer = new StringWriter();
                IOUtils.copy(inputStream, writer, "UTF-8");
                StringBuilder stringBuilder = new StringBuilder(RELEASE_NOTES_TEMPLATE);
                stringBuilder.append("\n\n");
                stringBuilder.append(RELEASE_NOTES_SEPARATOR);
                stringBuilder.append("\n\n");
                stringBuilder.append(writer.toString().replace("Next release", tag));
                currentReleaseNotes
                        .update(stringBuilder.toString(), String.format("Updating %s after release %s", releaseNotesFile, tag),
                                branch);
            } catch (IOException e) {
                listener.error("Unable to update release note file [%s], [%s]", releaseNotesFile, e.getMessage());
            }
        } else {
            try {
                ghRepository.createContent(RELEASE_NOTES_TEMPLATE + "\n" + RELEASE_NOTES_SEPARATOR + "\n",
                        String.format("Updating %s after release %s", releaseNotesFile, tag),
                        releaseNotesFile, branch);
            } catch (IOException e) {
                listener.error("Unable to create release note file [%s], [%s]", releaseNotesFile, e.getMessage());
            }
        }
    }

    @SuppressWarnings("unused")
    public String getTag() {
        return tag;
    }
    @SuppressWarnings("unused")
    public String getBranch() {
        return branch;
    }
    @SuppressWarnings("unused")
    public String getReleaseNotesFile() {
        return releaseNotesFile;
    }
    @SuppressWarnings("unused")
    public String getUser() {
        return user;
    }
    @SuppressWarnings("unused")
    public String getPassword() {
        return password;
    }
    @SuppressWarnings("unused")
    public String getOwner() {
        return owner;
    }
    @SuppressWarnings("unused")
    public String getRepository() {
        return repository;
    }
    @SuppressWarnings("unused")
    public String getApiUrl() {
        return apiUrl;
    }

    /** Descriptor registering this builder under "Perform GitHub release". */
    @Extension
    public static final class DescriptorImpl extends
            BuildStepDescriptor<Builder> {
        public DescriptorImpl() {
            super(GitHubReleasePerformer.class);
        }
        @Override
        public String getDisplayName() {
            return "Perform GitHub release";
        }
        @Override
        public boolean isApplicable(Class<? extends AbstractProject> jobType) {
            return true;
        }
        @Override
        public Builder newInstance(StaplerRequest req, JSONObject formData)
                throws FormException {
            return req.bindJSON(GitHubReleasePerformer.class, formData);
        }
    }

    @Override
    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl) super.getDescriptor();
    }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.form;
import javax.servlet.http.HttpServletRequest;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionMapping;
import org.unitime.timetable.defaults.ApplicationProperty;
/**
* MyEclipse Struts
* Creation date: 07-26-2005
*
* XDoclet definition:
* @struts:form name="schedulingSubpartEditForm"
*
* @author Tomas Muller
*/
public class SchedulingSubpartEditForm extends PreferencesForm {
// --------------------------------------------------------- Instance Variables
/**
 * Comment for <code>serialVersionUID</code>
 */
private static final long serialVersionUID = 3256445806692087861L;
// Identity of the subpart being edited and its course/offering context.
private String schedulingSubpartId;
private String subjectAreaId;
private String subjectArea;
private String courseNbr;
private String courseTitle;
// Parent subpart (display string plus id/label pair) and instructional type.
private String parentSubpart;
private String instructionalType;
private String instructionalTypeLabel;
private Long datePattern;
private String instrOfferingId;
private String parentSubpartId;
private String parentSubpartLabel;
private String managingDeptName;
// Credit configuration (format, type, unit type, unit range, display text).
private String creditFormat;
private Long creditType;
private Long creditUnitType;
private Float units;
private Float maxUnits;
private Boolean fractionalIncrementsAllowed;
private String creditText;
private Boolean sameItypeAsParent;
// Enrollment / scheduling flags; defaults are assigned in reset().
private Boolean unlimitedEnroll;
private Boolean autoSpreadInTime;
private Boolean subpartCreditEditAllowed;
private boolean itypeBasic;
private Boolean studentAllowOverlap;
// --------------------------------------------------------- Methods
/**
 * Method validate -- delegates entirely to the superclass preference
 * validation; this form adds no extra checks of its own.
 * @param mapping
 * @param request
 * @return ActionErrors
 */
public ActionErrors validate(
        ActionMapping mapping,
        HttpServletRequest request) {
    return super.validate(mapping, request);
}
/**
 * Method reset -- restores all subpart-specific fields to their defaults and
 * then lets the superclass reset the shared preference fields.
 * @param mapping
 * @param request
 */
public void reset(ActionMapping mapping, HttpServletRequest request) {
    schedulingSubpartId = "";
    datePattern = null;
    unlimitedEnroll = null;
    parentSubpartId = null; parentSubpartLabel = null; managingDeptName = null; sameItypeAsParent = null;
    // Boolean.FALSE instead of the deprecated, allocating new Boolean(false),
    // consistent with the assignments below.
    creditFormat = null; creditType = null; creditUnitType = null; units = null; maxUnits = null; fractionalIncrementsAllowed = Boolean.FALSE; creditText = "";
    autoSpreadInTime = Boolean.FALSE;
    studentAllowOverlap = Boolean.FALSE;
    // Credit editability is governed by an application property, not the user.
    subpartCreditEditAllowed = ApplicationProperty.SubpartCreditEditable.isTrue();
    itypeBasic = false;
    instructionalType = null; instructionalTypeLabel = null;
    super.reset(mapping, request);
}
/**
 * @return Returns the schedulingSubpartId.
 */
public String getSchedulingSubpartId() {
    return schedulingSubpartId;
}
/**
 * @param schedulingSubpartId The schedulingSubpartId to set.
 */
public void setSchedulingSubpartId(String schedulingSubpartId) {
    this.schedulingSubpartId = schedulingSubpartId;
}
/**
 * @return Returns the courseNbr.
 */
public String getCourseNbr() {
    return courseNbr;
}
/**
 * @param courseNbr The courseNbr to set.
 */
public void setCourseNbr(String courseNbr) {
    this.courseNbr = courseNbr;
}
public String getCourseTitle() {
    return this.courseTitle;
}
public void setCourseTitle(String courseTitle) {
    this.courseTitle = courseTitle;
}
/**
 * @return Returns the instructionalType.
 */
public String getInstructionalType() {
    return instructionalType;
}
/**
 * @param instructionalType The instructionalType to set.
 */
public void setInstructionalType(String instructionalType) {
    this.instructionalType = instructionalType;
}
/**
 * @return Returns the parentSubpart.
 */
public String getParentSubpart() {
    return parentSubpart;
}
/**
 * @param parentSubpart The parentSubpart to set.
 */
public void setParentSubpart(String parentSubpart) {
    this.parentSubpart = parentSubpart;
}
public String getParentSubpartId() {
    return parentSubpartId;
}
public void setParentSubpartId(String parentSubpartId) {
    this.parentSubpartId = parentSubpartId;
}
public String getParentSubpartLabel() {
    return parentSubpartLabel;
}
public void setParentSubpartLabel(String parentSubpartLabel) {
    this.parentSubpartLabel = parentSubpartLabel;
}
/**
 * @return Returns the subjectArea.
 */
public String getSubjectArea() {
    return subjectArea;
}
/**
 * @param subjectArea The subjectArea to set.
 */
public void setSubjectArea(String subjectArea) {
    this.subjectArea = subjectArea;
}
/**
 * @return Returns the instructionalTypeLabel.
 */
public String getInstructionalTypeLabel() {
    return instructionalTypeLabel;
}
/**
 * @param instructionalTypeLabel The instructionalTypeLabel to set.
 */
public void setInstructionalTypeLabel(String instructionalTypeLabel) {
    this.instructionalTypeLabel = instructionalTypeLabel;
}
public Long getDatePattern() {
    return datePattern;
}
public void setDatePattern(Long datePattern) {
    this.datePattern = datePattern;
}
public String getSubjectAreaId() {
    return subjectAreaId;
}
public void setSubjectAreaId(String subjectAreaId) {
    this.subjectAreaId = subjectAreaId;
}
public String getInstrOfferingId() {
    return instrOfferingId;
}
public void setInstrOfferingId(String instrOfferingId) {
    this.instrOfferingId = instrOfferingId;
}
public String getManagingDeptName() { return managingDeptName; }
public void setManagingDeptName(String managingDeptName) { this.managingDeptName = managingDeptName; }
// Credit configuration accessors.
public String getCreditFormat() {
    return creditFormat;
}
public void setCreditFormat(String creditFormat) {
    this.creditFormat = creditFormat;
}
public Long getCreditType() {
    return creditType;
}
public void setCreditType(Long creditType) {
    this.creditType = creditType;
}
public Long getCreditUnitType() {
    return creditUnitType;
}
public void setCreditUnitType(Long creditUnitType) {
    this.creditUnitType = creditUnitType;
}
public Boolean getFractionalIncrementsAllowed() {
    return fractionalIncrementsAllowed;
}
public void setFractionalIncrementsAllowed(Boolean fractionalIncrementsAllowed) {
    this.fractionalIncrementsAllowed = fractionalIncrementsAllowed;
}
public Float getMaxUnits() {
    return maxUnits;
}
public void setMaxUnits(Float maxUnits) {
    this.maxUnits = maxUnits;
}
public Float getUnits() {
    return units;
}
public void setUnits(Float units) {
    this.units = units;
}
public String getCreditText() {
    return creditText;
}
public void setCreditText(String creditText) {
    this.creditText = creditText;
}
public Boolean getSameItypeAsParent() {
    return sameItypeAsParent;
}
public void setSameItypeAsParent(Boolean sameItypeAsParent) {
    this.sameItypeAsParent = sameItypeAsParent;
}
// Enrollment / scheduling flag accessors.
public Boolean getUnlimitedEnroll() {
    return unlimitedEnroll;
}
public void setUnlimitedEnroll(Boolean unlimitedEnroll) {
    this.unlimitedEnroll = unlimitedEnroll;
}
public Boolean getAutoSpreadInTime() {
    return autoSpreadInTime;
}
public void setAutoSpreadInTime(Boolean autoSpreadInTime) {
    this.autoSpreadInTime = autoSpreadInTime;
}
public Boolean getSubpartCreditEditAllowed() {
    return subpartCreditEditAllowed;
}
public void setSubpartCreditEditAllowed(Boolean subpartCreditEditAllowed) {
this.subpartCreditEditAllowed = subpartCreditEditAllowed;
}
public boolean getItypeBasic() { return itypeBasic; }
public void setItypeBasic(boolean itypeBasic) { this.itypeBasic = itypeBasic; }
public boolean getStudentAllowOverlap() { return studentAllowOverlap; }
public void setStudentAllowOverlap(boolean studentAllowOverlap) { this.studentAllowOverlap = studentAllowOverlap; }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.view.hive20.resources.jobs.atsJobs;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.LinkedList;
import java.util.List;
/**
* Parser of ATS responses
*/
public class ATSParser implements IATSParser {
    protected static final Logger LOG = LoggerFactory.getLogger(ATSParser.class);

    /** Milliseconds per second; used to convert timestamp deltas into seconds. */
    private static final long MILLIS_IN_SECOND = 1000L;

    /** Delegate that performs the actual REST requests against ATS. */
    private final ATSRequestsDelegate delegate;

    public ATSParser(ATSRequestsDelegate delegate) {
        this.delegate = delegate;
    }

    /**
     * Returns all HiveQueryIDs from ATS for the given user.
     *
     * @param username user whose queries should be fetched
     * @return parsed query ids; empty list when ATS returns none
     */
    @Override
    public List<HiveQueryId> getHiveQueryIdsForUser(String username) {
        JSONObject entities = delegate.hiveQueryIdsForUser(username);
        return parseHqidJsonFromATS(entities);
    }

    /**
     * Parses the JSONArray of hive query IDs.
     *
     * @param entities should contain an 'entities' element as JSONArray
     * @return parsed query ids
     */
    private List<HiveQueryId> parseHqidJsonFromATS(JSONObject entities) {
        JSONArray jobs = (JSONArray) entities.get("entities");
        return getHqidListFromJsonArray(jobs);
    }

    /**
     * Parses a list of HiveQueryIds from JSON. Entries that fail to parse are
     * logged and skipped so that one malformed entity does not abort the batch.
     *
     * @param jobs array of HIVE_QUERY_ID entities; may be null
     * @return parsed query ids, never null
     */
    private List<HiveQueryId> getHqidListFromJsonArray(JSONArray jobs) {
        List<HiveQueryId> parsedJobs = new LinkedList<>();
        if (jobs == null) { // defensive: ATS response without an 'entities' element
            return parsedJobs;
        }
        for (Object job : jobs) {
            try {
                parsedJobs.add(parseAtsHiveJob((JSONObject) job));
            } catch (Exception ex) {
                LOG.error("Error while parsing ATS job", ex);
            }
        }
        return parsedJobs;
    }

    @Override
    public List<TezVertexId> getVerticesForDAGId(String dagId) {
        JSONObject entities = delegate.tezVerticesListForDAG(dagId);
        JSONArray vertices = (JSONArray) entities.get("entities");
        List<TezVertexId> parsedVertices = new LinkedList<>();
        for (Object vertex : vertices) {
            try {
                parsedVertices.add(parseVertex((JSONObject) vertex));
            } catch (Exception ex) {
                LOG.error("Error while parsing the vertex", ex);
            }
        }
        return parsedVertices;
    }

    @Override
    public HiveQueryId getHiveQueryIdByOperationId(String guidString) {
        JSONObject entities = delegate.hiveQueryIdByOperationId(guidString);
        return getHiveQueryIdFromJson(entities);
    }

    /** Returns the first parsed query id, or an empty HiveQueryId when none matched. */
    private HiveQueryId getHiveQueryIdFromJson(JSONObject entities) {
        JSONArray jobs = (JSONArray) entities.get("entities");
        if (jobs.isEmpty()) {
            return new HiveQueryId();
        }
        return parseAtsHiveJob((JSONObject) jobs.get(0));
    }

    /**
     * Returns the hive entity from ATS; empty object if not found.
     *
     * @param hiveId the entityId of the hive
     * @return empty entity if not found, else the parsed HiveQueryId
     */
    @Override
    public HiveQueryId getHiveQueryIdByHiveEntityId(String hiveId) {
        JSONObject entity = delegate.hiveQueryEntityByEntityId(hiveId);
        return parseAtsHiveJob(entity);
    }

    @Override
    public TezDagId getTezDAGByName(String name) {
        JSONArray tezDagEntities = (JSONArray) delegate.tezDagByName(name).get("entities");
        return parseTezDag(tezDagEntities);
    }

    @Override
    public TezDagId getTezDAGByEntity(String entity) {
        JSONArray tezDagEntities = (JSONArray) delegate.tezDagByEntity(entity).get("entities");
        return parseTezDag(tezDagEntities);
    }

    /**
     * Fetches the HIVE_QUERY_ID entities from ATS for given user between the given time period.
     *
     * @param username  username for which to fetch hive query IDs
     * @param startTime time in milliseconds, inclusive
     * @param endTime   time in milliseconds, exclusive
     * @return list of HIVE_QUERY_ID
     */
    @Override
    public List<HiveQueryId> getHiveQueryIdsForUserByTime(String username, long startTime, long endTime) {
        JSONObject entities = delegate.hiveQueryIdsForUserByTime(username, startTime, endTime);
        return parseHqidJsonFromATS(entities);
    }

    @Override
    public List<HiveQueryId> getHiveQueryIdByEntityList(List<String> hiveIds) {
        List<HiveQueryId> hiveQueryIds = new LinkedList<>();
        for (String id : hiveIds) {
            HiveQueryId hqi = this.getHiveQueryIdByHiveEntityId(id);
            if (null != hqi.entity) { // skip ids that ATS did not know about
                hiveQueryIds.add(hqi);
            }
        }
        return hiveQueryIds;
    }

    /** Converts the (at most one) TEZ_DAG_ID entity in the array into a TezDagId bean. */
    private TezDagId parseTezDag(JSONArray tezDagEntities) {
        assert tezDagEntities.size() <= 1;
        if (tezDagEntities.isEmpty()) {
            return new TezDagId();
        }
        JSONObject tezDagEntity = (JSONObject) tezDagEntities.get(0);
        TezDagId parsedDag = new TezDagId();
        JSONArray applicationIds = (JSONArray) ((JSONObject) tezDagEntity.get("primaryfilters")).get("applicationId");
        parsedDag.entity = (String) tezDagEntity.get("entity");
        parsedDag.applicationId = (String) applicationIds.get(0);
        parsedDag.status = (String) ((JSONObject) tezDagEntity.get("otherinfo")).get("status");
        return parsedDag;
    }

    /** Converts a single ATS HIVE_QUERY_ID entity into a HiveQueryId bean. */
    private HiveQueryId parseAtsHiveJob(JSONObject job) {
        HiveQueryId parsedJob = new HiveQueryId();
        parsedJob.entity = (String) job.get("entity");
        parsedJob.url = delegate.hiveQueryIdDirectUrl((String) job.get("entity"));
        parsedJob.starttime = ((Long) job.get("starttime"));
        JSONObject primaryfilters = (JSONObject) job.get("primaryfilters");
        JSONArray operationIds = (JSONArray) primaryfilters.get("operationid");
        if (operationIds != null) {
            parsedJob.operationId = (String) operationIds.get(0);
        }
        JSONArray users = (JSONArray) primaryfilters.get("user");
        if (users != null) {
            parsedJob.user = (String) users.get(0);
        }
        JSONObject lastEvent = getLastEvent(job);
        long lastEventTimestamp = ((Long) lastEvent.get("timestamp"));
        // Duration in whole seconds between query start and the latest event.
        parsedJob.duration = (lastEventTimestamp - parsedJob.starttime) / MILLIS_IN_SECOND;
        JSONObject otherinfo = (JSONObject) job.get("otherinfo");
        if (otherinfo.get("QUERY") != null) { // workaround for HIVE-10829
            JSONObject query = (JSONObject) JSONValue.parse((String) otherinfo.get("QUERY"));
            parsedJob.query = (String) query.get("queryText");
            JSONObject stages = (JSONObject) ((JSONObject) query.get("queryPlan")).get("STAGE PLANS");
            List<String> dagIds = new LinkedList<>();
            List<JSONObject> stagesList = new LinkedList<>();
            for (Object key : stages.keySet()) {
                JSONObject stage = (JSONObject) stages.get(key);
                if (stage.get("Tez") != null) { // only Tez stages carry a DAG id
                    dagIds.add((String) ((JSONObject) stage.get("Tez")).get("DagId:"));
                }
                stagesList.add(stage);
            }
            parsedJob.dagNames = dagIds;
            parsedJob.stages = stagesList;
        }
        if (otherinfo.get("VERSION") != null) {
            parsedJob.version = (Long) otherinfo.get("VERSION");
        }
        return parsedJob;
    }

    /** Converts a TEZ_VERTEX_ID entity into a TezVertexId bean; 'otherinfo' may be absent. */
    private TezVertexId parseVertex(JSONObject vertex) {
        TezVertexId tezVertexId = new TezVertexId();
        tezVertexId.entity = (String) vertex.get("entity");
        JSONObject otherinfo = (JSONObject) vertex.get("otherinfo");
        if (otherinfo != null) {
            tezVertexId.vertexName = (String) otherinfo.get("vertexName");
        }
        return tezVertexId;
    }

    /**
     * Returns the entity's first listed event.
     * NOTE(review): index 0 is treated as the most recent event — this assumes
     * ATS returns events newest-first; confirm against the ATS REST API.
     */
    private JSONObject getLastEvent(JSONObject atsEntity) {
        JSONArray events = (JSONArray) atsEntity.get("events");
        return (JSONObject) events.get(0);
    }
}
| |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.jdisc.http.server.jetty;
import com.yahoo.container.logging.ConnectionLog;
import com.yahoo.container.logging.ConnectionLogEntry;
import com.yahoo.container.logging.ConnectionLogEntry.SslHandshakeFailure.ExceptionEntry;
import com.yahoo.io.HexDump;
import com.yahoo.jdisc.http.ServerConfig;
import com.yahoo.security.SubjectAlternativeName;
import com.yahoo.security.X509CertificateUtils;
import org.eclipse.jetty.alpn.server.ALPNServerConnection;
import org.eclipse.jetty.http2.server.HTTP2ServerConnection;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.io.SocketChannelEndPoint;
import org.eclipse.jetty.io.ssl.SslConnection;
import org.eclipse.jetty.io.ssl.SslHandshakeListener;
import org.eclipse.jetty.server.HttpChannel;
import org.eclipse.jetty.server.HttpConnection;
import org.eclipse.jetty.server.ProxyConnectionFactory;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
import javax.net.ssl.ExtendedSSLSession;
import javax.net.ssl.SNIHostName;
import javax.net.ssl.SNIServerName;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.StandardConstants;
import java.net.InetSocketAddress;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
* Jetty integration for jdisc connection log ({@link ConnectionLog}).
*
* @author bjorncs
*/
class JettyConnectionLogger extends AbstractLifeCycle implements Connection.Listener, HttpChannel.Listener, SslHandshakeListener {

    // Request attribute under which the connection's UUID is exposed to request handling.
    static final String CONNECTION_ID_REQUEST_ATTRIBUTE = "jdisc.request.connection.id";

    private static final Logger log = Logger.getLogger(JettyConnectionLogger.class.getName());

    // Per-socket bookkeeping, keyed by object identity (endpoints/engines are not value-comparable).
    private final SimpleConcurrentIdentityHashMap<SocketChannelEndPoint, ConnectionInfo> connectionInfos = new SimpleConcurrentIdentityHashMap<>();
    private final SimpleConcurrentIdentityHashMap<SocketChannelEndPoint, SSLEngine> sslEngines = new SimpleConcurrentIdentityHashMap<>();
    // Extra mapping as callbacks in SslHandshakeListener only provides SSLEngine (no connection reference) as argument
    private final SimpleConcurrentIdentityHashMap<SSLEngine, ConnectionInfo> sslToConnectionInfo = new SimpleConcurrentIdentityHashMap<>();

    private final boolean enabled;
    private final ConnectionLog connectionLog;

    JettyConnectionLogger(ServerConfig.ConnectionLog config, ConnectionLog connectionLog) {
        this.enabled = config.enabled();
        this.connectionLog = connectionLog;
        log.log(Level.FINE, () -> "Jetty connection logger is " + (config.enabled() ? "enabled" : "disabled"));
    }

    //
    // AbstractLifeCycle methods start
    //
    @Override
    protected void doStop() {
        handleListenerInvocation("AbstractLifeCycle", "doStop", "", List.of(), () -> {
            log.log(Level.FINE, () -> "Jetty connection logger is stopped");
        });
    }

    @Override
    protected void doStart() {
        handleListenerInvocation("AbstractLifeCycle", "doStart", "", List.of(), () -> {
            log.log(Level.FINE, () -> "Jetty connection logger is started");
        });
    }
    //
    // AbstractLifeCycle methods stop
    //

    //
    // Connection.Listener methods start
    //
    // Classifies the newly opened connection (TLS, ALPN, HTTP/1, HTTP/2, proxy-protocol)
    // and records the relevant details on its ConnectionInfo.
    @Override
    public void onOpened(Connection connection) {
        handleListenerInvocation("Connection.Listener", "onOpened", "%h", List.of(connection), () -> {
            SocketChannelEndPoint endpoint = findUnderlyingSocketEndpoint(connection.getEndPoint());
            ConnectionInfo info = connectionInfos.computeIfAbsent(endpoint, ConnectionInfo::from);
            String connectionClassName = connection.getClass().getSimpleName(); // For hidden implementations of Connection
            if (connection instanceof SslConnection) {
                SSLEngine sslEngine = ((SslConnection) connection).getSSLEngine();
                addReferenceToSslEngine(endpoint, info, sslEngine);
            } else if (connection instanceof ALPNServerConnection) {
                SSLEngine sslEngine = ((ALPNServerConnection) connection).getSSLEngine();
                addReferenceToSslEngine(endpoint, info, sslEngine);
            } else if (connection instanceof HttpConnection) {
                info.setHttpProtocol("HTTP/1.1");
            } else if (connection instanceof HTTP2ServerConnection) {
                info.setHttpProtocol("HTTP/2.0");
            } else if (connectionClassName.endsWith("ProxyProtocolV1Connection")) {
                info.setProxyProtocolVersion("v1");
            } else if (connectionClassName.endsWith("ProxyProtocolV2Connection")) {
                info.setProxyProtocolVersion("v2");
            }
            // Proxy-protocol endpoints report the original client address, not the proxy's.
            if (connection.getEndPoint() instanceof ProxyConnectionFactory.ProxyEndPoint) {
                InetSocketAddress remoteAddress = connection.getEndPoint().getRemoteAddress();
                info.setRemoteAddress(remoteAddress);
            }
        });
    }

    // Registers an SSLEngine for the endpoint, dropping any stale reverse mapping
    // left by a previously registered engine on the same endpoint.
    private void addReferenceToSslEngine(SocketChannelEndPoint endpoint, ConnectionInfo info, SSLEngine sslEngine) {
        if (sslEngine != null) {
            sslEngines.put(endpoint, sslEngine)
                    .ifPresent(sslToConnectionInfo::remove);
            sslToConnectionInfo.put(sslEngine, info);
        }
    }

    // Emits the connection log entry once the underlying socket is actually closed,
    // then removes all bookkeeping for the endpoint.
    @Override
    public void onClosed(Connection connection) {
        handleListenerInvocation("Connection.Listener", "onClosed", "%h", List.of(connection), () -> {
            SocketChannelEndPoint endpoint = findUnderlyingSocketEndpoint(connection.getEndPoint());
            ConnectionInfo info = connectionInfos.get(endpoint).orElse(null);
            if (info == null) return; // Closed connection already handled
            if (connection instanceof HttpConnection) {
                info.setHttpBytes(connection.getBytesIn(), connection.getBytesOut());
            }
            if (!endpoint.isOpen()) {
                info.setClosedAt(System.currentTimeMillis());
                connectionLog.log(info.toLogEntry());
                connectionInfos.remove(endpoint);
                sslEngines.remove(endpoint)
                        .ifPresent(sslToConnectionInfo::remove);
            }
        });
    }
    //
    // Connection.Listener methods end
    //

    //
    // HttpChannel.Listener methods start
    //
    @Override
    public void onRequestBegin(Request request) {
        handleListenerInvocation("HttpChannel.Listener", "onRequestBegin", "%h", List.of(request), () -> {
            SocketChannelEndPoint endpoint = findUnderlyingSocketEndpoint(request.getHttpChannel().getEndPoint());
            ConnectionInfo info = connectionInfos.get(endpoint).get();
            info.incrementRequests();
            // Expose the connection id so access logging can correlate request and connection.
            request.setAttribute(CONNECTION_ID_REQUEST_ATTRIBUTE, info.uuid());
        });
    }

    @Override
    public void onResponseBegin(Request request) {
        handleListenerInvocation("HttpChannel.Listener", "onResponseBegin", "%h", List.of(request), () -> {
            SocketChannelEndPoint endpoint = findUnderlyingSocketEndpoint(request.getHttpChannel().getEndPoint());
            ConnectionInfo info = connectionInfos.get(endpoint).orElse(null);
            if (info == null) return; // Connection closed before response started - observed during Jetty server shutdown
            info.incrementResponses();
        });
    }
    //
    // HttpChannel.Listener methods end
    //

    //
    // SslHandshakeListener methods start
    //
    @Override
    public void handshakeSucceeded(Event event) {
        SSLEngine sslEngine = event.getSSLEngine();
        handleListenerInvocation("SslHandshakeListener", "handshakeSucceeded", "sslEngine=%h", List.of(sslEngine), () -> {
            ConnectionInfo info = sslToConnectionInfo.get(sslEngine).orElse(null);
            if (info == null) return;
            info.setSslSessionDetails(sslEngine.getSession());
        });
    }

    @Override
    public void handshakeFailed(Event event, Throwable failure) {
        SSLEngine sslEngine = event.getSSLEngine();
        handleListenerInvocation("SslHandshakeListener", "handshakeFailed", "sslEngine=%h,failure=%s", List.of(sslEngine, failure), () -> {
            log.log(Level.FINE, failure, failure::toString);
            ConnectionInfo info = sslToConnectionInfo.get(sslEngine).orElse(null);
            if (info == null) return;
            info.setSslHandshakeFailure((SSLHandshakeException)failure);
        });
    }
    //
    // SslHandshakeListener methods end
    //

    // Wraps every listener callback: no-ops when logging is disabled and ensures
    // an exception in our bookkeeping never propagates into Jetty.
    private void handleListenerInvocation(
            String listenerType, String methodName, String methodArgumentsFormat, List<Object> methodArguments, ListenerHandler handler) {
        if (!enabled) return;
        try {
            log.log(Level.FINE, () -> String.format(listenerType + "." + methodName + "(" + methodArgumentsFormat + ")", methodArguments.toArray()));
            handler.run();
        } catch (Exception e) {
            log.log(Level.WARNING, String.format("Exception in %s.%s listener: %s", listenerType, methodName, e.getMessage()), e);
        }
    }

    /**
     * Protocol layers are connected through each {@link Connection}'s {@link EndPoint} reference.
     * This methods iterates through the endpoints recursively to find the underlying socket endpoint.
     */
    private static SocketChannelEndPoint findUnderlyingSocketEndpoint(EndPoint endpoint) {
        if (endpoint instanceof SocketChannelEndPoint) {
            return (SocketChannelEndPoint) endpoint;
        } else if (endpoint instanceof SslConnection.DecryptedEndPoint) {
            var decryptedEndpoint = (SslConnection.DecryptedEndPoint) endpoint;
            return findUnderlyingSocketEndpoint(decryptedEndpoint.getSslConnection().getEndPoint());
        } else if (endpoint instanceof ProxyConnectionFactory.ProxyEndPoint) {
            var proxyEndpoint = (ProxyConnectionFactory.ProxyEndPoint) endpoint;
            return findUnderlyingSocketEndpoint(proxyEndpoint.unwrap());
        } else {
            throw new IllegalArgumentException("Unknown connection endpoint type: " + endpoint.getClass().getName());
        }
    }

    @FunctionalInterface private interface ListenerHandler { void run() throws Exception; }

    // Mutable per-connection state, accumulated by the listener callbacks above.
    // All methods are synchronized since callbacks may arrive on different threads.
    private static class ConnectionInfo {
        private final UUID uuid;
        private final long createdAt;
        private final InetSocketAddress localAddress;
        private final InetSocketAddress peerAddress;

        private long closedAt = 0;
        private long httpBytesReceived = 0;
        private long httpBytesSent = 0;
        private long requests = 0;
        private long responses = 0;
        private InetSocketAddress remoteAddress;
        private byte[] sslSessionId;
        private String sslProtocol;
        private String sslCipherSuite;
        private String sslPeerSubject;
        private Date sslPeerNotBefore;
        private Date sslPeerNotAfter;
        private List<SNIServerName> sslSniServerNames;
        private String sslPeerIssuerSubject;
        private byte[] sslPeerEncodedCertificate;
        private SSLHandshakeException sslHandshakeException;
        private List<String> sslSubjectAlternativeNames;
        private String proxyProtocolVersion;
        private String httpProtocol;

        private ConnectionInfo(UUID uuid, long createdAt, InetSocketAddress localAddress, InetSocketAddress peerAddress) {
            this.uuid = uuid;
            this.createdAt = createdAt;
            this.localAddress = localAddress;
            this.peerAddress = peerAddress;
        }

        static ConnectionInfo from(SocketChannelEndPoint endpoint) {
            return new ConnectionInfo(
                    UUID.randomUUID(),
                    endpoint.getCreatedTimeStamp(),
                    endpoint.getLocalAddress(),
                    endpoint.getRemoteAddress());
        }

        synchronized UUID uuid() { return uuid; }

        synchronized ConnectionInfo setClosedAt(long closedAt) {
            this.closedAt = closedAt;
            return this;
        }

        synchronized ConnectionInfo setHttpBytes(long received, long sent) {
            this.httpBytesReceived = received;
            this.httpBytesSent = sent;
            return this;
        }

        synchronized ConnectionInfo incrementRequests() { ++this.requests; return this; }

        synchronized ConnectionInfo incrementResponses() { ++this.responses; return this; }

        synchronized ConnectionInfo setRemoteAddress(InetSocketAddress remoteAddress) {
            this.remoteAddress = remoteAddress;
            return this;
        }

        // Captures TLS session parameters and, when the peer authenticated, its certificate details.
        synchronized ConnectionInfo setSslSessionDetails(SSLSession session) {
            this.sslCipherSuite = session.getCipherSuite();
            this.sslProtocol = session.getProtocol();
            this.sslSessionId = session.getId();
            if (session instanceof ExtendedSSLSession) {
                ExtendedSSLSession extendedSession = (ExtendedSSLSession) session;
                this.sslSniServerNames = extendedSession.getRequestedServerNames();
            }
            try {
                this.sslPeerSubject = session.getPeerPrincipal().getName();
                X509Certificate peerCertificate = (X509Certificate) session.getPeerCertificates()[0];
                this.sslPeerNotBefore = peerCertificate.getNotBefore();
                this.sslPeerNotAfter = peerCertificate.getNotAfter();
                this.sslSubjectAlternativeNames = X509CertificateUtils.getSubjectAlternativeNames(peerCertificate).stream()
                        .map(SubjectAlternativeName::getValue)
                        .collect(Collectors.toList());
                this.sslPeerIssuerSubject = peerCertificate.getIssuerDN().getName();
                this.sslPeerEncodedCertificate = peerCertificate.getEncoded();
            } catch (SSLPeerUnverifiedException | CertificateEncodingException e) {
                // Thrown if peer is not authenticated (e.g when client auth is disabled);
                // intentionally ignored — JSSE provides no means of checking for client
                // authentication without catching this exception.
            }
            return this;
        }

        synchronized ConnectionInfo setSslHandshakeFailure(SSLHandshakeException exception) {
            this.sslHandshakeException = exception;
            return this;
        }

        synchronized ConnectionInfo setHttpProtocol(String protocol) { this.httpProtocol = protocol; return this; }

        synchronized ConnectionInfo setProxyProtocolVersion(String version) { this.proxyProtocolVersion = version; return this; }

        // Builds the immutable log entry; fields that were never populated are left out.
        synchronized ConnectionLogEntry toLogEntry() {
            ConnectionLogEntry.Builder builder = ConnectionLogEntry.builder(uuid, Instant.ofEpochMilli(createdAt));
            if (closedAt > 0) {
                builder.withDuration((closedAt - createdAt) / 1000D);
            }
            if (httpBytesReceived > 0) {
                builder.withHttpBytesReceived(httpBytesReceived);
            }
            if (httpBytesSent > 0) {
                builder.withHttpBytesSent(httpBytesSent);
            }
            if (requests > 0) {
                builder.withRequests(requests);
            }
            if (responses > 0) {
                builder.withResponses(responses);
            }
            if (peerAddress != null) {
                builder.withPeerAddress(peerAddress.getHostString())
                        .withPeerPort(peerAddress.getPort());
            }
            if (localAddress != null) {
                builder.withLocalAddress(localAddress.getHostString())
                        .withLocalPort(localAddress.getPort());
            }
            if (remoteAddress != null) {
                builder.withRemoteAddress(remoteAddress.getHostString())
                        .withRemotePort(remoteAddress.getPort());
            }
            if (sslProtocol != null && sslCipherSuite != null && sslSessionId != null) {
                builder.withSslProtocol(sslProtocol)
                        .withSslCipherSuite(sslCipherSuite)
                        .withSslSessionId(HexDump.toHexString(sslSessionId));
            }
            if (sslSniServerNames != null) {
                sslSniServerNames.stream()
                        .filter(name -> name instanceof SNIHostName && name.getType() == StandardConstants.SNI_HOST_NAME)
                        .map(name -> ((SNIHostName) name).getAsciiName())
                        .findAny()
                        .ifPresent(builder::withSslSniServerName);
            }
            if (sslPeerSubject != null && sslPeerNotAfter != null && sslPeerNotBefore != null
                    && sslPeerIssuerSubject != null && sslPeerEncodedCertificate != null) {
                builder.withSslPeerSubject(sslPeerSubject)
                        .withSslPeerIssuerSubject(sslPeerIssuerSubject)
                        .withSslPeerNotAfter(sslPeerNotAfter.toInstant())
                        .withSslPeerNotBefore(sslPeerNotBefore.toInstant())
                        .withSslPeerFingerprint(certificateFingerprint(sslPeerEncodedCertificate));
            }
            if (sslSubjectAlternativeNames != null && !sslSubjectAlternativeNames.isEmpty()) {
                builder.withSslSubjectAlternativeNames(sslSubjectAlternativeNames);
            }
            if (sslHandshakeException != null) {
                // Flatten the exception chain into (type, message) pairs for the log entry.
                List<ExceptionEntry> exceptionChain = new ArrayList<>();
                Throwable cause = sslHandshakeException;
                while (cause != null) {
                    exceptionChain.add(new ExceptionEntry(cause.getClass().getName(), cause.getMessage()));
                    cause = cause.getCause();
                }
                String type = SslHandshakeFailure.fromSslHandshakeException(sslHandshakeException)
                        .map(SslHandshakeFailure::failureType)
                        .orElse("UNKNOWN");
                builder.withSslHandshakeFailure(new ConnectionLogEntry.SslHandshakeFailure(type, exceptionChain));
            }
            if (httpProtocol != null) {
                builder.withHttpProtocol(httpProtocol);
            }
            if (proxyProtocolVersion != null) {
                builder.withProxyProtocolVersion(proxyProtocolVersion);
            }
            return builder.build();
        }

        // SHA-1 digest of the DER-encoded certificate, hex-encoded.
        private static String certificateFingerprint(byte[] derEncoded) {
            try {
                return HexDump.toHexString(MessageDigest.getInstance("SHA-1").digest(derEncoded));
            } catch (NoSuchAlgorithmException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
| |
/*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.provider.hostprobing.impl;
import org.onlab.packet.ARP;
import org.onlab.packet.Ethernet;
import org.onlab.packet.IPv6;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.Ip6Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onlab.packet.ndp.NeighborSolicitation;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.Host;
import org.onosproject.net.HostLocation;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.host.HostProbe;
import org.onosproject.net.host.HostProbingEvent;
import org.onosproject.net.host.HostProbingProvider;
import org.onosproject.net.host.HostProbingProviderRegistry;
import org.onosproject.net.host.HostProbingProviderService;
import org.onosproject.net.host.HostProvider;
import org.onosproject.net.host.HostProviderRegistry;
import org.onosproject.net.host.HostProviderService;
import org.onosproject.net.host.ProbeMode;
import org.onosproject.net.packet.DefaultOutboundPacket;
import org.onosproject.net.packet.OutboundPacket;
import org.onosproject.net.packet.PacketProcessor;
import org.onosproject.net.packet.PacketService;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.slf4j.Logger;
import java.nio.ByteBuffer;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
import static org.onlab.util.Tools.groupedThreads;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Provider which sends host location probes to discover or verify a host at specific location.
*/
@Component(immediate = true, service = { HostProvider.class, HostProbingProvider.class })
public class DefaultHostProbingProvider extends AbstractProvider implements HostProvider, HostProbingProvider {
private final Logger log = getLogger(getClass());
@Reference(cardinality = ReferenceCardinality.MANDATORY)
private HostProviderRegistry providerRegistry;
@Reference(cardinality = ReferenceCardinality.MANDATORY)
private HostProbingProviderRegistry hostProbingProviderRegistry;
@Reference(cardinality = ReferenceCardinality.MANDATORY)
private PacketService packetService;
@Reference(cardinality = ReferenceCardinality.MANDATORY)
private MastershipService mastershipService;
private HostProviderService providerService;
private HostProbingProviderService hostProbingProviderService;
private ExecutorService packetHandler;
private ExecutorService probeEventHandler;
private ScheduledExecutorService hostProber;
// Handles incoming packets on the packet-handler executor. Probe replies are
// recognized by their destination MAC: an ONOS-generated address means the
// probed host answered, so the pending probe for that MAC is cleared.
private final PacketProcessor packetProcessor = context ->
    packetHandler.execute(() -> {
        Ethernet eth = context.inPacket().parsed();
        if (eth == null) { // not an Ethernet frame we can parse
            return;
        }
        MacAddress srcMac = eth.getSourceMAC();
        MacAddress destMac = eth.getDestinationMAC();
        VlanId vlan = VlanId.vlanId(eth.getVlanID());
        ConnectPoint heardOn = context.inPacket().receivedFrom();
        // Receives a location probe. Invalid entry from the cache
        if (destMac.isOnos() && !MacAddress.NONE.equals(destMac)) {
            log.debug("Receives probe for {}/{} on {}", srcMac, vlan, heardOn);
            hostProbingProviderService.removeProbingHost(destMac);
        }
    });
// TODO Make this configurable
private static final int PROBE_INIT_DELAY_MS = 1000;
private static final int DEFAULT_RETRY = 5;
/**
 * Creates a host-probing provider registered under the
 * {@code org.onosproject.provider.hostprobing} scheme.
 */
public DefaultHostProbingProvider() {
    super(new ProviderId("hostprobing", "org.onosproject.provider.hostprobing"));
}
// Registers this provider with both registries, sets up the worker executors
// and starts listening for packets with high (advisor) priority.
@Activate
public void activate(ComponentContext context) {
    providerService = providerRegistry.register(this);
    hostProbingProviderService = hostProbingProviderRegistry.register(this);
    // Single-threaded executors keep packet and probe-event handling ordered.
    packetHandler = newSingleThreadScheduledExecutor(groupedThreads("onos/host-loc-provider",
            "packet-handler", log));
    probeEventHandler = newSingleThreadScheduledExecutor(groupedThreads("onos/host-loc-provider",
            "probe-handler", log));
    // Pool for the delayed probe transmissions scheduled by probeHostInternal.
    hostProber = newScheduledThreadPool(32, groupedThreads("onos/host-loc-probe", "%d", log));
    packetService.addProcessor(packetProcessor, PacketProcessor.advisor(1));
}
/**
 * Unregisters the provider and releases its resources.
 * The packet processor is removed first so no new packet-handling work is
 * submitted while the provider services are being unregistered and the
 * executors shut down (the original order allowed a late packet to run
 * against partially torn-down state).
 */
@Deactivate
public void deactivate() {
    // Stop accepting packets before tearing anything else down.
    packetService.removeProcessor(packetProcessor);
    providerRegistry.unregister(this);
    hostProbingProviderRegistry.unregister(this);
    providerService = null;
    packetHandler.shutdown();
    probeEventHandler.shutdown();
    hostProber.shutdown();
}
/**
 * No-op: this provider only probes on explicit {@link #probeHost} requests.
 */
@Override
public void triggerProbe(Host host) {
    // Not doing anything at this moment...
}
/**
 * Reacts to probing life-cycle events on the probe-event executor:
 * retries timed-out probes, removes a location when a VERIFY probe finally
 * fails, and adds a location when a DISCOVER probe completes.
 */
@Override
public void processEvent(HostProbingEvent event) {
    probeEventHandler.execute(() -> {
        log.debug("Receiving HostProbingEvent {}", event);
        HostProbe hostProbe = event.subject();
        switch (event.type()) {
            case PROBE_REQUESTED:
                // Do nothing
                break;
            case PROBE_TIMEOUT:
                // Retry probe until PROBE_FAIL
                // TODO Only retry DISCOVER probes
                probeHostInternal(hostProbe, hostProbe.connectPoint(),
                        hostProbe.mode(), hostProbe.probeMac(), hostProbe.retry());
                break;
            case PROBE_FAIL:
                // Remove this location if this is a verify probe.
                if (hostProbe.mode() == ProbeMode.VERIFY) {
                    providerService.removeLocationFromHost(hostProbe.id(),
                            (HostLocation) hostProbe.connectPoint());
                }
                break;
            case PROBE_COMPLETED:
                // Add this location if this is a discover probe.
                if (hostProbe.mode() == ProbeMode.DISCOVER) {
                    HostLocation newLocation = new HostLocation(hostProbe.connectPoint(),
                            System.currentTimeMillis());
                    providerService.addLocationToHost(hostProbe.id(), newLocation);
                }
                break;
            default:
                log.warn("Unknown HostProbingEvent type: {}", event.type());
        }
    });
}
@Override
public void probeHost(Host host, ConnectPoint connectPoint, ProbeMode probeMode) {
    // Null probeMac: the MAC is generated by addProbingHost inside probeHostInternal.
    probeHostInternal(host, connectPoint, probeMode, null, DEFAULT_RETRY);
}
// probeMac can be null if this is the very first probe and the mac is to-be-generated.
// Builds and schedules an ARP (IPv4) or NDP (IPv6) probe towards the host's first IP.
private void probeHostInternal(Host host, ConnectPoint connectPoint, ProbeMode probeMode,
        MacAddress probeMac, int retry) {
    // Only the mastership owner of the device sends the probe.
    if (!mastershipService.isLocalMaster(connectPoint.deviceId())) {
        log.debug("Current node is not master of {}, abort probing {}", connectPoint.deviceId(), host);
        return;
    }
    log.debug("probeHostInternal host={}, cp={}, mode={}, probeMac={}, retry={}", host, connectPoint,
            probeMode, probeMac, retry);
    // A probe needs a target IP; hosts without one are skipped (see else branch).
    Optional<IpAddress> ipOptional = host.ipAddresses().stream().findFirst();
    if (ipOptional.isPresent()) {
        // Registers the pending probe and returns the (possibly newly generated) probe MAC.
        probeMac = hostProbingProviderService.addProbingHost(host, connectPoint, probeMode, probeMac, retry);
        IpAddress ip = ipOptional.get();
        log.debug("Constructing {} probe for host {} with {}", probeMode, host.id(), ip);
        Ethernet probe;
        if (ip.isIp4()) {
            // IPv4: ARP request sourced from the probe MAC.
            probe = ARP.buildArpRequest(probeMac.toBytes(), Ip4Address.ZERO.toOctets(),
                    host.id().mac().toBytes(), ip.toOctets(),
                    host.id().mac().toBytes(), host.id().vlanId().toShort());
        } else {
            // IPv6: neighbor solicitation sourced from the probe MAC's link-local address.
            probe = NeighborSolicitation.buildNdpSolicit(
                    ip.getIp6Address(),
                    Ip6Address.valueOf(IPv6.getLinkLocalAddress(probeMac.toBytes())),
                    ip.getIp6Address(),
                    probeMac,
                    host.id().mac(),
                    host.id().vlanId());
        }
        // NOTE: delay the probe a little bit to wait for the store synchronization is done
        hostProber.schedule(() ->
                sendLocationProbe(probe, connectPoint), PROBE_INIT_DELAY_MS, TimeUnit.MILLISECONDS);
    } else {
        log.debug("Host {} has no IP address yet. Skip probing.", host);
    }
}
/**
 * Emits the given probe packet out of the given port.
 *
 * @param probe the probe packet
 * @param connectPoint the port we want to probe
 */
private void sendLocationProbe(Ethernet probe, ConnectPoint connectPoint) {
    log.debug("Sending probe for host {} on location {} with probeMac {}",
            probe.getDestinationMAC(), connectPoint, probe.getSourceMAC());
    OutboundPacket packet = new DefaultOutboundPacket(
            connectPoint.deviceId(),
            DefaultTrafficTreatment.builder().setOutput(connectPoint.port()).build(),
            ByteBuffer.wrap(probe.serialize()));
    packetService.emit(packet);
}
}
| |
/**
*
*/
package com.winterwell.bob.tasks;
import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.winterwell.bob.BuildTask;
import com.winterwell.utils.io.FileUtils;
import com.winterwell.utils.log.Log;
/**
* Copy files from one directory to another.
* <p>
* By default, hidden files such as .svn are copied, but this can be switched
* off via {@link #setIncludeHiddenFiles(boolean)}.
*
* @see RSyncTask
* @author Daniel
*/
public class CopyTask extends BuildTask {

	@Override
	public String toString() {
		return getClass().getSimpleName() + ":" + (srcDir == null ? "file-list" : srcDir);
	}

	/** Destination directory. Created in {@link #doTask()} if it does not exist. */
	private final File destDir;

	/** Decides which files get copied. Defaults to accepting everything. */
	private FileFilter filter = FileUtils.TRUE_FILTER;

	/** If true (the default), hidden files such as .svn are copied too. */
	private boolean includeHiddenFiles = true;

	/** If true (the default), existing destination files get overwritten. */
	private boolean overwrite = true;

	/** The files that weren't copied due to overwrite issues */
	private final List<File> skipped = new ArrayList<File>();

	/** Source directory, or null when copying an explicit file list. */
	private final File srcDir;

	/** Explicit source files, or null when copying from {@link #srcDir}. */
	private final Collection<File> srcFiles;

	/** If true, only overwrite when the source is newer than the destination. */
	private boolean overwriteIfNewer;

	/** If true, two sources mapping to one destination throws instead of warning. */
	private boolean errorOnDuplicate;

	/**
	 * Copy files from one directory to another.
	 * <p>
	 * By default, hidden files such as .svn are copied, but this can be switched
	 * off via {@link #setIncludeHiddenFiles(boolean)}.
	 * @param destDir This will be created if it does not exist.
	 */
	public CopyTask(File srcDir, File destDir) {
		this.srcDir = srcDir.getAbsoluteFile();
		this.destDir = destDir.getAbsoluteFile();
		this.srcFiles = null;
	}

	/**
	 * A flat copy (no directories will be created) of srcFiles into destDir.
	 * @param srcFiles
	 * @param destDir This will be created if it does not exist.
	 */
	public CopyTask(Collection<File> srcFiles, File destDir) {
		this.srcFiles = srcFiles;
		srcDir = null;
		this.destDir = destDir.getAbsoluteFile();
	}

	boolean resolveSymlinks;

	/**
	 * If true, symlinks get resolved - the output directory will contain a copy of the file (and not a symlink).
	 * If false, symlinks are copied as symlinks.
	 *
	 * TODO I think true would be a better default
	 */
	public CopyTask setResolveSymLinks(boolean b) {
		this.resolveSymlinks = b;
		return this;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see winterwell.bob.BuildTask#doTask()
	 */
	@Override
	public void doTask() throws Exception {
		boolean verbose = isVerbose();
		assert srcDir == null || this.srcDir.isDirectory() : this.srcDir;
		if ( ! destDir.exists()) destDir.mkdirs();
		assert destDir.isDirectory() : destDir;
		// The files to copy
		Collection<File> files = getFilesToCopy();
		// Destination -> source, to detect two sources hitting one destination.
		Map<File, File> copy2original = new HashMap<File, File>();
		// Directories sym-linked (not copied); their contents must be skipped.
		List<File> symDirs = new ArrayList<File>();
		for (final File in : files) {
			assert in.exists() : in;
			String path = srcDir == null ? in.getName() : FileUtils.getRelativePath(in, srcDir);
			File out = new File(destDir, path);
			// Make directory if needed
			out.getParentFile().mkdirs();
			// Overwrite?
			if (out.exists() && ! doOverwrite(in, out)) {
				skipped.add(in);
				if (verbose) {
					System.out.println("\tSkipped: " + in);
				}
				continue;
			}
			// sym link? Make a matching sym-link instead of copying contents
			if (FileUtils.isSymLink(in) && ! resolveSymlinks) {
				FileUtils.makeSymLink(in.getCanonicalFile(), out);
				if (in.isDirectory()) symDirs.add(in);
				continue;
			}
			// If we've sym-linked a directory, skip copying its sub-files.
			// BUGFIX: the old code used `continue` inside the symDirs loop, which only
			// continued that inner loop - files under sym-linked dirs were still copied.
			if (isUnderSymlinkedDir(symDirs, in)) {
				continue;
			}
			// Don't copy directories - just create matching ones
			if (in.isDirectory()) {
				out.mkdir();
			} else {
				// Not part of this class's spec - but almost always a mistake
				assert ! out.getName().contains("svn") : in;
				// check for overlapping files
				if (copy2original.containsKey(out)) {
					String msg = "Duplicate file: " + in + " will overwrite " + copy2original.get(out) + " in " + this;
					if (errorOnDuplicate) {
						throw new IllegalArgumentException(msg);
					}
					Log.w(LOGTAG, msg);
				}
				// copy the file
				FileUtils.copy(in, out);
				copy2original.put(out, in);
			}
			if (verbose) {
				System.out.println("\tCopied: " + in);
			}
		}
	}

	/** The files to copy: filtered srcFiles, or a recursive find in srcDir. */
	private Collection<File> getFilesToCopy() {
		if (srcDir != null) {
			return FileUtils.find(srcDir, filter, includeHiddenFiles);
		}
		assert srcFiles != null;
		if (filter == null) {
			return srcFiles;
		}
		// Filter the explicit list
		Collection<File> files = new ArrayList<File>();
		for (File file : srcFiles) {
			if (filter.accept(file)) {
				files.add(file);
			}
		}
		return files;
	}

	/** true if {@code in} lies inside one of the sym-linked directories. */
	private static boolean isUnderSymlinkedDir(List<File> symDirs, File in) {
		for (File symDir : symDirs) {
			if (FileUtils.contains(symDir, in)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * If true, a duplicate destination file raises an IllegalArgumentException
	 * from {@link #doTask()} instead of just a log warning.
	 */
	public void setExceptionOnDuplicate(boolean errorOnDuplicate) {
		this.errorOnDuplicate = errorOnDuplicate;
	}

	/**
	 * Should the existing file {@code out} be overwritten by {@code in}?
	 * @return true to overwrite, false to skip (the skip is recorded in {@link #skipped}).
	 */
	protected boolean doOverwrite(File in, File out) {
		if (overwriteIfNewer) {
			return in.lastModified() > out.lastModified();
		}
		return overwrite;
	}

	public void setFilter(FileFilter filter) {
		this.filter = filter;
	}

	/**
	 * This overwrites any previous filter, including negative filters.
	 *
	 * @param regexFilter
	 *            matches against the entire file path
	 * @see #setNegativeFilter(String)
	 */
	public void setFilter(String regexFilter) {
		filter = FileUtils.getRegexFilter(regexFilter);
	}

	/**
	 * If true (the default), hidden files are copied. Set to false to ignore
	 * hidden files such as .svn crap.
	 *
	 * @param include
	 *            true by default
	 * @see #setFilter(FileFilter)
	 * @see #setNegativeFilter(String)
	 */
	public void setIncludeHiddenFiles(boolean include) {
		// Only meaningful in directory mode; an explicit file list is copied as given.
		assert srcFiles == null;
		includeHiddenFiles = include;
	}

	/**
	 * Set the filter according to what it rejects. This overwrites any previous
	 * filter, including positive filters.
	 *
	 * @param regex
	 *            matches against the entire file path E.g. ".*\\.java" would
	 *            reject all .java files (and accept everything else).
	 */
	public void setNegativeFilter(String regex) {
		final FileFilter regexFilter = FileUtils.getRegexFilter(regex);
		setFilter(new FileFilter() {
			@Override
			public boolean accept(File pathname) {
				return ! regexFilter.accept(pathname);
			}
		});
	}

	/**
	 * If true (the default), files will try to overwrite existing files. If
	 * false, files that already exist will be quietly skipped over.
	 */
	public void setOverwrite(boolean overwrite) {
		this.overwrite = overwrite;
		if (overwrite) this.overwriteIfNewer = false;
	}

	public CopyTask setOverwriteIfNewer(boolean o) {
		this.overwriteIfNewer = o;
		if (o) this.overwrite = false;
		return this;
	}
}
| |
/**
* Copyright 2011 Galiel 3.14 Ltd. All rights reserved.
* Use is subject to license terms.
*
* Created on 28 December 2012
*/
package com.htmlspeed.server;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.Principal;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import javax.servlet.AsyncContext;
import javax.servlet.DispatcherType;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.servlet.http.Part;
/**
* RefreshServletRequest
*
* Passed to HtmlSpeedServlet.service when refreshing a state-less page.
*
* @author Eldad Zamler
* @version $Revision: 1.4 $$Date: 2013/07/25 06:55:57 $
*/
public class RefreshServletRequest implements HttpServletRequest
{
private HashMap<String, Enumeration<String>> _headers = new HashMap<String, Enumeration<String>>();
/**
* To be returned by getRequestURL.
*/
private StringBuffer _url;
/**
* To be returned by getRequestURI.
*/
private String _uri;
/**
* To be returned as value of "Host" header.
*/
private String _host;
/**
* To be returned by getQueryString.
*/
private String _queryString;
/**
* CONSTUCTOR
*
* @param url full url of refreshed page
*/
public RefreshServletRequest(String url)
{
_url = new StringBuffer(url);
int dSlash = _url.indexOf("//");
int sSlash = _url.indexOf("/", dSlash+2);
String uri = _url.substring(sSlash);
_host = _url.substring(dSlash+2, sSlash);
int index = uri.indexOf('?');
if (index < 0)
{
_uri = uri;
_queryString = null;
}
else
{
_uri = uri.substring(0, index);
_queryString = uri.substring(index + 1);
}
// Initializing request-header:
_headers.put("User-Agent", Collections.enumeration(Arrays.asList(new String[] {
"Mozilla/5.0 (X11; Linux i686 on x86_64; rv:9.0.1) Gecko/20100101 Firefox/9.0.1"})));
_headers.put("Accept", Collections.enumeration(Arrays.asList(new String[] {
"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"})));
_headers.put("Accept-Language", Collections.enumeration(Arrays.asList(new String[] {
"he-IL,he;q=0.8,en-US;q=0.6,en;q=0.4"})));
_headers.put("Accept-Encoding", Collections.enumeration(Arrays.asList(new String[] {
"gzip,deflate"})));
_headers.put("Accept-Charset", Collections.enumeration(Arrays.asList(new String[] {
"ISO-8859-1,utf-8;q=0.7,*;q=0.7"})));
_headers.put("Connection", Collections.enumeration(Arrays.asList(new String[] {
"keep-alive"})));
_headers.put("Host", Collections.enumeration(Arrays.asList(new String[] {
""})));
}
@Override
public Object getAttribute(String string) {return null;}
@Override
public Enumeration<String> getAttributeNames() {return null;}
@Override
public String getCharacterEncoding() {return null;}
@Override
public void setCharacterEncoding(String string) throws UnsupportedEncodingException {}
@Override
public int getContentLength() {return 0;}
@Override
public String getContentType() {return null;}
@Override
public ServletInputStream getInputStream() throws IOException {return null;}
@Override
public String getParameter(String string) {return null;}
@Override
public Enumeration<String> getParameterNames() {return null;}
@Override
public String[] getParameterValues(String string) {return null;}
@Override
public Map<String, String[]> getParameterMap() {return null;}
@Override
public String getProtocol()
{
return "HTTP/1.1";
}
@Override
public String getScheme()
{
return "http";
}
@Override
public String getServerName() {return null;}
@Override
public int getServerPort() {return 0;}
@Override
public BufferedReader getReader() throws IOException {return null;}
@Override
public String getRemoteAddr()
{
return "127.0.0.1-Refresh";
}
@Override
public String getRemoteHost() {return null;}
@Override
public void setAttribute(String string, Object o) {}
@Override
public void removeAttribute(String string){}
@Override
public Locale getLocale() {return null;}
@Override
public Enumeration<Locale> getLocales() {return null;}
@Override
public boolean isSecure() {return false;}
@Override
public RequestDispatcher getRequestDispatcher(String string) {return null;}
@Override
public String getRealPath(String string) {return null;}
@Override
public int getRemotePort() {return 0;}
@Override
public String getLocalName() {return null;}
@Override
public String getLocalAddr() {return null;}
@Override
public int getLocalPort() {return 0;}
@Override
public ServletContext getServletContext() {return null;}
@Override
public AsyncContext startAsync() throws IllegalStateException {return null;}
@Override
public AsyncContext startAsync(ServletRequest sr, ServletResponse sr1) throws IllegalStateException {return null;}
@Override
public boolean isAsyncStarted() {return false;}
@Override
public boolean isAsyncSupported() {return false;}
@Override
public AsyncContext getAsyncContext() {return null;}
@Override
public DispatcherType getDispatcherType() {return null;}
@Override
public String getAuthType() {return null;}
@Override
public Cookie[] getCookies() {return null;}
@Override
public long getDateHeader(String string) {return 0;}
@Override
public String getHeader(String name)
{
if (name.equalsIgnoreCase("host"))
return _host;
Enumeration<String> values = _headers.get(name);
if (values != null && values.hasMoreElements())
return values.nextElement();
return null;
}
@Override
public Enumeration<String> getHeaders(String name)
{
return _headers.get(name);
}
@Override
public Enumeration<String> getHeaderNames()
{
return Collections.enumeration(_headers.keySet());
}
@Override
public int getIntHeader(String string) {return 0;}
@Override
public String getMethod()
{
return "GET";
}
@Override
public String getPathInfo() {return null;}
@Override
public String getPathTranslated() {return null;}
@Override
public String getContextPath() {return null;}
@Override
public String getQueryString()
{
return _queryString;
}
@Override
public String getRemoteUser() {return null;}
@Override
public boolean isUserInRole(String string) {return false;}
@Override
public Principal getUserPrincipal() {return null;}
@Override
public String getRequestedSessionId() {return null;}
@Override
public String getRequestURI()
{
return _uri;
}
@Override
public StringBuffer getRequestURL()
{
return _url;
}
@Override
public String getServletPath() {return null;}
@Override
public HttpSession getSession(boolean bln) {return null;}
@Override
public HttpSession getSession() {return null;}
@Override
public boolean isRequestedSessionIdValid() {return false;}
@Override
public boolean isRequestedSessionIdFromCookie() {return false;}
@Override
public boolean isRequestedSessionIdFromURL() {return false;}
@Override
public boolean isRequestedSessionIdFromUrl() {return false;}
@Override
public boolean authenticate(HttpServletResponse hsr) throws IOException, ServletException {return false;}
@Override
public void login(String string, String string1) throws ServletException {}
@Override
public void logout() throws ServletException {}
@Override
public Collection<Part> getParts() throws IOException, ServletException {return null;}
@Override
public Part getPart(String string) throws IOException, ServletException {return null;}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.bpel.extvar.jdbc;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;
import javax.xml.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.extvar.jdbc.DbExternalVariable.Column;
import org.apache.ode.bpel.extvar.jdbc.DbExternalVariable.RowKey;
import org.apache.ode.bpel.extvar.jdbc.DbExternalVariable.RowVal;
import org.apache.ode.utils.DOMUtils;
import org.apache.ode.utils.ObjectPrinter;
import org.apache.ode.bpel.evar.ExternalVariableModule;
import org.apache.ode.bpel.evar.ExternalVariableModuleException;
import org.apache.ode.bpel.evar.IncompleteKeyException;
import org.w3c.dom.Element;
public class JdbcExternalVariableModule implements ExternalVariableModule {

    private static final Log __log = LogFactory.getLog(JdbcExternalVariableModule.class);

    public static final String JDBC_NS = "http://ode.apache.org/externalVariables/jdbc";

    /** Unique QName for the engine, this should be the element used for the external-variable configuration. */
    public static final QName NAME = new QName(JDBC_NS, "jdbc");

    /** Manually configured data sources. */
    private final HashMap<String, DataSource> _dataSources = new HashMap<String, DataSource>();

    /** Variables we know about via configure() method calls. */
    private final HashMap<EVarId, DbExternalVariable> _vars = new HashMap<EVarId, DbExternalVariable>();

    /**
     * Configures one external variable from its XML configuration element:
     * resolves the data source (by reference or JNDI), reads the table and
     * column definitions against the database meta-data, and registers the
     * resulting DbExternalVariable under (pid, extVarId).
     *
     * @param pid process id the variable belongs to
     * @param extVarId variable id within the process
     * @param config the jdbc:jdbc configuration element
     * @throws ExternalVariableModuleException on any invalid/missing configuration
     */
    public void configure(QName pid, String extVarId, Element config) throws ExternalVariableModuleException {
        EVarId evarId = new EVarId(pid, extVarId);
        DataSource ds = null;

        Element jndiDs = DOMUtils.findChildByName(config, new QName(JDBC_NS, "datasource-jndi"));
        Element jndiRef = DOMUtils.findChildByName(config, new QName(JDBC_NS, "datasource-ref"));
        Element initMode = DOMUtils.findChildByName(config, new QName(JDBC_NS, "init-mode"));
        if (jndiRef != null) {
            // Data source previously registered via registerDataSource().
            String refname = jndiRef.getTextContent().trim();
            ds = _dataSources.get(refname);
            if (ds == null)
                throw new ExternalVariableModuleException("Data source reference \"" + refname
                        + "\" not found for external variable " + evarId
                        + "; make sure to register the data source with the engine!");
        } else if (jndiDs != null) {
            // Data source looked up in JNDI.
            String name = jndiDs.getTextContent().trim();
            Object dsCandidate;
            InitialContext ctx;
            try {
                ctx = new InitialContext();
            } catch (Exception ex) {
                throw new ExternalVariableModuleException("Unable to access JNDI context for external variable " + evarId, ex);
            }
            try {
                dsCandidate = ctx.lookup(name);
            } catch (Exception ex) {
                throw new ExternalVariableModuleException("Lookup of data source for " + evarId + " failed.", ex);
            } finally {
                try {
                    ctx.close();
                } catch (NamingException e) { /* ignore */ }
            }
            if (dsCandidate == null)
                throw new ExternalVariableModuleException("Data source \"" + name + "\" not found in JNDI!");
            if (!(dsCandidate instanceof DataSource))
                throw new ExternalVariableModuleException("JNDI object \"" + name + "\" does not implement javax.sql.DataSource");
            ds = (DataSource) dsCandidate;
        }
        if (ds == null) {
            throw new ExternalVariableModuleException("No valid data source configuration for JDBC external varible " + evarId);
        }

        Connection conn;
        DatabaseMetaData metaData;
        try {
            conn = ds.getConnection();
            metaData = conn.getMetaData();
        } catch (Exception ex) {
            throw new ExternalVariableModuleException("Unable to open database connection for external variable " + evarId, ex);
        }

        try {
            DbExternalVariable dbev = new DbExternalVariable(evarId, ds);
            if (initMode != null) {
                try {
                    dbev._initType = InitType.valueOf(initMode.getTextContent().trim());
                } catch (Exception ex) {
                    throw new ExternalVariableModuleException("Invalid <init-mode> value: " + initMode.getTextContent().trim());
                }
            }

            Element tableName = DOMUtils.findChildByName(config, new QName(JDBC_NS, "table"));
            if (tableName == null || tableName.getTextContent().trim().equals(""))
                throw new ExternalVariableModuleException("Must specify <table> for external variable " + evarId);
            // <table> may be qualified as "schema.table".
            String table = tableName.getTextContent().trim();
            String schema = null;
            if (table.indexOf('.') != -1) {
                schema = table.substring(0, table.indexOf('.'));
                table = table.substring(table.indexOf('.') + 1);
            }

            // Normalize identifier case to match how the database stores them.
            if (metaData.storesLowerCaseIdentifiers()) {
                table = table.toLowerCase();
                if (schema != null)
                    // BUGFIX: previously lower-cased `table` instead of `schema`.
                    schema = schema.toLowerCase();
            } else if (metaData.storesUpperCaseIdentifiers()) {
                table = table.toUpperCase();
                if (schema != null)
                    schema = schema.toUpperCase();
            }

            dbev.generatedKeys = metaData.supportsGetGeneratedKeys();

            ResultSet tables = metaData.getTables(null, schema, table, null);
            try {
                if (tables.next()) {
                    dbev.table = tables.getString("TABLE_NAME");
                    dbev.schema = tables.getString("TABLE_SCHEM");
                } else
                    throw new ExternalVariableModuleException("Table \"" + table + "\" not found in database.");
            } finally {
                // Close on all paths (previously leaked when the table was missing).
                tables.close();
            }

            List<Element> columns = DOMUtils.findChildrenByName(config, new QName(JDBC_NS, "column"));
            for (Element col : columns) {
                String name = col.getAttribute("name");
                String colname = col.getAttribute("column-name");
                String key = col.getAttribute("key");
                String gentype = col.getAttribute("generator");
                String expression = col.getAttribute("expression");

                // Apply defaults for optional attributes.
                if (key == null || "".equals(key))
                    key = "no";
                if (gentype == null || "".equals(gentype))
                    gentype = GenType.none.toString();
                if (colname == null || "".equals(colname))
                    colname = name;
                if (name == null || "".equals(name))
                    throw new ExternalVariableModuleException("External variable " + evarId
                            + " <column> element must have \"name\" attribute. ");

                if (metaData.storesLowerCaseIdentifiers())
                    colname = colname.toLowerCase();
                else if (metaData.storesUpperCaseIdentifiers())
                    colname = colname.toUpperCase();

                GenType gtype;
                try {
                    gtype = GenType.valueOf(gentype);
                } catch (Exception ex) {
                    throw new ExternalVariableModuleException("External variable " + evarId + " column \"" + name
                            + "\" generator type \"" + gentype + "\" is unknown.");
                }
                if (gtype == GenType.expression && (expression == null || "".equals(expression)))
                    throw new ExternalVariableModuleException("External variable " + evarId + " column \"" + name
                            + "\" used \"expression\" generator, but did not specify an expression");

                Column c = dbev.new Column(name, colname, key.equalsIgnoreCase("yes"), gtype, expression);
                ResultSet cmd = metaData.getColumns(null, dbev.schema, dbev.table, colname);
                try {
                    if (cmd.next()) {
                        c.dataType = cmd.getInt("DATA_TYPE");
                        c.nullok = cmd.getInt("NULLABLE") != 0;
                    } else
                        throw new ExternalVariableModuleException("External variable " + evarId + " referenced "
                                + "non-existant column \"" + colname + "\"!");
                } finally {
                    // Close on all paths (previously never closed).
                    cmd.close();
                }
                dbev.addColumn(c);
            }

            if (dbev.numColumns() == 0)
                throw new ExternalVariableModuleException("External variable " + evarId + " did not have any <column> elements!");

            _vars.put(evarId, dbev);
        } catch (SQLException se) {
            throw new ExternalVariableModuleException("SQL Error", se);
        } finally {
            try {
                conn.close();
            } catch (SQLException e) {
                // ignore: nothing useful to do on close failure
            }
        }
    }

    public QName getName() {
        return NAME;
    }

    public boolean isTransactional() {
        return true;
    }

    public void shutdown() {
    }

    public void start() {
    }

    public void stop() {
    }

    /**
     * Writes a variable row: depending on the variable's init-mode, tries an
     * UPDATE first and/or falls back to an INSERT, transferring any
     * database-generated keys back into the locator.
     */
    public Value writeValue(QName varType, Value newval) throws ExternalVariableModuleException {
        EVarId evarId = new EVarId(newval.locator.pid, newval.locator.varId);
        DbExternalVariable evar = _vars.get(evarId);
        if (evar == null)
            throw new ExternalVariableModuleException("No such variable. "); // todo

        RowKey key = evar.keyFromLocator(newval.locator);
        RowVal val = evar.parseXmlRow(evar.new RowVal(), (Element) newval.value);
        if (__log.isDebugEnabled())
            __log.debug("JdbcExternalVariable.writeValue() RowKey: " + key + " RowVal: " + val);

        if (!key.missingValues() && evar._initType == InitType.delete_insert) {
            // do delete...
            throw new ExternalVariableModuleException("Delete not implemented. "); // todo
        }

        // should we try an update first? to do this we need to have all the required keys
        // and there should be some keys
        boolean tryupdatefirst = (evar._initType == InitType.update || evar._initType == InitType.update_insert)
                && !evar._keycolumns.isEmpty() && !key.missingDatabaseGeneratedValues();
        boolean insert = evar._initType != InitType.update;

        if (__log.isDebugEnabled())
            __log.debug("tryUpdateFirst: " + tryupdatefirst
                    + " insert: " + insert
                    + " initType: " + evar._initType
                    + " key.isEmpty: " + evar._keycolumns.isEmpty()
                    + " key.missingValues: " + key.missingValues()
                    + " key.missingDBValues: " + key.missingDatabaseGeneratedValues());

        try {
            if (tryupdatefirst)
                insert = execUpdate(evar, key, val) == 0;
            if (insert) {
                key = execInsert(evar, newval.locator, key, val);
                // Transfer the keys obtained from the db.
                key.write(varType, newval.locator);
            }
        } catch (SQLException se) {
            throw new ExternalVariableModuleException("Error updating row.", se);
        }
        return newval;
    }

    /**
     * Reads the row identified by the locator and renders it as XML.
     */
    public Value readValue(QName varType, Locator locator) throws ExternalVariableModuleException {
        EVarId evarId = new EVarId(locator.pid, locator.varId);
        DbExternalVariable evar = _vars.get(evarId);
        if (evar == null)
            throw new ExternalVariableModuleException("No such variable: " + evarId);

        Element val;
        try {
            RowVal rowval = execSelect(evar, locator);
            val = evar.renderXmlRow(locator, varType, rowval);
        } catch (SQLException se) {
            throw new ExternalVariableModuleException("SQL Error.", se);
        }
        return new Value(locator, val, null);
    }

    /**
     * Manually register a data source. Handy if you don't want to use JNDI to look these up.
     *
     * @param dsName
     * @param ds
     */
    public void registerDataSource(String dsName, DataSource ds) {
        _dataSources.put(dsName, ds);
    }

    /**
     * Executes the variable's UPDATE statement; returns the updated row count.
     */
    int execUpdate(DbExternalVariable dbev, RowKey key, RowVal values) throws SQLException {
        Connection conn = dbev.dataSource.getConnection();
        try {
            if (__log.isDebugEnabled()) {
                __log.debug("execUpdate: key=" + key + " values=" + values);
                __log.debug("Prepare statement: " + dbev.update);
            }
            // Statement is closed implicitly when conn is closed in the finally block.
            PreparedStatement stmt = conn.prepareStatement(dbev.update);
            int idx = 1;
            // SET parameters come first...
            for (Column c : dbev._updcolumns) {
                Object val = values.get(c.name);
                if (__log.isDebugEnabled()) __log.debug("Set value parameter " + idx + ": " + val);
                if (val == null)
                    stmt.setNull(idx, c.dataType);
                else
                    stmt.setObject(idx, val);
                idx++;
            }
            // ... followed by the WHERE-clause key parameters.
            for (Column ck : dbev._keycolumns) {
                Object val = key.get(ck.name);
                if (__log.isDebugEnabled()) __log.debug("Set key parameter " + idx + ": " + val);
                if (val == null)
                    stmt.setNull(idx, ck.dataType);
                else
                    stmt.setObject(idx, val);
                idx++;
            }
            return stmt.executeUpdate();
        } finally {
            conn.close();
        }
    }

    /**
     * Executes the variable's SELECT statement for the given locator.
     *
     * @return the row values, or null when the key is incomplete (db-generated
     *         parts missing) or no matching row exists
     * @throws IncompleteKeyException when non-generated key values are missing
     */
    RowVal execSelect(DbExternalVariable dbev, Locator locator) throws SQLException, ExternalVariableModuleException {
        RowKey rowkey = dbev.keyFromLocator(locator);
        if (__log.isDebugEnabled()) __log.debug("execSelect: " + rowkey);
        if (rowkey.missingDatabaseGeneratedValues()) {
            return null;
        }
        if (rowkey.missingValues()) {
            throw new IncompleteKeyException(rowkey.getMissing());
        }

        RowVal ret = dbev.new RowVal();
        Connection conn = dbev.dataSource.getConnection();
        try {
            if (__log.isDebugEnabled()) __log.debug("Prepare statement: " + dbev.select);
            PreparedStatement stmt = conn.prepareStatement(dbev.select);
            int idx = 1;
            for (Object k : rowkey) {
                if (__log.isDebugEnabled()) __log.debug("Set key parameter " + idx + ": " + k);
                stmt.setObject(idx++, k);
            }
            ResultSet rs = stmt.executeQuery();
            try {
                if (rs.next()) {
                    for (Column cr : dbev._columns) {
                        Object val = rs.getObject(cr.idx + 1);
                        if (__log.isDebugEnabled()) __log.debug("Result column index " + cr.idx + ": " + val);
                        ret.set(cr.idx, val);
                    }
                } else
                    return null;
            } finally {
                rs.close();
            }
        } finally {
            conn.close();
        }
        return ret;
    }

    /**
     * Executes the variable's INSERT statement and fills in any
     * database-generated key values on the returned RowKey.
     */
    RowKey execInsert(DbExternalVariable dbev, Locator locator, RowKey keys, RowVal values) throws SQLException {
        Connection conn = dbev.dataSource.getConnection();
        try {
            if (__log.isDebugEnabled()) {
                __log.debug("execInsert: keys=" + keys + " values=" + values);
                __log.debug("Prepare statement: " + dbev.insert);
                __log.debug("missingDatabaseGeneratedValues: " + keys.missingDatabaseGeneratedValues());
                __log.debug("_autoColNames: " + ObjectPrinter.stringifyNvList(dbev._autoColNames));
            }
            // Request generated keys only when some key values must come from the db.
            PreparedStatement stmt = keys.missingDatabaseGeneratedValues()
                    ? conn.prepareStatement(dbev.insert, dbev._autoColNames)
                    : conn.prepareStatement(dbev.insert);

            int idx = 1;
            for (Column c : dbev._inscolumns) {
                Object val = c.getValue(c.name, keys, values, locator.iid);
                values.put(c.name, val);
                if (__log.isDebugEnabled()) __log.debug("Set parameter " + idx + ": " + val);
                if (val == null)
                    stmt.setNull(idx, c.dataType);
                else
                    stmt.setObject(idx, val);
                idx++;
            }

            stmt.execute();

            // Copy key values we already had from the inserted values.
            for (Column ck : keys._columns) {
                Object val = values.get(ck.name);
                if (__log.isDebugEnabled()) __log.debug("Key " + ck.name + ": " + val);
                keys.put(ck.name, val);
            }

            if (keys.missingDatabaseGeneratedValues()) {
                // With JDBC 3, we can get the values of the key columns (if the db supports it)
                ResultSet keyRS = stmt.getGeneratedKeys();
                if (keyRS == null)
                    throw new SQLException("Database did not return generated keys");
                keyRS.next();
                for (Column ck : keys._columns) {
                    Object value = keyRS.getObject(ck.idx + 1);
                    if (__log.isDebugEnabled()) __log.debug("Generated key " + ck.name + ": " + value);
                    keys.put(ck.name, value);
                }
            }
            return keys;
        } finally {
            conn.close();
        }
    }
}
| |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.reef;
import org.apache.reef.client.*;
import org.apache.reef.tang.Configuration;
import org.apache.reef.tang.JavaConfigurationBuilder;
import org.apache.reef.tang.Tang;
import org.apache.reef.tang.annotations.Parameter;
import org.apache.reef.tang.annotations.Unit;
import org.apache.reef.tang.exceptions.BindException;
import org.apache.reef.wake.EventHandler;
import org.apache.reef.wake.remote.impl.ObjectSerializableCodec;
import javax.inject.Inject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.logging.Level;
import java.util.logging.Logger;
import tdb.reef.param.*;
/**
 * TDB REEF application client.
 *
 * <p>Submits the job-driver configuration to REEF, relays interactive
 * commands from stdin to the running job driver, and blocks in
 * {@link #waitForCompletion()} until the driver reports completion,
 * failure, or a runtime error.</p>
 */
@Unit
public class Client {
    /**
     * Standard java logger.
     */
    private static final Logger LOG =
        Logger.getLogger(Client.class.getName());
    /**
     * Codec to translate messages to and from the job driver.
     */
    private static final ObjectSerializableCodec<String> CODEC =
        new ObjectSerializableCodec<>();
    /**
     * Reference to the REEF framework.
     * This variable is injected automatically in the constructor.
     */
    private final REEF reef;
    /**
     * Job Driver configuration.
     */
    private final Configuration driverConfiguration;
    /**
     * If true, take commands from stdin; otherwise, in non-interactive mode.
     */
    private final boolean isInteractive;
    /**
     * Command prompt reader for the interactive mode (stdin).
     */
    private final BufferedReader prompt;
    /**
     * A reference to the running job that allows client to
     * send messages back to the job driver.
     * Guarded by {@code this}; set by {@link RunningJobHandler},
     * cleared by {@link #stopAndNotify()}.
     */
    private RunningJob runningJob;
    /**
     * Start timestamp of the current task.
     */
    private long startTime = 0;
    /**
     * Total time spent performing tasks in Evaluators.
     */
    private long totalTime = 0;
    /**
     * Number of experiments ran so far.
     */
    private int numRuns = 0;
    /**
     * Set to false when job driver is done. Guarded by {@code this}.
     */
    public boolean isBusy = true;
    /**
     * Last result returned from the job driver.
     */
    private String lastResult;
    // Akka address of the master, reported by the driver via JobMessageHandler.
    private String masterAkka = "unknown";
    private final int numWorkers;
    private final int timeout;
    /**
     * TDB REEF application client.
     * Parameters are injected automatically by TANG.
     *
     * @param reef       Reference to the REEF framework.
     * @param numWorkers number of worker evaluators to request.
     * @param timeout    job timeout.
     * @param memory     evaluator memory, in MB.
     * @throws BindException configuration error while building the
     *         driver configuration.
     */
    @Inject
    Client(final REEF reef,
           @Parameter(Main.NumWorkers.class) final Integer numWorkers,
           @Parameter(Main.Timeout.class) final Integer timeout,
           @Parameter(Memory.class) final Integer memory)
        throws BindException {
        this.reef = reef;
        this.numWorkers = numWorkers;
        this.timeout = timeout;
        this.isInteractive = true;
        this.prompt = this.isInteractive ?
            new BufferedReader(new InputStreamReader(System.in)) : null;
        final JavaConfigurationBuilder configBuilder =
            Tang.Factory.getTang().newConfigurationBuilder();
        configBuilder.addConfiguration(
            DriverConfiguration.CONF
                .set(DriverConfiguration.GLOBAL_LIBRARIES,
                    Client.class.getProtectionDomain()
                        .getCodeSource().getLocation().getFile())
                .set(DriverConfiguration.DRIVER_IDENTIFIER, "TDBReefYarn")
                .set(DriverConfiguration.ON_EVALUATOR_ALLOCATED,
                    Driver.EvaluatorAllocatedHandler.class)
                .set(DriverConfiguration.ON_EVALUATOR_FAILED,
                    Driver.EvaluatorFailedHandler.class)
                .set(DriverConfiguration.ON_CONTEXT_ACTIVE,
                    Driver.ActiveContextHandler.class)
                .set(DriverConfiguration.ON_CONTEXT_CLOSED,
                    Driver.ClosedContextHandler.class)
                .set(DriverConfiguration.ON_CONTEXT_FAILED,
                    Driver.FailedContextHandler.class)
                .set(DriverConfiguration.ON_TASK_RUNNING,
                    Driver.RunningTaskHandler.class)
                .set(DriverConfiguration.ON_DRIVER_STARTED,
                    Driver.StartHandler.class)
                .set(DriverConfiguration.ON_DRIVER_STOP,
                    Driver.StopHandler.class)
                .set(DriverConfiguration.ON_CLIENT_MESSAGE,
                    Driver.ClientMessageHandler.class)
                .build()
        );
        configBuilder.bindNamedParameter(Main.NumWorkers.class,
            "" + numWorkers);
        configBuilder.bindNamedParameter(Main.Timeout.class,
            "" + timeout);
        configBuilder.bindNamedParameter(Memory.class, "" + memory);
        this.driverConfiguration = configBuilder.build();
    }
    /**
     * @return a Configuration binding the ClientConfiguration
     * event handlers to this Client.
     */
    public static Configuration getClientConfiguration() {
        return ClientConfiguration.CONF
            .set(ClientConfiguration.ON_JOB_RUNNING,
                Client.RunningJobHandler.class)
            .set(ClientConfiguration.ON_JOB_MESSAGE,
                Client.JobMessageHandler.class)
            .set(ClientConfiguration.ON_JOB_COMPLETED,
                Client.CompletedJobHandler.class)
            .set(ClientConfiguration.ON_JOB_FAILED,
                Client.FailedJobHandler.class)
            .set(ClientConfiguration.ON_RUNTIME_ERROR,
                Client.RuntimeErrorHandler.class)
            .build();
    }
    /**
     * Launch the job driver.
     *
     * @throws BindException configuration error.
     */
    public void submit() {
        this.reef.submit(this.driverConfiguration);
    }
    /**
     * Process one interactive command from stdin and forward it to the
     * job driver where applicable.
     *
     * @param cmd the command string typed by the user.
     */
    public void processCmd(String cmd) {
        if (cmd.equals("help")){
            printList();
        } else if (cmd.equals("exit")) {
            //this.runningJob.send(CODEC.encode(cmd));
            this.runningJob.close();
            stopAndNotify();
        } else if (cmd.equals("master")) {
            System.out.println(masterAkka);
        } else if (cmd.equals("workers")) {
            this.runningJob.send(CODEC.encode(cmd));
        } else if (cmd.equals("add")) {
            System.out.println("Not implemented.");
        } else if (cmd.equals("remove")) {
            System.out.println("Not implemented.");
        } else {
            System.out.println("Illegal command.");
        }
    }
    /** Print the list of supported interactive commands. */
    private void printList() {
        System.out.println("Commands:");
        System.out.println("master - look up master");
        System.out.println("workers - look up workers");
        System.out.println("add - add a worker");
        System.out.println("remove - remove a worker");
        System.out.println("exit - terminate system");
    }
    /**
     * Notify the process in waitForCompletion() method that
     * the main process has finished.
     */
    private synchronized void stopAndNotify() {
        this.runningJob = null;
        this.isBusy = false;
        // BUG FIX: notifyAll() was commented out, leaving waitForCompletion()
        // blocked in wait() forever after the driver finished.
        this.notifyAll();
    }
    /**
     * Wait for the job driver to complete. This method is called
     * from Launch.main().
     *
     * @return the last result returned from the job driver, or null
     *         if the driver produced none.
     */
    public String waitForCompletion() {
        // Check isBusy and wait() under the same monitor so a notifyAll()
        // between the check and the wait cannot be lost.
        synchronized (this) {
            while (this.isBusy) {
                LOG.log(Level.FINE, "Waiting for the Job Driver to complete.");
                try {
                    this.wait();
                } catch (final InterruptedException ex) {
                    LOG.log(Level.WARNING, "Waiting for result interrupted.", ex);
                    // Preserve the interrupt status for callers up the stack.
                    Thread.currentThread().interrupt();
                }
            }
        }
        return this.lastResult;
    }
    /** Release the REEF framework handle. */
    public void close() {
        this.reef.close();
    }
    /**
     * Receive notification from the job driver that the job is running.
     */
    final class RunningJobHandler implements EventHandler<RunningJob> {
        @Override
        public void onNext(final RunningJob job) {
            LOG.log(Level.FINE, "Running job: {0}", job.getId());
            synchronized (Client.this) {
                Client.this.runningJob = job;
            }
        }
    }
    /**
     * Receive message from the job driver.
     * There is only one message, master's akka address.
     */
    final class JobMessageHandler implements EventHandler<JobMessage> {
        @Override
        public void onNext(final JobMessage message) {
            String msg = CODEC.decode(message.get());
            if (msg.startsWith("akka")) {
                masterAkka = msg;
                System.out.println("master Akka: " + masterAkka);
                System.out.println("");
            } else if (msg.startsWith("workers")) {
                System.out.println(msg);
            }
        }
    }
    /**
     * Receive notification from the job driver that the job had failed.
     */
    final class FailedJobHandler implements EventHandler<FailedJob> {
        @Override
        public void onNext(final FailedJob job) {
            LOG.log(Level.SEVERE, "Failed job: " + job.getId(),
                job.getReason().orElse(null));
            stopAndNotify();
        }
    }
    /**
     * Receive notification from the job driver that
     * the job had completed successfully.
     */
    final class CompletedJobHandler implements EventHandler<CompletedJob> {
        @Override
        public void onNext(final CompletedJob job) {
            LOG.log(Level.FINE, "Completed job: {0}", job.getId());
            stopAndNotify();
        }
    }
    /**
     * Receive notification that there was an exception
     * thrown from the job driver.
     */
    final class RuntimeErrorHandler implements EventHandler<FailedRuntime> {
        @Override
        public void onNext(final FailedRuntime error) {
            LOG.log(Level.SEVERE,
                "Error in job driver: " + error, error.getReason().orElse(null));
            stopAndNotify();
        }
    }
}
| |
package com.rey.material.widget;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.TypedArray;
import android.database.DataSetObserver;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.text.TextUtils.TruncateAt;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.HorizontalScrollView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.rey.material.R;
import com.rey.material.app.ThemeManager;
import com.rey.material.drawable.RippleDrawable;
import com.rey.material.util.ThemeUtil;
import com.rey.material.util.ViewUtil;
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public class TabPageIndicator extends HorizontalScrollView implements ViewPager.OnPageChangeListener, android.view.View.OnClickListener, ThemeManager.OnThemeChangedListener{
    protected int mStyleId;
    protected int mCurrentStyle = ThemeManager.THEME_UNDEFINED;
    private TabContainerLayout mTabContainer;
    private ViewPager mViewPager;
    // MODE_SCROLL or MODE_FIXED.
    private int mMode;
    private int mTabPadding = -1;
    private int mTabRippleStyle = 0;
    private int mTextAppearance = 0;
    private boolean mTabSingleLine = true;
    // Left offset and width (px) of the selection indicator bar.
    private int mIndicatorOffset;
    private int mIndicatorWidth;
    private int mIndicatorHeight = -1;
    private Paint mPaint;
    public static final int MODE_SCROLL = 0;
    public static final int MODE_FIXED = 1;
    private int mSelectedPosition;
    // True while the pager is being dragged/settling; suppresses snapping
    // the indicator to the selected tab until scrolling is idle.
    private boolean mScrolling = false;
    private boolean mIsRtl = false;
    // Pending runnable that scrolls the selected tab into view.
    private Runnable mTabAnimSelector;
    private ViewPager.OnPageChangeListener mListener;
    private DataSetObserver mObserver = new DataSetObserver(){
        @Override
        public void onChanged() {
            notifyDataSetChanged();
        }
        @Override
        public void onInvalidated() {
            notifyDataSetInvalidated();
        }
    };
    public TabPageIndicator(Context context) {
        super(context);
        init(context, null, 0, 0);
    }
    public TabPageIndicator(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs, 0, 0);
    }
    public TabPageIndicator(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context, attrs, defStyleAttr, 0);
    }
    public TabPageIndicator(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr);
        init(context, attrs, defStyleAttr, defStyleRes);
    }
    private void init(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes){
        setHorizontalScrollBarEnabled(false);
        mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mPaint.setStyle(Paint.Style.FILL);
        mPaint.setColor(ThemeUtil.colorAccent(context, 0xFFFFFFFF));
        mTabContainer = new TabContainerLayout(context);
        applyStyle(context, attrs, defStyleAttr, defStyleRes);
        if(isInEditMode())
            addTemporaryTab();
        mStyleId = ThemeManager.getStyleId(context, attrs, defStyleAttr, defStyleRes);
    }
    public void applyStyle(int resId){
        ViewUtil.applyStyle(this, resId);
        applyStyle(getContext(), null, 0, resId);
    }
    protected void applyStyle(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes){
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.TabPageIndicator, defStyleAttr, defStyleRes);
        int textAppearance = 0;
        int mode = -1;
        int rippleStyle = 0;
        for(int i = 0, count = a.getIndexCount(); i < count; i++){
            int attr = a.getIndex(i);
            if(attr == R.styleable.TabPageIndicator_tpi_tabPadding)
                mTabPadding = a.getDimensionPixelSize(attr, 0);
            else if(attr == R.styleable.TabPageIndicator_tpi_tabRipple)
                rippleStyle = a.getResourceId(attr, 0);
            else if(attr == R.styleable.TabPageIndicator_tpi_indicatorColor)
                mPaint.setColor(a.getColor(attr, 0));
            else if(attr == R.styleable.TabPageIndicator_tpi_indicatorHeight)
                mIndicatorHeight = a.getDimensionPixelSize(attr, 0);
            else if(attr == R.styleable.TabPageIndicator_tpi_tabSingleLine)
                mTabSingleLine = a.getBoolean(attr, true);
            else if(attr == R.styleable.TabPageIndicator_android_textAppearance)
                textAppearance = a.getResourceId(attr, 0);
            else if(attr == R.styleable.TabPageIndicator_tpi_mode)
                mode = a.getInteger(attr, 0);
        }
        a.recycle();
        // Fall back to default metrics when not set by the style.
        if(mTabPadding < 0)
            mTabPadding = ThemeUtil.dpToPx(context, 12);
        if(mIndicatorHeight < 0)
            mIndicatorHeight = ThemeUtil.dpToPx(context, 2);
        if(mode >= 0){
            if(mMode != mode || getChildCount() == 0){
                mMode = mode;
                removeAllViews();
                if(mMode == MODE_SCROLL) {
                    addView(mTabContainer, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT));
                    setFillViewport(false);
                }
                else if(mMode == MODE_FIXED){
                    addView(mTabContainer, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
                    setFillViewport(true);
                }
            }
        }
        if(textAppearance != 0 && mTextAppearance != textAppearance){
            mTextAppearance = textAppearance;
            for(int i = 0, count = mTabContainer.getChildCount(); i < count; i++){
                CheckedTextView tv = (CheckedTextView)mTabContainer.getChildAt(i);
                tv.setTextAppearance(context, mTextAppearance);
            }
        }
        if(rippleStyle != 0 && rippleStyle != mTabRippleStyle){
            mTabRippleStyle = rippleStyle;
            for(int i = 0, count = mTabContainer.getChildCount(); i < count; i++)
                ViewUtil.setBackground(mTabContainer.getChildAt(i), new RippleDrawable.Builder(getContext(), mTabRippleStyle).build());
        }
        if(mViewPager != null)
            notifyDataSetChanged();
        requestLayout();
    }
    @Override
    public void onThemeChanged(ThemeManager.OnThemeChangedEvent event) {
        int style = ThemeManager.getInstance().getCurrentStyle(mStyleId);
        if(mCurrentStyle != style){
            mCurrentStyle = style;
            applyStyle(mCurrentStyle);
        }
    }
    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();
        // Re-post the selector we saved
        if (mTabAnimSelector != null)
            post(mTabAnimSelector);
        if(mStyleId != 0) {
            ThemeManager.getInstance().registerOnThemeChangedListener(this);
            onThemeChanged(null);
        }
    }
    @Override
    public void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        if (mTabAnimSelector != null)
            removeCallbacks(mTabAnimSelector);
        if(mStyleId != 0)
            ThemeManager.getInstance().unregisterOnThemeChangedListener(this);
    }
    @Override
    public void onRtlPropertiesChanged(int layoutDirection) {
        boolean rtl = layoutDirection == LAYOUT_DIRECTION_RTL;
        if(mIsRtl != rtl) {
            mIsRtl = rtl;
            invalidate();
        }
    }
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int widthMode = MeasureSpec.getMode(widthMeasureSpec);
        int widthSize = MeasureSpec.getSize(widthMeasureSpec);
        int heightMode = MeasureSpec.getMode(heightMeasureSpec);
        int heightSize = MeasureSpec.getSize(heightMeasureSpec);
        int ws = widthMeasureSpec;
        if(ws != MeasureSpec.UNSPECIFIED)
            ws = MeasureSpec.makeMeasureSpec(widthSize - getPaddingLeft() - getPaddingRight(), widthMode);
        int hs = heightMeasureSpec;
        if(heightMode != MeasureSpec.UNSPECIFIED)
            hs = MeasureSpec.makeMeasureSpec(heightSize - getPaddingTop() - getPaddingBottom(), heightMode);
        mTabContainer.measure(ws, hs);
        int width = 0;
        switch (widthMode){
            case MeasureSpec.UNSPECIFIED:
                width = mTabContainer.getMeasuredWidth() + getPaddingLeft() + getPaddingRight();
                break;
            case MeasureSpec.AT_MOST:
                width = Math.min(mTabContainer.getMeasuredWidth() + getPaddingLeft() + getPaddingRight(), widthSize);
                break;
            case MeasureSpec.EXACTLY:
                width = widthSize;
                break;
        }
        int height = 0;
        switch (heightMode){
            case MeasureSpec.UNSPECIFIED:
                height = mTabContainer.getMeasuredHeight() + getPaddingTop() + getPaddingBottom();
                break;
            case MeasureSpec.AT_MOST:
                height = Math.min(mTabContainer.getMeasuredHeight() + getPaddingTop() + getPaddingBottom(), heightSize);
                break;
            case MeasureSpec.EXACTLY:
                height = heightSize;
                break;
        }
        // Re-measure exactly when the resolved size differs from the first pass.
        if(mTabContainer.getMeasuredWidth() != width - getPaddingLeft() - getPaddingRight() || mTabContainer.getMeasuredHeight() != height - getPaddingTop() - getPaddingBottom())
            mTabContainer.measure(MeasureSpec.makeMeasureSpec(width - getPaddingLeft() - getPaddingRight(), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height - getPaddingTop() - getPaddingBottom(), MeasureSpec.EXACTLY));
        setMeasuredDimension(width, height);
    }
    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        TextView tv = getTabView(mSelectedPosition);
        if(tv != null)
            updateIndicator(tv.getLeft(), tv.getMeasuredWidth());
    }
    private CheckedTextView getTabView(int position){
        return (CheckedTextView)mTabContainer.getChildAt(position);
    }
    private void animateToTab(final int position) {
        if(getTabView(position) == null)
            return;
        if (mTabAnimSelector != null)
            removeCallbacks(mTabAnimSelector);
        mTabAnimSelector = new Runnable() {
            public void run() {
                CheckedTextView tv = getTabView(position);
                if(!mScrolling) {
                    updateIndicator(tv.getLeft(), tv.getMeasuredWidth());
                }
                smoothScrollTo(tv.getLeft() - (getWidth() - tv.getWidth()) / 2 + getPaddingLeft(), 0);
                mTabAnimSelector = null;
            }
        };
        post(mTabAnimSelector);
    }
    /**
     * Set a listener will be called when the current page is changed.
     * @param listener The {@link android.support.v4.view.ViewPager.OnPageChangeListener} will be called.
     */
    public void setOnPageChangeListener(ViewPager.OnPageChangeListener listener) {
        mListener = listener;
    }
    /**
     * Set the ViewPager associate with this indicator view.
     * @param view The ViewPager view.
     */
    public void setViewPager(ViewPager view) {
        if (mViewPager == view)
            return;
        if (mViewPager != null){
            mViewPager.removeOnPageChangeListener(this);
            // BUG FIX: was view.getAdapter() — that queried the NEW pager,
            // leaving the observer registered on the old pager's adapter.
            PagerAdapter oldAdapter = mViewPager.getAdapter();
            if(oldAdapter != null)
                oldAdapter.unregisterDataSetObserver(mObserver);
        }
        PagerAdapter adapter = view.getAdapter();
        if (adapter == null)
            throw new IllegalStateException("ViewPager does not have adapter instance.");
        adapter.registerDataSetObserver(mObserver);
        mViewPager = view;
        view.addOnPageChangeListener(this);
        notifyDataSetChanged();
        onPageSelected(mViewPager.getCurrentItem());
    }
    /**
     * Set the ViewPager associate with this indicator view and the current position;
     * @param view The ViewPager view.
     * @param initialPosition The current position.
     */
    public void setViewPager(ViewPager view, int initialPosition) {
        setViewPager(view);
        setCurrentItem(initialPosition);
    }
    private void updateIndicator(int offset, int width){
        mIndicatorOffset = offset;
        mIndicatorWidth = width;
        invalidate();
    }
    @Override
    public void draw(@NonNull Canvas canvas) {
        super.draw(canvas);
        int x = mIndicatorOffset + getPaddingLeft();
        canvas.drawRect(x, getHeight() - mIndicatorHeight, x + mIndicatorWidth, getHeight(), mPaint);
        if(isInEditMode())
            canvas.drawRect(getPaddingLeft(), getHeight() - mIndicatorHeight, getPaddingLeft() + mTabContainer.getChildAt(0).getWidth(), getHeight(), mPaint);
    }
    @Override
    public void onPageScrollStateChanged(int state) {
        if(state == ViewPager.SCROLL_STATE_IDLE){
            mScrolling = false;
            TextView tv = getTabView(mSelectedPosition);
            if(tv != null) {
                updateIndicator(tv.getLeft(), tv.getMeasuredWidth());
            }
        }
        else
            mScrolling = true;
        if (mListener != null)
            mListener.onPageScrollStateChanged(state);
    }
    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        if (mListener != null)
            mListener.onPageScrolled(position, positionOffset, positionOffsetPixels);
        // Interpolate the indicator between the current tab and the next one.
        CheckedTextView tv_scroll = getTabView(position);
        CheckedTextView tv_next = getTabView(position + 1);
        if(tv_scroll != null && tv_next != null){
            int width_scroll = tv_scroll.getMeasuredWidth();
            int width_next = tv_next.getMeasuredWidth();
            float distance = (width_scroll + width_next) / 2f;
            int width = (int)(width_scroll + (width_next - width_scroll) * positionOffset + 0.5f);
            int offset = (int)(tv_scroll.getLeft() + width_scroll / 2f + distance * positionOffset - width / 2f + 0.5f);
            updateIndicator(offset, width);
        }
    }
    @Override
    public void onPageSelected(int position) {
        setCurrentItem(position);
        if (mListener != null)
            mListener.onPageSelected(position);
    }
    @Override
    public void onClick(android.view.View v) {
        int position = (Integer)v.getTag();
        if(position == mSelectedPosition && mListener != null)
            mListener.onPageSelected(position);
        mViewPager.setCurrentItem(position, true);
    }
    /**
     * Set the current page of this TabPageIndicator.
     * @param position The position of current page.
     */
    public void setCurrentItem(int position) {
        if(mSelectedPosition != position){
            CheckedTextView tv = getTabView(mSelectedPosition);
            if(tv != null)
                tv.setChecked(false);
        }
        mSelectedPosition = position;
        CheckedTextView tv = getTabView(mSelectedPosition);
        if(tv != null)
            tv.setChecked(true);
        animateToTab(position);
    }
    private void notifyDataSetChanged() {
        mTabContainer.removeAllViews();
        PagerAdapter adapter = mViewPager.getAdapter();
        final int count = adapter.getCount();
        // BUG FIX: was "> count" — a selection equal to count (one past the
        // last valid index) was not clamped.
        if (mSelectedPosition >= count)
            mSelectedPosition = count - 1;
        for (int i = 0; i < count; i++) {
            CharSequence title = adapter.getPageTitle(i);
            if (title == null)
                title = "NULL";
            CheckedTextView tv = new CheckedTextView(getContext());
            tv.setCheckMarkDrawable(null);
            tv.setText(title);
            tv.setGravity(Gravity.CENTER);
            tv.setTextAppearance(getContext(), mTextAppearance);
            if(mTabSingleLine)
                tv.setSingleLine(true);
            else {
                tv.setSingleLine(false);
                tv.setMaxLines(2);
            }
            tv.setEllipsize(TruncateAt.END);
            tv.setOnClickListener(this);
            tv.setTag(i);
            if(mTabRippleStyle > 0)
                ViewUtil.setBackground(tv, new RippleDrawable.Builder(getContext(), mTabRippleStyle).build());
            tv.setPadding(mTabPadding, 0, mTabPadding, 0);
            mTabContainer.addView(tv, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT));
        }
        setCurrentItem(mSelectedPosition);
        requestLayout();
    }
    private void notifyDataSetInvalidated() {
        PagerAdapter adapter = mViewPager.getAdapter();
        final int count = adapter.getCount();
        for (int i = 0; i < count; i++) {
            TextView tv = getTabView(i);
            CharSequence title = adapter.getPageTitle(i);
            if (title == null)
                title = "NULL";
            tv.setText(title);
        }
        requestLayout();
    }
    // Populates placeholder tabs so the view renders in the layout editor.
    private void addTemporaryTab(){
        for (int i = 0; i < 3; i++) {
            CharSequence title = null;
            if (i == 0)
                title = "TAB ONE";
            else if (i == 1)
                title = "TAB TWO";
            else if (i == 2)
                title = "TAB THREE";
            CheckedTextView tv = new CheckedTextView(getContext());
            tv.setCheckMarkDrawable(null);
            tv.setText(title);
            tv.setGravity(Gravity.CENTER);
            tv.setTextAppearance(getContext(), mTextAppearance);
            tv.setSingleLine(true);
            tv.setEllipsize(TruncateAt.END);
            tv.setTag(i);
            tv.setChecked(i == 0);
            if(mMode == MODE_SCROLL){
                tv.setPadding(mTabPadding, 0, mTabPadding, 0);
                mTabContainer.addView(tv, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT));
            }
            else if(mMode == MODE_FIXED){
                LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(0, LinearLayout.LayoutParams.MATCH_PARENT);
                params.weight = 1f;
                mTabContainer.addView(tv, params);
            }
        }
    }
    // Lays tabs out horizontally; sizes them per MODE_SCROLL / MODE_FIXED.
    private class TabContainerLayout extends FrameLayout{
        public TabContainerLayout(Context context) {
            super(context);
        }
        @Override
        protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
            int widthMode = MeasureSpec.getMode(widthMeasureSpec);
            int widthSize = MeasureSpec.getSize(widthMeasureSpec);
            int width = 0;
            int height = 0;
            if(mMode == MODE_SCROLL){
                // Scroll mode: every tab takes its natural width.
                int ws = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
                for (int i = 0; i < getChildCount(); i++) {
                    View child = getChildAt(i);
                    child.measure(ws, heightMeasureSpec);
                    width += child.getMeasuredWidth();
                    height = Math.max(height, child.getMeasuredHeight());
                }
                setMeasuredDimension(width, height);
            }
            else{
                if(widthMode != MeasureSpec.EXACTLY){
                    int ws = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
                    for (int i = 0; i < getChildCount(); i++) {
                        View child = getChildAt(i);
                        child.measure(ws, heightMeasureSpec);
                        width += child.getMeasuredWidth();
                        height = Math.max(height, child.getMeasuredHeight());
                    }
                    if(widthMode == MeasureSpec.UNSPECIFIED || width < widthSize)
                        setMeasuredDimension(widthSize, height);
                    else{
                        // Tabs overflow: distribute the available width evenly,
                        // giving the last tab the rounding remainder.
                        int childWidth = widthSize / getChildCount();
                        for (int i = 0, count = getChildCount(); i < count; i++) {
                            View child = getChildAt(i);
                            if(i != count - 1)
                                child.measure(MeasureSpec.makeMeasureSpec(childWidth, MeasureSpec.EXACTLY), heightMeasureSpec);
                            else
                                child.measure(MeasureSpec.makeMeasureSpec(widthSize - childWidth * (count - 1), MeasureSpec.EXACTLY), heightMeasureSpec);
                        }
                        setMeasuredDimension(widthSize, height);
                    }
                }
                else {
                    int childWidth = widthSize / getChildCount();
                    for (int i = 0, count = getChildCount(); i < count; i++) {
                        View child = getChildAt(i);
                        if(i != count - 1)
                            child.measure(MeasureSpec.makeMeasureSpec(childWidth, MeasureSpec.EXACTLY), heightMeasureSpec);
                        else
                            child.measure(MeasureSpec.makeMeasureSpec(widthSize - childWidth * (count - 1), MeasureSpec.EXACTLY), heightMeasureSpec);
                        height = Math.max(height, child.getMeasuredHeight());
                    }
                    setMeasuredDimension(widthSize, height);
                }
            }
            // Stretch every tab to the container's resolved height.
            int hs = MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY);
            for (int i = 0; i < getChildCount(); i++) {
                View child = getChildAt(i);
                if(child.getMeasuredHeight() != height)
                    child.measure(MeasureSpec.makeMeasureSpec(child.getMeasuredWidth(), MeasureSpec.EXACTLY), hs);
            }
        }
        @Override
        protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
            int childLeft = 0;
            int childTop = 0;
            int childRight = right - left;
            int childBottom = bottom - top;
            if(mIsRtl)
                // RTL: lay tabs out right-to-left.
                for(int i = 0, count = getChildCount(); i < count; i++){
                    View child = getChildAt(i);
                    child.layout(childRight - child.getMeasuredWidth(), childTop, childRight, childBottom);
                    childRight -= child.getMeasuredWidth();
                }
            else
                for(int i = 0, count = getChildCount(); i < count; i++){
                    View child = getChildAt(i);
                    child.layout(childLeft, childTop, childLeft + child.getMeasuredWidth(), childBottom);
                    childLeft += child.getMeasuredWidth();
                }
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.raptor.storage;
import com.facebook.presto.block.BlockEncodingManager;
import com.facebook.presto.metadata.FunctionRegistry;
import com.facebook.presto.orc.OrcDataSource;
import com.facebook.presto.orc.OrcRecordReader;
import com.facebook.presto.raptor.storage.OrcFileRewriter.OrcFileInfo;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.type.ArrayType;
import com.facebook.presto.spi.type.StandardTypes;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.spi.type.TypeSignature;
import com.facebook.presto.spi.type.TypeSignatureParameter;
import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.type.TypeRegistry;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.json.JsonCodec;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.util.BitSet;
import java.util.List;
import static com.facebook.presto.RowPagesBuilder.rowPagesBuilder;
import static com.facebook.presto.raptor.storage.OrcTestingUtil.createReader;
import static com.facebook.presto.raptor.storage.OrcTestingUtil.fileOrcDataSource;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.type.VarcharType.createVarcharType;
import static com.facebook.presto.tests.StructuralTestUtil.arrayBlockOf;
import static com.facebook.presto.tests.StructuralTestUtil.arrayBlocksEqual;
import static com.facebook.presto.tests.StructuralTestUtil.mapBlockOf;
import static com.facebook.presto.tests.StructuralTestUtil.mapBlocksEqual;
import static com.google.common.io.Files.createTempDir;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.airlift.json.JsonCodec.jsonCodec;
import static io.airlift.slice.Slices.utf8Slice;
import static java.nio.file.Files.readAllBytes;
import static java.util.UUID.randomUUID;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
@Test(singleThreaded = true)
public class TestOrcFileRewriter
{
    private static final JsonCodec<OrcFileMetadata> METADATA_CODEC = jsonCodec(OrcFileMetadata.class);

    // Per-class temporary directory for ORC files; removed in tearDown.
    private File temporary;

    @BeforeClass
    public void setup()
    {
        temporary = createTempDir();
    }

    @AfterClass(alwaysRun = true)
    public void tearDown()
            throws Exception
    {
        deleteRecursively(temporary.toPath(), ALLOW_INSECURE);
    }

    /**
     * Writes five rows spanning scalar, array, map and nested-array columns,
     * verifies the written file, then rewrites it with rows 1, 3 and 4 deleted
     * and verifies the surviving rows and the rewritten file metadata.
     */
    @Test
    public void testRewrite()
            throws Exception
    {
        TypeManager typeManager = new TypeRegistry();
        // associate typeManager with a function registry
        new FunctionRegistry(typeManager, new BlockEncodingManager(typeManager), new FeaturesConfig());
        ArrayType arrayType = new ArrayType(BIGINT);
        ArrayType arrayOfArrayType = new ArrayType(arrayType);
        Type mapType = typeManager.getParameterizedType(StandardTypes.MAP, ImmutableList.of(
                TypeSignatureParameter.of(createVarcharType(5).getTypeSignature()),
                TypeSignatureParameter.of(BOOLEAN.getTypeSignature())));
        List<Long> columnIds = ImmutableList.of(3L, 7L, 9L, 10L, 11L);
        List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(20), arrayType, mapType, arrayOfArrayType);

        File file = new File(temporary, randomUUID().toString());
        try (OrcFileWriter writer = new OrcFileWriter(columnIds, columnTypes, file)) {
            List<Page> pages = rowPagesBuilder(columnTypes)
                    .row(123L, "hello", arrayBlockOf(BIGINT, 1, 2), mapBlockOf(createVarcharType(5), BOOLEAN, "k1", true), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 5)))
                    .row(777L, "sky", arrayBlockOf(BIGINT, 3, 4), mapBlockOf(createVarcharType(5), BOOLEAN, "k2", false), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 6)))
                    .row(456L, "bye", arrayBlockOf(BIGINT, 5, 6), mapBlockOf(createVarcharType(5), BOOLEAN, "k3", true), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 7)))
                    .row(888L, "world", arrayBlockOf(BIGINT, 7, 8), mapBlockOf(createVarcharType(5), BOOLEAN, "k4", true), arrayBlockOf(arrayType, null, arrayBlockOf(BIGINT, 8), null))
                    .row(999L, "done", arrayBlockOf(BIGINT, 9, 10), mapBlockOf(createVarcharType(5), BOOLEAN, "k5", true), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 9, 10)))
                    .build();
            writer.appendPages(pages);
        }

        try (OrcDataSource dataSource = fileOrcDataSource(file)) {
            OrcRecordReader reader = createReader(dataSource, columnIds, columnTypes);
            assertEquals(reader.getReaderRowCount(), 5);
            assertEquals(reader.getFileRowCount(), 5);
            assertEquals(reader.getSplitLength(), file.length());

            assertEquals(reader.nextBatch(), 5);

            Block column0 = reader.readBlock(BIGINT, 0);
            assertNoNulls(column0, 5);
            assertEquals(BIGINT.getLong(column0, 0), 123L);
            assertEquals(BIGINT.getLong(column0, 1), 777L);
            assertEquals(BIGINT.getLong(column0, 2), 456L);
            assertEquals(BIGINT.getLong(column0, 3), 888L);
            assertEquals(BIGINT.getLong(column0, 4), 999L);

            Block column1 = reader.readBlock(createVarcharType(20), 1);
            assertNoNulls(column1, 5);
            assertEquals(createVarcharType(20).getSlice(column1, 0), utf8Slice("hello"));
            assertEquals(createVarcharType(20).getSlice(column1, 1), utf8Slice("sky"));
            assertEquals(createVarcharType(20).getSlice(column1, 2), utf8Slice("bye"));
            assertEquals(createVarcharType(20).getSlice(column1, 3), utf8Slice("world"));
            assertEquals(createVarcharType(20).getSlice(column1, 4), utf8Slice("done"));

            Block column2 = reader.readBlock(arrayType, 2);
            assertNoNulls(column2, 5);
            assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column2, 0), arrayBlockOf(BIGINT, 1, 2)));
            assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column2, 1), arrayBlockOf(BIGINT, 3, 4)));
            assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column2, 2), arrayBlockOf(BIGINT, 5, 6)));
            assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column2, 3), arrayBlockOf(BIGINT, 7, 8)));
            assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column2, 4), arrayBlockOf(BIGINT, 9, 10)));

            Block column3 = reader.readBlock(mapType, 3);
            assertNoNulls(column3, 5);
            // NOTE(review): uses arrayType.getObject to extract the map column's
            // entries block; presumably equivalent to mapType.getObject here — confirm.
            assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, arrayType.getObject(column3, 0), mapBlockOf(createVarcharType(5), BOOLEAN, "k1", true)));
            assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, arrayType.getObject(column3, 1), mapBlockOf(createVarcharType(5), BOOLEAN, "k2", false)));
            assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, arrayType.getObject(column3, 2), mapBlockOf(createVarcharType(5), BOOLEAN, "k3", true)));
            assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, arrayType.getObject(column3, 3), mapBlockOf(createVarcharType(5), BOOLEAN, "k4", true)));
            assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, arrayType.getObject(column3, 4), mapBlockOf(createVarcharType(5), BOOLEAN, "k5", true)));

            Block column4 = reader.readBlock(arrayOfArrayType, 4);
            assertNoNulls(column4, 5);
            assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column4, 0), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 5))));
            assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column4, 1), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 6))));
            assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column4, 2), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 7))));
            assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column4, 3), arrayBlockOf(arrayType, null, arrayBlockOf(BIGINT, 8), null)));
            assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column4, 4), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 9, 10))));

            assertEquals(reader.nextBatch(), -1);

            OrcFileMetadata orcFileMetadata = METADATA_CODEC.fromJson(reader.getUserMetadata().get(OrcFileMetadata.KEY).getBytes());
            assertEquals(orcFileMetadata, new OrcFileMetadata(ImmutableMap.<Long, TypeSignature>builder()
                    .put(3L, BIGINT.getTypeSignature())
                    .put(7L, createVarcharType(20).getTypeSignature())
                    .put(9L, arrayType.getTypeSignature())
                    .put(10L, mapType.getTypeSignature())
                    .put(11L, arrayOfArrayType.getTypeSignature())
                    .build()));
        }

        // Delete rows 1, 3 and 4; rows 0 ("hello") and 2 ("bye") survive.
        BitSet rowsToDelete = new BitSet(5);
        rowsToDelete.set(1);
        rowsToDelete.set(3);
        rowsToDelete.set(4);

        File newFile = new File(temporary, randomUUID().toString());
        OrcFileInfo info = OrcFileRewriter.rewrite(file, newFile, rowsToDelete);
        assertEquals(info.getRowCount(), 2);
        assertEquals(info.getUncompressedSize(), 78);

        try (OrcDataSource dataSource = fileOrcDataSource(newFile)) {
            OrcRecordReader reader = createReader(dataSource, columnIds, columnTypes);
            assertEquals(reader.getReaderRowCount(), 2);
            assertEquals(reader.getFileRowCount(), 2);
            assertEquals(reader.getSplitLength(), newFile.length());

            assertEquals(reader.nextBatch(), 2);

            Block column0 = reader.readBlock(BIGINT, 0);
            assertNoNulls(column0, 2);
            assertEquals(BIGINT.getLong(column0, 0), 123L);
            assertEquals(BIGINT.getLong(column0, 1), 456L);

            Block column1 = reader.readBlock(createVarcharType(20), 1);
            assertNoNulls(column1, 2);
            assertEquals(createVarcharType(20).getSlice(column1, 0), utf8Slice("hello"));
            assertEquals(createVarcharType(20).getSlice(column1, 1), utf8Slice("bye"));

            Block column2 = reader.readBlock(arrayType, 2);
            assertNoNulls(column2, 2);
            assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column2, 0), arrayBlockOf(BIGINT, 1, 2)));
            assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column2, 1), arrayBlockOf(BIGINT, 5, 6)));

            Block column3 = reader.readBlock(mapType, 3);
            assertNoNulls(column3, 2);
            assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, arrayType.getObject(column3, 0), mapBlockOf(createVarcharType(5), BOOLEAN, "k1", true)));
            assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, arrayType.getObject(column3, 1), mapBlockOf(createVarcharType(5), BOOLEAN, "k3", true)));

            Block column4 = reader.readBlock(arrayOfArrayType, 4);
            assertNoNulls(column4, 2);
            assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column4, 0), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 5))));
            assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column4, 1), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 7))));

            assertEquals(reader.nextBatch(), -1);

            OrcFileMetadata orcFileMetadata = METADATA_CODEC.fromJson(reader.getUserMetadata().get(OrcFileMetadata.KEY).getBytes());
            assertEquals(orcFileMetadata, new OrcFileMetadata(ImmutableMap.<Long, TypeSignature>builder()
                    .put(3L, BIGINT.getTypeSignature())
                    .put(7L, createVarcharType(20).getTypeSignature())
                    .put(9L, arrayType.getTypeSignature())
                    .put(10L, mapType.getTypeSignature())
                    .put(11L, arrayOfArrayType.getTypeSignature())
                    .build()));
        }
    }

    /**
     * Verifies that a file written without metadata rewrites cleanly and the
     * rewritten file still carries no OrcFileMetadata user-metadata entry.
     */
    @Test
    public void testRewriteWithoutMetadata()
            throws Exception
    {
        List<Long> columnIds = ImmutableList.of(3L, 7L);
        List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(20));

        File file = new File(temporary, randomUUID().toString());
        try (OrcFileWriter writer = new OrcFileWriter(columnIds, columnTypes, file, false)) {
            List<Page> pages = rowPagesBuilder(columnTypes)
                    .row(123L, "hello")
                    .row(777L, "sky")
                    .build();
            writer.appendPages(pages);
        }

        try (OrcDataSource dataSource = fileOrcDataSource(file)) {
            OrcRecordReader reader = createReader(dataSource, columnIds, columnTypes);
            assertEquals(reader.getReaderRowCount(), 2);
            assertEquals(reader.getFileRowCount(), 2);
            assertEquals(reader.getSplitLength(), file.length());

            assertEquals(reader.nextBatch(), 2);

            Block column0 = reader.readBlock(BIGINT, 0);
            assertNoNulls(column0, 2);
            assertEquals(BIGINT.getLong(column0, 0), 123L);
            assertEquals(BIGINT.getLong(column0, 1), 777L);

            Block column1 = reader.readBlock(createVarcharType(20), 1);
            assertNoNulls(column1, 2);
            assertEquals(createVarcharType(20).getSlice(column1, 0), utf8Slice("hello"));
            assertEquals(createVarcharType(20).getSlice(column1, 1), utf8Slice("sky"));

            assertFalse(reader.getUserMetadata().containsKey(OrcFileMetadata.KEY));
        }

        BitSet rowsToDelete = new BitSet(5);
        rowsToDelete.set(1);

        File newFile = new File(temporary, randomUUID().toString());
        OrcFileInfo info = OrcFileRewriter.rewrite(file, newFile, rowsToDelete);
        assertEquals(info.getRowCount(), 1);
        assertEquals(info.getUncompressedSize(), 13);

        try (OrcDataSource dataSource = fileOrcDataSource(newFile)) {
            OrcRecordReader reader = createReader(dataSource, columnIds, columnTypes);
            assertEquals(reader.getReaderRowCount(), 1);
            assertEquals(reader.getFileRowCount(), 1);
            assertEquals(reader.getSplitLength(), newFile.length());

            assertEquals(reader.nextBatch(), 1);

            Block column0 = reader.readBlock(BIGINT, 0);
            assertNoNulls(column0, 1);
            assertEquals(BIGINT.getLong(column0, 0), 123L);

            Block column1 = reader.readBlock(createVarcharType(20), 1);
            assertNoNulls(column1, 1);
            assertEquals(createVarcharType(20).getSlice(column1, 0), utf8Slice("hello"));

            assertFalse(reader.getUserMetadata().containsKey(OrcFileMetadata.KEY));
        }
    }

    /**
     * Deleting every row must report zero rows/bytes and must not create the
     * output file at all.
     */
    @Test
    public void testRewriteAllRowsDeleted()
            throws Exception
    {
        List<Long> columnIds = ImmutableList.of(3L);
        List<Type> columnTypes = ImmutableList.of(BIGINT);

        File file = new File(temporary, randomUUID().toString());
        try (OrcFileWriter writer = new OrcFileWriter(columnIds, columnTypes, file)) {
            writer.appendPages(rowPagesBuilder(columnTypes).row(123L).row(456L).build());
        }

        BitSet rowsToDelete = new BitSet();
        rowsToDelete.set(0);
        rowsToDelete.set(1);

        File newFile = new File(temporary, randomUUID().toString());
        OrcFileInfo info = OrcFileRewriter.rewrite(file, newFile, rowsToDelete);
        assertEquals(info.getRowCount(), 0);
        assertEquals(info.getUncompressedSize(), 0);
        assertFalse(newFile.exists());
    }

    /**
     * Rewriting with an empty delete set must produce a byte-identical copy.
     */
    @Test
    public void testRewriteNoRowsDeleted()
            throws Exception
    {
        List<Long> columnIds = ImmutableList.of(3L);
        List<Type> columnTypes = ImmutableList.of(BIGINT);

        File file = new File(temporary, randomUUID().toString());
        try (OrcFileWriter writer = new OrcFileWriter(columnIds, columnTypes, file)) {
            writer.appendPages(rowPagesBuilder(columnTypes).row(123L).row(456L).build());
        }

        BitSet rowsToDelete = new BitSet();

        File newFile = new File(temporary, randomUUID().toString());
        OrcFileInfo info = OrcFileRewriter.rewrite(file, newFile, rowsToDelete);
        assertEquals(info.getRowCount(), 2);
        assertEquals(info.getUncompressedSize(), 16);
        assertEquals(readAllBytes(newFile.toPath()), readAllBytes(file.toPath()));
    }

    /**
     * Verifies the uncompressed-size accounting across all scalar types,
     * including a fully-null row.
     */
    @Test
    public void testUncompressedSize()
            throws Exception
    {
        List<Long> columnIds = ImmutableList.of(1L, 2L, 3L, 4L, 5L);
        List<Type> columnTypes = ImmutableList.of(BOOLEAN, BIGINT, DOUBLE, createVarcharType(10), VARBINARY);

        File file = new File(temporary, randomUUID().toString());
        try (OrcFileWriter writer = new OrcFileWriter(columnIds, columnTypes, file)) {
            List<Page> pages = rowPagesBuilder(columnTypes)
                    .row(true, 123L, 98.7, "hello", utf8Slice("abc"))
                    .row(false, 456L, 65.4, "world", utf8Slice("xyz"))
                    .row(null, null, null, null, null)
                    .build();
            writer.appendPages(pages);
        }

        File newFile = new File(temporary, randomUUID().toString());
        OrcFileInfo info = OrcFileRewriter.rewrite(file, newFile, new BitSet());
        assertEquals(info.getRowCount(), 3);
        assertEquals(info.getUncompressedSize(), 55);
    }

    /**
     * Asserts that the block has the expected position count and that no
     * position is null. Replaces the repeated isNull loops.
     */
    private static void assertNoNulls(Block block, int expectedPositionCount)
    {
        assertEquals(block.getPositionCount(), expectedPositionCount);
        for (int i = 0; i < expectedPositionCount; i++) {
            assertFalse(block.isNull(i));
        }
    }
}
| |
/*
* #%L
* Native ARchive plugin for Maven
* %%
* Copyright (C) 2002 - 2014 NAR Maven Plugin developers.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.github.maven_nar.cpptasks;
import java.util.Enumeration;
import java.util.List;
import java.util.Vector;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import com.github.maven_nar.cpptasks.compiler.CommandLineCompiler;
import com.github.maven_nar.cpptasks.compiler.Compiler;
import com.github.maven_nar.cpptasks.compiler.Processor;
import com.github.maven_nar.cpptasks.gcc.GccCCompiler;
import com.github.maven_nar.cpptasks.types.CompilerArgument;
import com.github.maven_nar.cpptasks.types.ConditionalPath;
import com.github.maven_nar.cpptasks.types.DefineSet;
import com.github.maven_nar.cpptasks.types.IncludePath;
import com.github.maven_nar.cpptasks.types.SystemIncludePath;
import com.github.maven_nar.cpptasks.types.UndefineArgument;
import java.io.File;
/**
* A compiler definition. compiler elements may be placed either as children of
* a cc element or the project element. A compiler element with an id attribute
* may be referenced from compiler elements with refid or extends attributes.
*
* @author Adam Murdoch
*/
public final class CompilerDef extends ProcessorDef {
  /** Define/undefine sets contributed by nested defineset elements. */
  private final Vector<DefineSet> defineSets = new Vector<>();
  /** Whether compilation should be wrapped with ccache; defaults to false. */
  private Boolean ccache = Boolean.FALSE;
  /** Tri-state: null means "inherit from default providers". */
  private Boolean exceptions;
  /** Tri-state: null means "inherit from default providers". */
  private Boolean rtti;
  /** Compiler-specific include paths (nested includepath elements). */
  private final Vector<IncludePath> includePaths = new Vector<>();
  /** Tri-state: null means "inherit"; effective default is true. */
  private Boolean multithreaded;
  /** Precompilation definitions (nested precompile elements). */
  private final Vector<PrecompileDef> precompileDefs = new Vector<>();
  /** System include paths; not used in dependency analysis. */
  private final Vector<SystemIncludePath> sysIncludePaths = new Vector<>();
  private OptimizationEnum optimization;
  /** Warning level index; -1 means "inherit from default providers". */
  private int warnings = -1;
  /** Source filenames (without extension) in desired compile order. */
  private List<String> order;
  private String toolPath;
  private String compilerPrefix;
  private File workDir;
  private boolean gccFileAbsolutePath;
  private String fortifyID = "";
  private boolean clearDefaultOptions;

  public CompilerDef() {
  }

  /**
   * Adds a compiler command-line arg.
   */
  public void addConfiguredCompilerArg(final CompilerArgument arg) {
    if (isReference()) {
      throw noChildrenAllowed();
    }
    addConfiguredProcessorArg(arg);
  }

  /**
   * Adds a compiler command-line param.
   */
  public void addConfiguredCompilerParam(final CompilerParam param) {
    if (isReference()) {
      throw noChildrenAllowed();
    }
    addConfiguredProcessorParam(param);
  }

  /**
   * Adds a defineset.
   *
   * @throws NullPointerException if defs is null
   */
  public void addConfiguredDefineset(final DefineSet defs) {
    if (defs == null) {
      throw new NullPointerException("defs");
    }
    if (isReference()) {
      throw noChildrenAllowed();
    }
    this.defineSets.addElement(defs);
  }

  /**
   * Creates an include path.
   */
  public IncludePath createIncludePath() {
    final Project p = getProject();
    if (p == null) {
      throw new java.lang.IllegalStateException("project must be set");
    }
    if (isReference()) {
      throw noChildrenAllowed();
    }
    final IncludePath path = new IncludePath(p);
    this.includePaths.addElement(path);
    return path;
  }

  /**
   * Specifies precompilation prototype file and exclusions.
   */
  public PrecompileDef createPrecompile() throws BuildException {
    // NOTE(review): unlike createIncludePath/createSysIncludePath this does not
    // reject a null project; preserved as-is — confirm whether that is intended.
    final Project p = getProject();
    if (isReference()) {
      throw noChildrenAllowed();
    }
    final PrecompileDef precomp = new PrecompileDef();
    precomp.setProject(p);
    this.precompileDefs.addElement(precomp);
    return precomp;
  }

  /**
   * Creates a system include path. Locations and timestamps of files located
   * using the system include paths are not used in dependency analysis.
   *
   * Standard include locations should not be specified. The compiler
   * adapters should recognized the settings from the appropriate environment
   * variables or configuration files.
   */
  public SystemIncludePath createSysIncludePath() {
    final Project p = getProject();
    if (p == null) {
      throw new java.lang.IllegalStateException("project must be set");
    }
    if (isReference()) {
      throw noChildrenAllowed();
    }
    final SystemIncludePath path = new SystemIncludePath(p);
    this.sysIncludePaths.addElement(path);
    return path;
  }

  /** Not an executable task; exists only so documentation tooling sees a task. */
  public void execute() throws org.apache.tools.ant.BuildException {
    throw new org.apache.tools.ant.BuildException("Not an actual task, but looks like one for documentation purposes");
  }

  /**
   * Returns the defines/undefines from all configured definesets that are
   * active for the current project.
   */
  public UndefineArgument[] getActiveDefines() {
    final Project p = getProject();
    if (p == null) {
      throw new java.lang.IllegalStateException("project must be set before this call");
    }
    if (isReference()) {
      return ((CompilerDef) getCheckedRef(CompilerDef.class, "CompilerDef")).getActiveDefines();
    }
    final Vector<UndefineArgument> actives = new Vector<>();
    for (final DefineSet currentSet : this.defineSets) {
      for (final UndefineArgument define : currentSet.getDefines()) {
        if (define.isActive(p)) {
          actives.addElement(define);
        }
      }
    }
    final UndefineArgument[] retval = new UndefineArgument[actives.size()];
    actives.copyInto(retval);
    return retval;
  }

  /**
   * Returns the compiler-specific include path.
   */
  public String[] getActiveIncludePaths() {
    if (isReference()) {
      return ((CompilerDef) getCheckedRef(CompilerDef.class, "CompilerDef")).getActiveIncludePaths();
    }
    return getActivePaths(this.includePaths);
  }

  /** Expands the active conditional paths into their individual entries. */
  private String[] getActivePaths(final Vector<? extends ConditionalPath> paths) {
    final Project p = getProject();
    if (p == null) {
      throw new java.lang.IllegalStateException("project not set");
    }
    final Vector<String> activePaths = new Vector<>(paths.size());
    for (final ConditionalPath path : paths) {
      if (path.isActive(p)) {
        for (final String pathEntry : path.list()) {
          activePaths.addElement(pathEntry);
        }
      }
    }
    final String[] pathNames = new String[activePaths.size()];
    activePaths.copyInto(pathNames);
    return pathNames;
  }

  /**
   * Returns the first active precompile definition, checking (in order) this
   * definition, the definition it extends, and finally the enclosing cc
   * element when inheritance is enabled.
   */
  public PrecompileDef getActivePrecompile(final CompilerDef ccElement) {
    if (isReference()) {
      return ((CompilerDef) getCheckedRef(CompilerDef.class, "CompilerDef")).getActivePrecompile(ccElement);
    }
    for (final PrecompileDef def : this.precompileDefs) {
      if (def.isActive()) {
        return def;
      }
    }
    final CompilerDef extendedDef = (CompilerDef) getExtends();
    if (extendedDef != null) {
      final PrecompileDef current = extendedDef.getActivePrecompile(null);
      if (current != null) {
        return current;
      }
    }
    if (ccElement != null && getInherit()) {
      return ccElement.getActivePrecompile(null);
    }
    return null;
  }

  public String[] getActiveSysIncludePaths() {
    if (isReference()) {
      return ((CompilerDef) getCheckedRef(CompilerDef.class, "CompilerDef")).getActiveSysIncludePaths();
    }
    return getActivePaths(this.sysIncludePaths);
  }

  public Boolean getCcache() {
    return this.ccache;
  }

  /**
   * Returns whether exceptions are enabled, falling back to the chain of
   * default providers when not set locally. Defaults to false.
   */
  public final boolean getExceptions(final CompilerDef[] defaultProviders, final int index) {
    if (isReference()) {
      return ((CompilerDef) getCheckedRef(CompilerDef.class, "CompilerDef")).getExceptions(defaultProviders, index);
    }
    if (this.exceptions != null) {
      return this.exceptions.booleanValue();
    }
    if (defaultProviders != null && index < defaultProviders.length) {
      return defaultProviders[index].getExceptions(defaultProviders, index + 1);
    }
    return false;
  }

  /**
   * Returns whether multithreaded code generation is enabled, falling back to
   * the chain of default providers when not set locally. Defaults to true.
   */
  public boolean getMultithreaded(final CompilerDef[] defaultProviders, final int index) {
    if (isReference()) {
      return ((CompilerDef) getCheckedRef(CompilerDef.class, "CompilerDef")).getMultithreaded(defaultProviders, index);
    }
    if (this.multithreaded != null) {
      return this.multithreaded.booleanValue();
    }
    if (defaultProviders != null && index < defaultProviders.length) {
      return defaultProviders[index].getMultithreaded(defaultProviders, index + 1);
    }
    return true;
  }

  /**
   * Returns the optimization level, falling back to the chain of default
   * providers when not set locally; null when nowhere set.
   */
  public final OptimizationEnum getOptimization(final CompilerDef[] defaultProviders, final int index) {
    if (isReference()) {
      return ((CompilerDef) getCheckedRef(CompilerDef.class, "CompilerDef")).getOptimization(defaultProviders, index);
    }
    if (this.optimization != null) {
      return this.optimization;
    }
    if (defaultProviders != null && index < defaultProviders.length) {
      return defaultProviders[index].getOptimization(defaultProviders, index + 1);
    }
    return null;
  }

  public List<String> getOrder() {
    return this.order;
  }

  /**
   * Returns the configured processor, defaulting to the GCC C compiler, and
   * substituting the libtool variant when libtool mode is enabled.
   */
  @Override
  public Processor getProcessor() {
    Processor processor = super.getProcessor();
    if (processor == null) {
      processor = GccCCompiler.getInstance();
    }
    if (getLibtool() && processor instanceof CommandLineCompiler) {
      final CommandLineCompiler compiler = (CommandLineCompiler) processor;
      processor = compiler.getLibtoolCompiler();
    }
    return processor;
  }

  /**
   * Returns the RTTI setting, falling back to the chain of default providers
   * when not set locally; null when nowhere set.
   */
  public final Boolean getRtti(final CompilerDef[] defaultProviders, final int index) {
    if (isReference()) {
      return ((CompilerDef) getCheckedRef(CompilerDef.class, "CompilerDef")).getRtti(defaultProviders, index);
    }
    if (this.rtti != null) {
      return this.rtti;
    }
    if (defaultProviders != null && index < defaultProviders.length) {
      return defaultProviders[index].getRtti(defaultProviders, index + 1);
    }
    return null;
  }

  public String getToolPath() {
    return this.toolPath;
  }

  public String getCompilerPrefix() {
    return this.compilerPrefix;
  }

  public File getWorkDir() {
    return this.workDir;
  }

  /**
   * Returns the warning level index, falling back to the chain of default
   * providers while unset (-1).
   */
  public int getWarnings(final CompilerDef[] defaultProviders, final int index) {
    if (isReference()) {
      return ((CompilerDef) getCheckedRef(CompilerDef.class, "CompilerDef")).getWarnings(defaultProviders, index);
    }
    if (this.warnings == -1 && defaultProviders != null && index < defaultProviders.length) {
      return defaultProviders[index].getWarnings(defaultProviders, index + 1);
    }
    return this.warnings;
  }

  public boolean isClearDefaultOptions() {
    return this.clearDefaultOptions;
  }

  public void setCcache(final Boolean ccache) {
    this.ccache = ccache;
  }

  /**
   * Sets the default compiler adapter. Use the "name" attribute when the
   * compiler is a supported compiler.
   *
   * @param classname
   *          fully qualified classname which implements CompilerAdapter
   */
  @Override
  public void setClassname(final String classname) throws BuildException {
    if (isReference()) {
      throw tooManyAttributes();
    }
    super.setClassname(classname);
    final Processor proc = getProcessor();
    if (!(proc instanceof Compiler)) {
      throw new BuildException(classname + " does not implement Compiler");
    }
  }

  public void setClearDefaultOptions(final boolean clearDefaultOptions) {
    this.clearDefaultOptions = clearDefaultOptions;
  }

  /**
   * Enables or disables exception support.
   *
   * @param exceptions
   *          if true, exceptions are supported.
   */
  public void setExceptions(final boolean exceptions) {
    if (isReference()) {
      throw tooManyAttributes();
    }
    this.exceptions = booleanValueOf(exceptions);
  }

  /**
   * Enables or disables generation of multithreaded code. Unless specified,
   * multithreaded code generation is enabled.
   *
   * @param multithreaded
   *          If true, generated code may be multithreaded.
   */
  public void setMultithreaded(final boolean multithreaded) {
    if (isReference()) {
      throw tooManyAttributes();
    }
    this.multithreaded = booleanValueOf(multithreaded);
  }

  /**
   * Sets compiler type. Supported names: gcc (default), g++, c++ (GCC C++
   * compiler), g77 (GNU Fortran), msvc (Microsoft Visual C++), bcc (Borland
   * C++), msrc (Microsoft Resource Compiler), brc (Borland Resource
   * Compiler), df (Compaq Visual Fortran), midl (Microsoft MIDL), icl/ecl
   * (Intel C++ for Windows IA-32/IA-64), icc/ecc (Intel C++ for Linux
   * IA-32/IA-64), CC (Sun ONE C++), aCC (HP aC++), os390 (OS390 C), os400
   * (Icc), sunc89 (Sun C89), xlC (VisualAge C), uic (Qt user interface
   * compiler), moc (Qt meta-object compiler), wcl (OpenWatcom C/C++) and
   * wfl (OpenWatcom FORTRAN).
   */
  public void setName(final CompilerEnum name) throws BuildException {
    if (isReference()) {
      throw tooManyAttributes();
    }
    final Compiler compiler = name.getCompiler();
    setProcessor(compiler);
  }

  /**
   * Sets optimization level.
   *
   * @param value
   *          optimization level
   */
  public void setOptimize(final OptimizationEnum value) {
    if (isReference()) {
      throw tooManyAttributes();
    }
    this.optimization = value;
  }

  // FREEHEP
  /**
   * List of source filenames without extensions
   *
   * @param order
   *          compile order
   */
  public void setOrder(final List<String> order) {
    this.order = order;
  }

  @Override
  protected void setProcessor(final Processor proc) throws BuildException {
    try {
      super.setProcessor(proc);
    } catch (final ClassCastException ex) {
      // Wrap the cast failure so callers get a BuildException as documented.
      throw new BuildException(ex);
    }
  }

  /**
   * Enables or disables run-time type information.
   *
   * @param rtti
   *          if true, run-time type information is supported.
   */
  public void setRtti(final boolean rtti) {
    if (isReference()) {
      throw tooManyAttributes();
    }
    this.rtti = booleanValueOf(rtti);
  }

  public void setToolPath(final String path) {
    this.toolPath = path;
  }

  public void setCompilerPrefix(final String prefix) {
    this.compilerPrefix = prefix;
  }

  public void setWorkDir(final File workDir) {
    this.workDir = workDir;
  }

  public void setFortifyID(final String fortifyID) {
    this.fortifyID = fortifyID;
  }

  public String getFortifyID() {
    return this.fortifyID;
  }

  /**
   * Enumerated attribute with the values "none", "severe", "default",
   * "production", "diagnostic", and "aserror".
   */
  public void setWarnings(final WarningLevelEnum level) {
    this.warnings = level.getIndex();
  }

  public void setGccFileAbsolutePath(final boolean sourceFileAbsPath) {
    this.gccFileAbsolutePath = sourceFileAbsPath;
  }

  public boolean getGccFileAbsolutePath() {
    return this.gccFileAbsolutePath;
  }
}
| |
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.collide.shared.ot;
import com.google.collide.dto.DocOp;
import com.google.collide.dto.DocOpComponent;
import com.google.collide.dto.shared.DocOpFactory;
import com.google.collide.json.shared.JsonArray;
import com.google.common.base.Preconditions;
/*
* Influenced by Wave's Transformer and Composer classes. Generally, we can't
* use Wave's because we introduce the RetainLine doc op component, and don't
* support a few extra components that Wave has. We didn't fork Wave's
* Transformer because its design wasn't amenable to the changes required by
* RetainLine. Instead, we wrote this from scratch to be able to handle that
* component easily.
*
* The operations being transformed are A and B. Both are intended to be applied
* to the same document. The output of the transformation will be A' and B'. For
* example, A' will be A transformed so it can be cleanly applied after B has
* been applied.
*
* The Processor class maintains the state for a document operation component.
* Each method in the Processor handles a component from the other document
* operation. As each method is executed, it outputs to its output document
* operation and to the other processor's output document operation. It also
* marks the general state into the ProcessorResult.
*/
/**
* Transforms document operations for the code editor.
*
*/
public class Transformer {
  /**
   * Exception that is thrown when there is a problem transforming two document
   * operations.
   *
   * <p>Unchecked so transformation call sites are not forced to declare it;
   * the cause carries the underlying failure.
   */
  public static class TransformException extends RuntimeException {
    public TransformException(String message, Throwable cause) {
      super(message, cause);
    }
  }
  /**
   * Processor state for a delete component. {@code text} holds the portion of
   * the original delete that has not yet been matched against components of
   * the other operation.
   */
  private static class DeleteProcessor extends Processor {
    /**
     * Outputs a delete of the first {@code deleteLength} characters of
     * {@code text}, marks this processor's component finished when the whole
     * text has been consumed, and returns the unconsumed remainder.
     */
    static String performDelete(DocOpCapturer output, String text, int deleteLength,
        ProcessorResult result) {
      output.delete(text.substring(0, deleteLength));
      if (text.length() == deleteLength) {
        result.markMyStateFinished();
      }
      return text.substring(deleteLength);
    }

    // Remaining text this delete component still has to process.
    private String text;

    DeleteProcessor(DocOpCapturer output, String text) {
      super(output);
      this.text = text;
    }

    @Override
    void handleOtherDelete(DeleteProcessor other, ProcessorResult result) {
      /*
       * The transformed op shouldn't know about the deletes, so don't output
       * anything
       */
      // Both sides delete overlapping text: consume the shorter component
      // entirely and keep the longer one's unmatched tail for the next round.
      if (text.length() == other.text.length()) {
        result.markMyStateFinished();
        result.markOtherStateFinished();
      } else if (text.length() < other.text.length()) {
        other.text = other.text.substring(text.length());
        result.markMyStateFinished();
      } else {
        text = text.substring(other.text.length());
        result.markOtherStateFinished();
      }
    }

    @Override
    void handleOtherFinished(Processor other, ProcessorResult result) {
      throw new IllegalStateException("Cannot delete if the other side is finished");
    }

    @Override
    void handleOtherInsert(InsertProcessor other, ProcessorResult result) {
      /*
       * Look at comments in InsertProcessor on why it needs to always handle
       * these components
       */
      result.flip();
      other.handleOtherDelete(this, result);
    }

    @Override
    void handleOtherRetain(RetainProcessor other, ProcessorResult result) {
      /*
       * The other transformed op won't have anything to retain, so output
       * nothing for it. Our transformed op needs to delete though, since the
       * other original op is just retaining.
       */
      // Consume min(delete length, retain count) from both components; the
      // one that reaches zero first is marked finished.
      int minCount = Math.min(text.length(), other.count);
      other.count -= minCount;
      text = performDelete(output, text, minCount, result);
      if (other.count == 0) {
        result.markOtherStateFinished();
      }
    }

    @Override
    void handleOtherRetainLine(RetainLineProcessor other, ProcessorResult result) {
      // Let RetainLineProcessor handle this
      result.flip();
      other.handleOtherDelete(this, result);
    }
  }
private static class FinishedProcessor extends Processor {
FinishedProcessor(DocOpCapturer output) {
super(output);
}
@Override
void handleOtherDelete(DeleteProcessor other, ProcessorResult result) {
result.flip();
other.handleOtherFinished(this, result);
}
@Override
void handleOtherFinished(Processor other, ProcessorResult result) {
throw new IllegalStateException("Both should not be finished");
}
@Override
void handleOtherInsert(InsertProcessor other, ProcessorResult result) {
result.flip();
other.handleOtherFinished(this, result);
}
@Override
void handleOtherRetain(RetainProcessor other, ProcessorResult result) {
result.flip();
other.handleOtherFinished(this, result);
}
@Override
void handleOtherRetainLine(RetainLineProcessor other, ProcessorResult result) {
result.flip();
other.handleOtherFinished(this, result);
}
}
private static class InsertProcessor extends Processor {
private static void performInsert(DocOpCapturer output, DocOpCapturer otherOutput,
String text, ProcessorResult result) {
output.insert(text);
boolean endsWithNewline = text.endsWith(NEWLINE);
otherOutput.retain(text.length(), endsWithNewline);
if (endsWithNewline) {
result.markMyCurrentComponentInsertOfNewline();
}
result.markMyStateFinished();
}
private String text;
InsertProcessor(DocOpCapturer output, String text) {
super(output);
this.text = text;
}
@Override
void handleOtherDelete(DeleteProcessor other, ProcessorResult result) {
// Handle insertion
performInsert(output, other.output, text, result);
// The delete will be handled by the successor for this processor
}
@Override
void handleOtherFinished(Processor other, ProcessorResult result) {
performInsert(output, other.output, text, result);
}
@Override
void handleOtherInsert(InsertProcessor other, ProcessorResult result) {
/*
* Instead of inserting both, we only insert one so contiguous insertions
* by one side will end up being contiguous in the transformed op.
* (Otherwise, you get interleaved I, RL, I, RL, ...)
*/
performInsert(output, other.output, text, result);
}
@Override
void handleOtherRetain(RetainProcessor other, ProcessorResult result) {
performInsert(output, other.output, text, result);
// The retain will be handled by the successor for this processor
}
@Override
void handleOtherRetainLine(RetainLineProcessor other, ProcessorResult result) {
result.flip();
other.handleOtherInsert(this, result);
}
}
private abstract static class Processor {
final DocOpCapturer output;
Processor(DocOpCapturer output) {
this.output = output;
}
abstract void handleOtherDelete(DeleteProcessor other, ProcessorResult result);
abstract void handleOtherFinished(Processor other, ProcessorResult result);
abstract void handleOtherInsert(InsertProcessor other, ProcessorResult result);
abstract void handleOtherRetain(RetainProcessor other, ProcessorResult result);
abstract void handleOtherRetainLine(RetainLineProcessor other, ProcessorResult result);
}
private static class ProcessorFactory implements DocOpCursor {
private DocOpCapturer curOutput;
private Processor returnProcessor;
@Override
public void delete(String text) {
returnProcessor = new DeleteProcessor(curOutput, text);
}
@Override
public void insert(String text) {
returnProcessor = new InsertProcessor(curOutput, text);
}
@Override
public void retain(int count, boolean hasTrailingNewline) {
returnProcessor = new RetainProcessor(curOutput, count, hasTrailingNewline);
}
@Override
public void retainLine(int lineCount) {
returnProcessor = new RetainLineProcessor(curOutput, lineCount);
}
Processor create(DocOpCapturer output, DocOpComponent component) {
curOutput = output;
DocOpUtils.acceptComponent(component, this);
return returnProcessor;
}
}
private static class ProcessorResult {
private boolean isMyStateFinished;
private boolean isOtherStateFinished;
/*
* We need to know the value of the previous component, but if we only tracked that then reset
* would clear it.
*/
private boolean isMyCurrentComponentInsertOfNewline;
private boolean isOtherCurrentComponentInsertOfNewline;
private boolean isMyPreviousComponentInsertOfNewline;
private boolean isOtherPreviousComponentInsertOfNewline;
private boolean isFlipped;
private ProcessorResult() {
}
/**
* Flips the "my" and "other" states. This should be called before one
* processor is handing over execution to the other processor, including
* passing this instance to the other processor.
*/
void flip() {
boolean origMyStateFinished = isMyStateFinished;
isMyStateFinished = isOtherStateFinished;
isOtherStateFinished = origMyStateFinished;
boolean origMyPreviousComponentInsertOfNewline = isMyPreviousComponentInsertOfNewline;
isMyPreviousComponentInsertOfNewline = isOtherPreviousComponentInsertOfNewline;
isOtherPreviousComponentInsertOfNewline = origMyPreviousComponentInsertOfNewline;
boolean origMyCurrentComponentInsertOfNewline = isMyCurrentComponentInsertOfNewline;
isMyCurrentComponentInsertOfNewline = isOtherCurrentComponentInsertOfNewline;
isOtherCurrentComponentInsertOfNewline = origMyCurrentComponentInsertOfNewline;
isFlipped = !isFlipped;
}
void markMyStateFinished() {
if (!isFlipped) {
isMyStateFinished = true;
} else {
isOtherStateFinished = true;
}
}
void markOtherStateFinished() {
if (!isFlipped) {
isOtherStateFinished = true;
} else {
isMyStateFinished = true;
}
}
void markMyCurrentComponentInsertOfNewline() {
if (!isFlipped) {
isMyCurrentComponentInsertOfNewline = true;
} else {
isOtherCurrentComponentInsertOfNewline = true;
}
}
void markOtherCurrentComponentInsertOfNewline() {
if (!isFlipped) {
isOtherCurrentComponentInsertOfNewline = true;
} else {
isMyCurrentComponentInsertOfNewline = true;
}
}
void reset() {
isMyPreviousComponentInsertOfNewline = isMyCurrentComponentInsertOfNewline;
isOtherPreviousComponentInsertOfNewline = isOtherCurrentComponentInsertOfNewline;
isFlipped = isMyStateFinished = isOtherStateFinished =
isMyCurrentComponentInsertOfNewline = isOtherCurrentComponentInsertOfNewline = false;
}
}
private static class RetainLineProcessor extends Processor {
private int lineCount;
/**
* In the event that we need to expand the retain line, we need to know
* exactly how many retains it should be expanded to. This tracks that
* number.
*/
private int substituteRetainCount;
RetainLineProcessor(DocOpCapturer output, int lineCount) {
super(output);
this.lineCount = lineCount;
}
@Override
void handleOtherDelete(DeleteProcessor other, ProcessorResult result) {
other.output.delete(other.text);
if (other.text.endsWith(NEWLINE)) {
handleOtherLineEnd(false, result);
} else {
// My transformed op won't see the delete, so do nothing
}
result.markOtherStateFinished();
}
@Override
void handleOtherFinished(Processor other, ProcessorResult result) {
Preconditions.checkState(
lineCount == 1, "Cannot retain more than one line if other side is finished");
if (result.isMyPreviousComponentInsertOfNewline) {
other.output.retainLine(1);
}
lineCount = 0;
output.retainLine(1);
result.markMyStateFinished();
}
@Override
void handleOtherInsert(InsertProcessor other, ProcessorResult result) {
other.output.insert(other.text);
if (other.text.endsWith(NEWLINE)) {
// Retain the line just inserted by other
lineCount++;
handleOtherLineEnd(true, result);
result.markOtherCurrentComponentInsertOfNewline();
} else {
substituteRetainCount += other.text.length();
}
result.markOtherStateFinished();
}
void handleOtherLineEnd(boolean canUseRetainLine, ProcessorResult result) {
if (canUseRetainLine) {
output.retainLine(1);
} else {
if (substituteRetainCount > 0) {
output.retain(substituteRetainCount, false);
}
}
lineCount--;
substituteRetainCount = 0;
if (lineCount == 0) {
result.markMyStateFinished();
}
}
@Override
void handleOtherRetain(RetainProcessor other, ProcessorResult result) {
other.output.retain(other.count, other.hasTrailingNewline);
substituteRetainCount += other.count;
if (other.hasTrailingNewline) {
handleOtherLineEnd(true, result);
}
result.markOtherStateFinished();
}
@Override
void handleOtherRetainLine(RetainLineProcessor other, ProcessorResult result) {
int minLineCount = Math.min(lineCount, other.lineCount);
output.retainLine(minLineCount);
lineCount -= minLineCount;
other.output.retainLine(minLineCount);
other.lineCount -= minLineCount;
if (lineCount == 0) {
result.markMyStateFinished();
}
if (other.lineCount == 0) {
result.markOtherStateFinished();
}
}
}
private static class RetainProcessor extends Processor {
static int performRetain(DocOpCapturer output, int fullCount, int retainCount,
boolean hasTrailingNewline, ProcessorResult result, boolean useOtherInResult) {
output.retain(retainCount, fullCount == retainCount ? hasTrailingNewline : false);
if (retainCount == fullCount) {
if (useOtherInResult) {
result.markOtherStateFinished();
} else {
result.markMyStateFinished();
}
}
return fullCount - retainCount;
}
private int count;
private final boolean hasTrailingNewline;
RetainProcessor(DocOpCapturer output, int count, boolean hasTrailingNewline) {
super(output);
this.count = count;
this.hasTrailingNewline = hasTrailingNewline;
}
@Override
void handleOtherDelete(DeleteProcessor other, ProcessorResult result) {
result.flip();
other.handleOtherRetain(this, result);
}
@Override
void handleOtherFinished(Processor other, ProcessorResult result) {
throw new IllegalStateException("Cannot retain if other side is finished");
}
@Override
void handleOtherInsert(InsertProcessor other, ProcessorResult result) {
result.flip();
other.handleOtherRetain(this, result);
}
@Override
void handleOtherRetain(RetainProcessor other, ProcessorResult result) {
int minCount = Math.min(count, other.count);
count = performRetain(output, count, minCount, hasTrailingNewline, result, false);
other.count =
performRetain(other.output, other.count, minCount, other.hasTrailingNewline, result,
true);
}
@Override
void handleOtherRetainLine(RetainLineProcessor other, ProcessorResult result) {
result.flip();
other.handleOtherRetain(this, result);
}
}
private static final String NEWLINE = "\n";
private static final ProcessorFactory PROCESSOR_FACTORY = new ProcessorFactory();
public static OperationPair transform(DocOpFactory factory, DocOp clientOp, DocOp serverOp)
throws TransformException {
try {
return new Transformer(factory).transformImpl(clientOp, serverOp);
} catch (Throwable t) {
throw new TransformException("Could not transform doc ops:\nClient: "
+ DocOpUtils.toString(clientOp, false) + "\nServer: "
+ DocOpUtils.toString(serverOp, false) + "\n", t);
}
}
private static void dispatchProcessor(Processor a, Processor b, ProcessorResult result) {
if (b instanceof DeleteProcessor) {
a.handleOtherDelete((DeleteProcessor) b, result);
} else if (b instanceof InsertProcessor) {
a.handleOtherInsert((InsertProcessor) b, result);
} else if (b instanceof RetainProcessor) {
a.handleOtherRetain((RetainProcessor) b, result);
} else if (b instanceof RetainLineProcessor) {
a.handleOtherRetainLine((RetainLineProcessor) b, result);
} else if (b instanceof FinishedProcessor) {
a.handleOtherFinished(b, result);
}
}
private final DocOpFactory factory;
private Transformer(DocOpFactory factory) {
this.factory = factory;
}
private OperationPair transformImpl(DocOp clientOp, DocOp serverOp) {
/*
* These capturers will create the respective side's doc op which will be
* transformed from the respective side's original doc op to apply to the
* document *after* the other side's original doc op.
*/
DocOpCapturer clientOutput = new DocOpCapturer(factory, true);
DocOpCapturer serverOutput = new DocOpCapturer(factory, true);
JsonArray<DocOpComponent> clientComponents = clientOp.getComponents();
JsonArray<DocOpComponent> serverComponents = serverOp.getComponents();
int clientIndex = 0;
int serverIndex = 0;
boolean clientComponentsFinished = false;
boolean serverComponentsFinished = false;
Processor client = null;
Processor server = null;
ProcessorResult result = new ProcessorResult();
while (!clientComponentsFinished || !serverComponentsFinished) {
if (client == null) {
if (clientIndex < clientComponents.size()) {
client = PROCESSOR_FACTORY.create(clientOutput, clientComponents.get(clientIndex++));
} else {
client = new FinishedProcessor(clientOutput);
clientComponentsFinished = true;
}
}
if (server == null) {
if (serverIndex < serverComponents.size()) {
server = PROCESSOR_FACTORY.create(serverOutput, serverComponents.get(serverIndex++));
} else {
server = new FinishedProcessor(serverOutput);
serverComponentsFinished = true;
}
}
if (!clientComponentsFinished || !serverComponentsFinished) {
dispatchProcessor(client, server, result);
}
if (result.isMyStateFinished) {
client = null;
}
if (result.isOtherStateFinished) {
server = null;
}
result.reset();
}
return new OperationPair(clientOutput.getDocOp(), serverOutput.getDocOp());
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.j2objc.translate;
import com.google.devtools.j2objc.GenerationTest;
import com.google.devtools.j2objc.Options;
import com.google.devtools.j2objc.Options.MemoryManagementOption;
import java.io.IOException;
/**
* Tests for {@link Functionizer}.
*
* @author Tom Ball
*/
public class FunctionizerTest extends GenerationTest {
// Verify a private no-arg instance method is functionized with an explicit
// self parameter, and that its declaration stays out of the public header.
public void testPrivateInstanceMethodNoArgs() throws IOException {
  String translation = translateSourceFile(
      "class A { String test(String msg) { return str(); } "
      + "  private String str() { return toString(); }}",
      "A", "A.h");
  String functionHeader = "NSString *A_str(A *self)";
  // Private functions must not leak into the header.
  assertNotInTranslation(translation, functionHeader);
  translation = getTranslatedFile("A.m");
  // Static declaration, definition, call site, and toString() -> description.
  assertTranslation(translation, "static " + functionHeader + ";");
  assertTranslation(translation, functionHeader + " {");
  assertTranslation(translation, "return A_str(self);");
  assertTranslation(translation, "return [self description];");
}
// Verify one function calls another with the instance parameter.
public void testPrivateToPrivate() throws IOException {
  final String source = "class A { private String test(String msg) { return str(); } "
      + "  private String str() { return toString(); }}";
  String impl = translateSourceFile(source, "A", "A.m");
  // test() is itself functionized and calls the functionized str().
  assertTranslation(impl, "return A_str(self);");
  // toString() is translated to the -description message on self.
  assertTranslation(impl, "return [self description];");
}
// Verify a private instance method with arguments is functionized; the self
// parameter comes first and the declaration stays out of the header.
public void testPrivateInstanceMethod() throws IOException {
  String translation = translateSourceFile(
      "class A { String test(String msg) { return str(msg, getClass()); } "
      + "  private String str(String msg, Class<?> cls) { return msg + cls; }}",
      "A", "A.h");
  String functionHeader =
      "NSString *A_strWithNSString_withIOSClass_(A *self, NSString *msg, IOSClass *cls)";
  assertNotInTranslation(translation, functionHeader);
  translation = getTranslatedFile("A.m");
  assertTranslation(translation, "static " + functionHeader + ";");
  // String concatenation becomes a JreStrcat call.
  assertTranslatedLines(translation, functionHeader + " {",
      "return JreStrcat(\"$@\", msg, cls);");
  assertTranslation(translation,
      "return A_strWithNSString_withIOSClass_(self, msg, [self getClass]);");
}
// Verify non-private instance method is generated normally.
public void testNonPrivateInstanceMethod() throws IOException {
  String translation = translateSourceFile(
      "class A { String test(String msg) { return str(msg, getClass()); } "
      + "  String str(String msg, Class<?> cls) { return msg + cls; }}",
      "A", "A.m");
  // Package-private method keeps its Objective-C method form...
  assertTranslatedLines(translation,
      "- (NSString *)strWithNSString:(NSString *)msg",
      "withIOSClass:(IOSClass *)cls {");
  // ...and is invoked with ordinary message sends.
  assertTranslation(translation,
      "return [self strWithNSString:msg withIOSClass:[self getClass]];");
}
// Verify instance field access in function.
public void testFieldAccessInFunction() throws IOException {
  String translation = translateSourceFile(
      "class A { String hello = \"hello\";"
      + "  String test() { return str(); } "
      + "  private String str() { return hello; }}",
      "A", "A.m");
  // The calling method passes self to the generated function.
  assertTranslatedLines(translation,
      "- (NSString *)test {",
      "return A_str(self);");
  // Field reads become explicit self-> ivar accesses.
  assertTranslatedLines(translation,
      "NSString *A_str(A *self) {",
      "return self->hello_;");
}
// Verify super field access in function.
public void testSuperFieldAccessInFunction() throws IOException {
  String translation = translateSourceFile(
      "class A { String hello = \"hello\";"
      + "  static class B extends A { void use() { str(); }"
      + "  private String str() { super.hello = \"hi\"; return super.hello; }}}",
      "A", "A.m");
  // super.hello resolves to the same inherited ivar; the write uses
  // JreStrongAssign on self.
  assertTranslatedLines(translation,
      "NSString *A_B_str(A_B *self) {",
      "JreStrongAssign(&self->hello_, @\"hi\");",
      "return self->hello_;");
}
// Verify there isn't any super method invocations in functions.
public void testSuperMethodInvocationInFunction() throws IOException {
  String translation = translateSourceFile(
      "class A { "
      + "  private String hello() { return \"hello\"; } "
      + "  public String shout() { return \"HELLO\"; } "
      + "  void use() { hello(); } "
      + "  static class B extends A { "
      + "  private String test1() { return super.hello(); } "
      + "  private String test2() { return super.shout(); }"
      + "  void use() { test1(); test2(); }}}",
      "A", "A.m");
  // Methods containing super calls stay as Objective-C methods so the normal
  // [super ...] dispatch remains available.
  assertTranslatedLines(translation,
      "- (NSString *)test1 {",
      "return [super hello];");
  assertTranslatedLines(translation,
      "- (NSString *)test2 {",
      "return [super shout];");
}
// Verify functions can call other functions, correctly passing the instance variable.
// Also tests that overloaded functions work.
public void testFunctionCallingFunction() throws IOException {
  // translateSourceFile already returns the A.m translation, so the original
  // redundant getTranslatedFile("A.m") re-read was removed.
  String translation = translateSourceFile(
      "class A { String hello = \"hello\";"
      + "  String test() { return str(0); } "
      + "  private String str(int i) { return str(); }"
      + "  private String str() { return hello; } }",
      "A", "A.m");
  // Method entry point calls the overloaded function.
  assertTranslatedLines(translation,
      "- (NSString *)test {",
      "return A_strWithInt_(self, 0);");
  // One function forwards self to the other.
  assertTranslatedLines(translation,
      "NSString *A_strWithInt_(A *self, jint i) {",
      "return A_str(self);");
  assertTranslatedLines(translation,
      "NSString *A_str(A *self) {",
      "return self->hello_;");
}
// Verify this expressions are changed to self parameters in functions.
public void testThisParameters() throws IOException {
  final String source =
      "class A { private void test(java.util.List list) { list.add(this); }}";
  String impl = translateSourceFile(source, "A", "A.m");
  // Inside the generated function, "this" becomes the explicit self argument.
  assertTranslatedLines(impl, "[((id<JavaUtilList>) nil_chk(list)) addWithId:self];");
}
// Verify that a call to a private method in an outer class is converted correctly.
public void testOuterCall() throws IOException {
  String translation = translateSourceFile(
      "class A { int outerN = str(); private int str() { return 0; }"
      + "  class B { "
      + "  private int test1() { return str(); } "
      + "  private int test2() { return A.this.str(); }"
      + "  private int test3() { return A.this.outerN; }}}",
      "A", "A.m");
  assertTranslatedLines(translation,
      "int A_str(A *self) {",
      "return 0;");
  // Both the implicit and the explicit A.this call pass the captured outer
  // instance (this$0_) as the self argument.
  assertTranslatedLines(translation,
      "- (jint)test1 {",
      "return A_str(this$0_);");
  assertTranslatedLines(translation,
      "- (jint)test2 {",
      "return A_str(this$0_);");
  // Outer field access goes through the same captured reference.
  assertTranslatedLines(translation,
      "- (jint)test3 {",
      "return this$0_->outerN_;");
}
// Verify that a call to a private method in an outer class is converted correctly.
public void testInnerOuterCall() throws IOException {
  String translation = translateSourceFile(
      "class A { private int str() { return 0; }"
      + "  class B { "
      + "  private int test() { return str(); }}"
      + "  class C { "
      + "  private void test(B b) { b.test(); }}}",
      "A", "A.m");
  // Calling another inner class's private method uses its function with the
  // nil-checked target instance as self.
  assertTranslatedLines(translation,
      "- (void)testWithA_B:(A_B *)b {",
      "A_B_test(nil_chk(b));");
}
// Verify annotation parameters are ignored.
public void testAnnotationParameters() throws IOException {
  final String source =
      "import java.lang.annotation.*; @Target({ElementType.METHOD}) public @interface Test {}";
  String impl = translateSourceFile(source, "Test", "Test.m");
  // Annotation members must not be functionized, so no self parameter appears.
  assertNotInTranslation(impl, "self");
}
// Verify function declaration is in .m file, not the header.
public void testPrivateStaticMethod() throws IOException {
  String translation = translateSourceFile(
      "class A { String test(String msg) { return str(msg, getClass()); } "
      + "  private static String str(String msg, Class<?> cls) { return msg + cls; }}",
      "A", "A.h");
  String functionHeader =
      "NSString *A_strWithNSString_withIOSClass_(NSString *msg, IOSClass *cls)";
  assertNotInTranslation(translation, functionHeader + ';');
  translation = getTranslatedFile("A.m");
  // Check new function.
  // Static functions call the class initializer before running.
  assertTranslatedLines(translation, functionHeader + " {",
      "A_initialize();",
      "return JreStrcat(\"$@\", msg, cls);");
  // Check wrapper.
  assertTranslatedLines(translation,
      "+ (NSString *)strWithNSString:(NSString *)msg",
      "withIOSClass:(IOSClass *)cls {",
      "return A_strWithNSString_withIOSClass_(msg, cls);");
  // Check invocation.
  assertTranslatedLines(translation,
      "- (NSString *)testWithNSString:(NSString *)msg {",
      "return A_strWithNSString_withIOSClass_(msg, [self getClass]);");
}
// Verify function declaration is in the header.
public void testStaticMethod() throws IOException {
  String translation = translateSourceFile(
      "class A { String test(String msg) { return str(msg, getClass()); } "
      + "  static String str(String msg, Class<?> cls) { return msg + cls; }}",
      "A", "A.h");
  String functionHeader =
      "NSString *A_strWithNSString_withIOSClass_(NSString *msg, IOSClass *cls)";
  // Non-private static method: the function IS declared in the header.
  assertTranslation(translation, functionHeader + ';');
  translation = getTranslatedFile("A.m");
  // Check new function.
  assertTranslatedLines(translation, functionHeader + " {",
      "A_initialize();",
      "return JreStrcat(\"$@\", msg, cls);");
  // Check wrapper.
  assertTranslatedLines(translation,
      "+ (NSString *)strWithNSString:(NSString *)msg",
      "withIOSClass:(IOSClass *)cls {",
      "return A_strWithNSString_withIOSClass_(msg, cls);");
  // Check invocation.
  assertTranslatedLines(translation,
      "- (NSString *)testWithNSString:(NSString *)msg {",
      "return A_strWithNSString_withIOSClass_(msg, [self getClass]);");
}
// Verify a functionized call can appear as an argument of another
// functionized call, each receiving self.
public void testFunctionParameter() throws IOException {
  final String source = "class A { private String test(String msg) { return echo(str(msg)); } "
      + "  private String echo(String msg) { return msg; } "
      + "  private String str(String msg) { return msg; }}";
  String impl = translateSourceFile(source, "A", "A.m");
  assertTranslatedLines(impl, "A_echoWithNSString_(self, A_strWithNSString_(self, msg))");
}
// Verify a private static varargs method is functionized with an array
// parameter and kept out of the header.
public void testStaticVarargsMethod() throws IOException {
  String translation = translateSourceFile(
      "class A { String test(String msg) { return strchars('a', 'b', 'c'); } "
      + "  private static String strchars(char... args) { return String.valueOf(args); }}",
      "A", "A.h");
  String functionHeader = "NSString *A_strcharsWithCharArray_(IOSCharArray *args)";
  assertNotInTranslation(translation, functionHeader + ';');
  translation = getTranslatedFile("A.m");
  assertTranslation(translation, functionHeader + " {");
  // The varargs call site is packed into an IOSCharArray literal.
  assertTranslation(translation, "return A_strcharsWithCharArray_("
      + "[IOSCharArray arrayWithChars:(jchar[]){ 'a', 'b', 'c' } count:3]);");
}
// Verify synchronized private methods keep their locking semantics when
// functionized: instance methods lock self, static methods lock the class.
public void testSynchronizedFunction() throws IOException {
  String translation = translateSourceFile(
      "class A { void test() { str(); } "
      + "private synchronized String str() { return toString(); }}",
      "A", "A.m");
  assertTranslation(translation, "@synchronized(self)");
  // Exactly one @synchronized: the function body, not an extra wrapper.
  assertOccurrences(translation, "@synchronized", 1);
  translation = translateSourceFile(
      "class A { void test() { str(); } "
      + "  private String str() { synchronized(this) { return toString(); }}}",
      "A", "A.m");
  assertTranslation(translation, "@synchronized(self)");
  translation = translateSourceFile(
      "class A { void test() { str(); } "
      + "  private static synchronized String str() { return \"abc\"; }}",
      "A", "A.m");
  // Static synchronized locks on the class object.
  assertTranslation(translation, "@synchronized(A_class_())");
  assertOccurrences(translation, "@synchronized", 1);
  translation = translateSourceFile(
      "class A { void test() { str(); } "
      + "  private String str() { synchronized(this.getClass()) { return \"abc\"; }}}",
      "A", "A.m");
  assertTranslation(translation, "@synchronized([self getClass])");
  translation = translateSourceFile(
      "class A { void test() { str(); } "
      + "  private static String str() { synchronized(A.class) { return \"abc\"; }}}",
      "A", "A.m");
  assertTranslation(translation, "@synchronized(A_class_())");
}
// Verify a functionized setter assigns the field through self with a strong
// assignment.
public void testSetter() throws IOException {
  final String source = "class A { Object o; private void setO(Object o) { this.o = o; }}";
  String impl = translateSourceFile(source, "A", "A.m");
  assertTranslation(impl, "JreStrongAssign(&self->o_, o)");
}
// Verify only static class functions invoke the lazy class initializer.
public void testClassInitializerCalledFromFunction() throws IOException {
  String translation = translateSourceFile(
      "class A { static Object o = new Object(); "
      + "  private static Object foo() { return o; }"
      + "  void test() { A.foo(); }"
      + "  private void test2() {}"
      + "  void use() { test2(); }}",
      "A", "A.m");
  // Verify static class function calls class init.
  assertTranslatedLines(translation, "id A_foo() {", "A_initialize();", "return A_o;", "}");
  // Verify class method doesn't call class init.
  assertTranslatedLines(translation, "- (void)test {", "A_foo();", "}");
  // Verify non-static class function doesn't call class init.
  assertTranslatedLines(translation, "void A_test2(A *self) {", "}");
}
// Same as testClassInitializerCalledFromFunction, but for enum types, which
// also get a generated valueOf function.
public void testClassInitializerCalledFromEnumFunctions() throws IOException {
  String translation = translateSourceFile(
      "enum A { A, B; static Object o = new Object(); "
      + "  private static Object foo() { return o; }"
      + "  void test() { A.foo(); }"
      + "  private void test2() {}"
      + "  void use() { test2(); }}",
      "A", "A.m");
  // Verify valueOf function calls class init.
  assertTranslatedLines(translation, "A *A_valueOfWithNSString_(NSString *name) {",
      "A_initialize();", "for (int i = 0; i < 2; i++) {");
  // Verify static class function calls class init.
  assertTranslatedLines(translation,
      "id A_foo() {", "A_initialize();", "return A_o;", "}");
  // Verify class method doesn't call class init.
  assertTranslatedLines(translation, "- (void)test {", "A_foo();", "}");
  // Verify non-static class function doesn't call class init.
  assertTranslatedLines(translation, "void A_test2(A *self) {", "}");
}
// Verify a private native (OCNI) method is functionized, with the OCNI body
// becoming the function body and the method forwarding to the function.
public void testPrivateNativeMethod() throws IOException {
  String translation = translateSourceFile(
      "class A { Object o; void use() { setO(null); } "
      + "  private native void setO(Object o) /*-[ self->o_ = o; ]-*/; }",
      "A", "A.m");
  assertTranslation(translation, "static void A_setOWithId_(A *self, id o);");
  // The OCNI snippet is emitted verbatim as the function body.
  assertTranslatedLines(translation, "void A_setOWithId_(A *self, id o) {", "self->o_ = o;", "}");
  assertTranslatedLines(translation,
      "- (void)setOWithId:(id)o {", "A_setOWithId_(self, o);", "}");
}
// Verify a private static generic method is functionized with its type
// variable erased to id.
public void testGenericMethod() throws IOException {
  final String source =
      "class Test { private static <T> void foo(T t) {} static void bar() { foo(\"test\"); } }";
  String impl = translateSourceFile(source, "Test", "Test.m");
  assertTranslation(impl, "Test_fooWithId_(@\"test\");");
}
// Verify an overridden method in a private class is NOT functionized, since
// dynamic dispatch is still required.
public void testProtectedMethodInPrivateClass() throws IOException {
  final String source =
      "class Test { private static class A { protected void foo() {} void bar() { foo(); } } "
      + "private static class B extends A { protected void foo() {} } }";
  String impl = translateSourceFile(source, "Test", "Test.m");
  assertNotInTranslation(impl, "Test_A_foo");
}
// Verify a private static method called from an anonymous enum constant body
// is functionized and declared in the .m file.
public void testPrivateMethodCalledFromAnonymousEnum() throws IOException {
  final String source =
      "enum Test { A { void bar() { foo(); } }; private static void foo() {} }";
  String impl = translateSourceFile(source, "Test", "Test.m");
  // The anonymous constant's method calls the function directly.
  assertTranslatedLines(impl, "- (void)bar {", "Test_foo();");
  // Forward declaration plus definition, both file-local.
  assertTranslation(impl, "static void Test_foo();");
  assertTranslation(impl, "void Test_foo() {");
}
// Verify native methods without OCNI bodies get JNI declarations and
// function/method wrappers that forward through the JNI environment.
public void testNativeMethodsWithoutOcni() throws IOException {
  String translation = translateSourceFile(
      "class Test { public native void foo(); public native static void bar(); }",
      "Test", "Test.h");
  // Public declaration for "foo" instance method, within "NativeMethods" category.
  assertTranslation(translation, "- (void)foo;");
  // Public declaration for "bar". both the class method and c-function.
  assertTranslation(translation, "+ (void)bar;");
  assertTranslation(translation, "FOUNDATION_EXPORT void Test_bar();");
  translation = getTranslatedFile("Test.m");
  // Implementation for "foo" is functionized.
  assertTranslation(translation, "void Test_foo(Test *self);");
  assertTranslatedLines(translation, "- (void)foo {", "Test_foo(self);", "}");
  // class method wrapper for "bar".
  assertTranslatedLines(translation, "+ (void)bar {", "Test_bar();", "}");
  // JNI external function declarations
  assertTranslation(translation, "JNIEXPORT void Java_Test_foo(JNIEnv *_env_, jobject self);");
  assertTranslation(translation, "JNIEXPORT void Java_Test_bar(JNIEnv *_env_, jclass _cls_);");
  // JNI wrapper functions
  assertTranslatedLines(translation,
      "void Test_foo(Test *self) {", "Java_Test_foo(&J2ObjC_JNIEnv, self);", "}");
  assertTranslatedLines(translation,
      "void Test_bar() {", "Java_Test_bar(&J2ObjC_JNIEnv, Test_class_());", "}");
}
// Verify overloaded OCNI-less native methods get JNI names mangled with their
// argument signatures (Java_Test_foo__I, Java_Test_foo__Ljava_lang_String_2).
public void testOverloadedNativeMethodsWithoutOcni() throws IOException {
  String translation = translateSourceFile(
      "class Test { public native void foo(int i); public native static void foo(String s); }",
      "Test", "Test.h");
  // Public declaration for "foo" instance method, within "NativeMethods" category.
  assertTranslation(translation, "- (void)fooWithInt:(jint)i;");
  // Public declaration for "bar". both the class method and c-function.
  assertTranslation(translation, "+ (void)fooWithNSString:(NSString *)s;");
  assertTranslation(translation, "FOUNDATION_EXPORT void Test_fooWithNSString_(NSString *s);");
  translation = getTranslatedFile("Test.m");
  // Implementation for "foo" is functionized.
  assertTranslation(translation, "void Test_fooWithInt_(Test *self, jint i);");
  assertTranslatedLines(translation,
      "- (void)fooWithInt:(jint)i {", "Test_fooWithInt_(self, i);", "}");
  // class method wrapper for "bar".
  assertTranslatedLines(translation,
      "+ (void)fooWithNSString:(NSString *)s {", "Test_fooWithNSString_(s);", "}");
  // JNI external function declarations
  assertTranslation(translation,
      "JNIEXPORT void Java_Test_foo__I(JNIEnv *_env_, jobject self, jint i);");
  assertTranslation(translation,
      "JNIEXPORT void Java_Test_foo__Ljava_lang_String_2("
      + "JNIEnv *_env_, jclass _cls_, jstring s);");
  // JNI wrapper functions
  assertTranslatedLines(translation,
      "void Test_fooWithInt_(Test *self, jint i) {",
      "Java_Test_foo__I(&J2ObjC_JNIEnv, self, i);",
      "}");
  assertTranslatedLines(translation,
      "void Test_fooWithNSString_(NSString *s) {",
      "Java_Test_foo__Ljava_lang_String_2(&J2ObjC_JNIEnv, Test_class_(), s);",
      "}");
}
// Verify that when a method overrides two differently-typed declarations, all
// declared selectors dispatch to the same functionized implementation.
public void testExtraSelectorsFromMultipleOverrides() throws IOException {
  addSourceFile("interface I { int foo(String t); }", "I.java");
  // Fixed: the original snippet was "int foo(T t) {}", which is not
  // compilable Java (non-void method with no return statement).
  addSourceFile("class A<T> { int foo(T t) { return 0; } }", "A.java");
  String translation = translateSourceFile(
      "class B extends A<String> implements I { public int foo(String t) { return 7; } }",
      "B", "B.h");
  // Both the interface selector and the erased generic selector are declared.
  assertTranslation(translation, "- (jint)fooWithNSString:(NSString *)t;");
  assertTranslation(translation, "- (jint)fooWithId:(NSString *)t;");
  translation = getTranslatedFile("B.m");
  // Both selectors forward to the single shared function.
  assertTranslatedLines(translation,
      "- (jint)fooWithNSString:(NSString *)t {",
      "  return B_fooWithNSString_(self, t);",
      "}");
  assertTranslatedLines(translation,
      "- (jint)fooWithId:(NSString *)t {",
      "  return B_fooWithNSString_(self, t);",
      "}");
}
  // Verify that static methods called via a super invocation are correctly
  // functionized.
  public void testStaticSuperInvocation() throws IOException {
    String translation = translateSourceFile(
        "public class A { static class Base { static void test() {} } "
        + "static class Foo extends Base { void test2() { super.test(); } }}", "A", "A.m");
    // super.test() statically resolves to Base.test(), so the generated code
    // calls the functionized A_Base_test() directly, with no receiver.
    assertTranslatedLines(translation,
        "- (void)test2 {",
        "  A_Base_test();",
        "}");
  }
  // Verifies that a super.toString() call inside a constructor is translated
  // to the special super-dispatch helper (description is NSObject's
  // equivalent of toString).
  public void testSuperInvocationFromConstructor() throws IOException {
    String translation = translateSourceFile(
        "class Test { Test() { super.toString(); } }", "Test", "Test.m");
    assertTranslation(translation, "Test_super$_description(self, @selector(description));");
  }
  /**
   * Shared body for the functionized-constructor tests (run once with
   * reference counting and once with ARC). Checks that a public constructor
   * gets public init/new_/create_ functions, that a private constructor gets
   * file-local static equivalents, and that all implementations are emitted.
   *
   * @return the translated Test.m contents, for further assertions if needed
   */
  public String innerTestFunctionizedConstructors() throws IOException {
    String translation = translateSourceFile(
        "class Test { int i; "
        + "Test() { this(0); } "
        + "private Test(int i) { this.i = i; } }", "Test", "Test.h");
    // Functionized constructor.
    assertTranslation(translation, "FOUNDATION_EXPORT void Test_init(Test *self);");
    // Retaining allocating constructor.
    assertTranslation(translation, "FOUNDATION_EXPORT Test *new_Test_init() NS_RETURNS_RETAINED;");
    // Releasing allocating constructor.
    assertTranslation(translation, "FOUNDATION_EXPORT Test *create_Test_init();");
    translation = getTranslatedFile("Test.m");
    // Declarations for the private constructor. They are static (file-local)
    // and marked unused because not every variant is necessarily called.
    assertTranslation(translation,
        "__attribute__((unused)) static void Test_initWithInt_(Test *self, jint i);");
    assertTranslation(translation,
        "__attribute__((unused)) static Test *new_Test_initWithInt_(jint i) NS_RETURNS_RETAINED;");
    assertTranslation(translation,
        "__attribute__((unused)) static Test *create_Test_initWithInt_(jint i);");
    // Implementations.
    assertTranslatedLines(translation,
        "void Test_init(Test *self) {",
        "  Test_initWithInt_(self, 0);",
        "}");
    assertTranslatedLines(translation,
        "Test *new_Test_init() {",
        "  J2OBJC_NEW_IMPL(Test, init)",
        "}");
    assertTranslatedLines(translation,
        "void Test_initWithInt_(Test *self, jint i) {",
        "  NSObject_init(self);",
        "  self->i_ = i;",
        "}");
    assertTranslatedLines(translation,
        "Test *new_Test_initWithInt_(jint i) {",
        "  J2OBJC_NEW_IMPL(Test, initWithInt_, i)",
        "}");
    assertTranslatedLines(translation,
        "Test *create_Test_init() {",
        "  J2OBJC_CREATE_IMPL(Test, init)",
        "}");
    assertTranslatedLines(translation,
        "Test *create_Test_initWithInt_(jint i) {",
        "  J2OBJC_CREATE_IMPL(Test, initWithInt_, i)",
        "}");
    return translation;
  }
  // Functionized constructors under the default (reference-counted) memory model.
  public void testFunctionizedConstructors() throws IOException {
    innerTestFunctionizedConstructors();
  }
  // Same expectations must hold when translating with ARC memory management.
  public void testFunctionizedConstructorsARC() throws IOException {
    Options.setMemoryManagementOption(MemoryManagementOption.ARC);
    innerTestFunctionizedConstructors();
  }
  // An abstract class cannot be instantiated, so the init function is still
  // emitted (for subclasses to chain to) but no new_/create_ allocating
  // functions should be generated in either the header or the implementation.
  public void testNoAllocatingConstructorsForAbstractClass() throws IOException {
    String translation = translateSourceFile("abstract class Test {}", "Test", "Test.h");
    assertTranslation(translation, "FOUNDATION_EXPORT void Test_init(Test *self);");
    assertNotInTranslation(translation, "new_Test_init");
    assertNotInTranslation(translation, "create_Test_init");
    translation = getTranslatedFile("Test.m");
    assertNotInTranslation(translation, "new_Test_init");
    assertNotInTranslation(translation, "create_Test_init");
  }
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.internal.app;
import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
import com.android.internal.R;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.TypedArray;
import android.database.Cursor;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.Window;
import android.view.WindowInsets;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckedTextView;
import android.widget.CursorAdapter;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.ScrollView;
import android.widget.SimpleCursorAdapter;
import android.widget.TextView;
import java.lang.ref.WeakReference;
/**
 * Controller that populates and manages the content of an alert dialog's
 * window: title, message, icon, optional custom view, optional list, and up
 * to three buttons. It does not own a window; it decorates the {@link Window}
 * handed to it by the hosting dialog. {@link #installContent()} must be
 * called before the dialog is shown; the setters may be called before or
 * after that point.
 */
public class AlertController {
    private final Context mContext;
    private final DialogInterface mDialogInterface;
    private final Window mWindow;
    private CharSequence mTitle;
    private CharSequence mMessage;
    private ListView mListView;
    // Custom content view supplied through setView().
    private View mView;
    private int mViewSpacingLeft;
    private int mViewSpacingTop;
    private int mViewSpacingRight;
    private int mViewSpacingBottom;
    private boolean mViewSpacingSpecified = false;
    private Button mButtonPositive;
    private CharSequence mButtonPositiveText;
    private Message mButtonPositiveMessage;
    private Button mButtonNegative;
    private CharSequence mButtonNegativeText;
    private Message mButtonNegativeMessage;
    private Button mButtonNeutral;
    private CharSequence mButtonNeutralText;
    private Message mButtonNeutralMessage;
    private ScrollView mScrollView;
    // -1: unset, 0: explicitly "no icon", >0: drawable resource id.
    private int mIconId = -1;
    private Drawable mIcon;
    private ImageView mIconView;
    private TextView mTitleView;
    private TextView mMessageView;
    private View mCustomTitleView;
    private boolean mForceInverseBackground;
    private ListAdapter mAdapter;
    private int mCheckedItem = -1;
    // Layout resource ids resolved from the theme's alertDialogStyle.
    private int mAlertDialogLayout;
    private int mListLayout;
    private int mMultiChoiceItemLayout;
    private int mSingleChoiceItemLayout;
    private int mListItemLayout;
    private Handler mHandler;
    // Shared click listener for the three dialog buttons: dispatches the
    // button's Message (if one was registered), then posts a dismiss so the
    // dialog closes only after the click callback has run.
    View.OnClickListener mButtonHandler = new View.OnClickListener() {
        public void onClick(View v) {
            Message m = null;
            if (v == mButtonPositive && mButtonPositiveMessage != null) {
                m = Message.obtain(mButtonPositiveMessage);
            } else if (v == mButtonNegative && mButtonNegativeMessage != null) {
                m = Message.obtain(mButtonNegativeMessage);
            } else if (v == mButtonNeutral && mButtonNeutralMessage != null) {
                m = Message.obtain(mButtonNeutralMessage);
            }
            if (m != null) {
                m.sendToTarget();
            }
            // Post a message so we dismiss after the above handlers are executed
            mHandler.obtainMessage(ButtonHandler.MSG_DISMISS_DIALOG, mDialogInterface)
                    .sendToTarget();
        }
    };
    // Handler that routes button clicks to the registered OnClickListener and
    // performs the deferred dismiss. Holds the dialog via a WeakReference so
    // a message pending in the queue cannot leak the dialog.
    private static final class ButtonHandler extends Handler {
        // Button clicks have Message.what as the BUTTON{1,2,3} constant
        private static final int MSG_DISMISS_DIALOG = 1;
        private WeakReference<DialogInterface> mDialog;
        public ButtonHandler(DialogInterface dialog) {
            mDialog = new WeakReference<DialogInterface>(dialog);
        }
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case DialogInterface.BUTTON_POSITIVE:
                case DialogInterface.BUTTON_NEGATIVE:
                case DialogInterface.BUTTON_NEUTRAL:
                    ((DialogInterface.OnClickListener) msg.obj).onClick(mDialog.get(), msg.what);
                    break;
                case MSG_DISMISS_DIALOG:
                    ((DialogInterface) msg.obj).dismiss();
            }
        }
    }
    // Whether the current theme requests that a lone button be centered.
    private static boolean shouldCenterSingleButton(Context context) {
        TypedValue outValue = new TypedValue();
        context.getTheme().resolveAttribute(com.android.internal.R.attr.alertDialogCenterButtons,
                outValue, true);
        return outValue.data != 0;
    }
    public AlertController(Context context, DialogInterface di, Window window) {
        mContext = context;
        mDialogInterface = di;
        mWindow = window;
        mHandler = new ButtonHandler(di);
        // Resolve the dialog's layout resources from the theme's
        // alertDialogStyle so themed dialogs pick up custom layouts.
        TypedArray a = context.obtainStyledAttributes(null,
                com.android.internal.R.styleable.AlertDialog,
                com.android.internal.R.attr.alertDialogStyle, 0);
        mAlertDialogLayout = a.getResourceId(com.android.internal.R.styleable.AlertDialog_layout,
                com.android.internal.R.layout.alert_dialog);
        mListLayout = a.getResourceId(
                com.android.internal.R.styleable.AlertDialog_listLayout,
                com.android.internal.R.layout.select_dialog);
        mMultiChoiceItemLayout = a.getResourceId(
                com.android.internal.R.styleable.AlertDialog_multiChoiceItemLayout,
                com.android.internal.R.layout.select_dialog_multichoice);
        mSingleChoiceItemLayout = a.getResourceId(
                com.android.internal.R.styleable.AlertDialog_singleChoiceItemLayout,
                com.android.internal.R.layout.select_dialog_singlechoice);
        mListItemLayout = a.getResourceId(
                com.android.internal.R.styleable.AlertDialog_listItemLayout,
                com.android.internal.R.layout.select_dialog_item);
        a.recycle();
    }
    // Returns true if v, or any view in its subtree, accepts text input
    // (used to decide whether the IME should be suppressed).
    static boolean canTextInput(View v) {
        if (v.onCheckIsTextEditor()) {
            return true;
        }
        if (!(v instanceof ViewGroup)) {
            return false;
        }
        ViewGroup vg = (ViewGroup)v;
        int i = vg.getChildCount();
        while (i > 0) {
            i--;
            v = vg.getChildAt(i);
            if (canTextInput(v)) {
                return true;
            }
        }
        return false;
    }
    // Inflates the dialog layout into the window and wires up every panel.
    // Call once, before the dialog is shown.
    public void installContent() {
        /* We use a custom title so never request a window title */
        mWindow.requestFeature(Window.FEATURE_NO_TITLE);
        if (mView == null || !canTextInput(mView)) {
            // Nothing editable anywhere, so keep the IME from popping up.
            mWindow.setFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM,
                    WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
        }
        mWindow.setContentView(mAlertDialogLayout);
        setupView();
        setupDecor();
    }
    // Updates the stored title and, if the title view already exists
    // (installContent() ran), the view itself.
    public void setTitle(CharSequence title) {
        mTitle = title;
        if (mTitleView != null) {
            mTitleView.setText(title);
        }
    }
    /**
     * @see AlertDialog.Builder#setCustomTitle(View)
     */
    public void setCustomTitle(View customTitleView) {
        mCustomTitleView = customTitleView;
    }
    // Updates the stored message and, if present, the message view.
    public void setMessage(CharSequence message) {
        mMessage = message;
        if (mMessageView != null) {
            mMessageView.setText(message);
        }
    }
    /**
     * Set the view to display in the dialog.
     */
    public void setView(View view) {
        mView = view;
        mViewSpacingSpecified = false;
    }
    /**
     * Set the view to display in the dialog along with the spacing around that view
     */
    public void setView(View view, int viewSpacingLeft, int viewSpacingTop, int viewSpacingRight,
            int viewSpacingBottom) {
        mView = view;
        mViewSpacingSpecified = true;
        mViewSpacingLeft = viewSpacingLeft;
        mViewSpacingTop = viewSpacingTop;
        mViewSpacingRight = viewSpacingRight;
        mViewSpacingBottom = viewSpacingBottom;
    }
    /**
     * Sets a click listener or a message to be sent when the button is clicked.
     * You only need to pass one of {@code listener} or {@code msg}.
     *
     * @param whichButton Which button, can be one of
     *            {@link DialogInterface#BUTTON_POSITIVE},
     *            {@link DialogInterface#BUTTON_NEGATIVE}, or
     *            {@link DialogInterface#BUTTON_NEUTRAL}
     * @param text The text to display in positive button.
     * @param listener The {@link DialogInterface.OnClickListener} to use.
     * @param msg The {@link Message} to be sent when clicked.
     */
    public void setButton(int whichButton, CharSequence text,
            DialogInterface.OnClickListener listener, Message msg) {
        // Wrap a bare listener in a Message targeted at mHandler so both
        // entry points go through the same dispatch path.
        if (msg == null && listener != null) {
            msg = mHandler.obtainMessage(whichButton, listener);
        }
        switch (whichButton) {
            case DialogInterface.BUTTON_POSITIVE:
                mButtonPositiveText = text;
                mButtonPositiveMessage = msg;
                break;
            case DialogInterface.BUTTON_NEGATIVE:
                mButtonNegativeText = text;
                mButtonNegativeMessage = msg;
                break;
            case DialogInterface.BUTTON_NEUTRAL:
                mButtonNeutralText = text;
                mButtonNeutralMessage = msg;
                break;
            default:
                throw new IllegalArgumentException("Button does not exist");
        }
    }
    /**
     * Set resId to 0 if you don't want an icon.
     * @param resId the resourceId of the drawable to use as the icon or 0
     * if you don't want an icon.
     */
    public void setIcon(int resId) {
        mIconId = resId;
        if (mIconView != null) {
            if (resId > 0) {
                mIconView.setImageResource(mIconId);
            } else if (resId == 0) {
                mIconView.setVisibility(View.GONE);
            }
        }
    }
    // Drawable variant of setIcon(); only applied immediately when the icon
    // view exists and the drawable is non-null.
    public void setIcon(Drawable icon) {
        mIcon = icon;
        if ((mIconView != null) && (mIcon != null)) {
            mIconView.setImageDrawable(icon);
        }
    }
    /**
     * @param attrId the attributeId of the theme-specific drawable
     * to resolve the resourceId for.
     *
     * @return resId the resourceId of the theme-specific drawable
     */
    public int getIconAttributeResId(int attrId) {
        TypedValue out = new TypedValue();
        mContext.getTheme().resolveAttribute(attrId, out, true);
        return out.resourceId;
    }
    public void setInverseBackgroundForced(boolean forceInverseBackground) {
        mForceInverseBackground = forceInverseBackground;
    }
    public ListView getListView() {
        return mListView;
    }
    // Returns the button for the given BUTTON_* constant, or null for an
    // unknown constant.
    public Button getButton(int whichButton) {
        switch (whichButton) {
            case DialogInterface.BUTTON_POSITIVE:
                return mButtonPositive;
            case DialogInterface.BUTTON_NEGATIVE:
                return mButtonNegative;
            case DialogInterface.BUTTON_NEUTRAL:
                return mButtonNeutral;
            default:
                return null;
        }
    }
    // Give the scroll view first crack at key events so the message area can
    // be scrolled with the keyboard/d-pad.
    @SuppressWarnings({"UnusedDeclaration"})
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        return mScrollView != null && mScrollView.executeKeyEvent(event);
    }
    @SuppressWarnings({"UnusedDeclaration"})
    public boolean onKeyUp(int keyCode, KeyEvent event) {
        return mScrollView != null && mScrollView.executeKeyEvent(event);
    }
    // Applies window-inset handling to the decor: on round displays the
    // parent panel gets extra padding so content stays inside the circle.
    private void setupDecor() {
        final View decor = mWindow.getDecorView();
        final View parent = mWindow.findViewById(R.id.parentPanel);
        if (parent != null && decor != null) {
            decor.setOnApplyWindowInsetsListener(new View.OnApplyWindowInsetsListener() {
                @Override
                public WindowInsets onApplyWindowInsets(View view, WindowInsets insets) {
                    if (insets.isRound()) {
                        // TODO: Get the padding as a function of the window size.
                        int roundOffset = mContext.getResources().getDimensionPixelOffset(
                                R.dimen.alert_dialog_round_padding);
                        parent.setPadding(roundOffset, roundOffset, roundOffset, roundOffset);
                    }
                    return insets.consumeSystemWindowInsets();
                }
            });
            decor.setFitsSystemWindows(true);
            decor.requestApplyInsets();
        }
    }
    // Wires up every panel (content, buttons, title, custom view), hides the
    // ones that are unused, then hands the visible set to setBackground().
    private void setupView() {
        LinearLayout contentPanel = (LinearLayout) mWindow.findViewById(R.id.contentPanel);
        setupContent(contentPanel);
        boolean hasButtons = setupButtons();
        LinearLayout topPanel = (LinearLayout) mWindow.findViewById(R.id.topPanel);
        TypedArray a = mContext.obtainStyledAttributes(
                null, com.android.internal.R.styleable.AlertDialog, com.android.internal.R.attr.alertDialogStyle, 0);
        boolean hasTitle = setupTitle(topPanel);
        View buttonPanel = mWindow.findViewById(R.id.buttonPanel);
        if (!hasButtons) {
            buttonPanel.setVisibility(View.GONE);
            mWindow.setCloseOnTouchOutsideIfNotSet(true);
        }
        FrameLayout customPanel = null;
        if (mView != null) {
            customPanel = (FrameLayout) mWindow.findViewById(R.id.customPanel);
            FrameLayout custom = (FrameLayout) mWindow.findViewById(R.id.custom);
            custom.addView(mView, new LayoutParams(MATCH_PARENT, MATCH_PARENT));
            if (mViewSpacingSpecified) {
                custom.setPadding(mViewSpacingLeft, mViewSpacingTop, mViewSpacingRight,
                        mViewSpacingBottom);
            }
            if (mListView != null) {
                // The list gets the flexible space; the custom panel should
                // not stretch.
                ((LinearLayout.LayoutParams) customPanel.getLayoutParams()).weight = 0;
            }
        } else {
            mWindow.findViewById(R.id.customPanel).setVisibility(View.GONE);
        }
        /* Only display the divider if we have a title and a
         * custom view or a message.
         */
        if (hasTitle) {
            View divider = null;
            if (mMessage != null || mView != null || mListView != null) {
                divider = mWindow.findViewById(R.id.titleDivider);
            } else {
                divider = mWindow.findViewById(R.id.titleDividerTop);
            }
            if (divider != null) {
                divider.setVisibility(View.VISIBLE);
            }
        }
        setBackground(topPanel, contentPanel, customPanel, hasButtons, a, hasTitle, buttonPanel);
        a.recycle();
    }
    // Configures the title area: a caller-supplied custom title wins;
    // otherwise shows title text + icon, or hides the panel entirely.
    // Returns whether a title area is being shown.
    private boolean setupTitle(LinearLayout topPanel) {
        boolean hasTitle = true;
        if (mCustomTitleView != null) {
            // Add the custom title view directly to the topPanel layout
            LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(
                    LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
            topPanel.addView(mCustomTitleView, 0, lp);
            // Hide the title template
            View titleTemplate = mWindow.findViewById(R.id.title_template);
            titleTemplate.setVisibility(View.GONE);
        } else {
            final boolean hasTextTitle = !TextUtils.isEmpty(mTitle);
            mIconView = (ImageView) mWindow.findViewById(R.id.icon);
            if (hasTextTitle) {
                /* Display the title if a title is supplied, else hide it */
                mTitleView = (TextView) mWindow.findViewById(R.id.alertTitle);
                mTitleView.setText(mTitle);
                /* Do this last so that if the user has supplied any
                 * icons we use them instead of the default ones. If the
                 * user has specified 0 then make it disappear.
                 */
                if (mIconId > 0) {
                    mIconView.setImageResource(mIconId);
                } else if (mIcon != null) {
                    mIconView.setImageDrawable(mIcon);
                } else if (mIconId == 0) {
                    /* Apply the padding from the icon to ensure the
                     * title is aligned correctly.
                     */
                    mTitleView.setPadding(mIconView.getPaddingLeft(),
                            mIconView.getPaddingTop(),
                            mIconView.getPaddingRight(),
                            mIconView.getPaddingBottom());
                    mIconView.setVisibility(View.GONE);
                }
            } else {
                // Hide the title template
                View titleTemplate = mWindow.findViewById(R.id.title_template);
                titleTemplate.setVisibility(View.GONE);
                mIconView.setVisibility(View.GONE);
                topPanel.setVisibility(View.GONE);
                hasTitle = false;
            }
        }
        return hasTitle;
    }
    // Configures the content area: shows the message if one was set,
    // otherwise swaps in the list view (if any) or hides the panel.
    private void setupContent(LinearLayout contentPanel) {
        mScrollView = (ScrollView) mWindow.findViewById(R.id.scrollView);
        mScrollView.setFocusable(false);
        // Special case for users that only want to display a String
        mMessageView = (TextView) mWindow.findViewById(R.id.message);
        if (mMessageView == null) {
            return;
        }
        if (mMessage != null) {
            mMessageView.setText(mMessage);
        } else {
            mMessageView.setVisibility(View.GONE);
            mScrollView.removeView(mMessageView);
            if (mListView != null) {
                // Replace the scroll view with the list and let the list take
                // all remaining vertical space (weight 1).
                contentPanel.removeView(mWindow.findViewById(R.id.scrollView));
                contentPanel.addView(mListView,
                        new LinearLayout.LayoutParams(MATCH_PARENT, MATCH_PARENT));
                contentPanel.setLayoutParams(new LinearLayout.LayoutParams(MATCH_PARENT, 0, 1.0f));
            } else {
                contentPanel.setVisibility(View.GONE);
            }
        }
    }
    // Shows/hides the three buttons based on whether text was supplied and
    // returns true when at least one button is visible. Centers a lone
    // button when the theme asks for it.
    private boolean setupButtons() {
        int BIT_BUTTON_POSITIVE = 1;
        int BIT_BUTTON_NEGATIVE = 2;
        int BIT_BUTTON_NEUTRAL = 4;
        int whichButtons = 0;
        mButtonPositive = (Button) mWindow.findViewById(R.id.button1);
        mButtonPositive.setOnClickListener(mButtonHandler);
        if (TextUtils.isEmpty(mButtonPositiveText)) {
            mButtonPositive.setVisibility(View.GONE);
        } else {
            mButtonPositive.setText(mButtonPositiveText);
            mButtonPositive.setVisibility(View.VISIBLE);
            whichButtons = whichButtons | BIT_BUTTON_POSITIVE;
        }
        mButtonNegative = (Button) mWindow.findViewById(R.id.button2);
        mButtonNegative.setOnClickListener(mButtonHandler);
        if (TextUtils.isEmpty(mButtonNegativeText)) {
            mButtonNegative.setVisibility(View.GONE);
        } else {
            mButtonNegative.setText(mButtonNegativeText);
            mButtonNegative.setVisibility(View.VISIBLE);
            whichButtons = whichButtons | BIT_BUTTON_NEGATIVE;
        }
        mButtonNeutral = (Button) mWindow.findViewById(R.id.button3);
        mButtonNeutral.setOnClickListener(mButtonHandler);
        if (TextUtils.isEmpty(mButtonNeutralText)) {
            mButtonNeutral.setVisibility(View.GONE);
        } else {
            mButtonNeutral.setText(mButtonNeutralText);
            mButtonNeutral.setVisibility(View.VISIBLE);
            whichButtons = whichButtons | BIT_BUTTON_NEUTRAL;
        }
        if (shouldCenterSingleButton(mContext)) {
            /*
             * If we only have 1 button it should be centered on the layout and
             * expand to fill 50% of the available space.
             */
            if (whichButtons == BIT_BUTTON_POSITIVE) {
                centerButton(mButtonPositive);
            } else if (whichButtons == BIT_BUTTON_NEGATIVE) {
                centerButton(mButtonNegative);
            } else if (whichButtons == BIT_BUTTON_NEUTRAL) {
                centerButton(mButtonNeutral);
            }
        }
        return whichButtons != 0;
    }
    // Centers a single visible button horizontally and reveals the spacer
    // views on either side of it.
    private void centerButton(Button button) {
        LinearLayout.LayoutParams params = (LinearLayout.LayoutParams) button.getLayoutParams();
        params.gravity = Gravity.CENTER_HORIZONTAL;
        params.weight = 0.5f;
        button.setLayoutParams(params);
        View leftSpacer = mWindow.findViewById(R.id.leftSpacer);
        if (leftSpacer != null) {
            leftSpacer.setVisibility(View.VISIBLE);
        }
        View rightSpacer = mWindow.findViewById(R.id.rightSpacer);
        if (rightSpacer != null) {
            rightSpacer.setVisibility(View.VISIBLE);
        }
    }
    // Assigns full/top/center/bottom backgrounds to the visible panels in
    // order, choosing light or dark variants per panel, and finally binds
    // the list adapter (deferred so a footer could still be added).
    private void setBackground(LinearLayout topPanel, LinearLayout contentPanel,
            View customPanel, boolean hasButtons, TypedArray a, boolean hasTitle,
            View buttonPanel) {
        /* Get all the different background required */
        int fullDark = 0;
        int topDark = 0;
        int centerDark = 0;
        int bottomDark = 0;
        int fullBright = 0;
        int topBright = 0;
        int centerBright = 0;
        int bottomBright = 0;
        int bottomMedium = 0;
        // Pre-L apps get the legacy popup backgrounds as defaults; newer apps
        // default to 0 (no background) unless the theme overrides below.
        if (mContext.getApplicationInfo().targetSdkVersion <= Build.VERSION_CODES.KITKAT) {
            fullDark = R.drawable.popup_full_dark;
            topDark = R.drawable.popup_top_dark;
            centerDark = R.drawable.popup_center_dark;
            bottomDark = R.drawable.popup_bottom_dark;
            fullBright = R.drawable.popup_full_bright;
            topBright = R.drawable.popup_top_bright;
            centerBright = R.drawable.popup_center_bright;
            bottomBright = R.drawable.popup_bottom_bright;
            bottomMedium = R.drawable.popup_bottom_medium;
        }
        fullDark = a.getResourceId(R.styleable.AlertDialog_fullDark, fullDark);
        topDark = a.getResourceId(R.styleable.AlertDialog_topDark, topDark);
        centerDark = a.getResourceId(R.styleable.AlertDialog_centerDark, centerDark);
        bottomDark = a.getResourceId(R.styleable.AlertDialog_bottomDark, bottomDark);
        fullBright = a.getResourceId(R.styleable.AlertDialog_fullBright, fullBright);
        topBright = a.getResourceId(R.styleable.AlertDialog_topBright, topBright);
        centerBright = a.getResourceId(R.styleable.AlertDialog_centerBright, centerBright);
        bottomBright = a.getResourceId(R.styleable.AlertDialog_bottomBright, bottomBright);
        bottomMedium = a.getResourceId(R.styleable.AlertDialog_bottomMedium, bottomMedium);
        /*
         * We now set the background of all of the sections of the alert.
         * First collect together each section that is being displayed along
         * with whether it is on a light or dark background, then run through
         * them setting their backgrounds.  This is complicated because we need
         * to correctly use the full, top, middle, and bottom graphics depending
         * on how many views they are and where they appear.
         */
        View[] views = new View[4];
        boolean[] light = new boolean[4];
        View lastView = null;
        boolean lastLight = false;
        int pos = 0;
        if (hasTitle) {
            views[pos] = topPanel;
            light[pos] = false;
            pos++;
        }
        /* The contentPanel displays either a custom text message or
         * a ListView. If it's text we should use the dark background
         * for ListView we should use the light background. If neither
         * are there the contentPanel will be hidden so set it as null.
         */
        views[pos] = (contentPanel.getVisibility() == View.GONE)
                ? null : contentPanel;
        light[pos] = mListView != null;
        pos++;
        if (customPanel != null) {
            views[pos] = customPanel;
            light[pos] = mForceInverseBackground;
            pos++;
        }
        if (hasButtons) {
            views[pos] = buttonPanel;
            light[pos] = true;
        }
        // Walk the visible views: each view's background is assigned one
        // step late, once we know whether another view follows it.
        boolean setView = false;
        for (pos=0; pos<views.length; pos++) {
            View v = views[pos];
            if (v == null) {
                continue;
            }
            if (lastView != null) {
                if (!setView) {
                    lastView.setBackgroundResource(lastLight ? topBright : topDark);
                } else {
                    lastView.setBackgroundResource(lastLight ? centerBright : centerDark);
                }
                setView = true;
            }
            lastView = v;
            lastLight = light[pos];
        }
        if (lastView != null) {
            if (setView) {
                /* ListViews will use the Bright background but buttons use
                 * the Medium background.
                 */
                lastView.setBackgroundResource(
                        lastLight ? (hasButtons ? bottomMedium : bottomBright) : bottomDark);
            } else {
                lastView.setBackgroundResource(lastLight ? fullBright : fullDark);
            }
        }
        /* TODO: uncomment section below. The logic for this should be if
         * it's a Contextual menu being displayed AND only a Cancel button
         * is shown then do this.
         */
//        if (hasButtons && (mListView != null)) {
            /* Yet another *special* case. If there is a ListView with buttons
             * don't put the buttons on the bottom but instead put them in the
             * footer of the ListView this will allow more items to be
             * displayed.
             */
            /*
            contentPanel.setBackgroundResource(bottomBright);
            buttonPanel.setBackgroundResource(centerMedium);
            ViewGroup parent = (ViewGroup) mWindow.findViewById(R.id.parentPanel);
            parent.removeView(buttonPanel);
            AbsListView.LayoutParams params = new AbsListView.LayoutParams(
                    AbsListView.LayoutParams.MATCH_PARENT,
                    AbsListView.LayoutParams.MATCH_PARENT);
            buttonPanel.setLayoutParams(params);
            mListView.addFooterView(buttonPanel);
            */
//        }
        if ((mListView != null) && (mAdapter != null)) {
            mListView.setAdapter(mAdapter);
            if (mCheckedItem > -1) {
                mListView.setItemChecked(mCheckedItem, true);
                mListView.setSelection(mCheckedItem);
            }
        }
    }
    // ListView whose measure-pass row recycling can be disabled (needed when
    // rows are stateful, e.g. checked items, and must not be reused while
    // measuring).
    public static class RecycleListView extends ListView {
        boolean mRecycleOnMeasure = true;
        public RecycleListView(Context context) {
            super(context);
        }
        public RecycleListView(Context context, AttributeSet attrs) {
            super(context, attrs);
        }
        public RecycleListView(Context context, AttributeSet attrs, int defStyle) {
            super(context, attrs, defStyle);
        }
        @Override
        protected boolean recycleOnMeasure() {
            return mRecycleOnMeasure;
        }
    }
    /**
     * Parameter holder populated by {@code AlertDialog.Builder}; its
     * {@link #apply} method transfers the collected values onto an
     * {@link AlertController}.
     */
    public static class AlertParams {
        public final Context mContext;
        public final LayoutInflater mInflater;
        public int mIconId = 0;
        public Drawable mIcon;
        public int mIconAttrId = 0;
        public CharSequence mTitle;
        public View mCustomTitleView;
        public CharSequence mMessage;
        public CharSequence mPositiveButtonText;
        public DialogInterface.OnClickListener mPositiveButtonListener;
        public CharSequence mNegativeButtonText;
        public DialogInterface.OnClickListener mNegativeButtonListener;
        public CharSequence mNeutralButtonText;
        public DialogInterface.OnClickListener mNeutralButtonListener;
        public boolean mCancelable;
        public DialogInterface.OnCancelListener mOnCancelListener;
        public DialogInterface.OnDismissListener mOnDismissListener;
        public DialogInterface.OnKeyListener mOnKeyListener;
        public CharSequence[] mItems;
        public ListAdapter mAdapter;
        public DialogInterface.OnClickListener mOnClickListener;
        public View mView;
        public int mViewSpacingLeft;
        public int mViewSpacingTop;
        public int mViewSpacingRight;
        public int mViewSpacingBottom;
        public boolean mViewSpacingSpecified = false;
        public boolean[] mCheckedItems;
        public boolean mIsMultiChoice;
        public boolean mIsSingleChoice;
        public int mCheckedItem = -1;
        public DialogInterface.OnMultiChoiceClickListener mOnCheckboxClickListener;
        public Cursor mCursor;
        public String mLabelColumn;
        public String mIsCheckedColumn;
        public boolean mForceInverseBackground;
        public AdapterView.OnItemSelectedListener mOnItemSelectedListener;
        public OnPrepareListViewListener mOnPrepareListViewListener;
        public boolean mRecycleOnMeasure = true;
        /**
         * Interface definition for a callback to be invoked before the ListView
         * will be bound to an adapter.
         */
        public interface OnPrepareListViewListener {
            /**
             * Called before the ListView is bound to an adapter.
             * @param listView The ListView that will be shown in the dialog.
             */
            void onPrepareListView(ListView listView);
        }
        public AlertParams(Context context) {
            mContext = context;
            mCancelable = true;
            mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        }
        // Pushes every configured value onto the controller. Title handling:
        // a custom title view takes precedence over text/icon.
        public void apply(AlertController dialog) {
            if (mCustomTitleView != null) {
                dialog.setCustomTitle(mCustomTitleView);
            } else {
                if (mTitle != null) {
                    dialog.setTitle(mTitle);
                }
                if (mIcon != null) {
                    dialog.setIcon(mIcon);
                }
                if (mIconId >= 0) {
                    dialog.setIcon(mIconId);
                }
                if (mIconAttrId > 0) {
                    dialog.setIcon(dialog.getIconAttributeResId(mIconAttrId));
                }
            }
            if (mMessage != null) {
                dialog.setMessage(mMessage);
            }
            if (mPositiveButtonText != null) {
                dialog.setButton(DialogInterface.BUTTON_POSITIVE, mPositiveButtonText,
                        mPositiveButtonListener, null);
            }
            if (mNegativeButtonText != null) {
                dialog.setButton(DialogInterface.BUTTON_NEGATIVE, mNegativeButtonText,
                        mNegativeButtonListener, null);
            }
            if (mNeutralButtonText != null) {
                dialog.setButton(DialogInterface.BUTTON_NEUTRAL, mNeutralButtonText,
                        mNeutralButtonListener, null);
            }
            if (mForceInverseBackground) {
                dialog.setInverseBackgroundForced(true);
            }
            // For a list, the client can either supply an array of items or an
            // adapter or a cursor
            if ((mItems != null) || (mCursor != null) || (mAdapter != null)) {
                createListView(dialog);
            }
            if (mView != null) {
                if (mViewSpacingSpecified) {
                    dialog.setView(mView, mViewSpacingLeft, mViewSpacingTop, mViewSpacingRight,
                            mViewSpacingBottom);
                } else {
                    dialog.setView(mView);
                }
            }
            /*
            dialog.setCancelable(mCancelable);
            dialog.setOnCancelListener(mOnCancelListener);
            if (mOnKeyListener != null) {
                dialog.setOnKeyListener(mOnKeyListener);
            }
            */
        }
        // Builds the ListView and the appropriate adapter (multi-choice,
        // single-choice or plain; array- or cursor-backed) and stores both on
        // the controller for setBackground() to bind later.
        private void createListView(final AlertController dialog) {
            final RecycleListView listView = (RecycleListView)
                    mInflater.inflate(dialog.mListLayout, null);
            ListAdapter adapter;
            if (mIsMultiChoice) {
                if (mCursor == null) {
                    adapter = new ArrayAdapter<CharSequence>(
                            mContext, dialog.mMultiChoiceItemLayout, R.id.text1, mItems) {
                        @Override
                        public View getView(int position, View convertView, ViewGroup parent) {
                            View view = super.getView(position, convertView, parent);
                            if (mCheckedItems != null) {
                                boolean isItemChecked = mCheckedItems[position];
                                if (isItemChecked) {
                                    listView.setItemChecked(position, true);
                                }
                            }
                            return view;
                        }
                    };
                } else {
                    adapter = new CursorAdapter(mContext, mCursor, false) {
                        private final int mLabelIndex;
                        private final int mIsCheckedIndex;
                        {
                            // Resolve column indices once up front; throws if
                            // the cursor lacks the configured columns.
                            final Cursor cursor = getCursor();
                            mLabelIndex = cursor.getColumnIndexOrThrow(mLabelColumn);
                            mIsCheckedIndex = cursor.getColumnIndexOrThrow(mIsCheckedColumn);
                        }
                        @Override
                        public void bindView(View view, Context context, Cursor cursor) {
                            CheckedTextView text = (CheckedTextView) view.findViewById(R.id.text1);
                            text.setText(cursor.getString(mLabelIndex));
                            listView.setItemChecked(cursor.getPosition(),
                                    cursor.getInt(mIsCheckedIndex) == 1);
                        }
                        @Override
                        public View newView(Context context, Cursor cursor, ViewGroup parent) {
                            return mInflater.inflate(dialog.mMultiChoiceItemLayout,
                                    parent, false);
                        }
                    };
                }
            } else {
                int layout = mIsSingleChoice
                        ? dialog.mSingleChoiceItemLayout : dialog.mListItemLayout;
                if (mCursor == null) {
                    adapter = (mAdapter != null) ? mAdapter
                            : new ArrayAdapter<CharSequence>(mContext, layout, R.id.text1, mItems);
                } else {
                    adapter = new SimpleCursorAdapter(mContext, layout,
                            mCursor, new String[]{mLabelColumn}, new int[]{R.id.text1});
                }
            }
            if (mOnPrepareListViewListener != null) {
                mOnPrepareListViewListener.onPrepareListView(listView);
            }
            /* Don't directly set the adapter on the ListView as we might
             * want to add a footer to the ListView later.
             */
            dialog.mAdapter = adapter;
            dialog.mCheckedItem = mCheckedItem;
            if (mOnClickListener != null) {
                listView.setOnItemClickListener(new OnItemClickListener() {
                    public void onItemClick(AdapterView parent, View v, int position, long id) {
                        mOnClickListener.onClick(dialog.mDialogInterface, position);
                        if (!mIsSingleChoice) {
                            dialog.mDialogInterface.dismiss();
                        }
                    }
                });
            } else if (mOnCheckboxClickListener != null) {
                listView.setOnItemClickListener(new OnItemClickListener() {
                    public void onItemClick(AdapterView parent, View v, int position, long id) {
                        if (mCheckedItems != null) {
                            mCheckedItems[position] = listView.isItemChecked(position);
                        }
                        mOnCheckboxClickListener.onClick(
                                dialog.mDialogInterface, position, listView.isItemChecked(position));
                    }
                });
            }
            // Attach a given OnItemSelectedListener to the ListView
            if (mOnItemSelectedListener != null) {
                listView.setOnItemSelectedListener(mOnItemSelectedListener);
            }
            if (mIsSingleChoice) {
                listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
            } else if (mIsMultiChoice) {
                listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
            }
            listView.mRecycleOnMeasure = mRecycleOnMeasure;
            dialog.mListView = listView;
        }
    }
}
| |
package metroinsight.citadel.rest;
import java.nio.charset.StandardCharsets;
import java.util.UUID;
import io.vertx.core.Vertx;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.RoutingContext;
import io.vertx.serviceproxy.ProxyHelper;
import metroinsight.citadel.authorization.Authorization_MetaData;
import metroinsight.citadel.common.ErrorMessages;
import metroinsight.citadel.common.RestApiTemplate;
import metroinsight.citadel.datacache.DataCacheService;
import metroinsight.citadel.metadata.MetadataService;
import metroinsight.citadel.model.BaseContent;
/**
 * REST handlers for point metadata: create, read (by uuid), upsert and query.
 *
 * <p>Each handler resolves the caller's API token through the HBase-backed
 * {@link Authorization_MetaData} helper before touching the metadata service,
 * and answers over the Vert.x {@link HttpServerResponse} obtained from
 * {@code getDefaultResponse(rc)}.
 */
public class MetadataRestApi extends RestApiTemplate {
  /** Event-bus proxy for all metadata reads/writes. */
  private MetadataService metadataService;
  /** Event-bus proxy for the data cache; not yet used by the handlers below. */
  private DataCacheService cacheService;
  Vertx vertx;
  /*
   * Used to validate that the user token is valid in every operation on MetaData.
   */
  Authorization_MetaData Auth_meta;

  /**
   * Creates the service proxies and the authorization helper.
   *
   * @param vertx   the Vert.x instance the proxies are bound to
   * @param configs deployment configuration; must contain "auth.hbase.sitefile"
   */
  public MetadataRestApi(Vertx vertx, JsonObject configs) {
    this.configs = configs;
    metadataService = ProxyHelper.createProxy(MetadataService.class, vertx, MetadataService.ADDRESS);
    cacheService = ProxyHelper.createProxy(DataCacheService.class, vertx, DataCacheService.ADDRESS);
    this.vertx = vertx;
    // Initializing the HBase-backed authorization helper.
    Auth_meta = new Authorization_MetaData(configs.getString("auth.hbase.sitefile"));
  }

  /**
   * Handles a metadata query: expects a JSON body with a "query" object,
   * forwards it to the metadata service and writes the results back.
   * Responds 400 when the body is not JSON or the query fails.
   */
  public void queryPoint(RoutingContext rc) {
    HttpServerResponse resp = getDefaultResponse(rc);
    BaseContent content = new BaseContent();
    JsonObject q;
    try {
      // Consistent with createPoint: a non-JSON body is a client error, not a 500.
      q = (JsonObject) rc.getBodyAsJson().getValue("query");
    } catch (Exception e) {
      sendErrorResponse(resp, 400, ErrorMessages.NOT_JSON);
      return;
    }
    metadataService.queryPoint(q, ar -> {
      if (ar.failed()) {
        content.setReason(ar.cause().getMessage());
        resp.setStatusCode(400);
      } else {
        content.setSucceess(true);
        content.setResults(ar.result());
        resp.setStatusCode(200);
      }
      endWithBody(resp, content.toString());
    });
  }

  /**
   * Inserts or updates metadata for the point identified by the "uuid" path
   * parameter. Requires "userToken" and "metadata" in the JSON body. When the
   * metadata carries an "owner" key, ownership is transferred to that user.
   */
  public void upsertMetadata(RoutingContext rc) {
    HttpServerResponse resp = getDefaultResponse(rc);
    JsonObject body = rc.getBodyAsJson();
    String uuid = rc.request().getParam("uuid");
    if (!body.containsKey("userToken")) {
      sendErrorResponse(resp, 400, ErrorMessages.EMPTY_SEC_TOKEN);
      return;
    }
    JsonObject metadata = body.getJsonObject("metadata");
    if (metadata == null) {
      // Guard: containsKey("owner") below would NPE on a missing "metadata" object.
      sendErrorResponse(resp, 400, ErrorMessages.PARAM_MISSING);
      return;
    }
    String userToken = body.getString("userToken");
    // Resolved but not yet acted upon; kept for the pending permission check.
    String userId = Auth_meta.get_userID(userToken);
    // TODO: IMPORTANT: Check if the user has the right level of permission.
    if (metadata.containsKey("owner")) {
      // This transfers ownership currently. TODO: Maybe just add owner?
      // TODO: Need to remove previous relevant metadata and policy.
      String newOwnerId = metadata.getString("owner");
      // NOTE(review): get_userID maps a *token* to a user id elsewhere in this
      // class, but here it is fed the new owner's id and its result is used as
      // a token — confirm the argument/result roles are intended.
      String newOwnerToken = Auth_meta.get_userID(newOwnerId);
      Auth_meta.insert_ds_owner(uuid, newOwnerToken, newOwnerId);
      Auth_meta.insert_policy(uuid, newOwnerId, "true");
    }
    // TODO: Evaluate if the keys/values are valid.
    metadataService.upsertMetadata(uuid, metadata, ar -> {
      if (ar.failed()) {
        sendErrorResponse(resp, 500, ar.cause().getMessage());
      } else {
        sendSuccesResponse(resp, 200, new JsonArray());
      }
    });
  }

  /**
   * Returns the metadata of a single point identified by the "uuid" path
   * parameter. Responds 400 when the uuid is missing or the lookup fails.
   */
  public void getPoint(RoutingContext rc) {
    HttpServerResponse resp = getDefaultResponse(rc);
    BaseContent content = new BaseContent();
    String uuid = rc.request().getParam("uuid");
    if (uuid == null) {
      sendErrorResponse(resp, 400, ErrorMessages.EMPTY_UUID);
      return;
    }
    metadataService.getPoint(uuid, ar -> {
      if (ar.failed()) {
        content.setReason(ar.cause().getMessage());
        resp.setStatusCode(400);
      } else {
        JsonArray pointResult = new JsonArray();
        pointResult.add(ar.result());
        resp.setStatusCode(200);
        content.setSucceess(true);
        content.setResults(pointResult);
      }
      // BUGFIX: the response was written but never end()-ed, leaving clients hanging.
      endWithBody(resp, content.toString());
    });
  }

  /**
   * Creates a new point. The JSON body must carry the caller's token and a
   * "point" object with at least a "name". On success the point gets a fresh
   * uuid, ownership and a default policy are recorded, and 201 is returned
   * with the uuid.
   */
  public void createPoint(RoutingContext rc) {
    HttpServerResponse resp = getDefaultResponse(rc);
    BaseContent content = new BaseContent();
    JsonObject body;
    try {
      body = rc.getBodyAsJson();
    } catch (Exception e) {
      sendErrorResponse(resp, 400, ErrorMessages.NOT_JSON);
      return;
    }
    try {
      if (!body.containsKey(Auth_meta.userToken)) {
        sendErrorResponse(resp, 400, ErrorMessages.EMPTY_SEC_TOKEN);
        return;
      }
      String userToken = body.getString(Auth_meta.userToken);
      // Check if this token exists in HBase; an empty string means unknown token.
      String userId = Auth_meta.get_userID(userToken);
      if (userId.equals("")) {
        sendErrorResponse(resp, 400, ErrorMessages.USER_NOT_FOUND);
        return;
      }
      // Token exists and is linked to a valid userId.
      JsonObject point = body.getJsonObject("point");
      if (point == null || !point.containsKey("name")) {
        sendErrorResponse(resp, 400, ErrorMessages.PARAM_MISSING);
        // BUGFIX: previously fell through and created the point anyway;
        // also guards against a missing "point" object (NPE on containsKey).
        return;
      }
      // TODO: Once implemented, add if the point exists in redis cache.
      String uuid = UUID.randomUUID().toString();
      point.put("uuid", uuid);    // Later used by metadataService.createPoint.
      point.put("userId", userId); // Links the point to its creating user.
      // Call createPoint in metadataService asynchronously.
      metadataService.createPoint(point, ar -> {
        String cStr;
        if (ar.failed()) {
          resp.setStatusCode(400);
          content.setReason(ar.cause().getMessage());
          cStr = content.toString();
        } else {
          // Record the owner (token, userId, ds_ID) in the HBase metadata table
          // and a default "true" (no space-time constraints) policy for the owner.
          Auth_meta.insert_ds_owner(uuid, userToken, userId);
          Auth_meta.insert_policy(uuid, userId, "true");
          resp.setStatusCode(201);
          JsonObject pointCreateContent = new JsonObject();
          pointCreateContent.put("success", true);
          pointCreateContent.put("uuid", ar.result().toString());
          cStr = pointCreateContent.toString();
        }
        // BUGFIX: the response was written but never end()-ed.
        endWithBody(resp, cStr);
      });
    } catch (Exception e) {
      e.printStackTrace();
      sendErrorResponse(resp, 500, e.getMessage());
    }
  }

  /** Writes the payload with an explicit content-length header and ends the response. */
  private void endWithBody(HttpServerResponse resp, String payload) {
    resp.putHeader("content-length", Integer.toString(payload.length()))
        .write(payload)
        .end();
  }
}
| |
/**
* Copyright (C) 2004-2011 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.fastpath.resources;
import org.jivesoftware.fastpath.FastpathPlugin;
import javax.swing.ImageIcon;
import javax.swing.JEditorPane;
import javax.swing.JFrame;
import javax.swing.JScrollPane;
import java.awt.BorderLayout;
import java.io.File;
import java.net.URL;
import java.util.Enumeration;
import java.util.PropertyResourceBundle;
import java.util.ResourceBundle;
/**
 * Static accessors for the Fastpath plugin's resource bundle ("fastpath").
 *
 * <p>The {@code public static final String} constants below are the bundle
 * keys; {@link #getString(String)}, {@link #getImageIcon(String)} and
 * {@link #getURL(String)} resolve a key to its text, icon or resource URL.
 * The {@link #main(String[])} entry point is a developer utility that
 * regenerates the constant list and checks for unused image files.
 */
public class FastpathRes {
    // Loaded once in the static initializer below; bundle lookup failures
    // would surface as an ExceptionInInitializerError at class-load time.
    private static final PropertyResourceBundle prb;

    public static final String NOTE_EDIT_16x16 = "NOTE_EDIT_16x16";
    public static final String MAGICIAN_IMAGE = "MAGICIAN_IMAGE";
    public static final String IM_AWAY = "IM_AWAY";
    public static final String RED_FLAG_16x16 = "RED_FLAG_16x16";
    public static final String LOGIN_DIALOG_USERNAME = "LOGIN_DIALOG_USERNAME";
    public static final String PREFERENCES_IMAGE = "PREFERENCES_IMAGE";
    public static final String CURRENT_AGENTS = "CURRENT_AGENTS";
    public static final String LOGIN_DIALOG_QUIT = "LOGIN_DIALOG_QUIT";
    public static final String RIGHT_ARROW_IMAGE = "RIGHT_ARROW_IMAGE";
    public static final String MEGAPHONE_16x16 = "MEGAPHONE_16x16";
    public static final String SMALL_DOCUMENT_ADD = "SMALL_DOCUMENT_ADD";
    public static final String ADD_TO_KB = "ADD_TO_KB";
    public static final String DATA_REFRESH_16x16 = "DATA_REFRESH_16x16";
    public static final String TEXT_ITALIC = "TEXT_ITALIC";
    public static final String NOTEBOOK_IMAGE = "NOTEBOOK_IMAGE";
    public static final String AVAILABLE_USER = "AVAILABLE_USER";
    public static final String SPARK_IMAGE = "SPARK_IMAGE";
    public static final String VIEW = "VIEW";
    public static final String TEXT_BOLD = "TEXT_BOLD";
    public static final String SMALL_USER1_MESSAGE = "SMALL_USER1_MESSAGE";
    public static final String SERVER_UNAVAILABLE = "SERVER_UNAVAILABLE";
    public static final String CONFERENCE_IMAGE_16x16 = "CONFERENCE_IMAGE_16x16";
    public static final String WORKGROUP_QUEUE = "WORKGROUP_QUEUE";
    public static final String SEARCH_IMAGE_32x32 = "SEARCH_IMAGE_32x32";
    public static final String ADD_BOOKMARK_ICON = "ADD_BOOKMARK_ICON";
    public static final String TEXT_UNDERLINE = "TEXT_UNDERLINE";
    public static final String TOOLBOX = "TOOLBOX";
    public static final String PROFILE_ICON = "PROFILE_ICON";
    public static final String SMALL_CURRENT_AGENTS = "SMALL_CURRENT_AGENTS";
    public static final String STAR_GREEN_IMAGE = "STAR_GREEN_IMAGE";
    public static final String QUESTIONS_ANSWERS = "QUESTIONS_ANSWERS";
    public static final String DOCUMENT_EXCHANGE_IMAGE = "DOCUMENT_EXCHANGE_IMAGE";
    public static final String MOBILE_PHONE_IMAGE = "MOBILE_PHONE_IMAGE";
    public static final String BLANK_IMAGE = "BLANK_IMAGE";
    public static final String LOCK_UNLOCK_16x16 = "LOCK_UNLOCK_16x16";
    public static final String BOOKMARK_ICON = "BOOKMARK_ICON";
    public static final String INFORMATION_ICO = "INFORMATION_ICO";
    public static final String VERSION = "VERSION";
    public static final String MAIL_INTO_16x16 = "MAIL_INTO_16x16";
    public static final String ERROR_INVALID_WORKGROUP = "ERROR_INVALID_WORKGROUP";
    public static final String SMALL_USER1_NEW = "SMALL_USER1_NEW";
    public static final String SMALL_USER_DELETE = "SMALL_USER_DELETE";
    public static final String CHATTING_AGENT_IMAGE = "CHATTING_AGENT_IMAGE";
    public static final String FORUM_TAB_TITLE = "FORUM_TAB_TITLE";
    public static final String DOCUMENT_16x16 = "DOCUMENT_16x16";
    public static final String KNOWLEDGE_BASE_TAB_TITLE = "KNOWLEDGE_BASE_TAB_TITLE";
    public static final String STAR_RED_IMAGE = "STAR_RED_IMAGE";
    public static final String USER1_BACK_16x16 = "USER1_BACK_16x16";
    public static final String SMALL_USER1_STOPWATCH = "SMALL_USER1_STOPWATCH";
    public static final String ADD_IMAGE_24x24 = "ADD_IMAGE_24x24";
    public static final String TRAFFIC_LIGHT_IMAGE = "TRAFFIC_LIGHT_IMAGE";
    public static final String GO = "GO";
    public static final String SMALL_USER1_INFORMATION = "SMALL_USER1_INFORMATION";
    public static final String DATA_DELETE_16x16 = "DATA_DELETE_16x16";
    public static final String SMALL_MESSAGE_IMAGE = "SMALL_MESSAGE_IMAGE";
    public static final String LOCK_16x16 = "LOCK_16x16";
    public static final String STAR_GREY_IMAGE = "STAR_GREY_IMAGE";
    public static final String MODERATOR_IMAGE = "MODERATOR_IMAGE";
    public static final String JOIN_GROUPCHAT_IMAGE = "JOIN_GROUPCHAT_IMAGE";
    public static final String UNRECOVERABLE_ERROR = "UNRECOVERABLE_ERROR";
    public static final String DOCUMENT_INFO_32x32 = "DOCUMENT_INFO_32x32";
    public static final String CREATE_FAQ_TITLE = "CREATE_FAQ_TITLE";
    public static final String SMALL_USER1_TIME = "SMALL_USER1_TIME";
    public static final String SMALL_ALL_AGENTS_IMAGE = "SMALL_ALL_AGENTS_IMAGE";
    public static final String PAWN_GLASS_WHITE = "PAWN_GLASS_WHITE";
    public static final String CURRENT_CHATS = "CURRENT_CHATS";
    public static final String INVALID_USERNAME_PASSWORD = "INVALID_USERNAME_PASSWORD";
    public static final String SMALL_DATA_FIND_IMAGE = "SMALL_DATA_FIND_IMAGE";
    public static final String CLOSE_IMAGE = "CLOSE_IMAGE";
    public static final String TEXT_NORMAL = "TEXT_NORMAL";
    public static final String STAR_YELLOW_IMAGE = "STAR_YELLOW_IMAGE";
    public static final String APP_NAME = "APP_NAME";
    public static final String ADD_CONTACT_IMAGE = "ADD_CONTACT_IMAGE";
    public static final String SEND_MAIL_IMAGE_16x16 = "SEND_MAIL_IMAGE_16x16";
    public static final String FOLDER = "FOLDER";
    public static final String LEFT_ARROW_IMAGE = "LEFT_ARROW_IMAGE";
    public static final String WELCOME = "WELCOME";
    public static final String BLUE_BALL = "BLUE_BALL";
    public static final String MESSAGE_DND = "MESSAGE_DND";
    public static final String DOCUMENT_FIND_16x16 = "DOCUMENT_FIND_16x16";
    public static final String RED_BALL = "RED_BALL";
    public static final String BRICKWALL_IMAGE = "BRICKWALL_IMAGE";
    public static final String MESSAGE_AWAY = "MESSAGE_AWAY";
    public static final String IM_DND = "IM_DND";
    public static final String SMALL_DELETE = "SMALL_DELETE";
    public static final String LINK_16x16 = "LINK_16x16";
    public static final String CALL_ICON = "CALL_ICON";
    public static final String MAIN_IMAGE = "MAIN_IMAGE";
    public static final String SMALL_CIRCLE_DELETE = "SMALL_CIRCLE_DELETE";
    public static final String SMALL_MESSAGE_EDIT_IMAGE = "SMALL_MESSAGE_EDIT_IMAGE";
    public static final String AWAY_USER = "AWAY_USER";
    public static final String SAVE_AS_16x16 = "SAVE_AS_16x16";
    public static final String FIND_TEXT_IMAGE = "FIND_TEXT_IMAGE";
    public static final String SEARCH = "SEARCH";
    public static final String STAR_BLUE_IMAGE = "STAR_BLUE_IMAGE";
    public static final String OFFLINE_ICO = "OFFLINE_ICO";
    public static final String SMALL_USER1_MOBILEPHONE = "SMALL_USER1_MOBILEPHONE";
    public static final String LOGIN_DIALOG_LOGIN_TITLE = "LOGIN_DIALOG_LOGIN_TITLE";
    public static final String ERASER_IMAGE = "ERASER_IMAGE";
    public static final String PRINTER_IMAGE_16x16 = "PRINTER_IMAGE_16x16";
    public static final String DOWNLOAD_16x16 = "DOWNLOAD_16x16";
    public static final String EARTH_LOCK_16x16 = "EARTH_LOCK_16x16";
    public static final String LOGIN_DIALOG_AUTHENTICATING = "LOGIN_DIALOG_AUTHENTICATING";
    public static final String HELP2_24x24 = "HELP2_24x24";
    public static final String MAIN_TITLE = "MAIN_TITLE";
    public static final String PROFILE_TAB_TITLE = "PROFILE_TAB_TITLE";
    public static final String CHAT_WORKSPACE = "CHAT_WORKSPACE";
    public static final String GREEN_FLAG_16x16 = "GREEN_FLAG_16x16";
    public static final String MAIN_IMAGE_ICO = "MAIN_IMAGE_ICO";
    public static final String TOOLBAR_BACKGROUND = "TOOLBAR_BACKGROUND";
    public static final String VIEW_IMAGE = "VIEW_IMAGE";
    public static final String CHATTING_CUSTOMER_IMAGE = "CHATTING_CUSTOMER_IMAGE";
    public static final String SMALL_ALARM_CLOCK = "SMALL_ALARM_CLOCK";
    public static final String INFORMATION_IMAGE = "INFORMATION_IMAGE";
    public static final String ACCEPT_CHAT = "ACCEPT_CHAT";
    public static final String SMALL_PIN_BLUE = "SMALL_PIN_BLUE";
    public static final String FONT_16x16 = "FONT_16x16";
    public static final String PAWN_GLASS_RED = "PAWN_GLASS_RED";
    public static final String LOGIN_DIALOG_WORKSPACE = "LOGIN_DIALOG_WORKSPACE";
    public static final String PAWN_GLASS_YELLOW = "PAWN_GLASS_YELLOW";
    public static final String ID_CARD_48x48 = "ID_CARD_48x48";
    public static final String DOWN_ARROW_IMAGE = "DOWN_ARROW_IMAGE";
    public static final String LOGIN_DIALOG_LOGIN = "LOGIN_DIALOG_LOGIN";
    public static final String HISTORY_16x16 = "HISTORY_16x16";
    public static final String SMALL_DOCUMENT_VIEW = "SMALL_DOCUMENT_VIEW";
    public static final String SMALL_AGENT_IMAGE = "SMALL_AGENT_IMAGE";
    public static final String SMALL_ALL_CHATS_IMAGE = "SMALL_ALL_CHATS_IMAGE";
    public static final String SERVER_ICON = "SERVER_ICON";
    public static final String SMALL_USER_ENTER = "SMALL_USER_ENTER";
    public static final String SMALL_CLOSE_BUTTON = "SMALL_CLOSE_BUTTON";
    public static final String ON_PHONE_IMAGE = "ON_PHONE_IMAGE";
    public static final String MINUS_SIGN = "MINUS_SIGN";
    public static final String PAWN_GLASS_GREEN = "PAWN_GLASS_GREEN";
    public static final String COPY_16x16 = "COPY_16x16";
    public static final String SMALL_WORKGROUP_QUEUE_IMAGE = "SMALL_WORKGROUP_QUEUE_IMAGE";
    public static final String CHAT_QUEUE = "CHAT_QUEUE";
    public static final String SEND = "SEND";
    public static final String USER_HEADSET_24x24 = "USER_HEADSET_24x24";
    public static final String BUSY_IMAGE = "BUSY_IMAGE";
    public static final String FUNNEL_DOWN_16x16 = "FUNNEL_DOWN_16x16";
    public static final String PUSH_URL_16x16 = "PUSH_URL_16x16";
    public static final String EARTH_VIEW_16x16 = "EARTH_VIEW_16x16";
    public static final String SMALL_QUESTION = "SMALL_QUESTION";
    public static final String SEND_FILE_ICON = "SEND_FILE_ICON";
    public static final String LOGIN_KEY_IMAGE = "LOGIN_KEY_IMAGE";
    public static final String CREATE_FAQ_ENTRY = "CREATE_FAQ_ENTRY";
    public static final String SPELL_CHECK_IMAGE = "SPELL_CHECK_IMAGE";
    public static final String GREEN_BALL = "GREEN_BALL";
    public static final String SMALL_BUSINESS_MAN_VIEW = "SMALL_BUSINESS_MAN_VIEW";
    public static final String BLANK_24x24 = "BLANK_24x24";
    public static final String USER1_32x32 = "USER1_32x32";
    public static final String DOOR_IMAGE = "DOOR_IMAGE";
    public static final String ALL_CHATS = "ALL_CHATS";
    public static final String SMALL_SCROLL_REFRESH = "SMALL_SCROLL_REFRESH";
    public static final String CO_BROWSER_TAB_TITLE = "CO_BROWSER_TAB_TITLE";
    public static final String PLUS_SIGN = "PLUS_SIGN";
    public static final String FIND_IMAGE = "FIND_IMAGE";
    public static final String USER1_MESSAGE_24x24 = "USER1_MESSAGE_24x24";
    public static final String SMALL_CHECK = "SMALL_CHECK";
    public static final String SEARCH_USER_16x16 = "SEARCH_USER_16x16";
    public static final String LOGIN_DIALOG_PASSWORD = "LOGIN_DIALOG_PASSWORD";
    public static final String TIME_LEFT = "TIME_LEFT";
    public static final String FAQ_TAB_TITLE = "FAQ_TAB_TITLE";
    public static final String ADD_TO_CHAT = "ADD_TO_CHAT";
    public static final String DELETE_BOOKMARK_ICON = "DELETE_BOOKMARK_ICON";
    public static final String FOLDER_CLOSED = "FOLDER_CLOSED";
    public static final String REJECT_CHAT = "REJECT_CHAT";
    public static final String YELLOW_FLAG_16x16 = "YELLOW_FLAG_16x16";
    public static final String ONLINE_ICO = "ONLINE_ICO";
    public static final String LINK_DELETE_16x16 = "LINK_DELETE_16x16";
    public static final String MAIL_FORWARD_16x16 = "MAIL_FORWARD_16x16";
    public static final String TELEPHONE_24x24 = "TELEPHONE_24x24";
    public static final String ADD_LINK_TO_CHAT = "ADD_LINK_TO_CHAT";
    public static final String SMALL_ABOUT_IMAGE = "SMALL_ABOUT_IMAGE";
    public static final String DESKTOP_IMAGE = "DESKTOP_IMAGE";
    public static final String MAIL_16x16 = "MAIL_16x16";
    public static final String MAIL_IMAGE_32x32 = "MAIL_IMAGE_32x32";
    public static final String ADDRESS_BOOK_16x16 = "ADDRESS_BOOK_16x16";
    public static final String YELLOW_BALL = "YELLOW_BALL";
    public static final String ERROR_DIALOG_TITLE = "ERROR_DIALOG_TITLE";
    public static final String REFRESH_IMAGE = "REFRESH_IMAGE";
    public static final String SMALL_ADD_IMAGE = "SMALL_ADD_IMAGE";
    public static final String SEND_FILE_24x24 = "SEND_FILE_24x24";
    public static final String PROFILE_IMAGE_24x24 = "PROFILE_IMAGE_24x24";
    public static final String SMALL_ENTRY = "SMALL_ENTRY";
    public static final String CLEAR_BALL_ICON = "CLEAR_BALL_ICON";
    public static final String CONFERENCE_IMAGE_24x24 = "CONFERENCE_IMAGE_24x24";
    public static final String BACKGROUND_IMAGE = "BACKGROUND_IMAGE";
    public static final String FREE_TO_CHAT_IMAGE = "FREE_TO_CHAT_IMAGE";
    public static final String SOUND_PREFERENCES_IMAGE = "SOUND_PREFERENCES_IMAGE";
    public static final String SPARK_LOGOUT_IMAGE = "SPARK_LOGOUT_IMAGE";
    public static final String PHOTO_IMAGE = "PHOTO_IMAGE";
    public static final String PLUGIN_IMAGE = "PLUGIN_IMAGE";
    public static final String SMALL_PROFILE_IMAGE = "SMALL_PROFILE_IMAGE";
    public static final String CHANGELOG_IMAGE = "CHANGELOG_IMAGE";
    public static final String README_IMAGE = "README_IMAGE";
    public static final String DOWN_OPTION_IMAGE = "DOWN_OPTION_IMAGE";
    public static final String FASTPATH_IMAGE_16x16 = "FASTPATH_IMAGE_16x16";
    public static final String FASTPATH_IMAGE_24x24 = "FASTPATH_IMAGE_24x24";
    public static final String FASTPATH_IMAGE_32x32 = "FASTPATH_IMAGE_32x32";
    public static final String FASTPATH_IMAGE_64x64 = "FASTPATH_IMAGE_64x64";
    public static final String CIRCLE_CHECK_IMAGE = "CIRCLE_CHECK_IMAGE";
    public static final String TRANSFER_IMAGE_24x24 = "TRANSFER_IMAGE_24x24";
    public static final String FASTPATH_OFFLINE_IMAGE_16x16 = "FASTPATH_OFFLINE_IMAGE_16x16";
    public static final String FASTPATH_OFFLINE_IMAGE_24x24 = "FASTPATH_OFFLINE_IMAGE_24x24";
    public static final String USER1_ADD_16x16 = "USER1_ADD_16x16";
    public static final String END_BUTTON_24x24 = "END_BUTTON_24x24";
    public static final String POWERED_BY_IMAGE = "POWERED_BY_IMAGE";
    public static final String STICKY_NOTE_IMAGE = "STICKY_NOTE_IMAGE";
    // NOTE: constant name and bundle key intentionally differ here (pre-existing).
    public static final String HISTORY_24x24_IMAGE = "HISTORY_24x24";
    public static final String PANE_UP_ARROW_IMAGE = "PANE_UP_ARROW_IMAGE";
    public static final String PANE_DOWN_ARROW_IMAGE = "PANE_DOWN_ARROW_IMAGE";
    public static final String CLOSE_DARK_X_IMAGE = "CLOSE_DARK_X_IMAGE";
    public static final String CLOSE_WHITE_X_IMAGE = "CLOSE_WHITE_X_IMAGE";
    public static final String CHAT_INVITE_IMAGE_24x24 = "CHAT_INVITE_IMAGE_24x24";
    public static final String CHAT_TRANSFER_IMAGE_24x24 = "CHAT_TRANSFER_IMAGE_24x24";
    public static final String CHAT_COBROWSE_IMAGE_24x24 = "CHAT_COBROWSE_IMAGE_24x24";
    public static final String CHAT_ENDED_IMAGE_24x24 = "CHAT_ENDED_IMAGE_24x24";

    // Class loader used to resolve image/resource paths from the plugin jar.
    private static final ClassLoader cl = FastpathPlugin.class.getClassLoader();

    static {
        prb = (PropertyResourceBundle) ResourceBundle.getBundle("fastpath");
    }

    /** Utility class: no instances. */
    private FastpathRes() {
    }

    /**
     * Returns the bundle value for the given key.
     *
     * @throws java.util.MissingResourceException if the key is absent
     */
    public static String getString(String propertyName) {
        return prb.getString(propertyName);
    }

    /**
     * Loads the icon whose path is stored under the given bundle key.
     *
     * @return the icon, or {@code null} when the key or the image resource is missing
     */
    public static ImageIcon getImageIcon(String imageName) {
        try {
            final String iconURI = getString(imageName);
            final URL imageURL = cl.getResource(iconURI);
            if (imageURL == null) {
                // Explicit check instead of relying on ImageIcon throwing on null.
                System.out.println(imageName + " not found.");
                return null;
            }
            return new ImageIcon(imageURL);
        }
        catch (Exception ex) {
            // Missing bundle key or unreadable resource; treated as "no icon".
            System.out.println(imageName + " not found.");
        }
        return null;
    }

    /** Returns the classpath URL of the resource stored under the given bundle key. */
    public static URL getURL(String propertyName) {
        return cl.getResource(getString(propertyName));
    }

    /**
     * Developer utility: dumps all bundle keys as Java constant declarations
     * into an editor pane, preloads every image entry, and reports unused
     * image files. Not part of the plugin's runtime behavior.
     */
    public static void main(String[] args) {
        JFrame frame = new JFrame();
        frame.getContentPane().setLayout(new BorderLayout());
        JEditorPane pane = new JEditorPane();
        frame.getContentPane().add(new JScrollPane(pane));
        StringBuilder buf = new StringBuilder();
        Enumeration<String> enumeration = prb.getKeys();
        while (enumeration.hasMoreElements()) {
            String token = enumeration.nextElement();
            String value = prb.getString(token).toLowerCase();
            // Touch each image entry so missing files are reported on stdout.
            if (value.endsWith(".gif") || value.endsWith(".png") || value.endsWith(".jpg") || value.endsWith("jpeg")) {
                FastpathRes.getImageIcon(token);
            }
            String str = "public static final String " + token + " = \"" + token + "\";\n";
            buf.append(str);
        }
        checkImageDir();
        pane.setText(buf.toString());
        frame.pack();
        frame.setVisible(true);
    }

    /**
     * Reports image files (in a developer's hard-coded working directory) that
     * are not referenced by any bundle entry.
     */
    private static void checkImageDir() {
        File[] files = new File("c:\\code\\liveassistant\\client\\resources\\images").listFiles();
        if (files == null) {
            // Directory absent on this machine; nothing to check.
            return;
        }
        for (File imageFile : files) {
            String name = imageFile.getName();
            // Check whether any bundle value ends with this file name.
            boolean exists = false;
            Enumeration<String> enumeration = prb.getKeys();
            while (enumeration.hasMoreElements() && !exists) {
                String value = prb.getString(enumeration.nextElement());
                if (value.endsWith(name)) {
                    exists = true; // found; loop condition stops the scan early
                }
            }
            if (!exists) {
                System.out.println(imageFile.getAbsolutePath() + " is not used.");
            }
        }
    }
}
| |
/*
 * Copyright [2015] [Letsgood.com s.r.o.]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Created by Bc. Pavel Stambrecht for Letsgood.com s.r.o.
 */
package com.letsgood.synergykitsdkandroid.request;
import android.os.AsyncTask;
import com.letsgood.synergykitsdkandroid.builders.ResultObjectBuilder;
import com.letsgood.synergykitsdkandroid.errors.Errors;
import com.letsgood.synergykitsdkandroid.requestmethods.Delete;
import com.letsgood.synergykitsdkandroid.requestmethods.Get;
import com.letsgood.synergykitsdkandroid.requestmethods.Patch;
import com.letsgood.synergykitsdkandroid.requestmethods.Post;
import com.letsgood.synergykitsdkandroid.requestmethods.PostFile;
import com.letsgood.synergykitsdkandroid.requestmethods.Put;
import com.letsgood.synergykitsdkandroid.resources.SynergykitError;
import com.letsgood.synergykitsdkandroid.resources.SynergykitObject;
import com.letsgood.synergykitsdkandroid.resources.SynergykitResponse;
import com.letsgood.synergykitsdkandroid.resources.SynergykitUri;
import org.apache.http.HttpStatus;
import java.lang.reflect.Type;
public abstract class SynergykitRequest extends AsyncTask<Void, Void, Object> {
/* Do in background */
@Override
protected abstract Object doInBackground(Void... params);
/* On post execute */
@Override
protected abstract void onPostExecute(Object object);
/* Request method GET */
protected static SynergykitResponse get(final SynergykitUri uri,final String sessionToken, final boolean sessionTokenRequired) {
SynergykitResponse response = new SynergykitResponse();
Get get = new Get(uri,sessionToken,sessionTokenRequired); // request method get
response.setBufferedReader(get.execute());
response.setStatusCode(get.getStatusCode());
return response;
}
/* Request method GET */
protected static SynergykitResponse getFile(final SynergykitUri uri,final String sessionToken, final boolean sessionTokenRequired) {
SynergykitResponse response = new SynergykitResponse();
Get get = new Get(uri,sessionToken,sessionTokenRequired); // request method get
get.setAuthorizationEnabled(false);
response.setInputStream(get.halfExecute());
response.setStatusCode(get.getStatusCode());
return response;
}
/* Request method POST */
protected static SynergykitResponse post(final SynergykitUri uri,final Object object,final String sessionToken, final boolean sessionTokenRequired) {
SynergykitResponse response = new SynergykitResponse();
Post post = new Post(uri, object,sessionToken,sessionTokenRequired);
response.setBufferedReader(post.execute());
response.setStatusCode(post.getStatusCode());
return response;
}
/* File method POST */
protected static SynergykitResponse postFile(SynergykitUri uri, byte[] data,final String sessionToken, final boolean sessionTokenRequired) {
SynergykitResponse response = new SynergykitResponse();
PostFile postFile = new PostFile(uri, data,sessionToken,sessionTokenRequired);
response.setBufferedReader(postFile.execute());
response.setStatusCode(postFile.getStatusCode());
return response;
}
/* Request method PUT */
protected static SynergykitResponse put(SynergykitUri uri, Object object,final String sessionToken, final boolean sessionTokenRequired) {
SynergykitResponse response = new SynergykitResponse();
Put put = new Put(uri, object,sessionToken,sessionTokenRequired);
response.setBufferedReader(put.execute());
response.setStatusCode(put.getStatusCode());
return response;
}
/* Request method PUT */
protected static SynergykitResponse delete(SynergykitUri uri,final String sessionToken, final boolean sessionTokenRequired) {
SynergykitResponse response = new SynergykitResponse();
Delete delete = new Delete(uri,sessionToken,sessionTokenRequired);
response.setBufferedReader(delete.execute());
response.setStatusCode(delete.getStatusCode());
return response;
}
/* Request method PATCH */
protected static SynergykitResponse patch(SynergykitUri uri, Object object,final String sessionToken, final boolean sessionTokenRequired) {
SynergykitResponse response = new SynergykitResponse();
Patch patch = new Patch(uri, object,sessionToken,sessionTokenRequired);
response.setBufferedReader(patch.execute());
response.setStatusCode(patch.getStatusCode());
return response;
}
/* Manage response */
protected ResponseDataHolder manageResponseToObject(
SynergykitResponse response, Type type) {
ResponseDataHolder dataHolder = new ResponseDataHolder();
if (response == null
|| response.getStatusCode() >= HttpStatus.SC_INTERNAL_SERVER_ERROR
|| response.getStatusCode() <= Errors.SC_UNSPECIFIED_ERROR) {
dataHolder.errorObject = ResultObjectBuilder.buildError(response.getStatusCode());
} else if (response.getStatusCode() >= HttpStatus.SC_INTERNAL_SERVER_ERROR
|| response.getStatusCode() <= Errors.SC_UNSPECIFIED_ERROR) {
dataHolder.errorObject = ResultObjectBuilder.buildError(response
.getStatusCode());
} else if (response.getStatusCode() >= HttpStatus.SC_OK
&& response.getStatusCode() < HttpStatus.SC_MULTIPLE_CHOICES) {
dataHolder.statusCode = response.getStatusCode();
if (response.getBufferedReader() != null)
dataHolder.object = ResultObjectBuilder.buildObject(
dataHolder.statusCode, response.getBufferedReader(),
type);
} else {
dataHolder.statusCode = response.getStatusCode();
dataHolder.errorObject = ResultObjectBuilder.buildError(
dataHolder.statusCode, response.getBufferedReader());
}
return dataHolder;
}
/* Manage response: turn an HTTP response into a parsed object array or an error. */
protected ResponseDataHolder manageResponseToObjects(SynergykitResponse response, Type type) {
    ResponseDataHolder dataHolder = new ResponseDataHolder();
    if (response == null) {
        // No response at all. The original grouped this with the status-code checks and
        // then dereferenced the null response, throwing a NullPointerException; report
        // the unspecified-error code (the holder's default) instead.
        dataHolder.errorObject = ResultObjectBuilder.buildError(Errors.SC_UNSPECIFIED_ERROR);
    } else if (response.getStatusCode() >= HttpStatus.SC_INTERNAL_SERVER_ERROR
        || response.getStatusCode() <= Errors.SC_UNSPECIFIED_ERROR) {
        // Server-side (5xx) or transport-level failure: build an error from the code only.
        // (The original repeated this exact condition in a second, unreachable branch.)
        dataHolder.errorObject = ResultObjectBuilder.buildError(response.getStatusCode());
    } else if (response.getStatusCode() >= HttpStatus.SC_OK
        && response.getStatusCode() < HttpStatus.SC_MULTIPLE_CHOICES) {
        // 2xx success: parse the payload into an array of the requested type.
        dataHolder.statusCode = response.getStatusCode();
        dataHolder.objects = ResultObjectBuilder.buildObjects(
            dataHolder.statusCode, response.getBufferedReader(), type);
    } else {
        // Any other status (e.g. 4xx): build an error object from the payload.
        dataHolder.statusCode = response.getStatusCode();
        dataHolder.errorObject = ResultObjectBuilder.buildError(
            dataHolder.statusCode, response.getBufferedReader());
    }
    return dataHolder;
}
// ----------------------------------------------------------------------------------
/* Value holder for the outcome of one request: the HTTP status code together with
 * at most one of an error object, a single parsed object, a parsed object array,
 * or raw byte data. Populated by the manageResponse* methods above. */
protected class ResponseDataHolder {
/* Attributes */
public SynergykitError errorObject; // set when the request failed (null on success)
public SynergykitObject object; // single parsed object for 2xx responses
public SynergykitObject[] objects; // parsed object array for 2xx responses
public byte[] data; // raw payload — not populated by the code paths visible here
public int statusCode; // HTTP status; defaults to SC_UNSPECIFIED_ERROR
/* Constructor */
public ResponseDataHolder() {
// Start in the "unspecified error" state until a real response is processed.
statusCode = Errors.SC_UNSPECIFIED_ERROR;
errorObject = null;
object = null;
objects = null;
data = null;
}
}
}
| |
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.change;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import com.google.common.base.Converter;
import com.google.common.base.Enums;
import com.google.common.base.MoreObjects;
import com.google.common.cache.Cache;
import com.google.common.cache.Weigher;
import com.google.common.flogger.FluentLogger;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.google.gerrit.entities.BranchNameKey;
import com.google.gerrit.extensions.client.SubmitType;
import com.google.gerrit.proto.Protos;
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.cache.proto.Cache.MergeabilityKeyProto;
import com.google.gerrit.server.cache.serialize.BooleanCacheSerializer;
import com.google.gerrit.server.cache.serialize.CacheSerializer;
import com.google.gerrit.server.cache.serialize.ObjectIdConverter;
import com.google.gerrit.server.git.CodeReviewCommit;
import com.google.gerrit.server.git.CodeReviewCommit.CodeReviewRevWalk;
import com.google.gerrit.server.submit.SubmitDryRun;
import com.google.inject.Inject;
import com.google.inject.Module;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import java.util.Arrays;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
/**
 * Default {@link MergeabilityCache} implementation: a persisted cache recording, per
 * (commit, destination branch tip, submit type, merge strategy), whether the commit
 * merged cleanly in a submit dry run.
 */
@Singleton
public class MergeabilityCacheImpl implements MergeabilityCache {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private static final String CACHE_NAME = "mergeability";
/** Guice module that configures the persisted cache and binds the interface to this impl. */
public static Module module() {
return new CacheModule() {
@Override
protected void configure() {
persist(CACHE_NAME, EntryKey.class, Boolean.class)
.maximumWeight(1 << 20)
.weigher(MergeabilityWeigher.class)
.version(1)
.keySerializer(EntryKey.Serializer.INSTANCE)
.valueSerializer(BooleanCacheSerializer.INSTANCE);
bind(MergeabilityCache.class).to(MergeabilityCacheImpl.class);
}
};
}
/** Null-safe conversion: a missing ref (or ref without an id) maps to the zero id. */
public static ObjectId toId(Ref ref) {
return ref != null && ref.getObjectId() != null ? ref.getObjectId() : ObjectId.zeroId();
}
/** Cache key: (commit, destination tip, submit type, merge strategy). All parts non-null. */
public static class EntryKey {
private ObjectId commit;
private ObjectId into;
private SubmitType submitType;
private String mergeStrategy;
/**
 * @throws IllegalArgumentException if {@code submitType} is INHERIT — only concrete
 *     submit types may be cached.
 * @throws NullPointerException if any argument is null.
 */
public EntryKey(ObjectId commit, ObjectId into, SubmitType submitType, String mergeStrategy) {
checkArgument(
submitType != SubmitType.INHERIT,
"Cannot cache %s.%s",
SubmitType.class.getSimpleName(),
submitType);
this.commit = requireNonNull(commit, "commit");
this.into = requireNonNull(into, "into");
this.submitType = requireNonNull(submitType, "submitType");
this.mergeStrategy = requireNonNull(mergeStrategy, "mergeStrategy");
}
public ObjectId getCommit() {
return commit;
}
public ObjectId getInto() {
return into;
}
public SubmitType getSubmitType() {
return submitType;
}
public String getMergeStrategy() {
return mergeStrategy;
}
// equals/hashCode cover all four components, as required for use as a cache key.
@Override
public boolean equals(Object o) {
if (o instanceof EntryKey) {
EntryKey k = (EntryKey) o;
return commit.equals(k.commit)
&& into.equals(k.into)
&& submitType == k.submitType
&& mergeStrategy.equals(k.mergeStrategy);
}
return false;
}
@Override
public int hashCode() {
return Objects.hash(commit, into, submitType, mergeStrategy);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("commit", commit.name())
.add("into", into.name())
.addValue(submitType)
.addValue(mergeStrategy)
.toString();
}
/** Protobuf-based serializer used to persist {@link EntryKey} on disk. */
enum Serializer implements CacheSerializer<EntryKey> {
INSTANCE;
// Maps SubmitType enum constants to/from their string names in the proto.
private static final Converter<String, SubmitType> SUBMIT_TYPE_CONVERTER =
Enums.stringConverter(SubmitType.class);
@Override
public byte[] serialize(EntryKey object) {
ObjectIdConverter idConverter = ObjectIdConverter.create();
return Protos.toByteArray(
MergeabilityKeyProto.newBuilder()
.setCommit(idConverter.toByteString(object.getCommit()))
.setInto(idConverter.toByteString(object.getInto()))
.setSubmitType(SUBMIT_TYPE_CONVERTER.reverse().convert(object.getSubmitType()))
.setMergeStrategy(object.getMergeStrategy())
.build());
}
@Override
public EntryKey deserialize(byte[] in) {
MergeabilityKeyProto proto = Protos.parseUnchecked(MergeabilityKeyProto.parser(), in);
ObjectIdConverter idConverter = ObjectIdConverter.create();
return new EntryKey(
idConverter.fromByteString(proto.getCommit()),
idConverter.fromByteString(proto.getInto()),
SUBMIT_TYPE_CONVERTER.convert(proto.getSubmitType()),
proto.getMergeStrategy());
}
}
}
/** Approximates the in-memory footprint of one cache entry, in bytes. */
public static class MergeabilityWeigher implements Weigher<EntryKey, Boolean> {
@Override
public int weigh(EntryKey k, Boolean v) {
return 16 // Object header — presumably the EntryKey header; TODO confirm the accounting.
+ 2 * (16 + 20) // Two ObjectId fields — presumably header + 20-byte SHA-1 each.
+ 3 * 8 // Size of EntryKey, 64-bit JVM.
+ 8; // Size of Boolean.
}
}
private final SubmitDryRun submitDryRun;
private final Cache<EntryKey, Boolean> cache;
@Inject
MergeabilityCacheImpl(
SubmitDryRun submitDryRun, @Named(CACHE_NAME) Cache<EntryKey, Boolean> cache) {
this.submitDryRun = submitDryRun;
this.cache = cache;
}
/**
 * Returns whether {@code commit} is mergeable into {@code intoRef}, computing and caching
 * the answer on a cache miss via a submit dry run. Returns {@code false} (and logs) if the
 * dry run itself fails.
 */
@Override
public boolean get(
ObjectId commit,
Ref intoRef,
SubmitType submitType,
String mergeStrategy,
BranchNameKey dest,
Repository repo) {
ObjectId into = intoRef != null ? intoRef.getObjectId() : ObjectId.zeroId();
EntryKey key = new EntryKey(commit, into, submitType, mergeStrategy);
try {
return cache.get(
key,
() -> {
if (key.into.equals(ObjectId.zeroId())) {
return true; // Assume yes on new branch.
}
try (CodeReviewRevWalk rw = CodeReviewCommit.newRevWalk(repo)) {
// Seed the walk with everything already merged, plus the branch tip and the
// commit's parents, then dry-run the submit.
Set<RevCommit> accepted = SubmitDryRun.getAlreadyAccepted(repo, rw);
accepted.add(rw.parseCommit(key.into));
accepted.addAll(Arrays.asList(rw.parseCommit(key.commit).getParents()));
return submitDryRun.run(
null, key.submitType, repo, rw, dest, key.into, key.commit, accepted);
}
});
} catch (ExecutionException | UncheckedExecutionException e) {
logger.atSevere().withCause(e.getCause()).log(
"Error checking mergeability of %s into %s (%s)",
key.commit.name(), key.into.name(), key.submitType.name());
return false;
}
}
/** Pure cache lookup: returns the cached answer or {@code null}; never computes. */
@Override
public Boolean getIfPresent(
ObjectId commit, Ref intoRef, SubmitType submitType, String mergeStrategy) {
return cache.getIfPresent(new EntryKey(commit, toId(intoRef), submitType, mergeStrategy));
}
}
| |
/**
* Copyright 2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package models.daos;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import metadata.etl.models.EtlJobName;
import metadata.etl.models.EtlJobStatus;
import org.springframework.jdbc.support.KeyHolder;
import play.libs.Time;
import utils.JdbcUtil;
import utils.JsonUtil;
import java.sql.SQLException;
import java.util.*;
/**
 * DAO for WhereHows ETL job definitions ({@code wh_etl_job}) and their execution
 * records ({@code wh_etl_job_execution}).
 *
 * Created by zechen on 9/25/15.
 */
public class EtlJobDao {

  public static final String GET_ETL_JOB_BY_ID = "SELECT * FROM wh_etl_job where wh_etl_job_id = :id";

  public static final String INSERT_ETL_JOB =
      "INSERT INTO wh_etl_job (wh_etl_job_name, wh_etl_type, cron_expr, ref_id, timeout, next_run, comments, ref_id_type) "
          + " VALUES (:whEtlJobName, :whEtlType, :cronExpr, :refId, :timeout, :nextRun, :comments, :refIdType)";

  public static final String GET_DUE_JOBS =
      "SELECT * FROM wh_etl_job WHERE next_run <= :currentTime and is_active = 'Y'";

  public static final String GET_ALL_JOBS =
      "SELECT * FROM wh_etl_job";

  public static final String UPDATE_NEXT_RUN =
      "UPDATE wh_etl_job SET next_run = :nextRun WHERE wh_etl_job_id = :whEtlJobId";

  public static final String INSERT_NEW_RUN = "INSERT INTO wh_etl_job_execution(wh_etl_job_id, status, request_time) "
      + "VALUES (:whEtlJobId, :status, :requestTime)";

  public static final String START_RUN =
      "UPDATE wh_etl_job_execution set status = :status, message = :message, start_time = :startTime where wh_etl_exec_id = :whEtlExecId";

  public static final String END_RUN =
      "UPDATE wh_etl_job_execution set status = :status, message = :message, end_time = :endTime where wh_etl_exec_id = :whEtlExecId";

  public static final String UPDATE_JOB_STATUS =
      "UPDATE wh_etl_job SET is_active = :isActive WHERE wh_etl_job_name = :whEtlJobName and ref_id = :refId";

  public static final String UPDATE_JOB_SCHEDULE =
      "UPDATE wh_etl_job SET cron_expr = :cronExpr WHERE wh_etl_job_name = :whEtlJobName and ref_id = :refId";

  public static final String DELETE_JOB =
      "DELETE FROM wh_etl_job WHERE wh_etl_job_name = :whEtlJobName and ref_id = :refId";

  public static final String DELETE_JOB_PROPERTIES =
      "DELETE FROM wh_etl_job_property WHERE wh_etl_job_name = :whEtlJobName and ref_id = :refId";

  /** Returns every row of {@code wh_etl_job}. */
  public static List<Map<String, Object>> getAllJobs() throws Exception {
    return JdbcUtil.wherehowsJdbcTemplate.queryForList(GET_ALL_JOBS);
  }

  /**
   * Fetches a single ETL job row by its id.
   *
   * @param id wh_etl_job_id to look up
   * @throws SQLException on database failure (also propagates the template's exception
   *     when no row matches)
   */
  public static Map<String, Object> getEtlJobById(int id)
      throws SQLException {
    Map<String, Object> params = new HashMap<>();
    params.put("id", id);
    return JdbcUtil.wherehowsNamedJdbcTemplate.queryForMap(GET_ETL_JOB_BY_ID, params);
  }

  /**
   * Inserts a new ETL job definition together with its (optionally encrypted) properties.
   *
   * @param etlJob JSON payload with wh_etl_job_name, ref_id, cron_expr and optional
   *     timeout, next_run, comments, properties and encrypted_property_keys
   * @return the generated wh_etl_job_id
   * @throws IllegalArgumentException if an encrypted key is not listed in properties
   */
  public static int insertEtlJob(JsonNode etlJob)
      throws Exception {
    Map<String, Object> params = new HashMap<>();
    params.put("whEtlJobName", JsonUtil.getJsonValue(etlJob, "wh_etl_job_name", String.class));
    // The job name enum determines the etl type and ref-id type columns.
    EtlJobName whEtlJobName = EtlJobName.valueOf((String) params.get("whEtlJobName"));
    params.put("whEtlType", whEtlJobName.getEtlType().toString());
    params.put("refIdType", whEtlJobName.getRefIdType().toString());
    params.put("refId", JsonUtil.getJsonValue(etlJob, "ref_id", Integer.class));
    params.put("cronExpr", JsonUtil.getJsonValue(etlJob, "cron_expr", String.class));
    params.put("timeout", JsonUtil.getJsonValue(etlJob, "timeout", Integer.class, null));
    // next_run defaults to "now" (epoch seconds) when absent.
    params.put("nextRun", JsonUtil.getJsonValue(etlJob, "next_run", Integer.class, System.currentTimeMillis() / 1000));
    params.put("comments", JsonUtil.getJsonValue(etlJob, "comments", String.class, null));

    ObjectMapper om = new ObjectMapper();
    Map<String, String> properties = om.convertValue(etlJob.findPath("properties"),
        om.getTypeFactory().constructMapType(HashMap.class, String.class, String.class));
    if (properties == null) {
      properties = new HashMap<>();
    }
    Set<String> encryptedPropertyKeys = om.convertValue(etlJob.findPath("encrypted_property_keys"),
        om.getTypeFactory().constructCollectionType(HashSet.class, String.class));
    if (encryptedPropertyKeys == null) {
      encryptedPropertyKeys = new HashSet<>();
    }
    // Every key marked encrypted must actually have a value in properties.
    if (!properties.keySet().containsAll(encryptedPropertyKeys)) {
      throw new IllegalArgumentException("Some encrypted keys are not in properties");
    }
    for (String propertyKey : properties.keySet()) {
      EtlJobPropertyDao.insertJobProperty(whEtlJobName, (int) params.get("refId"), propertyKey, properties.get(propertyKey),
          encryptedPropertyKeys.contains(propertyKey));
    }

    KeyHolder kh = JdbcUtil.insertRow(INSERT_ETL_JOB, params);
    return kh.getKey().intValue();
  }

  /**
   * Applies a control action from a JSON payload: "activate", "deactivate" or "delete"
   * (case-insensitive). Unknown control values are silently ignored, matching the
   * original behavior.
   */
  public static void updateJobStatus(JsonNode jobStatus)
      throws Exception {
    EtlJobName whEtlJobName = EtlJobName.valueOf((String) JsonUtil.getJsonValue(jobStatus, "wh_etl_job_name", String.class));
    int refId = (Integer) JsonUtil.getJsonValue(jobStatus, "ref_id", Integer.class);
    String control = (String) JsonUtil.getJsonValue(jobStatus, "control", String.class);
    // The actions are mutually exclusive, so chain them instead of three independent
    // checks, and use equalsIgnoreCase rather than repeated toLowerCase() calls.
    if (control.equalsIgnoreCase("activate")) {
      updateJobStatus(whEtlJobName, refId, true);
    } else if (control.equalsIgnoreCase("deactivate")) {
      updateJobStatus(whEtlJobName, refId, false);
    } else if (control.equalsIgnoreCase("delete")) {
      deleteJob(whEtlJobName, refId);
    }
  }

  /** Sets is_active to 'Y' or 'N' for the job identified by (name, refId). */
  public static void updateJobStatus(EtlJobName whEtlJobName, int refId, boolean active)
      throws Exception {
    Map<String, Object> params = new HashMap<>();
    params.put("whEtlJobName", whEtlJobName.toString());
    params.put("refId", refId);
    params.put("isActive", active ? "Y" : "N");
    JdbcUtil.wherehowsNamedJdbcTemplate.update(UPDATE_JOB_STATUS, params);
  }

  /** Updates a job's cron expression from a JSON payload. */
  public static void updateJobSchedule(JsonNode jobSchedule)
      throws Exception {
    EtlJobName whEtlJobName = EtlJobName.valueOf((String) JsonUtil.getJsonValue(jobSchedule, "wh_etl_job_name", String.class));
    int refId = (Integer) JsonUtil.getJsonValue(jobSchedule, "ref_id", Integer.class);
    String cronExpr = (String) JsonUtil.getJsonValue(jobSchedule, "cron_expr", String.class);
    updateJobSchedule(whEtlJobName, refId, cronExpr);
  }

  /**
   * Updates a job's cron expression.
   *
   * @throws IllegalArgumentException if {@code cronExpr} is not a valid Quartz expression
   */
  public static void updateJobSchedule(EtlJobName whEtlJobName, int refId, String cronExpr)
      throws Exception {
    Map<String, Object> params = new HashMap<>();
    params.put("whEtlJobName", whEtlJobName.toString());
    params.put("refId", refId);
    // Validate before touching the database so an invalid expression changes nothing.
    if (!Time.CronExpression.isValidExpression(cronExpr)) {
      throw new IllegalArgumentException("Invalid cron expression, please refer to quartz document");
    }
    params.put("cronExpr", cronExpr);
    JdbcUtil.wherehowsNamedJdbcTemplate.update(UPDATE_JOB_SCHEDULE, params);
  }

  /** Deletes the job row and all of its properties. */
  public static void deleteJob(EtlJobName whEtlJobName, int refId)
      throws Exception {
    Map<String, Object> params = new HashMap<>();
    params.put("whEtlJobName", whEtlJobName.toString());
    params.put("refId", refId);
    JdbcUtil.wherehowsNamedJdbcTemplate.update(DELETE_JOB, params);
    JdbcUtil.wherehowsNamedJdbcTemplate.update(DELETE_JOB_PROPERTIES, params);
  }

  /**
   * Update the next run time for the etl job using Quartz cron expression
   * @param etlJobId
   * @param cronExprStr
   * @param startTime
   * @throws Exception
   */
  public static void updateNextRun(int etlJobId, String cronExprStr, Date startTime)
      throws Exception {
    Time.CronExpression cronExpression = new Time.CronExpression(cronExprStr);
    Date nextTime = cronExpression.getNextValidTimeAfter(startTime);
    updateNextRun(etlJobId, nextTime);
  }

  /** Writes an explicit next-run time (stored as epoch seconds) for the job. */
  public static void updateNextRun(int etlJobId, Date nextTime)
      throws Exception {
    Map<String, Object> params = new HashMap<>();
    params.put("nextRun", String.valueOf(nextTime.getTime() / 1000));
    params.put("whEtlJobId", etlJobId);
    JdbcUtil.wherehowsNamedJdbcTemplate.update(UPDATE_NEXT_RUN, params);
  }

  /** Returns all active jobs whose next_run is at or before now. */
  public static List<Map<String, Object>> getDueJobs() {
    Map<String, Object> params = new HashMap<>();
    params.put("currentTime", System.currentTimeMillis() / 1000);
    return JdbcUtil.wherehowsNamedJdbcTemplate.queryForList(GET_DUE_JOBS, params);
  }

  /**
   * Records a new execution in REQUESTED state.
   *
   * @return the generated wh_etl_exec_id
   */
  public static long insertNewRun(int whEtlJobId) {
    Map<String, Object> params = new HashMap<>();
    params.put("whEtlJobId", whEtlJobId);
    params.put("status", EtlJobStatus.REQUESTED.toString());
    params.put("requestTime", System.currentTimeMillis() / 1000);
    KeyHolder keyHolder = JdbcUtil.insertRow(INSERT_NEW_RUN, params);
    // longValue() instead of a (Long) cast: KeyHolder.getKey() returns a Number whose
    // concrete type depends on the JDBC driver (e.g. BigInteger for MySQL).
    return keyHolder.getKey().longValue();
  }

  /** Marks an execution STARTED, recording the start time and a message. */
  public static void startRun(long whEtlExecId, String message) {
    Map<String, Object> params = new HashMap<>();
    params.put("whEtlExecId", whEtlExecId);
    params.put("status", EtlJobStatus.STARTED.toString());
    params.put("startTime", System.currentTimeMillis() / 1000);
    params.put("message", message);
    JdbcUtil.wherehowsNamedJdbcTemplate.update(START_RUN, params);
  }

  /** Records an execution's terminal status, end time and message. */
  public static void endRun(long whEtlExecId, EtlJobStatus status, String message) {
    Map<String, Object> params = new HashMap<>();
    params.put("whEtlExecId", whEtlExecId);
    params.put("status", status.toString());
    params.put("endTime", System.currentTimeMillis() / 1000);
    params.put("message", message);
    JdbcUtil.wherehowsNamedJdbcTemplate.update(END_RUN, params);
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.xml.breadcrumbs;
import com.intellij.codeInsight.breadcrumbs.FileBreadcrumbsCollector;
import com.intellij.codeInsight.daemon.impl.tagTreeHighlighting.XmlTagTreeHighlightingUtil;
import com.intellij.codeInsight.highlighting.HighlightManager;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorGutter;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.event.CaretEvent;
import com.intellij.openapi.editor.event.CaretListener;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.EditorGutterComponentEx;
import com.intellij.openapi.editor.impl.ComplementaryFontsRegistry;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vcs.FileStatusListener;
import com.intellij.openapi.vcs.FileStatusManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.Gray;
import com.intellij.ui.components.breadcrumbs.Crumb;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.MouseEventAdapter;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.UiNotifyConnector;
import com.intellij.util.ui.update.Update;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.InputEvent;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static com.intellij.ui.RelativeFont.SMALL;
import static com.intellij.ui.ScrollPaneFactory.createScrollPane;
import static com.intellij.util.ui.UIUtil.getLabelFont;
/**
 * Editor header component showing PSI breadcrumbs for the caret position. Keeps the
 * crumbs in sync with caret moves, file-status changes, gutter resizes and editor font
 * changes, and highlights the hovered crumb's range in the editor.
 *
 * @author spleaner
 */
public class BreadcrumbsXmlWrapper extends JComponent implements Disposable {
final PsiBreadcrumbs breadcrumbs = new PsiBreadcrumbs();
private final Project myProject;
private Editor myEditor;
// Highlighters added while hovering a crumb; removed when the hover changes.
private Collection<RangeHighlighter> myHighlighed;
private final VirtualFile myFile;
// Cleared around programmatic caret moves (see navigate()) so they don't re-queue an update.
private boolean myUserCaretChange = true;
// Coalesces update requests; at most one crumb recomputation per 200ms burst.
private final MergingUpdateQueue myQueue = new MergingUpdateQueue("Breadcrumbs.Queue", 200, true, breadcrumbs);
private final List<BreadcrumbListener> myBreadcrumbListeners = new ArrayList<>();
private final Update myUpdate = new Update(this) {
@Override
public void run() {
updateCrumbs();
}
@Override
public boolean canEat(final Update update) {
// Only the latest update matters; let it swallow any queued predecessors.
return true;
}
};
// Progress of the in-flight async crumb computation; cancelled when a new one starts.
private ProgressIndicator myAsyncUpdateProgress = null;
private final FileBreadcrumbsCollector myBreadcrumbsCollector;
public static final Key<BreadcrumbsXmlWrapper> BREADCRUMBS_COMPONENT_KEY = new Key<>("BREADCRUMBS_KEY");
private static final Iterable<? extends Crumb> EMPTY_BREADCRUMBS = ContainerUtil.emptyIterable();
/**
 * Builds the breadcrumbs component for {@code editor} and wires all listeners
 * (caret, file status, font changes, gutter resize). The editor must belong to a project.
 */
public BreadcrumbsXmlWrapper(@NotNull final Editor editor) {
myEditor = editor;
// Register this wrapper on the editor so getBreadcrumbsComponent() can find it.
myEditor.putUserData(BREADCRUMBS_COMPONENT_KEY, this);
if (editor instanceof EditorEx) {
((EditorEx)editor).addPropertyChangeListener(this::updateEditorFont, this);
}
final Project project = editor.getProject();
assert project != null;
myProject = project;
myFile = FileDocumentManager.getInstance().getFile(myEditor.getDocument());
// Refresh crumbs when VCS file statuses change.
final FileStatusManager manager = FileStatusManager.getInstance(project);
manager.addFileStatusListener(new FileStatusListener() {
@Override
public void fileStatusesChanged() {
queueUpdate();
}
}, this);
final CaretListener caretListener = new CaretListener() {
@Override
public void caretPositionChanged(@NotNull final CaretEvent e) {
// Skip updates for programmatic moves; re-arm for the next (user) change.
if (myUserCaretChange) {
queueUpdate();
}
myUserCaretChange = true;
}
};
editor.getCaretModel().addCaretListener(caretListener, this);
myBreadcrumbsCollector = FileBreadcrumbsCollector.findBreadcrumbsCollector(myProject, myFile);
if (myFile != null) {
myBreadcrumbsCollector.watchForChanges(myFile, editor, this, () -> queueUpdate());
}
breadcrumbs.onHover(this::itemHovered);
breadcrumbs.onSelect(this::itemSelected);
breadcrumbs.setFont(getNewFont(myEditor));
// Horizontal-only scroll pane hosting the crumbs strip.
JScrollPane pane = createScrollPane(breadcrumbs, true);
pane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER);
pane.getHorizontalScrollBar().setEnabled(false);
setLayout(new BorderLayout());
add(BorderLayout.CENTER, pane);
EditorGutter gutter = editor.getGutter();
if (gutter instanceof EditorGutterComponentEx) {
EditorGutterComponentEx gutterComponent = (EditorGutterComponentEx)gutter;
// Forward mouse events over the crumbs to the gutter component.
MouseEventAdapter mouseListener = new MouseEventAdapter<EditorGutterComponentEx>(gutterComponent) {
@NotNull
@Override
protected MouseEvent convert(@NotNull MouseEvent event) {
return convert(event, gutterComponent);
}
};
// Keep the crumbs' left border aligned with the gutter and refresh the font on resize.
ComponentAdapter resizeListener = new ComponentAdapter() {
@Override
public void componentResized(ComponentEvent event) {
breadcrumbs.updateBorder(gutterComponent.getWhitespaceSeparatorOffset());
breadcrumbs.setFont(getNewFont(myEditor));
}
};
addComponentListener(resizeListener);
gutterComponent.addComponentListener(resizeListener);
breadcrumbs.addMouseListener(mouseListener);
// Unhook everything added above when this wrapper is disposed.
Disposer.register(this, () -> {
removeComponentListener(resizeListener);
gutterComponent.removeComponentListener(resizeListener);
breadcrumbs.removeMouseListener(mouseListener);
});
breadcrumbs.updateBorder(gutterComponent.getWhitespaceSeparatorOffset());
}
else {
breadcrumbs.updateBorder(0);
}
// Only pump the queue while the crumbs are actually showing.
Disposer.register(this, new UiNotifyConnector(breadcrumbs, myQueue));
Disposer.register(this, myQueue);
if (ApplicationManager.getApplication().isHeadlessEnvironment()) {
myQueue.setPassThrough(true);
}
queueUpdate();
}
/** Cancels any in-flight crumb computation and starts a new async one for the caret offset. */
private void updateCrumbs() {
if (myEditor == null || myFile == null || myEditor.isDisposed()) return;
if (myAsyncUpdateProgress != null) {
myAsyncUpdateProgress.cancel();
}
ProgressIndicator progress = new ProgressIndicatorBase();
myAsyncUpdateProgress = progress;
myBreadcrumbsCollector.updateCrumbs(myFile, myEditor.getDocument(), myEditor.getCaretModel().getOffset(), myAsyncUpdateProgress, (crumbs) -> {
// Ignore stale results: cancelled computation or a disposed editor/project.
if (!progress.isCanceled() && myEditor != null && !myEditor.isDisposed() && !myProject.isDisposed()) {
if (!breadcrumbs.isShowing() && !ApplicationManager.getApplication().isHeadlessEnvironment()) {
crumbs = EMPTY_BREADCRUMBS;
}
breadcrumbs.setFont(getNewFont(myEditor));
breadcrumbs.setCrumbs(crumbs);
notifyListeners(crumbs);
}
}, BreadcrumbsForceShownSettings.getForcedShown(myEditor));
}
/** Schedules a crumb refresh, replacing any already-queued one. */
public void queueUpdate() {
myQueue.cancelAllUpdates();
myQueue.queue(myUpdate);
}
/** Adds a listener that is removed automatically when {@code parentDisposable} is disposed. */
public void addBreadcrumbListener(BreadcrumbListener listener, Disposable parentDisposable) {
myBreadcrumbListeners.add(listener);
Disposer.register(parentDisposable, () -> myBreadcrumbListeners.remove(listener));
}
public void removeBreadcrumbListener(BreadcrumbListener listener) {
myBreadcrumbListeners.remove(listener);
}
private void notifyListeners(@NotNull Iterable<? extends Crumb> breadcrumbs) {
for (BreadcrumbListener listener : myBreadcrumbListeners) {
listener.breadcrumbsChanged(breadcrumbs);
}
}
/** @deprecated this class is itself the component; use it directly. */
@Deprecated
public JComponent getComponent() {
return this;
}
// Crumb click handler: navigate, selecting the range when Shift/Meta is held.
private void itemSelected(Crumb crumb, InputEvent event) {
if (event == null || !(crumb instanceof NavigatableCrumb)) return;
NavigatableCrumb navigatableCrumb = (NavigatableCrumb)crumb;
navigate(navigatableCrumb, event.isShiftDown() || event.isMetaDown());
}
/** Moves the caret to the crumb's element without re-triggering a crumb update. */
public void navigate(NavigatableCrumb crumb, boolean withSelection) {
myUserCaretChange = false;
crumb.navigate(myEditor, withSelection);
}
// Crumb hover handler: highlight the crumb's text range in the editor (if enabled).
private void itemHovered(Crumb crumb, @SuppressWarnings("unused") InputEvent event) {
if (!Registry.is("editor.breadcrumbs.highlight.on.hover")) {
return;
}
HighlightManager hm = HighlightManager.getInstance(myProject);
// Drop the highlight from the previously hovered crumb, if any.
if (myHighlighed != null) {
for (RangeHighlighter highlighter : myHighlighed) {
hm.removeSegmentHighlighter(myEditor, highlighter);
}
myHighlighed = null;
}
if (crumb instanceof NavigatableCrumb) {
final TextRange range = ((NavigatableCrumb)crumb).getHighlightRange();
if (range == null) return;
final TextAttributes attributes = new TextAttributes();
final CrumbPresentation p = PsiCrumb.getPresentation(crumb);
// Fall back from crumb-specific color to button background to label background.
Color color = p == null ? null : p.getBackgroundColor(false, false, false);
if (color == null) color = BreadcrumbsComponent.ButtonSettings.getBackgroundColor(false, false, false, false);
if (color == null) color = UIUtil.getLabelBackground();
final Color background = EditorColorsManager.getInstance().getGlobalScheme().getColor(EditorColors.CARET_ROW_COLOR);
attributes.setBackgroundColor(XmlTagTreeHighlightingUtil.makeTransparent(color, background != null ? background : Gray._200, 0.3));
myHighlighed = new ArrayList<>(1);
int flags = HighlightManager.HIDE_BY_ESCAPE | HighlightManager.HIDE_BY_TEXT_CHANGE | HighlightManager.HIDE_BY_ANY_KEY;
hm.addOccurrenceHighlight(myEditor, range.getStartOffset(), range.getEndOffset(), attributes, flags, myHighlighed, null);
}
}
/** Returns the wrapper previously registered on the editor, or null. */
@Nullable
public static BreadcrumbsXmlWrapper getBreadcrumbsComponent(@NotNull Editor editor) {
return editor.getUserData(BREADCRUMBS_COMPONENT_KEY);
}
@Override
public void dispose() {
// Unregister from the editor and drop the reference so pending callbacks bail out.
if (myEditor != null) {
myEditor.putUserData(BREADCRUMBS_COMPONENT_KEY, null);
}
myEditor = null;
breadcrumbs.setCrumbs(EMPTY_BREADCRUMBS);
notifyListeners(EMPTY_BREADCRUMBS);
}
private void updateEditorFont(PropertyChangeEvent event) {
if (EditorEx.PROP_FONT_SIZE.equals(event.getPropertyName())) queueUpdate();
}
// Picks the crumbs font: the UI label font or the editor font, per registry setting.
private static Font getNewFont(Editor editor) {
Font font = editor == null || Registry.is("editor.breadcrumbs.system.font") ? getLabelFont() : getEditorFont(editor);
return UISettings.getInstance().getUseSmallLabelsOnTabs() ? SMALL.derive(font) : font;
}
private static Font getEditorFont(Editor editor) {
return ComplementaryFontsRegistry.getFontAbleToDisplay('a', Font.PLAIN, editor.getColorsScheme().getFontPreferences(),
null).getFont();
}
}
| |
package com.alekseyzhelo.evilislands.mobplugin.script.template;
import com.alekseyzhelo.evilislands.mobplugin.EIMessages;
import com.alekseyzhelo.evilislands.mobplugin.script.EIScriptLanguage;
import com.alekseyzhelo.evilislands.mobplugin.script.highlighting.EIScriptSyntaxHighlighter;
import com.alekseyzhelo.evilislands.mobplugin.script.psi.*;
import com.alekseyzhelo.evilislands.mobplugin.script.psi.base.EICallableDeclaration;
import com.alekseyzhelo.evilislands.mobplugin.script.psi.base.EIForBlockBase;
import com.alekseyzhelo.evilislands.mobplugin.script.util.EITypeToken;
import com.alekseyzhelo.evilislands.mobplugin.script.util.UsefulPsiTreeUtil;
import com.intellij.codeInsight.template.EverywhereContextType;
import com.intellij.codeInsight.template.TemplateContextType;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.fileTypes.SyntaxHighlighter;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiErrorElement;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Objects;
public abstract class EITemplateContextType extends TemplateContextType {
/**
 * Delegates straight to the platform base class.
 *
 * @param id              unique id of this template context
 * @param presentableName name shown in the live-template settings UI
 * @param baseContextType parent context type, or null for a root context
 */
EITemplateContextType(@NotNull String id,
@NotNull String presentableName,
@Nullable Class<? extends TemplateContextType> baseContextType) {
super(id, presentableName, baseContextType);
}
@Override
public boolean isInContext(@NotNull PsiFile file, int offset) {
// Only EI script files can ever host these templates.
if (!(file.getLanguage() instanceof EIScriptLanguage)) {
return false;
}
// Delegate to the subclass check on the leaf element at the caret offset.
final PsiElement leaf = file.findElementAt(offset);
return leaf != null && isInContext(leaf);
}
@Nullable
@Override
public SyntaxHighlighter createHighlighter() {
// EI script highlighter — presumably used by the platform when rendering template text.
return new EIScriptSyntaxHighlighter();
}
/** Language-specific check performed on the leaf element found at the caret offset. */
protected abstract boolean isInContext(@NotNull PsiElement element);
private static PsiElement getParentSkipError(@NotNull PsiElement element) {
// Step over the synthetic error element the IDE inserts at the caret
// (the "IntellijIdeaRulezzz" dummy) to reach the real structural parent.
final PsiElement parent = element.getParent();
return parent instanceof PsiErrorElement ? parent.getParent() : parent;
}
private static boolean isAfterNodeClosingParenthesis(int elementOffset, ASTNode node) {
// True only when the node has a ')' child and the offset lies strictly past it.
final ASTNode closing = node.findChildByType(ScriptTypes.RPAREN);
if (closing == null) {
return false;
}
return elementOffset > closing.getTextRange().getEndOffset();
}
/** Root context: matches anywhere inside an EI script file. */
public static class EIGeneric extends EITemplateContextType {
public EIGeneric() {
super("EI", EIMessages.message("lang.display.name"), EverywhereContextType.class);
}
@Override
protected boolean isInContext(@NotNull PsiElement element) {
// The base class already restricted us to EI script files; accept any position.
return true;
}
}
/** Context where a script declaration may be inserted. */
public static class ScriptDeclarationAllowed extends EITemplateContextType {
public ScriptDeclarationAllowed() {
super("EI_SCRIPT_DECLARATION_ALLOWED",
EIMessages.message("templates.context.script.declaration.allowed"), EIGeneric.class);
}
@Override
protected boolean isInContext(@NotNull PsiElement element) {
ScriptPsiFile file = (ScriptPsiFile) element.getContainingFile();
PsiElement parent = getParentSkipError(element);
int elementOffset = element.getTextOffset();
EIScripts scripts = file.findChildByClass(EIScripts.class);
EIWorldScript worldScript = file.findChildByClass(EIWorldScript.class);
// Either directly in the declarations section or at file top level.
final boolean topLevel = parent instanceof EIDeclarations || parent instanceof ScriptPsiFile;
// Declarations must precede the script implementations and the world script.
final boolean beforeScriptsAndWorldScript =
(scripts == null && (worldScript == null || elementOffset < worldScript.getTextOffset()))
|| (scripts != null && elementOffset < scripts.getTextOffset());
if (!beforeScriptsAndWorldScript) {
return false;
}
if (topLevel) {
// Must also come after the global-variables section, when one exists.
EIGlobalVars globalVars = file.findChildByClass(EIGlobalVars.class);
return globalVars == null || elementOffset > globalVars.getTextRange().getEndOffset();
} else if (parent instanceof EIScriptDeclaration) {
// Allowed immediately after the previous declaration's closing parenthesis.
return isAfterNodeClosingParenthesis(elementOffset, parent.getNode());
}
return false;
}
}
/**
 * Context: positions where a new script implementation may be inserted — after the
 * declarations section (or after the global variables when there are no declarations)
 * and before the world script.
 */
public static class ScriptAllowed extends EITemplateContextType {
    public ScriptAllowed() {
        super("EI_SCRIPT_ALLOWED",
                EIMessages.message("templates.context.script.allowed"), EIGeneric.class);
    }
    @Override
    protected boolean isInContext(@NotNull PsiElement element) {
        ScriptPsiFile file = (ScriptPsiFile) element.getContainingFile();
        PsiElement parent = getParentSkipError(element);
        int elementOffset = element.getTextOffset();
        EIDeclarations declarations = file.findChildByClass(EIDeclarations.class);
        EIWorldScript worldScript = file.findChildByClass(EIWorldScript.class);
        // In the scripts section, at file top level, or just past a complete script body.
        final boolean topLevel = parent instanceof EIScripts || parent instanceof ScriptPsiFile
                || afterScriptImplClosingParenth(parent, elementOffset);
        // Script implementations must precede the world script.
        final boolean beforeWorldScript = (worldScript == null || elementOffset < worldScript.getTextRange().getStartOffset());
        if (!beforeWorldScript) {
            return false;
        }
        if (topLevel) {
            if (declarations == null) {
                // No declarations section: only need to be after the global variables, if any.
                EIGlobalVars globalVars = file.findChildByClass(EIGlobalVars.class);
                return globalVars == null || elementOffset > globalVars.getTextRange().getEndOffset();
            } else {
                return elementOffset > declarations.getTextRange().getEndOffset();
            }
        } else if (parent instanceof EIScriptDeclaration) {
            assert declarations != null;
            // A script may only follow the *last* declaration's closing parenthesis.
            if (declarations.getLastChild() == parent) {
                return isAfterNodeClosingParenthesis(elementOffset, parent.getNode());
            }
        }
        return false;
    }
    // True when the (error-corrected) parent is a script implementation and the caret
    // is positioned past its closing parenthesis.
    private boolean afterScriptImplClosingParenth(PsiElement correctedParent, int elementOffset) {
        if (correctedParent instanceof EIScriptImplementation) {
            ASTNode node = correctedParent.getNode();
            return isAfterNodeClosingParenthesis(elementOffset, node);
        }
        return false;
    }
}
/**
 * Context: positions inside a script implementation body where a block-level
 * construct may be inserted.
 */
public static class ScriptBlockAllowed extends EITemplateContextType {
    public ScriptBlockAllowed() {
        super("EI_SCRIPT_BLOCK_ALLOWED",
                EIMessages.message("templates.context.script.block.allowed"), EIGeneric.class);
    }
    @Override
    protected boolean isInContext(@NotNull PsiElement element) {
        PsiElement parent = getParentSkipError(element);
        if (parent instanceof EIScriptImplementation) {
            // Between the implementation's parentheses; the closing paren may be
            // missing while the user is still typing.
            ASTNode node = parent.getNode();
            ASTNode lParen = node.findChildByType(ScriptTypes.LPAREN);
            ASTNode rParen = node.findChildByType(ScriptTypes.RPAREN);
            return lParen != null
                    && element.getTextOffset() > lParen.getStartOffset()
                    && (rParen == null || element.getTextOffset() < rParen.getStartOffset());
        } else if (parent instanceof EIScriptIfBlock) { // incomplete ifBlock at script end
            // The parser attached the element to a dangling if-block: allow only when
            // the if-block has no opening paren and the caret precedes the closing one.
            ASTNode node = parent.getNode();
            ASTNode lParen = node.findChildByType(ScriptTypes.LPAREN);
            ASTNode rParen = node.findChildByType(ScriptTypes.RPAREN);
            return lParen == null && rParen != null && element.getTextOffset() < rParen.getStartOffset();
        } else {
            return parent instanceof EIScriptBlock;
        }
    }
}
/**
 * Context: positions where a script expression may be inserted — inside a then-block,
 * a world script, or a For block after its argument list.
 */
public static class ScriptExpressionAllowed extends EITemplateContextType {
    public ScriptExpressionAllowed() {
        super("EI_SCRIPT_EXPRESSION_ALLOWED",
                EIMessages.message("templates.context.script.expression.allowed"), EIGeneric.class);
    }
    @Override
    protected boolean isInContext(@NotNull PsiElement element) {
        PsiElement parent = getParentSkipError(element);
        if (parent instanceof EIVariableAccess) {
            final PsiElement superParent = parent.getParent();
            if (superParent instanceof EIForBlockBase) { // inside For block arguments
                return false;
            }
            parent = superParent.getParent(); // skip assignment
        }
        if (parent instanceof EIForBlockBase) {
            // In a For block body: only after the argument list's closing parenthesis.
            EIForBlockBase forBlock = (EIForBlockBase) parent;
            ASTNode rParen = forBlock.getNode().findChildByType(ScriptTypes.RPAREN);
            return rParen != null && element.getTextOffset() > rParen.getStartOffset();
        } else {
            return parent instanceof EIScriptThenBlock || parent instanceof EIWorldScript;
        }
    }
}
/**
 * Context: positions inside a function call's argument list. Serves as the base
 * context for more specific argument contexts.
 */
public static class FunctionArgumentAllowed extends EITemplateContextType {
    @SuppressWarnings("unused")
    public FunctionArgumentAllowed() {
        super("EI_FUNCTION_ARGUMENT_ALLOWED",
                EIMessages.message("templates.context.function.argument.allowed"), EIGeneric.class);
    }
    FunctionArgumentAllowed(@NotNull String id,
                            @NotNull String presentableName,
                            @Nullable Class<? extends TemplateContextType> baseContextType) {
        super(id, presentableName, baseContextType);
    }
    @Override
    protected boolean isInContext(@NotNull PsiElement element) {
        final PsiElement parent = getParentSkipError(element);
        // A caret sitting on a variable reference inside the argument list still counts.
        final PsiElement container =
                parent instanceof EIVariableAccess ? parent.getParent() : parent;
        return container instanceof EIParams;
    }
}
/**
 * Context: function-argument positions in a call whose resolved callee declares a
 * float parameter named "x" — i.e. calls that take coordinate arguments.
 */
public static class CoordinateArgumentsAllowed extends FunctionArgumentAllowed {
    public CoordinateArgumentsAllowed() {
        super("EI_COORDINATE_ARGUMENTS_ALLOWED",
                EIMessages.message("templates.context.coordinate.arguments.allowed"), FunctionArgumentAllowed.class);
    }
    @Override
    protected boolean isInContext(@NotNull PsiElement element) {
        if (super.isInContext(element)) {
            EIFunctionCall call = UsefulPsiTreeUtil.getParentFunctionCall(element);
            // NOTE(review): the unchecked cast assumes the reference can only resolve
            // to an EICallableDeclaration (or null) — confirm no other kinds resolve here.
            EICallableDeclaration resolved = call != null
                    ? (EICallableDeclaration) call.getReference().resolve()
                    : null;
            return resolved != null && hasXParam(resolved);
        }
        return false;
    }
    // TODO v2: two contexts, separate for xy and xyz, and detect exact matches?
    // Returns true when the declaration has a float parameter named "x" (case-insensitive).
    private boolean hasXParam(@NotNull EICallableDeclaration declaration) {
        for (EIFormalParameter parameter : declaration.getCallableParams()) {
            // NOTE(review): requireNonNull throws NPE for a parameter without a declared
            // type — presumably the grammar guarantees one; verify against the parser.
            if (StringUtil.equalsIgnoreCase(parameter.getName(), "x")
                    && EITypeToken.FLOAT.equals(Objects.requireNonNull(parameter.getType()).getTypeToken())) {
                return true;
            }
        }
        return false;
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.mapreduce.examples;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.tez.client.TezClient;
import org.apache.tez.dag.api.DAG;
import org.apache.tez.dag.api.Edge;
import org.apache.tez.dag.api.ProcessorDescriptor;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.TezException;
import org.apache.tez.dag.api.TezUncheckedException;
import org.apache.tez.dag.api.UserPayload;
import org.apache.tez.dag.api.Vertex;
import org.apache.tez.dag.api.VertexManagerPluginDescriptor;
import org.apache.tez.dag.api.client.DAGClient;
import org.apache.tez.dag.api.client.DAGStatus;
import org.apache.tez.dag.library.vertexmanager.InputReadyVertexManager;
import org.apache.tez.runtime.api.ObjectRegistry;
import org.apache.tez.runtime.api.ProcessorContext;
import org.apache.tez.runtime.library.api.KeyValueReader;
import org.apache.tez.runtime.library.api.KeyValueWriter;
import org.apache.tez.runtime.library.conf.UnorderedKVEdgeConfig;
import org.apache.tez.runtime.library.output.UnorderedKVOutput;
import org.apache.tez.runtime.library.processor.SimpleProcessor;
import com.google.common.base.Preconditions;
/**
 * Example Tez DAG: a "Broadcast" vertex and an "Input" vertex both feed a "OneToOne"
 * vertex, via a broadcast edge and a 1-1 edge respectively. Each producer emits its
 * task index; the consumer verifies the expected sum and, optionally, that its 1-1
 * producer ran in the same JVM (container-reuse locality check).
 */
public class BroadcastAndOneToOneExample extends Configured implements Tool {

  /**
   * Producer processor: emits a single (empty word, task index) pair. When the user
   * payload requests a locality check, it also caches its task index in the DAG-scoped
   * object registry so the matching 1-1 consumer can verify same-JVM execution.
   */
  public static class InputProcessor extends SimpleProcessor {
    Text word = new Text(); // intentionally empty key; only the IntWritable value matters

    public InputProcessor(ProcessorContext context) {
      super(context);
    }

    @Override
    public void run() throws Exception {
      Preconditions.checkArgument(getOutputs().size() == 1);
      UnorderedKVOutput output = (UnorderedKVOutput) getOutputs().values().iterator()
          .next();
      KeyValueWriter kvWriter = (KeyValueWriter) output.getWriter();
      // Emit this task's index so the consumer can check the sum of indices.
      kvWriter.write(word, new IntWritable(getContext().getTaskIndex()));
      ByteBuffer userPayload =
          getContext().getUserPayload() == null ? null : getContext().getUserPayload().getPayload();
      if (userPayload != null) {
        // Payload byte 0: non-zero means "perform the locality check".
        boolean doLocalityCheck = userPayload.get(0) > 0;
        if (doLocalityCheck) {
          // Cache "taskIndex" -> "taskIndex" for the downstream same-JVM check.
          ObjectRegistry objectRegistry = getContext().getObjectRegistry();
          String entry = String.valueOf(getContext().getTaskIndex());
          objectRegistry.cacheForDAG(entry, entry);
        }
      }
    }
  }

  /**
   * Consumer processor: sums the task indices received over the broadcast and 1-1
   * edges, validates the total, and optionally verifies that the 1-1 producer with
   * the same task index ran in this JVM (via the DAG-scoped object registry).
   */
  public static class OneToOneProcessor extends SimpleProcessor {
    Text word = new Text(); // unused; kept for interface/source compatibility

    public OneToOneProcessor(ProcessorContext context) {
      super(context);
    }

    @Override
    public void run() throws Exception {
      Preconditions.checkArgument(inputs.size() == 2);
      KeyValueReader inputKvReader = (KeyValueReader) getInputs().get("Input").getReader();
      KeyValueReader broadcastKvReader = (KeyValueReader) getInputs().get("Broadcast").getReader();
      int sum = 0;
      while (broadcastKvReader.next()) {
        sum += ((IntWritable) broadcastKvReader.getCurrentValue()).get();
      }
      while (inputKvReader.next()) {
        sum += ((IntWritable) inputKvReader.getCurrentValue()).get();
      }
      // Payload byte 0: locality-check flag; byte 1: expected sum of broadcast task indices.
      ByteBuffer payload = getContext().getUserPayload().getPayload();
      boolean doLocalityCheck = payload.get(0) > 0;
      int broadcastSum = payload.get(1);
      // The 1-1 producer with the same index contributed exactly taskIndex to the sum.
      int expectedSum = broadcastSum + getContext().getTaskIndex();
      System.out.println("Index: " + getContext().getTaskIndex() +
          " sum: " + sum + " expectedSum: " + expectedSum + " broadcastSum: " + broadcastSum);
      Preconditions.checkState((sum == expectedSum), "Sum = " + sum);
      if (doLocalityCheck) {
        // The matching 1-1 producer cached its own index under the same key; with
        // container reuse and InputReadyVertexManager it must be visible here.
        ObjectRegistry objectRegistry = getContext().getObjectRegistry();
        String index = (String) objectRegistry.get(String.valueOf(getContext().getTaskIndex()));
        if (index == null || Integer.parseInt(index) != getContext().getTaskIndex()) {
          String msg = "Did not find expected local producer "
              + getContext().getTaskIndex() + " in the same JVM";
          System.out.println(msg);
          throw new TezUncheckedException(msg);
        }
      }
    }
  }

  /**
   * Builds the three-vertex DAG. When {@code doLocalityCheck} is set, the number of
   * 1-1 tasks is derived from the number of running NodeManagers so they can run in
   * parallel.
   *
   * @param fs currently unused; kept for signature stability
   * @param stagingDir currently unused; kept for signature stability
   */
  private DAG createDAG(FileSystem fs, TezConfiguration tezConf,
      Path stagingDir, boolean doLocalityCheck) throws IOException, YarnException {
    int numBroadcastTasks = 2;
    int numOneToOneTasks = 3;
    if (doLocalityCheck) {
      YarnClient yarnClient = YarnClient.createYarnClient();
      yarnClient.init(tezConf);
      yarnClient.start();
      int numNMs = yarnClient.getNodeReports(NodeState.RUNNING).size();
      yarnClient.stop();
      // create enough 1-1 tasks to run in parallel
      numOneToOneTasks = numNMs - numBroadcastTasks - 1;// 1 AM
      if (numOneToOneTasks < 1) {
        numOneToOneTasks = 1;
      }
    }
    // Payload layout: [locality-check flag, expected broadcast sum (indices 0 + 1 = 1)].
    byte[] procByte = {(byte) (doLocalityCheck ? 1 : 0), 1};
    UserPayload procPayload = UserPayload.create(ByteBuffer.wrap(procByte));
    System.out.println("Using " + numOneToOneTasks + " 1-1 tasks");
    Vertex broadcastVertex = Vertex.create("Broadcast", ProcessorDescriptor.create(
        InputProcessor.class.getName()), numBroadcastTasks);
    Vertex inputVertex = Vertex.create("Input", ProcessorDescriptor.create(
        InputProcessor.class.getName()).setUserPayload(procPayload), numOneToOneTasks);
    // Parallelism of OneToOne is inferred from the 1-1 edge with the Input vertex.
    Vertex oneToOneVertex = Vertex.create("OneToOne",
        ProcessorDescriptor.create(
            OneToOneProcessor.class.getName()).setUserPayload(procPayload));
    oneToOneVertex.setVertexManagerPlugin(
        VertexManagerPluginDescriptor.create(InputReadyVertexManager.class.getName()));
    UnorderedKVEdgeConfig edgeConf = UnorderedKVEdgeConfig
        .newBuilder(Text.class.getName(), IntWritable.class.getName())
        .setFromConfiguration(tezConf).build();
    DAG dag = DAG.create("BroadcastAndOneToOneExample");
    dag.addVertex(inputVertex)
        .addVertex(broadcastVertex)
        .addVertex(oneToOneVertex)
        .addEdge(
            Edge.create(inputVertex, oneToOneVertex, edgeConf.createDefaultOneToOneEdgeProperty()))
        .addEdge(
            Edge.create(broadcastVertex, oneToOneVertex,
                edgeConf.createDefaultBroadcastEdgeProperty()));
    return dag;
  }

  /**
   * Runs the example DAG in a Tez session.
   *
   * @param conf base configuration, may be null
   * @param doLocalityCheck whether the consumers should verify same-JVM producers
   * @return true when the DAG finished in the SUCCEEDED state
   */
  public boolean run(Configuration conf, boolean doLocalityCheck) throws Exception {
    System.out.println("Running BroadcastAndOneToOneExample");
    // conf and UGI
    TezConfiguration tezConf;
    if (conf != null) {
      tezConf = new TezConfiguration(conf);
    } else {
      tezConf = new TezConfiguration();
    }
    // Container reuse is required for the locality check to be meaningful.
    tezConf.setBoolean(TezConfiguration.TEZ_AM_CONTAINER_REUSE_ENABLED, true);
    UserGroupInformation.setConfiguration(tezConf);
    // staging dir
    FileSystem fs = FileSystem.get(tezConf);
    String stagingDirStr = tezConf.get(TezConfiguration.TEZ_AM_STAGING_DIR,
        TezConfiguration.TEZ_AM_STAGING_DIR_DEFAULT) + Path.SEPARATOR +
        "BroadcastAndOneToOneExample" + Path.SEPARATOR + Long.toString(System.currentTimeMillis());
    Path stagingDir = new Path(stagingDirStr);
    tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, stagingDirStr);
    stagingDir = fs.makeQualified(stagingDir);
    // No need to add jar containing this class as assumed to be part of
    // the tez jars.
    // TEZ-674 Obtain tokens based on the Input / Output paths. For now assuming staging dir
    // is the same filesystem as the one used for Input/Output.
    // needs session or else TaskScheduler does not hold onto containers
    TezClient tezSession = TezClient.create("broadcastAndOneToOneExample", tezConf);
    tezSession.start();
    try {
      DAG dag = createDAG(fs, tezConf, stagingDir, doLocalityCheck);
      tezSession.waitTillReady();
      DAGClient dagClient = tezSession.submitDAG(dag);
      // monitoring
      DAGStatus dagStatus = dagClient.waitForCompletionWithStatusUpdates(null);
      if (dagStatus.getState() != DAGStatus.State.SUCCEEDED) {
        System.out.println("DAG diagnostics: " + dagStatus.getDiagnostics());
        return false;
      }
      return true;
    } finally {
      fs.delete(stagingDir, true);
      tezSession.stop();
    }
  }

  /**
   * Tool entry point: parses the optional {@code -skipLocalityCheck} flag, disables
   * the check in local mode, and runs the example.
   */
  @Override
  public int run(String[] args) throws Exception {
    boolean doLocalityCheck = true;
    if (args.length == 1) {
      if (args[0].equals(skipLocalityCheck)) {
        doLocalityCheck = false;
      } else {
        printUsage();
        throw new TezException("Invalid command line");
      }
    } else if (args.length > 1) {
      printUsage();
      throw new TezException("Invalid command line");
    }
    if (doLocalityCheck &&
        getConf().getBoolean(TezConfiguration.TEZ_LOCAL_MODE,
            TezConfiguration.TEZ_LOCAL_MODE_DEFAULT)) {
      // Everything runs in a single JVM in local mode, so the check proves nothing.
      System.out.println("locality check is not valid in local mode. skipping");
      doLocalityCheck = false;
    }
    boolean status = run(getConf(), doLocalityCheck);
    return status ? 0 : 1;
  }

  private static void printUsage() {
    System.err.println("broadcastAndOneToOneExample " + skipLocalityCheck);
    ToolRunner.printGenericCommandUsage(System.err);
  }

  // Command-line flag that disables the same-JVM locality check.
  static final String skipLocalityCheck = "-skipLocalityCheck";

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    BroadcastAndOneToOneExample job = new BroadcastAndOneToOneExample();
    int status = ToolRunner.run(conf, job, args);
    System.exit(status);
  }
}
| |
package org.ripple.bouncycastle.crypto.params;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Locale;
import org.ripple.bouncycastle.crypto.CipherParameters;
import org.ripple.bouncycastle.crypto.digests.SkeinDigest;
import org.ripple.bouncycastle.crypto.digests.SkeinEngine;
import org.ripple.bouncycastle.crypto.macs.SkeinMac;
import org.ripple.bouncycastle.util.Integers;
/**
* Parameters for the Skein hash function - a series of byte[] strings identified by integer tags.
* <p>
* Parameterised Skein can be used for:
* <ul>
* <li>MAC generation, by providing a {@link SkeinParameters.Builder#setKey(byte[]) key}.</li>
* <li>Randomised hashing, by providing a {@link SkeinParameters.Builder#setNonce(byte[]) nonce}.</li>
* <li>A hash function for digital signatures, associating a
* {@link SkeinParameters.Builder#setPublicKey(byte[]) public key} with the message digest.</li>
* <li>A key derivation function, by providing a
* {@link SkeinParameters.Builder#setKeyIdentifier(byte[]) key identifier}.</li>
* <li>Personalised hashing, by providing a
* {@link SkeinParameters.Builder#setPersonalisation(Date, String, String) recommended format} or
* {@link SkeinParameters.Builder#setPersonalisation(byte[]) arbitrary} personalisation string.</li>
* </ul>
*
* @see SkeinEngine
* @see SkeinDigest
* @see SkeinMac
*/
public class SkeinParameters
    implements CipherParameters
{
    /**
     * The parameter type for a secret key, supporting MAC or KDF functions: {@value
     * #PARAM_TYPE_KEY}.
     */
    public static final int PARAM_TYPE_KEY = 0;
    /**
     * The parameter type for the Skein configuration block: {@value #PARAM_TYPE_CONFIG}.
     */
    public static final int PARAM_TYPE_CONFIG = 4;
    /**
     * The parameter type for a personalisation string: {@value #PARAM_TYPE_PERSONALISATION}.
     */
    public static final int PARAM_TYPE_PERSONALISATION = 8;
    /**
     * The parameter type for a public key: {@value #PARAM_TYPE_PUBLIC_KEY}.
     */
    public static final int PARAM_TYPE_PUBLIC_KEY = 12;
    /**
     * The parameter type for a key identifier string: {@value #PARAM_TYPE_KEY_IDENTIFIER}.
     */
    public static final int PARAM_TYPE_KEY_IDENTIFIER = 16;
    /**
     * The parameter type for a nonce: {@value #PARAM_TYPE_NONCE}.
     */
    public static final int PARAM_TYPE_NONCE = 20;
    /**
     * The parameter type for the message: {@value #PARAM_TYPE_MESSAGE}.
     */
    public static final int PARAM_TYPE_MESSAGE = 48;
    /**
     * The parameter type for the output transformation: {@value #PARAM_TYPE_OUTPUT}.
     */
    public static final int PARAM_TYPE_OUTPUT = 63;

    // Map of parameter type (Integer) to parameter value (byte[]).
    private Hashtable parameters;

    public SkeinParameters()
    {
        this(new Hashtable());
    }

    private SkeinParameters(final Hashtable parameters)
    {
        this.parameters = parameters;
    }

    /**
     * Obtains a map of type (Integer) to value (byte[]) for the parameters tracked in this object.
     */
    public Hashtable getParameters()
    {
        return parameters;
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_KEY key parameter}, or <code>null</code> if not
     * set.
     */
    public byte[] getKey()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_KEY));
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_PERSONALISATION personalisation parameter}, or
     * <code>null</code> if not set.
     */
    public byte[] getPersonalisation()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_PERSONALISATION));
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_PUBLIC_KEY public key parameter}, or
     * <code>null</code> if not set.
     */
    public byte[] getPublicKey()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_PUBLIC_KEY));
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_KEY_IDENTIFIER key identifier parameter}, or
     * <code>null</code> if not set.
     */
    public byte[] getKeyIdentifier()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_KEY_IDENTIFIER));
    }

    /**
     * Obtains the value of the {@link #PARAM_TYPE_NONCE nonce parameter}, or <code>null</code> if
     * not set.
     */
    public byte[] getNonce()
    {
        return (byte[])parameters.get(Integers.valueOf(PARAM_TYPE_NONCE));
    }

    /**
     * A builder for {@link SkeinParameters}.
     */
    public static class Builder
    {
        private Hashtable parameters = new Hashtable();

        public Builder()
        {
        }

        public Builder(Hashtable paramsMap)
        {
            Enumeration keys = paramsMap.keys();
            while (keys.hasMoreElements())
            {
                Integer key = (Integer)keys.nextElement();
                parameters.put(key, paramsMap.get(key));
            }
        }

        public Builder(SkeinParameters params)
        {
            Enumeration keys = params.parameters.keys();
            while (keys.hasMoreElements())
            {
                Integer key = (Integer)keys.nextElement();
                parameters.put(key, params.parameters.get(key));
            }
        }

        /**
         * Sets a parameter to apply to the Skein hash function.<br>
         * Parameter types must be in the range 0,5..62, and cannot use the value {@value
         * SkeinParameters#PARAM_TYPE_MESSAGE} (reserved for message body).
         * <p>
         * Parameters with type &lt; {@value SkeinParameters#PARAM_TYPE_MESSAGE} are processed before
         * the message content, parameters with type &gt; {@value SkeinParameters#PARAM_TYPE_MESSAGE}
         * are processed after the message and prior to output.
         *
         * @param type the type of the parameter, in the range 5..62.
         * @param value the byte sequence of the parameter.
         * @return the current builder instance.
         */
        public Builder set(int type, byte[] value)
        {
            if (value == null)
            {
                throw new IllegalArgumentException("Parameter value must not be null.");
            }
            if ((type != PARAM_TYPE_KEY)
                && (type <= PARAM_TYPE_CONFIG || type >= PARAM_TYPE_OUTPUT || type == PARAM_TYPE_MESSAGE))
            {
                throw new IllegalArgumentException("Parameter types must be in the range 0,5..47,49..62.");
            }
            // Defensive: unreachable in practice, since PARAM_TYPE_CONFIG (4) is already
            // rejected by the range check above; kept for clarity and safety.
            if (type == PARAM_TYPE_CONFIG)
            {
                throw new IllegalArgumentException("Parameter type " + PARAM_TYPE_CONFIG
                    + " is reserved for internal use.");
            }
            this.parameters.put(Integers.valueOf(type), value);
            return this;
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_KEY} parameter.
         */
        public Builder setKey(byte[] key)
        {
            return set(PARAM_TYPE_KEY, key);
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_PERSONALISATION} parameter.
         */
        public Builder setPersonalisation(byte[] personalisation)
        {
            return set(PARAM_TYPE_PERSONALISATION, personalisation);
        }

        /**
         * Implements the recommended personalisation format for Skein defined in Section 4.11 of
         * the Skein 1.3 specification.
         * <p>
         * The format is <code>YYYYMMDD email@address distinguisher</code>, encoded to a byte
         * sequence using UTF-8 encoding.
         *
         * @param date the date the personalised application of the Skein was defined.
         * @param emailAddress the email address of the creation of the personalised application.
         * @param distinguisher an arbitrary personalisation string distinguishing the application.
         * @return the current builder.
         */
        public Builder setPersonalisation(Date date, String emailAddress, String distinguisher)
        {
            // "yyyyMMdd" is the calendar date the spec asks for; the previous "YYYYMMDD"
            // pattern used SimpleDateFormat's week-based year ('Y') and day-of-year ('D'),
            // producing incorrect strings (e.g. around year boundaries).
            return writePersonalisation(new SimpleDateFormat("yyyyMMdd"), date, emailAddress, distinguisher);
        }

        /**
         * Implements the recommended personalisation format for Skein defined in Section 4.11 of
         * the Skein 1.3 specification. You may need to use this method if the default locale
         * doesn't use a Gregorian calendar so that the date produced is compatible with other
         * implementations.
         * <p>
         * The format is <code>YYYYMMDD email@address distinguisher</code>, encoded to a byte
         * sequence using UTF-8 encoding.
         *
         * @param date the date the personalised application of the Skein was defined.
         * @param dateLocale locale to be used for date interpretation.
         * @param emailAddress the email address of the creation of the personalised application.
         * @param distinguisher an arbitrary personalisation string distinguishing the application.
         * @return the current builder.
         */
        public Builder setPersonalisation(Date date, Locale dateLocale, String emailAddress, String distinguisher)
        {
            // See the pattern note in the overload above.
            return writePersonalisation(new SimpleDateFormat("yyyyMMdd", dateLocale), date, emailAddress, distinguisher);
        }

        // Shared implementation of the two setPersonalisation(Date, ...) overloads:
        // encodes "<date> <email> <distinguisher>" in UTF-8 and stores it.
        private Builder writePersonalisation(DateFormat format, Date date, String emailAddress, String distinguisher)
        {
            try
            {
                final ByteArrayOutputStream bout = new ByteArrayOutputStream();
                final OutputStreamWriter out = new OutputStreamWriter(bout, "UTF-8");
                out.write(format.format(date));
                out.write(" ");
                out.write(emailAddress);
                out.write(" ");
                out.write(distinguisher);
                out.close();
                return set(PARAM_TYPE_PERSONALISATION, bout.toByteArray());
            }
            catch (IOException e)
            {
                throw new IllegalStateException("Byte I/O failed: " + e);
            }
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_PUBLIC_KEY} parameter.
         */
        public Builder setPublicKey(byte[] publicKey)
        {
            return set(PARAM_TYPE_PUBLIC_KEY, publicKey);
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_KEY_IDENTIFIER} parameter.
         */
        public Builder setKeyIdentifier(byte[] keyIdentifier)
        {
            return set(PARAM_TYPE_KEY_IDENTIFIER, keyIdentifier);
        }

        /**
         * Sets the {@link SkeinParameters#PARAM_TYPE_NONCE} parameter.
         */
        public Builder setNonce(byte[] nonce)
        {
            return set(PARAM_TYPE_NONCE, nonce);
        }

        /**
         * Constructs a new {@link SkeinParameters} instance with the parameters provided to this
         * builder.
         */
        public SkeinParameters build()
        {
            return new SkeinParameters(parameters);
        }
    }
}
| |
// Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.bazel.rules.ninja.actions;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Maps;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetFactory;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.SymlinkAction;
import com.google.devtools.build.lib.bazel.rules.ninja.file.GenericParsingException;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.NestedSetVisitor;
import com.google.devtools.build.lib.collect.nestedset.NestedSetVisitor.VisitedState;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;
/** Configured target factory for {@link NinjaBuildRule}. */
public class NinjaBuild implements RuleConfiguredTargetFactory {
@Override
public ConfiguredTarget create(RuleContext ruleContext)
throws InterruptedException, RuleErrorException, ActionConflictException {
Map<String, List<String>> outputGroupsFromAttrs =
ruleContext.attributes().get("output_groups", Type.STRING_LIST_DICT);
NinjaGraphProvider graphProvider =
ruleContext.getPrerequisite("ninja_graph", NinjaGraphProvider.class);
Preconditions.checkNotNull(graphProvider);
List<PathFragment> pathsToBuild =
outputGroupsFromAttrs.values().stream()
.flatMap(List::stream)
.map(PathFragment::create)
.collect(Collectors.toList());
ImmutableSortedMap.Builder<PathFragment, Artifact> depsMapBuilder =
ImmutableSortedMap.naturalOrder();
ImmutableSortedMap.Builder<PathFragment, Artifact> symlinksMapBuilder =
ImmutableSortedMap.naturalOrder();
createDepsMap(
ruleContext, graphProvider.getWorkingDirectory(), depsMapBuilder, symlinksMapBuilder);
ImmutableSortedMap<PathFragment, Artifact> depsMap = depsMapBuilder.build();
NinjaGraphArtifactsHelper artifactsHelper =
new NinjaGraphArtifactsHelper(
ruleContext,
graphProvider.getOutputRoot(),
graphProvider.getWorkingDirectory(),
symlinksMapBuilder.build(),
graphProvider.getOutputRootSymlinks());
if (ruleContext.hasErrors()) {
return null;
}
RuleConfiguredTargetBuilder ruleConfiguredTargetBuilder =
new RuleConfiguredTargetBuilder(ruleContext);
try {
symlinkDepsMappings(ruleContext, artifactsHelper, depsMap);
PhonyTargetArtifacts phonyTargetArtifacts =
new PhonyTargetArtifacts(graphProvider.getPhonyTargetsMap(), artifactsHelper);
ImmutableSet<PathFragment> symlinks =
ImmutableSet.<PathFragment>builder()
.addAll(graphProvider.getOutputRootInputsSymlinks())
.addAll(depsMap.keySet())
.build();
new NinjaActionsHelper(
ruleContext,
ruleConfiguredTargetBuilder,
artifactsHelper,
graphProvider.getTargetsMap(),
graphProvider.getPhonyTargetsMap(),
phonyTargetArtifacts,
pathsToBuild,
symlinks)
.createNinjaActions();
if (!checkOrphanArtifacts(ruleContext)) {
return null;
}
NestedSetBuilder<Artifact> filesToBuild = NestedSetBuilder.stableOrder();
TreeMap<String, NestedSet<Artifact>> outputGroups = Maps.newTreeMap();
for (Map.Entry<String, List<String>> entry : outputGroupsFromAttrs.entrySet()) {
NestedSet<Artifact> artifacts =
getGroupArtifacts(
ruleContext,
entry.getValue(),
graphProvider.getPhonyTargetsMap(),
phonyTargetArtifacts,
artifactsHelper);
outputGroups.put(entry.getKey(), artifacts);
filesToBuild.addTransitive(artifacts);
}
if (ruleContext.hasErrors()) {
return null;
}
return ruleConfiguredTargetBuilder
.addProvider(RunfilesProvider.class, RunfilesProvider.EMPTY)
.setFilesToBuild(filesToBuild.build())
.addOutputGroups(outputGroups)
.build();
} catch (GenericParsingException e) {
ruleContext.ruleError(e.getMessage());
return null;
}
}
private static void symlinkDepsMappings(
RuleContext ruleContext,
NinjaGraphArtifactsHelper artifactsHelper,
ImmutableSortedMap<PathFragment, Artifact> depsMap)
throws GenericParsingException {
for (Map.Entry<PathFragment, Artifact> entry : depsMap.entrySet()) {
PathFragment depPath = entry.getKey();
Artifact destinationArtifact = entry.getValue();
Artifact outputArtifact = artifactsHelper.createOutputArtifact(depPath);
SymlinkAction symlinkAction =
SymlinkAction.toArtifact(
ruleContext.getActionOwner(),
destinationArtifact,
outputArtifact,
String.format(
"Symlinking deps_mapping entry '%s' to '%s'",
destinationArtifact.getExecPath(), outputArtifact.getExecPath()));
ruleContext.registerAction(symlinkAction);
}
}
private static boolean checkOrphanArtifacts(RuleContext ruleContext) {
ImmutableSet<Artifact> orphanArtifacts =
ruleContext.getAnalysisEnvironment().getOrphanArtifacts();
if (!orphanArtifacts.isEmpty()) {
List<String> paths =
orphanArtifacts.stream().map(Artifact::getExecPathString).collect(Collectors.toList());
ruleContext.ruleError(
"The following artifacts do not have a generating action in Ninja file: "
+ String.join(", ", paths));
return false;
}
return true;
}
private static NestedSet<Artifact> getGroupArtifacts(
RuleContext ruleContext,
List<String> targets,
ImmutableSortedMap<PathFragment, PhonyTarget> phonyTargetsMap,
PhonyTargetArtifacts phonyTargetsArtifacts,
NinjaGraphArtifactsHelper artifactsHelper)
throws GenericParsingException {
NestedSetBuilder<Artifact> nestedSetBuilder = NestedSetBuilder.stableOrder();
for (String target : targets) {
PathFragment path = PathFragment.create(target);
if (phonyTargetsMap.containsKey(path)) {
NestedSet<Artifact> artifacts = phonyTargetsArtifacts.getPhonyTargetArtifacts(path);
nestedSetBuilder.addTransitive(artifacts);
} else {
Artifact outputArtifact = artifactsHelper.createOutputArtifact(path);
if (outputArtifact == null) {
ruleContext.ruleError(
String.format("Required target '%s' is not created in ninja_graph.", path));
return NestedSetBuilder.emptySet(Order.STABLE_ORDER);
}
nestedSetBuilder.add(outputArtifact);
}
}
return nestedSetBuilder.build();
}
  /**
   * Fills {@code symlinksMapBuilder} with the artifacts built by the "ninja_graph" prerequisite,
   * keyed by exec path relative to {@code workingDirectory}, and {@code depsMapBuilder} with the
   * single output artifact of each "deps_mapping" entry, keyed by the mapping's path string.
   *
   * <p>Reports an attribute error and returns early when a deps_mapping target produces more than
   * one output file.
   */
  private static void createDepsMap(
      RuleContext ruleContext,
      PathFragment workingDirectory,
      ImmutableSortedMap.Builder<PathFragment, Artifact> depsMapBuilder,
      ImmutableSortedMap.Builder<PathFragment, Artifact> symlinksMapBuilder)
      throws InterruptedException {
    FileProvider fileProvider = ruleContext.getPrerequisite("ninja_graph", FileProvider.class);
    Preconditions.checkNotNull(fileProvider);
    // Record every ninja_graph output so symlinks can later be created for them.
    new NestedSetVisitor<Artifact>(
            a -> {
              symlinksMapBuilder.put(a.getExecPath().relativeTo(workingDirectory), a);
            },
            new VisitedState<>())
        .visit(fileProvider.getFilesToBuild());
    Map<String, TransitiveInfoCollection> mapping = ruleContext.getPrerequisiteMap("deps_mapping");
    for (Map.Entry<String, TransitiveInfoCollection> entry : mapping.entrySet()) {
      NestedSet<Artifact> filesToBuild =
          entry.getValue().getProvider(FileProvider.class).getFilesToBuild();
      // Each mapping must resolve to exactly one file; anything else is a user error.
      if (!filesToBuild.isSingleton()) {
        ruleContext.attributeError(
            "deps_mapping",
            String.format(
                "'%s' contains more than one output. "
                    + "deps_mapping should only contain targets, producing a single output file.",
                entry.getValue().getLabel().getCanonicalForm()));
        return;
      }
      depsMapBuilder.put(PathFragment.create(entry.getKey()), filesToBuild.getSingleton());
    }
  }
}
| |
/*-
* #%L
 * An implementation of the first-order primal-dual solver proposed by Antonin Chambolle and Thomas Pock.
* Ref.: Chambolle, Antonin, and Thomas Pock. "A first-order primal-dual algorithm for convex problems with applications to imaging." Journal of Mathematical Imaging and Vision 40.1 (2011): 120-145.
* %%
* Copyright (C) 2017 Tim-Oliver Buchholz, University of Konstanz
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.ops.fopd;
import static org.junit.Assert.assertEquals;
import net.imagej.ops.fopd.energy.deconvolution.TGVL1Deconvolution2D;
import net.imagej.ops.fopd.energy.deconvolution.TVHuberL1Deconvolution2D;
import net.imagej.ops.fopd.energy.deconvolution.TVKLDivDeconvolution2D;
import net.imagej.ops.fopd.energy.deconvolution.TVL1Deconvolution2D;
import net.imagej.ops.fopd.energy.deconvolution.TVSquaredL2Deconvolution2D;
import net.imagej.ops.fopd.energy.denoising.TGVL1Denoising2D;
import net.imagej.ops.fopd.energy.denoising.TVHuberL1Denoising2D;
import net.imagej.ops.fopd.energy.denoising.TVL1Denoising2D;
import net.imglib2.Cursor;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.img.Img;
import net.imglib2.type.numeric.real.DoubleType;
import org.junit.Test;
/**
* Tests of the implemented solvers.
*
* @author Tim-Oliver Buchholz, University of Konstanz
*/
/**
 * Regression tests for the implemented first-order primal-dual solvers.
 *
 * <p>Each test runs one solver for a fixed number of iterations on the shared
 * test image (or its convolved counterpart) and compares every output pixel
 * against precomputed expected values.
 *
 * @author Tim-Oliver Buchholz, University of Konstanz
 */
public class SolverTest extends AbstractOpTest {

	static final double[] expectedTVL1Denoising = new double[] { 1.0,
		0.6505244424115337, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };

	static final double[] expectedTVHuberL1Denoising = new double[] { 1.0,
		0.6444815657343546, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };

	static final double[] expectedTGVL1Denoising = new double[] { 1.0,
		0.61092099595629, 1.0, 1.0, 1.0, 1.0, 1.0, 0.9919244797347313, 1.0 };

	static final double[] expectedTVL1Deconvolution = new double[] {
		0.852645952127457, 0.9599103235969544, 1.0, 0.9014054497424371, 1.0,
		1.0, 0.8526459284847978, 0.9599102998930213, 1.0 };

	static final double[] expectedTVHuberL1Deconvolution = new double[] {
		0.8449143152960898, 0.9578770115357464, 1.0, 0.8949538345908679, 1.0,
		1.0, 0.8449142943570016, 0.9578769702830028, 1.0 };

	private static final double[] expectedTGVL1Deconvolution = new double[] {
		0.8439089252434915, 0.9610056122974837, 1.0, 0.8914287608473277, 1.0,
		1.0, 0.8445419552876334, 0.9618260754956498, 1.0 };

	// NOTE: field renamed from "...Deconvolutino" (typo) — private, so no external callers.
	private static final double[] expectedTVL1MultiViewDeconvolution =
		new double[] { 0.4105700932017502, 0.5980660290731934,
			0.9498420512962649, 0.504989219586202, 0.6991254176998365,
			0.9600158205067111, 0.4172888561183441, 0.6045622279388823,
			0.9466757224895699 };

	private static final double[] expectedTVSquaredL2MultiViewDeconvolution =
		new double[] { 0.7592273723204381, 0.7623618244650691,
			0.7650725640119375, 0.7580971025767415, 0.7613931213358969,
			0.764063073007377, 0.7592273809033149, 0.7623618369559704,
			0.7650725708261379 };

	private static final double[] expectedTVKLDivMultiViewDeconvolution =
		new double[] { 0.7788525150845698, 0.7832699980854522,
			0.7866283572965034, 0.7760799537075452, 0.7811139240274552,
			0.7849561977296122, 0.7788525382058823, 0.7832700390325155,
			0.786628410267056 };

	/**
	 * Asserts that every pixel produced by {@code result} exactly matches the
	 * corresponding entry of {@code expected} (delta 0).
	 */
	private static void assertPixelsEqual(final Cursor<DoubleType> result,
		final double[] expected)
	{
		int i = 0;
		while (result.hasNext()) {
			result.next();
			assertEquals("Pixel at [" + result.getDoublePosition(0) + "," + result
				.getDoublePosition(1) + "] differs.", expected[i++], result.get()
					.get(), 0);
		}
	}

	@Test
	public void TVL1DenoisingTest() {
		@SuppressWarnings("unchecked")
		final Cursor<DoubleType> c = ((Img<DoubleType>) ops.run(
			TVL1Denoising2D.class, new RandomAccessibleInterval[] { img2D }, 10,
			0.5)).cursor();
		assertPixelsEqual(c, expectedTVL1Denoising);
	}

	@Test
	public void TVHuberL1DenoisingTest() {
		@SuppressWarnings("unchecked")
		final Cursor<DoubleType> c = ((Img<DoubleType>) ops.run(
			TVHuberL1Denoising2D.class, new RandomAccessibleInterval[] { img2D },
			10, 0.5, 0.05)).cursor();
		assertPixelsEqual(c, expectedTVHuberL1Denoising);
	}

	@Test
	public void TGVL1DenoisingTest() {
		@SuppressWarnings("unchecked")
		final Cursor<DoubleType> c = ((Img<DoubleType>) ops.run(
			TGVL1Denoising2D.class, new RandomAccessibleInterval[] { img2D }, 10,
			0.5, 1.0)).cursor();
		assertPixelsEqual(c, expectedTGVL1Denoising);
	}

	@Test
	public void TVL1DeconvolutionTest() {
		@SuppressWarnings("unchecked")
		final Cursor<DoubleType> c = ((Img<DoubleType>) ops.run(
			TVL1Deconvolution2D.class, new RandomAccessibleInterval[] {
				convolved2D }, new RandomAccessibleInterval[] { kernel2D }, 10,
			0.1)).cursor();
		assertPixelsEqual(c, expectedTVL1Deconvolution);
	}

	@Test
	public void TVHuberL1DeconvolutionTest() {
		@SuppressWarnings("unchecked")
		final Cursor<DoubleType> c = ((Img<DoubleType>) ops.run(
			TVHuberL1Deconvolution2D.class, new RandomAccessibleInterval[] {
				convolved2D }, new RandomAccessibleInterval[] { kernel2D }, 10, 0.1,
			0.8)).cursor();
		assertPixelsEqual(c, expectedTVHuberL1Deconvolution);
	}

	@Test
	public void TGVL1DeconvolutionTest() {
		@SuppressWarnings("unchecked")
		final Cursor<DoubleType> c = ((Img<DoubleType>) ops.run(
			TGVL1Deconvolution2D.class, new RandomAccessibleInterval[] {
				convolved2D }, new RandomAccessibleInterval[] { kernel2D }, 10, 0.5,
			1)).cursor();
		assertPixelsEqual(c, expectedTGVL1Deconvolution);
	}

	@Test
	public void TVL1MultiViewDeconvolutionTest() {
		@SuppressWarnings("unchecked")
		final Cursor<DoubleType> c = ((Img<DoubleType>) ops.run(
			TVL1Deconvolution2D.class, new RandomAccessibleInterval[] { ops.copy()
				.img(convolved2D), ops.copy().img(convolved2D) },
			new RandomAccessibleInterval[] { kernel2D, kernel2D }, 10, 0.1))
				.cursor();
		assertPixelsEqual(c, expectedTVL1MultiViewDeconvolution);
	}

	@Test
	public void TVL2MultiViewDeconvolutionTest() {
		@SuppressWarnings("unchecked")
		final Cursor<DoubleType> c = ((Img<DoubleType>) ops.run(
			TVSquaredL2Deconvolution2D.class, new RandomAccessibleInterval[] { ops
				.copy().img(convolved2D), ops.copy().img(convolved2D) },
			new RandomAccessibleInterval[] { kernel2D, kernel2D }, 10, 0.1))
				.cursor();
		assertPixelsEqual(c, expectedTVSquaredL2MultiViewDeconvolution);
	}

	@Test
	public void TVKLDivMultiViewDeconvolutionTest() {
		@SuppressWarnings("unchecked")
		final Cursor<DoubleType> c = ((Img<DoubleType>) ops.run(
			TVKLDivDeconvolution2D.class, new RandomAccessibleInterval[] { ops
				.copy().img(convolved2D), ops.copy().img(convolved2D) },
			new RandomAccessibleInterval[] { kernel2D, kernel2D }, 10, 0.1))
				.cursor();
		assertPixelsEqual(c, expectedTVKLDivMultiViewDeconvolution);
	}
}
| |
/*
* Copyright (C) 2017-2019 Hazuki
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.hazuki.yuzubrowser.core.cache;
import java.io.BufferedInputStream;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Array;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import androidx.annotation.NonNull;
/**
* *****************************************************************************
* Taken from the JB source code, can be found in:
* libcore/luni/src/main/java/libcore/io/DiskLruCache.java
* or direct link:
* https://android.googlesource.com/platform/libcore/+/android-4.1.1_r1/luni/src/main/java/libcore/io/DiskLruCache.java
* *****************************************************************************
* <p>
* A cache that uses a bounded amount of space on a filesystem. Each cache
* entry has a string key and a fixed number of values. Values are byte
* sequences, accessible as streams or files. Each value must be between {@code
* 0} and {@code Integer.MAX_VALUE} bytes in length.
* <p>
* <p>The cache stores its data in a directory on the filesystem. This
* directory must be exclusive to the cache; the cache may delete or overwrite
* files from its directory. It is an error for multiple processes to use the
* same cache directory at the same time.
* <p>
* <p>This cache limits the number of bytes that it will store on the
* filesystem. When the number of stored bytes exceeds the limit, the cache will
* remove entries in the background until the limit is satisfied. The limit is
* not strict: the cache may temporarily exceed it while waiting for files to be
* deleted. The limit does not include filesystem overhead or the cache
* journal so space-sensitive applications should set a conservative limit.
* <p>
* <p>Clients call {@link #edit} to create or update the values of an entry. An
* entry may have only one editor at one time; if a value is not available to be
* edited then {@link #edit} will return null.
* <ul>
* <li>When an entry is being <strong>created</strong> it is necessary to
* supply a full set of values; the empty value should be used as a
* placeholder if necessary.
* <li>When an entry is being <strong>edited</strong>, it is not necessary
* to supply data for every value; values default to their previous
* value.
* </ul>
* Every {@link #edit} call must be matched by a call to {@link Editor#commit}
* or {@link Editor#abort}. Committing is atomic: a read observes the full set
* of values as they were before or after the commit, but never a mix of values.
* <p>
* <p>Clients call {@link #get} to read a snapshot of an entry. The read will
* observe the value at the time that {@link #get} was called. Updates and
* removals after the call do not impact ongoing reads.
* <p>
* <p>This class is tolerant of some I/O errors. If files are missing from the
* filesystem, the corresponding entries will be dropped from the cache. If
* an error occurs while writing a cache value, the edit will fail silently.
* Callers should handle other problems by catching {@code IOException} and
* responding appropriately.
*/
public final class DiskLruCache implements Closeable {
private static final String JOURNAL_FILE = "journal";
private static final String JOURNAL_FILE_TMP = "journal.tmp";
private static final String MAGIC = "libcore.io.DiskLruCache";
private static final String VERSION_1 = "1";
private static final long ANY_SEQUENCE_NUMBER = -1;
private static final String CLEAN = "CLEAN";
private static final String DIRTY = "DIRTY";
private static final String REMOVE = "REMOVE";
private static final String READ = "READ";
private static final Charset UTF_8 = Charset.forName("UTF-8");
private static final int IO_BUFFER_SIZE = 8 * 1024;
/*
* This cache uses a journal file named "journal". A typical journal file
* looks like this:
* libcore.io.DiskLruCache
* 1
* 100
* 2
*
* CLEAN 3400330d1dfc7f3f7f4b8d4d803dfcf6 832 21054
* DIRTY 335c4c6028171cfddfbaae1a9c313c52
* CLEAN 335c4c6028171cfddfbaae1a9c313c52 3934 2342
* REMOVE 335c4c6028171cfddfbaae1a9c313c52
* DIRTY 1ab96a171faeeee38496d8b330771a7a
* CLEAN 1ab96a171faeeee38496d8b330771a7a 1600 234
* READ 335c4c6028171cfddfbaae1a9c313c52
* READ 3400330d1dfc7f3f7f4b8d4d803dfcf6
*
* The first five lines of the journal form its header. They are the
* constant string "libcore.io.DiskLruCache", the disk cache's version,
* the application's version, the value count, and a blank line.
*
* Each of the subsequent lines in the file is a record of the state of a
* cache entry. Each line contains space-separated values: a state, a key,
* and optional state-specific values.
* o DIRTY lines track that an entry is actively being created or updated.
* Every successful DIRTY action should be followed by a CLEAN or REMOVE
* action. DIRTY lines without a matching CLEAN or REMOVE indicate that
* temporary files may need to be deleted.
* o CLEAN lines track a cache entry that has been successfully published
* and may be read. A publish line is followed by the lengths of each of
* its values.
* o READ lines track accesses for LRU.
* o REMOVE lines track entries that have been deleted.
*
* The journal file is appended to as cache operations occur. The journal may
* occasionally be compacted by dropping redundant lines. A temporary file named
* "journal.tmp" will be used during compaction; that file should be deleted if
* it exists when the cache is opened.
*/
private final File directory;
private final File journalFile;
private final File journalFileTmp;
private final int appVersion;
private final long maxSize;
private final int valueCount;
private long size = 0;
private Writer journalWriter;
private final LinkedHashMap<String, Entry> lruEntries = new LinkedHashMap<>(0, 0.75f, true);
private int redundantOpCount;
private boolean destroied = false;
private OnTrimCacheListener listener;
/**
* To differentiate between old and current snapshots, each entry is given
* a sequence number each time an edit is committed. A snapshot is stale if
* its sequence number is not equal to its entry's sequence number.
*/
private long nextSequenceNumber = 0;
/* From java.util.Arrays */
@SuppressWarnings("unchecked")
private static <T> T[] copyOfRange(T[] original, int start, int end) {
final int originalLength = original.length; // For exception priority compatibility.
if (start > end) {
throw new IllegalArgumentException();
}
if (start < 0 || start > originalLength) {
throw new ArrayIndexOutOfBoundsException();
}
final int resultLength = end - start;
final int copyLength = Math.min(resultLength, originalLength - start);
final T[] result = (T[]) Array
.newInstance(original.getClass().getComponentType(), resultLength);
System.arraycopy(original, start, result, 0, copyLength);
return result;
}
/**
* Returns the remainder of 'reader' as a string, closing it when done.
*/
public static String readFully(Reader reader) throws IOException {
try {
StringWriter writer = new StringWriter();
char[] buffer = new char[1024];
int count;
while ((count = reader.read(buffer)) != -1) {
writer.write(buffer, 0, count);
}
return writer.toString();
} finally {
reader.close();
}
}
/**
* Returns the ASCII characters up to but not including the next "\r\n", or
* "\n".
*
* @throws java.io.EOFException if the stream is exhausted before the next newline
* character.
*/
public static String readAsciiLine(InputStream in) throws IOException {
// TODO: support UTF-8 here instead
StringBuilder result = new StringBuilder(80);
while (true) {
int c = in.read();
if (c == -1) {
throw new EOFException();
} else if (c == '\n') {
break;
}
result.append((char) c);
}
int length = result.length();
if (length > 0 && result.charAt(length - 1) == '\r') {
result.setLength(length - 1);
}
return result.toString();
}
/**
* Closes 'closeable', ignoring any checked exceptions. Does nothing if 'closeable' is null.
*/
public static void closeQuietly(Closeable closeable) {
if (closeable != null) {
try {
closeable.close();
} catch (RuntimeException rethrown) {
throw rethrown;
} catch (Exception ignored) {
}
}
}
/**
* Recursively delete everything in {@code dir}.
*/
// TODO: this should specify paths as Strings rather than as Files
public static void deleteContents(File dir) throws IOException {
File[] files = dir.listFiles();
if (files == null) {
throw new IllegalArgumentException("not a directory: " + dir);
}
for (File file : files) {
if (file.isDirectory()) {
deleteContents(file);
}
String name = file.getName();
if (name.equals("indexTable") || name.equals("indexTable-journal")) {
continue;
}
if (!file.delete()) {
throw new IOException("failed to delete file: " + file);
}
}
}
    /**
     * This cache uses a single background thread to evict entries.
     */
    private final ExecutorService executorService = new ThreadPoolExecutor(0, 1,
            60L, TimeUnit.SECONDS, new LinkedBlockingQueue<>());
    // Background task: trims to the size limit and compacts the journal when it has
    // accumulated enough redundant records. Synchronizes on the cache instance so it
    // never races with foreground operations.
    private final Callable<Void> cleanupCallable = new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            synchronized (DiskLruCache.this) {
                if (journalWriter == null) {
                    return null; // closed
                }
                trimToSize();
                if (journalRebuildRequired()) {
                    rebuildJournal();
                    redundantOpCount = 0;
                }
            }
            return null;
        }
    };
    // Sole constructor; callers go through open(), which validates the arguments and
    // recovers or rebuilds the journal before the cache is used.
    private DiskLruCache(File directory, int appVersion, int valueCount, long maxSize) {
        this.directory = directory;
        this.appVersion = appVersion;
        this.journalFile = new File(directory, JOURNAL_FILE);
        this.journalFileTmp = new File(directory, JOURNAL_FILE_TMP);
        this.valueCount = valueCount;
        this.maxSize = maxSize;
    }
    /**
     * Opens the cache in {@code directory}, creating a cache if none exists
     * there.
     *
     * @param directory a writable directory
     * @param appVersion the application's version; a mismatch with the journal header
     *                   invalidates any existing on-disk cache
     * @param valueCount the number of values per cache entry. Must be positive.
     * @param maxSize the maximum number of bytes this cache should use to store
     * @throws java.io.IOException if reading or writing the cache directory fails
     */
    public static DiskLruCache open(File directory, int appVersion, int valueCount, long maxSize)
            throws IOException {
        if (maxSize <= 0) {
            throw new IllegalArgumentException("maxSize <= 0");
        }
        if (valueCount <= 0) {
            throw new IllegalArgumentException("valueCount <= 0");
        }
        // prefer to pick up where we left off
        DiskLruCache cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
        if (cache.journalFile.exists()) {
            try {
                cache.readJournal();
                cache.processJournal();
                cache.journalWriter = new BufferedWriter(new FileWriter(cache.journalFile, true),
                        IO_BUFFER_SIZE);
                return cache;
            } catch (IOException journalIsCorrupt) {
                // A corrupt journal means the on-disk state is untrustworthy:
                // wipe everything and fall through to a fresh cache.
                cache.delete();
            }
        }
        // create a new empty cache
        directory.mkdirs();
        cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
        cache.rebuildJournal();
        return cache;
    }
    /**
     * Deletes all stored values, resets the in-memory bookkeeping, and rebuilds an
     * empty journal so the cache remains usable afterwards. I/O failures are only
     * logged, not propagated.
     */
    public void clear() {
        try {
            delete();
            lruEntries.clear();
            redundantOpCount = 0;
            size = 0;
            nextSequenceNumber = 0;
            rebuildJournal();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
private void readJournal() throws IOException {
InputStream in = new BufferedInputStream(new FileInputStream(journalFile), IO_BUFFER_SIZE);
try {
String magic = readAsciiLine(in);
String version = readAsciiLine(in);
String appVersionString = readAsciiLine(in);
String valueCountString = readAsciiLine(in);
String blank = readAsciiLine(in);
if (!MAGIC.equals(magic)
|| !VERSION_1.equals(version)
|| !Integer.toString(appVersion).equals(appVersionString)
|| !Integer.toString(valueCount).equals(valueCountString)
|| !"".equals(blank)) {
throw new IOException("unexpected journal header: ["
+ magic + ", " + version + ", " + valueCountString + ", " + blank + "]");
}
while (true) {
try {
readJournalLine(readAsciiLine(in));
} catch (EOFException endOfJournal) {
break;
}
}
} finally {
closeQuietly(in);
}
}
    /**
     * Applies one journal record to {@link #lruEntries}. Record format:
     * {@code STATE key [state-specific values...]}.
     */
    private void readJournalLine(String line) throws IOException {
        String[] parts = line.split(" ");
        if (parts.length < 2) {
            throw new IOException("unexpected journal line: " + line);
        }
        String key = parts[1];
        if (parts[0].equals(REMOVE) && parts.length == 2) {
            lruEntries.remove(key);
            return;
        }
        Entry entry = lruEntries.get(key);
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        }
        if (parts[0].equals(CLEAN) && parts.length == 2 + valueCount) {
            // CLEAN records carry one length per value.
            entry.readable = true;
            entry.currentEditor = null;
            entry.setLengths(copyOfRange(parts, 2, parts.length));
        } else if (parts[0].equals(DIRTY) && parts.length == 2) {
            // An unmatched DIRTY record marks an in-flight edit; processJournal() discards it.
            entry.currentEditor = new Editor(entry);
        } else if (parts[0].equals(READ) && parts.length == 2) {
            // this work was already done by calling lruEntries.get()
        } else {
            throw new IOException("unexpected journal line: " + line);
        }
    }
    /**
     * Computes the initial size and collects garbage as a part of opening the
     * cache. Dirty entries are assumed to be inconsistent and will be deleted.
     */
    private void processJournal() throws IOException {
        deleteIfExists(journalFileTmp);
        for (Iterator<Entry> i = lruEntries.values().iterator(); i.hasNext(); ) {
            Entry entry = i.next();
            if (entry.currentEditor == null) {
                // Published entry: count its value files toward the cache size.
                for (int t = 0; t < valueCount; t++) {
                    size += entry.lengths[t];
                }
            } else {
                // Edit was in flight at shutdown: drop the entry and both file sets.
                entry.currentEditor = null;
                for (int t = 0; t < valueCount; t++) {
                    deleteIfExists(entry.getCleanFile(t));
                    deleteIfExists(entry.getDirtyFile(t));
                }
                i.remove();
            }
        }
    }
/**
* Creates a new journal that omits redundant information. This replaces the
* current journal if it exists.
*/
@SuppressWarnings("ResultOfMethodCallIgnored")
private synchronized void rebuildJournal() throws IOException {
if (journalWriter != null) {
journalWriter.close();
}
Writer writer = new BufferedWriter(new FileWriter(journalFileTmp), IO_BUFFER_SIZE);
writer.write(MAGIC);
writer.write("\n");
writer.write(VERSION_1);
writer.write("\n");
writer.write(Integer.toString(appVersion));
writer.write("\n");
writer.write(Integer.toString(valueCount));
writer.write("\n");
writer.write("\n");
for (Entry entry : lruEntries.values()) {
if (entry.currentEditor != null) {
writer.write(DIRTY + ' ' + entry.key + '\n');
} else {
writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
}
}
writer.close();
if (!journalFileTmp.renameTo(journalFile)) {
throw new IllegalStateException("journal file rename failed : " + journalFile.getAbsolutePath()
+ "; directory:" + journalFile.getParentFile().exists()
+ "; from file:" + journalFileTmp.exists() + "; dest file:" + journalFile.exists());
}
journalWriter = new BufferedWriter(new FileWriter(journalFile, true), IO_BUFFER_SIZE);
}
private static void deleteIfExists(File file) throws IOException {
// try {
// Libcore.os.remove(file.getPath());
// } catch (ErrnoException errnoException) {
// if (errnoException.errno != OsConstants.ENOENT) {
// throw errnoException.rethrowAsIOException();
// }
// }
if (file.exists() && !file.delete()) {
throw new IOException();
}
}
/**
* Returns a snapshot of the entry named {@code key}, or null if it doesn't
* exist is not currently readable. If a value is returned, it is moved to
* the head of the LRU queue.
*/
public synchronized Snapshot get(String key) throws IOException {
checkNotClosed();
validateKey(key);
Entry entry = lruEntries.get(key);
if (entry == null) {
return null;
}
if (!entry.readable) {
return null;
}
/*
* Open all streams eagerly to guarantee that we see a single published
* snapshot. If we opened streams lazily then the streams could come
* from different edits.
*/
InputStream[] ins = new InputStream[valueCount];
try {
for (int i = 0; i < valueCount; i++) {
ins[i] = new FileInputStream(entry.getCleanFile(i));
}
} catch (FileNotFoundException e) {
// a file must have been deleted manually!
return null;
}
redundantOpCount++;
journalWriter.append(READ + ' ').append(key).append('\n');
if (journalRebuildRequired()) {
executorService.submit(cleanupCallable);
}
return new Snapshot(key, entry.sequenceNumber, ins);
}
    /**
     * Returns an editor for the entry named {@code key}, or null if another
     * edit is in progress. Accepts any snapshot sequence number (no staleness check).
     */
    public Editor edit(String key) throws IOException {
        return edit(key, ANY_SEQUENCE_NUMBER);
    }
    /**
     * Returns an editor for {@code key}, or null if the caller's snapshot is stale
     * (sequence number mismatch) or another edit is already in progress. Creates the
     * entry if it does not exist yet and records a DIRTY journal line.
     */
    private synchronized Editor edit(String key, long expectedSequenceNumber) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (expectedSequenceNumber != ANY_SEQUENCE_NUMBER
                && (entry == null || entry.sequenceNumber != expectedSequenceNumber)) {
            return null; // snapshot is stale
        }
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        } else if (entry.currentEditor != null) {
            return null; // another edit is in progress
        }
        Editor editor = new Editor(entry);
        entry.currentEditor = editor;
        // flush the journal before creating files to prevent file leaks
        journalWriter.write(DIRTY + ' ' + key + '\n');
        journalWriter.flush();
        return editor;
    }
    /**
     * Returns the directory where this cache stores its data. The directory is
     * owned exclusively by the cache and must not be shared.
     */
    public File getDirectory() {
        return directory;
    }
    /**
     * Returns the maximum number of bytes that this cache should use to store
     * its data. Fixed at construction time.
     */
    public long maxSize() {
        return maxSize;
    }
    /**
     * Returns the number of bytes currently being used to store the values in
     * this cache. This may be greater than the max size if a background
     * deletion is pending.
     */
    public synchronized long size() {
        return size;
    }
@SuppressWarnings("ResultOfMethodCallIgnored")
private synchronized void completeEdit(Editor editor, boolean success) throws IOException {
Entry entry = editor.entry;
if (entry.currentEditor != editor) {
throw new IllegalStateException();
}
// if this edit is creating the entry for the first time, every index must have a value
if (success && !entry.readable) {
for (int i = 0; i < valueCount; i++) {
if (!entry.getDirtyFile(i).exists()) {
editor.abort();
throw new IllegalStateException("edit didn't create file " + i);
}
}
}
for (int i = 0; i < valueCount; i++) {
File dirty = entry.getDirtyFile(i);
if (success) {
if (dirty.exists()) {
File clean = entry.getCleanFile(i);
dirty.renameTo(clean);
long oldLength = entry.lengths[i];
long newLength = clean.length();
entry.lengths[i] = newLength;
size = size - oldLength + newLength;
}
} else {
deleteIfExists(dirty);
}
}
redundantOpCount++;
entry.currentEditor = null;
if (entry.readable | success) {
entry.readable = true;
journalWriter.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
if (success) {
entry.sequenceNumber = nextSequenceNumber++;
}
} else {
lruEntries.remove(entry.key);
journalWriter.write(REMOVE + ' ' + entry.key + '\n');
}
if (size > maxSize || journalRebuildRequired()) {
executorService.submit(cleanupCallable);
}
}
/**
* We only rebuild the journal when it will halve the size of the journal
* and eliminate at least 2000 ops.
*/
private boolean journalRebuildRequired() {
final int REDUNDANT_OP_COMPACT_THRESHOLD = 2000;
return redundantOpCount >= REDUNDANT_OP_COMPACT_THRESHOLD
&& redundantOpCount >= lruEntries.size();
}
/**
* Drops the entry for {@code key} if it exists and can be removed. Entries
* actively being edited cannot be removed.
*
* @return true if an entry was removed.
*/
public synchronized boolean remove(String key) throws IOException {
checkNotClosed();
validateKey(key);
Entry entry = lruEntries.get(key);
if (entry == null || entry.currentEditor != null) {
return false;
}
for (int i = 0; i < valueCount; i++) {
File file = entry.getCleanFile(i);
if (!file.delete()) {
throw new IOException("failed to delete " + file);
}
size -= entry.lengths[i];
entry.lengths[i] = 0;
}
redundantOpCount++;
journalWriter.append(REMOVE + ' ').append(key).append('\n');
lruEntries.remove(key);
if (journalRebuildRequired()) {
executorService.submit(cleanupCallable);
}
return true;
}
    /**
     * Returns true if this cache has been closed (no journal writer is active).
     */
    public boolean isClosed() {
        return journalWriter == null;
    }
    /**
     * Ensures the journal writer is available, throwing if the cache is closed.
     * If the writer was lost without the cache being deliberately destroyed, this
     * first tries to resurrect it by rebuilding the journal.
     */
    private void checkNotClosed() {
        // NOTE(review): 'destroied' (sic) distinguishes a deliberate close() from a
        // dropped writer; only the latter is revived here — confirm this is intended.
        if (journalWriter == null && !destroied) {
            try {
                rebuildJournal();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (journalWriter == null) {
            throw new IllegalStateException("cache is closed");
        }
    }
    /**
     * Force buffered operations to the filesystem. Also trims the cache to its
     * size limit before flushing the journal.
     */
    public synchronized void flush() throws IOException {
        checkNotClosed();
        trimToSize();
        journalWriter.flush();
    }
    /**
     * Closes this cache. Stored values will remain on the filesystem. Aborts any
     * in-flight edits, trims to the size limit, and releases the journal writer.
     */
    public synchronized void close() throws IOException {
        if (journalWriter == null) {
            return; // already closed
        }
        destroied = true;
        // Iterate over a copy: aborting an editor can mutate lruEntries.
        for (Entry entry : new ArrayList<>(lruEntries.values())) {
            if (entry.currentEditor != null) {
                entry.currentEditor.abort();
            }
        }
        trimToSize();
        journalWriter.close();
        journalWriter = null;
    }
    /**
     * Evicts least-recently-used entries until {@link #size} fits within
     * {@link #maxSize}, notifying the trim listener for each evicted key.
     */
    private void trimToSize() throws IOException {
        while (size > maxSize) {
            // lruEntries is access-ordered, so the first entry is the least recently used.
            final Map.Entry<String, Entry> toEvict = lruEntries.entrySet().iterator().next();
            remove(toEvict.getKey());
            if (listener != null)
                listener.onTrim(toEvict.getKey());
        }
    }
    /**
     * Closes the cache and deletes all of its stored values. This will delete
     * all files in the cache directory including files that weren't created by
     * the cache. (The index table files skipped by {@link #deleteContents} are preserved.)
     */
    public void delete() throws IOException {
        close();
        deleteContents(directory);
    }
/**
 * Rejects keys that would corrupt the line-oriented journal format.
 *
 * @throws IllegalArgumentException if the key contains a space or newline.
 */
private void validateKey(String key) {
    final boolean hasForbiddenChar =
            key.indexOf(' ') >= 0 || key.indexOf('\n') >= 0 || key.indexOf('\r') >= 0;
    if (hasForbiddenChar) {
        throw new IllegalArgumentException(
                "keys must not contain spaces or newlines: \"" + key + "\"");
    }
}
// Decodes the remaining bytes of the stream as UTF-8 text via readFully.
// NOTE(review): whether the stream is closed depends on readFully (defined
// elsewhere in this file) — confirm before relying on it.
private static String inputStreamToString(InputStream in) throws IOException {
    return readFully(new InputStreamReader(in, UTF_8));
}
/**
 * A snapshot of the values for an entry: an immutable view over the streams
 * that were committed at the time the snapshot was taken.
 */
public final class Snapshot implements Closeable {
    private final String key;
    private final long sequenceNumber;
    private final InputStream[] ins;

    private Snapshot(String entryKey, long entrySequenceNumber, InputStream[] entryStreams) {
        this.key = entryKey;
        this.sequenceNumber = entrySequenceNumber;
        this.ins = entryStreams;
    }

    /**
     * Returns an editor for this snapshot's entry, or null if either the
     * entry has changed since this snapshot was created or if another edit
     * is in progress.
     */
    public Editor edit() throws IOException {
        return DiskLruCache.this.edit(key, sequenceNumber);
    }

    /**
     * Returns the unbuffered stream with the value for {@code index}.
     */
    public InputStream getInputStream(int index) {
        return ins[index];
    }

    /**
     * Returns the string value for {@code index}.
     */
    public String getString(int index) throws IOException {
        return inputStreamToString(getInputStream(index));
    }

    @Override
    public void close() {
        // Release every stream quietly; close() must not throw.
        for (int index = 0; index < ins.length; index++) {
            closeQuietly(ins[index]);
        }
    }
}
/**
 * Registers a listener that is notified each time trimToSize() evicts an
 * entry. Pass null to clear the listener.
 */
public void setOnTrimCacheListener(OnTrimCacheListener listener) {
    this.listener = listener;
}
/**
 * Edits the values for an entry.
 */
public final class Editor {
    private final Entry entry;
    // Set by FaultHidingOutputStream when any write fails; commit() then
    // aborts the edit instead of publishing partial data.
    private boolean hasErrors;

    private Editor(Entry entry) {
        this.entry = entry;
    }

    /**
     * Returns an unbuffered input stream to read the last committed value,
     * or null if no value has been committed.
     */
    public InputStream newInputStream(int index) throws IOException {
        synchronized (DiskLruCache.this) {
            if (entry.currentEditor != this) {
                throw new IllegalStateException();
            }
            if (!entry.readable) {
                return null;
            }
            return new FileInputStream(entry.getCleanFile(index));
        }
    }

    /**
     * Returns the last committed value as a string, or null if no value
     * has been committed.
     */
    public String getString(int index) throws IOException {
        InputStream in = newInputStream(index);
        return in != null ? inputStreamToString(in) : null;
    }

    /**
     * Returns a new unbuffered output stream to write the value at
     * {@code index}. If the underlying output stream encounters errors
     * when writing to the filesystem, this edit will be aborted when
     * {@link #commit} is called. The returned output stream does not throw
     * IOExceptions.
     */
    public OutputStream newOutputStream(int index) throws IOException {
        synchronized (DiskLruCache.this) {
            if (entry.currentEditor != this) {
                throw new IllegalStateException();
            }
            // Writes target the ".tmp" dirty file; presumably promoted to
            // the clean file by completeEdit() on commit (not visible here).
            return new FaultHidingOutputStream(new FileOutputStream(entry.getDirtyFile(index)));
        }
    }

    /**
     * Sets the value at {@code index} to {@code value}.
     */
    public void set(int index, String value) throws IOException {
        Writer writer = null;
        try {
            writer = new OutputStreamWriter(newOutputStream(index), UTF_8);
            writer.write(value);
        } finally {
            closeQuietly(writer);
        }
    }

    /**
     * Commits this edit so it is visible to readers. This releases the
     * edit lock so another edit may be started on the same key.
     */
    public void commit() throws IOException {
        if (hasErrors) {
            // A write failed at some point: discard the dirty data and drop
            // the (now stale) entry entirely.
            completeEdit(this, false);
            remove(entry.key); // the previous entry is stale
        } else {
            completeEdit(this, true);
        }
    }

    /**
     * Aborts this edit. This releases the edit lock so another edit may be
     * started on the same key.
     */
    public void abort() throws IOException {
        completeEdit(this, false);
    }

    // Swallows IOExceptions from the wrapped stream and records them in
    // hasErrors, so callers can write without try/catch; the failure is
    // surfaced when commit() aborts the edit.
    private class FaultHidingOutputStream extends FilterOutputStream {
        private FaultHidingOutputStream(OutputStream out) {
            super(out);
        }

        @Override
        public void write(int oneByte) {
            try {
                out.write(oneByte);
            } catch (IOException e) {
                hasErrors = true;
            }
        }

        @Override
        public void write(@NonNull byte[] buffer, int offset, int length) {
            try {
                // Bypasses FilterOutputStream's byte-at-a-time default.
                out.write(buffer, offset, length);
            } catch (IOException e) {
                hasErrors = true;
            }
        }

        @Override
        public void close() {
            try {
                out.close();
            } catch (IOException e) {
                hasErrors = true;
            }
        }

        @Override
        public void flush() {
            try {
                out.flush();
            } catch (IOException e) {
                hasErrors = true;
            }
        }
    }
}
// Book-keeping for a single cache key: per-slot file lengths, publication
// state, the edit lock, and the snapshot sequence number.
private final class Entry {
    private final String key;
    /**
     * Lengths of this entry's files.
     */
    private final long[] lengths;
    /**
     * True if this entry has ever been published
     */
    private boolean readable;
    /**
     * The ongoing edit or null if this entry is not being edited.
     */
    private Editor currentEditor;
    /**
     * The sequence number of the most recently committed edit to this entry.
     */
    private long sequenceNumber;

    private Entry(String key) {
        this.key = key;
        this.lengths = new long[valueCount];
    }

    // Serializes the slot lengths for the journal, e.g. " 1024 2048".
    public String getLengths() {
        StringBuilder result = new StringBuilder();
        for (long size : lengths) {
            result.append(' ').append(size);
        }
        return result.toString();
    }

    /**
     * Set lengths using decimal numbers like "10123".
     */
    private void setLengths(String[] strings) throws IOException {
        if (strings.length != valueCount) {
            throw invalidLengths(strings);
        }
        try {
            for (int i = 0; i < strings.length; i++) {
                lengths[i] = Long.parseLong(strings[i]);
            }
        } catch (NumberFormatException e) {
            throw invalidLengths(strings);
        }
    }

    // Always throws; declared with an IOException return type so callers can
    // write "throw invalidLengths(...)" and satisfy the compiler.
    private IOException invalidLengths(String[] strings) throws IOException {
        throw new IOException("unexpected journal line: " + Arrays.toString(strings));
    }

    // Committed value file for slot i: "<key>.<i>".
    public File getCleanFile(int i) {
        return new File(directory, key + "." + i);
    }

    // In-progress (uncommitted) value file for slot i: "<key>.<i>.tmp".
    public File getDirtyFile(int i) {
        return new File(directory, key + "." + i + ".tmp");
    }
}
/**
 * Callback invoked when an entry is evicted to honor the cache size limit.
 */
public interface OnTrimCacheListener {
    // Called with the key of the entry that was just evicted.
    void onTrim(String key);
}
}
| |
/*
* Copyright 2014-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.intent;
import com.google.common.base.MoreObjects;
import com.google.common.collect.Sets;
import org.onlab.graph.ScalarWeight;
import org.onlab.graph.Weight;
import org.onosproject.core.GroupId;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.DefaultLink;
import org.onosproject.net.DefaultPath;
import org.onosproject.net.DeviceId;
import org.onosproject.net.ElementId;
import org.onosproject.net.HostId;
import org.onosproject.net.Link;
import org.onosproject.net.NetTestTools;
import org.onosproject.net.NetworkResource;
import org.onosproject.net.Path;
import org.onosproject.net.device.DeviceServiceAdapter;
import org.onosproject.net.flow.FlowId;
import org.onosproject.net.flow.FlowRule;
import org.onosproject.net.flow.FlowRuleExtPayLoad;
import org.onosproject.net.flow.IndexTableId;
import org.onosproject.net.flow.TableId;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.flow.criteria.Criterion;
import org.onosproject.net.flow.criteria.Criterion.Type;
import org.onosproject.net.flow.instructions.Instruction;
import org.onosproject.net.flow.instructions.Instructions;
import org.onosproject.net.flow.instructions.Instructions.MetadataInstruction;
import org.onosproject.net.link.LinkServiceAdapter;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.net.topology.DefaultTopologyEdge;
import org.onosproject.net.topology.DefaultTopologyVertex;
import org.onosproject.net.topology.LinkWeigher;
import org.onosproject.net.topology.PathServiceAdapter;
import org.onosproject.net.topology.TopologyVertex;
import org.onosproject.store.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.onosproject.net.Link.Type.DIRECT;
import static org.onosproject.net.NetTestTools.*;
/**
* Common mocks used by the intent framework tests.
*/
public class IntentTestsMocks {
/**
 * Mock traffic selector class used for satisfying API requirements.
 */
public static class MockSelector implements TrafficSelector {
    // Always empty: this mock never carries any match criteria.
    @Override
    public Set<Criterion> criteria() {
        return new HashSet<>();
    }

    @Override
    public Criterion getCriterion(Type type) {
        return null;
    }
}
/**
 * Mock traffic treatment class used for satisfying API requirements.
 * All instruction lists are empty and all single-instruction getters
 * return null.
 */
public static class MockTreatment implements TrafficTreatment {
    @Override
    public List<Instruction> deferred() {
        return Collections.emptyList();
    }

    @Override
    public List<Instruction> immediate() {
        return Collections.emptyList();
    }

    @Override
    public List<Instruction> allInstructions() {
        return Collections.emptyList();
    }

    @Override
    public Instructions.TableTypeTransition tableTransition() {
        return null;
    }

    @Override
    public boolean clearedDeferred() {
        return false;
    }

    @Override
    public MetadataInstruction writeMetadata() {
        return null;
    }

    @Override
    public Instructions.StatTriggerInstruction statTrigger() {
        return null;
    }

    @Override
    public Instructions.MeterInstruction metered() {
        return null;
    }

    @Override
    public Set<Instructions.MeterInstruction> meters() {
        return Sets.newHashSet();
    }
}
/**
 * Mock path service for creating paths within the test.
 */
public static class MockPathService extends PathServiceAdapter {
    final String[] pathHops;
    final String[] reversePathHops;

    /**
     * Constructor that provides a set of hops to mock.
     *
     * @param pathHops path hops to mock
     */
    public MockPathService(String[] pathHops) {
        this.pathHops = pathHops;
        // Precompute the reverse hop list for queries in the opposite direction.
        String[] reversed = pathHops.clone();
        Collections.reverse(Arrays.asList(reversed));
        reversePathHops = reversed;
    }

    @Override
    public Set<Path> getPaths(ElementId src, ElementId dst) {
        Set<Path> result = new HashSet<>();
        String[] allHops = new String[pathHops.length];
        // Serve the hops forward when src matches the first hop, otherwise
        // serve them reversed.
        if (src.toString().endsWith(pathHops[0])) {
            System.arraycopy(pathHops, 0, allHops, 0, pathHops.length);
        } else {
            System.arraycopy(reversePathHops, 0, allHops, 0, pathHops.length);
        }
        result.add(createPath(src instanceof HostId, dst instanceof HostId, allHops));
        return result;
    }

    @Override
    public Set<Path> getPaths(ElementId src, ElementId dst, LinkWeigher weigher) {
        Set<Path> paths = getPaths(src, dst);
        // A negative weight from the weigher marks the path as unusable:
        // return no paths at all in that case.
        for (Path path : paths) {
            DeviceId srcDevice = path.src().elementId() instanceof DeviceId ? path.src().deviceId() : null;
            DeviceId dstDevice = path.dst().elementId() instanceof DeviceId ? path.dst().deviceId() : null;
            if (srcDevice != null && dstDevice != null) {
                TopologyVertex srcVertex = new DefaultTopologyVertex(srcDevice);
                TopologyVertex dstVertex = new DefaultTopologyVertex(dstDevice);
                Link link = link(src.toString(), 1, dst.toString(), 1);
                Weight weightValue = weigher.weight(new DefaultTopologyEdge(srcVertex, dstVertex, link));
                if (weightValue.isNegative()) {
                    return new HashSet<>();
                }
            }
        }
        return paths;
    }
}
/**
 * Mock path service for creating paths within the test with multiple
 * possible paths.
 */
public static class MockMultiplePathService extends PathServiceAdapter {
    final String[][] pathsHops;

    /**
     * Constructor that provides a set of hops to mock.
     *
     * @param pathHops multiple path hops to mock
     */
    public MockMultiplePathService(String[][] pathHops) {
        this.pathsHops = pathHops;
    }

    @Override
    public Set<Path> getPaths(ElementId src, ElementId dst) {
        final String srcName = src.toString();
        final String dstName = dst.toString();
        // Collect every mocked path whose endpoints match src and dst.
        final Set<Path> candidates = new HashSet<>();
        for (final String[] hops : pathsHops) {
            if (srcName.endsWith(hops[0]) && dstName.endsWith(hops[hops.length - 1])) {
                candidates.add(createPath(src instanceof HostId, dst instanceof HostId, hops));
            }
        }
        // Determine the smallest link count among the candidates.
        int minPathLength = Integer.MAX_VALUE;
        for (final Path candidate : candidates) {
            minPathLength = Math.min(minPathLength, candidate.links().size());
        }
        // Keep only the shortest paths.
        final Set<Path> shortestPaths = new HashSet<>();
        for (final Path candidate : candidates) {
            if (candidate.links().size() <= minPathLength) {
                shortestPaths.add(candidate);
            }
        }
        return shortestPaths;
    }

    @Override
    public Set<Path> getPaths(ElementId src, ElementId dst, LinkWeigher weigher) {
        final Set<Path> paths = getPaths(src, dst);
        // A negative weight from the weigher marks the path set as unusable.
        for (final Path path : paths) {
            final DeviceId srcDevice = path.src().elementId() instanceof DeviceId ? path.src().deviceId() : null;
            final DeviceId dstDevice = path.dst().elementId() instanceof DeviceId ? path.dst().deviceId() : null;
            if (srcDevice != null && dstDevice != null) {
                final TopologyVertex srcVertex = new DefaultTopologyVertex(srcDevice);
                final TopologyVertex dstVertex = new DefaultTopologyVertex(dstDevice);
                final Link link = link(src.toString(), 1, dst.toString(), 1);
                final Weight weightValue = weigher.weight(new DefaultTopologyEdge(srcVertex, dstVertex, link));
                if (weightValue.isNegative()) {
                    return new HashSet<>();
                }
            }
        }
        return paths;
    }
}
/**
 * Mock path service for creating paths within the test.
 *
 */
public static class Mp2MpMockPathService extends PathServiceAdapter {
    final String[] pathHops;
    final String[] reversePathHops;

    /**
     * Constructor that provides a set of hops to mock.
     *
     * @param pathHops path hops to mock
     */
    public Mp2MpMockPathService(String[] pathHops) {
        this.pathHops = pathHops;
        String[] reversed = pathHops.clone();
        Collections.reverse(Arrays.asList(reversed));
        reversePathHops = reversed;
    }

    @Override
    public Set<Path> getPaths(ElementId src, ElementId dst) {
        Set<Path> result = new HashSet<>();
        // Build src -> [mocked hops] -> dst, bracketing the configured hops
        // with the query's own endpoints.
        String[] allHops = new String[pathHops.length + 2];
        allHops[0] = src.toString();
        allHops[allHops.length - 1] = dst.toString();
        if (pathHops.length != 0) {
            System.arraycopy(pathHops, 0, allHops, 1, pathHops.length);
        }
        result.add(createPath(allHops));
        return result;
    }

    @Override
    public Set<Path> getPaths(ElementId src, ElementId dst, LinkWeigher weigher) {
        final Set<Path> paths = getPaths(src, dst);
        // A negative weight from the weigher marks the path set as unusable.
        for (Path path : paths) {
            final DeviceId srcDevice = path.src().elementId() instanceof DeviceId ? path.src().deviceId() : null;
            final DeviceId dstDevice = path.dst().elementId() instanceof DeviceId ? path.dst().deviceId() : null;
            if (srcDevice != null && dstDevice != null) {
                final TopologyVertex srcVertex = new DefaultTopologyVertex(srcDevice);
                final TopologyVertex dstVertex = new DefaultTopologyVertex(dstDevice);
                final Link link = link(src.toString(), 1, dst.toString(), 1);
                final Weight weightValue = weigher.weight(new DefaultTopologyEdge(srcVertex, dstVertex, link));
                if (weightValue.isNegative()) {
                    return new HashSet<>();
                }
            }
        }
        return paths;
    }
}
/**
 * Mock active and direct link.
 */
public static class FakeLink extends DefaultLink {
    /**
     * Constructor that provides source and destination of the fake link.
     *
     * @param src Source connect point of the fake link
     * @param dst Destination connect point of the fake link
     */
    public FakeLink(ConnectPoint src, ConnectPoint dst) {
        // No provider id; always a DIRECT link in ACTIVE state.
        super(null, src, dst, DIRECT, Link.State.ACTIVE);
    }
}
/**
 * Mock path service for creating paths for MP2SP intent tests, returning
 * pre-determined paths.
 */
public static class FixedMP2MPMockPathService extends PathServiceAdapter {
    final String[] pathHops;
    // The four fixed device ids this service knows how to connect.
    public static final String DPID_1 = "of:s1";
    public static final String DPID_2 = "of:s2";
    public static final String DPID_3 = "of:s3";
    public static final String DPID_4 = "of:s4";

    /**
     * Constructor that provides a set of hops to mock.
     *
     * @param pathHops path hops to mock
     */
    public FixedMP2MPMockPathService(String[] pathHops) {
        this.pathHops = pathHops;
    }

    @Override
    public Set<Path> getPaths(ElementId src, ElementId dst) {
        List<Link> links = new ArrayList<>();
        Set<Path> result = new HashSet<>();
        ProviderId providerId = new ProviderId("of", "foo");
        DefaultPath path;
        // Only four (src, dst) combinations are recognized; every path is
        // routed through the first configured hop. Any other pair yields an
        // empty set.
        if (src.toString().equals(DPID_1) && dst.toString().equals(DPID_4)) {
            links.add(NetTestTools.linkNoPrefixes(src.toString(), 2, pathHops[0], 1));
            links.add(NetTestTools.linkNoPrefixes(pathHops[0], 2, dst.toString(), 1));
        } else if (src.toString().equals(DPID_2) && dst.toString().equals(DPID_4)) {
            links.add(NetTestTools.linkNoPrefixes(src.toString(), 2, pathHops[0], 3));
            links.add(NetTestTools.linkNoPrefixes(pathHops[0], 2, dst.toString(), 1));
        } else if (src.toString().equals(DPID_4) && dst.toString().equals(DPID_1)) {
            links.add(NetTestTools.linkNoPrefixes(src.toString(), 2, pathHops[0], 1));
            links.add(NetTestTools.linkNoPrefixes(pathHops[0], 2, dst.toString(), 1));
        } else if (src.toString().equals(DPID_4) && dst.toString().equals(DPID_2)) {
            links.add(NetTestTools.linkNoPrefixes(src.toString(), 2, pathHops[0], 1));
            links.add(NetTestTools.linkNoPrefixes(pathHops[0], 3, dst.toString(), 1));
        } else {
            return result;
        }
        path = new DefaultPath(providerId, links, ScalarWeight.toWeight(3));
        result.add(path);
        return result;
    }

    @Override
    public Set<Path> getPaths(ElementId src, ElementId dst, LinkWeigher weigher) {
        final Set<Path> paths = getPaths(src, dst);
        // A negative weight from the weigher marks the path set as unusable.
        for (Path path : paths) {
            final DeviceId srcDevice = path.src().elementId() instanceof DeviceId ? path.src().deviceId() : null;
            final DeviceId dstDevice = path.dst().elementId() instanceof DeviceId ? path.dst().deviceId() : null;
            if (srcDevice != null && dstDevice != null) {
                final TopologyVertex srcVertex = new DefaultTopologyVertex(srcDevice);
                final TopologyVertex dstVertex = new DefaultTopologyVertex(dstDevice);
                final Link link = link(src.toString(), 1, dst.toString(), 1);
                final Weight weightValue = weigher.weight(new DefaultTopologyEdge(srcVertex, dstVertex, link));
                if (weightValue.isNegative()) {
                    return new HashSet<>();
                }
            }
        }
        return paths;
    }
}
/**
 * Mock link service for getting links to check path availability
 * when a suggested path is submitted.
 */
public static class MockLinkService extends LinkServiceAdapter {
    final String[][] linksHops;

    /**
     * Constructor that provides a set of links (as a list of hops).
     *
     * @param linksHops links to mock (each link given as a set of hops)
     */
    public MockLinkService(String[][] linksHops) {
        this.linksHops = linksHops;
    }

    @Override
    public Set<Link> getLinks() {
        // Arrays.stream streams the array directly instead of wrapping it
        // in a redundant List first.
        return Arrays.stream(linksHops)
                .map(path -> createPath(path).links())
                .flatMap(List::stream)
                .collect(Collectors.toSet());
    }

    @Override
    public Set<Link> getLinks(ConnectPoint connectPoint) {
        // A link is attached to the connect point if either end touches
        // the point's device.
        return getLinks().stream()
                .filter(link -> link.src().deviceId().equals(connectPoint.deviceId())
                        || link.dst().deviceId().equals(connectPoint.deviceId()))
                .collect(Collectors.toSet());
    }
}
// Shared stateless mock selector/treatment singletons reused by MockFlowRule.
private static final IntentTestsMocks.MockSelector SELECTOR =
        new IntentTestsMocks.MockSelector();
private static final IntentTestsMocks.MockTreatment TREATMENT =
        new IntentTestsMocks.MockTreatment();
/**
 * Mock flow rule with a fixed device, shared mock selector/treatment, and
 * an auto-incrementing id.
 */
public static class MockFlowRule implements FlowRule {
    static int nextId = 0;

    int priority;
    IndexTableId tableId;
    long timestamp;
    int id;
    FlowRuleExtPayLoad payLoad;

    public MockFlowRule(int priority) {
        this.priority = priority;
        this.tableId = DEFAULT_TABLE;
        this.timestamp = System.currentTimeMillis();
        this.id = nextId++;
        this.payLoad = null;
    }

    public MockFlowRule(int priority, FlowRuleExtPayLoad payLoad) {
        this.priority = priority;
        // Fix: initialize the table id as the other constructor does;
        // previously it was left null, so tableId() threw a
        // NullPointerException for rules built with a payload.
        this.tableId = DEFAULT_TABLE;
        this.timestamp = System.currentTimeMillis();
        this.id = nextId++;
        this.payLoad = payLoad;
    }

    @Override
    public FlowId id() {
        return FlowId.valueOf(id);
    }

    @Override
    public short appId() {
        return 0;
    }

    @Override
    public GroupId groupId() {
        return new GroupId(0);
    }

    @Override
    public int priority() {
        return priority;
    }

    @Override
    public DeviceId deviceId() {
        return did("1");
    }

    @Override
    public TrafficSelector selector() {
        return SELECTOR;
    }

    @Override
    public TrafficTreatment treatment() {
        return TREATMENT;
    }

    @Override
    public int timeout() {
        return 0;
    }

    @Override
    public int hardTimeout() {
        return 0;
    }

    @Override
    public FlowRemoveReason reason() {
        return FlowRemoveReason.NO_REASON;
    }

    @Override
    public boolean isPermanent() {
        return false;
    }

    @Override
    public int hashCode() {
        // NOTE(review): hashing by priority while equals() compares
        // timestamp/id is safe only because ids are unique per instance;
        // revisit (e.g. Objects.hash(timestamp, id)) if that ever changes.
        return priority;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final MockFlowRule other = (MockFlowRule) obj;
        return Objects.equals(this.timestamp, other.timestamp) &&
                this.id == other.id;
    }

    @Override
    public boolean exactMatch(FlowRule rule) {
        return this.equals(rule);
    }

    @Override
    public int tableId() {
        return tableId.id();
    }

    @Override
    public TableId table() {
        return tableId;
    }

    @Override
    public FlowRuleExtPayLoad payLoad() {
        return payLoad;
    }
}
/**
 * Minimal intent carrying only an identifying number, for framework tests.
 */
public static class MockIntent extends Intent {
    // Source of unique ids handed out by nextId().
    private static AtomicLong counter = new AtomicLong(0);
    private final Long number;

    public MockIntent(Long number) {
        super(NetTestTools.APP_ID, null, Collections.emptyList(),
                Intent.DEFAULT_INTENT_PRIORITY, null);
        this.number = number;
    }

    public MockIntent(Long number, Collection<NetworkResource> resources) {
        super(NetTestTools.APP_ID, null, resources, Intent.DEFAULT_INTENT_PRIORITY, null);
        this.number = number;
    }

    public Long number() {
        return number;
    }

    // Returns a fresh, monotonically increasing id.
    public static Long nextId() {
        return counter.getAndIncrement();
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("id", id())
                .add("appId", appId())
                .toString();
    }
}
/**
 * Simple integer-valued timestamp used by store tests.
 */
public static class MockTimestamp implements Timestamp {
    final int value;

    public MockTimestamp(int value) {
        this.value = value;
    }

    @Override
    public int compareTo(Timestamp o) {
        if (!(o instanceof MockTimestamp)) {
            // NOTE(review): returning -1 for foreign Timestamp types breaks
            // compareTo antisymmetry; kept for mock compatibility.
            return -1;
        }
        MockTimestamp that = (MockTimestamp) o;
        // Integer.compare avoids the overflow risk of the subtraction idiom
        // previously used here (this.value - that.value).
        return Integer.compare(this.value, that.value);
    }

    @Override
    public int hashCode() {
        return value;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof MockTimestamp) {
            return this.compareTo((MockTimestamp) obj) == 0;
        }
        return false;
    }
}
/**
 * Mocks the device service so that a device appears available in the test.
 */
public static class MockDeviceService extends DeviceServiceAdapter {
    // Every device id is reported as available.
    @Override
    public boolean isAvailable(DeviceId deviceId) {
        return true;
    }
}
}
| |
/*
* Copyright 2016 The gRPC Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grpc.testing;
import static com.google.common.truth.Truth.assertThat;
import io.grpc.ManagedChannel;
import io.grpc.Server;
import io.grpc.stub.StreamObserver;
import io.grpc.testing.protobuf.SimpleRequest;
import io.grpc.testing.protobuf.SimpleResponse;
import io.grpc.testing.protobuf.SimpleServiceGrpc;
import java.util.Collection;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.junit.runners.model.Statement;
/** Unit tests for {@link GrpcServerRule}. */
@RunWith(JUnit4.class)
public class GrpcServerRuleTest {
    // Two rules under test: one using the default executor, one running
    // calls on the test thread via directExecutor().
    @Rule public final GrpcServerRule grpcServerRule1 = new GrpcServerRule();
    @Rule public final GrpcServerRule grpcServerRule2 = new GrpcServerRule().directExecutor();

    @Test
    public void serverAndChannelAreStarted_withoutDirectExecutor() {
        assertThat(grpcServerRule1.getServer().isShutdown()).isFalse();
        assertThat(grpcServerRule1.getServer().isTerminated()).isFalse();
        assertThat(grpcServerRule1.getChannel().isShutdown()).isFalse();
        assertThat(grpcServerRule1.getChannel().isTerminated()).isFalse();
        assertThat(grpcServerRule1.getServerName()).isNotNull();
        assertThat(grpcServerRule1.getServiceRegistry()).isNotNull();
    }

    @Test
    public void serverAllowsServicesToBeAddedViaServiceRegistry_withoutDirectExecutor() {
        TestServiceImpl testService = new TestServiceImpl();
        grpcServerRule1.getServiceRegistry().addService(testService);
        SimpleServiceGrpc.SimpleServiceBlockingStub stub =
                SimpleServiceGrpc.newBlockingStub(grpcServerRule1.getChannel());
        SimpleRequest request1 = SimpleRequest.getDefaultInstance();
        SimpleRequest request2 = SimpleRequest.newBuilder().build();
        stub.unaryRpc(request1);
        stub.unaryRpc(request2);
        assertThat(testService.unaryCallRequests).containsExactly(request1, request2);
    }

    @Test
    public void serviceIsNotRunOnSameThreadAsTest_withoutDirectExecutor() {
        TestServiceImpl testService = new TestServiceImpl();
        grpcServerRule1.getServiceRegistry().addService(testService);
        SimpleServiceGrpc.SimpleServiceBlockingStub stub =
                SimpleServiceGrpc.newBlockingStub(grpcServerRule1.getChannel());
        stub.serverStreamingRpc(SimpleRequest.getDefaultInstance());
        // With a real executor the service runs on a server worker thread.
        assertThat(testService.lastServerStreamingRpcThread).isNotEqualTo(Thread.currentThread());
    }

    // directExecutor() may only be called before the server starts.
    @Test(expected = IllegalStateException.class)
    public void callDirectExecutorNotAtRuleInstantiation_withoutDirectExecutor() {
        grpcServerRule1.directExecutor();
    }

    @Test
    public void serverAndChannelAreStarted_withDirectExecutor() {
        assertThat(grpcServerRule2.getServer().isShutdown()).isFalse();
        assertThat(grpcServerRule2.getServer().isTerminated()).isFalse();
        assertThat(grpcServerRule2.getChannel().isShutdown()).isFalse();
        assertThat(grpcServerRule2.getChannel().isTerminated()).isFalse();
        assertThat(grpcServerRule2.getServerName()).isNotNull();
        assertThat(grpcServerRule2.getServiceRegistry()).isNotNull();
    }

    @Test
    public void serverAllowsServicesToBeAddedViaServiceRegistry_withDirectExecutor() {
        TestServiceImpl testService = new TestServiceImpl();
        grpcServerRule2.getServiceRegistry().addService(testService);
        SimpleServiceGrpc.SimpleServiceBlockingStub stub =
                SimpleServiceGrpc.newBlockingStub(grpcServerRule2.getChannel());
        SimpleRequest request1 = SimpleRequest.getDefaultInstance();
        SimpleRequest request2 = SimpleRequest.newBuilder().build();
        stub.unaryRpc(request1);
        stub.unaryRpc(request2);
        assertThat(testService.unaryCallRequests).containsExactly(request1, request2);
    }

    @Test
    public void serviceIsRunOnSameThreadAsTest_withDirectExecutor() {
        TestServiceImpl testService = new TestServiceImpl();
        grpcServerRule2.getServiceRegistry().addService(testService);
        SimpleServiceGrpc.SimpleServiceBlockingStub stub =
                SimpleServiceGrpc.newBlockingStub(grpcServerRule2.getChannel());
        stub.serverStreamingRpc(SimpleRequest.getDefaultInstance());
        // With directExecutor() the service runs inline on the caller thread.
        assertThat(testService.lastServerStreamingRpcThread).isEqualTo(Thread.currentThread());
    }

    @Test
    public void serverAndChannelAreShutdownAfterRule() throws Throwable {
        GrpcServerRule grpcServerRule = new GrpcServerRule();
        // Before the rule has been executed, all of its resources should be null.
        assertThat(grpcServerRule.getChannel()).isNull();
        assertThat(grpcServerRule.getServer()).isNull();
        assertThat(grpcServerRule.getServerName()).isNull();
        assertThat(grpcServerRule.getServiceRegistry()).isNull();
        // The TestStatement stores the channel and server instances so that we can inspect them after
        // the rule cleans up.
        TestStatement statement = new TestStatement(grpcServerRule);
        grpcServerRule.apply(statement, null).evaluate();
        // Ensure that the stored channel and server instances were shut down.
        assertThat(statement.channel.isShutdown()).isTrue();
        assertThat(statement.server.isShutdown()).isTrue();
        // All references to the resources that we created should be set to null.
        assertThat(grpcServerRule.getChannel()).isNull();
        assertThat(grpcServerRule.getServer()).isNull();
        assertThat(grpcServerRule.getServerName()).isNull();
        assertThat(grpcServerRule.getServiceRegistry()).isNull();
    }

    // Captures the channel/server created while the rule is active so the
    // test can inspect them after the rule has torn down.
    private static class TestStatement extends Statement {
        private final GrpcServerRule grpcServerRule;
        private ManagedChannel channel;
        private Server server;

        private TestStatement(GrpcServerRule grpcServerRule) {
            this.grpcServerRule = grpcServerRule;
        }

        @Override
        public void evaluate() throws Throwable {
            channel = grpcServerRule.getChannel();
            server = grpcServerRule.getServer();
        }
    }

    // Records received unary requests and the thread the streaming RPC ran on.
    private static class TestServiceImpl extends SimpleServiceGrpc.SimpleServiceImplBase {
        private final Collection<SimpleRequest> unaryCallRequests =
                new ConcurrentLinkedQueue<>();
        private volatile Thread lastServerStreamingRpcThread;

        @Override
        public void serverStreamingRpc(
                SimpleRequest request, StreamObserver<SimpleResponse> responseObserver) {
            lastServerStreamingRpcThread = Thread.currentThread();
            responseObserver.onNext(SimpleResponse.getDefaultInstance());
            responseObserver.onCompleted();
        }

        @Override
        public void unaryRpc(
                SimpleRequest request, StreamObserver<SimpleResponse> responseObserver) {
            unaryCallRequests.add(request);
            responseObserver.onNext(SimpleResponse.getDefaultInstance());
            responseObserver.onCompleted();
        }
    }
}
| |
package org.ereunao.spider;
import com.beust.jcommander.JCommander;
import com.google.common.net.InternetDomainName;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.util.Modules;
import org.ereunao.ereunao.api.extension.Extension;
import org.ereunao.ereunao.api.extension.FormatHandler;
import org.ereunao.ereunao.api.extension.TransportHandler;
import org.ereunao.ereunao.api.io.ConductorCommunicator;
import org.ereunao.ereunao.api.io.LibrarianCommunicator;
import org.ereunao.ereunao.api.io.WorkerCommunicator;
import org.ereunao.ereunao.api.resource.CatalogueEntry;
import org.ereunao.ereunao.api.resource.Host;
import org.ereunao.ereunao.api.resource.Status;
import org.ereunao.ereunao.api.service.CatalogueService;
import org.ereunao.ereunao.api.service.HostService;
import org.ereunao.ereunao.api.service.IndexService;
import org.ereunao.ereunao.api.service.ReferenceService;
import org.ereunao.ereunao.api.worker.Worker;
import org.ereunao.spider.common.CustomCommander;
import org.ereunao.spider.common.ExtensionLoader;
import org.ereunao.spider.common.PropertyFileDefaultProvider;
import org.ereunao.spider.conductor.ConductorRunner;
import org.ereunao.spider.gumshoe.WorkerRunner;
import org.ereunao.spider.gumshoe.util.FormatHandlerSet;
import org.ereunao.spider.gumshoe.util.TransportHandlerSet;
import org.ereunao.spider.librarian.LibrarianRunner;
import io.mola.galimatias.GalimatiasParseException;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.logging.Level;
import org.apache.commons.io.FileUtils;
import org.reflections.Reflections;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Main {
private static final Logger LOGGER = LoggerFactory.getLogger(Main.class);
private static final String VERSION = "0.1.0-SNAPSHOT";
// Command line parser; rebuilt once extensions have registered their options.
private JCommander jc;
private final MasterConfiguration mc = new MasterConfiguration();
// Extensions discovered on the extensions class path.
private Set<Extension> extensions;
// Configuration objects contributed by the loaded extensions.
private Set<Object> configs = new HashSet<>();
private Injector injector;
/**
 * Parses the command line (twice: once tolerantly to locate the properties
 * file and extensions, once fully once extension options are registered),
 * wires up Guice, loads the seeds, and runs the requested components.
 *
 * @param args raw command line arguments
 * @throws Exception if parsing, extension loading or a component fails
 */
public Main(final String[] args) throws Exception {
    // First run: accept unknown options so extension-specific flags do not
    // abort this bootstrap parse.
    jc = new CustomCommander(mc);
    jc.setAcceptUnknownOptions(true);
    jc.parseWithoutValidation(args);
    // Second run
    String propertiesFile = getApplicationPropertiesFile();
    jc = new CustomCommander();
    jc.addObject(mc);
    extensions = loadExtensions();
    if (propertiesFile != null) {
        // Reuse the already-resolved path instead of recomputing it.
        jc.setDefaultProvider(new PropertyFileDefaultProvider(propertiesFile));
    }
    jc.setColumnSize(80);
    jc.parse(args);
    if (mc.help) {
        jc.usage();
        return;
    }
    if (mc.version) {
        System.out.println(VERSION);
        return;
    }
    Set<Module> extensionModules = new HashSet<>();
    for (Extension ex : extensions) {
        extensionModules.add(ex.getModule());
    }
    // Extension modules may override core bindings.
    injector = Guice.createInjector(
            Modules.override(new CoreModule(jc, mc, extensions, configs))
                    .with(extensionModules)
    );
    // Load the seeds
    loadSeeds();
    // Execute the requested component(s). getInstance(Class<T>) is already
    // typed, so the previous explicit casts were redundant.
    if (mc.components.contains("conductor")) {
        injector.getInstance(ConductorRunner.class).run();
    }
    if (mc.components.contains("librarian")) {
        injector.getInstance(LibrarianRunner.class).run();
    }
    if (mc.components.contains("worker")) {
        injector.getInstance(WorkerRunner.class).run();
    }
}
private String getApplicationPropertiesFile() {
if (mc.configFile != null) {
return mc.configFile.getAbsolutePath();
}
if (mc.homeDir != null) {
return new File(mc.homeDir, "application.properties").getAbsolutePath();
}
File fallback = new File("application.properties");
if (fallback.exists()) {
return fallback.getAbsolutePath();
}
return null;
}
private Set<Extension> loadExtensions() throws MalformedURLException, ClassNotFoundException, InstantiationException, IllegalAccessException {
// Attempt to load the extensions
File extDir;
if (mc.extensionsDir != null) {
extDir = mc.extensionsDir;
} else if (mc.homeDir != null) {
extDir = new File(mc.homeDir, "extensions");
} else {
extDir = new File("extensions").getAbsoluteFile();
}
LOGGER.info("Extension dir: {}", extDir);
// Load extensions
ExtensionLoader loader = new ExtensionLoader(new URL[0], Main.class.getClassLoader());
loader.add(extDir.getAbsolutePath());
// Load the extensions and give them the opportunity to add command line
// options
ConfigurationBuilder rcb = new ConfigurationBuilder();
rcb.setClassLoaders(new ClassLoader[]{ loader, getClass().getClassLoader() });
rcb.addUrls(ClasspathHelper.forClassLoader(loader, getClass().getClassLoader()));
Reflections reflections = new Reflections(rcb);
Set<Class<? extends Extension>> extensions = reflections.getSubTypesOf(Extension.class);
Set<Extension> result = new HashSet<>();
for (Class<? extends Extension> ext : extensions) {
Extension e = ext.newInstance();
LOGGER.info("Loading extension: {}", e.getClass().getName());
Object config = e.getConfigurationObject();
jc.addObject(config);
configs.add(config);
result.add(e);
}
return result;
}
private TransportHandlerSet buildTransportHandlerSet() {
Collection<Class<? extends TransportHandler>> handlerClasses;
if (mc.workerTransportHandlers != null
&& !mc.workerTransportHandlers.isEmpty()) {
handlerClasses = mc.workerTransportHandlers;
} else {
ConfigurationBuilder rcb = new ConfigurationBuilder();
rcb.setClassLoaders(new ClassLoader[]{ getClass().getClassLoader() });
rcb.addUrls(ClasspathHelper.forClassLoader(getClass().getClassLoader()));
Reflections reflections = new Reflections(rcb);
handlerClasses = reflections.getSubTypesOf(TransportHandler.class);
}
// Collection<TransportHandler> result = new ArrayList<>();
// for (Class<? extends TransportHandler> handler : handlerClasses) {
// result.add((TransportHandler)injector.get(handler));
// }
return new TransportHandlerSet(handlerClasses);
}
private FormatHandlerSet buildFormatHandlerSet() {
Collection<Class<? extends FormatHandler>> handlerClasses;
if (mc.workerTransportHandlers != null
&& !mc.workerTransportHandlers.isEmpty()) {
handlerClasses = mc.workerFormatHandlers;
} else {
ConfigurationBuilder rcb = new ConfigurationBuilder();
rcb.setClassLoaders(new ClassLoader[]{ getClass().getClassLoader() });
rcb.addUrls(ClasspathHelper.forClassLoader(getClass().getClassLoader()));
Reflections reflections = new Reflections(rcb);
handlerClasses = reflections.getSubTypesOf(FormatHandler.class);
}
Collection<FormatHandler> result = new ArrayList<>();
for (Class<? extends FormatHandler> handler : handlerClasses) {
try {
result.add((FormatHandler)handler.newInstance());
} catch (IllegalAccessException | InstantiationException ex) {}
}
return new FormatHandlerSet(result);
}
private void loadSeeds() throws IOException {
Collection<File> seedFiles = new ArrayList<>();
if (mc.conductorSeedFiles != null && !mc.conductorSeedFiles.isEmpty()) {
seedFiles = mc.conductorSeedFiles;
} else {
seedFiles.add(new File(mc.getHomeDirectory(), "seeds.txt"));
}
if (seedFiles.isEmpty()) {
return;
}
final HostService hostService = injector.getInstance(HostService.class);
final CatalogueService catalogueService = injector.getInstance(CatalogueService.class);
for (File seedFile : seedFiles) {
if (!seedFile.exists()) {
LOGGER.warn("Seed file {} does not exist", seedFile);
continue;
}
Iterator<String> it = FileUtils.lineIterator(seedFile);
while(it.hasNext()) {
String line = it.next();
if (line == null || line.trim().isEmpty()
|| line.trim().startsWith("#")) {
continue;
}
line = line.trim();
io.mola.galimatias.URL url;
try {
url = io.mola.galimatias.URL.parse(line);
} catch (GalimatiasParseException ex) {
continue;
}
String tld = InternetDomainName.from(url.host().toString()).topPrivateDomain().toString();
Host host = hostService.findByDomain(tld);
if (host == null) {
host = new Host();
host.host = tld;
}
CatalogueEntry seed = new CatalogueEntry();
seed.location = line;
seed.initialDepth = 0;
seed.status = Status.PENDING;
seed.host = host;
if (catalogueService.findForLocation(seed.location) == null) {
LOGGER.info("Found seed: {}", line);
catalogueService.save(seed);
}
}
}
}
//~ Entry point ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
public static void main(String[] args) throws Exception {
new Main(args);
}
public class CoreModule extends AbstractModule {
private final JCommander jc;
private final MasterConfiguration mc;
private final Set<Extension> extensions;
private final Set<Object> configs;
public CoreModule(final JCommander jc, final MasterConfiguration mc,
final Set<Extension> extensions, final Set<Object> configs) {
this.jc = jc;
this.mc = mc;
this.extensions = extensions;
this.configs = configs;
}
@Override
protected void configure() {
bind(MasterConfiguration.class).toInstance(mc);
for (Object config : configs) {
bind((Class)config.getClass()).toInstance(config);
}
// Build the DI stuff
bind(TransportHandlerSet.class).toInstance(Main.this.buildTransportHandlerSet());
bind(FormatHandlerSet.class).toInstance(Main.this.buildFormatHandlerSet());
// bind(WorkerCommunicator.class).to((Class<? extends WorkerCommunicator>)Class.forName("org.ereunao.ereunao.communicator.amqp.service.AMQPWorkerCommunicator"));
// bind(LibrarianCommunicator.class).to((Class<? extends LibrarianCommunicator>)Class.forName("org.ereunao.ereunao.communicator.amqp.service.AMQPLibrarianCommunicator"));
// bind(ConductorCommunicator.class).toProvider(mc.communicatorProvider);
bind(CatalogueService.class).toProvider(mc.conductorCatalogueServiceProvider);
bind(HostService.class).toProvider(mc.conductorHostServiceProvider);
bind(ReferenceService.class).toProvider(mc.conductorReferenceServiceProvider);
bind(IndexService.class).toProvider(mc.librarianIndexServiceProvider);
bind(Worker.class).toProvider(mc.workerProvider);
}
}
}
| |
/*
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.market.sensitivity;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.function.DoubleUnaryOperator;
import java.util.stream.Collectors;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.ImmutableBean;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaBean;
import org.joda.beans.MetaProperty;
import org.joda.beans.gen.BeanDefinition;
import org.joda.beans.gen.PropertyDefinition;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import org.joda.beans.impl.direct.DirectPrivateBeanBuilder;
import com.google.common.collect.ImmutableList;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.currency.FxConvertible;
import com.opengamma.strata.basics.currency.FxRateProvider;
import com.opengamma.strata.collect.Guavate;
/**
* A collection of point sensitivities.
* <p>
* Contains a list of {@linkplain PointSensitivity point sensitivity} objects,
* each referring to a specific point on a curve that was queried.
* The order of the list has no specific meaning, but does allow duplicates.
* <p>
* For example, the point sensitivity for present value on a FRA might contain
* two entries, one for the Ibor forward curve and one for the discount curve.
* Each entry identifies the date that the curve was queried and the resulting multiplier.
* <p>
* When creating an instance, consider using {@link MutablePointSensitivities}.
* <p>
* One way of viewing this class is as a {@code Map} from a specific sensitivity
* key to a {@code double} sensitivity value. However, instead or being structured
* as a {@code Map}, the data is structured as a {@code List}, with the key and
* value in each entry.
*/
@BeanDefinition(builderScope = "private")
public final class PointSensitivities
    implements FxConvertible<PointSensitivities>, ImmutableBean, Serializable {
  /**
   * An empty instance.
   */
  private static final PointSensitivities EMPTY = new PointSensitivities(ImmutableList.of());
  /**
   * The point sensitivities.
   * <p>
   * Each entry includes details of the market data query it relates to.
   */
  @PropertyDefinition(validate = "notNull")
  private final ImmutableList<PointSensitivity> sensitivities;
  //-------------------------------------------------------------------------
  /**
   * An empty sensitivity instance.
   *
   * @return the empty instance
   */
  public static PointSensitivities empty() {
    return EMPTY;
  }
  /**
   * Obtains an instance from an array of sensitivity entries.
   *
   * @param sensitivity  the sensitivity entry
   * @return the sensitivities instance
   */
  public static PointSensitivities of(PointSensitivity... sensitivity) {
    return PointSensitivities.of(ImmutableList.copyOf(sensitivity));
  }
  /**
   * Obtains an instance from a list of sensitivity entries.
   *
   * @param sensitivities  the list of sensitivity entries
   * @return the sensitivities instance
   */
  @SuppressWarnings("unchecked")
  // cast is safe: the constructor immediately defensively copies the list
  // into an ImmutableList<PointSensitivity>
  public static PointSensitivities of(List<? extends PointSensitivity> sensitivities) {
    return new PointSensitivities((List<PointSensitivity>) sensitivities);
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the number of sensitivity entries.
   *
   * @return the size of the internal list of point sensitivities
   */
  public int size() {
    return sensitivities.size();
  }
  //-------------------------------------------------------------------------
  /**
   * Combines this point sensitivities with another instance.
   * <p>
   * This returns a new sensitivity instance with a combined list of point sensitivities.
   * This instance is immutable and unaffected by this method.
   * The result may contain duplicate point sensitivities.
   *
   * @param other  the other point sensitivities
   * @return a {@code PointSensitivities} based on this one, with the other instance added
   */
  public PointSensitivities combinedWith(PointSensitivities other) {
    // simple concatenation; duplicates are resolved later by normalized()
    return new PointSensitivities(ImmutableList.<PointSensitivity>builder()
        .addAll(sensitivities)
        .addAll(other.sensitivities)
        .build());
  }
  //-------------------------------------------------------------------------
  /**
   * Multiplies the sensitivities in this instance by the specified factor.
   * <p>
   * The result will consist of the same entries, but with each sensitivity value multiplied.
   * This instance is immutable and unaffected by this method.
   *
   * @param factor  the multiplicative factor
   * @return a {@code PointSensitivities} based on this one, with each sensitivity multiplied by the factor
   */
  public PointSensitivities multipliedBy(double factor) {
    return mapSensitivities(s -> s * factor);
  }
  /**
   * Applies an operation to the sensitivities in this instance.
   * <p>
   * The result will consist of the same entries, but with the operator applied to each sensitivity value.
   * This instance is immutable and unaffected by this method.
   * <p>
   * This is used to apply a mathematical operation to the sensitivity values.
   * For example, the operator could multiply the sensitivities by a constant, or take the inverse.
   * <pre>
   *   inverse = base.mapSensitivities(value -> 1 / value);
   * </pre>
   *
   * @param operator  the operator to be applied to the sensitivities
   * @return a {@code PointSensitivities} based on this one, with the operator applied to the sensitivity values
   */
  public PointSensitivities mapSensitivities(DoubleUnaryOperator operator) {
    return sensitivities.stream()
        .map(s -> s.withSensitivity(operator.applyAsDouble(s.getSensitivity())))
        .collect(
            Collectors.collectingAndThen(
                Guavate.toImmutableList(),
                PointSensitivities::new));
  }
  /**
   * Normalizes the point sensitivities by sorting and merging.
   * <p>
   * The list of sensitivities is sorted and then merged.
   * Any two entries that represent the same curve query are merged.
   * For example, if there are two point sensitivities that were created based on the same curve,
   * currency and fixing date, then the entries are combined, summing the sensitivity value.
   * <p>
   * The intention is that normalization occurs after gathering all the point sensitivities.
   * <p>
   * This instance is immutable and unaffected by this method.
   *
   * @return a {@code PointSensitivities} based on this one, with the sensitivities normalized
   */
  public PointSensitivities normalized() {
    if (sensitivities.isEmpty()) {
      return this;
    }
    // insertion sort by key; insert() merges entries whose keys compare equal
    List<PointSensitivity> mutable = new ArrayList<>();
    for (PointSensitivity sensi : sensitivities) {
      insert(mutable, sensi);
    }
    return new PointSensitivities(mutable);
  }
  //-----------------------------------------------------------------------
  /**
   * Returns a mutable version of this object.
   * <p>
   * The result is an instance of the mutable {@link MutablePointSensitivities}.
   * It will contain the same individual sensitivity entries.
   *
   * @return the mutable sensitivity instance, not null
   */
  public MutablePointSensitivities toMutable() {
    return new MutablePointSensitivities(sensitivities);
  }
  //-------------------------------------------------------------------------
  /**
   * Checks if this sensitivity equals another within the specified tolerance.
   * <p>
   * This returns true if the two instances have the list of {@code PointSensitivity},
   * where the sensitivity {@code double} values are compared within the specified tolerance.
   * It is expected that this comparator will be used on the normalized version of the sensitivity.
   *
   * @param other  the other sensitivity
   * @param tolerance  the tolerance
   * @return true if equal up to the tolerance
   */
  public boolean equalWithTolerance(PointSensitivities other, double tolerance) {
    ImmutableList<PointSensitivity> list1 = this.getSensitivities();
    ImmutableList<PointSensitivity> list2 = other.getSensitivities();
    int nbList1 = list1.size();
    int nbList2 = list2.size();
    if (nbList1 != nbList2) {
      return false;
    }
    // positional comparison: entries must appear in the same order with equal
    // keys, and values must agree within the tolerance
    for (int i1 = 0; i1 < nbList1; i1++) {
      if (list1.get(i1).compareKey(list2.get(i1)) == 0) {
        if (Math.abs(list1.get(i1).getSensitivity() - list2.get(i1).getSensitivity()) > tolerance) {
          return false;
        }
      } else {
        return false;
      }
    }
    return true;
  }
  //-------------------------------------------------------------------------
  @Override
  public PointSensitivities convertedTo(Currency resultCurrency, FxRateProvider rateProvider) {
    // entries whose keys become equal after currency conversion are merged by insert()
    List<PointSensitivity> mutable = new ArrayList<>();
    for (PointSensitivity sensi : sensitivities) {
      insert(mutable, sensi.convertedTo(resultCurrency, rateProvider));
    }
    return new PointSensitivities(mutable);
  }
  // inserts a sensitivity into the mutable list in the right location
  // merges the entry with an existing entry if the key matches
  private static void insert(List<PointSensitivity> mutable, PointSensitivity addition) {
    int index = Collections.binarySearch(mutable, addition, PointSensitivity::compareKey);
    if (index >= 0) {
      // key already present: sum the sensitivity values
      PointSensitivity base = mutable.get(index);
      double combined = base.getSensitivity() + addition.getSensitivity();
      mutable.set(index, base.withSensitivity(combined));
    } else {
      // binarySearch returns (-(insertion point) - 1) when not found
      int insertionPoint = -(index + 1);
      mutable.add(insertionPoint, addition);
    }
  }
  //------------------------- AUTOGENERATED START -------------------------
  // NOTE: everything below is generated by the Joda-Beans code generator;
  // do not edit by hand.
  /**
   * The meta-bean for {@code PointSensitivities}.
   * @return the meta-bean, not null
   */
  public static PointSensitivities.Meta meta() {
    return PointSensitivities.Meta.INSTANCE;
  }
  static {
    MetaBean.register(PointSensitivities.Meta.INSTANCE);
  }
  /**
   * The serialization version id.
   */
  private static final long serialVersionUID = 1L;
  private PointSensitivities(
      List<PointSensitivity> sensitivities) {
    JodaBeanUtils.notNull(sensitivities, "sensitivities");
    this.sensitivities = ImmutableList.copyOf(sensitivities);
  }
  @Override
  public PointSensitivities.Meta metaBean() {
    return PointSensitivities.Meta.INSTANCE;
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the point sensitivities.
   * <p>
   * Each entry includes details of the market data query it relates to.
   * @return the value of the property, not null
   */
  public ImmutableList<PointSensitivity> getSensitivities() {
    return sensitivities;
  }
  //-----------------------------------------------------------------------
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      PointSensitivities other = (PointSensitivities) obj;
      return JodaBeanUtils.equal(sensitivities, other.sensitivities);
    }
    return false;
  }
  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(sensitivities);
    return hash;
  }
  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(64);
    buf.append("PointSensitivities{");
    buf.append("sensitivities").append('=').append(JodaBeanUtils.toString(sensitivities));
    buf.append('}');
    return buf.toString();
  }
  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code PointSensitivities}.
   */
  public static final class Meta extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();
    /**
     * The meta-property for the {@code sensitivities} property.
     */
    @SuppressWarnings({"unchecked", "rawtypes" })
    private final MetaProperty<ImmutableList<PointSensitivity>> sensitivities = DirectMetaProperty.ofImmutable(
        this, "sensitivities", PointSensitivities.class, (Class) ImmutableList.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "sensitivities");
    /**
     * Restricted constructor.
     */
    private Meta() {
    }
    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      switch (propertyName.hashCode()) {
        case 1226228605:  // sensitivities
          return sensitivities;
      }
      return super.metaPropertyGet(propertyName);
    }
    @Override
    public BeanBuilder<? extends PointSensitivities> builder() {
      return new PointSensitivities.Builder();
    }
    @Override
    public Class<? extends PointSensitivities> beanType() {
      return PointSensitivities.class;
    }
    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return metaPropertyMap$;
    }
    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code sensitivities} property.
     * @return the meta-property, not null
     */
    public MetaProperty<ImmutableList<PointSensitivity>> sensitivities() {
      return sensitivities;
    }
    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 1226228605:  // sensitivities
          return ((PointSensitivities) bean).getSensitivities();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }
    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      metaProperty(propertyName);
      if (quiet) {
        return;
      }
      // the bean is immutable, so writes are always rejected
      throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
    }
  }
  //-----------------------------------------------------------------------
  /**
   * The bean-builder for {@code PointSensitivities}.
   */
  private static final class Builder extends DirectPrivateBeanBuilder<PointSensitivities> {
    private List<PointSensitivity> sensitivities = ImmutableList.of();
    /**
     * Restricted constructor.
     */
    private Builder() {
    }
    //-----------------------------------------------------------------------
    @Override
    public Object get(String propertyName) {
      switch (propertyName.hashCode()) {
        case 1226228605:  // sensitivities
          return sensitivities;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
    }
    @SuppressWarnings("unchecked")
    @Override
    public Builder set(String propertyName, Object newValue) {
      switch (propertyName.hashCode()) {
        case 1226228605:  // sensitivities
          this.sensitivities = (List<PointSensitivity>) newValue;
          break;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
      return this;
    }
    @Override
    public PointSensitivities build() {
      return new PointSensitivities(
          sensitivities);
    }
    //-----------------------------------------------------------------------
    @Override
    public String toString() {
      StringBuilder buf = new StringBuilder(64);
      buf.append("PointSensitivities.Builder{");
      buf.append("sensitivities").append('=').append(JodaBeanUtils.toString(sensitivities));
      buf.append('}');
      return buf.toString();
    }
  }
  //-------------------------- AUTOGENERATED END --------------------------
}
| |
// Copyright 2005 Konrad Twardowski
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.makagiga.commons.swing;
import static org.makagiga.commons.UI.i18n;
import java.awt.Dimension;
import java.awt.event.MouseEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import javax.swing.JMenuBar;
import org.makagiga.commons.Config;
import org.makagiga.commons.FS;
import org.makagiga.commons.Lockable;
import org.makagiga.commons.MApplication;
import org.makagiga.commons.UI;
import org.makagiga.commons.about.MAboutDialog;
/**
* The main window.
*
* @since 4.0 (org.makagiga.commons.swing package)
*/
public class MMainWindow extends MFrame implements Lockable {
    // private
    // whether the window is currently locked (input blocked, content hidden)
    private boolean locked;
    // menu bar visibility saved before locking, restored on unlock
    private boolean oldMenuBarVisible = true;
    // show the "still running in tray" balloon only once per session
    private boolean showSystemTrayInfo = true;
    // public
    /**
     * @since 4.2
     */
    public static final String LOCKED_PROPERTY = "locked";
    // public
    /**
     * Constructs the main (global) window.
     */
    public MMainWindow() {
        super(MApplication.getFullName());
        // close handling is done in the WindowAdapter below (may hide to tray
        // instead of quitting)
        setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);
        readConfig(Config.getDefault(), null);
        // allow shrinking down to half the restored size
        setMinimumSize(new Dimension(getWidth() / 2, getHeight() / 2));
        if (MApplication.getLogo() != null)
            setIconImage(MApplication.getLogo());
        MStatusBar statusBar = new MStatusBar();
        addSouth(statusBar);
        // init global window and status bar
        MainView.init(this, statusBar);
        // add close event handler
        addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(final WindowEvent e) {
                // do not initialize MSystemTray class if tray icon is disabled
                if (UI.systemTray.get() && UI.hideMainWindowInTray.get() && MSystemTray.isVisible()) {
                    // hide to tray instead of quitting
                    setVisible(false);
                    fireWindowIconified();
                    if (showSystemTrayInfo) {
                        showSystemTrayInfo = false;
                        MNotification.Message m = new MNotification.Message(
                            MApplication.getFullName(),
                            UI.makeHTML(i18n("The application is still running<br>in the system tray.")),
                            MApplication.getIcon()
                        );
                        m.setTimeout(MNotification.getDefaultTimeout());
                        m.show();
                    }
                }
                else {
                    MApplication.quit();
                }
            }
            @Override
            public void windowDeiconified(final WindowEvent e) {
                // repaint window on restore
                repaint();
            }
            @Override
            public void windowIconified(final WindowEvent e) {
                onMinimize();
            }
        } );
    }
    /**
     * Shows information about this application.
     */
    public void about() {
        MAboutDialog about = new MAboutDialog(this);
        about.exec();
    }
    /**
     * Returns @c true if the main window is locked.
     *
     * @see #setLocked(boolean)
     *
     * @since 2.4
     */
    @Override
    public boolean isLocked() { return locked; }
    /**
     * Sets whether or not the main window is locked.
     *
     * Locked frame properties:
     * - Ignores all mouse and keyboard input events
     * - Content pane is hidden
     * - Menu bar is hidden
     * - System tray popup menu is disabled
     *
     * @param value {@code true} = locked, {@code false} = unlocked
     *
     * @see #isLocked()
     *
     * @since 2.4
     */
    @Override
    public void setLocked(final boolean value) {
        // locking is security-sensitive: require the "setLocked" permission
        // when a SecurityManager is installed
        SecurityManager sm = System.getSecurityManager();
        if (sm != null)
            sm.checkPermission(new MApplication.Permission("setLocked"));
        if (value == locked)
            return;
        locked = value;
        JMenuBar menuBar = getJMenuBar();
        if (locked) {
            if (menuBar != null) {
                // remember visibility so unlock can restore it exactly
                oldMenuBarVisible = menuBar.isVisible();
                menuBar.setVisible(false);
            }
        }
        else {
            if (menuBar != null)
                menuBar.setVisible(oldMenuBarVisible);
        }
        // hiding the content pane blocks all interaction with the window body
        getContentPane().setVisible(!locked);
        firePropertyChange(LOCKED_PROPERTY, !locked, locked);
    }
    /**
     * @since 4.0
     */
    public boolean onTrayIconClick(final MouseEvent e) { return false; }
    /**
     * @since 4.0
     */
    public void onTrayIconMenuPopup(final MMenu menu) { }
    // makes the window visible, de-iconifies it and brings it to the front
    public void restore() {
        setVisible(true);
        if (getExtendedState() == ICONIFIED)
            setExtendedState(NORMAL);
        toFront();
    }
    /**
     * @since 3.8.7
     */
    public void updateTitle(final String subTitle) {
        StringBuilder name = new StringBuilder(MApplication.getFullName());
        if (FS.getProfile() != null)
            name.append(" [").append(FS.getProfile()).append(']');
        else if (FS.isPortable())
            name.append(" [Portable]");
        if (MApplication.isSafeMode())
            name.append(" [Safe-Mode]");
        if (MApplication.offline.get())
            name.append(" [Offline]");
        if (subTitle != null) {
            // prefix order: "subTitle - <application name>[...]"
            name.insert(0, " - ");
            name.insert(0, subTitle);
        }
        setTitle(name.toString());
    }
    // protected
    /**
     * Invoked when this window is minimized (iconified).
     */
    protected void onMinimize() { }
    // package private
    // dispatches a synthetic WINDOW_ICONIFIED event to all registered
    // listeners (used when the window is hidden to the tray, which does not
    // fire a real iconify event)
    void fireWindowIconified() {
        WindowListener[] wl = getWindowListeners();
        if (wl.length > 0) {
            WindowEvent e = new WindowEvent(this, WindowEvent.WINDOW_ICONIFIED);
            for (WindowListener i : wl)
                i.windowIconified(e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.lsm.rtree.impls;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.bloomfilter.impls.BloomCalculations;
import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilter;
import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilterSpecification;
import org.apache.hyracks.storage.am.btree.impls.BTree;
import org.apache.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.common.tuples.DualTupleReference;
import org.apache.hyracks.storage.am.lsm.common.api.IComponentFilterHelper;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentBulkLoader;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMemoryComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFilterManager;
import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
import org.apache.hyracks.storage.am.rtree.impls.RTree;
import org.apache.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
import org.apache.hyracks.storage.am.rtree.impls.SearchPredicate;
import org.apache.hyracks.storage.common.IIndexBulkLoader;
import org.apache.hyracks.storage.common.IIndexCursor;
import org.apache.hyracks.storage.common.IModificationOperationCallback;
import org.apache.hyracks.storage.common.ISearchOperationCallback;
import org.apache.hyracks.storage.common.ISearchPredicate;
import org.apache.hyracks.storage.common.MultiComparator;
public class LSMRTree extends AbstractLSMRTree {
protected final int[] buddyBTreeFields;
    /**
     * Creates an LSM R-tree with in-memory components backed by the given
     * virtual buffer caches. Delegates component wiring to
     * {@code AbstractLSMRTree} with a disk component factory combining the
     * R-tree, the buddy B-tree (which records deleted keys) and a bloom filter.
     */
    public LSMRTree(IIOManager ioManager, List<IVirtualBufferCache> virtualBufferCaches,
            ITreeIndexFrameFactory rtreeInteriorFrameFactory, ITreeIndexFrameFactory rtreeLeafFrameFactory,
            ITreeIndexFrameFactory btreeInteriorFrameFactory, ITreeIndexFrameFactory btreeLeafFrameFactory,
            ILSMIndexFileManager fileNameManager, TreeIndexFactory<RTree> diskRTreeFactory,
            TreeIndexFactory<BTree> diskBTreeFactory, BloomFilterFactory bloomFilterFactory,
            IComponentFilterHelper filterHelper, ILSMComponentFilterFrameFactory filterFrameFactory,
            LSMComponentFilterManager filterManager, double bloomFilterFalsePositiveRate, int fieldCount,
            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
            ILinearizeComparatorFactory linearizer, int[] comparatorFields, IBinaryComparatorFactory[] linearizerArray,
            ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
            ILSMIOOperationCallback ioOpCallback, int[] rtreeFields, int[] buddyBTreeFields, int[] filterFields,
            boolean durable, boolean isPointMBR) throws HyracksDataException {
        super(ioManager, virtualBufferCaches, rtreeInteriorFrameFactory, rtreeLeafFrameFactory,
                btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
                new LSMRTreeDiskComponentFactory(diskRTreeFactory, diskBTreeFactory, bloomFilterFactory, filterHelper),
                fieldCount, rtreeCmpFactories, btreeCmpFactories, linearizer, comparatorFields, linearizerArray,
                bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler, ioOpCallback, filterHelper,
                filterFrameFactory, filterManager, rtreeFields, filterFields, durable, isPointMBR,
                diskRTreeFactory.getBufferCache());
        // fields of the tuple that form the buddy B-tree key
        this.buddyBTreeFields = buddyBTreeFields;
    }
    /*
     * For external indexes with no memory components: passes no virtual buffer
     * caches and no component filter machinery to the superclass.
     */
    public LSMRTree(IIOManager ioManager, ITreeIndexFrameFactory rtreeInteriorFrameFactory,
            ITreeIndexFrameFactory rtreeLeafFrameFactory, ITreeIndexFrameFactory btreeInteriorFrameFactory,
            ITreeIndexFrameFactory btreeLeafFrameFactory, ILSMIndexFileManager fileNameManager,
            TreeIndexFactory<RTree> diskRTreeFactory, TreeIndexFactory<BTree> diskBTreeFactory,
            BloomFilterFactory bloomFilterFactory, double bloomFilterFalsePositiveRate,
            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
            ILinearizeComparatorFactory linearizer, int[] comparatorFields, IBinaryComparatorFactory[] linearizerArray,
            ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
            ILSMIOOperationCallback ioOpCallback, int[] buddyBTreeFields, boolean durable, boolean isPointMBR) {
        super(ioManager, rtreeInteriorFrameFactory, rtreeLeafFrameFactory, btreeInteriorFrameFactory,
                btreeLeafFrameFactory, fileNameManager,
                // null filter helper: external indexes do not use component filters
                new LSMRTreeDiskComponentFactory(diskRTreeFactory, diskBTreeFactory, bloomFilterFactory, null),
                rtreeCmpFactories, btreeCmpFactories, linearizer, comparatorFields, linearizerArray,
                bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler, ioOpCallback, durable, isPointMBR,
                diskRTreeFactory.getBufferCache());
        // fields of the tuple that form the buddy B-tree key
        this.buddyBTreeFields = buddyBTreeFields;
    }
@Override
protected ILSMDiskComponent loadComponent(LSMComponentFileReferences lsmComonentFileReferences)
throws HyracksDataException {
return createDiskComponent(componentFactory, lsmComonentFileReferences.getInsertIndexFileReference(),
lsmComonentFileReferences.getDeleteIndexFileReference(),
lsmComonentFileReferences.getBloomFilterFileReference(), false);
}
@Override
protected void deactivateDiskComponent(ILSMDiskComponent c) throws HyracksDataException {
LSMRTreeDiskComponent component = (LSMRTreeDiskComponent) c;
RTree rtree = component.getRTree();
BTree btree = component.getBTree();
BloomFilter bloomFilter = component.getBloomFilter();
rtree.deactivate();
btree.deactivate();
bloomFilter.deactivate();
rtree.purge();
btree.purge();
bloomFilter.purge();
}
@Override
protected void destroyDiskComponent(ILSMDiskComponent c) throws HyracksDataException {
LSMRTreeDiskComponent component = (LSMRTreeDiskComponent) c;
component.getBTree().destroy();
component.getBloomFilter().destroy();
component.getRTree().destroy();
}
@Override
protected void clearDiskComponent(ILSMDiskComponent c) throws HyracksDataException {
LSMRTreeDiskComponent component = (LSMRTreeDiskComponent) c;
component.getBTree().deactivate();
component.getBloomFilter().deactivate();
component.getRTree().deactivate();
component.getBTree().destroy();
component.getBloomFilter().destroy();
component.getRTree().destroy();
}
@Override
public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
LSMRTreeFlushOperation flushOp = (LSMRTreeFlushOperation) operation;
LSMRTreeMemoryComponent flushingComponent = (LSMRTreeMemoryComponent) flushOp.getFlushingComponent();
// Renaming order is critical because we use assume ordering when we
// read the file names when we open the tree.
// The RTree should be renamed before the BTree.
// scan the memory RTree
ITreeIndexAccessor memRTreeAccessor = flushingComponent.getRTree()
.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
RTreeSearchCursor rtreeScanCursor = (RTreeSearchCursor) memRTreeAccessor.createSearchCursor(false);
SearchPredicate rtreeNullPredicate = new SearchPredicate(null, null);
memRTreeAccessor.search(rtreeScanCursor, rtreeNullPredicate);
LSMRTreeDiskComponent component = createDiskComponent(componentFactory, flushOp.getTarget(),
flushOp.getBTreeTarget(), flushOp.getBloomFilterTarget(), true);
//count the number of tuples in the buddy btree
ITreeIndexAccessor memBTreeAccessor = flushingComponent.getBTree()
.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
RangePredicate btreeNullPredicate = new RangePredicate(null, null, true, true, null, null);
IIndexCursor btreeCountingCursor = ((BTreeAccessor) memBTreeAccessor).createCountingSearchCursor();
memBTreeAccessor.search(btreeCountingCursor, btreeNullPredicate);
long numBTreeTuples = 0L;
try {
while (btreeCountingCursor.hasNext()) {
btreeCountingCursor.next();
ITupleReference countTuple = btreeCountingCursor.getTuple();
numBTreeTuples = IntegerPointable.getInteger(countTuple.getFieldData(0), countTuple.getFieldStart(0));
}
} finally {
btreeCountingCursor.close();
}
ILSMDiskComponentBulkLoader componentBulkLoader =
createComponentBulkLoader(component, 1.0f, false, numBTreeTuples, false, false, false);
ITreeIndexCursor cursor;
IBinaryComparatorFactory[] linearizerArray = { linearizer };
TreeTupleSorter rTreeTupleSorter = new TreeTupleSorter(flushingComponent.getRTree().getFileId(),
linearizerArray, rtreeLeafFrameFactory.createFrame(), rtreeLeafFrameFactory.createFrame(),
flushingComponent.getRTree().getBufferCache(), comparatorFields);
// BulkLoad the tuples from the in-memory tree into the new disk
// RTree.
boolean isEmpty = true;
try {
while (rtreeScanCursor.hasNext()) {
isEmpty = false;
rtreeScanCursor.next();
rTreeTupleSorter.insertTupleEntry(rtreeScanCursor.getPageId(), rtreeScanCursor.getTupleOffset());
}
} finally {
rtreeScanCursor.close();
}
rTreeTupleSorter.sort();
cursor = rTreeTupleSorter;
if (!isEmpty) {
try {
while (cursor.hasNext()) {
cursor.next();
ITupleReference frameTuple = cursor.getTuple();
componentBulkLoader.add(frameTuple);
}
} finally {
cursor.close();
}
}
// scan the memory BTree
IIndexCursor btreeScanCursor = memBTreeAccessor.createSearchCursor(false);
memBTreeAccessor.search(btreeScanCursor, btreeNullPredicate);
try {
while (btreeScanCursor.hasNext()) {
btreeScanCursor.next();
ITupleReference frameTuple = btreeScanCursor.getTuple();
componentBulkLoader.delete(frameTuple);
}
} finally {
btreeScanCursor.close();
}
if (component.getLSMComponentFilter() != null) {
List<ITupleReference> filterTuples = new ArrayList<>();
filterTuples.add(flushingComponent.getLSMComponentFilter().getMinTuple());
filterTuples.add(flushingComponent.getLSMComponentFilter().getMaxTuple());
getFilterManager().updateFilter(component.getLSMComponentFilter(), filterTuples);
getFilterManager().writeFilter(component.getLSMComponentFilter(), component.getRTree());
}
// Note. If we change the filter to write to metadata object, we don't need the if block above
flushingComponent.getMetadata().copy(component.getMetadata());
componentBulkLoader.end();
return component;
}
@Override
public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
LSMRTreeMergeOperation mergeOp = (LSMRTreeMergeOperation) operation;
IIndexCursor cursor = mergeOp.getCursor();
ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
ILSMIndexOperationContext opCtx = ((LSMRTreeSortedCursor) cursor).getOpCtx();
opCtx.getComponentHolder().addAll(mergeOp.getMergingComponents());
search(opCtx, cursor, rtreeSearchPred);
LSMRTreeDiskComponent mergedComponent = createDiskComponent(componentFactory, mergeOp.getTarget(),
mergeOp.getBTreeTarget(), mergeOp.getBloomFilterTarget(), true);
ILSMDiskComponentBulkLoader componentBulkLoader;
// In case we must keep the deleted-keys BTrees, then they must be merged *before* merging the r-trees so that
// lsmHarness.endSearch() is called once when the r-trees have been merged.
if (mergeOp.getMergingComponents().get(mergeOp.getMergingComponents().size() - 1) != diskComponents
.get(diskComponents.size() - 1)) {
// Keep the deleted tuples since the oldest disk component is not included in the merge operation
long numElements = 0L;
for (int i = 0; i < mergeOp.getMergingComponents().size(); ++i) {
numElements += ((LSMRTreeDiskComponent) mergeOp.getMergingComponents().get(i)).getBloomFilter()
.getNumElements();
}
componentBulkLoader =
createComponentBulkLoader(mergedComponent, 1.0f, false, numElements, false, false, false);
LSMRTreeDeletedKeysBTreeMergeCursor btreeCursor = new LSMRTreeDeletedKeysBTreeMergeCursor(opCtx);
search(opCtx, btreeCursor, rtreeSearchPred);
try {
while (btreeCursor.hasNext()) {
btreeCursor.next();
ITupleReference tuple = btreeCursor.getTuple();
componentBulkLoader.delete(tuple);
}
} finally {
btreeCursor.close();
}
} else {
//no buddy-btree needed
componentBulkLoader = createComponentBulkLoader(mergedComponent, 1.0f, false, 0L, false, false, false);
}
//search old rtree components
try {
while (cursor.hasNext()) {
cursor.next();
ITupleReference frameTuple = cursor.getTuple();
componentBulkLoader.add(frameTuple);
}
} finally {
cursor.close();
}
if (mergedComponent.getLSMComponentFilter() != null) {
List<ITupleReference> filterTuples = new ArrayList<>();
for (int i = 0; i < mergeOp.getMergingComponents().size(); ++i) {
filterTuples.add(mergeOp.getMergingComponents().get(i).getLSMComponentFilter().getMinTuple());
filterTuples.add(mergeOp.getMergingComponents().get(i).getLSMComponentFilter().getMaxTuple());
}
getFilterManager().updateFilter(mergedComponent.getLSMComponentFilter(), filterTuples);
getFilterManager().writeFilter(mergedComponent.getLSMComponentFilter(), mergedComponent.getRTree());
}
componentBulkLoader.end();
return mergedComponent;
}
@Override
public ILSMIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
ISearchOperationCallback searchCallback) {
return new LSMRTreeAccessor(getLsmHarness(), createOpContext(modificationCallback, searchCallback),
buddyBTreeFields);
}
@Override
public ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
return createDiskComponent(componentFactory, componentFileRefs.getInsertIndexFileReference(),
componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), true);
}
@Override
public ILSMDiskComponentBulkLoader createComponentBulkLoader(ILSMDiskComponent component, float fillFactor,
boolean verifyInput, long numElementsHint, boolean checkIfEmptyIndex, boolean withFilter,
boolean cleanupEmptyComponent) throws HyracksDataException {
BloomFilterSpecification bloomFilterSpec = null;
if (numElementsHint > 0) {
int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElementsHint);
bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement, bloomFilterFalsePositiveRate);
}
if (withFilter && filterFields != null) {
return new LSMRTreeDiskComponentBulkLoader((LSMRTreeDiskComponent) component, bloomFilterSpec, fillFactor,
verifyInput, numElementsHint, checkIfEmptyIndex, cleanupEmptyComponent, filterManager, treeFields,
filterFields, MultiComparator.create(component.getLSMComponentFilter().getFilterCmpFactories()));
} else {
return new LSMRTreeDiskComponentBulkLoader((LSMRTreeDiskComponent) component, bloomFilterSpec, fillFactor,
verifyInput, numElementsHint, checkIfEmptyIndex, cleanupEmptyComponent);
}
}
@Override
public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint)
throws HyracksDataException {
return new LSMRTreeBulkLoader(this, fillLevel, verifyInput, numElementsHint);
}
// This function is modified for R-Trees without antimatter tuples to allow buddy B-Tree to have only primary keys
@Override
public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
if (ctx.getOperation() == IndexOperation.PHYSICALDELETE) {
throw new UnsupportedOperationException("Physical delete not supported in the LSM-RTree");
}
ITupleReference indexTuple;
if (ctx.getIndexTuple() != null) {
ctx.getIndexTuple().reset(tuple);
indexTuple = ctx.getIndexTuple();
ctx.getCurrentMutableRTreeAccessor().getOpContext().resetNonIndexFieldsTuple(tuple);
} else {
indexTuple = tuple;
}
ctx.getModificationCallback().before(indexTuple);
ctx.getModificationCallback().found(null, indexTuple);
if (ctx.getOperation() == IndexOperation.INSERT) {
ctx.getCurrentMutableRTreeAccessor().insert(indexTuple);
} else {
// First remove all entries in the in-memory rtree (if any).
ctx.getCurrentMutableRTreeAccessor().delete(indexTuple);
try {
ctx.getCurrentMutableBTreeAccessor().insert(((DualTupleReference) tuple).getPermutingTuple());
} catch (HyracksDataException e) {
// Do nothing, because one delete tuple is enough to indicate
// that all the corresponding insert tuples are deleted
if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
throw e;
}
}
}
updateFilter(ctx, tuple);
}
@Override
public void markAsValid(ILSMDiskComponent lsmComponent) throws HyracksDataException {
LSMRTreeDiskComponent component = (LSMRTreeDiskComponent) lsmComponent;
markAsValidInternal(component.getBTree().getBufferCache(), component.getBloomFilter());
markAsValidInternal((component).getBTree());
markAsValidInternal((component).getRTree());
}
@Override
public Set<String> getLSMComponentPhysicalFiles(ILSMComponent lsmComponent) {
Set<String> files = new HashSet<>();
LSMRTreeDiskComponent component = (LSMRTreeDiskComponent) lsmComponent;
files.add(component.getBTree().getFileReference().getFile().getAbsolutePath());
files.add(component.getRTree().getFileReference().getFile().getAbsolutePath());
files.add(component.getBloomFilter().getFileReference().getFile().getAbsolutePath());
return files;
}
@Override
protected ILSMIOOperation createFlushOperation(AbstractLSMIndexOperationContext opCtx,
ILSMMemoryComponent flushingComponent, LSMComponentFileReferences componentFileRefs,
ILSMIOOperationCallback callback) throws HyracksDataException {
LSMRTreeAccessor accessor = new LSMRTreeAccessor(getLsmHarness(), opCtx, buddyBTreeFields);
return new LSMRTreeFlushOperation(accessor, flushingComponent, componentFileRefs.getInsertIndexFileReference(),
componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
callback, fileManager.getBaseDir().getAbsolutePath());
}
@Override
protected ILSMIOOperation createMergeOperation(AbstractLSMIndexOperationContext opCtx,
List<ILSMComponent> mergingComponents, LSMComponentFileReferences mergeFileRefs,
ILSMIOOperationCallback callback) throws HyracksDataException {
ITreeIndexCursor cursor = new LSMRTreeSortedCursor(opCtx, linearizer, buddyBTreeFields);
ILSMIndexAccessor accessor = new LSMRTreeAccessor(getLsmHarness(), opCtx, buddyBTreeFields);
return new LSMRTreeMergeOperation(accessor, mergingComponents, cursor,
mergeFileRefs.getInsertIndexFileReference(), mergeFileRefs.getDeleteIndexFileReference(),
mergeFileRefs.getBloomFilterFileReference(), callback, fileManager.getBaseDir().getAbsolutePath());
}
}
| |
package io.miti.schema.gui;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.List;
import javax.swing.JList;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import io.miti.schema.cache.DBCache;
import io.miti.schema.dbutil.TableInfo;
import io.miti.schema.model.TableListModel;
import io.miti.schema.util.ListFormatter;
import io.miti.schema.util.Utility;
/**
 * Mouse listener that attaches a right-click popup menu to a {@link JList} of
 * table or column names, offering "copy to clipboard" actions (single item,
 * selected items, or all items, each with or without metadata; holding Shift
 * while choosing a metadata item copies the metadata as a Markdown table).
 *
 * IMPORTANT: {@link #updatePopupItems()} enables/disables menu entries by
 * their component index inside the popup.  Those indices are positional:
 * buildPopup() adds m1..m3 (indices 0-2), a separator (index 3), then m4..m6
 * (indices 4-6).  Keep buildPopup() and the hard-coded index arrays in sync.
 */
public final class MousePopupListener extends MouseAdapter
{
  // True when the list shows tables; false when it shows columns.
  private boolean showTables = true;
  // The list this popup is attached to (raw JList — pre-generics code style).
  private JList tableList = null;
  // The popup menu shown on right-click.
  private JPopupMenu menu = new JPopupMenu();
  // Location of the most recent popup-trigger click, used to find the row under the mouse.
  private Point point = null;
  // Windows-style line ending used when joining items for the clipboard.
  private static final String EOLN = "\r\n";

  /**
   * Default constructor.
   */
  public MousePopupListener()
  {
    super();
  }

  /**
   * Standard constructor for this class.
   *
   * @param tableData whether we're showing table or column data
   * @param dataList the JList for this popup menu
   */
  public MousePopupListener(final boolean tableData,
                            final JList dataList)
  {
    showTables = tableData;
    tableList = dataList;
    buildPopup();
  }

  /**
   * Build the popup menu.  The PopupAction mode passed to each item (0-5)
   * selects the copy behavior in actionPerformed(); the add order below fixes
   * the component indices that updatePopupItems() relies on.
   */
  private void buildPopup()
  {
    final String text = showTables ? "table" : "column";
    JMenuItem m1 = new JMenuItem(String.format("Copy this %s", text));
    m1.addActionListener(new PopupAction(0));
    JMenuItem m2 = new JMenuItem(String.format("Copy selected %ss", text));
    m2.addActionListener(new PopupAction(1));
    JMenuItem m3 = new JMenuItem(String.format("Copy all %ss", text));
    m3.addActionListener(new PopupAction(2));
    JMenuItem m4 = new JMenuItem(String.format("Copy this %s and metadata", text));
    m4.addActionListener(new PopupAction(3));
    JMenuItem m5 = new JMenuItem(String.format("Copy selected %ss and metadata", text));
    m5.addActionListener(new PopupAction(4));
    JMenuItem m6 = new JMenuItem(String.format("Copy all %ss and metadata", text));
    m6.addActionListener(new PopupAction(5));
    menu.add(m1);
    menu.add(m2);
    menu.add(m3);
    menu.addSeparator();
    menu.add(m4);
    menu.add(m5);
    menu.add(m6);
  }

  @Override
  public void mouseClicked(final MouseEvent e)
  {
    checkPopup(e);
  }

  @Override
  public void mousePressed(final MouseEvent e)
  {
    // Checked on pressed AND released because the popup trigger fires on
    // different events depending on the platform.
    checkPopup(e);
  }

  @Override
  public void mouseReleased(final MouseEvent e)
  {
    checkPopup(e);
  }

  /**
   * If the user invoked the popup trigger (right-click), show the popup menu.
   *
   * @param e the mouse event
   */
  private void checkPopup(final MouseEvent e)
  {
    if (e.isPopupTrigger())
    {
      // Remember where the click happened so the "copy this ..." actions can
      // resolve the row under the mouse later.
      point = new Point(e.getX(), e.getY());
      updatePopupItems();
      menu.show(tableList, e.getX(), e.getY());
      // demoSel();
    }
  }

  /**
   * Enable and disable items in the popup menu as needed.
   * Index map (see buildPopup): 0/4 = "this", 1/5 = "selected",
   * 2/6 = "all", 3 = separator (never touched).
   */
  private void updatePopupItems()
  {
    // See if the list has any items
    final int len = ((TableListModel) tableList.getModel()).getSize();
    if (len < 1)
    {
      // No items in the list, so disable all menu items
      enableMenuItems(false, new int[] {0, 1, 2, 4, 5, 6});
      return;
    }
    else
    {
      // Enable the "all tables" menu items
      enableMenuItems(true, new int[] {2, 6});
      // Check if there's an item near the mouse click
      final int currItem = tableList.locationToIndex(point);
      enableMenuItems((currItem >= 0), new int[] {0, 4});
      // Check if any rows are selected
      final int[] sel = tableList.getSelectedIndices();
      enableMenuItems((sel.length > 0), new int[] {1, 5});
    }
  }

  /**
   * Enable or disable menu items in the popup menu.
   *
   * @param enable whether to enable the items referenced by the list in ind
   * @param ind the list of indices of popup menu items to enable or disable
   */
  private void enableMenuItems(final boolean enable, final int[] ind)
  {
    final int num = ind.length;
    for (int i = 0; i < num; ++i)
    {
      menu.getComponent(ind[i]).setEnabled(enable);
    }
  }

  /**
   * The action listener for the items in the popup menu.
   */
  class PopupAction implements ActionListener
  {
    /** The mode for this action - copy one row, selected rows, or all rows. */
    private int mode = 0;

    /**
     * Constructor.
     *
     * @param nMode the mode for this instance
     */
    public PopupAction(final int nMode)
    {
      mode = nMode;
    }

    /**
     * Handle the action.  Modes 0-2 copy names only; modes 3-5 also copy
     * metadata, and Shift switches the metadata to Markdown-table format.
     *
     * @param evt the action event
     */
    @Override
    public void actionPerformed(final ActionEvent evt)
    {
      // Shift held while the menu item was activated?
      final boolean isShift = ((evt.getModifiers() & ActionEvent.SHIFT_MASK) != 0);
      switch (mode)
      {
        case 0:
          copyObject(evt);
          break;
        case 1:
          copySelectedObject(evt);
          break;
        case 2:
          copyAllObjects(evt);
          break;
        case 3:
          copyObjectAndData(evt, isShift);
          break;
        case 4:
          copySelectedObjectAndData(evt, isShift);
          break;
        case 5:
        default:
          copyAllObjectsAndData(evt, isShift);
          break;
      }
    }

    /**
     * Copy the current object (the row under the popup-trigger click).
     *
     * @param evt the action event
     */
    private void copyObject(final ActionEvent evt)
    {
      final int currItem = tableList.locationToIndex(point);
      if (currItem >= 0)
      {
        String item = (String) ((TableListModel)
            tableList.getModel()).getElementAt(currItem);
        Utility.copyToClipboard(item);
      }
    }

    /**
     * Copy the selected objects, one per line (CRLF-separated).
     *
     * @param evt the action event
     */
    private void copySelectedObject(final ActionEvent evt)
    {
      int[] sel = tableList.getSelectedIndices();
      final int len = sel.length;
      if (len > 0)
      {
        StringBuilder sb = new StringBuilder(50);
        String item = (String) ((TableListModel)
            tableList.getModel()).getElementAt(sel[0]);
        sb.append(item);
        for (int i = 1; i < len; ++i)
        {
          sb.append(EOLN);
          item = (String) ((TableListModel)
              tableList.getModel()).getElementAt(sel[i]);
          sb.append(item);
        }
        // System.out.println(sb.toString());
        Utility.copyToClipboard(sb.toString());
      }
    }

    /**
     * Copy all objects, one per line (CRLF-separated).
     *
     * @param evt the action event
     */
    private void copyAllObjects(final ActionEvent evt)
    {
      final int len = ((TableListModel) tableList.getModel()).getSize();
      if (len > 0)
      {
        StringBuilder sb = new StringBuilder(50);
        String item = (String) ((TableListModel)
            tableList.getModel()).getElementAt(0);
        sb.append(item);
        for (int i = 1; i < len; ++i)
        {
          sb.append(EOLN);
          item = (String) ((TableListModel)
              tableList.getModel()).getElementAt(i);
          sb.append(item);
        }
        // System.out.println(sb.toString());
        Utility.copyToClipboard(sb.toString());
      }
    }

    /**
     * Copy the current object and its data (name, then a details table).
     *
     * @param evt the action event
     * @param isShift whether the shift key was pressed on the menu
     */
    private void copyObjectAndData(final ActionEvent evt, final boolean isShift)
    {
      final int currItem = tableList.locationToIndex(point);
      if (currItem >= 0)
      {
        // Get the field name
        String item = (String) ((TableListModel)
            tableList.getModel()).getElementAt(currItem);
        StringBuilder sb = new StringBuilder(100);
        sb.append(item).append(EOLN);
        sb.append(isShift ? getItemDetailsMD(item) : getItemDetails(item));
        // Copy it to the clipboard
        Utility.copyToClipboard(sb.toString());
      }
    }

    /**
     * Get the details for the specified item in Markdown format
     * (header row, "---" separator row, then one row per TableInfo).
     *
     * @param item the table or column name
     * @return the details table in Markdown table format
     */
    private String getItemDetailsMD(final String item)
    {
      // Get the info for this table/column
      List<TableInfo> tableInfo = showTables ? DBCache.getInstance().getTableInfo(item) :
          DBCache.getInstance().getColumnInfo(item);
      // This will hold the output string
      StringBuilder sb = new StringBuilder(100);
      // Iterate over the column names
      sb.append("| ");
      for (String title : getOutputTitles()) {
        sb.append(title).append(" | ");
      }
      sb.append(EOLN);
      // Add the separator row
      final int numCols = getOutputTitles().length;
      sb.append("|");
      for (int i = 0; i < numCols; ++i) {
        sb.append(" --- |");
      }
      sb.append(EOLN);
      // Add the data
      // {"order", (showTables ? "columnName" : "tableName"), "columnType", "isNullable", "isPK"};
      for (TableInfo ti : tableInfo) {
        sb.append("| ").append(ti.order).append(" | ")
          .append(showTables ? ti.columnName : ti.tableName).append(" | ")
          .append(ti.columnType).append(" | ")
          .append(ti.isNullable).append(" | ")
          .append(ti.isPK).append(" |").append(EOLN);
      }
      return sb.toString();
    }

    /**
     * Get the details for the specified item as a plain-text table
     * (formatted by ListFormatter).
     *
     * @param item the table or column name
     * @return the details table
     */
    private String getItemDetails(final String item)
    {
      // Get the info for this table/column
      List<TableInfo> tableInfo = showTables ? DBCache.getInstance().getTableInfo(item) :
          DBCache.getInstance().getColumnInfo(item);
      String table = new ListFormatter().getTable(tableInfo, getOutputColumns(), getOutputTitles());
      return table;
    }

    /**
     * Return the column titles for the output list.  Must stay parallel to
     * getOutputColumns().
     *
     * @return the column titles
     */
    private String[] getOutputTitles()
    {
      String[] titles = new String[]{"#", (showTables ? "Column" : "Table"), "Column Type", "Can Be Null?", "Primary Key?"};
      return titles;
    }

    /**
     * Return the names of the TableInfo fields to include in the output list.
     * Must stay parallel to getOutputTitles().
     *
     * @return the column fields
     */
    private String[] getOutputColumns()
    {
      String[] cols = new String[]{"order", (showTables ? "columnName" : "tableName"), "columnType", "isNullable", "isPK"};
      return cols;
    }

    /**
     * Copy the selected objects and data (name + details table per item).
     *
     * @param evt the action event
     * @param isShift whether the shift key was pressed on the menu
     */
    private void copySelectedObjectAndData(final ActionEvent evt, final boolean isShift)
    {
      int[] sel = tableList.getSelectedIndices();
      final int len = sel.length;
      if (len > 0)
      {
        StringBuilder sb = new StringBuilder(50);
        String item = (String) ((TableListModel)
            tableList.getModel()).getElementAt(sel[0]);
        sb.append(item).append(EOLN);
        sb.append(isShift ? getItemDetailsMD(item) : getItemDetails(item));
        for (int i = 1; i < len; ++i)
        {
          sb.append(EOLN);
          item = (String) ((TableListModel)
              tableList.getModel()).getElementAt(sel[i]);
          sb.append(item).append(EOLN);
          sb.append(isShift ? getItemDetailsMD(item) : getItemDetails(item));
        }
        // System.out.println(sb.toString());
        Utility.copyToClipboard(sb.toString());
      }
    }

    /**
     * Copy all objects and data (name + details table per item).
     *
     * @param evt the action event
     * @param isShift whether the shift key was pressed on the menu
     */
    private void copyAllObjectsAndData(final ActionEvent evt, final boolean isShift)
    {
      final int len = ((TableListModel) tableList.getModel()).getSize();
      if (len > 0)
      {
        StringBuilder sb = new StringBuilder(50);
        String item = (String) ((TableListModel)
            tableList.getModel()).getElementAt(0);
        sb.append(item).append(EOLN);
        sb.append(isShift ? getItemDetailsMD(item) : getItemDetails(item));
        for (int i = 1; i < len; ++i)
        {
          sb.append(EOLN);
          item = (String) ((TableListModel)
              tableList.getModel()).getElementAt(i);
          sb.append(item).append(EOLN);
          sb.append(isShift ? getItemDetailsMD(item) : getItemDetails(item));
        }
        // System.out.println(sb.toString());
        Utility.copyToClipboard(sb.toString());
      }
    }
  }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.directconnect.model;
import java.io.Serializable;
/**
* <p>
* A structure containing information about a public virtual interface that will
* be provisioned on a connection.
* </p>
*/
public class NewPublicVirtualInterfaceAllocation implements Serializable,
Cloneable {
private String virtualInterfaceName;
private Integer vlan;
private Integer asn;
private String authKey;
private String amazonAddress;
private String customerAddress;
private com.amazonaws.internal.SdkInternalList<RouteFilterPrefix> routeFilterPrefixes;
/**
* @param virtualInterfaceName
*/
public void setVirtualInterfaceName(String virtualInterfaceName) {
this.virtualInterfaceName = virtualInterfaceName;
}
/**
* @return
*/
public String getVirtualInterfaceName() {
return this.virtualInterfaceName;
}
/**
* @param virtualInterfaceName
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public NewPublicVirtualInterfaceAllocation withVirtualInterfaceName(
String virtualInterfaceName) {
setVirtualInterfaceName(virtualInterfaceName);
return this;
}
/**
* @param vlan
*/
public void setVlan(Integer vlan) {
this.vlan = vlan;
}
/**
* @return
*/
public Integer getVlan() {
return this.vlan;
}
/**
* @param vlan
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public NewPublicVirtualInterfaceAllocation withVlan(Integer vlan) {
setVlan(vlan);
return this;
}
/**
* @param asn
*/
public void setAsn(Integer asn) {
this.asn = asn;
}
/**
* @return
*/
public Integer getAsn() {
return this.asn;
}
/**
* @param asn
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public NewPublicVirtualInterfaceAllocation withAsn(Integer asn) {
setAsn(asn);
return this;
}
/**
* @param authKey
*/
public void setAuthKey(String authKey) {
this.authKey = authKey;
}
/**
* @return
*/
public String getAuthKey() {
return this.authKey;
}
/**
* @param authKey
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public NewPublicVirtualInterfaceAllocation withAuthKey(String authKey) {
setAuthKey(authKey);
return this;
}
/**
* @param amazonAddress
*/
public void setAmazonAddress(String amazonAddress) {
this.amazonAddress = amazonAddress;
}
/**
* @return
*/
public String getAmazonAddress() {
return this.amazonAddress;
}
/**
* @param amazonAddress
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public NewPublicVirtualInterfaceAllocation withAmazonAddress(
String amazonAddress) {
setAmazonAddress(amazonAddress);
return this;
}
/**
* @param customerAddress
*/
public void setCustomerAddress(String customerAddress) {
this.customerAddress = customerAddress;
}
/**
* @return
*/
public String getCustomerAddress() {
return this.customerAddress;
}
/**
* @param customerAddress
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public NewPublicVirtualInterfaceAllocation withCustomerAddress(
String customerAddress) {
setCustomerAddress(customerAddress);
return this;
}
/**
* @return
*/
public java.util.List<RouteFilterPrefix> getRouteFilterPrefixes() {
if (routeFilterPrefixes == null) {
routeFilterPrefixes = new com.amazonaws.internal.SdkInternalList<RouteFilterPrefix>();
}
return routeFilterPrefixes;
}
/**
* @param routeFilterPrefixes
*/
public void setRouteFilterPrefixes(
java.util.Collection<RouteFilterPrefix> routeFilterPrefixes) {
if (routeFilterPrefixes == null) {
this.routeFilterPrefixes = null;
return;
}
this.routeFilterPrefixes = new com.amazonaws.internal.SdkInternalList<RouteFilterPrefix>(
routeFilterPrefixes);
}
/**
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setRouteFilterPrefixes(java.util.Collection)} or
* {@link #withRouteFilterPrefixes(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param routeFilterPrefixes
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public NewPublicVirtualInterfaceAllocation withRouteFilterPrefixes(
RouteFilterPrefix... routeFilterPrefixes) {
if (this.routeFilterPrefixes == null) {
setRouteFilterPrefixes(new com.amazonaws.internal.SdkInternalList<RouteFilterPrefix>(
routeFilterPrefixes.length));
}
for (RouteFilterPrefix ele : routeFilterPrefixes) {
this.routeFilterPrefixes.add(ele);
}
return this;
}
/**
* @param routeFilterPrefixes
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public NewPublicVirtualInterfaceAllocation withRouteFilterPrefixes(
java.util.Collection<RouteFilterPrefix> routeFilterPrefixes) {
setRouteFilterPrefixes(routeFilterPrefixes);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getVirtualInterfaceName() != null)
sb.append("VirtualInterfaceName: " + getVirtualInterfaceName()
+ ",");
if (getVlan() != null)
sb.append("Vlan: " + getVlan() + ",");
if (getAsn() != null)
sb.append("Asn: " + getAsn() + ",");
if (getAuthKey() != null)
sb.append("AuthKey: " + getAuthKey() + ",");
if (getAmazonAddress() != null)
sb.append("AmazonAddress: " + getAmazonAddress() + ",");
if (getCustomerAddress() != null)
sb.append("CustomerAddress: " + getCustomerAddress() + ",");
if (getRouteFilterPrefixes() != null)
sb.append("RouteFilterPrefixes: " + getRouteFilterPrefixes());
sb.append("}");
return sb.toString();
}
/**
 * Two allocations are equal when every field matches; each per-field
 * comparison is null-safe. {@code java.util.Objects.equals(a, b)} is exactly
 * equivalent to the generated "null XOR check + equals check" pair it
 * replaces (and {@code instanceof} already rejects {@code null}).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof NewPublicVirtualInterfaceAllocation)) {
        return false;
    }
    NewPublicVirtualInterfaceAllocation other = (NewPublicVirtualInterfaceAllocation) obj;
    return java.util.Objects.equals(other.getVirtualInterfaceName(), this.getVirtualInterfaceName())
            && java.util.Objects.equals(other.getVlan(), this.getVlan())
            && java.util.Objects.equals(other.getAsn(), this.getAsn())
            && java.util.Objects.equals(other.getAuthKey(), this.getAuthKey())
            && java.util.Objects.equals(other.getAmazonAddress(), this.getAmazonAddress())
            && java.util.Objects.equals(other.getCustomerAddress(), this.getCustomerAddress())
            && java.util.Objects.equals(other.getRouteFilterPrefixes(), this.getRouteFilterPrefixes());
}
/**
 * Hash over all fields. {@code java.util.Objects.hash} performs the same
 * accumulation as the generated code it replaces: seed 1, then
 * {@code 31 * h + (field == null ? 0 : field.hashCode())} per field in
 * declaration order, so the produced value is bit-for-bit identical.
 */
@Override
public int hashCode() {
    return java.util.Objects.hash(
            getVirtualInterfaceName(),
            getVlan(),
            getAsn(),
            getAuthKey(),
            getAmazonAddress(),
            getCustomerAddress(),
            getRouteFilterPrefixes());
}
/**
 * Shallow copy via {@link Object#clone()}. The class implements
 * {@code Cloneable} (declared outside this view), so the checked
 * {@code CloneNotSupportedException} can never actually occur; it is
 * converted to an unchecked {@link IllegalStateException} to keep the
 * method signature clean.
 */
@Override
public NewPublicVirtualInterfaceAllocation clone() {
    try {
        return (NewPublicVirtualInterfaceAllocation) super.clone();
    } catch (CloneNotSupportedException e) {
        throw new IllegalStateException(
                "Got a CloneNotSupportedException from Object.clone() "
                        + "even though we're Cloneable!", e);
    }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.reindex.remote;
import org.elasticsearch.Version;
import org.elasticsearch.action.bulk.byscroll.ScrollableHitSource.BasicHit;
import org.elasticsearch.action.bulk.byscroll.ScrollableHitSource.Hit;
import org.elasticsearch.action.bulk.byscroll.ScrollableHitSource.Response;
import org.elasticsearch.action.bulk.byscroll.ScrollableHitSource.SearchFailure;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.List;
import java.util.function.BiFunction;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Objects.requireNonNull;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Parsers to convert the response from the remote host into objects useful for {@link RemoteScrollableHitSource}.
*/
final class RemoteResponseParsers {
    // Static utility holder — never instantiated.
    private RemoteResponseParsers() {}
    /**
     * Parser for an individual {@code hit} element.
     */
    public static final ConstructingObjectParser<BasicHit, XContentType> HIT_PARSER =
        new ConstructingObjectParser<>("hit", true, a -> {
            // Constructor args arrive positionally, in the order they are
            // declared in the static block below: _index, _type, _id, _version.
            int i = 0;
            String index = (String) a[i++];
            String type = (String) a[i++];
            String id = (String) a[i++];
            Long version = (Long) a[i++];
            // _version is optional; -1 marks "no version" for BasicHit.
            return new BasicHit(index, type, id, version == null ? -1 : version);
        });
    static {
        HIT_PARSER.declareString(constructorArg(), new ParseField("_index"));
        HIT_PARSER.declareString(constructorArg(), new ParseField("_type"));
        HIT_PARSER.declareString(constructorArg(), new ParseField("_id"));
        HIT_PARSER.declareLong(optionalConstructorArg(), new ParseField("_version"));
        // _source is re-serialized into bytes of the same XContentType so it can
        // be forwarded verbatim to the local cluster.
        HIT_PARSER.declareObject(((basicHit, tuple) -> basicHit.setSource(tuple.v1(), tuple.v2())), (p, s) -> {
            try {
                /*
                 * We spool the data from the remote back into xcontent so we can get bytes to send. There ought to be a better way but for
                 * now this should do.
                 */
                try (XContentBuilder b = XContentBuilder.builder(s.xContent())) {
                    b.copyCurrentStructure(p);
                    // a hack but this lets us get the right xcontent type to go with the source
                    return new Tuple<>(b.bytes(), s);
                }
            } catch (IOException e) {
                throw new ParsingException(p.getTokenLocation(), "[hit] failed to parse [_source]", e);
            }
        }, new ParseField("_source"));
        ParseField routingField = new ParseField("_routing");
        ParseField parentField = new ParseField("_parent");
        ParseField ttlField = new ParseField("_ttl");
        HIT_PARSER.declareString(BasicHit::setRouting, routingField);
        HIT_PARSER.declareString(BasicHit::setParent, parentField);
        // Pre-2.0.0 parent and routing come back in "fields"
        class Fields {
            String routing;
            String parent;
        }
        ObjectParser<Fields, XContentType> fieldsParser = new ObjectParser<>("fields", Fields::new);
        HIT_PARSER.declareObject((hit, fields) -> {
            hit.setRouting(fields.routing);
            hit.setParent(fields.parent);
        }, fieldsParser, new ParseField("fields"));
        fieldsParser.declareString((fields, routing) -> fields.routing = routing, routingField);
        fieldsParser.declareString((fields, parent) -> fields.parent = parent, parentField);
        fieldsParser.declareLong((fields, ttl) -> {}, ttlField); // ignore ttls since they have been removed
    }
    /**
     * Parser for the {@code hits} element. Parsed to an array of {@code [total (Long), hits (List<Hit>)]}.
     */
    public static final ConstructingObjectParser<Object[], XContentType> HITS_PARSER =
        new ConstructingObjectParser<>("hits", true, a -> a);
    static {
        HITS_PARSER.declareLong(constructorArg(), new ParseField("total"));
        HITS_PARSER.declareObjectArray(constructorArg(), HIT_PARSER, new ParseField("hits"));
    }
    /**
     * Parser for {@code failed} shards in the {@code _shards} elements.
     */
    public static final ConstructingObjectParser<SearchFailure, XContentType> SEARCH_FAILURE_PARSER =
        new ConstructingObjectParser<>("failure", true, a -> {
            int i = 0;
            String index = (String) a[i++];
            Integer shardId = (Integer) a[i++];
            String nodeId = (String) a[i++];
            // "reason" may be either a structured error object (parsed into a
            // Throwable by ThrowableBuilder) or a bare string from very old
            // versions — wrap the latter in a RuntimeException.
            Object reason = a[i++];
            Throwable reasonThrowable;
            if (reason instanceof String) {
                reasonThrowable = new RuntimeException("Unknown remote exception with reason=[" + (String) reason + "]");
            } else {
                reasonThrowable = (Throwable) reason;
            }
            return new SearchFailure(reasonThrowable, index, shardId, nodeId);
        });
    static {
        SEARCH_FAILURE_PARSER.declareString(optionalConstructorArg(), new ParseField("index"));
        SEARCH_FAILURE_PARSER.declareInt(optionalConstructorArg(), new ParseField("shard"));
        SEARCH_FAILURE_PARSER.declareString(optionalConstructorArg(), new ParseField("node"));
        SEARCH_FAILURE_PARSER.declareField(constructorArg(), (p, c) -> {
            if (p.currentToken() == XContentParser.Token.START_OBJECT) {
                return ThrowableBuilder.PARSER.apply(p, c);
            } else {
                return p.text();
            }
        }, new ParseField("reason"), ValueType.OBJECT_OR_STRING);
    }
    /**
     * Parser for the {@code _shards} element. Throws everything out except the errors array if there is one. If there isn't one then it
     * parses to an empty list.
     */
    public static final ConstructingObjectParser<List<Throwable>, XContentType> SHARDS_PARSER =
        new ConstructingObjectParser<>("_shards", true, a -> {
            @SuppressWarnings("unchecked")
            List<Throwable> failures = (List<Throwable>) a[0];
            failures = failures == null ? emptyList() : failures;
            return failures;
        });
    static {
        SHARDS_PARSER.declareObjectArray(optionalConstructorArg(), SEARCH_FAILURE_PARSER, new ParseField("failures"));
    }
    // Top-level parser for the whole remote search response. A top-level
    // "error" object short-circuits into a failed Response.
    public static final ConstructingObjectParser<Response, XContentType> RESPONSE_PARSER =
        new ConstructingObjectParser<>("search_response", true, a -> {
            int i = 0;
            Throwable catastrophicFailure = (Throwable) a[i++];
            if (catastrophicFailure != null) {
                return new Response(false, singletonList(new SearchFailure(catastrophicFailure)), 0, emptyList(), null);
            }
            boolean timedOut = (boolean) a[i++];
            String scroll = (String) a[i++];
            Object[] hitsElement = (Object[]) a[i++];
            @SuppressWarnings("unchecked")
            List<SearchFailure> failures = (List<SearchFailure>) a[i++];
            long totalHits = 0;
            List<Hit> hits = emptyList();
            // Pull apart the hits element if we got it
            if (hitsElement != null) {
                i = 0;
                totalHits = (long) hitsElement[i++];
                @SuppressWarnings("unchecked")
                List<Hit> h = (List<Hit>) hitsElement[i++];
                hits = h;
            }
            return new Response(timedOut, failures, totalHits, hits, scroll);
        });
    static {
        RESPONSE_PARSER.declareObject(optionalConstructorArg(), ThrowableBuilder.PARSER::apply, new ParseField("error"));
        RESPONSE_PARSER.declareBoolean(optionalConstructorArg(), new ParseField("timed_out"));
        RESPONSE_PARSER.declareString(optionalConstructorArg(), new ParseField("_scroll_id"));
        RESPONSE_PARSER.declareObject(optionalConstructorArg(), HITS_PARSER, new ParseField("hits"));
        RESPONSE_PARSER.declareObject(optionalConstructorArg(), SHARDS_PARSER, new ParseField("_shards"));
    }
    /**
     * Collects stuff about Throwables and attempts to rebuild them.
     */
    public static class ThrowableBuilder {
        // PARSER recursively references itself via "caused_by", so the builder
        // fields are populated first and build() is applied afterwards.
        public static final BiFunction<XContentParser, XContentType, Throwable> PARSER;
        static {
            ObjectParser<ThrowableBuilder, XContentType> parser = new ObjectParser<>("reason", true, ThrowableBuilder::new);
            PARSER = parser.andThen(ThrowableBuilder::build);
            parser.declareString(ThrowableBuilder::setType, new ParseField("type"));
            parser.declareString(ThrowableBuilder::setReason, new ParseField("reason"));
            parser.declareObject(ThrowableBuilder::setCausedBy, PARSER::apply, new ParseField("caused_by"));
            // So we can give a nice error for parsing exceptions
            parser.declareInt(ThrowableBuilder::setLine, new ParseField("line"));
            parser.declareInt(ThrowableBuilder::setColumn, new ParseField("col"));
        }
        private String type;
        private String reason;
        private Integer line;
        private Integer column;
        private Throwable causedBy;
        // Assembles the Throwable, attaching the parsed cause chain if any.
        public Throwable build() {
            Throwable t = buildWithoutCause();
            if (causedBy != null) {
                t.initCause(causedBy);
            }
            return t;
        }
        private Throwable buildWithoutCause() {
            requireNonNull(type, "[type] is required");
            requireNonNull(reason, "[reason] is required");
            switch (type) {
            // Make some effort to use the right exceptions
            case "es_rejected_execution_exception":
                return new EsRejectedExecutionException(reason);
            case "parsing_exception":
                XContentLocation location = null;
                if (line != null && column != null) {
                    location = new XContentLocation(line, column);
                }
                return new ParsingException(location, reason);
            // But it isn't worth trying to get it perfect....
            default:
                return new RuntimeException(type + ": " + reason);
            }
        }
        public void setType(String type) {
            this.type = type;
        }
        public void setReason(String reason) {
            this.reason = reason;
        }
        public void setLine(Integer line) {
            this.line = line;
        }
        public void setColumn(Integer column) {
            this.column = column;
        }
        public void setCausedBy(Throwable causedBy) {
            this.causedBy = causedBy;
        }
    }
    /**
     * Parses the main action to return just the {@linkplain Version} that it returns. We throw everything else out.
     */
    public static final ConstructingObjectParser<Version, XContentType> MAIN_ACTION_PARSER = new ConstructingObjectParser<>(
            "/", true, a -> (Version) a[0]);
    static {
        ConstructingObjectParser<Version, XContentType> versionParser = new ConstructingObjectParser<>(
                "version", true, a -> Version.fromString((String) a[0]));
        versionParser.declareString(constructorArg(), new ParseField("number"));
        MAIN_ACTION_PARSER.declareObject(constructorArg(), versionParser, new ParseField("version"));
    }
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.presentation.framework.dispatcher;
import java.util.GregorianCalendar;
import java.util.Iterator;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.log4j.Logger;
import org.olat.data.basesecurity.BaseSecurity;
import org.olat.data.basesecurity.Identity;
import org.olat.data.basesecurity.SecurityGroup;
import org.olat.data.catalog.CatalogEntry;
import org.olat.data.repository.RepositoryEntry;
import org.olat.lms.catalog.CatalogService;
import org.olat.lms.commons.i18n.I18nModule;
import org.olat.lms.commons.mediaresource.NotFoundMediaResource;
import org.olat.lms.commons.mediaresource.ServletUtil;
import org.olat.lms.framework.dispatcher.CatalogExportModuleEBL;
import org.olat.presentation.framework.common.ControllerFactory;
import org.olat.presentation.framework.core.render.StringOutput;
import org.olat.presentation.framework.core.translator.PackageTranslator;
import org.olat.presentation.framework.core.translator.PackageUtil;
import org.olat.presentation.framework.core.translator.Translator;
import org.olat.presentation.repository.RepositoryDetailsController;
import org.olat.presentation.repository.RepositoryEntryIconRenderer;
import org.olat.system.commons.Settings;
import org.olat.system.logging.log4j.LoggerHelper;
import org.olat.system.spring.CoreSpringFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
 * Dispatcher that periodically exports the OLAT catalog to an XML file and
 * serves that file over HTTP. A background {@link TimerTask} rebuilds the XML
 * on a configurable interval; {@link #execute} serves the latest export.
 */
public class CatalogExportModuleDispatcher implements Dispatcher {
    private static final Logger log = LoggerHelper.getLogger();
    private static final String XML_CAT = "catalog"; // catalog root tag
    private static final String XML_NODE = "node"; // node element (catalog hierarchy structure)
    private static final String XML_LEAF = "leaf"; // leaf element (catalog entries (courses, files, ...))
    private static final String XML_CHILDREN = "children"; // child elements of nodes
    private static final String XML_DESCR = "description"; // catalog(!) description of nodes and leaves
    private static final String XML_TYPE = "type"; // catalog entry type (course, PDF, ...) -> translated to installations default language!
    private static final String XML_TYPE_CSS = "iconCSS"; // CSS class for catalog entry type icon
    private static final String XML_LINKS = "links"; // links to entries container
    private static final String XML_LINK = "link"; // link to the entry
    private static final String XML_LINKTYPE = "type"; // type of the link to the entry (see following)
    private static final String XML_LINKTYPE_GUEST = "guest"; // -> link for guest access (only there if accessible by guests)
    private static final String XML_LINKTYPE_LOGIN = "login"; // -> link with login required
    private static final String XML_LINKTYPE_DETAIL = "detail"; // -> link to entry detail page
    private static final String XML_LINKTYPE_ENROLL = "cbb_enrollment"; // -> link to enrollment course buildings blocks in entry (there may be many of this!!!)
    private static final String XML_ACC = "access"; // entry access settings attribute name
    private static final String XML_OWN = "owners"; // entry owners container element name
    private static final String XML_USR = "user"; // entry owners user subelements
    private static final String XML_CUSTOM = "custom"; // custom info (empty for now)
    // NLS:
    private static final String NLS_CIF_TYPE_NA = "cif.type.na";
    private static final String NLS_TABLE_HEADER_ACCESS_GUEST = "access.guest";
    private static final String NLS_TABLE_HEADER_ACCESS_USER = "access.user";
    private static final String NLS_TABLE_HEADER_ACCESS_AUTHOR = "access.author";
    private static final String NLS_TABLE_HEADER_ACCESS_OWNER = "access.owner";
    // NOTE(review): the following static mutable fields are initialized by
    // reInitialize() and shared by all instances; reInitialize() is
    // synchronized, but the fields themselves are not otherwise guarded.
    private static DocumentBuilderFactory domFactory = null;
    private static DocumentBuilder domBuilder = null;
    private static BaseSecurity securityManager = null;
    private static Translator repoTypeTranslator = null;
    private static Translator catalogExportTranslator = null;
    private TimerTask tt;
    private long updateInterval; // milliseconds between XML rebuilds
    private CatalogService catalogService;
    private static boolean instance = false; // true once reInitialize() has run
    CatalogExportModuleEBL catalogExportModuleEBL;
    /**
     * @param updateInterval rebuild interval in minutes; values below one
     *            minute are replaced by the 5-minute default
     */
    private CatalogExportModuleDispatcher(final Long updateInterval) {
        this.updateInterval = updateInterval * 60 * 1000;
        if (this.updateInterval < 60000) {
            // interval is smaller than one minute -> inform and go to default
            // NOTE(review): "is to small" typo in the message; left unchanged
            // because it is a runtime string.
            this.updateInterval = 5 * 60 * 1000;
            log.info("Update interval is to small , increasing to default of 5min!");
        }
        catalogService = CoreSpringFactory.getBean(CatalogService.class);
    }
    /**
     * One-time lazy initialization of translators, security manager, DOM
     * builder and the periodic rebuild timer. Safe to call repeatedly; only
     * the first call does work.
     *
     * @return false if the DOM builder could not be created (timer is
     *         scheduled regardless — NOTE(review): createXML() would then NPE)
     */
    synchronized private boolean reInitialize() {// o_clusterOK by:fj
        boolean retVal = true;
        if (instance) {
            return retVal;
        }
        // TODO there is a new way of creating package translator
        repoTypeTranslator = new PackageTranslator(PackageUtil.getPackageName(RepositoryDetailsController.class), I18nModule.getDefaultLocale());
        catalogExportTranslator = new PackageTranslator(PackageUtil.getPackageName(CatalogExportModuleDispatcher.class), I18nModule.getDefaultLocale());
        securityManager = (BaseSecurity) CoreSpringFactory.getBean(BaseSecurity.class);
        try {
            domFactory = DocumentBuilderFactory.newInstance(); // init
            domBuilder = domFactory.newDocumentBuilder();
        } catch (final Exception e) {
            retVal = false;
        }
        tt = new TimerTask() {
            @Override
            public void run() {
                Thread.currentThread().setPriority(Thread.MIN_PRIORITY); // don't disturb other things going on
                createXML();
            }
        };
        final Timer timer = new Timer();
        // First run immediately, then every updateInterval milliseconds.
        timer.schedule(tt, (new GregorianCalendar()).getTime(), updateInterval);
        instance = true;
        return retVal;
    }
    /**
     * Builds the complete catalog DOM (root node plus recursive sub-structure)
     * and hands it to the EBL for transformation/writing to disk.
     */
    protected void createXML() {
        log.debug("Creating catalog export XML file...");
        final Document doc = domBuilder.newDocument(); // create new XML document
        final Element cat = doc.createElement(XML_CAT); // catalog element
        doc.appendChild(cat);
        cat.setAttribute("date", String.valueOf(System.currentTimeMillis())); // set date in catalog element
        final Element root = doc.createElement(XML_NODE); // root node
        root.setAttribute("name", "root");
        cat.appendChild(root);
        final List ces = catalogService.getRootCatalogEntries();
        for (final Iterator it = ces.iterator(); it.hasNext();) { // for every root entry (currently only one)
            final CatalogEntry ce = (CatalogEntry) it.next();
            getCatalogSubStructure(doc, root, catalogService, ce); // scan this entry
        }
        try {
            catalogExportModuleEBL.transformCatalogXml(doc);
        } catch (final Exception e) {
            log.error("Error writing catalog export file.", e);
        }
    }
    /**
     * Recursively appends the children of {@code ce} below {@code parent}:
     * first all TYPE_NODE children (recursing into them), then all TYPE_LEAF
     * children that are visible to registered users.
     */
    private void getCatalogSubStructure(final Document doc, final Element parent, final CatalogService catalogService, final CatalogEntry ce) {
        Element cur = null; // tmp. element
        final List l = catalogService.getChildrenOf(ce); // get catalog children
        // all nodes
        for (final Iterator it = l.iterator(); it.hasNext();) { // scan for node entries
            final CatalogEntry c = (CatalogEntry) it.next();
            if (c.getType() == CatalogEntry.TYPE_NODE) { // it's a node
                final Element node = doc.createElement(XML_NODE); // node element
                node.setAttribute("name", c.getName());
                parent.appendChild(node);
                cur = doc.createElement(XML_DESCR); // description element
                cur.appendChild(doc.createTextNode(c.getDescription()));
                node.appendChild(cur);
                if (catalogService.getChildrenOf(c).size() > 0) { // children element containing all subentries
                    cur = doc.createElement(XML_CHILDREN);
                    node.appendChild(cur);
                    getCatalogSubStructure(doc, cur, catalogService, c); // recursive scan
                }
                cur = doc.createElement(XML_CUSTOM);
                /*
                 * Insert custom info here!
                 */
                node.appendChild(cur);
            }
        }
        // all leafes
        for (final Iterator it = l.iterator(); it.hasNext();) { // scan for leaf entries
            final CatalogEntry c = (CatalogEntry) it.next();
            if (c.getType() == CatalogEntry.TYPE_LEAF) {
                final RepositoryEntry re = c.getRepositoryEntry(); // get repo entry
                if (re.getAccess() > RepositoryEntry.ACC_OWNERS_AUTHORS) { // just show entries visible for registered users
                    final Element leaf = doc.createElement(XML_LEAF); // leaf element
                    leaf.setAttribute("name", c.getName());
                    parent.appendChild(leaf);
                    cur = doc.createElement(XML_DESCR); // description element
                    cur.appendChild(doc.createTextNode(c.getDescription()));
                    leaf.appendChild(cur);
                    cur = doc.createElement(XML_TYPE);
                    final String typeName = re.getOlatResource().getResourceableTypeName(); // add the resource type
                    final StringOutput typeDisplayText = new StringOutput(100);
                    if (typeName != null) { // add typename code
                        final RepositoryEntryIconRenderer reir = new RepositoryEntryIconRenderer();
                        cur.setAttribute(XML_TYPE_CSS, reir.getIconCssClass(re));
                        final String tName = ControllerFactory.translateResourceableTypeName(typeName, repoTypeTranslator.getLocale());
                        typeDisplayText.append(tName);
                    } else {
                        typeDisplayText.append(repoTypeTranslator.translate(NLS_CIF_TYPE_NA));
                    }
                    cur.appendChild(doc.createTextNode(typeDisplayText.toString()));
                    leaf.appendChild(cur);
                    final Element links = doc.createElement(XML_LINKS); // links container
                    String tmp = "";
                    String url = Settings.getServerContextPathURI() + "/url/CatalogEntry/" + re.getKey();
                    // The fall-through is intentional: each broader access
                    // level accumulates the labels of the more restricted ones.
                    switch (re.getAccess()) { // Attention! This uses the switch-case-fall-through mechanism!
                    case RepositoryEntry.ACC_USERS_GUESTS:
                        tmp = catalogExportTranslator.translate(NLS_TABLE_HEADER_ACCESS_GUEST) + tmp;
                        appendLinkElement(doc, links, XML_LINKTYPE_GUEST, url + "&guest=true&lang=" + I18nModule.getDefaultLocale().toString().toLowerCase());
                    case RepositoryEntry.ACC_USERS:
                        tmp = catalogExportTranslator.translate(NLS_TABLE_HEADER_ACCESS_USER) + tmp;
                    case RepositoryEntry.ACC_OWNERS_AUTHORS:
                        tmp = catalogExportTranslator.translate(NLS_TABLE_HEADER_ACCESS_AUTHOR) + tmp;
                    case RepositoryEntry.ACC_OWNERS:
                        tmp = catalogExportTranslator.translate(NLS_TABLE_HEADER_ACCESS_OWNER) + tmp;
                        appendLinkElement(doc, links, XML_LINKTYPE_LOGIN, url);
                        break;
                    default:
                        tmp = catalogExportTranslator.translate(NLS_TABLE_HEADER_ACCESS_USER);
                        break;
                    }
                    // when implemented in OLAT, add link to detail page and enrollment entries here
                    // appendLinkElement(doc, links, XML_LINKTYPE_DETAIL, RepoJumpInHandlerFactory.buildRepositoryDispatchURI2DeatilPage(re));
                    // appendALotOfLinkElements4EnrollmentCBBsNeverthelessTheyAreVisibleAndOrAccessibleOrNot(doc, links, XML_LINKTYPE_ENROLL, re);
                    leaf.setAttribute(XML_ACC, tmp); // access rights as attribute
                    leaf.appendChild(links); // append links container
                    final Element owners = doc.createElement(XML_OWN); // owners node
                    leaf.appendChild(owners);
                    final SecurityGroup sg = re.getOwnerGroup();
                    final List m = securityManager.getIdentitiesOfSecurityGroup(sg);
                    for (final Iterator iter = m.iterator(); iter.hasNext();) {
                        final Identity i = (Identity) iter.next();
                        cur = doc.createElement(XML_USR); // get all users
                        cur.appendChild(doc.createTextNode(i.getName()));
                        owners.appendChild(cur);
                    }
                    cur = doc.createElement(XML_CUSTOM);
                    /*
                     * Insert custom info here!
                     */
                    leaf.appendChild(cur);
                }
            }
        }
    }
    /**
     * Appends a {@code link} element with the given type attribute and URL
     * text to {@code parent}.
     */
    private void appendLinkElement(final Document doc, final Element parent, final String type, final String URL) {
        final Element link = doc.createElement(XML_LINK);
        link.appendChild(doc.createTextNode(URL));
        link.setAttribute(XML_LINKTYPE, type);
        parent.appendChild(link);
    }
    /**
     * Serves the exported catalog XML; falls back to a 404 media resource on
     * any error. Also (re-)initializes the export machinery on first request.
     */
    @Override
    public void execute(final HttpServletRequest request, final HttpServletResponse response, final String uriPrefix) {
        catalogExportModuleEBL = getCatalogExportModuleEBL();
        if (!this.reInitialize()) {
            log.error("Some Failsaves in reInitialization needed !");
        }
        try {
            log.info("Catalog XML file requested by " + request.getRemoteAddr());
            ServletUtil.serveResource(request, response, catalogExportModuleEBL.getCatalogXmlFileMediaResource());
        } catch (final Exception e) {
            log.error("Error requesting catalog export file: ", e);
            try {
                ServletUtil.serveResource(request, response, new NotFoundMediaResource(request.getRequestURI()));
            } catch (final Exception e1) {
                // what now???
                log.error("What now ???");
            }
        }
    }
    // Fetches the EBL bean from the Spring context on each request.
    private CatalogExportModuleEBL getCatalogExportModuleEBL() {
        return CoreSpringFactory.getBean(CatalogExportModuleEBL.class);
    }
}
| |
/* Copyright 2021 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openkilda.wfm.topology.floodlightrouter.bolts;
import org.openkilda.bluegreen.LifecycleEvent;
import org.openkilda.messaging.AliveRequest;
import org.openkilda.messaging.AliveResponse;
import org.openkilda.messaging.Message;
import org.openkilda.messaging.command.CommandMessage;
import org.openkilda.messaging.command.discovery.NetworkCommandData;
import org.openkilda.messaging.info.InfoData;
import org.openkilda.persistence.PersistenceManager;
import org.openkilda.persistence.repositories.KildaFeatureTogglesRepository;
import org.openkilda.wfm.AbstractBolt;
import org.openkilda.wfm.error.PipelineException;
import org.openkilda.wfm.share.bolt.MonotonicClock;
import org.openkilda.wfm.share.zk.ZkStreams;
import org.openkilda.wfm.share.zk.ZooKeeperBolt;
import org.openkilda.wfm.share.zk.ZooKeeperSpout;
import org.openkilda.wfm.topology.floodlightrouter.ComponentType;
import org.openkilda.wfm.topology.floodlightrouter.RegionAwareKafkaTopicSelector;
import org.openkilda.wfm.topology.floodlightrouter.Stream;
import org.openkilda.wfm.topology.floodlightrouter.TickId;
import org.openkilda.wfm.topology.floodlightrouter.service.FloodlightTracker;
import org.openkilda.wfm.topology.floodlightrouter.service.RegionMonitorCarrier;
import lombok.extern.slf4j.Slf4j;
import org.apache.storm.kafka.bolt.mapper.FieldNameBasedTupleToKafkaMapper;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import java.util.Set;
@Slf4j
public class RegionTrackerBolt extends AbstractBolt implements RegionMonitorCarrier {
public static final String BOLT_ID = ComponentType.KILDA_TOPO_DISCO_BOLT;
public static final String FIELD_ID_REGION = SpeakerToNetworkProxyBolt.FIELD_ID_REGION;
public static final String STREAM_SPEAKER_ID = Stream.SPEAKER_DISCO;
public static final String STREAM_REGION_NOTIFICATION_ID = "region";
public static final Fields STREAM_REGION_NOTIFICATION_FIELDS = new Fields(
FIELD_ID_REGION, AbstractBolt.FIELD_ID_CONTEXT);
private final String kafkaSpeakerTopic;
private final MonotonicClock.Match<TickId> monotonicTickMatch = new MonotonicClock.Match<>(
MonotonicTick.BOLT_ID, null);
private final MonotonicClock.Match<TickId> networkDumpTickMatch = new MonotonicClock.Match<>(
MonotonicTick.BOLT_ID, TickId.NETWORK_DUMP);
private final Set<String> floodlights;
private final long floodlightAliveTimeout;
private final long floodlightAliveInterval;
private transient KildaFeatureTogglesRepository featureTogglesRepository;
private transient FloodlightTracker floodlightTracker;
/**
 * @param kafkaSpeakerTopic topic used when emitting speaker-bound messages
 * @param persistenceManager passed to {@link AbstractBolt}; used later to
 *            create the feature-toggles repository in {@code init()}
 * @param floodlights region identifiers of all known floodlight speakers
 * @param floodlightAliveTimeout timeout for declaring a region dead
 * @param floodlightAliveInterval interval between alive requests
 */
public RegionTrackerBolt(
        String kafkaSpeakerTopic, PersistenceManager persistenceManager, Set<String> floodlights,
        long floodlightAliveTimeout, long floodlightAliveInterval) {
    super(persistenceManager);
    this.kafkaSpeakerTopic = kafkaSpeakerTopic;
    this.floodlights = floodlights;
    this.floodlightAliveTimeout = floodlightAliveTimeout;
    this.floodlightAliveInterval = floodlightAliveInterval;
}
/**
 * Routes an incoming tuple: lifecycle events from the ZooKeeper spout are
 * always handled; everything else is ignored while inactive; otherwise
 * ticks, periodic network-dump ticks and speaker-proxy notifications are
 * dispatched to their handlers and the rest falls through to the parent.
 *
 * @param input incoming Storm tuple
 * @throws Exception propagated from handlers / {@code super.dispatch}
 */
@Override
protected void dispatch(Tuple input) throws Exception {
    // Use the cached component name for both comparisons (the original
    // re-queried input.getSourceComponent() for the ZK check).
    String source = input.getSourceComponent();
    if (ZooKeeperSpout.SPOUT_ID.equals(source)) {
        LifecycleEvent event = (LifecycleEvent) input.getValueByField(ZooKeeperSpout.FIELD_ID_LIFECYCLE_EVENT);
        if (event != null && shouldHandleLifeCycleEvent(event.getSignal())) {
            handleLifeCycleEvent(event);
        }
    } else if (!active) {
        log.debug("Because the topology is inactive ignoring input tuple: {}", input);
    } else if (monotonicTickMatch.isTick(input)) {
        handleTick();
    } else if (networkDumpTickMatch.isTick(input)) {
        handleNetworkDump();
    } else if (SpeakerToNetworkProxyBolt.BOLT_ID.equals(source)) {
        handleNetworkNotification(input);
    } else {
        // "active" is guaranteed true on this branch (checked above), so the
        // former nested "if (active)" guard was dead code and is removed.
        super.dispatch(input);
    }
}
/**
 * All recognized tuples are consumed by {@code dispatch()}, so anything
 * reaching this default handler is unexpected and reported as unhandled.
 */
@Override
protected void handleInput(Tuple input) {
    unhandledInput(input);
}
/**
 * Declares the three output streams: the speaker stream (key/message plus
 * topic/region for the region-aware Kafka selector), the region
 * notification stream, and the ZooKeeper state stream.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer outputManager) {
    Fields fields = new Fields(
            FieldNameBasedTupleToKafkaMapper.BOLT_KEY, FieldNameBasedTupleToKafkaMapper.BOLT_MESSAGE,
            RegionAwareKafkaTopicSelector.FIELD_ID_TOPIC, RegionAwareKafkaTopicSelector.FIELD_ID_REGION);
    outputManager.declareStream(STREAM_SPEAKER_ID, fields);
    outputManager.declareStream(STREAM_REGION_NOTIFICATION_ID, STREAM_REGION_NOTIFICATION_FIELDS);
    outputManager.declareStream(ZkStreams.ZK.toString(), new Fields(ZooKeeperBolt.FIELD_ID_STATE,
            ZooKeeperBolt.FIELD_ID_CONTEXT));
}
/**
 * Creates the transient collaborators (repository and floodlight tracker)
 * after deserialization on the worker; this bolt itself acts as the
 * tracker's carrier.
 */
@Override
public void init() {
    super.init();
    featureTogglesRepository = persistenceManager.getRepositoryFactory().createFeatureTogglesRepository();
    floodlightTracker = new FloodlightTracker(this, floodlights, floodlightAliveTimeout, floodlightAliveInterval);
}
// On each clock tick: send alive requests where due, then expire regions
// whose alive responses have timed out.
private void handleTick() {
    floodlightTracker.emitAliveRequests();
    floodlightTracker.handleAliveExpiration();
}
/**
 * Issues a periodic network dump request to every known region, sharing one
 * correlation/dump id; skipped entirely when the feature toggle disables
 * periodic sync.
 */
private void handleNetworkDump() {
    if (!queryPeriodicSyncFeatureToggle()) {
        log.warn("Skip periodic network sync (disabled by feature toggle)");
        return;
    }
    log.debug("Do periodic network dump request");
    final String dumpId = getCommandContext().getCorrelationId();
    floodlights.forEach(region -> emitNetworkDumpRequest(region, dumpId));
}
private void handleNetworkNotification(Tuple input) throws PipelineException {
String stream = input.getSourceStreamId();
if (SpeakerToNetworkProxyBolt.STREAM_ALIVE_EVIDENCE_ID.equals(stream)) {
handleAliveEvidenceNotification(input);
} else if (SpeakerToNetworkProxyBolt.STREAM_REGION_NOTIFICATION_ID.equals(stream)) {
handleRegionNotification(input);
} else {
unhandledInput(input);
}
}
private void handleAliveEvidenceNotification(Tuple input) throws PipelineException {
String region = pullSpeakerRegion(input);
long timestamp = pullValue(input, SpeakerToNetworkProxyBolt.FIELD_ID_TIMESTAMP, Long.class);
floodlightTracker.handleAliveEvidence(region, timestamp);
}
private void handleRegionNotification(Tuple input) throws PipelineException {
String region = pullSpeakerRegion(input);
InfoData payload = pullValue(input, SpeakerToNetworkProxyBolt.FIELD_ID_PAYLOAD, InfoData.class);
if (!handleRegionNotification(region, payload)) {
unhandledInput(input);
}
}
private boolean handleRegionNotification(String region, InfoData payload) {
if (payload instanceof AliveResponse) {
floodlightTracker.handleAliveResponse(region, (AliveResponse) payload);
} else {
return false;
}
return true;
}
// SwitchStatusCarrier implementation
@Override
public void emitSpeakerAliveRequest(String region) {
AliveRequest request = new AliveRequest();
CommandMessage message = new CommandMessage(
request, System.currentTimeMillis(),
getCommandContext().fork(String.format("alive-request(%s)", region)).getCorrelationId());
getOutput().emit(
STREAM_SPEAKER_ID, getCurrentTuple(), makeSpeakerTuple(null, message, region));
}
    /**
     * Send a network dump request for the target region without a shared dump id
     * (delegates to the two-argument variant with a null dump id).
     */
    @Override
    public void emitNetworkDumpRequest(String region) {
        emitNetworkDumpRequest(region, null);
    }
/**
* Send network dump requests for target region.
*/
@Override
public void emitNetworkDumpRequest(String region, String dumpId) {
String correlationId = getCommandContext().fork(String.format("network-dump(%s)", region)).getCorrelationId();
CommandMessage command = new CommandMessage(
new NetworkCommandData(dumpId), System.currentTimeMillis(), correlationId);
log.info("Send network dump request (correlation-id: {})", correlationId);
getOutput().emit(STREAM_SPEAKER_ID, getCurrentTuple(), makeSpeakerTuple(correlationId, command, region));
}
    /**
     * Notify downstream consumers (via the region notification stream) that {@code region}
     * became unavailable.
     */
    @Override
    public void emitRegionBecameUnavailableNotification(String region) {
        getOutput().emit(STREAM_REGION_NOTIFICATION_ID, getCurrentTuple(), makeRegionNotificationTuple(region));
    }
    // Read the "floodlight route periodic sync" feature toggle, falling back to the
    // repository's defaults when no explicit value is stored.
    private boolean queryPeriodicSyncFeatureToggle() {
        return featureTogglesRepository.getOrDefault().getFloodlightRoutePeriodicSync();
    }
    // Extract the speaker region field from a tuple produced by SpeakerToNetworkProxyBolt.
    private String pullSpeakerRegion(Tuple tuple) throws PipelineException {
        return pullValue(tuple, SpeakerToNetworkProxyBolt.FIELD_ID_REGION, String.class);
    }
    // Assemble a kafka-bound tuple: key, payload, destination topic and floodlight region.
    private Values makeSpeakerTuple(String key, Message payload, String region) {
        return new Values(key, payload, kafkaSpeakerTopic, region);
    }
    // A region notification tuple carries the region name plus a command context forked
    // with that region as the branch name.
    private Values makeRegionNotificationTuple(String region) {
        return new Values(region, getCommandContext().fork(region));
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.managedblockchain.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * The actions to carry out if a proposal is <code>APPROVED</code>.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/ProposalActions" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ProposalActions implements Serializable, Cloneable, StructuredPojo {

    /**
     * Invite actions performed for an <code>APPROVED</code> proposal: each invites an AWS account to create a member
     * and join the network.
     */
    private java.util.List<InviteAction> invitations;

    /**
     * Remove actions performed for an <code>APPROVED</code> proposal: each removes a member and all associated member
     * resources from the network.
     */
    private java.util.List<RemoveAction> removals;

    /**
     * Returns the invite actions performed for an <code>APPROVED</code> proposal.
     *
     * @return The actions to perform for an <code>APPROVED</code> proposal to invite an AWS account to create a member
     *         and join the network.
     */
    public java.util.List<InviteAction> getInvitations() {
        return invitations;
    }

    /**
     * Replaces the invite actions performed for an <code>APPROVED</code> proposal. A null argument clears the list;
     * otherwise the collection is defensively copied.
     *
     * @param invitations
     *        The actions to perform for an <code>APPROVED</code> proposal to invite an AWS account to create a member
     *        and join the network.
     */
    public void setInvitations(java.util.Collection<InviteAction> invitations) {
        this.invitations = (invitations == null) ? null : new java.util.ArrayList<InviteAction>(invitations);
    }

    /**
     * Appends invite actions for an <code>APPROVED</code> proposal.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setInvitations(java.util.Collection)} or {@link #withInvitations(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param invitations
     *        The actions to perform for an <code>APPROVED</code> proposal to invite an AWS account to create a member
     *        and join the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProposalActions withInvitations(InviteAction... invitations) {
        if (this.invitations == null) {
            this.invitations = new java.util.ArrayList<InviteAction>(invitations.length);
        }
        java.util.Collections.addAll(this.invitations, invitations);
        return this;
    }

    /**
     * Replaces the invite actions performed for an <code>APPROVED</code> proposal (fluent variant of
     * {@link #setInvitations(java.util.Collection)}).
     *
     * @param invitations
     *        The actions to perform for an <code>APPROVED</code> proposal to invite an AWS account to create a member
     *        and join the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProposalActions withInvitations(java.util.Collection<InviteAction> invitations) {
        setInvitations(invitations);
        return this;
    }

    /**
     * Returns the remove actions performed for an <code>APPROVED</code> proposal.
     *
     * @return The actions to perform for an <code>APPROVED</code> proposal to remove a member from the network, which
     *         deletes the member and all associated member resources from the network.
     */
    public java.util.List<RemoveAction> getRemovals() {
        return removals;
    }

    /**
     * Replaces the remove actions performed for an <code>APPROVED</code> proposal. A null argument clears the list;
     * otherwise the collection is defensively copied.
     *
     * @param removals
     *        The actions to perform for an <code>APPROVED</code> proposal to remove a member from the network, which
     *        deletes the member and all associated member resources from the network.
     */
    public void setRemovals(java.util.Collection<RemoveAction> removals) {
        this.removals = (removals == null) ? null : new java.util.ArrayList<RemoveAction>(removals);
    }

    /**
     * Appends remove actions for an <code>APPROVED</code> proposal.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setRemovals(java.util.Collection)} or {@link #withRemovals(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param removals
     *        The actions to perform for an <code>APPROVED</code> proposal to remove a member from the network, which
     *        deletes the member and all associated member resources from the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProposalActions withRemovals(RemoveAction... removals) {
        if (this.removals == null) {
            this.removals = new java.util.ArrayList<RemoveAction>(removals.length);
        }
        java.util.Collections.addAll(this.removals, removals);
        return this;
    }

    /**
     * Replaces the remove actions performed for an <code>APPROVED</code> proposal (fluent variant of
     * {@link #setRemovals(java.util.Collection)}).
     *
     * @param removals
     *        The actions to perform for an <code>APPROVED</code> proposal to remove a member from the network, which
     *        deletes the member and all associated member resources from the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProposalActions withRemovals(java.util.Collection<RemoveAction> removals) {
        setRemovals(removals);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getInvitations() != null) {
            sb.append("Invitations: ").append(getInvitations()).append(",");
        }
        if (getRemovals() != null) {
            sb.append("Removals: ").append(getRemovals());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ProposalActions)) {
            return false;
        }
        ProposalActions other = (ProposalActions) obj;
        // Two instances are equal when both list properties match (null-safe comparison).
        return java.util.Objects.equals(this.getInvitations(), other.getInvitations())
                && java.util.Objects.equals(this.getRemovals(), other.getRemovals());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds the properties with the same 31-based scheme as the generated code.
        return java.util.Objects.hash(getInvitations(), getRemovals());
    }

    @Override
    public ProposalActions clone() {
        try {
            return (ProposalActions) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.managedblockchain.model.transform.ProposalActionsMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.