repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
flutterwireless/ArduinoCodebase | app/src/processing/app/EditorLineStatus.java | 3672 | /* -*- mode: java; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
Part of the Processing project - http://processing.org
Copyright (c) 2005-07 Ben Fry and Casey Reas
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package processing.app;
import processing.app.syntax.*;
import java.awt.*;
import java.awt.geom.Rectangle2D;
import java.util.Map;
import javax.swing.*;
/**
* Li'l status bar fella that shows the line number.
*/
/**
 * Li'l status bar fella that shows the line number (or selected line range)
 * on the left, and the current board name / serial port on the right.
 */
public class EditorLineStatus extends JComponent {
  JEditTextArea textarea;
  int start = -1, stop;

  Image resize;

  Color foreground;
  Color background;
  Color messageForeground;  // color for the "<board> on <port>" message

  Font font;
  int high;  // fixed component height, taken from the theme

  String text = "";
  String name = "";
  String serialport = "";

  public EditorLineStatus(JEditTextArea textarea) {
    this.textarea = textarea;
    textarea.editorLineStatus = this;

    background = Theme.getColor("linestatus.bgcolor");
    font = Theme.getFont("linestatus.font");
    foreground = Theme.getColor("linestatus.color");
    // BUGFIX: messageForeground was never assigned, so paintComponent()
    // passed null to Graphics.setColor(). Default it to the regular
    // foreground color, which is what was effectively drawn before.
    messageForeground = foreground;
    high = Theme.getInteger("linestatus.height");

    if (Base.isMacOS()) {
      resize = Base.getThemeImage("resize.gif", this);
    }
    //linestatus.bgcolor = #000000
    //linestatus.font = SansSerif,plain,10
    //linestatus.color = #FFFFFF
  }

  /**
   * Update the displayed selection: a single line shows as "N", a range as
   * "N - M" (0-based internal values are converted to 1-based for display).
   * Repaints only when the selection actually changed.
   */
  public void set(int newStart, int newStop) {
    if ((newStart == start) && (newStop == stop)) return;
    start = newStart;
    stop = newStop;

    if (start == stop) {
      text = String.valueOf(start + 1);
    } else {
      text = (start + 1) + " - " + (stop + 1);
    }
    repaint();
  }

  public void paintComponent(Graphics g) {
    // Lazily resolve the board name and serial port on first paint.
    // BUGFIX: the previous code compared with == against "", which only
    // worked by accident of string-literal interning; use isEmpty() so a
    // non-interned empty string (e.g. from Preferences) behaves correctly.
    if (name.isEmpty() && serialport.isEmpty()) {
      Map<String, String> boardPreferences = Base.getBoardPreferences();
      if (boardPreferences != null)
        setBoardName(boardPreferences.get("name"));
      else
        setBoardName("-");
      setSerialPort(Preferences.get("serial.port"));
    }
    g.setColor(background);
    Dimension size = getSize();
    g.fillRect(0, 0, size.width, size.height);

    g.setFont(font);
    g.setColor(foreground);
    int baseline = (high + g.getFontMetrics().getAscent()) / 2;
    g.drawString(text, 6, baseline);

    g.setColor(messageForeground);
    String tmp = name + " on " + serialport;

    // right-align the message, leaving room for the mac resize grip
    Rectangle2D bounds = g.getFontMetrics().getStringBounds(tmp, null);
    g.drawString(tmp, size.width - (int) bounds.getWidth() - 20, baseline);

    if (Base.isMacOS()) {
      g.drawImage(resize, size.width - 20, 0, this);
    }
  }

  /** Board name may legitimately be missing from the preferences map; show "-" instead of crashing later. */
  public void setBoardName(String name) { this.name = (name == null) ? "-" : name; }

  /** Serial port may be unset in preferences; keep the field non-null. */
  public void setSerialPort(String serialport) { this.serialport = (serialport == null) ? "" : serialport; }

  public Dimension getPreferredSize() {
    return new Dimension(300, high);
  }

  public Dimension getMinimumSize() {
    return getPreferredSize();
  }

  public Dimension getMaximumSize() {
    return new Dimension(3000, high);
  }
}
| lgpl-2.1 |
elsiklab/intermine | bio/core/main/src/org/intermine/bio/util/OrganismRepository.java | 10049 | package org.intermine.bio.util;
/*
* Copyright (C) 2002-2017 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.collections.keyvalue.MultiKey;
/**
* A class to hold information about organisms.
* @author Kim Rutherford
*/
/**
 * A class to hold information about organisms.  Data is loaded lazily, once,
 * from organism_config.properties on the class path, then exposed through
 * lookups keyed on taxon id, abbreviation, short name, genus/species and
 * UniProt abbreviation.
 * @author Kim Rutherford
 */
public final class OrganismRepository
{
    private static OrganismRepository or = null;
    private Map<Integer, OrganismData> taxonMap = new HashMap<Integer, OrganismData>();
    private Map<String, OrganismData> abbreviationMap = new HashMap<String, OrganismData>();
    private Map<String, OrganismData> shortNameMap = new HashMap<String, OrganismData>();
    private Map<MultiKey, OrganismData> genusSpeciesMap = new HashMap<MultiKey, OrganismData>();
    // strain taxon id -> the organism the strain belongs to
    private Map<Integer, OrganismData> strains = new HashMap<Integer, OrganismData>();
    // organism taxon id (string form) -> a strain taxon id for that organism
    private Map<String, String> organismsWithStrains = new HashMap<String, String>();
    private static Map<String, OrganismData> uniprotToTaxon = new HashMap<String, OrganismData>();

    private static final String PROP_FILE = "organism_config.properties";
    private static final String PREFIX = "taxon";
    private static final String ABBREVIATION = "abbreviation";
    private static final String GENUS = "genus";
    private static final String SPECIES = "species";
    private static final String STRAINS = "strains";
    private static final String ENSEMBL = "ensemblPrefix";
    private static final String UNIPROT = "uniprot";

    // matches property keys of the form: taxon.<digits>.<field>
    private static final String REGULAR_EXPRESSION =
        PREFIX + "\\.(\\d+)\\.(" + SPECIES + "|" + GENUS + "|" + ABBREVIATION + "|" + STRAINS
        + "|" + ENSEMBL + "|" + UNIPROT + ")";

    private OrganismRepository() {
        //disable external instantiation
    }

    /**
     * Return an OrganismRepository created from a properties file in the class path.
     * Synchronized so concurrent first callers cannot observe a partially
     * initialised singleton.
     * @return the OrganismRepository
     */
    @SuppressWarnings("unchecked")
    public static synchronized OrganismRepository getOrganismRepository() {
        if (or == null) {
            Properties props = new Properties();
            try {
                InputStream propsResource =
                    OrganismRepository.class.getClassLoader().getResourceAsStream(PROP_FILE);
                if (propsResource == null) {
                    throw new RuntimeException("can't find " + PROP_FILE + " in class path");
                }
                props.load(propsResource);
            } catch (IOException e) {
                throw new RuntimeException("Problem loading properties '" + PROP_FILE + "'", e);
            }
            or = new OrganismRepository();
            Enumeration<String> propNames = (Enumeration<String>) props.propertyNames();
            Pattern pattern = Pattern.compile(REGULAR_EXPRESSION);
            while (propNames.hasMoreElements()) {
                String name = propNames.nextElement();
                // guard clauses replace the previous deeply nested if/else pyramid
                if (!name.startsWith(PREFIX)) {
                    throw new RuntimeException("properties in " + PROP_FILE + " must start with "
                            + PREFIX + ".");
                }
                Matcher matcher = pattern.matcher(name);
                if (!matcher.matches()) {
                    throw new RuntimeException("unable to parse organism property key: " + name);
                }
                String taxonIdString = matcher.group(1);
                // parseInt avoids the Integer.valueOf(...).intValue() boxing round-trip;
                // the regex guarantees the group is all digits
                int taxonId = Integer.parseInt(taxonIdString);
                String fieldName = matcher.group(2);
                OrganismData od = or.getOrganismDataByTaxonInternal(taxonId);
                final String attributeValue = props.getProperty(name);
                if (fieldName.equals(ABBREVIATION)) {
                    od.setAbbreviation(attributeValue);
                    // abbreviation lookups are case-insensitive, so store lower-cased
                    or.abbreviationMap.put(attributeValue.toLowerCase(), od);
                } else if (fieldName.equals(STRAINS)) {
                    // renamed from "strains" so the local no longer echoes the field name
                    String[] strainIds = attributeValue.split(" ");
                    for (String strain : strainIds) {
                        try {
                            or.strains.put(Integer.valueOf(strain), od);
                            or.organismsWithStrains.put(taxonIdString, strain);
                        } catch (NumberFormatException e) {
                            // include the offending value so the bad config entry is findable
                            throw new NumberFormatException("strain taxon ID must be a number, got: "
                                    + strain);
                        }
                    }
                } else if (fieldName.equals(ENSEMBL)) {
                    od.setEnsemblPrefix(attributeValue);
                } else if (fieldName.equals(UNIPROT)) {
                    od.setUniprot(attributeValue);
                    uniprotToTaxon.put(attributeValue, od);
                } else if (fieldName.equals(SPECIES)) {
                    od.setSpecies(attributeValue);
                } else if (fieldName.equals(GENUS)) {
                    od.setGenus(attributeValue);
                } else {
                    // unreachable while the regex and this chain list the same fields
                    throw new RuntimeException("internal error didn't match: " + fieldName);
                }
            }
            for (OrganismData od : or.taxonMap.values()) {
                or.genusSpeciesMap.put(new MultiKey(od.getGenus(), od.getSpecies()), od);
                // we have some organisms from uniprot that don't have a short name
                if (od.getShortName() != null) {
                    or.shortNameMap.put(od.getShortName(), od);
                }
            }
        }
        return or;
    }

    /**
     * Look up OrganismData objects by taxon id. Create and return a new OrganismData object if
     * there is no existing one.
     * @param taxonId the taxon id
     * @return the OrganismData
     */
    public OrganismData getOrganismDataByTaxonInternal(int taxonId) {
        // autoboxing replaces the deprecated new Integer(...) constructor
        OrganismData od = taxonMap.get(taxonId);
        if (od == null) {
            od = new OrganismData();
            od.setTaxonId(taxonId);
            taxonMap.put(taxonId, od);
        }
        return od;
    }

    /**
     * Look up OrganismData objects by taxon id. If there is no taxon, look in strains. Return
     * null if there is no such organism.
     *
     * @param taxonId the taxon id
     * @return the OrganismData
     */
    public OrganismData getOrganismDataByTaxon(int taxonId) {
        OrganismData od = taxonMap.get(taxonId);
        if (od == null) {
            od = strains.get(taxonId);
        }
        return od;
    }

    /**
     * Look up OrganismData objects by abbreviation, abbreviations are not case sensitive.
     * Return null if there is no such organism.
     * @param abbreviation the abbreviation
     * @return the OrganismData
     */
    public OrganismData getOrganismDataByAbbreviation(String abbreviation) {
        if (abbreviation == null) {
            return null;
        }
        return abbreviationMap.get(abbreviation.toLowerCase());
    }

    /**
     * Look up OrganismData objects by shortName, short names are case sensitive.
     * Return null if there is no such organism.
     * @param shortName the short name. e.g. "H. sapiens"
     * @return the OrganismData
     */
    public OrganismData getOrganismDataByShortName(String shortName) {
        if (shortName == null) {
            return null;
        }
        return shortNameMap.get(shortName);
    }

    /**
     * Look up OrganismData objects by genus and species - both must match. Returns null if there
     * is no OrganismData in this OrganismRepository that matches.
     * @param genus the genus
     * @param species the species
     * @return the OrganismData
     */
    public OrganismData getOrganismDataByGenusSpecies(String genus, String species) {
        MultiKey key = new MultiKey(genus, species);
        return genusSpeciesMap.get(key);
    }

    /**
     * Look up OrganismData objects by Uniprot abbreviation, eg HUMAN or DROME.
     * Returns null if there is no OrganismData in this OrganismRepository that matches.
     * @param abbreviation the UniProt abbreviation, eg. HUMAN or DROME
     * @return the OrganismData
     */
    public static OrganismData getOrganismDataByUniprot(String abbreviation) {
        return uniprotToTaxon.get(abbreviation);
    }

    /**
     * Look up OrganismData objects by a full name that is genus <space> species. Returns null if
     * there is no OrganismData in this OrganismRepository that matches.
     * @param fullName the genus and species separated by a space
     * @return the OrganismData
     */
    public OrganismData getOrganismDataByFullName(String fullName) {
        if (fullName.indexOf(" ") == -1) {
            return null;
        }
        // split at most once so multi-word species epithets stay intact
        String genus = fullName.split(" ", 2)[0];
        String species = fullName.split(" ", 2)[1];
        return getOrganismDataByGenusSpecies(genus, species);
    }

    /**
     * Get strains for given taxon ID
     * @param taxonString taxon ID for organism
     * @return taxonId for strain, or null if the organism has no configured strain
     */
    public String getStrain(String taxonString) {
        return organismsWithStrains.get(taxonString);
    }
}
| lgpl-2.1 |
ThiagoGarciaAlves/intellij-community | platform/lang-impl/src/com/intellij/ide/util/AbstractTreeClassChooserDialog.java | 19626 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.util;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.projectView.BaseProjectTreeBuilder;
import com.intellij.ide.projectView.impl.AbstractProjectTreeStructure;
import com.intellij.ide.projectView.impl.ProjectAbstractTreeStructureBase;
import com.intellij.ide.projectView.impl.ProjectTreeBuilder;
import com.intellij.ide.util.gotoByName.*;
import com.intellij.ide.util.treeView.AlphaComparator;
import com.intellij.ide.util.treeView.NodeRenderer;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.ex.IdeFocusTraversalPolicy;
import com.intellij.pom.Navigatable;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.presentation.java.SymbolPresentationUtil;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.ui.*;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Processor;
import com.intellij.util.Query;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.FindSymbolParameters;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/**
 * Generic modal chooser dialog offering two tabs for picking a named PSI element:
 * a "search by name" tab backed by a {@link ChooseByNamePanel} and a "project"
 * tab showing the project tree.  Subclasses supply the concrete element type,
 * how to find elements by name, and how to extract a selection from a tree node.
 * <p>
 * NOTE(review): statement order in the constructor and in createCenterPanel()
 * is significant (builder before listeners, init() before select()); code is
 * intentionally left unchanged and only documented.
 */
public abstract class AbstractTreeClassChooserDialog<T extends PsiNamedElement> extends DialogWrapper implements TreeChooser<T> {
  @NotNull private final Project myProject;
  private final GlobalSearchScope myScope;
  @NotNull private final Filter<T> myClassFilter;
  private final Class<T> myElementClass;
  @Nullable private final T myBaseClass;       // when non-null, choices are limited to its inheritors
  private final boolean myIsShowMembers;
  private final boolean myIsShowLibraryContents;
  private Tree myTree;
  private T mySelectedClass = null;            // result exposed via getSelected() after OK
  private BaseProjectTreeBuilder myBuilder;
  private TabbedPaneWrapper myTabbedPane;
  private ChooseByNamePanel myGotoByNamePanel;
  private T myInitialClass;                    // pre-selected element; cleared once shown in the list

  public AbstractTreeClassChooserDialog(String title, Project project, final Class<T> elementClass) {
    this(title, project, elementClass, null);
  }

  public AbstractTreeClassChooserDialog(String title, Project project, final Class<T> elementClass, @Nullable T initialClass) {
    this(title, project, GlobalSearchScope.projectScope(project), elementClass, null, initialClass);
  }

  public AbstractTreeClassChooserDialog(String title,
                                        @NotNull Project project,
                                        GlobalSearchScope scope,
                                        @NotNull Class<T> elementClass,
                                        @Nullable Filter<T> classFilter,
                                        @Nullable T initialClass) {
    this(title, project, scope, elementClass, classFilter, null, initialClass, false, true);
  }

  /**
   * Full-control constructor; the other constructors delegate here.
   *
   * @param classFilter          accepts/rejects candidates; null means accept everything
   * @param baseClass            when non-null, the by-name model is restricted to its inheritors
   * @param isShowMembers        whether the project tree shows class members
   * @param isShowLibraryContents whether the project tree includes library entries
   */
  public AbstractTreeClassChooserDialog(String title,
                                        @NotNull Project project,
                                        GlobalSearchScope scope,
                                        @NotNull Class<T> elementClass,
                                        @Nullable Filter<T> classFilter,
                                        @Nullable T baseClass,
                                        @Nullable T initialClass,
                                        boolean isShowMembers,
                                        boolean isShowLibraryContents) {
    super(project, true);
    myScope = scope;
    myElementClass = elementClass;
    myClassFilter = classFilter == null ? allFilter() : classFilter;
    myBaseClass = baseClass;
    myInitialClass = initialClass;
    myIsShowMembers = isShowMembers;
    myIsShowLibraryContents = isShowLibraryContents;
    setTitle(title);
    myProject = project;
    // init() builds the UI (calls createCenterPanel), so it must precede select()
    init();
    if (initialClass != null) {
      select(initialClass);
    }
    handleSelectionChanged();
  }

  /** Filter used when the caller supplied none: accepts every element. */
  private Filter<T> allFilter() {
    return new Filter<T>() {
      @Override
      public boolean isAccepted(T element) {
        return true;
      }
    };
  }

  /**
   * Builds the tabbed UI: tab 0 is the by-name chooser, tab 1 the project tree.
   * Both tabs funnel into doOKAction()/handleSelectionChanged().
   */
  @Override
  protected JComponent createCenterPanel() {
    final DefaultTreeModel model = new DefaultTreeModel(new DefaultMutableTreeNode());
    myTree = new Tree(model);
    // anonymous structure fixes the tree presentation options for this dialog
    ProjectAbstractTreeStructureBase treeStructure = new AbstractProjectTreeStructure(myProject) {
      @Override
      public boolean isFlattenPackages() {
        return false;
      }

      @Override
      public boolean isShowMembers() {
        return myIsShowMembers;
      }

      @Override
      public boolean isHideEmptyMiddlePackages() {
        return true;
      }

      @Override
      public boolean isAbbreviatePackageNames() {
        return false;
      }

      @Override
      public boolean isShowLibraryContents() {
        return myIsShowLibraryContents;
      }

      @Override
      public boolean isShowModules() {
        return false;
      }
    };
    myBuilder = new ProjectTreeBuilder(myProject, myTree, model, AlphaComparator.INSTANCE, treeStructure);
    myTree.setRootVisible(false);
    myTree.setShowsRootHandles(true);
    myTree.expandRow(0);
    myTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
    myTree.setCellRenderer(new NodeRenderer());
    UIUtil.setLineStyleAngled(myTree);

    JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(myTree);
    scrollPane.setPreferredSize(JBUI.size(500, 300));
    scrollPane.putClientProperty(UIUtil.KEEP_BORDER_SIDES, SideBorder.RIGHT | SideBorder.LEFT | SideBorder.BOTTOM);

    // Enter on the tree accepts the current selection
    myTree.addKeyListener(new KeyAdapter() {
      @Override
      public void keyPressed(KeyEvent e) {
        if (KeyEvent.VK_ENTER == e.getKeyCode()) {
          doOKAction();
        }
      }
    });

    // double-click on an already-selected path accepts it
    new DoubleClickListener() {
      @Override
      protected boolean onDoubleClick(MouseEvent event) {
        TreePath path = myTree.getPathForLocation(event.getX(), event.getY());
        if (path != null && myTree.isPathSelected(path)) {
          doOKAction();
          return true;
        }
        return false;
      }
    }.installOn(myTree);

    myTree.addTreeSelectionListener(
      new TreeSelectionListener() {
        @Override
        public void valueChanged(TreeSelectionEvent e) {
          handleSelectionChanged();
        }
      }
    );

    new TreeSpeedSearch(myTree);

    myTabbedPane = new TabbedPaneWrapper(getDisposable());

    final JPanel dummyPanel = new JPanel(new BorderLayout());
    String name = null;
    /*
    if (myInitialClass != null) {
      name = myInitialClass.getName();
    }
    */
    // the panel hides its own text field and embeds itself into dummyPanel in initUI()
    myGotoByNamePanel = new ChooseByNamePanel(myProject, createChooseByNameModel(), name, myScope.isSearchInLibraries(), getContext()) {
      @Override
      protected void showTextFieldPanel() {
      }

      @Override
      protected void close(boolean isOk) {
        super.close(isOk);

        if (isOk) {
          doOKAction();
        }
        else {
          doCancelAction();
        }
      }

      @NotNull
      @Override
      protected Set<Object> filter(@NotNull Set<Object> elements) {
        return doFilter(elements);
      }

      @Override
      protected void initUI(ChooseByNamePopupComponent.Callback callback, ModalityState modalityState, boolean allowMultipleSelection) {
        super.initUI(callback, modalityState, allowMultipleSelection);
        dummyPanel.add(myGotoByNamePanel.getPanel(), BorderLayout.CENTER);
        IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
          IdeFocusManager.getGlobalInstance()
            .requestFocus(IdeFocusTraversalPolicy.getPreferredFocusedComponent(myGotoByNamePanel.getPanel()), true);
        });
      }

      @Override
      protected void showList() {
        super.showList();
        // pre-select the initial class the first time the list appears, then forget it
        if (myInitialClass != null && myList.getModel().getSize() > 0) {
          myList.setSelectedValue(myInitialClass, true);
          myInitialClass = null;
        }
      }

      @Override
      protected void chosenElementMightChange() {
        handleSelectionChanged();
      }
    };

    Disposer.register(myDisposable, myGotoByNamePanel);

    myTabbedPane.addTab(IdeBundle.message("tab.chooser.search.by.name"), dummyPanel);
    myTabbedPane.addTab(IdeBundle.message("tab.chooser.project"), scrollPane);

    myGotoByNamePanel.invoke(new MyCallback(), getModalityState(), false);

    myTabbedPane.addChangeListener(
      new ChangeListener() {
        @Override
        public void stateChanged(ChangeEvent e) {
          handleSelectionChanged();
        }
      }
    );

    return myTabbedPane.getComponent();
  }

  /** Keeps only elements of the expected class that pass the user filter (unchecked cast is guarded by isInstance). */
  private Set<Object> doFilter(Set<Object> elements) {
    Set<Object> result = new LinkedHashSet<>();
    for (Object o : elements) {
      if (myElementClass.isInstance(o) && getFilter().isAccepted((T)o)) {
        result.add(o);
      }
    }
    return result;
  }

  /**
   * Picks the by-name model: the plain goto-class model, or the subclass model
   * when a base class was given and an inheritors provider exists.
   */
  protected ChooseByNameModel createChooseByNameModel() {
    if (myBaseClass == null) {
      return new MyGotoClassModel<>(myProject, this);
    }
    else {
      BaseClassInheritorsProvider<T> inheritorsProvider = getInheritorsProvider(myBaseClass);
      if (inheritorsProvider != null) {
        return new SubclassGotoClassModel<>(myProject, this, inheritorsProvider);
      }
      else {
        throw new IllegalStateException("inheritors provider is null");
      }
    }
  }

  /**
   * Makes sense only in case of not null base class.
   *
   * Subclasses that support a base class MUST override this; the default
   * null return makes createChooseByNameModel() throw IllegalStateException.
   *
   * @param baseClass the base class whose inheritors should be offered
   * @return a provider of inheritors, or null when unsupported
   */
  @Nullable
  protected BaseClassInheritorsProvider<T> getInheritorsProvider(@NotNull T baseClass) {
    return null;
  }

  // OK button is enabled only while something acceptable is selected
  private void handleSelectionChanged() {
    T selection = calcSelectedClass();
    setOKActionEnabled(selection != null);
  }

  @Override
  protected void doOKAction() {
    mySelectedClass = calcSelectedClass();
    if (mySelectedClass == null) return;
    // last-line defence: the filter is re-checked even though lists were pre-filtered
    if (!myClassFilter.isAccepted(mySelectedClass)) {
      Messages.showErrorDialog(myTabbedPane.getComponent(),
                               SymbolPresentationUtil.getSymbolPresentableText(mySelectedClass) + " is not acceptable");
      return;
    }
    super.doOKAction();
  }

  @Override
  public T getSelected() {
    return mySelectedClass;
  }

  @Override
  public void select(@NotNull final T aClass) {
    selectElementInTree(aClass);
  }

  @Override
  public void selectDirectory(@NotNull final PsiDirectory directory) {
    selectElementInTree(directory);
  }

  @Override
  public void showDialog() {
    show();
  }

  /** Popup variant of the chooser; navigates to the chosen element instead of closing a dialog. */
  @Override
  public void showPopup() {
    //todo leak via not shown dialog?
    ChooseByNamePopup popup = ChooseByNamePopup.createPopup(myProject, createChooseByNameModel(), getContext());
    popup.invoke(new ChooseByNamePopupComponent.Callback() {
      @Override
      public void elementChosen(Object element) {
        mySelectedClass = (T)element;
        ((Navigatable)element).navigate(true);
      }
    }, getModalityState(), true);
  }

  // context element for the by-name panel: base class wins over initial class
  private T getContext() {
    return myBaseClass != null ? myBaseClass : myInitialClass != null ? myInitialClass : null;
  }

  // selection is deferred to the EDT under the dialog's modality state
  private void selectElementInTree(@NotNull final PsiElement element) {
    ApplicationManager.getApplication().invokeLater(() -> {
      if (myBuilder == null) return;
      final VirtualFile vFile = PsiUtilBase.getVirtualFile(element);
      myBuilder.select(element, vFile, false);
    }, getModalityState());
  }

  private ModalityState getModalityState() {
    return ModalityState.stateForComponent(getRootPane());
  }

  /**
   * Current selection from whichever tab is active: the by-name list (tab 0)
   * or the project tree (tab 1). Null when nothing usable is selected.
   */
  @Nullable
  protected T calcSelectedClass() {
    if (getTabbedPane().getSelectedIndex() == 0) {
      return (T)getGotoByNamePanel().getChosenElement();
    }
    else {
      TreePath path = getTree().getSelectionPath();
      if (path == null) return null;
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      return getSelectedFromTreeUserObject(node);
    }
  }

  /** Extract the chosen element (or null) from a project-tree node's user object. */
  protected abstract T getSelectedFromTreeUserObject(DefaultMutableTreeNode node);

  @Override
  public void dispose() {
    if (myBuilder != null) {
      Disposer.dispose(myBuilder);
      myBuilder = null;
    }
    super.dispose();
  }

  @Override
  protected String getDimensionServiceKey() {
    return "#com.intellij.ide.util.TreeClassChooserDialog";
  }

  @Override
  public JComponent getPreferredFocusedComponent() {
    return myGotoByNamePanel.getPreferredFocusedComponent();
  }

  @NotNull
  protected Project getProject() {
    return myProject;
  }

  GlobalSearchScope getScope() {
    return myScope;
  }

  @NotNull
  protected Filter<T> getFilter() {
    return myClassFilter;
  }

  T getBaseClass() {
    return myBaseClass;
  }

  T getInitialClass() {
    return myInitialClass;
  }

  protected TabbedPaneWrapper getTabbedPane() {
    return myTabbedPane;
  }

  protected Tree getTree() {
    return myTree;
  }

  protected ChooseByNamePanel getGotoByNamePanel() {
    return myGotoByNamePanel;
  }

  /**
   * Find candidate elements for the by-name tab.
   *
   * @param name          the exact name to look up
   * @param checkBoxState whether library entries are included in the search
   * @param pattern       the raw pattern the user typed
   * @param searchScope   scope to search in
   * @return matching elements (never null)
   */
  @NotNull
  protected abstract List<T> getClassesByName(final String name,
                                              final boolean checkBoxState,
                                              final String pattern,
                                              final GlobalSearchScope searchScope);

  /** Goto-class model that delegates name lookup to the owning dialog and applies its filter. */
  protected static class MyGotoClassModel<T extends PsiNamedElement> extends GotoClassModel2 {
    private final AbstractTreeClassChooserDialog<T> myTreeClassChooserDialog;

    public MyGotoClassModel(@NotNull Project project,
                            AbstractTreeClassChooserDialog<T> treeClassChooserDialog) {
      super(project);
      myTreeClassChooserDialog = treeClassChooserDialog;
    }

    AbstractTreeClassChooserDialog<T> getTreeClassChooserDialog() {
      return myTreeClassChooserDialog;
    }

    @NotNull
    @Override
    public Object[] getElementsByName(String name, FindSymbolParameters parameters, @NotNull ProgressIndicator canceled) {
      String patternName = parameters.getLocalPatternName();
      List<T> classes = myTreeClassChooserDialog.getClassesByName(
        name, parameters.isSearchInLibraries(), patternName, myTreeClassChooserDialog.getScope()
      );
      if (classes.size() == 0) return ArrayUtil.EMPTY_OBJECT_ARRAY;
      if (classes.size() == 1) {
        return isAccepted(classes.get(0)) ? ArrayUtil.toObjectArray(classes) : ArrayUtil.EMPTY_OBJECT_ARRAY;
      }
      // de-duplicate by qualified name while applying the filter
      Set<String> qNames = ContainerUtil.newHashSet();
      List<T> list = new ArrayList<>(classes.size());
      for (T aClass : classes) {
        if (qNames.add(getFullName(aClass)) && isAccepted(aClass)) {
          list.add(aClass);
        }
      }
      return ArrayUtil.toObjectArray(list);
    }

    @Override
    @Nullable
    public String getPromptText() {
      return null;
    }

    protected boolean isAccepted(T aClass) {
      return myTreeClassChooserDialog.getFilter().isAccepted(aClass);
    }
  }

  /**
   * Strategy for enumerating inheritors of a base class; used when the dialog
   * is restricted to subclasses. Subclasses implement the actual search.
   */
  public abstract static class BaseClassInheritorsProvider<T> {
    private final T myBaseClass;
    private final GlobalSearchScope myScope;

    public BaseClassInheritorsProvider(T baseClass, GlobalSearchScope scope) {
      myBaseClass = baseClass;
      myScope = scope;
    }

    public T getBaseClass() {
      return myBaseClass;
    }

    public GlobalSearchScope getScope() {
      return myScope;
    }

    @NotNull
    protected abstract Query<T> searchForInheritors(T baseClass, GlobalSearchScope searchScope, boolean checkDeep);

    protected abstract boolean isInheritor(T clazz, T baseClass, boolean checkDeep);

    protected abstract String[] getNames();

    protected Query<T> searchForInheritorsOfBaseClass() {
      return searchForInheritors(myBaseClass, myScope, true);
    }

    protected boolean isInheritorOfBaseClass(T aClass) {
      return isInheritor(aClass, myBaseClass, true);
    }
  }

  /**
   * Goto-class model limited to inheritors of a base class.  Starts in a "fast
   * mode" that walks the inheritors query directly; if that takes too long it
   * falls back to enumerating all names and filtering on acceptance.
   */
  private static class SubclassGotoClassModel<T extends PsiNamedElement> extends MyGotoClassModel<T> {
    private final BaseClassInheritorsProvider<T> myInheritorsProvider;

    private boolean myFastMode = true;

    public SubclassGotoClassModel(@NotNull final Project project,
                                  @NotNull final AbstractTreeClassChooserDialog<T> treeClassChooserDialog,
                                  @NotNull BaseClassInheritorsProvider<T> inheritorsProvider) {
      super(project, treeClassChooserDialog);
      myInheritorsProvider = inheritorsProvider;
      assert myInheritorsProvider.getBaseClass() != null;
    }

    @Override
    public void processNames(final Processor<String> nameProcessor, boolean checkBoxState) {
      if (myFastMode) {
        myFastMode = myInheritorsProvider.searchForInheritorsOfBaseClass().forEach(new Processor<T>() {
          private long start = System.currentTimeMillis();

          @Override
          public boolean process(T aClass) {
            // bail out of fast mode after 500ms (except in tests) by returning false
            if (System.currentTimeMillis() - start > 500 && !ApplicationManager.getApplication().isUnitTestMode()) {
              return false;
            }
            if ((getTreeClassChooserDialog().getFilter().isAccepted(aClass)) && aClass.getName() != null) {
              nameProcessor.process(aClass.getName());
            }
            return true;
          }
        });
      }
      if (!myFastMode) {
        // slow path: feed every known name; isAccepted() filters later
        for (String name : myInheritorsProvider.getNames()) {
          nameProcessor.process(name);
        }
      }
    }

    @Override
    protected boolean isAccepted(T aClass) {
      if (myFastMode) {
        return getTreeClassChooserDialog().getFilter().isAccepted(aClass);
      }
      else {
        // slow path must also re-establish the inheritance relation
        return (aClass == getTreeClassChooserDialog().getBaseClass() ||
                myInheritorsProvider.isInheritorOfBaseClass(aClass)) &&
               getTreeClassChooserDialog().getFilter().isAccepted(
                 aClass);
      }
    }
  }

  /** Callback from the by-name panel: record the choice and close with OK. */
  private class MyCallback extends ChooseByNamePopupComponent.Callback {
    @Override
    public void elementChosen(Object element) {
      mySelectedClass = (T)element;
      close(OK_EXIT_CODE);
    }
  }
}
| apache-2.0 |
svstanev/presto | presto-hive/src/test/java/com/facebook/presto/hive/AbstractTestHiveClientS3.java | 24825 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.GroupByHashPageIndexerFactory;
import com.facebook.presto.hive.AbstractTestHiveClient.HiveTransaction;
import com.facebook.presto.hive.AbstractTestHiveClient.Transaction;
import com.facebook.presto.hive.authentication.NoHdfsAuthentication;
import com.facebook.presto.hive.metastore.BridgingHiveMetastore;
import com.facebook.presto.hive.metastore.CachingHiveMetastore;
import com.facebook.presto.hive.metastore.Database;
import com.facebook.presto.hive.metastore.ExtendedHiveMetastore;
import com.facebook.presto.hive.metastore.PrincipalPrivileges;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.hive.metastore.ThriftHiveMetastore;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorOutputTableHandle;
import com.facebook.presto.spi.ConnectorPageSink;
import com.facebook.presto.spi.ConnectorPageSource;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.ConnectorSplitSource;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.ConnectorTableLayoutResult;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TableNotFoundException;
import com.facebook.presto.spi.connector.ConnectorMetadata;
import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
import com.facebook.presto.spi.connector.ConnectorSplitManager;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.sql.gen.JoinCompiler;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.MaterializedRow;
import com.facebook.presto.testing.TestingConnectorSession;
import com.facebook.presto.testing.TestingNodeManager;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.net.HostAndPort;
import io.airlift.concurrent.BoundedExecutor;
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import static com.facebook.presto.hadoop.HadoopFileStatus.isDirectory;
import static com.facebook.presto.hive.AbstractTestHiveClient.createTableProperties;
import static com.facebook.presto.hive.AbstractTestHiveClient.filterNonHiddenColumnHandles;
import static com.facebook.presto.hive.AbstractTestHiveClient.filterNonHiddenColumnMetadata;
import static com.facebook.presto.hive.AbstractTestHiveClient.getAllSplits;
import static com.facebook.presto.hive.HiveTestUtils.TYPE_MANAGER;
import static com.facebook.presto.hive.HiveTestUtils.getDefaultHiveDataStreamFactories;
import static com.facebook.presto.hive.HiveTestUtils.getDefaultHiveFileWriterFactories;
import static com.facebook.presto.hive.HiveTestUtils.getDefaultHiveRecordCursorProvider;
import static com.facebook.presto.hive.HiveTestUtils.getTypes;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.testing.MaterializedResult.materializeSourceDataStream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.util.concurrent.MoreExecutors.newDirectExecutorService;
import static io.airlift.concurrent.MoreFutures.getFutureValue;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.testing.Assertions.assertEqualsIgnoreOrder;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
/**
 * Base class for Hive connector integration tests that run against tables stored
 * on Amazon S3. Covers reading an existing S3-backed table, basic S3 filesystem
 * semantics (getFileStatus / rename / delete), and table creation in a writable
 * S3 bucket for every supported Hive storage format.
 *
 * Concrete subclasses provide metastore host/port and AWS credentials via
 * {@link #setup(String, int, String, String, String, String)}.
 */
@Test(groups = "hive-s3")
public abstract class AbstractTestHiveClientS3
{
    // S3 bucket the tests may write to (used by rename and create-table tests).
    protected String writableBucket;
    // Hive schema (database) the test tables live in.
    protected String database;
    // Pre-existing read-only S3-backed table (presto_test_s3).
    protected SchemaTableName tableS3;
    // Randomly suffixed table name used by the create-table test; see setupHive().
    protected SchemaTableName temporaryCreateTable;
    protected HdfsEnvironment hdfsEnvironment;
    protected LocationService locationService;
    protected TestingHiveMetastore metastoreClient;
    protected HiveMetadataFactory metadataFactory;
    protected HiveTransactionManager transactionManager;
    protected ConnectorSplitManager splitManager;
    protected ConnectorPageSinkProvider pageSinkProvider;
    protected ConnectorPageSourceProvider pageSourceProvider;
    // Executor owned by this class: created in setUp(), shut down in tearDown().
    private ExecutorService executor;
    @BeforeClass
    public void setUp()
            throws Exception
    {
        executor = newCachedThreadPool(daemonThreadsNamed("hive-%s"));
    }
    @AfterClass
    public void tearDown()
            throws Exception
    {
        // Idempotent shutdown: null out the field so a repeated call is a no-op.
        if (executor != null) {
            executor.shutdownNow();
            executor = null;
        }
    }
    /**
     * Initializes schema and table names. The temporary create-table name gets a
     * random suffix so concurrent/repeated runs do not collide in the metastore.
     */
    protected void setupHive(String databaseName)
    {
        database = databaseName;
        tableS3 = new SchemaTableName(database, "presto_test_s3");
        String random = UUID.randomUUID().toString().toLowerCase(ENGLISH).replace("-", "");
        temporaryCreateTable = new SchemaTableName(database, "tmp_presto_test_create_s3_" + random);
    }
    /**
     * Wires up the full connector stack (metastore, metadata, split manager,
     * page sink/source providers) against the given Hive metastore and S3
     * credentials. Must be called before any test method runs.
     */
    protected void setup(String host, int port, String databaseName, String awsAccessKey, String awsSecretKey, String writableBucket)
    {
        this.writableBucket = writableBucket;
        setupHive(databaseName);
        // S3 credentials are injected into the HDFS configuration below.
        HiveS3Config s3Config = new HiveS3Config()
                .setS3AwsAccessKey(awsAccessKey)
                .setS3AwsSecretKey(awsSecretKey);
        HiveClientConfig hiveClientConfig = new HiveClientConfig();
        // Optional SOCKS proxy for reaching the metastore from CI environments.
        String proxy = System.getProperty("hive.metastore.thrift.client.socks-proxy");
        if (proxy != null) {
            hiveClientConfig.setMetastoreSocksProxy(HostAndPort.fromString(proxy));
        }
        HiveConnectorId connectorId = new HiveConnectorId("hive-test");
        HiveCluster hiveCluster = new TestingHiveCluster(hiveClientConfig, host, port);
        // NOTE(review): this local executor shadows the class field created in
        // setUp() and is never shut down -- verify whether it should reuse the
        // field or be closed in tearDown().
        ExecutorService executor = newCachedThreadPool(daemonThreadsNamed("hive-s3-%s"));
        HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationUpdater(hiveClientConfig, s3Config));
        HivePartitionManager hivePartitionManager = new HivePartitionManager(connectorId, TYPE_MANAGER, hiveClientConfig);
        hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveClientConfig, new NoHdfsAuthentication());
        // TestingHiveMetastore (below) patches table locations so the metastore
        // does not need to be configured for S3 access.
        metastoreClient = new TestingHiveMetastore(
                new BridgingHiveMetastore(new ThriftHiveMetastore(hiveCluster)),
                executor,
                hiveClientConfig,
                writableBucket,
                hdfsEnvironment);
        locationService = new HiveLocationService(hdfsEnvironment);
        JsonCodec<PartitionUpdate> partitionUpdateCodec = JsonCodec.jsonCodec(PartitionUpdate.class);
        metadataFactory = new HiveMetadataFactory(
                connectorId,
                hiveClientConfig,
                metastoreClient,
                hdfsEnvironment,
                hivePartitionManager,
                newDirectExecutorService(),
                TYPE_MANAGER,
                locationService,
                new TableParameterCodec(),
                partitionUpdateCodec,
                new HiveTypeTranslator(),
                new NodeVersion("test_version"));
        transactionManager = new HiveTransactionManager();
        splitManager = new HiveSplitManager(
                connectorId,
                transactionHandle -> ((HiveMetadata) transactionManager.get(transactionHandle)).getMetastore(),
                new NamenodeStats(),
                hdfsEnvironment,
                new HadoopDirectoryLister(),
                new BoundedExecutor(executor, hiveClientConfig.getMaxSplitIteratorThreads()),
                new HiveCoercionPolicy(TYPE_MANAGER),
                hiveClientConfig.getMaxOutstandingSplits(),
                hiveClientConfig.getMinPartitionBatchSize(),
                hiveClientConfig.getMaxPartitionBatchSize(),
                hiveClientConfig.getMaxInitialSplits(),
                hiveClientConfig.getRecursiveDirWalkerEnabled());
        // NOTE(review): a fresh HiveClientConfig() is passed here instead of the
        // configured hiveClientConfig used everywhere else -- confirm intentional.
        pageSinkProvider = new HivePageSinkProvider(
                getDefaultHiveFileWriterFactories(hiveClientConfig),
                hdfsEnvironment,
                metastoreClient,
                new GroupByHashPageIndexerFactory(new JoinCompiler()),
                TYPE_MANAGER,
                new HiveClientConfig(),
                locationService,
                partitionUpdateCodec,
                new TestingNodeManager("fake-environment"),
                new HiveEventClient(),
                new HiveSessionProperties(hiveClientConfig));
        pageSourceProvider = new HivePageSourceProvider(hiveClientConfig, hdfsEnvironment, getDefaultHiveRecordCursorProvider(hiveClientConfig), getDefaultHiveDataStreamFactories(hiveClientConfig), TYPE_MANAGER);
    }
    // Session with default Hive session properties.
    protected ConnectorSession newSession()
    {
        return new TestingConnectorSession(new HiveSessionProperties(new HiveClientConfig()).getSessionProperties());
    }
    // Fresh transaction bound to a newly created metadata instance.
    protected Transaction newTransaction()
    {
        return new HiveTransaction(transactionManager, metadataFactory.create());
    }
    /**
     * Reads the pre-existing S3 table end to end (layout -> splits -> pages) and
     * checks the aggregate of the t_bigint column against a known total.
     */
    @Test
    public void testGetRecordsS3()
            throws Exception
    {
        try (Transaction transaction = newTransaction()) {
            ConnectorMetadata metadata = transaction.getMetadata();
            ConnectorSession session = newSession();
            ConnectorTableHandle table = getTableHandle(metadata, tableS3);
            List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(session, table).values());
            Map<String, Integer> columnIndex = indexColumns(columnHandles);
            List<ConnectorTableLayoutResult> tableLayoutResults = metadata.getTableLayouts(session, table, new Constraint<>(TupleDomain.all(), bindings -> true), Optional.empty());
            HiveTableLayoutHandle layoutHandle = (HiveTableLayoutHandle) getOnlyElement(tableLayoutResults).getTableLayout().getHandle();
            assertEquals(layoutHandle.getPartitions().get().size(), 1);
            ConnectorSplitSource splitSource = splitManager.getSplits(transaction.getTransactionHandle(), session, layoutHandle);
            long sum = 0;
            for (ConnectorSplit split : getAllSplits(splitSource)) {
                try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, columnHandles)) {
                    MaterializedResult result = materializeSourceDataStream(session, pageSource, getTypes(columnHandles));
                    for (MaterializedRow row : result) {
                        sum += (Long) row.getField(columnIndex.get("t_bigint"));
                    }
                }
            }
            // Expected total for the fixture data in presto_test_s3.
            assertEquals(sum, 78300);
        }
    }
    /**
     * Verifies directory/file classification of S3 paths via the HDFS facade.
     */
    @Test
    public void testGetFileStatus()
            throws Exception
    {
        Path basePath = new Path("s3://presto-test-hive/");
        Path tablePath = new Path(basePath, "presto_test_s3");
        Path filePath = new Path(tablePath, "test1.csv");
        FileSystem fs = hdfsEnvironment.getFileSystem("user", basePath);
        assertTrue(isDirectory(fs.getFileStatus(basePath)));
        assertTrue(isDirectory(fs.getFileStatus(tablePath)));
        assertFalse(isDirectory(fs.getFileStatus(filePath)));
        assertFalse(fs.exists(new Path(basePath, "foo")));
    }
    /**
     * Exercises rename semantics on S3: file-to-new-name, rename onto an
     * existing target (must fail), self-rename, and directory renames to both
     * non-existing and existing targets.
     */
    @Test
    public void testRename()
            throws Exception
    {
        // Unique base path per run so leftovers from a failed run do not interfere.
        Path basePath = new Path(format("s3://%s/rename/%s/", writableBucket, UUID.randomUUID()));
        FileSystem fs = hdfsEnvironment.getFileSystem("user", basePath);
        assertFalse(fs.exists(basePath));
        // create file foo.txt
        Path path = new Path(basePath, "foo.txt");
        assertTrue(fs.createNewFile(path));
        assertTrue(fs.exists(path));
        // rename foo.txt to bar.txt
        Path newPath = new Path(basePath, "bar.txt");
        assertFalse(fs.exists(newPath));
        assertTrue(fs.rename(path, newPath));
        assertFalse(fs.exists(path));
        assertTrue(fs.exists(newPath));
        // create file foo.txt and rename to bar.txt
        assertTrue(fs.createNewFile(path));
        // renaming onto an existing file must fail and leave the source in place
        assertFalse(fs.rename(path, newPath));
        assertTrue(fs.exists(path));
        // rename foo.txt to foo.txt
        assertTrue(fs.rename(path, path));
        assertTrue(fs.exists(path));
        // delete foo.txt
        assertTrue(fs.delete(path, false));
        assertFalse(fs.exists(path));
        // create directory source with file
        Path source = new Path(basePath, "source");
        assertTrue(fs.createNewFile(new Path(source, "test.txt")));
        // rename source to non-existing target
        Path target = new Path(basePath, "target");
        assertFalse(fs.exists(target));
        assertTrue(fs.rename(source, target));
        assertFalse(fs.exists(source));
        assertTrue(fs.exists(target));
        // create directory source with file
        assertTrue(fs.createNewFile(new Path(source, "test.txt")));
        // rename source to existing target
        // (HDFS semantics: the source directory is moved *under* the target)
        assertTrue(fs.rename(source, target));
        assertFalse(fs.exists(source));
        target = new Path(target, "source");
        assertTrue(fs.exists(target));
        assertTrue(fs.exists(new Path(target, "test.txt")));
        // delete target
        target = new Path(basePath, "target");
        assertTrue(fs.exists(target));
        assertTrue(fs.delete(target, true));
        assertFalse(fs.exists(target));
        // cleanup
        fs.delete(basePath, true);
    }
    /**
     * Creates (and always drops) the temporary table once per storage format.
     */
    @Test
    public void testTableCreation()
            throws Exception
    {
        for (HiveStorageFormat storageFormat : HiveStorageFormat.values()) {
            try {
                doCreateTable(temporaryCreateTable, storageFormat);
            }
            finally {
                dropTable(temporaryCreateTable);
            }
        }
    }
    /**
     * Creates a single-column table, writes three rows through the page sink,
     * then reads the table back and verifies metadata and data round-trip.
     */
    private void doCreateTable(SchemaTableName tableName, HiveStorageFormat storageFormat)
            throws Exception
    {
        List<ColumnMetadata> columns = ImmutableList.<ColumnMetadata>builder()
                .add(new ColumnMetadata("id", BIGINT))
                .build();
        MaterializedResult data = MaterializedResult.resultBuilder(newSession(), BIGINT)
                .row(1L)
                .row(3L)
                .row(2L)
                .build();
        try (Transaction transaction = newTransaction()) {
            ConnectorMetadata metadata = transaction.getMetadata();
            ConnectorSession session = newSession();
            // begin creating the table
            ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(tableName, columns, createTableProperties(storageFormat));
            ConnectorOutputTableHandle outputHandle = metadata.beginCreateTable(session, tableMetadata, Optional.empty());
            // write the records
            ConnectorPageSink sink = pageSinkProvider.createPageSink(transaction.getTransactionHandle(), session, outputHandle);
            sink.appendPage(data.toPage());
            Collection<Slice> fragments = getFutureValue(sink.finish());
            // commit the table
            metadata.finishCreateTable(session, outputHandle, fragments);
            transaction.commit();
            // Hack to work around the metastore not being configured for S3.
            // The metastore tries to validate the location when creating the
            // table, which fails without explicit configuration for S3.
            // We work around that by using a dummy location when creating the
            // table and update it here to the correct S3 location.
            metastoreClient.updateTableLocation(
                    database,
                    tableName.getTableName(),
                    locationService.writePathRoot(((HiveOutputTableHandle) outputHandle).getLocationHandle()).get().toString());
        }
        try (Transaction transaction = newTransaction()) {
            ConnectorMetadata metadata = transaction.getMetadata();
            ConnectorSession session = newSession();
            // load the new table
            ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
            List<ColumnHandle> columnHandles = filterNonHiddenColumnHandles(metadata.getColumnHandles(session, tableHandle).values());
            // verify the metadata
            ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(session, getTableHandle(metadata, tableName));
            assertEquals(filterNonHiddenColumnMetadata(tableMetadata.getColumns()), columns);
            // verify the data
            List<ConnectorTableLayoutResult> tableLayoutResults = metadata.getTableLayouts(session, tableHandle, new Constraint<>(TupleDomain.all(), bindings -> true), Optional.empty());
            HiveTableLayoutHandle layoutHandle = (HiveTableLayoutHandle) getOnlyElement(tableLayoutResults).getTableLayout().getHandle();
            assertEquals(layoutHandle.getPartitions().get().size(), 1);
            ConnectorSplitSource splitSource = splitManager.getSplits(transaction.getTransactionHandle(), session, layoutHandle);
            ConnectorSplit split = getOnlyElement(getAllSplits(splitSource));
            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, columnHandles)) {
                MaterializedResult result = materializeSourceDataStream(session, pageSource, getTypes(columnHandles));
                assertEqualsIgnoreOrder(result.getMaterializedRows(), data.getMaterializedRows());
            }
        }
    }
    // Best-effort drop: exceptions are swallowed because the table may never
    // have been created (e.g. doCreateTable failed early).
    private void dropTable(SchemaTableName table)
    {
        try (Transaction transaction = newTransaction()) {
            transaction.getMetastore(table.getSchemaName()).dropTable(newSession(), table.getSchemaName(), table.getTableName());
            transaction.commit();
        }
        catch (RuntimeException e) {
            // this usually occurs because the table was not created
        }
    }
    private ConnectorTableHandle getTableHandle(ConnectorMetadata metadata, SchemaTableName tableName)
    {
        ConnectorTableHandle handle = metadata.getTableHandle(newSession(), tableName);
        checkArgument(handle != null, "table not found: %s", tableName);
        return handle;
    }
    // Maps column name -> position in the given handle list, for row field lookup.
    private static ImmutableMap<String, Integer> indexColumns(List<ColumnHandle> columnHandles)
    {
        ImmutableMap.Builder<String, Integer> index = ImmutableMap.builder();
        int i = 0;
        for (ColumnHandle columnHandle : columnHandles) {
            HiveColumnHandle hiveColumnHandle = (HiveColumnHandle) columnHandle;
            index.put(hiveColumnHandle.getName(), i);
            i++;
        }
        return index.build();
    }
    /**
     * Metastore wrapper that redirects database/table locations so the real
     * metastore never has to validate (or even know about) S3 paths.
     */
    private static class TestingHiveMetastore
            extends CachingHiveMetastore
    {
        private final String writableBucket;
        private final HdfsEnvironment hdfsEnvironment;
        public TestingHiveMetastore(ExtendedHiveMetastore delegate, ExecutorService executor, HiveClientConfig hiveClientConfig, String writableBucket, HdfsEnvironment hdfsEnvironment)
        {
            super(delegate, executor, hiveClientConfig);
            this.writableBucket = writableBucket;
            this.hdfsEnvironment = hdfsEnvironment;
        }
        // Reports the writable S3 bucket as the database location so new tables
        // default to S3 paths.
        @Override
        public Optional<Database> getDatabase(String databaseName)
        {
            return super.getDatabase(databaseName)
                    .map(database -> Database.builder(database)
                            .setLocation(Optional.of("s3://" + writableBucket + "/"))
                            .build());
        }
        @Override
        public void createTable(Table table, PrincipalPrivileges privileges)
        {
            // hack to work around the metastore not being configured for S3
            Table.Builder tableBuilder = Table.builder(table);
            tableBuilder.getStorageBuilder().setLocation("/");
            super.createTable(tableBuilder.build(), privileges);
        }
        // Drops a table and (optionally) its S3 data, after first resetting the
        // stored location so the metastore itself never touches S3.
        @Override
        public void dropTable(String databaseName, String tableName, boolean deleteData)
        {
            try {
                Optional<Table> table = getTable(databaseName, tableName);
                if (!table.isPresent()) {
                    throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
                }
                // hack to work around the metastore not being configured for S3
                List<String> locations = listAllDataPaths(databaseName, tableName);
                Table.Builder tableBuilder = Table.builder(table.get());
                tableBuilder.getStorageBuilder().setLocation("/");
                // drop table
                replaceTable(databaseName, tableName, tableBuilder.build(), new PrincipalPrivileges(ImmutableMultimap.of(), ImmutableMultimap.of()));
                delegate.dropTable(databaseName, tableName, false);
                // drop data
                if (deleteData) {
                    for (String location : locations) {
                        Path path = new Path(location);
                        hdfsEnvironment.getFileSystem("user", path).delete(path, true);
                    }
                }
            }
            catch (Exception e) {
                throw Throwables.propagate(e);
            }
            finally {
                invalidateTable(databaseName, tableName);
            }
        }
        // Rewrites the table location after creation; see doCreateTable().
        public void updateTableLocation(String databaseName, String tableName, String location)
        {
            Optional<Table> table = getTable(databaseName, tableName);
            if (!table.isPresent()) {
                throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
            }
            Table.Builder tableBuilder = Table.builder(table.get());
            tableBuilder.getStorageBuilder().setLocation(location);
            // NOTE: this clears the permissions
            replaceTable(databaseName, tableName, tableBuilder.build(), new PrincipalPrivileges(ImmutableMultimap.of(), ImmutableMultimap.of()));
        }
        // Collects the table location plus every partition location outside it.
        private List<String> listAllDataPaths(String schemaName, String tableName)
        {
            ImmutableList.Builder<String> locations = ImmutableList.builder();
            Table table = getTable(schemaName, tableName).get();
            if (table.getStorage().getLocation() != null) {
                // For partitioned table, there should be nothing directly under this directory.
                // But including this location in the set makes the directory content assert more
                // extensive, which is desirable.
                locations.add(table.getStorage().getLocation());
            }
            Optional<List<String>> partitionNames = getPartitionNames(schemaName, tableName);
            if (partitionNames.isPresent()) {
                getPartitionsByNames(schemaName, tableName, partitionNames.get()).values().stream()
                        .map(Optional::get)
                        .map(partition -> partition.getStorage().getLocation())
                        .filter(location -> !location.startsWith(table.getStorage().getLocation()))
                        .forEach(locations::add);
            }
            return locations.build();
        }
    }
}
| apache-2.0 |
agolPL/keycloak | testsuite/integration-arquillian/tests/base/src/test/java/org/keycloak/testsuite/adapter/undertow/servlet/UndertowDemoServletsAdapterTest.java | 1119 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.adapter.undertow.servlet;
import org.keycloak.testsuite.adapter.servlet.AbstractDemoServletsAdapterTest;
import org.keycloak.testsuite.arquillian.annotation.AppServerContainer;
import org.junit.Ignore;
/**
 * Runs the shared demo-servlet adapter test suite on the Undertow application
 * server (selected via the {@code @AppServerContainer} value). All test methods
 * are inherited from {@link AbstractDemoServletsAdapterTest}; this class only
 * binds them to the Undertow container.
 *
 * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
 */
@AppServerContainer("auth-server-undertow")
public class UndertowDemoServletsAdapterTest extends AbstractDemoServletsAdapterTest {
}
| apache-2.0 |
mdamour1976/big-data-plugin | src/org/pentaho/di/job/PropertyEntry.java | 2509 | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job;
import org.pentaho.ui.xul.XulEventSource;

import java.beans.PropertyChangeListener;
import java.util.Map;
import java.util.Objects;
/**
 * Simple mutable String key/value pair used as a {@link Map.Entry} for XUL
 * table bindings. Not thread-safe.
 *
 * User: RFellows Date: 6/18/12
 */
public class PropertyEntry implements Map.Entry<String, String>, XulEventSource {

  private String key = null;
  private String value = null;

  public PropertyEntry() {
    this( null, null );
  }

  public PropertyEntry( String key, String value ) {
    this.key = key;
    this.value = value;
  }

  @Override
  public String getKey() {
    return key;
  }

  public void setKey( String key ) {
    this.key = key;
  }

  @Override
  public String getValue() {
    return value;
  }

  /**
   * Replaces the value of this entry.
   *
   * @param value the new value
   * @return the previous value, as required by the {@link Map.Entry#setValue}
   *         contract (the original implementation incorrectly returned the
   *         new value)
   */
  @Override
  public String setValue( String value ) {
    String oldValue = this.value;
    this.value = value;
    return oldValue;
  }

  @Override
  public boolean equals( Object o ) {
    if ( this == o ) {
      return true;
    }
    if ( o == null || getClass() != o.getClass() ) {
      return false;
    }
    PropertyEntry that = (PropertyEntry) o;
    // Null-safe comparison of both components.
    return Objects.equals( key, that.key ) && Objects.equals( value, that.value );
  }

  @Override
  public int hashCode() {
    return Objects.hash( key, value );
  }

  // XulEventSource requires these methods, but this bean does not fire
  // property change events, so listeners are intentionally ignored.
  @Override
  public void addPropertyChangeListener( PropertyChangeListener propertyChangeListener ) {
  }

  @Override
  public void removePropertyChangeListener( PropertyChangeListener propertyChangeListener ) {
  }
}
| apache-2.0 |
rchallapalli/drill | exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java | 37569 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill;
import com.google.common.collect.Lists;
import mockit.Deencapsulation;
import org.apache.drill.common.config.CommonConstants;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.UserRemoteException;
import org.apache.drill.common.util.TestTools;
import org.apache.drill.exec.exception.VersionMismatchException;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
import org.apache.drill.exec.expr.fn.registry.LocalFunctionRegistry;
import org.apache.drill.exec.expr.fn.registry.RemoteFunctionRegistry;
import org.apache.drill.exec.proto.UserBitShared.Jar;
import org.apache.drill.exec.proto.UserBitShared.Registry;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.store.sys.store.DataChangeVersion;
import org.apache.drill.exec.util.JarUtil;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.class)
public class TestDynamicUDFSupport extends BaseTestQuery {
  // Directory containing the prebuilt UDF test jars shipped with the test resources.
  private static final File jars = new File(TestTools.getWorkingPath() + "/src/test/resources/jars");
  // Default binary jar used by most tests, and its derived source jar name.
  private static final String default_binary_name = "DrillUDF-1.0.jar";
  private static final String default_source_name = JarUtil.getSourceName(default_binary_name);
  // Fresh temporary folder per test; used as the UDF root and drill tmp dir (see setup()).
  @Rule
  public final TemporaryFolder base = new TemporaryFolder();
  @Before
  public void setup() {
    // Point the dynamic UDF root and drill temp dir at the per-test temporary
    // folder so every test starts with empty staging/registry/tmp areas.
    Properties overrideProps = new Properties();
    overrideProps.setProperty("drill.exec.udf.directory.root", base.getRoot().getPath());
    overrideProps.setProperty("drill.tmp-dir", base.getRoot().getPath());
    updateTestCluster(1, DrillConfig.create(overrideProps));
  }
@Test
public void testSyntax() throws Exception {
test("create function using jar 'jar_name.jar'");
test("drop function using jar 'jar_name.jar'");
}
@Test
public void testEnableDynamicSupport() throws Exception {
try {
test("alter system set `exec.udf.enable_dynamic_support` = true");
test("create function using jar 'jar_name.jar'");
test("drop function using jar 'jar_name.jar'");
} finally {
test("alter system reset `exec.udf.enable_dynamic_support`");
}
}
@Test
public void testDisableDynamicSupport() throws Exception {
try {
test("alter system set `exec.udf.enable_dynamic_support` = false");
String[] actions = new String[] {"create", "drop"};
String query = "%s function using jar 'jar_name.jar'";
for (String action : actions) {
try {
test(query, action);
} catch (UserRemoteException e) {
assertThat(e.getMessage(), containsString("Dynamic UDFs support is disabled."));
}
}
} finally {
test("alter system reset `exec.udf.enable_dynamic_support`");
}
}
@Test
public void testAbsentBinaryInStaging() throws Exception {
Path staging = getDrillbitContext().getRemoteFunctionRegistry().getStagingArea();
String summary = String.format("File %s does not exist", new Path(staging, default_binary_name).toUri().getPath());
testBuilder()
.sqlQuery("create function using jar '%s'", default_binary_name)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, summary)
.go();
}
@Test
public void testAbsentSourceInStaging() throws Exception {
Path staging = getDrillbitContext().getRemoteFunctionRegistry().getStagingArea();
copyJar(getDrillbitContext().getRemoteFunctionRegistry().getFs(), new Path(jars.toURI()),
staging, default_binary_name);
String summary = String.format("File %s does not exist", new Path(staging, default_source_name).toUri().getPath());
testBuilder()
.sqlQuery("create function using jar '%s'", default_binary_name)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, summary)
.go();
}
@Test
public void testJarWithoutMarkerFile() throws Exception {
String jarWithNoMarkerFile = "DrillUDF_NoMarkerFile-1.0.jar";
copyJarsToStagingArea(jarWithNoMarkerFile, JarUtil.getSourceName(jarWithNoMarkerFile));
String summary = "Marker file %s is missing in %s";
testBuilder()
.sqlQuery("create function using jar '%s'", jarWithNoMarkerFile)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, String.format(summary,
CommonConstants.DRILL_JAR_MARKER_FILE_RESOURCE_PATHNAME, jarWithNoMarkerFile))
.go();
}
@Test
public void testJarWithoutFunctions() throws Exception {
String jarWithNoFunctions = "DrillUDF_Empty-1.0.jar";
copyJarsToStagingArea(jarWithNoFunctions, JarUtil.getSourceName(jarWithNoFunctions));
String summary = "Jar %s does not contain functions";
testBuilder()
.sqlQuery("create function using jar '%s'", jarWithNoFunctions)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, String.format(summary, jarWithNoFunctions))
.go();
}
@Test
public void testSuccessfulRegistration() throws Exception {
copyDefaultJarsToStagingArea();
String summary = "The following UDFs in jar %s have been registered:\n" +
"[custom_lower(VARCHAR-REQUIRED)]";
testBuilder()
.sqlQuery("create function using jar '%s'", default_binary_name)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(true, String.format(summary, default_binary_name))
.go();
RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
FileSystem fs = remoteFunctionRegistry.getFs();
assertFalse("Staging area should be empty", fs.listFiles(remoteFunctionRegistry.getStagingArea(), false).hasNext());
assertFalse("Temporary area should be empty", fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
assertTrue("Binary should be present in registry area",
fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
assertTrue("Source should be present in registry area",
fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
assertEquals(registry.getJar(0).getName(), default_binary_name);
}
@Test
public void testDuplicatedJarInRemoteRegistry() throws Exception {
copyDefaultJarsToStagingArea();
test("create function using jar '%s'", default_binary_name);
copyDefaultJarsToStagingArea();
String summary = "Jar with %s name has been already registered";
testBuilder()
.sqlQuery("create function using jar '%s'", default_binary_name)
.unOrdered()
.baselineColumns("ok", "summary")
.baselineValues(false, String.format(summary, default_binary_name))
.go();
}
  @Test
  public void testDuplicatedJarInLocalRegistry() throws Exception {
    copyDefaultJarsToStagingArea();
    test("create function using jar '%s'", default_binary_name);
    // Running a query that uses the UDF forces the jar into the drillbit's
    // LOCAL function registry before re-registration is attempted -- this is
    // what distinguishes this test from the remote-registry variant above.
    test("select custom_lower('A') from (values(1))");
    copyDefaultJarsToStagingArea();
    String summary = "Jar with %s name has been already registered";
    testBuilder()
        .sqlQuery("create function using jar '%s'", default_binary_name)
        .unOrdered()
        .baselineColumns("ok", "summary")
        .baselineValues(false, String.format(summary, default_binary_name))
        .go();
  }
  @Test
  public void testDuplicatedFunctionsInRemoteRegistry() throws Exception {
    // DrillUDF_Copy provides the same custom_lower(VARCHAR) signature as the
    // default jar that was just registered.
    String jarWithDuplicate = "DrillUDF_Copy-1.0.jar";
    copyDefaultJarsToStagingArea();
    test("create function using jar '%s'", default_binary_name);
    copyJarsToStagingArea(jarWithDuplicate, JarUtil.getSourceName(jarWithDuplicate));
    // The error names the jar that already owns the duplicated function.
    String summary = "Found duplicated function in %s: custom_lower(VARCHAR-REQUIRED)";
    testBuilder()
        .sqlQuery("create function using jar '%s'", jarWithDuplicate)
        .unOrdered()
        .baselineColumns("ok", "summary")
        .baselineValues(false, String.format(summary, default_binary_name))
        .go();
  }
  @Test
  public void testDuplicatedFunctionsInLocalRegistry() throws Exception {
    // DrillUDF_DupFunc defines lower(VARCHAR), which clashes with the built-in
    // lower function already present in the local function registry.
    String jarWithDuplicate = "DrillUDF_DupFunc-1.0.jar";
    copyJarsToStagingArea(jarWithDuplicate, JarUtil.getSourceName(jarWithDuplicate));
    // The "owning jar" reported here is the built-in registry marker, not a real jar.
    String summary = "Found duplicated function in %s: lower(VARCHAR-REQUIRED)";
    testBuilder()
        .sqlQuery("create function using jar '%s'", jarWithDuplicate)
        .unOrdered()
        .baselineColumns("ok", "summary")
        .baselineValues(false, String.format(summary, LocalFunctionRegistry.BUILT_IN))
        .go();
  }
  @Test
  public void testSuccessfulRegistrationAfterSeveralRetryAttempts() throws Exception {
    RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
    copyDefaultJarsToStagingArea();
    // Stub the spy to simulate two concurrent-update conflicts before letting
    // the real update through; registration is expected to retry transparently.
    doThrow(new VersionMismatchException("Version mismatch detected", 1))
        .doThrow(new VersionMismatchException("Version mismatch detected", 1))
        .doCallRealMethod()
        .when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
    String summary = "The following UDFs in jar %s have been registered:\n" +
        "[custom_lower(VARCHAR-REQUIRED)]";
    testBuilder()
        .sqlQuery("create function using jar '%s'", default_binary_name)
        .unOrdered()
        .baselineColumns("ok", "summary")
        .baselineValues(true, String.format(summary, default_binary_name))
        .go();
    // Exactly three attempts: two failed + one successful.
    verify(remoteFunctionRegistry, times(3))
        .updateRegistry(any(Registry.class), any(DataChangeVersion.class));
    // Despite the retries the end state must be identical to a clean registration.
    FileSystem fs = remoteFunctionRegistry.getFs();
    assertFalse("Staging area should be empty", fs.listFiles(remoteFunctionRegistry.getStagingArea(), false).hasNext());
    assertFalse("Temporary area should be empty", fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
    assertTrue("Binary should be present in registry area",
        fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
    assertTrue("Source should be present in registry area",
        fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
    // NOTE(review): JUnit's assertEquals convention is (expected, actual); the
    // two calls below have the arguments reversed, which only affects the
    // wording of failure messages, not pass/fail behavior.
    Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
    assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
    assertEquals(registry.getJar(0).getName(), default_binary_name);
  }
@Test
public void testSuccessfulUnregistrationAfterSeveralRetryAttempts() throws Exception {
  // Unregistration must also survive transient version conflicts: two failing
  // registry updates followed by the real call.
  RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
  copyDefaultJarsToStagingArea();
  test("create function using jar '%s'", default_binary_name);
  // Drop the stubbing used (implicitly) during registration before re-stubbing.
  reset(remoteFunctionRegistry);
  doThrow(new VersionMismatchException("Version mismatch detected", 1))
      .doThrow(new VersionMismatchException("Version mismatch detected", 1))
      .doCallRealMethod()
      .when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
  String summary = "The following UDFs in jar %s have been unregistered:\n" +
      "[custom_lower(VARCHAR-REQUIRED)]";
  testBuilder()
      .sqlQuery("drop function using jar '%s'", default_binary_name)
      .unOrdered()
      .baselineColumns("ok", "summary")
      .baselineValues(true, String.format(summary, default_binary_name))
      .go();
  // Two failed attempts plus the final successful one.
  verify(remoteFunctionRegistry, times(3))
      .updateRegistry(any(Registry.class), any(DataChangeVersion.class));
  FileSystem fs = remoteFunctionRegistry.getFs();
  assertFalse("Registry area should be empty", fs.listFiles(remoteFunctionRegistry.getRegistryArea(), false).hasNext());
  // Fixed assertEquals argument order: JUnit expects (message, expected, actual).
  assertEquals("Registry should be empty",
      0, remoteFunctionRegistry.getRegistry(new DataChangeVersion()).getJarList().size());
}
@Test
public void testExceedRetryAttemptsDuringRegistration() throws Exception {
  // Every registry update fails with a version mismatch, so registration gives up
  // after exhausting its retries and leaves the jars in the staging area.
  RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
  copyDefaultJarsToStagingArea();
  doThrow(new VersionMismatchException("Version mismatch detected", 1))
      .when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
  String summary = "Failed to update remote function registry. Exceeded retry attempts limit.";
  testBuilder()
      .sqlQuery("create function using jar '%s'", default_binary_name)
      .unOrdered()
      .baselineColumns("ok", "summary")
      .baselineValues(false, summary)
      .go();
  // Initial attempt + the configured number of retries.
  verify(remoteFunctionRegistry, times(remoteFunctionRegistry.getRetryAttempts() + 1))
      .updateRegistry(any(Registry.class), any(DataChangeVersion.class));
  FileSystem fs = remoteFunctionRegistry.getFs();
  // Roll-back: jars stay in staging, registry/tmp areas are untouched.
  assertTrue("Binary should be present in staging area",
      fs.exists(new Path(remoteFunctionRegistry.getStagingArea(), default_binary_name)));
  assertTrue("Source should be present in staging area",
      fs.exists(new Path(remoteFunctionRegistry.getStagingArea(), default_source_name)));
  assertFalse("Registry area should be empty",
      fs.listFiles(remoteFunctionRegistry.getRegistryArea(), false).hasNext());
  assertFalse("Temporary area should be empty",
      fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
  // Fixed assertEquals argument order: JUnit expects (message, expected, actual).
  assertEquals("Registry should be empty",
      0, remoteFunctionRegistry.getRegistry(new DataChangeVersion()).getJarList().size());
}
@Test
public void testExceedRetryAttemptsDuringUnregistration() throws Exception {
  // Unregistration exhausts its retry attempts; the jar must remain registered.
  RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
  copyDefaultJarsToStagingArea();
  test("create function using jar '%s'", default_binary_name);
  reset(remoteFunctionRegistry);
  doThrow(new VersionMismatchException("Version mismatch detected", 1))
      .when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
  String summary = "Failed to update remote function registry. Exceeded retry attempts limit.";
  testBuilder()
      .sqlQuery("drop function using jar '%s'", default_binary_name)
      .unOrdered()
      .baselineColumns("ok", "summary")
      .baselineValues(false, summary)
      .go();
  // Initial attempt + the configured number of retries.
  verify(remoteFunctionRegistry, times(remoteFunctionRegistry.getRetryAttempts() + 1))
      .updateRegistry(any(Registry.class), any(DataChangeVersion.class));
  FileSystem fs = remoteFunctionRegistry.getFs();
  // The jar stays in the registry area since the drop never committed.
  assertTrue("Binary should be present in registry area",
      fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
  assertTrue("Source should be present in registry area",
      fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
  Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
  // Fixed assertEquals argument order: JUnit expects (message, expected, actual).
  assertEquals("Registry should contain one jar", 1, registry.getJarList().size());
  assertEquals(default_binary_name, registry.getJar(0).getName());
}
@Test
public void testLazyInit() throws Exception {
  // custom_lower is not registered yet, so the query must fail. Previously the
  // try/catch silently passed when the query unexpectedly succeeded.
  try {
    test("select custom_lower('A') from (values(1))");
    throw new AssertionError("Query should have failed: custom_lower is not registered yet");
  } catch (UserRemoteException e) {
    assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
  }
  copyDefaultJarsToStagingArea();
  test("create function using jar '%s'", default_binary_name);
  // First use of the function triggers lazy loading into the local registry.
  testBuilder()
      .sqlQuery("select custom_lower('A') as res from (values(1))")
      .unOrdered()
      .baselineColumns("res")
      .baselineValues("a")
      .go();
  Path localUdfDirPath = Deencapsulation.getField(
      getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir");
  File localUdfDir = new File(localUdfDirPath.toUri().getPath());
  // Lazy init should have copied both jars into the local udf directory.
  assertTrue("Binary should exist in local udf directory", new File(localUdfDir, default_binary_name).exists());
  assertTrue("Source should exist in local udf directory", new File(localUdfDir, default_source_name).exists());
}
@Test
public void testLazyInitWhenDynamicUdfSupportIsDisabled() throws Exception {
  // custom_lower is not registered yet, so the query must fail. Previously the
  // try/catch silently passed when the query unexpectedly succeeded.
  try {
    test("select custom_lower('A') from (values(1))");
    throw new AssertionError("Query should have failed: custom_lower is not registered yet");
  } catch (UserRemoteException e) {
    assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
  }
  copyDefaultJarsToStagingArea();
  test("create function using jar '%s'", default_binary_name);
  // The registered function must still resolve even with dynamic UDF support disabled.
  try {
    testBuilder()
        .sqlQuery("select custom_lower('A') as res from (values(1))")
        .optionSettingQueriesForTestQuery("alter system set `exec.udf.enable_dynamic_support` = false")
        .unOrdered()
        .baselineColumns("res")
        .baselineValues("a")
        .go();
  } finally {
    // Always restore the system option so later tests are unaffected.
    test("alter system reset `exec.udf.enable_dynamic_support`");
  }
}
@Test
public void testOverloadedFunctionPlanningStage() throws Exception {
  // abs(VARCHAR, VARCHAR) must resolve to the dynamically registered overload.
  final String jar = "DrillUDF-overloading-1.0.jar";
  final String expected = "ABS was overloaded. Input: A, A";
  copyJarsToStagingArea(jar, JarUtil.getSourceName(jar));
  test("create function using jar '%s'", jar);
  testBuilder()
      .sqlQuery("select abs('A', 'A') as res from (values(1))")
      .unOrdered()
      .baselineColumns("res")
      .baselineValues(expected)
      .go();
}
@Test
public void testOverloadedFunctionExecutionStage() throws Exception {
  // log(VARCHAR) must resolve to the dynamically registered overload.
  final String jar = "DrillUDF-overloading-1.0.jar";
  final String expected = "LOG was overloaded. Input: A";
  copyJarsToStagingArea(jar, JarUtil.getSourceName(jar));
  test("create function using jar '%s'", jar);
  testBuilder()
      .sqlQuery("select log('A') as res from (values(1))")
      .unOrdered()
      .baselineColumns("res")
      .baselineValues(expected)
      .go();
}
@Test
public void testDropFunction() throws Exception {
  // Register, use, then drop a function and verify every trace of it is removed.
  copyDefaultJarsToStagingArea();
  test("create function using jar '%s'", default_binary_name);
  test("select custom_lower('A') from (values(1))");
  Path localUdfDirPath = Deencapsulation.getField(
      getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir");
  File localUdfDir = new File(localUdfDirPath.toUri().getPath());
  assertTrue("Binary should exist in local udf directory", new File(localUdfDir, default_binary_name).exists());
  assertTrue("Source should exist in local udf directory", new File(localUdfDir, default_source_name).exists());
  String summary = "The following UDFs in jar %s have been unregistered:\n" +
      "[custom_lower(VARCHAR-REQUIRED)]";
  testBuilder()
      .sqlQuery("drop function using jar '%s'", default_binary_name)
      .unOrdered()
      .baselineColumns("ok", "summary")
      .baselineValues(true, String.format(summary, default_binary_name))
      .go();
  // After the drop the function must be unknown again. Previously the try/catch
  // silently passed when the query unexpectedly succeeded.
  try {
    test("select custom_lower('A') from (values(1))");
    throw new AssertionError("Query should have failed: custom_lower was dropped");
  } catch (UserRemoteException e) {
    assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
  }
  RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
  // Fixed assertEquals argument order: JUnit expects (message, expected, actual).
  assertEquals("Remote registry should be empty",
      0, remoteFunctionRegistry.getRegistry(new DataChangeVersion()).getJarList().size());
  FileSystem fs = remoteFunctionRegistry.getFs();
  assertFalse("Binary should not be present in registry area",
      fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
  assertFalse("Source should not be present in registry area",
      fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
  assertFalse("Binary should not be present in local udf directory",
      new File(localUdfDir, default_binary_name).exists());
  assertFalse("Source should not be present in local udf directory",
      new File(localUdfDir, default_source_name).exists());
}
@Test
public void testReRegisterTheSameJarWithDifferentContent() throws Exception {
  copyDefaultJarsToStagingArea();
  test("create function using jar '%s'", default_binary_name);
  testBuilder()
      .sqlQuery("select custom_lower('A') as res from (values(1))")
      .unOrdered()
      .baselineColumns("res")
      .baselineValues("a")
      .go();
  test("drop function using jar '%s'", default_binary_name);
  // NOTE(review): fixed sleep — presumably waits for the drop to propagate before
  // re-registering; an explicit synchronization point would be less flaky. Confirm.
  Thread.sleep(1000);
  // Re-register a jar with the same name but different content (the "v2" variant).
  Path src = new Path(jars.toURI().getPath(), "v2");
  copyJarsToStagingArea(src, default_binary_name, default_source_name);
  test("create function using jar '%s'", default_binary_name);
  // The new implementation (returning "a_v2") must be picked up, not a cached copy.
  testBuilder()
      .sqlQuery("select custom_lower('A') as res from (values(1))")
      .unOrdered()
      .baselineColumns("res")
      .baselineValues("a_v2")
      .go();
}
@Test
public void testDropAbsentJar() throws Exception {
  // Dropping a jar that was never registered must report a failure summary.
  final String expectedSummary =
      String.format("Jar %s is not registered in remote registry", default_binary_name);
  testBuilder()
      .sqlQuery("drop function using jar '%s'", default_binary_name)
      .unOrdered()
      .baselineColumns("ok", "summary")
      .baselineValues(false, expectedSummary)
      .go();
}
@Test
public void testRegistrationFailDuringRegistryUpdate() throws Exception {
  // If the remote registry update itself blows up, registration must roll back:
  // jars return to staging, registry and tmp areas end up empty.
  final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
  final FileSystem fs = remoteFunctionRegistry.getFs();
  final String errorMessage = "Failure during remote registry update.";
  doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      // At update time the jars must already have been copied to the registry area.
      assertTrue("Binary should be present in registry area",
          fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
      assertTrue("Source should be present in registry area",
          fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
      // Simulate an unexpected failure during the registry update.
      throw new RuntimeException(errorMessage);
    }
  }).when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
  copyDefaultJarsToStagingArea();
  testBuilder()
      .sqlQuery("create function using jar '%s'", default_binary_name)
      .unOrdered()
      .baselineColumns("ok", "summary")
      .baselineValues(false, errorMessage)
      .go();
  // Roll-back checks: everything returned to the pre-registration state.
  assertFalse("Registry area should be empty",
      fs.listFiles(remoteFunctionRegistry.getRegistryArea(), false).hasNext());
  assertFalse("Temporary area should be empty",
      fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
  assertTrue("Binary should be present in staging area",
      fs.exists(new Path(remoteFunctionRegistry.getStagingArea(), default_binary_name)));
  assertTrue("Source should be present in staging area",
      fs.exists(new Path(remoteFunctionRegistry.getStagingArea(), default_source_name)));
}
@Test
public void testConcurrentRegistrationOfTheSameJar() throws Exception {
  // While one registration of a jar is in flight, concurrent create/drop commands
  // for the same jar must be rejected as "in use".
  RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
  // latch2: released once the first registration has added the jar to the in-use list.
  // latch1: holds the first registration there until the concurrent attempts have run.
  final CountDownLatch latch1 = new CountDownLatch(1);
  final CountDownLatch latch2 = new CountDownLatch(1);
  doAnswer(new Answer<String>() {
    @Override
    public String answer(InvocationOnMock invocation) throws Throwable {
      // First addToJars() call: perform the real call, then pause so the jar
      // stays marked as in use while the concurrent commands execute.
      String result = (String) invocation.callRealMethod();
      latch2.countDown();
      latch1.await();
      return result;
    }
  })
  .doCallRealMethod()
  .doCallRealMethod()
  .when(remoteFunctionRegistry).addToJars(anyString(), any(RemoteFunctionRegistry.Action.class));
  final String query = String.format("create function using jar '%s'", default_binary_name);
  Thread thread = new Thread(new SimpleQueryRunner(query));
  thread.start();
  latch2.await();
  try {
    // Both a second registration and an unregistration must fail while in use.
    String summary = "Jar with %s name is used. Action: REGISTRATION";
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("ok", "summary")
        .baselineValues(false, String.format(summary, default_binary_name))
        .go();
    testBuilder()
        .sqlQuery("drop function using jar '%s'", default_binary_name)
        .unOrdered()
        .baselineColumns("ok", "summary")
        .baselineValues(false, String.format(summary, default_binary_name))
        .go();
  } finally {
    // Release the blocked registration and wait for it to finish.
    latch1.countDown();
    thread.join();
  }
}
@Test
public void testConcurrentRemoteRegistryUpdateWithDuplicates() throws Exception {
  // Two jars that declare the same function are registered concurrently.
  // Latch choreography forces the two remote registry updates to interleave:
  //   latch3 - first update reached updateRegistry(): safe to start thread2
  //   latch1 - second update arrived: first may proceed
  //   latch2 - first update committed: second may proceed (and hit the duplicate)
  RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
  final CountDownLatch latch1 = new CountDownLatch(1);
  final CountDownLatch latch2 = new CountDownLatch(1);
  final CountDownLatch latch3 = new CountDownLatch(1);
  doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      // First updateRegistry() call: wait for the second thread, then commit.
      latch3.countDown();
      latch1.await();
      invocation.callRealMethod();
      latch2.countDown();
      return null;
    }
  }).doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      // Second updateRegistry() call: release the first, wait for its commit.
      latch1.countDown();
      latch2.await();
      invocation.callRealMethod();
      return null;
    }
  })
  .when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
  final String jarName1 = default_binary_name;
  final String jarName2 = "DrillUDF_Copy-1.0.jar";
  final String query = "create function using jar '%s'";
  copyDefaultJarsToStagingArea();
  copyJarsToStagingArea(jarName2, JarUtil.getSourceName(jarName2));
  // First jar registers successfully.
  Thread thread1 = new Thread(new TestBuilderRunner(
      testBuilder()
          .sqlQuery(query, jarName1)
          .unOrdered()
          .baselineColumns("ok", "summary")
          .baselineValues(true,
              String.format("The following UDFs in jar %s have been registered:\n" +
                  "[custom_lower(VARCHAR-REQUIRED)]", jarName1))
  ));
  // Second jar is rejected as a duplicate of the first.
  Thread thread2 = new Thread(new TestBuilderRunner(
      testBuilder()
          .sqlQuery(query, jarName2)
          .unOrdered()
          .baselineColumns("ok", "summary")
          .baselineValues(false,
              String.format("Found duplicated function in %s: custom_lower(VARCHAR-REQUIRED)", jarName1))
  ));
  thread1.start();
  latch3.await();
  thread2.start();
  thread1.join();
  thread2.join();
  DataChangeVersion version = new DataChangeVersion();
  Registry registry = remoteFunctionRegistry.getRegistry(version);
  assertEquals("Remote registry version should match", 1, version.getVersion());
  List<Jar> jarList = registry.getJarList();
  assertEquals("Only one jar should be registered", 1, jarList.size());
  assertEquals("Jar name should match", jarName1, jarList.get(0).getName());
  verify(remoteFunctionRegistry, times(2)).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
}
@Test
public void testConcurrentRemoteRegistryUpdateForDifferentJars() throws Exception {
  // Two different jars are registered concurrently. Both threads are held inside
  // updateRegistry() (latch2 counts down twice) and released together (latch1),
  // so both registrations must still succeed.
  RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
  final CountDownLatch latch1 = new CountDownLatch(1);
  final CountDownLatch latch2 = new CountDownLatch(2);
  doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      latch2.countDown();
      latch1.await();
      invocation.callRealMethod();
      return null;
    }
  })
  .when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
  final String jarName1 = default_binary_name;
  final String jarName2 = "DrillUDF-2.0.jar";
  final String query = "create function using jar '%s'";
  copyDefaultJarsToStagingArea();
  copyJarsToStagingArea(jarName2, JarUtil.getSourceName(jarName2));
  Thread thread1 = new Thread(new TestBuilderRunner(
      testBuilder()
          .sqlQuery(query, jarName1)
          .unOrdered()
          .baselineColumns("ok", "summary")
          .baselineValues(true,
              String.format("The following UDFs in jar %s have been registered:\n" +
                  "[custom_lower(VARCHAR-REQUIRED)]", jarName1))
  ));
  Thread thread2 = new Thread(new TestBuilderRunner(
      testBuilder()
          .sqlQuery(query, jarName2)
          .unOrdered()
          .baselineColumns("ok", "summary")
          .baselineValues(true, String.format("The following UDFs in jar %s have been registered:\n" +
              "[custom_upper(VARCHAR-REQUIRED)]", jarName2))
  ));
  thread1.start();
  thread2.start();
  latch2.await();
  latch1.countDown();
  thread1.join();
  thread2.join();
  DataChangeVersion version = new DataChangeVersion();
  Registry registry = remoteFunctionRegistry.getRegistry(version);
  assertEquals("Remote registry version should match", 2, version.getVersion());
  List<Jar> actualJars = registry.getJarList();
  List<String> expectedJars = Lists.newArrayList(jarName1, jarName2);
  // Fixed misleading assertion message: two jars are expected here, not one.
  assertEquals("Both jars should be registered", 2, actualJars.size());
  for (Jar jar : actualJars) {
    assertTrue("Jar should be present in remote function registry", expectedJars.contains(jar.getName()));
  }
  // Three calls: both initial updates plus one more — presumably a retry after the
  // concurrent version conflict; confirm against RemoteFunctionRegistry semantics.
  verify(remoteFunctionRegistry, times(3)).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
}
@Test
public void testLazyInitConcurrent() throws Exception {
  // Two queries trigger lazy sync with the remote registry concurrently; the
  // latches force both threads into syncWithRemoteRegistry() and both sync
  // calls are expected to succeed.
  FunctionImplementationRegistry functionImplementationRegistry = spyFunctionImplementationRegistry();
  copyDefaultJarsToStagingArea();
  test("create function using jar '%s'", default_binary_name);
  final CountDownLatch latch1 = new CountDownLatch(1);
  final CountDownLatch latch2 = new CountDownLatch(1);
  final String query = "select custom_lower('A') from (values(1))";
  doAnswer(new Answer<Boolean>() {
    @Override
    public Boolean answer(InvocationOnMock invocation) throws Throwable {
      // First sync: wait until the second thread has entered, then run for real.
      latch1.await();
      boolean result = (boolean) invocation.callRealMethod();
      assertTrue("syncWithRemoteRegistry() should return true", result);
      latch2.countDown();
      return true;
    }
  })
  .doAnswer(new Answer() {
    @Override
    public Boolean answer(InvocationOnMock invocation) throws Throwable {
      // Second sync: release the first thread and let it finish before running.
      latch1.countDown();
      latch2.await();
      boolean result = (boolean) invocation.callRealMethod();
      assertTrue("syncWithRemoteRegistry() should return true", result);
      return true;
    }
  })
  .when(functionImplementationRegistry).syncWithRemoteRegistry(anyLong());
  // Same runner instance shared by both threads; each run issues the same query.
  SimpleQueryRunner simpleQueryRunner = new SimpleQueryRunner(query);
  Thread thread1 = new Thread(simpleQueryRunner);
  Thread thread2 = new Thread(simpleQueryRunner);
  thread1.start();
  thread2.start();
  thread1.join();
  thread2.join();
  verify(functionImplementationRegistry, times(2)).syncWithRemoteRegistry(anyLong());
  // Field read via reflection since there is no accessor.
  LocalFunctionRegistry localFunctionRegistry = Deencapsulation.getField(
      functionImplementationRegistry, "localFunctionRegistry");
  assertEquals("Sync function registry version should match", 1L, localFunctionRegistry.getVersion());
}
@Test
public void testLazyInitNoReload() throws Exception {
  // The first lookup syncs with the remote registry (returns true); a later lookup
  // of an unknown function must not trigger another reload (sync returns false).
  FunctionImplementationRegistry functionImplementationRegistry = spyFunctionImplementationRegistry();
  copyDefaultJarsToStagingArea();
  test("create function using jar '%s'", default_binary_name);
  doAnswer(new Answer<Boolean>() {
    @Override
    public Boolean answer(InvocationOnMock invocation) throws Throwable {
      boolean result = (boolean) invocation.callRealMethod();
      assertTrue("syncWithRemoteRegistry() should return true", result);
      return true;
    }
  })
  .doAnswer(new Answer() {
    @Override
    public Boolean answer(InvocationOnMock invocation) throws Throwable {
      boolean result = (boolean) invocation.callRealMethod();
      assertFalse("syncWithRemoteRegistry() should return false", result);
      return false;
    }
  })
  .when(functionImplementationRegistry).syncWithRemoteRegistry(anyLong());
  test("select custom_lower('A') from (values(1))");
  // unknown_lower was never registered, so the query must fail. Previously the
  // try/catch silently passed when the query unexpectedly succeeded.
  try {
    test("select unknown_lower('A') from (values(1))");
    throw new AssertionError("Query should have failed: unknown_lower is not registered");
  } catch (UserRemoteException e) {
    assertThat(e.getMessage(), containsString("No match found for function signature unknown_lower(<CHARACTER>)"));
  }
  verify(functionImplementationRegistry, times(2)).syncWithRemoteRegistry(anyLong());
  LocalFunctionRegistry localFunctionRegistry = Deencapsulation.getField(
      functionImplementationRegistry, "localFunctionRegistry");
  assertEquals("Sync function registry version should match", 1L, localFunctionRegistry.getVersion());
}
/** Copies the default binary and source jars into the remote registry staging area. */
private void copyDefaultJarsToStagingArea() throws IOException {
  copyJarsToStagingArea(new Path(jars.toURI()), default_binary_name, default_source_name);
}
/** Copies the named binary and source jars from the default jar directory into staging. */
private void copyJarsToStagingArea(String binaryName, String sourceName) throws IOException {
  copyJarsToStagingArea(new Path(jars.toURI()), binaryName, sourceName);
}
/**
 * Copies the given binary and source jars from {@code src} into the remote
 * function registry staging area.
 */
private void copyJarsToStagingArea(Path src, String binaryName, String sourceName) throws IOException {
  RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
  // Hoisted: fetch the file system and staging path once instead of per jar.
  FileSystem fs = remoteFunctionRegistry.getFs();
  Path stagingArea = remoteFunctionRegistry.getStagingArea();
  copyJar(fs, src, stagingArea, binaryName);
  copyJar(fs, src, stagingArea, sourceName);
}
/** Copies the single jar {@code name} from local {@code src} to {@code dest} on {@code fs}. */
private void copyJar(FileSystem fs, Path src, Path dest, String name) throws IOException {
  fs.copyFromLocalFile(new Path(src, name), dest);
}
/**
 * Replaces the drillbit's remote function registry with a Mockito spy and
 * returns it so tests can stub and verify registry interactions.
 */
private RemoteFunctionRegistry spyRemoteFunctionRegistry() {
  FunctionImplementationRegistry functionImplementationRegistry =
      getDrillbitContext().getFunctionImplementationRegistry();
  RemoteFunctionRegistry remoteFunctionRegistry = functionImplementationRegistry.getRemoteFunctionRegistry();
  RemoteFunctionRegistry spy = spy(remoteFunctionRegistry);
  // Injected via reflection since there is no setter for the field.
  Deencapsulation.setField(functionImplementationRegistry, "remoteFunctionRegistry", spy);
  return spy;
}
/**
 * Replaces the drillbit's function implementation registry with a Mockito spy
 * and returns it so tests can stub and verify lazy-sync behavior.
 */
private FunctionImplementationRegistry spyFunctionImplementationRegistry() {
  DrillbitContext drillbitContext = getDrillbitContext();
  FunctionImplementationRegistry spy = spy(drillbitContext.getFunctionImplementationRegistry());
  // Injected via reflection since there is no setter for the field.
  Deencapsulation.setField(drillbitContext, "functionRegistry", spy);
  return spy;
}
/** Runs a single query on a separate thread, rethrowing any failure unchecked. */
private class SimpleQueryRunner implements Runnable {

  private final String query;

  SimpleQueryRunner(String query) {
    this.query = query;
  }

  @Override
  public void run() {
    try {
      test(query);
    } catch (Exception cause) {
      // Runnable.run() cannot throw checked exceptions; wrap and rethrow.
      throw new RuntimeException(cause);
    }
  }
}
/** Executes a pre-configured {@link TestBuilder} on a separate thread, rethrowing failures unchecked. */
private class TestBuilderRunner implements Runnable {

  private final TestBuilder testBuilder;

  TestBuilderRunner(TestBuilder testBuilder) {
    this.testBuilder = testBuilder;
  }

  @Override
  public void run() {
    try {
      testBuilder.go();
    } catch (Exception cause) {
      // Runnable.run() cannot throw checked exceptions; wrap and rethrow.
      throw new RuntimeException(cause);
    }
  }
}
}
| apache-2.0 |
jabbrwcky/selenium | java/client/src/com/thoughtworks/selenium/webdriven/Windows.java | 6295 | // Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.thoughtworks.selenium.webdriven;
import com.thoughtworks.selenium.SeleniumException;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.NoSuchFrameException;
import org.openqa.selenium.NoSuchWindowException;
import org.openqa.selenium.WebDriver;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Emulates Selenium RC window / frame selection commands on top of WebDriver.
 * Remembers the last frame locator selected per window handle so that
 * re-selecting a window can restore its frame context.
 */
public class Windows {

  /** Maps window handle -> last frame locator selected in that window. */
  private final Map<String, String> lastFrame = new HashMap<>();
  private final String originalWindowHandle;

  public Windows(WebDriver driver) {
    originalWindowHandle = driver.getWindowHandle();
  }

  /**
   * Selects a window by Selenium RC window ID: {@code null}/"null"/empty selects
   * the original window, {@code _blank} the first unnamed popup, {@code title=...}
   * by page title, {@code name=...} (or a bare ID) by handle/name with a fallback
   * to title lookup. If a frame was previously selected in the target window, it
   * is re-selected; a stale frame entry is dropped.
   */
  public void selectWindow(WebDriver driver, String windowID) {
    if (null == windowID || "null".equals(windowID) || "".equals(windowID)) {
      driver.switchTo().window(originalWindowHandle);
    } else if ("_blank".equals(windowID)) {
      selectBlankWindow(driver);
    } else {
      if (windowID.startsWith("title=")) {
        selectWindowWithTitle(driver, windowID.substring("title=".length()));
        return;
      }

      if (windowID.startsWith("name=")) {
        windowID = windowID.substring("name=".length());
      }

      try {
        driver.switchTo().window(windowID);
      } catch (NoSuchWindowException e) {
        selectWindowWithTitle(driver, windowID);
      }
    }

    if (lastFrame.containsKey(driver.getWindowHandle())) {
      // If the frame has gone, fall back
      try {
        selectFrame(driver, lastFrame.get(driver.getWindowHandle()));
      } catch (SeleniumException e) {
        lastFrame.remove(driver.getWindowHandle());
      }
    }
  }

  /**
   * Selects a popup window. A "null" or empty ID selects the first window other
   * than the original one; any other ID is delegated to
   * {@link #selectWindow(WebDriver, String)}.
   *
   * @throws SeleniumException if no window besides the original one exists
   */
  public void selectPopUp(WebDriver driver, String windowID) {
    if ("null".equals(windowID) || "".equals(windowID)) {
      Set<String> windowHandles = driver.getWindowHandles();
      windowHandles.remove(originalWindowHandle);
      if (!windowHandles.isEmpty()) {
        driver.switchTo().window(windowHandles.iterator().next());
      } else {
        throw new SeleniumException("Unable to find a popup window");
      }
    } else {
      selectWindow(driver, windowID);
    }
  }

  /**
   * Selects a frame by locator: {@code relative=top}, {@code relative=up},
   * {@code index=N}, {@code id=...}, {@code name=...} or a bare id/name.
   * The chosen locator is remembered per window handle so
   * {@link #selectWindow(WebDriver, String)} can restore it later.
   *
   * @throws SeleniumException on a malformed index or a missing frame
   */
  public void selectFrame(WebDriver driver, String locator) {
    if ("relative=top".equals(locator)) {
      driver.switchTo().defaultContent();
      lastFrame.remove(driver.getWindowHandle());
      return;
    }

    if ("relative=up".equals(locator)) {
      driver.switchTo().parentFrame();
      lastFrame.put(driver.getWindowHandle(), locator);
      return;
    }

    if (locator.startsWith("index=")) {
      try {
        int index = Integer.parseInt(locator.substring("index=".length()));
        lastFrame.put(driver.getWindowHandle(), locator);
        driver.switchTo().frame(index);
        return;
      } catch (NumberFormatException | NoSuchFrameException e) {
        // Multi-catch replaces two duplicated catch blocks with identical bodies.
        throw new SeleniumException(e.getMessage(), e);
      }
    }

    if (locator.startsWith("id=")) {
      locator = locator.substring("id=".length());
    } else if (locator.startsWith("name=")) {
      locator = locator.substring("name=".length());
    }

    try {
      lastFrame.put(driver.getWindowHandle(), locator);
      driver.switchTo().frame(locator);
    } catch (NoSuchFrameException e) {
      throw new SeleniumException(e.getMessage(), e);
    }
  }

  /** Switches to the first window whose title matches; restores the current window on failure. */
  private void selectWindowWithTitle(WebDriver driver, String title) {
    String current = driver.getWindowHandle();
    for (String handle : driver.getWindowHandles()) {
      driver.switchTo().window(handle);
      if (title.equals(driver.getTitle())) {
        return;
      }
    }
    // No match: restore the previously selected window before failing.
    driver.switchTo().window(current);
    throw new SeleniumException("Unable to select window with title: " + title);
  }

  /**
   * Selects the only <code>_blank</code> window. A window open with <code>target='_blank'</code>
   * will have a <code>window.name = null</code>.
   * <p>
   * This method assumes that there will only be one single <code>_blank</code> window and selects
   * the first one with no name. Therefore if for any reasons there are multiple windows with
   * <code>window.name = null</code> the first found one will be selected.
   * <p>
   * If none of the windows have <code>window.name = null</code> the last selected one will be
   * re-selected and a {@link SeleniumException} will be thrown.
   *
   * @param driver WebDriver
   * @throws NoSuchWindowException if no window with <code>window.name = null</code> is found.
   */
  public void selectBlankWindow(WebDriver driver) {
    String current = driver.getWindowHandle();
    // Find the first window without a "name" attribute
    List<String> handles = new ArrayList<>(driver.getWindowHandles());
    for (String handle : handles) {
      // the original window will never be a _blank window, so don't even look at it
      // this is also important to skip, because the original/root window won't have
      // a name either, so if we didn't know better we might think it's a _blank popup!
      if (handle.equals(originalWindowHandle)) {
        continue;
      }
      driver.switchTo().window(handle);
      String value = (String)
          ((JavascriptExecutor) driver).executeScript("return window.name;");
      if (value == null || "".equals(value)) {
        // We found it!
        return;
      }
    }
    // We couldn't find it
    driver.switchTo().window(current);
    throw new SeleniumException("Unable to select window _blank");
  }
}
| apache-2.0 |
vineetgarg02/hive | ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rewrite/AlterMaterializedViewRewriteDesc.java | 2624 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.ddl.view.materialized.alter.rewrite;
import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.Explain.Level;
/**
 * DDL task description for the ALTER MATERIALIZED VIEW (ENABLE|DISABLE) REWRITE commands.
 * Immutable: carries the target view name and the desired rewrite flag.
 */
@Explain(displayName = "Alter Materialized View Rewrite", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class AlterMaterializedViewRewriteDesc implements DDLDescWithWriteId {
  // Presumably fully qualified (db.view) per the "fq" prefix — confirm at call sites.
  private final String fqMaterializedViewName;
  // true => ENABLE REWRITE, false => DISABLE REWRITE.
  private final boolean rewriteEnable;
  public AlterMaterializedViewRewriteDesc(String fqMaterializedViewName, boolean rewriteEnable) {
    this.fqMaterializedViewName = fqMaterializedViewName;
    this.rewriteEnable = rewriteEnable;
  }
  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getMaterializedViewName() {
    return fqMaterializedViewName;
  }
  @Explain(displayName = "enable", displayOnlyOnTrue = true,
      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public boolean isRewriteEnable() {
    return rewriteEnable;
  }
  /** Only for explaining. */
  @Explain(displayName = "disable", displayOnlyOnTrue = true,
      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public boolean isRewriteDisable() {
    return !rewriteEnable;
  }
  @Override
  public void setWriteId(long writeId) {
    // We don't actually need the write id, but by implementing DDLDescWithWriteId it ensures that it is allocated
  }
  @Override
  public String getFullTableName() {
    return fqMaterializedViewName;
  }
  @Override
  public boolean mayNeedWriteId() {
    return true; // Verified when this is set as DDL Desc for ACID.
  }
}
| apache-2.0 |
howepeng/isis | core/metamodel/src/test/java/org/apache/isis/core/metamodel/facets/propparam/specification/SpecificationAlwaysSatisfied.java | 1112 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.facets.propparam.specification;
import org.apache.isis.applib.spec.Specification;
/**
 * {@link Specification} that is satisfied by every candidate object:
 * {@link #satisfies(Object)} always returns {@code null} (no reason to reject).
 */
public class SpecificationAlwaysSatisfied implements Specification {
  @Override
  public String satisfies(final Object obj) {
    // null means the specification is met for any input.
    return null;
  }
}
| apache-2.0 |
howepeng/isis | core/metamodel/src/main/java/org/apache/isis/core/metamodel/consent/Veto.java | 1737 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.consent;
import static org.apache.isis.core.commons.ensure.Ensure.ensureThatArg;
import static org.apache.isis.core.commons.matchers.IsisMatchers.nonEmptyString;
import org.apache.isis.core.metamodel.facetapi.Facet;
public class Veto extends ConsentAbstract {

    private static final long serialVersionUID = 1L;

    /**
     * Shared veto instance used when no more specific reason is available.
     * Declared {@code final} so the shared constant cannot be reassigned.
     */
    public static final Veto DEFAULT = new Veto("Vetoed by default");

    /**
     * Creates a veto carrying an explicit reason.
     *
     * <p>Called by DnD viewer; we should instead find a way to put the calling
     * logic into {@link Facet}s so that it is available for use by other
     * viewers.
     *
     * @param reasonVetoed
     *            - must not be <tt>null</tt> or empty (enforced via
     *            {@code ensureThatArg(..., nonEmptyString())})
     */
    public Veto(final String reasonVetoed) {
        super(null, ensureThatArg(reasonVetoed, nonEmptyString()));
    }

    /**
     * Creates a veto backed by the outcome of an interaction; the reason(s)
     * are carried by the supplied {@link InteractionResult}.
     */
    public Veto(final InteractionResult interactionResult) {
        super(interactionResult);
    }
}
| apache-2.0 |
ananthc/apex-malhar | benchmark/src/main/java/org/apache/apex/benchmark/stream/IntegerOperator.java | 2296 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.apex.benchmark.stream;
import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.InputOperator;
/**
*
* Integer input operator which emits Integer tuples only.
* This operator is benchmarked to emit more than 2 million tuples/sec on cluster node.
*
* @since 2.0.0
*/
public class IntegerOperator implements InputOperator
{
  /**
   * Port on which integer tuples are emitted.
   */
  public final transient DefaultOutputPort<Integer> integer_data = new DefaultOutputPort<Integer>();

  /**
   * Emits the constant value 21 one thousand times per invocation.
   */
  @Override
  public void emitTuples()
  {
    final Integer value = 21;
    int remaining = 1000;
    while (remaining > 0) {
      integer_data.emit(value);
      remaining--;
    }
  }

  /** No per-window state to initialize. */
  @Override
  public void beginWindow(long windowId)
  {
    // intentionally empty
  }

  /** No per-window state to flush. */
  @Override
  public void endWindow()
  {
    // intentionally empty
  }

  /** No resources to acquire. */
  @Override
  public void setup(OperatorContext context)
  {
    // intentionally empty
  }

  /** No resources to release. */
  @Override
  public void teardown()
  {
    // intentionally empty
  }
}
| apache-2.0 |
phani546/elasticsearch | core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java | 8109 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.simple;
import com.google.common.base.Charsets;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.hamcrest.Matchers.equalTo;
/**
*
*/
public class SimpleMapperTests extends ElasticsearchSingleNodeTest {

    // Builds a person > name > first mapping programmatically (stored, not
    // indexed) and verifies parsing test1.json extracts "shay"; parses the
    // same source twice to confirm the mapper is reusable.
    @Test
    public void testSimpleMapper() throws Exception {
        IndexService indexService = createIndex("test");
        Settings settings = indexService.settingsService().getSettings();
        DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
        DocumentMapper docMapper = doc("test", settings,
                rootObject("person")
                        .add(object("name").add(stringField("first").store(true).index(false))),
                indexService.mapperService()).build(indexService.mapperService(), mapperParser);
        BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
        Document doc = docMapper.parse("person", "1", json).rootDoc();
        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay"));
        // System.out.println("Document: " + doc);
        // System.out.println("Json: " + docMapper.sourceMapper().value(doc));
        doc = docMapper.parse("person", "1", json).rootDoc();
        // System.out.println("Document: " + doc);
        // System.out.println("Json: " + docMapper.sourceMapper().value(doc));
    }

    // Round-trips a JSON mapping: parse -> serialize -> re-parse, then checks
    // the rebuilt mapper still extracts uid and name.first from test1.json.
    @Test
    public void testParseToJsonAndParse() throws Exception {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        DocumentMapper docMapper = parser.parse(mapping);
        String builtMapping = docMapper.mappingSource().string();
        // System.out.println(builtMapping);
        // reparse it
        DocumentMapper builtDocMapper = parser.parse(builtMapping);
        BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
        Document doc = builtDocMapper.parse("person", "1", json).rootDoc();
        assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay"));
        // System.out.println("Document: " + doc);
        // System.out.println("Json: " + docMapper.sourceMapper().value(doc));
    }

    // Parses the JSON mapping directly and checks _meta propagation plus
    // uid/name.first extraction from a typed, id-carrying document.
    @Test
    public void testSimpleParser() throws Exception {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
        BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
        Document doc = docMapper.parse("person", "1", json).rootDoc();
        assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay"));
        // System.out.println("Document: " + doc);
        // System.out.println("Json: " + docMapper.sourceMapper().value(doc));
    }

    // Same as above, but the source document carries neither _type nor _id;
    // both are supplied externally via the parse(...) call.
    @Test
    public void testSimpleParserNoTypeNoId() throws Exception {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json"));
        Document doc = docMapper.parse("person", "1", json).rootDoc();
        assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1")));
        assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay"));
        // System.out.println("Document: " + doc);
        // System.out.println("Json: " + docMapper.sourceMapper().value(doc));
    }

    // Verifies custom _meta attributes survive a serialize/re-parse cycle.
    @Test
    public void testAttributes() throws Exception {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        DocumentMapper docMapper = parser.parse(mapping);
        assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
        String builtMapping = docMapper.mappingSource().string();
        DocumentMapper builtDocMapper = parser.parse(builtMapping);
        assertThat((String) builtDocMapper.meta().get("param1"), equalTo("value1"));
    }

    // An empty byte payload must be rejected with a MapperParsingException
    // carrying the exact "document is empty" message.
    @Test
    public void testNoDocumentSent() throws Exception {
        IndexService indexService = createIndex("test");
        Settings settings = indexService.settingsService().getSettings();
        DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
        DocumentMapper docMapper = doc("test", settings,
                rootObject("person")
                        .add(object("name").add(stringField("first").store(true).index(false))),
                indexService.mapperService()).build(indexService.mapperService(), mapperParser);
        BytesReference json = new BytesArray("".getBytes(Charsets.UTF_8));
        try {
            docMapper.parse("person", "1", json).rootDoc();
            fail("this point is never reached");
        } catch (MapperParsingException e) {
            assertThat(e.getMessage(), equalTo("failed to parse, document is empty"));
        }
    }

    // Field names containing '.' must be rejected at mapping-parse time.
    // NOTE: no @Test annotation in the original; presumably the enclosing
    // test framework picks up test* methods by name — confirm before adding.
    public void testHazardousFieldNames() throws Exception {
        IndexService indexService = createIndex("test");
        DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
                .startObject("foo.bar").field("type", "string").endObject()
                .endObject().endObject().string();
        try {
            mapperParser.parse(mapping);
            fail("Mapping parse should have failed");
        } catch (MapperParsingException e) {
            assertTrue(e.getMessage(), e.getMessage().contains("cannot contain '.'"));
        }
    }
}
| apache-2.0 |
yush1ga/pulsar | pulsar-common/src/main/java/org/apache/pulsar/common/policies/data/BrokerStatus.java | 2063 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.common.policies.data;
import com.google.common.collect.ComparisonChain;
public class BrokerStatus implements Comparable<BrokerStatus> {
private String brokerAddress;
private boolean active;
private int loadFactor;
public BrokerStatus(String lookupServiceAddress, boolean active, int loadFactor) {
this.brokerAddress = lookupServiceAddress;
this.active = active;
this.loadFactor = loadFactor;
}
public boolean isActive() {
return this.active;
}
public int getLoadFactor() {
return this.loadFactor;
}
public String getBrokerAddress() {
return this.brokerAddress;
}
public void setActive(boolean active) {
this.active = active;
}
public void setLoadFactor(int loadFactor) {
this.loadFactor = loadFactor;
}
@Override
public int compareTo(BrokerStatus other) {
return ComparisonChain.start().compare(this.loadFactor, other.loadFactor)
.compare(this.brokerAddress, other.brokerAddress).result();
}
@Override
public String toString() {
return String.format("[brokerAddress=%s, active=%s, loadFactor=%s]", brokerAddress, active, loadFactor);
}
}
| apache-2.0 |
google/guava | guava/src/com/google/common/collect/RangeGwtSerializationDependencies.java | 1267 | /*
* Copyright (C) 2016 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import com.google.common.annotations.GwtCompatible;
import java.io.Serializable;
/**
 * A dummy superclass to support GWT serialization of the element type of a {@link Range}. The GWT
 * supersource for this class contains a field of type {@code C}.
 *
 * <p>On the server/JVM side the class body is intentionally empty: only the GWT supersource
 * version carries state, which is what forces the GWT serializer to handle {@code C}.
 *
 * <p>For details about this hack, see {@code GwtSerializationDependencies}, which takes the same
 * approach but with a subclass rather than a superclass.
 *
 * <p>TODO(cpovirk): Consider applying this subclass approach to our other types.
 */
@GwtCompatible(emulated = true)
abstract class RangeGwtSerializationDependencies<C extends Comparable> implements Serializable {}
| apache-2.0 |
jwren/intellij-community | python/src/com/jetbrains/python/codeInsight/liveTemplates/PyClassNameMacro.java | 1109 | // Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.codeInsight.liveTemplates;
import com.intellij.codeInsight.template.*;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.psi.PyClass;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public class PyClassNameMacro extends Macro {
  /** Macro name as referenced from live templates. */
  @Override
  public String getName() {
    return "pyClassName";
  }

  /**
   * Resolves the name of the Python class enclosing the template invocation
   * point, or {@code null} when the caret is not inside a class or the class
   * is anonymous/unnamed.
   */
  @Nullable
  @Override
  public Result calculateResult(Expression @NotNull [] params, ExpressionContext context) {
    final PsiElement anchor = context.getPsiElementAtStartOffset();
    final PyClass enclosingClass = PsiTreeUtil.getParentOfType(anchor, PyClass.class);
    if (enclosingClass == null) {
      return null;
    }
    final String className = enclosingClass.getName();
    if (className == null) {
      return null;
    }
    return new TextResult(className);
  }

  /** This macro only makes sense inside Python template contexts. */
  @Override
  public boolean isAcceptableInContext(TemplateContextType context) {
    return context instanceof PythonTemplateContextType;
  }
}
| apache-2.0 |
shun634501730/java_source_cn | src_en/java/awt/geom/LineIterator.java | 3878 | /*
* Copyright (c) 1997, 1999, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.awt.geom;
import java.util.*;
/**
* A utility class to iterate over the path segments of a line segment
* through the PathIterator interface.
*
* @author Jim Graham
*/
class LineIterator implements PathIterator {
    Line2D line;
    AffineTransform affine;
    int index;

    /**
     * Wraps the given line segment; an optional transform is applied to every
     * coordinate pair returned by {@code currentSegment}.
     */
    LineIterator(Line2D l, AffineTransform at) {
        this.line = l;
        this.affine = at;
    }

    /**
     * Return the winding rule for determining the insideness of the path.
     * A bare segment has no interior, so WIND_NON_ZERO is always reported.
     *
     * @see #WIND_EVEN_ODD
     * @see #WIND_NON_ZERO
     */
    public int getWindingRule() {
        return WIND_NON_ZERO;
    }

    /**
     * Tests if there are more points to read. The iteration yields exactly
     * two segments (indices 0 and 1), so it is done once index exceeds 1.
     *
     * @return true if there are no more points to read
     */
    public boolean isDone() {
        return index > 1;
    }

    /**
     * Advances the iterator to the next segment of the path.
     */
    public void next() {
        index++;
    }

    /**
     * Stores the current segment's coordinates into {@code coords} (as
     * floats) and returns its type: SEG_MOVETO for the first endpoint,
     * SEG_LINETO for the second. The optional transform is applied in place.
     *
     * @throws NoSuchElementException if the iteration is already exhausted
     * @see #SEG_MOVETO
     * @see #SEG_LINETO
     */
    public int currentSegment(float[] coords) {
        if (isDone()) {
            throw new NoSuchElementException("line iterator out of bounds");
        }
        final boolean first = (index == 0);
        coords[0] = (float) (first ? line.getX1() : line.getX2());
        coords[1] = (float) (first ? line.getY1() : line.getY2());
        if (affine != null) {
            affine.transform(coords, 0, coords, 0, 1);
        }
        return first ? SEG_MOVETO : SEG_LINETO;
    }

    /**
     * Stores the current segment's coordinates into {@code coords} (as
     * doubles) and returns its type: SEG_MOVETO for the first endpoint,
     * SEG_LINETO for the second. The optional transform is applied in place.
     *
     * @throws NoSuchElementException if the iteration is already exhausted
     * @see #SEG_MOVETO
     * @see #SEG_LINETO
     */
    public int currentSegment(double[] coords) {
        if (isDone()) {
            throw new NoSuchElementException("line iterator out of bounds");
        }
        final boolean first = (index == 0);
        coords[0] = first ? line.getX1() : line.getX2();
        coords[1] = first ? line.getY1() : line.getY2();
        if (affine != null) {
            affine.transform(coords, 0, coords, 0, 1);
        }
        return first ? SEG_MOVETO : SEG_LINETO;
    }
}
| apache-2.0 |
cstamas/orientdb | server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/command/get/OServerCommandKillDbConnection.java | 3240 | /*
*
* * Copyright 2014 Orient Technologies.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package com.orientechnologies.orient.server.network.protocol.http.command.get;
import com.orientechnologies.orient.core.serialization.serializer.OJSONWriter;
import com.orientechnologies.orient.server.OClientConnection;
import com.orientechnologies.orient.server.OClientConnectionManager;
import com.orientechnologies.orient.server.network.protocol.ONetworkProtocolData;
import com.orientechnologies.orient.server.network.protocol.http.OHttpRequest;
import com.orientechnologies.orient.server.network.protocol.http.OHttpResponse;
import com.orientechnologies.orient.server.network.protocol.http.OHttpUtils;
import com.orientechnologies.orient.server.network.protocol.http.ONetworkProtocolHttpAbstract;
import com.orientechnologies.orient.server.network.protocol.http.command.OServerCommandAuthenticatedDbAbstract;
import java.io.IOException;
import java.io.StringWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
public class OServerCommandKillDbConnection extends OServerCommandAuthenticatedDbAbstract {
  private static final String[] NAMES = { "POST|dbconnection/*" };

  /**
   * Handles {@code POST dbconnection/<database>}: kills every registered
   * client connection whose HTTP session, database and command match the
   * caller's, then answers 204 No Content.
   */
  @Override
  public boolean execute(OHttpRequest iRequest, OHttpResponse iResponse) throws Exception {
    final String[] urlParts = checkSyntax(iRequest.url, 2, "Syntax error: dbconnection/<database>");
    doPost(iRequest, iResponse, urlParts[1], iRequest.content);
    return false;
  }

  private void doPost(OHttpRequest iRequest, OHttpResponse iResponse, String db, String command) throws IOException {
    for (OClientConnection candidate : OClientConnectionManager.instance().getConnections()) {
      if (checkDbSession(iRequest, db, command, candidate)) {
        OClientConnectionManager.instance().kill(candidate.id);
      }
    }
    iResponse.send(OHttpUtils.STATUS_OK_NOCONTENT_CODE, OHttpUtils.STATUS_OK_NOCONTENT_DESCRIPTION, OHttpUtils.CONTENT_TEXT_PLAIN,
        null, null);
  }

  /**
   * Returns true when the given connection is an HTTP connection bound to the
   * same database, command and session id as the incoming request. Checks are
   * evaluated in the same short-circuit order as the original boolean chain.
   */
  public boolean checkDbSession(OHttpRequest iRequest, String db, String command, OClientConnection connection) {
    if (!(connection.protocol instanceof ONetworkProtocolHttpAbstract)) {
      return false;
    }
    if (!db.equals(connection.data.lastDatabase)) {
      return false;
    }
    if (!command.equals(connection.data.commandDetail)) {
      return false;
    }
    return ((ONetworkProtocolHttpAbstract) connection.protocol).getSessionID().equals(iRequest.sessionId);
  }

  @Override
  public String[] getNames() {
    return NAMES;
  }
}
| apache-2.0 |
TheUtils/weixin-popular | src/main/java/weixin/popular/bean/paymch/Transfers.java | 2563 | package weixin.popular.bean.paymch;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * JAXB-mapped request bean serialized as an {@code <xml>} root document.
 *
 * <p>Field access is used ({@code XmlAccessType.FIELD}), so the snake_case
 * field names below map directly to XML element names — do not rename them.
 *
 * <p>NOTE(review): judging by the field set (mch_appid, partner_trade_no,
 * check_name, amount, spbill_create_ip, ...) this appears to model the WeChat
 * Pay "transfers" (企业付款) API request — confirm against the official API
 * documentation before relying on field semantics.
 */
@XmlRootElement(name="xml")
@XmlAccessorType(XmlAccessType.FIELD)
public class Transfers {
	// merchant identity
	private String mch_appid;
	private String mchid;
	private String sub_mch_id;
	private String device_info;
	// request signing
	private String nonce_str;
	private String sign;
	// transfer details
	private String partner_trade_no;
	private String openid;
	private String check_name;
	private String re_user_name;
	private String amount;
	private String desc;
	private String spbill_create_ip;
	public String getMch_appid() {
		return mch_appid;
	}
	public void setMch_appid(String mch_appid) {
		this.mch_appid = mch_appid;
	}
	public String getMchid() {
		return mchid;
	}
	public void setMchid(String mchid) {
		this.mchid = mchid;
	}
	public String getSub_mch_id() {
		return sub_mch_id;
	}
	public void setSub_mch_id(String sub_mch_id) {
		this.sub_mch_id = sub_mch_id;
	}
	public String getDevice_info() {
		return device_info;
	}
	public void setDevice_info(String device_info) {
		this.device_info = device_info;
	}
	public String getNonce_str() {
		return nonce_str;
	}
	public void setNonce_str(String nonce_str) {
		this.nonce_str = nonce_str;
	}
	public String getSign() {
		return sign;
	}
	public void setSign(String sign) {
		this.sign = sign;
	}
	public String getPartner_trade_no() {
		return partner_trade_no;
	}
	public void setPartner_trade_no(String partner_trade_no) {
		this.partner_trade_no = partner_trade_no;
	}
	public String getOpenid() {
		return openid;
	}
	public void setOpenid(String openid) {
		this.openid = openid;
	}
	public String getCheck_name() {
		return check_name;
	}
	public void setCheck_name(String check_name) {
		this.check_name = check_name;
	}
	public String getRe_user_name() {
		return re_user_name;
	}
	public void setRe_user_name(String re_user_name) {
		this.re_user_name = re_user_name;
	}
	public String getAmount() {
		return amount;
	}
	public void setAmount(String amount) {
		this.amount = amount;
	}
	public String getDesc() {
		return desc;
	}
	public void setDesc(String desc) {
		this.desc = desc;
	}
	public String getSpbill_create_ip() {
		return spbill_create_ip;
	}
	public void setSpbill_create_ip(String spbill_create_ip) {
		this.spbill_create_ip = spbill_create_ip;
	}
}
| apache-2.0 |
talenguyen/storio | storio-sqlite/src/main/java/com/pushtorefresh/storio/sqlite/operations/put/PutResults.java | 3173 | package com.pushtorefresh.storio.sqlite.operations.put;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import java.util.Collections;
import java.util.Map;
/**
* Immutable container for results of Put Operation.
* <p>
* Instances of this class are Immutable
*
* @param <T> type of objects that were put.
*/
public final class PutResults<T> {

    /** Immutable mapping from each put object to the result of its Put Operation. */
    @NonNull
    private final Map<T, PutResult> results;

    /** Lazily computed insert count; volatile so the cached value is visible across threads. */
    @Nullable
    private transient volatile Integer numberOfInsertsCache;

    /** Lazily computed updated-row count; volatile for cross-thread visibility. */
    @Nullable
    private transient volatile Integer numberOfUpdatesCache;

    private PutResults(@NonNull Map<T, PutResult> putResults) {
        this.results = Collections.unmodifiableMap(putResults);
    }

    /**
     * Creates new instance of {@link PutResults}.
     *
     * @param putResults results of Put Operation.
     * @param <T>        type of objects.
     * @return immutable instance of {@link PutResults}.
     */
    @NonNull
    public static <T> PutResults<T> newInstance(@NonNull Map<T, PutResult> putResults) {
        return new PutResults<T>(putResults);
    }

    /**
     * Returns immutable Map of pairs {@code (object, PutResult)}.
     *
     * @return immutable Map of pairs {@code (object, PutResult)}.
     */
    @NonNull
    public Map<T, PutResult> results() {
        return results;
    }

    /**
     * Returns number of inserts from all {@link #results()}.
     *
     * @return number of inserts from all {@link #results()}.
     */
    public int numberOfInserts() {
        final Integer cachedValue = numberOfInsertsCache;

        if (cachedValue != null) {
            return cachedValue;
        }

        // Iterate the values directly instead of keySet() + get(key):
        // one traversal, no redundant hash lookups.
        int numberOfInserts = 0;

        for (PutResult putResult : results.values()) {
            if (putResult.wasInserted()) {
                numberOfInserts++;
            }
        }

        numberOfInsertsCache = numberOfInserts;
        return numberOfInserts;
    }

    /**
     * Returns number of updates from all {@link #results()}.
     *
     * @return number of updated rows summed over all {@link #results()}.
     */
    public int numberOfUpdates() {
        final Integer cachedValue = numberOfUpdatesCache;

        if (cachedValue != null) {
            return cachedValue;
        }

        int numberOfUpdates = 0;

        for (PutResult putResult : results.values()) {
            if (putResult.wasUpdated()) {
                //noinspection ConstantConditions
                numberOfUpdates += putResult.numberOfRowsUpdated();
            }
        }

        numberOfUpdatesCache = numberOfUpdates;
        return numberOfUpdates;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        PutResults<?> that = (PutResults<?>) o;

        return results.equals(that.results);
    }

    @Override
    public int hashCode() {
        return results.hashCode();
    }

    @Override
    public String toString() {
        return "PutResults{" +
                "results=" + results +
                '}';
    }
}
| apache-2.0 |
koshalt/modules | dhis2/src/main/java/org/motechproject/dhis2/service/impl/DataSetServiceImpl.java | 1709 | package org.motechproject.dhis2.service.impl;
import org.motechproject.dhis2.domain.DataElement;
import org.motechproject.dhis2.domain.DataSet;
import org.motechproject.dhis2.repository.DataSetDataService;
import org.motechproject.dhis2.rest.domain.DataElementDto;
import org.motechproject.dhis2.rest.domain.DataSetDto;
import org.motechproject.dhis2.service.DataElementService;
import org.motechproject.dhis2.service.DataSetService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
/**
 * Default implementation of the {@link DataSetService} interface, delegating
 * persistence to {@link DataSetDataService}.
 */
@Service
public class DataSetServiceImpl implements DataSetService {

    @Autowired
    private DataSetDataService dataSetDataService;

    @Autowired
    private DataElementService dataElementService;

    /**
     * Builds a {@link DataSet} from the given DTO — resolving each referenced
     * data element by its id — and persists it.
     */
    @Override
    public DataSet createFromDetails(DataSetDto dto) {
        final DataSet newDataSet = new DataSet();
        newDataSet.setName(dto.getName());
        newDataSet.setUuid(dto.getId());

        final List<DataElement> resolvedElements = new ArrayList<>();
        for (DataElementDto elementDto : dto.getDataElements()) {
            resolvedElements.add(dataElementService.findById(elementDto.getId()));
        }
        newDataSet.setDataElementList(resolvedElements);

        return dataSetDataService.create(newDataSet);
    }

    /** Returns every persisted data set. */
    @Override
    public List<DataSet> findAll() {
        return dataSetDataService.retrieveAll();
    }

    /** Looks up a single data set by its uuid. */
    @Override
    public DataSet findByUuid(String uuid) {
        return dataSetDataService.findByUuid(uuid);
    }

    /** Removes every persisted data set. */
    @Override
    public void deleteAll() {
        dataSetDataService.deleteAll();
    }
}
| bsd-3-clause |
xiaohanz/softcontroller | opendaylight/hosttracker_new/implementation/src/test/java/org/opendaylight/controller/hosttracker/test/MockEntityClassifier.java | 2280 | /*
* Copyright (c) 2013 Big Switch Networks, Inc.
*
* Licensed under the Eclipse Public License, Version 1.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.eclipse.org/legal/epl-v10.html
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.opendaylight.controller.hosttracker.test;
import static org.opendaylight.controller.hosttracker.IDeviceService.DeviceField.MAC;
import static org.opendaylight.controller.hosttracker.IDeviceService.DeviceField.SWITCHPORT;
import static org.opendaylight.controller.hosttracker.IDeviceService.DeviceField.VLAN;
import java.util.EnumSet;
import org.opendaylight.controller.hosttracker.Entity;
import org.opendaylight.controller.hosttracker.IDeviceService;
import org.opendaylight.controller.hosttracker.IDeviceService.DeviceField;
import org.opendaylight.controller.hosttracker.IEntityClass;
import org.opendaylight.controller.hosttracker.internal.DefaultEntityClassifier;
/**
* A simple IEntityClassifier. Useful for tests that need IEntityClassifiers and
* IEntityClass'es with switch and/or port key fields
*/
public class MockEntityClassifier extends DefaultEntityClassifier {
public static class TestEntityClass implements IEntityClass {
@Override
public EnumSet<DeviceField> getKeyFields() {
return EnumSet.of(MAC, VLAN, SWITCHPORT);
}
@Override
public String getName() {
return "TestEntityClass";
}
}
public static IEntityClass testEC = new MockEntityClassifier.TestEntityClass();
@Override
public IEntityClass classifyEntity(Entity entity) {
if (((Long) entity.getPort().getNode().getID()) >= 10L) {
return testEC;
}
return DefaultEntityClassifier.entityClass;
}
@Override
public EnumSet<IDeviceService.DeviceField> getKeyFields() {
return EnumSet.of(MAC, VLAN, SWITCHPORT);
}
} | epl-1.0 |
Snickermicker/smarthome | bundles/config/org.eclipse.smarthome.config.core/src/main/java/org/eclipse/smarthome/config/core/ConfigDescriptionProvider.java | 1595 | /**
* Copyright (c) 2014,2019 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.eclipse.smarthome.config.core;
import java.net.URI;
import java.util.Collection;
import java.util.Locale;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
/**
* The {@link ConfigDescriptionProvider} can be implemented and registered as an <i>OSGi</i>
* service to provide {@link ConfigDescription}s.
*
* @author Dennis Nobel - Initial contribution
* @author Michael Grammling - Initial contribution
*/
@NonNullByDefault
public interface ConfigDescriptionProvider {

    /**
     * Provides a collection of {@link ConfigDescription}s.
     *
     * @param locale the locale to localize descriptions for; may be null
     * @return the configuration descriptions provided by this provider (not
     *         null, could be empty)
     */
    Collection<ConfigDescription> getConfigDescriptions(@Nullable Locale locale);

    /**
     * Provides a {@link ConfigDescription} for the given URI.
     *
     * @param uri uri of the config description
     * @param locale the locale to localize the description for; may be null
     * @return config description, or null if no config description could be
     *         found for the given URI
     */
    @Nullable
    ConfigDescription getConfigDescription(URI uri, @Nullable Locale locale);
}
| epl-1.0 |
zhudongfang/Sipdroid | src/org/zoolu/sdp/SdpParser.java | 6107 | /*
* Copyright (C) 2005 Luca Veltri - University of Parma - Italy
*
* This file is part of MjSip (http://www.mjsip.org)
*
* MjSip is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* MjSip is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MjSip; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Author(s):
* Luca Veltri (luca.veltri@unipr.it)
* Nitin Khanna, Hughes Systique Corp. (Reason: Android specific change, optmization, bug fix)
*/
package org.zoolu.sdp;
/* HSC CHANGE STARTS */
// import org.zoolu.sdp.*;
/* HSC CHANGE ENDS */
import org.zoolu.tools.Parser;
import java.util.Vector;
/**
 * Class SdpParser extends class Parser for parsing of SDP strings
 * (RFC 4566 "x=value" lines).
 */
class SdpParser extends Parser {

	/** Creates a SdpParser based on String <i>s</i> */
	public SdpParser(String s) {
		super(s);
	}

	/**
	 * Creates a SdpParser based on String <i>s</i> and starting from position
	 * <i>i</i>
	 */
	public SdpParser(String s, int i) {
		super(s, i);
	}

	/**
	 * Returns the first SdpField. The SDP value terminates with the end of the
	 * String or with the first CR or LF char.
	 *
	 * @return the first SdpField, or null if no SdpField is recognized.
	 */
	public SdpField parseSdpField() {
		// Scan forward for the next "x=value" line. The previous code set
		// begin to the index OF the newline itself, so the next
		// str.indexOf("\n", begin) returned the same position forever
		// (infinite loop); we now step one past the newline.
		int begin = index;
		while (begin >= 0 && begin < str.length() - 1
				&& str.charAt(begin + 1) != '=') {
			int nl = str.indexOf('\n', begin);
			begin = (nl < 0) ? -1 : nl + 1;
		}
		// Also guard against a lone trailing char, which previously caused a
		// StringIndexOutOfBoundsException in the substring below.
		if (begin < 0 || begin >= str.length() - 1)
			return null;
		char type = str.charAt(begin);
		begin += 2; // skip the type char and '='
		int end = str.length();
		int cr = str.indexOf('\r', begin);
		if (cr > 0 && cr < end)
			end = cr;
		int lf = str.indexOf('\n', begin);
		if (lf > 0 && lf < end)
			end = lf;
		String value = str.substring(begin, end).trim();
		setPos(end);
		goToNextLine();
		return new SdpField(type, value);
	}

	/**
	 * Returns the first SdpField of type <i>type</i>. The SDP value terminates
	 * with the end of the String or with the first CR or LF char.
	 *
	 * @return the first SdpField, or null if no <i>type</i> SdpField is found.
	 */
	public SdpField parseSdpField(char type) {
		if (!str.startsWith(type + "=", index)) {
			int begin = str.indexOf("\n" + type + "=", index);
			if (begin < 0) {
				// no field of the requested type remains
				return null;
			}
			index = begin + 1;
		}
		return parseSdpField();
	}

	/**
	 * Returns the first OriginField.
	 *
	 * @return the first OriginField, or null if no OriginField is found.
	 */
	public OriginField parseOriginField() {
		SdpField sf = parseSdpField('o');
		return (sf != null) ? new OriginField(sf) : null;
	}

	/**
	 * Returns the first MediaField.
	 *
	 * @return the first MediaField, or null if no MediaField is found.
	 */
	public MediaField parseMediaField() {
		SdpField sf = parseSdpField('m');
		return (sf != null) ? new MediaField(sf) : null;
	}

	/**
	 * Returns the first ConnectionField.
	 *
	 * @return the first ConnectionField, or null if no ConnectionField is
	 *         found.
	 */
	public ConnectionField parseConnectionField() {
		SdpField sf = parseSdpField('c');
		return (sf != null) ? new ConnectionField(sf) : null;
	}

	/**
	 * Returns the first SessionNameField.
	 *
	 * @return the first SessionNameField, or null if no SessionNameField is
	 *         found.
	 */
	public SessionNameField parseSessionNameField() {
		SdpField sf = parseSdpField('s');
		return (sf != null) ? new SessionNameField(sf) : null;
	}

	/**
	 * Returns the first TimeField.
	 *
	 * @return the first TimeField, or null if no TimeField is found.
	 */
	public TimeField parseTimeField() {
		SdpField sf = parseSdpField('t');
		return (sf != null) ? new TimeField(sf) : null;
	}

	/**
	 * Returns the first AttributeField.
	 *
	 * @return the first AttributeField, or null if no AttributeField is found.
	 */
	public AttributeField parseAttributeField() {
		SdpField sf = parseSdpField('a');
		return (sf != null) ? new AttributeField(sf) : null;
	}

	/**
	 * Returns the first MediaDescriptor: an "m=" line plus its following
	 * connection ("c=") and attribute ("a=") lines, up to the next "m=" line
	 * or the end of the string.
	 *
	 * @return the first MediaDescriptor, or null if no MediaDescriptor is
	 *         found.
	 */
	public MediaDescriptor parseMediaDescriptor() {
		MediaField m = parseMediaField();
		if (m == null)
			return null;
		int begin = index;
		int end = str.indexOf("\nm", begin);
		if (end < 0)
			end = str.length();
		else
			end++; // include the newline preceding the next "m="
		index = end;
		SdpParser par = new SdpParser(str.substring(begin, end));
		ConnectionField c = par.parseConnectionField();
		Vector<AttributeField> av = new Vector<AttributeField>();
		AttributeField a = par.parseAttributeField();
		while (a != null) {
			av.addElement(a);
			a = par.parseAttributeField();
		}
		return new MediaDescriptor(m, c, av);
	}
}
| gpl-2.0 |
md-5/jdk10 | src/java.desktop/share/classes/javax/sound/midi/spi/SoundbankReader.java | 3505 | /*
* Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.sound.midi.spi;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import javax.sound.midi.InvalidMidiDataException;
import javax.sound.midi.Soundbank;
import javax.sound.midi.Synthesizer;
/**
 * Service-provider class for soundbank file readers. A concrete subclass
 * understands one or more soundbank file formats and converts them into
 * {@link Soundbank} objects, which can then be loaded into a
 * {@link Synthesizer}.
 *
 * @author Kara Kytle
 * @since 1.3
 */
public abstract class SoundbankReader {

    /**
     * Obtains a soundbank object from the {@code URL} provided.
     *
     * @param  url {@code URL} representing the soundbank
     * @return soundbank object
     * @throws InvalidMidiDataException if the {@code URL} does not point to
     *         valid MIDI soundbank data recognized by this soundbank reader
     * @throws IOException if an I/O error occurs
     * @throws NullPointerException if {@code url} is {@code null}
     */
    public abstract Soundbank getSoundbank(URL url)
            throws InvalidMidiDataException, IOException;

    /**
     * Obtains a soundbank object from the {@code InputStream} provided.
     *
     * @param  stream {@code InputStream} representing the soundbank
     * @return soundbank object
     * @throws InvalidMidiDataException if the stream does not point to valid
     *         MIDI soundbank data recognized by this soundbank reader
     * @throws IOException if an I/O error occurs
     * @throws NullPointerException if {@code stream} is {@code null}
     */
    public abstract Soundbank getSoundbank(InputStream stream)
            throws InvalidMidiDataException, IOException;

    /**
     * Obtains a soundbank object from the {@code File} provided.
     *
     * @param  file the {@code File} representing the soundbank
     * @return soundbank object
     * @throws InvalidMidiDataException if the file does not point to valid
     *         MIDI soundbank data recognized by this soundbank reader
     * @throws IOException if an I/O error occurs
     * @throws NullPointerException if {@code file} is {@code null}
     */
    public abstract Soundbank getSoundbank(File file)
            throws InvalidMidiDataException, IOException;
}
| gpl-2.0 |
jballanc/openmicroscopy | components/server/test/ome/server/itests/sec/EnumTest.java | 2469 | /*
* $Id$
*
* Copyright 2006 University of Dundee. All rights reserved.
* Use is subject to license terms supplied in LICENSE.txt
*/
package ome.server.itests.sec;
import java.util.UUID;
import ome.api.ITypes;
import ome.model.core.Pixels;
import ome.model.enums.DimensionOrder;
import ome.model.enums.EventType;
import ome.model.enums.Format;
import ome.model.meta.Experimenter;
import ome.model.meta.ExperimenterGroup;
import ome.server.itests.AbstractManagedContextTest;
import ome.testing.ObjectFactory;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@Test(groups = { "ticket:140", "ticket:145", "ticket:226", "security", "filter" })
public class EnumTest extends AbstractManagedContextTest {
Experimenter e = new Experimenter();
@BeforeClass
public void createData() throws Exception {
setUp();
loginRoot();
String gid = uuid();
ExperimenterGroup g = new ExperimenterGroup();
g.setName(gid);
iAdmin.createGroup(g);
e = new Experimenter();
e.setOmeName(UUID.randomUUID().toString());
e.setFirstName("enums");
e.setLastName("enums");
e = factory.getAdminService().getExperimenter(
factory.getAdminService().createUser(e, gid));
tearDown();
}
@Test
public void testEnumsAreReloaded() throws Exception {
loginUser(e.getOmeName());
DimensionOrder test = new DimensionOrder();
test.setValue("XYZCT");
factory.getUpdateService().saveObject(test);
}
@Test
public void testEvenWhenInAGraph() throws Exception {
loginUser(e.getOmeName());
Pixels test = ObjectFactory.createPixelGraph(null);
factory.getUpdateService().saveObject(test.getImage());
}
@Test(groups = "ticket:226")
public void testStringConstructor() throws Exception {
loginUser(e.getOmeName());
EventType type = new EventType(uuid());
factory.getTypesService().createEnumeration(type);
}
@Test(groups = "ticket:1204")
public void testFormatsAreReadable() {
ITypes types = factory.getTypesService();
Experimenter e = loginNewUser();
String uuid = uuid();
assertWorldReadable(types.createEnumeration(new Format(uuid)));
Format f = types.getEnumeration(Format.class, "image/png");
assertNotNull(f);
}
} | gpl-2.0 |
xucp/mpc_hc | src/thirdparty/LAVFilters/src/libbluray/src/libbluray/bdj/java/com/aacsla/bluray/online/MediaAttribute.java | 1506 | /*
* This file is part of libbluray
* Copyright (C) 2012 libbluray
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see
* <http://www.gnu.org/licenses/>.
*/
package com.aacsla.bluray.online;
import org.videolan.Libbluray;
import org.videolan.Logger;
/**
 * Exposes AACS media attributes of the currently loaded disc. If the
 * underlying AACS data is unavailable, each accessor logs a warning and
 * returns a zero-filled 16-byte array instead of null.
 */
public class MediaAttribute {
    public MediaAttribute() {
    }

    /**
     * @return the Pre-recorded Media Serial Number, or 16 zero bytes if unavailable
     */
    public byte[] getPMSN() {
        return aacsDataOrZeros(Libbluray.AACS_MEDIA_PMSN, "getPMSN()");
    }

    /**
     * @return the AACS Volume ID, or 16 zero bytes if unavailable
     */
    public byte[] getVolumeID() {
        return aacsDataOrZeros(Libbluray.AACS_MEDIA_VID, "getVolumeID()");
    }

    /**
     * Shared fetch-with-fallback used by both public accessors.
     *
     * @param dataType Libbluray AACS data selector (AACS_MEDIA_PMSN / AACS_MEDIA_VID)
     * @param caller   accessor name used in the warning message, e.g. "getPMSN()"
     * @return the AACS data, or a zero-filled 16-byte array if it is null
     */
    private byte[] aacsDataOrZeros(int dataType, String caller) {
        byte[] data = Libbluray.getAacsData(dataType);
        if (data == null) {
            logger.warning(caller + " failed");
            return new byte[16];
        }
        return data;
    }

    private static final Logger logger = Logger.getLogger(MediaAttribute.class.getName());
}
| gpl-3.0 |
ua-eas/ua-kfs-5.3 | work/src/org/kuali/kfs/sys/service/impl/ReportWriterTextServiceImpl.java | 35971 | /*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.sys.service.impl;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.kuali.kfs.sys.KFSConstants;
import org.kuali.kfs.sys.Message;
import org.kuali.kfs.sys.batch.service.WrappingBatchService;
import org.kuali.kfs.sys.context.SpringContext;
import org.kuali.kfs.sys.report.BusinessObjectReportHelper;
import org.kuali.kfs.sys.service.ReportWriterService;
import org.kuali.rice.core.api.datetime.DateTimeService;
import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.rice.krad.util.ObjectUtils;
/**
* Text output implementation of <code>ReportWriterService</code> interface. If you are a developer attempting to add a new business
* object for error report writing, take a look at the Spring definitions for BusinessObjectReportHelper.<br>
* This class CANNOT be used by 2 processes simultaneously. It is for very specific batch processes that should not run at the same
* time, and initialize and destroy must be called and the beginning and end of each process that uses it.
*
* @see org.kuali.kfs.sys.report.BusinessObjectReportHelper
*/
public class ReportWriterTextServiceImpl implements ReportWriterService, WrappingBatchService {
private static org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(ReportWriterTextServiceImpl.class);

// Changing the initial line number would only affect that a page break occurs early. It does not actually print in the
// middle of the page. Hence changing this has little use.
protected static final int INITIAL_LINE_NUMBER = 0;

// Injected configuration: output location, page geometry, and labels.
protected String filePath;
protected String fileNamePrefix;
protected String fileNameSuffix;
protected String title;
protected int pageWidth;
protected int pageLength;
protected int initialPageNumber;
protected String errorSubTitle;
protected String statisticsLabel;
protected String statisticsLeftPadding;
private String parametersLabel;
private String parametersLeftPadding;
protected String pageLabel;
protected String newLineCharacter;
protected DateTimeService dateTimeService;
// When true, file names omit the run timestamp and headers use a page-number placeholder.
protected boolean aggregationModeOn;

/**
 * A map of BO classes to {@link BusinessObjectReportHelper} bean names, to configure which BO's will be rendered by which
 * BusinessObjectReportHelper. This property should be configured via the spring bean definition
 */
protected Map<Class<? extends BusinessObject>, String> classToBusinessObjectReportHelperBeanNames;

// Local caching field to speed up the selection of formatting BusinessObjectReportHelper to use per configuration in Spring
protected Map<Class<? extends BusinessObject>, BusinessObjectReportHelper> businessObjectReportHelpers;

// Mutable run state: the open output stream plus page/line counters and the
// row format captured by writeErrorHeader.
protected PrintStream printStream;
protected int page;
protected int line = INITIAL_LINE_NUMBER;
protected String errorFormat;

// Ensures that the statistics header isn't written multiple times. Does not check that a user doesn't write other stuff into
// the statistics
// section. A developer is responsible for ensuring that themselves
protected boolean modeStatistics = false;

// Ensures that the parameters header isn't written multiple times. Does not check that a user doesn't write other stuff into
// the parameters
// section. A developer is responsible for ensuring that themselves
protected boolean modeParameters = false;

// So that writeError knows when to writeErrorHeader
protected boolean newPage = true;

// For printing new headers when the BO is changed
protected Class<? extends BusinessObject> businessObjectClass;
/**
 * No-argument constructor; configuration is supplied via Spring property
 * injection and runtime state is established in {@link #initialize()}.
 */
public ReportWriterTextServiceImpl() {
}
/**
 * Opens the report's print stream, resets the page counter, builds the
 * report-helper cache, and writes the initial page header.
 *
 * @see org.kuali.kfs.sys.batch.service.WrappingBatchService#initialize()
 */
public void initialize() {
    try {
        printStream = new PrintStream(generateFullFilePath());
    }
    catch (FileNotFoundException e) {
        // destination directory missing or not writable
        throw new RuntimeException(e);
    }

    page = initialPageNumber;
    initializeBusinessObjectReportHelpers();

    // Initial header
    this.writeHeader(title);
}
/**
 * Builds the local cache mapping BO classes to their configured
 * {@link BusinessObjectReportHelper} beans, failing fast if a configured
 * bean name cannot be resolved from the Spring context.
 */
protected void initializeBusinessObjectReportHelpers() {
    businessObjectReportHelpers = new HashMap<Class<? extends BusinessObject>, BusinessObjectReportHelper>();
    if (classToBusinessObjectReportHelperBeanNames != null) {
        for (Class<? extends BusinessObject> clazz : classToBusinessObjectReportHelperBeanNames.keySet()) {
            String businessObjectReportHelperBeanName = classToBusinessObjectReportHelperBeanNames.get(clazz);
            BusinessObjectReportHelper reportHelper = (BusinessObjectReportHelper) SpringContext.getService(businessObjectReportHelperBeanName);
            if (ObjectUtils.isNull(reportHelper)) {
                LOG.error("Cannot find BusinessObjectReportHelper implementation for class: " + clazz.getName() + " bean name: " + businessObjectReportHelperBeanName);
                throw new RuntimeException("Cannot find BusinessObjectReportHelper implementation for class: " + clazz.getName() + " bean name: " + businessObjectReportHelperBeanName);
            }
            businessObjectReportHelpers.put(clazz, reportHelper);
        }
    }
}
/**
 * Builds the destination file path. In aggregation mode the name is stable;
 * otherwise the current timestamp is embedded so each run gets its own file.
 */
protected String generateFullFilePath() {
    StringBuilder fullPath = new StringBuilder(filePath).append(File.separator).append(this.fileNamePrefix);
    if (!aggregationModeOn) {
        fullPath.append(dateTimeService.toDateTimeStringForFilename(dateTimeService.getCurrentDate()));
    }
    return fullPath.append(fileNameSuffix).toString();
}
/**
 * Closes the report stream and resets all page/line/mode tracking state so
 * the service can be re-initialized for a subsequent batch run.
 *
 * @see org.kuali.kfs.sys.batch.service.WrappingBatchService#destroy()
 */
public void destroy() {
    if(printStream != null) {
        printStream.close();
        printStream = null;
    }

    // reset variables that track state
    page = initialPageNumber;
    line = INITIAL_LINE_NUMBER;
    modeStatistics = false;
    modeParameters = false;
    newPage = true;
    businessObjectClass = null;
}
/**
 * Centers the given sub title on the page width; if it is too wide to
 * center, it is logged and written out unchanged.
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeSubTitle(java.lang.String)
 */
public void writeSubTitle(String message) {
    if (message.length() > pageWidth) {
        LOG.warn("sub title to be written exceeds pageWidth. Printing anyway.");
        this.writeFormattedMessageLine(message);
        return;
    }
    int leftPadding = (pageWidth - message.length()) / 2;
    this.writeFormattedMessageLine("%" + (leftPadding + message.length()) + "s", message);
}
/**
 * Writes the business object's values followed by the given error message.
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeError(java.lang.Class, org.kuali.kfs.sys.Message)
 */
public void writeError(BusinessObject businessObject, Message message) {
    this.writeError(businessObject, message, true);
}
/**
 * Writes one error row: (re)prints the table header when needed, prints the
 * business object's column values (or blanks), then word-wraps the message
 * across as many rows as required.
 *
 * @param printBusinessObjectValues indicates whether the bo values should be printed before the message
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeError(java.lang.Class, org.kuali.kfs.sys.Message)
 */
public void writeError(BusinessObject businessObject, Message message, boolean printBusinessObjectValues) {
    // Check if we need to write a new table header. We do this if it hasn't been written before or if the businessObject
    // changed
    if (newPage || businessObjectClass == null || !businessObjectClass.getName().equals(businessObject.getClass().getName())) {
        if (businessObjectClass == null) {
            // If we didn't write the header before, write it with a subTitle
            this.writeSubTitle(errorSubTitle);
        }
        else if (!businessObjectClass.getName().equals(businessObject.getClass().getName())) {
            // If it changed push a newline in for neater formatting
            this.writeNewLines(1);
        }

        this.writeErrorHeader(businessObject);
        newPage = false;
        businessObjectClass = businessObject.getClass();
    }

    // Get business object formatter that will be used
    BusinessObjectReportHelper businessObjectReportHelper = getBusinessObjectReportHelper(businessObject);

    // Print the values of the businessObject per formatting determined by writeErrorHeader
    List<Object> formatterArgs = new ArrayList<Object>();
    if (printBusinessObjectValues) {
        formatterArgs.addAll(businessObjectReportHelper.getValues(businessObject));
    }
    else {
        formatterArgs.addAll(businessObjectReportHelper.getBlankValues(businessObject));
    }

    // write rest of message on new line(s) if it was cut off
    // NOTE(review): the max message width is recovered by parsing the final "%-<n>s"
    // specifier out of errorFormat (captured in writeErrorHeader) — brittle if that
    // format convention ever changes.
    int maxMessageLength = Integer.parseInt(StringUtils.substringBefore(StringUtils.substringAfterLast(errorFormat, "%-"), "s"));
    String messageToPrint = message.getMessage();
    boolean firstMessageLine = true;
    while (messageToPrint.length() > 0 && StringUtils.isNotBlank(messageToPrint)) {
        if (!firstMessageLine) {
            // continuation rows print blank BO columns so the wrapped message stays aligned
            formatterArgs = new ArrayList<Object>();
            formatterArgs.addAll(businessObjectReportHelper.getBlankValues(businessObject));
        }
        else {
            firstMessageLine = false;
        }

        messageToPrint = StringUtils.trim(messageToPrint);
        String messageLine = messageToPrint;
        if (messageLine.length() > maxMessageLength) {
            // break at the last space that fits, if there is one
            messageLine = StringUtils.substring(messageLine, 0, maxMessageLength);
            if (StringUtils.contains(messageLine, " ")) {
                messageLine = StringUtils.substringBeforeLast(messageLine, " ");
            }
        }

        formatterArgs.add(new Message(messageLine, message.getType()));
        this.writeFormattedMessageLine(errorFormat, formatterArgs.toArray());

        messageToPrint = StringUtils.removeStart(messageToPrint, messageLine);
    }
}
/**
 * Writes each message for the business object; only the first message also
 * prints the business object's column values.
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeError(java.lang.Class, java.util.List)
 */
public void writeError(BusinessObject businessObject, List<Message> messages) {
    boolean valuesNotYetPrinted = true;
    for (Message message : messages) {
        this.writeError(businessObject, message, valuesNotYetPrinted);
        valuesNotYetPrinted = false;
    }
}
/**
 * Writes the requested number of empty lines to the report.
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeNewLines(int)
 */
public void writeNewLines(int lines) {
    int remaining = lines;
    while (remaining > 0) {
        this.writeFormattedMessageLine("");
        remaining--;
    }
}
/**
 * Writes a line into the statistics section, emitting the boxed statistics
 * banner the first time it is called (page-breaking first unless nothing has
 * been written to the report yet).
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeStatisticLine(java.lang.String, java.lang.Object[])
 */
public void writeStatisticLine(String message, Object... args) {
    // Statistics header is only written if it hasn't been written before
    if (!modeStatistics) {
        this.modeStatistics = true;

        // If nothing has been written to the report we don't want to page break
        // (header occupies the first two lines, hence INITIAL_LINE_NUMBER + 2)
        if (!(page == initialPageNumber && line == INITIAL_LINE_NUMBER + 2)) {
            this.pageBreak();
        }

        this.writeFormattedMessageLine("*********************************************************************************************************************************");
        this.writeFormattedMessageLine("*********************************************************************************************************************************");
        this.writeFormattedMessageLine("*******************" + statisticsLabel + "*******************");
        this.writeFormattedMessageLine("*********************************************************************************************************************************");
        this.writeFormattedMessageLine("*********************************************************************************************************************************");
    }

    this.writeFormattedMessageLine(statisticsLeftPadding + message, args);
}
/**
 * Writes a line into the parameters section, emitting the boxed parameters
 * banner the first time it is called (page-breaking first unless nothing has
 * been written to the report yet).
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeParameterLine(java.lang.String, java.lang.Object[])
 */
public void writeParameterLine(String message, Object... args) {
    // Statistics header is only written if it hasn't been written before
    if (!modeParameters) {
        this.modeParameters = true;

        // If nothing has been written to the report we don't want to page break
        if (!(page == initialPageNumber && line == INITIAL_LINE_NUMBER + 2)) {
            this.pageBreak();
        }

        this.writeFormattedMessageLine("*********************************************************************************************************************************");
        this.writeFormattedMessageLine("*********************************************************************************************************************************");
        this.writeFormattedMessageLine("*******************" + getParametersLabel() + "*******************");
        this.writeFormattedMessageLine("*********************************************************************************************************************************");
        this.writeFormattedMessageLine("*********************************************************************************************************************************");
    }

    this.writeFormattedMessageLine(getParametersLeftPadding() + message, args);
}
/**
 * Formats and writes one line (appending the configured newline), bumping
 * the line counter and page-breaking when the page is full.
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeFormattedMessageLine(java.lang.String, java.lang.Object[])
 */
public void writeFormattedMessageLine(String format, Object... args) {
    // "% s" is not a valid format specifier; warn and skip rather than let String.format throw
    if (format.indexOf("% s") > -1) {
        LOG.warn("Cannot properly format: "+format);
    }
    else {
        Object[] escapedArgs = escapeArguments(args);

        if (LOG.isDebugEnabled()) {
            LOG.debug("writeFormattedMessageLine, format: "+format);
        }
        String message = null;
        if (escapedArgs.length > 0) {
            message = String.format(format + newLineCharacter, escapedArgs);
        } else {
            // no args: treat the format as a literal line
            message = format+newLineCharacter;
        }

        // Log we are writing out of bounds. Would be nice to show message here but not so sure if it's wise to dump that data into
        // logs
        if (message.length() > pageWidth) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("message is out of bounds writing anyway");
            }
        }

        printStream.print(message);
        printStream.flush();

        line++;

        if (line >= pageLength) {
            this.pageBreak();
        }
    }
}
/**
 * Determines if all formatting on the given String is escaped - ie, every
 * '%' is part of a literal "%%" escape, so the String contains no format
 * specifiers.
 * <p>
 * Fixes the original implementation, which never advanced {@code currIndex}
 * inside the loop (an infinite loop whenever a "%%" was found) and read past
 * the end of the String when '%' was the last character.
 *
 * @param format the format to test
 * @return true if the String is without formatting, false otherwise
 */
protected boolean allFormattingEscaped(String format) {
    int currIndex = format.indexOf('%');
    while (currIndex > -1) {
        // a trailing '%' or a '%' not followed by another '%' is a real specifier
        if (currIndex + 1 >= format.length() || format.charAt(currIndex + 1) != '%') {
            return false;
        }
        // skip past the "%%" pair and look for the next '%'
        currIndex = format.indexOf('%', currIndex + 2);
    }
    return true;
}
/**
 * Emits an ASCII form-feed to force a printer page break, bumps the page
 * counter, resets the line counter, and writes the next page's header.
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#pageBreak()
 */
public void pageBreak() {
    // Intentionally not using writeFormattedMessageLine here since it would loop trying to page break ;)
    // 12 represents the ASCII Form Feed character
    printStream.printf("%c" + newLineCharacter, 12);

    page++;
    line = INITIAL_LINE_NUMBER;
    newPage = true;
    this.writeHeader(title);
}
/**
 * Helper method to write a header for placement at top of new page:
 * timestamp on the left, the title centered, page label/number on the right.
 *
 * @param title that should be printed on this header
 */
protected void writeHeader(String title) {
    String headerText = String.format("%1$tY-%1$tm-%1$td %1$tH:%1$tM", dateTimeService.getCurrentDate());
    int reportTitlePadding = pageWidth / 2 - headerText.length() - title.length() / 2;
    headerText = String.format("%s%" + (reportTitlePadding + title.length()) + "s%" + reportTitlePadding + "s", headerText, title, "");

    if (aggregationModeOn) {
        // real page number is unknown until aggregation finishes; leave a placeholder
        this.writeFormattedMessageLine("%s%s%s", headerText, pageLabel, KFSConstants.REPORT_WRITER_SERVICE_PAGE_NUMBER_PLACEHOLDER);
    }
    else {
        this.writeFormattedMessageLine("%s%s%,9d", headerText, pageLabel, page);
    }

    this.writeNewLines(1);
}
/**
 * Helper method to write the error header. Also captures the table's row
 * format string (the last header element) into {@link #errorFormat} for use
 * by writeError.
 *
 * @param businessObject to print header for
 */
protected void writeErrorHeader(BusinessObject businessObject) {
    BusinessObjectReportHelper businessObjectReportHelper = getBusinessObjectReportHelper(businessObject);

    List<String> errorHeader = businessObjectReportHelper.getTableHeader(pageWidth);

    // If we are at end of page and don't have space for table header, go ahead and page break
    if (errorHeader.size() + line >= pageLength) {
        this.pageBreak();
    }

    // Print the header one by one. Note the last element is the formatter. So capture that seperately
    for (Iterator<String> headers = errorHeader.iterator(); headers.hasNext();) {
        String header = headers.next();

        if (headers.hasNext()) {
            this.writeFormattedMessageLine("%s", header);
        }
        else {
            errorFormat = header;
        }
    }
}
/**
 * Writes the table header configured for the given business object's layout.
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeTableHeader(org.kuali.rice.krad.bo.BusinessObject)
 */
public void writeTableHeader(BusinessObject businessObject) {
    BusinessObjectReportHelper businessObjectReportHelper = getBusinessObjectReportHelper(businessObject);

    Map<String, String> tableDefinition = businessObjectReportHelper.getTableDefinition();
    String tableHeaderFormat = tableDefinition.get(KFSConstants.ReportConstants.TABLE_HEADER_LINE_KEY);

    // header line carries no data; the dummy vararg satisfies the formatting call
    String[] headerLines = this.getMultipleFormattedMessageLines(tableHeaderFormat, new Object());
    this.writeMultipleFormattedMessageLines(headerLines);
}
/**
 * Writes out the table header, based on a business object class
 *
 * @param businessObjectClass the class to write the header out for
 */
public void writeTableHeader(Class<? extends BusinessObject> businessObjectClass) {
    BusinessObjectReportHelper businessObjectReportHelper = getBusinessObjectReportHelper(businessObjectClass);

    Map<String, String> tableDefinition = businessObjectReportHelper.getTableDefinition();
    String tableHeaderFormat = tableDefinition.get(KFSConstants.ReportConstants.TABLE_HEADER_LINE_KEY);

    // header line carries no data; the dummy vararg satisfies the formatting call
    String[] headerLines = this.getMultipleFormattedMessageLines(tableHeaderFormat, new Object());
    this.writeMultipleFormattedMessageLines(headerLines);
}
/**
 * Writes the separator line configured for the given business object's
 * table layout.
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeTableRow(org.kuali.rice.krad.bo.BusinessObject)
 */
public void writeTableRowSeparationLine(BusinessObject businessObject) {
    Map<String, String> tableDefinition = getBusinessObjectReportHelper(businessObject).getTableDefinition();
    this.writeFormattedMessageLine(tableDefinition.get(KFSConstants.ReportConstants.SEPARATOR_LINE_KEY));
}
/**
 * Writes one table row for the business object using the configured cell
 * format, padding missing cells with blanks.
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeTableRow(org.kuali.rice.krad.bo.BusinessObject)
 */
public void writeTableRow(BusinessObject businessObject) {
    BusinessObjectReportHelper businessObjectReportHelper = getBusinessObjectReportHelper(businessObject);

    Map<String, String> tableDefinition = businessObjectReportHelper.getTableDefinition();
    String tableCellFormat = tableDefinition.get(KFSConstants.ReportConstants.TABLE_CELL_FORMAT_KEY);

    List<String> tableCellValues = businessObjectReportHelper.getTableCellValuesPaddingWithEmptyCell(businessObject, false);

    String[] rowMessageLines = this.getMultipleFormattedMessageLines(tableCellFormat, tableCellValues.toArray());
    this.writeMultipleFormattedMessageLines(rowMessageLines);
}
/**
 * Writes one table row for the business object, allowing a cell to span the
 * remaining columns (colspan-style formatting).
 *
 * @see org.kuali.kfs.sys.service.ReportWriterService#writeTableRowWithColspan(org.kuali.rice.krad.bo.BusinessObject)
 */
public void writeTableRowWithColspan(BusinessObject businessObject) {
    BusinessObjectReportHelper businessObjectReportHelper = getBusinessObjectReportHelper(businessObject);

    Map<String, String> tableDefinition = businessObjectReportHelper.getTableDefinition();
    String tableCellFormat = businessObjectReportHelper.getTableCellFormat(true, true, StringUtils.EMPTY);

    List<String> tableCellValues = businessObjectReportHelper.getTableCellValuesPaddingWithEmptyCell(businessObject, true);

    String[] rowMessageLines = this.getMultipleFormattedMessageLines(tableCellFormat, tableCellValues.toArray());
    this.writeMultipleFormattedMessageLines(rowMessageLines);
}
    /**
     * Writes the given business objects as a table: a header followed by one row per
     * object, optionally repeating the header at the top of each new page, and
     * optionally allowing a single row's lines to break across a page boundary.
     *
     * @param businessObjects the objects to render; nothing is written for null/empty
     * @param isHeaderRepeatedInNewPage whether to re-write the table header after a page break
     * @param isRowBreakAcrossPageAllowed whether one row's lines may straddle a page boundary
     * @see org.kuali.kfs.sys.service.ReportWriterService#writeTable(java.util.List, boolean, boolean)
     */
    public void writeTable(List<? extends BusinessObject> businessObjects, boolean isHeaderRepeatedInNewPage, boolean isRowBreakAcrossPageAllowed) {
        if (ObjectUtils.isNull(businessObjects) || businessObjects.isEmpty()) {
            return;
        }
        // All rows use the helper registered for the first object's class, so the
        // list is assumed to be homogeneous — TODO confirm with callers.
        BusinessObject firstBusinessObject = businessObjects.get(0);
        this.writeTableHeader(firstBusinessObject);
        BusinessObjectReportHelper businessObjectReportHelper = getBusinessObjectReportHelper(businessObjects.get(0));
        Map<String, String> tableDefinition = businessObjectReportHelper.getTableDefinition();
        String tableHeaderFormat = tableDefinition.get(KFSConstants.ReportConstants.TABLE_HEADER_LINE_KEY);
        // Header lines are pre-formatted once here so they can be re-emitted after
        // mid-row page breaks; the dummy Object argument mirrors writeTableHeader.
        String[] headerLines = this.getMultipleFormattedMessageLines(tableHeaderFormat, new Object());
        String tableCellFormat = tableDefinition.get(KFSConstants.ReportConstants.TABLE_CELL_FORMAT_KEY);
        for (BusinessObject businessObject : businessObjects) {
            List<String> tableCellValues = businessObjectReportHelper.getTableCellValuesPaddingWithEmptyCell(businessObject, false);
            String[] messageLines = this.getMultipleFormattedMessageLines(tableCellFormat, tableCellValues.toArray());
            // If the whole row does not fit on the remainder of the current page and
            // rows must not straddle pages, break the page (and optionally repeat the header).
            boolean hasEnoughLinesInPage = messageLines.length <= (this.pageLength - this.line);
            if (!hasEnoughLinesInPage && !isRowBreakAcrossPageAllowed) {
                this.pageBreak();
                if (isHeaderRepeatedInNewPage) {
                    this.writeTableHeader(firstBusinessObject);
                }
            }
            this.writeMultipleFormattedMessageLines(messageLines, headerLines, isRowBreakAcrossPageAllowed);
        }
    }
/**
* get the business report helper for the given business object
*
* @param businessObject the given business object
* @return the business report helper for the given business object
*/
public BusinessObjectReportHelper getBusinessObjectReportHelper(BusinessObject businessObject) {
if (LOG.isDebugEnabled()) {
if (businessObject == null) {
LOG.debug("reporting "+filePath+" but can't because null business object sent in");
} else if (businessObjectReportHelpers == null) {
LOG.debug("Logging "+businessObject+" in report "+filePath+" but businessObjectReportHelpers are null");
}
}
BusinessObjectReportHelper businessObjectReportHelper = this.businessObjectReportHelpers.get(businessObject.getClass());
if (ObjectUtils.isNull(businessObjectReportHelper)) {
throw new RuntimeException(businessObject.getClass().toString() + " is not handled");
}
return businessObjectReportHelper;
}
/**
* get the business report helper for the given business object
*
* @param businessObject the given business object
* @return the business report helper for the given business object
*/
public BusinessObjectReportHelper getBusinessObjectReportHelper(Class<? extends BusinessObject> businessObjectClass) {
BusinessObjectReportHelper businessObjectReportHelper = this.businessObjectReportHelpers.get(businessObjectClass);
if (ObjectUtils.isNull(businessObjectReportHelper)) {
throw new RuntimeException(businessObjectClass.getName() + " is not handled");
}
return businessObjectReportHelper;
}
    /**
     * Writes the given pre-formatted lines, inserting a page break before the row when
     * it would not fit on the current page and rows must not straddle pages, and
     * re-writing the given header lines at the top of any new page that is started.
     *
     * @param messageLines the formatted lines being written out
     * @param headerLinesInNewPage header lines to re-write at the start of a new page; may be null
     * @param isRowBreakAcrossPageAllowed whether the lines may straddle a page boundary
     */
    protected void writeMultipleFormattedMessageLines(String[] messageLines, String[] headerLinesInNewPage, boolean isRowBreakAcrossPageAllowed) {
        int currentPageNumber = this.page;
        // NOTE: the loop variable 'line' shadows the field 'this.line' (current line
        // position on the page) referenced just below.
        for (String line : messageLines) {
            // NOTE(review): this compares the TOTAL number of lines, not the number
            // still remaining, on every iteration — confirm this is intended.
            boolean hasEnoughLinesInPage = messageLines.length <= (this.pageLength - this.line);
            if (!hasEnoughLinesInPage && !isRowBreakAcrossPageAllowed) {
                this.pageBreak();
            }
            // If any writer (pageBreak above, or writeFormattedMessageLine below)
            // advanced to a new page, emit the header lines once for that page.
            if (currentPageNumber < this.page && ObjectUtils.isNotNull(headerLinesInNewPage)) {
                currentPageNumber = this.page;
                for (String headerLine : headerLinesInNewPage) {
                    this.writeFormattedMessageLine(headerLine);
                }
            }
            this.writeFormattedMessageLine(line);
        }
    }
/**
* write the given information as multiple lines if it contains more than one line breaks
*
* @param format the given text format definition
* @param args the given information being written out
*/
public void writeMultipleFormattedMessageLines(String[] messageLines) {
this.writeMultipleFormattedMessageLines(messageLines, null, false);
}
public void writeMultipleFormattedMessageLines(String format, Object... args) {
Object[] escapedArgs = escapeArguments(args);
String[] messageLines = getMultipleFormattedMessageLines(format, escapedArgs);
writeMultipleFormattedMessageLines(messageLines);
}
/**
* This method...
*
* @param format
* @param args
* @return
*/
public String[] getMultipleFormattedMessageLines(String format, Object... args) {
Object[] escapedArgs = escapeArguments(args);
String message = String.format(format, escapedArgs);
return StringUtils.split(message, newLineCharacter);
}
/**
* Iterates through array and escapes special formatting characters
*
* @param args Object array to process
* @return Object array with String members escaped
*/
protected Object[] escapeArguments(Object... args) {
Object[] escapedArgs = new Object[args.length];
for (int i = 0; i < args.length; i++) {
Object arg = args[i];
if (arg == null) {
args[i] = "";
} else if (arg != null && arg instanceof String) {
String escapedArg = escapeFormatCharacters((String)arg);
escapedArgs[i] = escapedArg;
}
else {
escapedArgs[i] = arg;
}
}
return escapedArgs;
}
/**
* Escapes characters in a string that have special meaning for formatting
*
* @param replacementString string to escape
* @return string with format characters escaped
* @see KFSConstants.ReportConstants.FORMAT_ESCAPE_CHARACTERS
*/
protected String escapeFormatCharacters(String replacementString) {
String escapedString = replacementString;
for (int i = 0; i < KFSConstants.ReportConstants.FORMAT_ESCAPE_CHARACTERS.length; i++) {
String characterToEscape = KFSConstants.ReportConstants.FORMAT_ESCAPE_CHARACTERS[i];
escapedString = StringUtils.replace(escapedString, characterToEscape, characterToEscape + characterToEscape);
}
return escapedString;
}
    /**
     * Sets the filePath of the report file being written.
     *
     * @param filePath The filePath to set.
     */
    public void setFilePath(String filePath) {
        this.filePath = filePath;
    }
    /**
     * Sets the fileNamePrefix used when building the report file name.
     *
     * @param fileNamePrefix The fileNamePrefix to set.
     */
    public void setFileNamePrefix(String fileNamePrefix) {
        this.fileNamePrefix = fileNamePrefix;
    }
    /**
     * Sets the fileNameSuffix used when building the report file name.
     *
     * @param fileNameSuffix The fileNameSuffix to set.
     */
    public void setFileNameSuffix(String fileNameSuffix) {
        this.fileNameSuffix = fileNameSuffix;
    }
    /**
     * Sets the title written on the report.
     *
     * @param title The title to set.
     */
    public void setTitle(String title) {
        this.title = title;
    }
    /**
     * Sets the pageWidth of the report.
     *
     * @param pageWidth The pageWidth to set.
     */
    public void setPageWidth(int pageWidth) {
        this.pageWidth = pageWidth;
    }
    /**
     * Sets the pageLength — the number of lines on a page, used to decide when a
     * page break is needed.
     *
     * @param pageLength The pageLength to set.
     */
    public void setPageLength(int pageLength) {
        this.pageLength = pageLength;
    }
    /**
     * Sets the initialPageNumber the report starts counting from.
     *
     * @param initialPageNumber The initialPageNumber to set.
     */
    public void setInitialPageNumber(int initialPageNumber) {
        this.initialPageNumber = initialPageNumber;
    }
    /**
     * Sets the errorSubTitle.
     *
     * @param errorSubTitle The errorSubTitle to set.
     */
    public void setErrorSubTitle(String errorSubTitle) {
        this.errorSubTitle = errorSubTitle;
    }
    /**
     * Sets the statisticsLabel.
     *
     * @param statisticsLabel The statisticsLabel to set.
     */
    public void setStatisticsLabel(String statisticsLabel) {
        this.statisticsLabel = statisticsLabel;
    }
    /**
     * Sets the statisticsLeftPadding.
     *
     * @param statisticsLeftPadding The statisticsLeftPadding to set.
     */
    public void setStatisticsLeftPadding(String statisticsLeftPadding) {
        this.statisticsLeftPadding = statisticsLeftPadding;
    }
    /**
     * Sets the pageLabel.
     *
     * @param pageLabel The pageLabel to set.
     */
    public void setPageLabel(String pageLabel) {
        this.pageLabel = pageLabel;
    }
    /**
     * Sets the newLineCharacter used to split formatted messages into individual lines.
     *
     * @param newLineCharacter The newLineCharacter to set.
     */
    public void setNewLineCharacter(String newLineCharacter) {
        this.newLineCharacter = newLineCharacter;
    }
    /**
     * Sets the DateTimeService.
     *
     * @param dateTimeService The DateTimeService to set.
     */
    public void setDateTimeService(DateTimeService dateTimeService) {
        this.dateTimeService = dateTimeService;
    }
    /**
     * Sets a map of BO classes to {@link BusinessObjectReportHelper} bean names, to configure which BO's will be rendered by which
     * BusinessObjectReportHelper. This property should be configured via the spring bean definition
     *
     * @param classToBusinessObjectReportHelperBeanNames The classToBusinessObjectReportHelperBeanNames to set.
     */
    public void setClassToBusinessObjectReportHelperBeanNames(Map<Class<? extends BusinessObject>, String> classToBusinessObjectReportHelperBeanNames) {
        this.classToBusinessObjectReportHelperBeanNames = classToBusinessObjectReportHelperBeanNames;
    }
    /**
     * Gets the parametersLabel attribute.
     * @return Returns the parametersLabel.
     */
    public String getParametersLabel() {
        return parametersLabel;
    }
    /**
     * Sets the parametersLabel attribute value.
     * @param parametersLabel The parametersLabel to set.
     */
    public void setParametersLabel(String parametersLabel) {
        this.parametersLabel = parametersLabel;
    }
    /**
     * Gets the parametersLeftPadding attribute.
     * @return Returns the parametersLeftPadding.
     */
    public String getParametersLeftPadding() {
        return parametersLeftPadding;
    }
    /**
     * Sets the parametersLeftPadding attribute value.
     * @param parametersLeftPadding The parametersLeftPadding to set.
     */
    public void setParametersLeftPadding(String parametersLeftPadding) {
        this.parametersLeftPadding = parametersLeftPadding;
    }
    /**
     * Gets the aggregationModeOn attribute.
     * @return Returns the aggregationModeOn.
     */
    public boolean isAggregationModeOn() {
        return aggregationModeOn;
    }
    /**
     * Sets the aggregationModeOn attribute value.
     * @param aggregationModeOn The aggregationModeOn to set.
     */
    public void setAggregationModeOn(boolean aggregationModeOn) {
        this.aggregationModeOn = aggregationModeOn;
    }
}
| agpl-3.0 |
yersan/wildfly-core | testsuite/manualmode/src/test/java/org/jboss/as/test/manualmode/deployment/DeploymentScannerRedeploymentTestCase.java | 4643 | /*
* JBoss, Home of Professional Open Source
* Copyright 2014, JBoss Inc., and individual contributors as indicated
* by the @authors tag.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.as.test.manualmode.deployment;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.DEPLOYMENT;
import java.io.File;
import javax.inject.Inject;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.client.ModelControllerClient;
import org.jboss.as.test.shared.TestSuiteEnvironment;
import org.jboss.as.test.shared.TimeoutUtil;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.core.testrunner.ServerControl;
import org.wildfly.core.testrunner.ServerController;
import org.wildfly.core.testrunner.WildflyTestRunner;
@RunWith(WildflyTestRunner.class)
@ServerControl(manual = true)
public class DeploymentScannerRedeploymentTestCase extends AbstractDeploymentScannerBasedTestCase {
    /** Polling interval, in milliseconds, while waiting for a state change. */
    private static final int DELAY = 100;
    /** Base timeout, in milliseconds, scaled via TimeoutUtil.adjust(). */
    private static final int TIMEOUT = 30000;
    private static final PathAddress DEPLOYMENT_TEST = PathAddress.pathAddress(DEPLOYMENT, "deployment-test.jar");
    @Inject
    private ServerController container;
    /**
     * Verifies that a deployment which initially FAILED (missing module dependency)
     * is re-processed by the deployment scanner after the server is restarted with a
     * corrected archive, ending up in the OK state with a .deployed marker present.
     */
    @Test
    public void testRedeployment() throws Exception {
        container.start();
        try {
            try (ModelControllerClient client = TestSuiteEnvironment.getModelControllerClient()) {
                final File deployment = new File(getDeployDir(), "deployment-test.jar");
                // Archive references a module that does not exist, so the deployment must fail.
                createDeployment(deployment, "non.existing.dependency");
                boolean done = false;
                // Add a new deployment scanner
                addDeploymentScanner(client, 0, false, true);
                try {
                    // Wait until deployed ...
                    long timeout = System.currentTimeMillis() + TimeoutUtil.adjust(TIMEOUT);
                    while (!exists(client, DEPLOYMENT_TEST) && System.currentTimeMillis() < timeout) {
                        Thread.sleep(DELAY);
                    }
                    // Typo "deployemt" fixed in the assertion messages below.
                    Assert.assertTrue("deployment archive is expected.", exists(client, DEPLOYMENT_TEST));
                    Assert.assertEquals("FAILED", deploymentState(client, DEPLOYMENT_TEST));
                    final File deployed = new File(getDeployDir(), "deployment-test.jar.deployed");
                    // Restart ...
                    container.stop();
                    // replace broken deployment with a correct one
                    createDeployment(deployment, "org.jboss.modules", true);
                    startContainer(client);
                    // Give the scanner a window to re-process the corrected archive after restart.
                    timeout = System.currentTimeMillis() + TimeoutUtil.adjust(TIMEOUT);
                    while (exists(client, DEPLOYMENT_TEST) && System.currentTimeMillis() < timeout) {
                        Thread.sleep(200);
                    }
                    Assert.assertTrue(".deployed marker is expected.", deployed.exists());
                    Assert.assertTrue("deployment archive is expected.", exists(client, DEPLOYMENT_TEST));
                    Assert.assertEquals("OK", deploymentState(client, DEPLOYMENT_TEST));
                    done = true;
                } finally {
                    try {
                        removeDeploymentScanner(client);
                    } catch (Exception e) {
                        if (done) {
                            //noinspection ThrowFromFinallyBlock
                            throw e;
                        } else {
                            // The test already failed; log the cleanup failure rather
                            // than masking the original error.
                            e.printStackTrace(System.out);
                        }
                    }
                }
            }
        } finally {
            container.stop();
        }
    }
    /**
     * Starts the server and polls until the management interface reports it running
     * or the timeout elapses.
     */
    private void startContainer(ModelControllerClient client) throws InterruptedException {
        container.start();
        // Wait until started ...
        long timeout = System.currentTimeMillis() + TimeoutUtil.adjust(TIMEOUT);
        while (!isRunning(client) && System.currentTimeMillis() < timeout) {
            Thread.sleep(200);
        }
    }
}
| lgpl-2.1 |
rahuljoshi123/databus | databus-core/databus-core-impl/src/main/java/com/linkedin/databus2/core/mbean/DatabusReadOnlyStatus.java | 2461 | package com.linkedin.databus2.core.mbean;
/*
*
* Copyright 2013 LinkedIn Corp. All rights reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.util.Hashtable;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import com.linkedin.databus.core.DatabusComponentStatus;
/**
 * Read-only JMX MBean that exposes a {@link DatabusComponentStatus} for a named
 * Databus component, registered under an ObjectName qualified by name and owner id.
 */
public class DatabusReadOnlyStatus extends BaseDatabusMBean implements DatabusReadOnlyStatusMBean
{
  // Underlying component status all getters delegate to.
  private final DatabusComponentStatus _status;
  // Component name; sanitized before use in the JMX ObjectName.
  private final String _name;
  // Owner id distinguishing ObjectNames of same-named components.
  private final long _ownerId;
  public DatabusReadOnlyStatus(String name, DatabusComponentStatus status, long ownerId)
  {
    _status = status;
    _name = name;
    _ownerId = ownerId;
  }
  /** Returns the component status as a string. */
  @Override
  public String getStatus()
  {
    return _status.getStatus().toString();
  }
  /** Returns the human-readable message attached to the status. */
  @Override
  public String getStatusMessage()
  {
    return _status.getMessage();
  }
  /** Returns the (unsanitized) component name. */
  @Override
  public String getComponentName()
  {
    return _name;
  }
  /** Returns the numeric code of the current status. */
  @Override
  public int getStatusCode()
  {
    return _status.getStatus().getCode();
  }
  /** Builds the ObjectName from the sanitized component name and the owner id. */
  @Override
  public ObjectName generateObjectName() throws MalformedObjectNameException
  {
    Hashtable<String, String> nameProps = generateBaseMBeanProps();
    nameProps.put("name", sanitizeString(_name));
    nameProps.put("ownerId", Long.toString(_ownerId));
    return new ObjectName(JMX_DOMAIN, nameProps);
  }
  /** Returns the configured number of retries. */
  @Override
  public int getRetriesNum()
  {
    return _status.getRetriesNum();
  }
  /** Returns how many retries remain. */
  @Override
  public int getRemainingRetriesNum()
  {
    return _status.getRetriesLeft();
  }
  /** Returns the sleep duration (ms) of the last retry. */
  @Override
  public long getCurrentRetryLatency()
  {
    return _status.getLastRetrySleepMs();
  }
  /** Returns the total time (ms) spent retrying. */
  @Override
  public long getTotalRetryTime()
  {
    return _status.getRetriesCounter().getTotalRetryTime();
  }
  // Replaces characters that are special in JMX ObjectNames (. , ;) with '_'.
  private String sanitizeString(String s)
  {
    return s.replaceAll("[.,;]", "_");
  }
  /** Returns how long (ms) the component has been up. */
  @Override
  public long getUptimeMs()
  {
    return _status.getUptimeMs();
  }
}
| apache-2.0 |
ksimar/incubator-carbondata | core/src/test/java/org/apache/carbondata/core/carbon/ColumnIdentifierTest.java | 3351 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.carbon;
import org.apache.carbondata.core.metadata.ColumnIdentifier;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static junit.framework.TestCase.assertEquals;
/**
 * Unit tests for {@link ColumnIdentifier}: hashCode, the equals contract under
 * various argument types and null fields, toString format, and property lookup.
 * Note: JUnit's assertEquals takes (expected, actual); the argument order has
 * been corrected throughout so failure messages read correctly.
 */
public class ColumnIdentifierTest {
  static ColumnIdentifier columnIdentifier;
  static Map<String, String> columnProperties;
  @BeforeClass public static void setup() {
    columnProperties = new HashMap<String, String>();
    columnProperties.put("key", "value");
    columnIdentifier = new ColumnIdentifier("columnId", columnProperties, DataType.INT);
  }
  @Test public void hashCodeTest() {
    int res = columnIdentifier.hashCode();
    // NOTE(review): pins the exact hash value; this will break if
    // ColumnIdentifier.hashCode() is ever reimplemented.
    int expectedResult = -623419600;
    assertEquals(expectedResult, res);
  }
  @Test public void equalsTestwithSameObject() {
    Boolean res = columnIdentifier.equals(columnIdentifier);
    assert (res);
  }
  @Test public void equalsTestwithSimilarObject() {
    ColumnIdentifier columnIdentifierTest =
        new ColumnIdentifier("columnId", columnProperties, DataType.INT);
    Boolean res = columnIdentifier.equals(columnIdentifierTest);
    assert (res);
  }
  @Test public void equalsTestwithNullObject() {
    Boolean res = columnIdentifier.equals(null);
    assert (!res);
  }
  @Test public void equalsTestwithStringObject() {
    // Comparing against a different type must return false.
    Boolean res = columnIdentifier.equals("String Object");
    assert (!res);
  }
  @Test public void equalsTestwithNullColumnId() {
    ColumnIdentifier columnIdentifierTest =
        new ColumnIdentifier(null, columnProperties, DataType.INT);
    Boolean res = columnIdentifierTest.equals(columnIdentifier);
    assert (!res);
  }
  @Test public void equalsTestwithDiffColumnId() {
    ColumnIdentifier columnIdentifierTest =
        new ColumnIdentifier("diffColumnId", columnProperties, DataType.INT);
    Boolean res = columnIdentifierTest.equals(columnIdentifier);
    assert (!res);
  }
  @Test public void toStringTest() {
    String res = columnIdentifier.toString();
    assert (res.equals("ColumnIdentifier [columnId=columnId]"));
  }
  @Test public void getColumnPropertyTest() {
    // With a null properties map the lookup must return null rather than throw.
    ColumnIdentifier columnIdentifierTest =
        new ColumnIdentifier("diffColumnId", null, DataType.INT);
    String res = columnIdentifierTest.getColumnProperty("key");
    assertEquals(null, res);
  }
  @Test public void getColumnPropertyTestwithNull() {
    assert (columnIdentifier.getColumnProperty("key").equals("value"));
  }
}
| apache-2.0 |
mztaylor/rice-git | rice-middleware/client-contrib/src/main/java/org/kuali/rice/kim/sesn/timeouthandlers/AbstractTimeoutHandler.java | 1565 | /**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kim.sesn.timeouthandlers;
/**
 * Base {@link TimeoutHandler} that lets the timeout field index used by concrete
 * handlers be configured, defaulting to the max-idle-time field.
 * TODO: change how TimoutHandlers choose fields from ndx to name.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public abstract class AbstractTimeoutHandler implements TimeoutHandler {
    private static final int MAX_IDLE_TIME_TIMEOUT_FIELD = 1;
    private static final int LAST_ACCCESS_TIMEOUT_FIELD = 2;
    private static final int DEFAULT_TIMEOUT_FIELD = MAX_IDLE_TIME_TIMEOUT_FIELD;
    // Field index consulted by subclasses; starts at the default (max idle time).
    private int timeoutField = DEFAULT_TIMEOUT_FIELD;
    /**
     * Returns the configured timeout field index.
     *
     * @return the timeoutField
     */
    public int getTimeoutField() {
        return timeoutField;
    }
    /**
     * Sets the timeout field index.
     *
     * @param timeoutField the timeoutField to set
     */
    public void setTimeoutField(int timeoutField) {
        this.timeoutField = timeoutField;
    }
}
| apache-2.0 |
gf53520/kafka | clients/src/main/java/org/apache/kafka/common/serialization/Serde.java | 1883 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import java.io.Closeable;
import java.util.Map;
/**
 * The interface for wrapping a serializer and deserializer for the given data type.
 *
 * @param <T> Type to be serialized from and deserialized into.
 *
 * A class that implements this interface is expected to have a constructor with no parameter.
 */
public interface Serde<T> extends Closeable {
    /**
     * Configure this class, which will configure the underlying serializer and deserializer.
     * <p>
     * The default implementation is a no-op.
     *
     * @param configs configs in key/value pairs
     * @param isKey whether is for key or value
     */
    default void configure(Map<String, ?> configs, boolean isKey) {
        // intentionally left blank
    }
    /**
     * Close this serde class, which will close the underlying serializer and deserializer.
     * <p>
     * This method has to be idempotent because it might be called multiple times.
     * The default implementation is a no-op.
     */
    @Override
    default void close() {
        // intentionally left blank
    }
    /** Returns the serializer half of this serde. */
    Serializer<T> serializer();
    /** Returns the deserializer half of this serde. */
    Deserializer<T> deserializer();
}
| apache-2.0 |
firzhan/wso2-ode | bpel-api-jca/src/main/java/org/apache/ode/bpel/jca/clientapi/ProcessManagementConnection.java | 1218 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.bpel.jca.clientapi;
import javax.resource.cci.Connection;
import org.apache.ode.bpel.pmapi.InstanceManagement;
import org.apache.ode.bpel.pmapi.ProcessManagement;
/**
* JCA {@link javax.resource.cci.Connection} interface combining process and
* instance management.
*/
public interface ProcessManagementConnection
extends ProcessManagement, InstanceManagement, Connection
{
}
| apache-2.0 |
vergilchiu/hive | contrib/src/java/org/apache/hadoop/hive/contrib/mr/Mapper.java | 1174 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.contrib.mr;
/**
 * Mapper. Implementations transform a single input record (an array of column
 * values) into zero or more intermediate rows emitted through the {@link Output}
 * collector.
 */
public interface Mapper {
  /**
   * Maps a single row into an intermediate rows.
   *
   * @param record
   *          input record, one element per column
   * @param output
   *          collect mapped rows.
   * @throws Exception
   *           on error
   */
  void map(String[] record, Output output) throws Exception;
}
| apache-2.0 |
prestodb/presto | presto-hive/src/main/java/com/facebook/presto/hive/ForCachingDirectoryLister.java | 1048 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import javax.inject.Qualifier;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * JSR-330 {@link Qualifier} annotation used to mark the injection points (fields,
 * parameters, methods) whose binding is intended for the caching directory lister,
 * distinguishing it from other bindings of the same type.
 */
@Retention(RUNTIME)
@Target({FIELD, PARAMETER, METHOD})
@Qualifier
public @interface ForCachingDirectoryLister
{
}
| apache-2.0 |
Kreolwolf1/Elastic | src/main/java/org/elasticsearch/common/trove/ExtTIntArrayList.java | 1210 | /*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.trove;
import gnu.trove.list.array.TIntArrayList;
/**
 * Extension of Trove's {@link TIntArrayList} that exposes the backing int array
 * without copying, for callers that need allocation-free access.
 */
public class ExtTIntArrayList extends TIntArrayList {
    public ExtTIntArrayList() {
    }
    public ExtTIntArrayList(int capacity) {
        super(capacity);
    }
    public ExtTIntArrayList(int[] values) {
        super(values);
    }
    /**
     * Returns the internal backing array directly — no defensive copy is made, so
     * mutating the returned array mutates this list. Note the array's length is the
     * list's capacity and may exceed its logical size; callers must bound access
     * accordingly.
     */
    public int[] unsafeArray() {
        return _data;
    }
}
| apache-2.0 |
JoeChien23/hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppAttemptBlock.java | 7492 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._EVEN;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._ODD;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.server.webapp.AppAttemptBlock;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject;
import java.util.List;
public class RMAppAttemptBlock extends AppAttemptBlock{
private final ResourceManager rm;
protected Configuration conf;
  /**
   * Constructs the attempt block with the ResourceManager (source of live
   * app/attempt data) and the configuration used to derive the HTTP scheme
   * prefix for rendered links.
   */
  @Inject
  RMAppAttemptBlock(ViewContext ctx, ResourceManager rm, Configuration conf) {
    super(rm.getClientRMService(), ctx);
    this.rm = rm;
    this.conf = conf;
  }
  /**
   * Renders an HTML table of the attempt's outstanding resource requests (one row
   * per non-empty request), headed by the total outstanding resource computed over
   * the ANY requests. Nothing is rendered when there are no requests.
   */
  private void createResourceRequestsTable(Block html) {
    // Pull the live app (with resource requests included) from the RM context.
    AppInfo app =
        new AppInfo(rm, rm.getRMContext().getRMApps()
            .get(this.appAttemptId.getApplicationId()), true,
            WebAppUtils.getHttpSchemePrefix(conf));
    List<ResourceRequest> resourceRequests = app.getResourceRequests();
    if (resourceRequests == null || resourceRequests.isEmpty()) {
      return;
    }
    DIV<Hamlet> div = html.div(_INFO_WRAP);
    TABLE<DIV<Hamlet>> table =
        div.h3("Total Outstanding Resource Requests: "
            + getTotalResource(resourceRequests)).table(
            "#ResourceRequests");
    table.tr().
      th(_TH, "Priority").
      th(_TH, "ResourceName").
      th(_TH, "Capability").
      th(_TH, "NumContainers").
      th(_TH, "RelaxLocality").
      th(_TH, "NodeLabelExpression").
      _();
    boolean odd = false;
    for (ResourceRequest request : resourceRequests) {
      // Fully-satisfied requests carry zero containers; skip them.
      if (request.getNumContainers() == 0) {
        continue;
      }
      table.tr((odd = !odd) ? _ODD : _EVEN)
        .td(String.valueOf(request.getPriority()))
        .td(request.getResourceName())
        .td(String.valueOf(request.getCapability()))
        .td(String.valueOf(request.getNumContainers()))
        .td(String.valueOf(request.getRelaxLocality()))
        .td(request.getNodeLabelExpression() == null ? "N/A" : request
            .getNodeLabelExpression())._();
    }
    table._();
    div._();
  }
private Resource getTotalResource(List<ResourceRequest> requests) {
Resource totalResource = Resource.newInstance(0, 0);
if (requests == null) {
return totalResource;
}
for (ResourceRequest request : requests) {
if (request.getNumContainers() == 0) {
continue;
}
if (request.getResourceName().equals(ResourceRequest.ANY)) {
Resources.addTo(
totalResource,
Resources.multiply(request.getCapability(),
request.getNumContainers()));
}
}
return totalResource;
}
private void createContainerLocalityTable(Block html) {
RMAppAttemptMetrics attemptMetrics = null;
RMAppAttempt attempt = getRMAppAttempt();
if (attempt != null) {
attemptMetrics = attempt.getRMAppAttemptMetrics();
}
if (attemptMetrics == null) {
return;
}
DIV<Hamlet> div = html.div(_INFO_WRAP);
TABLE<DIV<Hamlet>> table =
div.h3(
"Total Allocated Containers: "
+ attemptMetrics.getTotalAllocatedContainers()).h3("Each table cell"
+ " represents the number of NodeLocal/RackLocal/OffSwitch containers"
+ " satisfied by NodeLocal/RackLocal/OffSwitch resource requests.").table(
"#containerLocality");
table.
tr().
th(_TH, "").
th(_TH, "Node Local Request").
th(_TH, "Rack Local Request").
th(_TH, "Off Switch Request").
_();
String[] containersType =
{ "Num Node Local Containers (satisfied by)", "Num Rack Local Containers (satisfied by)",
"Num Off Switch Containers (satisfied by)" };
boolean odd = false;
for (int i = 0; i < attemptMetrics.getLocalityStatistics().length; i++) {
table.tr((odd = !odd) ? _ODD : _EVEN).td(containersType[i])
.td(String.valueOf(attemptMetrics.getLocalityStatistics()[i][0]))
.td(i == 0 ? "" : String.valueOf(attemptMetrics.getLocalityStatistics()[i][1]))
.td(i <= 1 ? "" : String.valueOf(attemptMetrics.getLocalityStatistics()[i][2]))._();
}
table._();
div._();
}
private boolean isApplicationInFinalState(YarnApplicationAttemptState state) {
return state == YarnApplicationAttemptState.FINISHED
|| state == YarnApplicationAttemptState.FAILED
|| state == YarnApplicationAttemptState.KILLED;
}
@Override
protected void createAttemptHeadRoomTable(Block html) {
RMAppAttempt attempt = getRMAppAttempt();
if (attempt != null) {
if (!isApplicationInFinalState(YarnApplicationAttemptState
.valueOf(attempt.getAppAttemptState().toString()))) {
RMAppAttemptMetrics metrics = attempt.getRMAppAttemptMetrics();
DIV<Hamlet> pdiv = html._(InfoBlock.class).div(_INFO_WRAP);
info("Application Attempt Overview").clear();
info("Application Attempt Metrics")._(
"Application Attempt Headroom : ", metrics == null ? "N/A" :
metrics.getApplicationAttemptHeadroom());
pdiv._();
}
}
}
private RMAppAttempt getRMAppAttempt() {
ApplicationId appId = this.appAttemptId.getApplicationId();
RMAppAttempt attempt = null;
RMApp rmApp = rm.getRMContext().getRMApps().get(appId);
if (rmApp != null) {
attempt = rmApp.getAppAttempts().get(appAttemptId);
}
return attempt;
}
@Override
protected void createTablesForAttemptMetrics(Block html) {
createContainerLocalityTable(html);
createResourceRequestsTable(html);
}
}
| apache-2.0 |
agolPL/keycloak | testsuite/integration-arquillian/tests/other/adapters/jboss/wildfly/src/test/java/org/keycloak/testsuite/adapter/example/authorization/WildflyServletAuthzAdapterTest.java | 1111 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.adapter.example.authorization;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.keycloak.testsuite.arquillian.annotation.AppServerContainer;
/**
 * Runs the servlet-based authorization adapter tests against the Wildfly app
 * server container; all test logic lives in the abstract superclass.
 *
 * @author tkyjovsk
 */
@RunAsClient
@AppServerContainer("app-server-wildfly")
//@AdapterLibsLocationProperty("adapter.libs.wildfly")
public class WildflyServletAuthzAdapterTest extends AbstractServletAuthzFunctionalAdapterTest {
}
| apache-2.0 |
jasked/Zblog | src/main/java/com/zblog/core/util/JsoupUtils.java | 2730 | package com.zblog.core.util;
import java.util.LinkedList;
import java.util.List;
import org.jsoup.Jsoup;
import org.jsoup.helper.StringUtil;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.safety.Whitelist;
import org.jsoup.select.Elements;
/**
* html文本处理工具
*
* @author zhou
*
*/
/**
 * HTML text-processing helpers built on jsoup: sanitising user supplied markup
 * and extracting image/link URLs.
 *
 * @author zhou
 */
public class JsoupUtils{
  /** Whitelist for user content: jsoup's relaxed set plus a few trusted tags/attributes. */
  private final static Whitelist CONTENT_WHITELIST = Whitelist.relaxed();
  static{
    // extra tags considered safe for rich content
    CONTENT_WHITELIST.addTags("embed", "object", "param", "div", "font", "del");
    // extra attributes considered safe
    CONTENT_WHITELIST.addAttributes(":all", "style", "class", "id", "name", "on");
    CONTENT_WHITELIST.addAttributes("object", "width", "height", "classid", "codebase");
    CONTENT_WHITELIST.addAttributes("param", "name", "value");
    CONTENT_WHITELIST.addAttributes("embed", "src", "quality", "width", "height", "allowFullScreen", "allowScriptAccess",
        "flashvars", "name", "type", "pluginspage");
  }
  /**
   * Sanitises user supplied HTML against the whitelist above.
   *
   * @param html raw user input, may be null/blank
   * @return cleaned markup, or "" for blank input
   */
  public static String filter(String html){
    if(StringUtil.isBlank(html)){
      return "";
    }
    return Jsoup.clean(html, CONTENT_WHITELIST);
  }
  /**
   * Collects every URL in the given HTML that may point at an image:
   * all {@code <img src>} values, plus {@code <a href>} values whose
   * extension looks like an image.
   *
   * @param html markup to scan
   * @return candidate image URLs in document order
   */
  public static List<String> getImagesOrLinks(String html){
    List<String> links = new LinkedList<>();
    Document document = Jsoup.parse(html);
    for(Element node : document.select("img,a")){
      boolean anchor = "a".equals(node.nodeName());
      String url = node.attr(anchor ? "href" : "src");
      if(StringUtils.isBlank(url))
        continue;
      if(!anchor){
        // <img>: take the src as-is
        links.add(url);
        continue;
      }
      // <a>: drop any query string, then keep only image-like extensions
      int question = url.indexOf("?");
      if(question > 0)
        url = url.substring(0, question);
      String ext = url.substring(url.lastIndexOf(".") + 1).toLowerCase();
      if(FileUtils.isImageExt(ext)){
        links.add(url);
      }
    }
    return links;
  }
  /**
   * Lenient cleaning that still strips object/script/span/div etc.; suitable
   * for rich-text-editor content.
   *
   * @param html markup to clean
   * @return cleaned markup
   */
  public static String simpleText(String html){
    return Jsoup.clean(html, Whitelist.simpleText());
  }
  /**
   * Strips every tag and returns plain text; suitable for textarea/input values.
   *
   * @param html markup to flatten
   * @return text content only
   */
  public static String plainText(String html){
    Document document = Jsoup.parse(html);
    return document.text();
  }
}
| apache-2.0 |
alina-ipatina/pentaho-kettle | ui/src/org/pentaho/di/ui/trans/steps/analyticquery/AnalyticQueryDialog.java | 16177 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.analyticquery;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.dialogs.MessageDialogWithToggle;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.analyticquery.AnalyticQuery;
import org.pentaho.di.trans.steps.analyticquery.AnalyticQueryMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
/**
 * SWT settings dialog for the "Analytic Query" transformation step: lets the
 * user pick the group-by fields and the analytic (LEAD/LAG-style) functions to
 * compute, then writes the result back into an {@link AnalyticQueryMeta}.
 */
public class AnalyticQueryDialog extends BaseStepDialog implements StepDialogInterface {
  private static Class<?> PKG = AnalyticQuery.class; // for i18n purposes, needed by Translator2!!
  // custom-parameter key for the "data must be sorted" warning toggle
  public static final String STRING_SORT_WARNING_PARAMETER = "AnalyticQuerySortWarning";
  private Label wlGroup;
  private TableView wGroup;
  private FormData fdlGroup, fdGroup;
  private Label wlAgg;
  private TableView wAgg;
  private FormData fdlAgg, fdAgg;
  private Button wGet, wGetAgg;
  private FormData fdGet, fdGetAgg;
  private Listener lsGet, lsGetAgg;
  // the step metadata being edited
  private AnalyticQueryMeta input;
  private ColumnInfo[] ciKey;
  private ColumnInfo[] ciReturn;
  // previous-step field names -> index, filled asynchronously for the combo boxes
  private Map<String, Integer> inputFields;
  public AnalyticQueryDialog( Shell parent, Object in, TransMeta transMeta, String sname ) {
    super( parent, (BaseStepMeta) in, transMeta, sname );
    input = (AnalyticQueryMeta) in;
    inputFields = new HashMap<String, Integer>();
  }
  /**
   * Builds and runs the dialog's event loop.
   *
   * @return the (possibly renamed) step name on OK, or null when cancelled
   */
  public String open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();
    shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN );
    props.setLook( shell );
    setShellImage( shell, input );
    // any edit marks the meta as changed
    ModifyListener lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        input.setChanged();
      }
    };
    // remember the changed flag so Cancel can restore it
    backupChanged = input.hasChanged();
    // backupAllRows = input.passAllRows();
    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;
    shell.setLayout( formLayout );
    shell.setText( BaseMessages.getString( PKG, "AnalyticQueryDialog.Shell.Title" ) );
    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;
    // Stepname line
    wlStepname = new Label( shell, SWT.RIGHT );
    wlStepname.setText( BaseMessages.getString( PKG, "AnalyticQueryDialog.Stepname.Label" ) );
    props.setLook( wlStepname );
    fdlStepname = new FormData();
    fdlStepname.left = new FormAttachment( 0, 0 );
    fdlStepname.right = new FormAttachment( middle, -margin );
    fdlStepname.top = new FormAttachment( 0, margin );
    wlStepname.setLayoutData( fdlStepname );
    wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wStepname.setText( stepname );
    props.setLook( wStepname );
    wStepname.addModifyListener( lsMod );
    fdStepname = new FormData();
    fdStepname.left = new FormAttachment( middle, 0 );
    fdStepname.top = new FormAttachment( 0, margin );
    fdStepname.right = new FormAttachment( 100, 0 );
    wStepname.setLayoutData( fdStepname );
    // Group-by fields table (upper half of the dialog)
    wlGroup = new Label( shell, SWT.NONE );
    wlGroup.setText( BaseMessages.getString( PKG, "AnalyticQueryDialog.Group.Label" ) );
    props.setLook( wlGroup );
    fdlGroup = new FormData();
    fdlGroup.left = new FormAttachment( 0, 0 );
    fdlGroup.top = new FormAttachment( wlStepname, margin );
    wlGroup.setLayoutData( fdlGroup );
    int nrKeyCols = 1;
    int nrKeyRows = ( input.getGroupField() != null ? input.getGroupField().length : 1 );
    ciKey = new ColumnInfo[nrKeyCols];
    // combo values are filled later by setComboBoxes() once input fields are known
    ciKey[0] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "AnalyticQueryDialog.ColumnInfo.GroupField" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false );
    wGroup =
      new TableView(
        transMeta, shell, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL, ciKey,
        nrKeyRows, lsMod, props );
    wGet = new Button( shell, SWT.PUSH );
    wGet.setText( BaseMessages.getString( PKG, "AnalyticQueryDialog.GetFields.Button" ) );
    fdGet = new FormData();
    fdGet.top = new FormAttachment( wlGroup, margin );
    fdGet.right = new FormAttachment( 100, 0 );
    wGet.setLayoutData( fdGet );
    fdGroup = new FormData();
    fdGroup.left = new FormAttachment( 0, 0 );
    fdGroup.top = new FormAttachment( wlGroup, margin );
    fdGroup.right = new FormAttachment( wGet, -margin );
    fdGroup.bottom = new FormAttachment( 45, 0 );
    wGroup.setLayoutData( fdGroup );
    // THE Aggregate fields
    wlAgg = new Label( shell, SWT.NONE );
    wlAgg.setText( BaseMessages.getString( PKG, "AnalyticQueryDialog.Aggregates.Label" ) );
    props.setLook( wlAgg );
    fdlAgg = new FormData();
    fdlAgg.left = new FormAttachment( 0, 0 );
    fdlAgg.top = new FormAttachment( wGroup, margin );
    wlAgg.setLayoutData( fdlAgg );
    int UpInsCols = 4;
    int UpInsRows = ( input.getAggregateField() != null ? input.getAggregateField().length : 1 );
    // columns: new field name, subject field, analytic type, N (rows back/forward)
    ciReturn = new ColumnInfo[UpInsCols];
    ciReturn[0] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "AnalyticQueryDialog.ColumnInfo.Name" ), ColumnInfo.COLUMN_TYPE_TEXT,
        false );
    ciReturn[1] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "AnalyticQueryDialog.ColumnInfo.Subject" ),
        ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false );
    ciReturn[2] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "AnalyticQueryDialog.ColumnInfo.Type" ), ColumnInfo.COLUMN_TYPE_CCOMBO,
        AnalyticQueryMeta.typeGroupLongDesc );
    ciReturn[3] =
      new ColumnInfo(
        BaseMessages.getString( PKG, "AnalyticQueryDialog.ColumnInfo.Value" ), ColumnInfo.COLUMN_TYPE_TEXT,
        false );
    ciReturn[3].setToolTip( BaseMessages.getString( PKG, "AnalyticQueryDialog.ColumnInfo.Value.Tooltip" ) );
    wAgg =
      new TableView(
        transMeta, shell, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL, ciReturn,
        UpInsRows, lsMod, props );
    wGetAgg = new Button( shell, SWT.PUSH );
    wGetAgg.setText( BaseMessages.getString( PKG, "AnalyticQueryDialog.GetLookupFields.Button" ) );
    fdGetAgg = new FormData();
    fdGetAgg.top = new FormAttachment( wlAgg, margin );
    fdGetAgg.right = new FormAttachment( 100, 0 );
    wGetAgg.setLayoutData( fdGetAgg );
    //
    // Search the fields in the background
    final Runnable runnable = new Runnable() {
      public void run() {
        StepMeta stepMeta = transMeta.findStep( stepname );
        if ( stepMeta != null ) {
          try {
            RowMetaInterface row = transMeta.getPrevStepFields( stepMeta );
            // Remember these fields...
            for ( int i = 0; i < row.size(); i++ ) {
              inputFields.put( row.getValueMeta( i ).getName(), Integer.valueOf( i ) );
            }
            setComboBoxes();
          } catch ( KettleException e ) {
            logError( BaseMessages.getString( PKG, "System.Dialog.GetFieldsFailed.Message" ) );
          }
        }
      }
    };
    new Thread( runnable ).start();
    // THE BUTTONS
    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );
    setButtonPositions( new Button[] { wOK, wCancel }, margin, null );
    // aggregate table fills the space between its label and the buttons
    fdAgg = new FormData();
    fdAgg.left = new FormAttachment( 0, 0 );
    fdAgg.top = new FormAttachment( wlAgg, margin );
    fdAgg.right = new FormAttachment( wGetAgg, -margin );
    fdAgg.bottom = new FormAttachment( wOK, -margin );
    wAgg.setLayoutData( fdAgg );
    // Add listeners
    lsOK = new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    };
    lsGet = new Listener() {
      public void handleEvent( Event e ) {
        get();
      }
    };
    lsGetAgg = new Listener() {
      public void handleEvent( Event e ) {
        getAgg();
      }
    };
    lsCancel = new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    };
    wOK.addListener( SWT.Selection, lsOK );
    wGet.addListener( SWT.Selection, lsGet );
    wGetAgg.addListener( SWT.Selection, lsGetAgg );
    wCancel.addListener( SWT.Selection, lsCancel );
    // Enter in the step-name field acts like OK
    lsDef = new SelectionAdapter() {
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };
    wStepname.addSelectionListener( lsDef );
    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );
    // Set the shell size, based upon previous time...
    setSize();
    getData();
    input.setChanged( backupChanged );
    shell.open();
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
    return stepname;
  }
  /** Populates both tables' field combo boxes with the sorted input field names. */
  protected void setComboBoxes() {
    // Something was changed in the row.
    //
    final Map<String, Integer> fields = new HashMap<String, Integer>();
    // Add the currentMeta fields...
    fields.putAll( inputFields );
    Set<String> keySet = fields.keySet();
    List<String> entries = new ArrayList<String>( keySet );
    String[] fieldNames = entries.toArray( new String[entries.size()] );
    Const.sortStrings( fieldNames );
    ciKey[0].setComboValues( fieldNames );
    ciReturn[1].setComboValues( fieldNames );
  }
  /**
   * Copy information from the meta-data input to the dialog fields.
   */
  public void getData() {
    if ( log.isDebug() ) {
      logDebug( BaseMessages.getString( PKG, "AnalyticQueryDialog.Log.GettingKeyInfo" ) );
    }
    if ( input.getGroupField() != null ) {
      for ( int i = 0; i < input.getGroupField().length; i++ ) {
        TableItem item = wGroup.table.getItem( i );
        if ( input.getGroupField()[i] != null ) {
          item.setText( 1, input.getGroupField()[i] );
        }
      }
    }
    if ( input.getAggregateField() != null ) {
      for ( int i = 0; i < input.getAggregateField().length; i++ ) {
        TableItem item = wAgg.table.getItem( i );
        if ( input.getAggregateField()[i] != null ) {
          item.setText( 1, input.getAggregateField()[i] );
        }
        if ( input.getSubjectField()[i] != null ) {
          item.setText( 2, input.getSubjectField()[i] );
        }
        item.setText( 3, AnalyticQueryMeta.getTypeDescLong( input.getAggregateType()[i] ) );
        int value = input.getValueField()[i];
        String valuetext = Integer.toString( value );
        if ( valuetext != null ) {
          item.setText( 4, valuetext );
        }
      }
    }
    wGroup.setRowNums();
    wGroup.optWidth( true );
    wAgg.setRowNums();
    wAgg.optWidth( true );
    wStepname.selectAll();
    wStepname.setFocus();
  }
  /** Discards edits: restores the changed flag and closes with a null step name. */
  private void cancel() {
    stepname = null;
    input.setChanged( backupChanged );
    dispose();
  }
  /**
   * Writes the dialog contents back into the step meta, shows the one-time
   * "input must be sorted" warning, and closes the dialog.
   */
  private void ok() {
    if ( Utils.isEmpty( wStepname.getText() ) ) {
      return;
    }
    int sizegroup = wGroup.nrNonEmpty();
    int nrfields = wAgg.nrNonEmpty();
    input.allocate( sizegroup, nrfields );
    //CHECKSTYLE:Indentation:OFF
    for ( int i = 0; i < sizegroup; i++ ) {
      TableItem item = wGroup.getNonEmpty( i );
      input.getGroupField()[i] = item.getText( 1 );
    }
    //CHECKSTYLE:Indentation:OFF
    for ( int i = 0; i < nrfields; i++ ) {
      TableItem item = wAgg.getNonEmpty( i );
      input.getAggregateField()[i] = item.getText( 1 );
      input.getSubjectField()[i] = item.getText( 2 );
      input.getAggregateType()[i] = AnalyticQueryMeta.getType( item.getText( 3 ) );
      input.getValueField()[i] = Const.toInt( item.getText( 4 ), 1 );
    }
    stepname = wStepname.getText();
    // warn (once, unless re-enabled) that the step requires sorted input
    if ( "Y".equalsIgnoreCase( props.getCustomParameter( STRING_SORT_WARNING_PARAMETER, "Y" ) ) ) {
      MessageDialogWithToggle md =
        new MessageDialogWithToggle( shell,
          BaseMessages.getString( PKG, "AnalyticQueryDialog.GroupByWarningDialog.DialogTitle" ), null,
          BaseMessages.getString( PKG, "AnalyticQueryDialog.GroupByWarningDialog.DialogMessage", Const.CR ) + Const.CR,
          MessageDialog.WARNING,
          new String[] { BaseMessages.getString( PKG, "AnalyticQueryDialog.GroupByWarningDialog.Option1" ) },
          0, BaseMessages.getString( PKG, "AnalyticQueryDialog.GroupByWarningDialog.Option2" ),
          "N".equalsIgnoreCase( props.getCustomParameter( STRING_SORT_WARNING_PARAMETER, "Y" ) ) );
      MessageDialogWithToggle.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
      md.open();
      props.setCustomParameter( STRING_SORT_WARNING_PARAMETER, md.getToggleState() ? "N" : "Y" );
      props.saveProps();
    }
    dispose();
  }
  /** "Get fields" button: fills the group table from the previous step's fields. */
  private void get() {
    try {
      RowMetaInterface r = transMeta.getPrevStepFields( stepname );
      if ( r != null && !r.isEmpty() ) {
        BaseStepDialog.getFieldsFromPrevious( r, wGroup, 1, new int[] { 1 }, new int[] {}, -1, -1, null );
      }
    } catch ( KettleException ke ) {
      new ErrorDialog(
        shell, BaseMessages.getString( PKG, "AnalyticQueryDialog.FailedToGetFields.DialogTitle" ), BaseMessages
          .getString( PKG, "AnalyticQueryDialog.FailedToGetFields.DialogMessage" ), ke );
    }
  }
  /** "Get lookup fields" button: fills the aggregate table from the previous step's fields. */
  private void getAgg() {
    try {
      RowMetaInterface r = transMeta.getPrevStepFields( stepname );
      if ( r != null && !r.isEmpty() ) {
        BaseStepDialog.getFieldsFromPrevious( r, wAgg, 1, new int[] { 1, 2 }, new int[] {}, -1, -1, null );
      }
    } catch ( KettleException ke ) {
      new ErrorDialog(
        shell, BaseMessages.getString( PKG, "AnalyticQueryDialog.FailedToGetFields.DialogTitle" ), BaseMessages
          .getString( PKG, "AnalyticQueryDialog.FailedToGetFields.DialogMessage" ), ke );
    }
  }
}
| apache-2.0 |
alanfgates/hive | service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java | 2115 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.service.cli;
/**
* CLIServiceUtils.
*
*/
/**
 * CLIServiceUtils.
 *
 */
public class CLIServiceUtils {

  private static final char SEARCH_STRING_ESCAPE = '\\';
  /** Regex metacharacters that must be backslash-escaped when emitted as literals. */
  private static final String REGEX_SPECIAL_CHARS = "\\^$.|?*+()[]{}";

  /**
   * Convert a SQL search pattern into an equivalent Java Regex.
   *
   * <p>SQL/JDBC wildcards '%' and '_' become the regex ".*" and '.'. Every
   * other character is matched literally (lowercased), so regex
   * metacharacters such as '.', '(' or '[' are escaped — previously they were
   * passed through unescaped, which made '.' match any character and made
   * unbalanced brackets throw {@code PatternSyntaxException} at compile time.
   *
   * @param pattern input which may contain '%' or '_' wildcard characters, or
   * these characters escaped using getSearchStringEscape().
   * @return replace %/_ with regex search characters, also handle escaped
   * characters.
   */
  public static String patternToRegex(String pattern) {
    if (pattern == null) {
      return ".*";
    } else {
      StringBuilder result = new StringBuilder(pattern.length());
      boolean escaped = false;
      for (int i = 0, len = pattern.length(); i < len; i++) {
        char c = pattern.charAt(i);
        if (escaped) {
          if (c != SEARCH_STRING_ESCAPE) {
            escaped = false;
          }
          // an escaped character is always matched literally (no lowercasing,
          // preserving the original behavior for escaped input)
          appendLiteral(result, c);
        } else {
          if (c == SEARCH_STRING_ESCAPE) {
            escaped = true;
            continue;
          } else if (c == '%') {
            result.append(".*");
          } else if (c == '_') {
            result.append('.');
          } else {
            appendLiteral(result, Character.toLowerCase(c));
          }
        }
      }
      return result.toString();
    }
  }

  /** Appends {@code c} to {@code sb}, backslash-escaping regex metacharacters. */
  private static void appendLiteral(StringBuilder sb, char c) {
    if (REGEX_SPECIAL_CHARS.indexOf(c) >= 0) {
      sb.append('\\');
    }
    sb.append(c);
  }
}
| apache-2.0 |
wisgood/hive | ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java | 1558 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.security.authorization.plugin;
import static org.junit.Assert.fail;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.junit.Test;
/**
 * Test HiveOperationType
 */
public class TestHiveOperationType {

  /**
   * Every constant declared in {@link HiveOperation} must have a counterpart
   * with the same name in {@link HiveOperationType}.
   */
  @Test
  public void checkHiveOperationTypeMatch() {
    for (HiveOperation operation : HiveOperation.values()) {
      try {
        // valueOf throws IllegalArgumentException when no matching constant exists
        HiveOperationType.valueOf(operation.name());
      } catch (IllegalArgumentException ex) {
        fail("Unable to find corresponding type in HiveOperationType for " + operation + " : " + ex);
      }
    }
  }
}
| apache-2.0 |
awhitford/Resteasy | testsuite/integration-tests/src/test/java/org/jboss/resteasy/test/providers/jackson2/resource/TestJsonView.java | 97 | package org.jboss.resteasy.test.providers.jackson2.resource;
/**
 * Intentionally empty marker interface; presumably used as a Jackson
 * {@code @JsonView} discriminator by the Jackson2 provider tests — confirm
 * against the resources that reference it.
 */
public interface TestJsonView
{
}
| apache-2.0 |
ChristianMurphy/uPortal | uPortal-security/uPortal-security-core/src/main/java/org/apereo/portal/security/provider/JAASSecurityContext.java | 3479 | /**
* Licensed to Apereo under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright ownership. Apereo
* licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the License at the
* following location:
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apereo.portal.security.provider;
import java.io.Serializable;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apereo.portal.security.IAdditionalDescriptor;
import org.apereo.portal.security.ISecurityContext;
import org.apereo.portal.security.PortalSecurityException;
/** This is an implementation of a SecurityContext that checks a user's credentials using JAAS. */
class JAASSecurityContext extends ChainingSecurityContext
        implements ISecurityContext, Serializable {

    private static final Log log = LogFactory.getLog(JAASSecurityContext.class);

    /** Authentication-type token identifying this JAAS-based context. */
    private static final int JAASSECURITYAUTHTYPE = 0xFF05;

    /** Wraps the JAAS Subject obtained from a successful login; null until then. */
    private IAdditionalDescriptor additionalDescriptor;

    /* package-private */ JAASSecurityContext() {}

    @Override
    public int getAuthType() {
        return JAASSECURITYAUTHTYPE;
    }

    @Override
    public IAdditionalDescriptor getAdditionalDescriptor() {
        return additionalDescriptor;
    }

    /**
     * Authenticates the principal/credentials captured on this context against
     * the "uPortal" JAAS login configuration, then delegates to the chain.
     * Sets {@code isauth} only when the JAAS login succeeds.
     */
    @Override
    public synchronized void authenticate() throws PortalSecurityException {
        this.isauth = false;
        if (this.myPrincipal.UID != null && this.myOpaqueCredentials.credentialstring != null) {
            try {
                // Delegate the credential check to the "uPortal" JAAS configuration.
                LoginContext lc =
                        new LoginContext(
                                "uPortal",
                                new JAASInlineCallbackHandler(
                                        this.myPrincipal.UID,
                                        (new String(this.myOpaqueCredentials.credentialstring))
                                                .toCharArray())); // could not come up w/ a better
                // way to do this
                lc.login();
                additionalDescriptor = new JAASSubject(lc.getSubject());
                // login() throws on failure, so reaching here means authentication succeeded
                if (log.isInfoEnabled())
                    log.info("User " + this.myPrincipal.UID + " is authenticated");
                this.isauth = true;
            } catch (LoginException e) {
                // Failed login is an expected outcome, not an error: log and leave isauth false.
                if (log.isInfoEnabled())
                    log.info("User " + this.myPrincipal.UID + ": invalid password");
                if (log.isDebugEnabled()) log.debug("LoginException", e);
            }
        } else {
            log.error("Principal or OpaqueCredentials not initialized prior to authenticate");
        }
        // authenticate all subcontexts, regardless of the local outcome.
        super.authenticate();
    }
}
| apache-2.0 |
msrader/zap-extensions | src/org/zaproxy/zap/extension/ascanrules/TestParameterTamper.java | 7073 | /*
*
* Paros and its related class files.
*
* Paros is an HTTP/HTTPS proxy for assessing web application security.
* Copyright (C) 2003-2004 Chinotec Technologies Company
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Clarified Artistic License
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Clarified Artistic License for more details.
*
* You should have received a copy of the Clarified Artistic License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
// ZAP: 2012/01/02 Separate param and attack
// ZAP: 2012/03/15 Changed the method checkResult to use the class StringBuilder
// instead of StringBuffer.
// ZAP: 2012/04/25 Added @Override annotation to all appropriate methods.
// ZAP: 2012/12/28 Issue 447: Include the evidence in the attack field
// ZAP: 2013/01/25 Removed the "(non-Javadoc)" comments.
// ZAP: 2013/03/03 Issue 546: Remove all template Javadoc comments
package org.zaproxy.zap.extension.ascanrules;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.core.scanner.AbstractAppParamPlugin;
import org.parosproxy.paros.core.scanner.AbstractPlugin;
import org.parosproxy.paros.core.scanner.Alert;
import org.parosproxy.paros.core.scanner.Category;
import org.parosproxy.paros.network.HttpMessage;
import org.parosproxy.paros.network.HttpStatusCode;
public class TestParameterTamper extends AbstractAppParamPlugin {
/**
* Prefix for internationalised messages used by this rule
*/
private static final String MESSAGE_PREFIX = "ascanrules.testparametertamper.";
//private static final String[] PARAM_LIST = {"", "@", "+", "%A", "%1Z", "%", "%00", "|"};
// problem sending "%A", "%1Z" to server - assume server can handle properly on this.
// %0A not included as this is in CRLFInjection already.
private static String[] PARAM_LIST = {"", "", "@", "+", AbstractPlugin.getURLDecode("%00"), "|"};
private static Pattern patternErrorJava1 = Pattern.compile("javax\\.servlet\\.\\S+", PATTERN_PARAM);
private static Pattern patternErrorJava2 = Pattern.compile("invoke.+exception|exception.+invoke", PATTERN_PARAM);
private static Pattern patternErrorVBScript = Pattern.compile("Microsoft(\\s+| )*VBScript(\\s+| )+error", PATTERN_PARAM);
private static Pattern patternErrorODBC1 = Pattern.compile("Microsoft OLE DB Provider for ODBC Drivers.*error", PATTERN_PARAM);
private static Pattern patternErrorODBC2 = Pattern.compile("ODBC.*Drivers.*error", PATTERN_PARAM);
private static Pattern patternErrorJet = Pattern.compile("Microsoft JET Database Engine.*error", PATTERN_PARAM);
private static Pattern patternErrorPHP = Pattern.compile(" on line <b>", PATTERN_PARAM);
private static Pattern patternErrorTomcat = Pattern.compile("(Apache Tomcat).*(^Caused by:|HTTP Status 500 - Internal Server Error)", PATTERN_PARAM);
// ZAP: Added logger
private static Logger log = Logger.getLogger(TestParameterTamper.class);
@Override
public int getId() {
return 40008;
}
@Override
public String getName() {
return Constant.messages.getString(MESSAGE_PREFIX + "name");
}
@Override
public String[] getDependency() {
return null;
}
@Override
public String getDescription() {
return Constant.messages.getString(MESSAGE_PREFIX + "desc");
}
@Override
public int getCategory() {
return Category.INJECTION;
}
@Override
public String getSolution() {
return Constant.messages.getString(MESSAGE_PREFIX + "soln");
}
@Override
public String getReference() {
return "";
}
@Override
public void init() {
}
/**
 * Probes a single parameter by replaying the request with each payload from
 * PARAM_LIST (and once with the parameter removed entirely) and handing each
 * response to checkResult for error-signature analysis.
 * Scanning stops for this parameter as soon as one payload produces a finding.
 *
 * @param msg the message being scanned (replaced by fresh copies per payload)
 * @param param the name of the parameter under test
 * @param value the parameter's original value (unused here)
 */
@Override
public void scan(HttpMessage msg, String param, String value) {
String attack = null;
// always try normal query first
HttpMessage normalMsg = getNewMsg();
try {
sendAndReceive(normalMsg);
} catch (Exception e) {
// ZAP: Log exceptions
log.warn(e.getMessage(), e);
return;
}
// Only proceed when the unmodified request succeeds; otherwise the baseline
// body used for false-positive filtering would be meaningless.
if (normalMsg.getResponseHeader().getStatusCode() != HttpStatusCode.OK) {
return;
}
for (int i = 0; i < PARAM_LIST.length; i++) {
msg = getNewMsg();
if (i == 0) {
// remove entire parameter when i=0;
setParameter(msg, null, null);
attack = null;
} else {
setParameter(msg, param, PARAM_LIST[i]);
attack = PARAM_LIST[i];
}
try {
sendAndReceive(msg);
// Compare against the baseline body; bail out on the first confirmed hit.
if (checkResult(msg, param, attack, normalMsg.getResponseBody().toString())) {
return;
}
} catch (Exception e) {
// ZAP: Log exceptions
log.warn(e.getMessage(), e);
}
}
}
/**
 * Inspects the response to a tampered request for server-side error signatures.
 * A finding is raised only when the response status is OK or a 5xx error AND the
 * body differs from the baseline (so parameters that have no effect on the output
 * do not produce false positives).
 *
 * @param msg the response to the tampered request
 * @param param the parameter that was tampered with
 * @param attack the payload that was sent (null when the parameter was removed)
 * @param normalHTTPResponse baseline body from the unmodified request
 * @return true if an alert was raised, false otherwise
 */
private boolean checkResult(HttpMessage msg, String param, String attack, String normalHTTPResponse) {
    if (msg.getResponseHeader().getStatusCode() != HttpStatusCode.OK
            && !HttpStatusCode.isServerError(msg.getResponseHeader().getStatusCode())) {
        return false;
    }
    // remove false positive if parameter have no effect on output
    if (msg.getResponseBody().toString().equals(normalHTTPResponse)) {
        return false;
    }
    StringBuilder sb = new StringBuilder();
    if (matchBodyPattern(msg, patternErrorJava1, sb) && matchBodyPattern(msg, patternErrorJava2, null)) {
        // Java servlet stack trace: both signatures present -> medium confidence.
        bingo(Alert.RISK_MEDIUM, Alert.CONFIDENCE_MEDIUM, null, param, attack, sb.toString(), msg);
        return true;
    } else if (matchBodyPattern(msg, patternErrorVBScript, sb)
            || matchBodyPattern(msg, patternErrorODBC1, sb)
            || matchBodyPattern(msg, patternErrorODBC2, sb)
            || matchBodyPattern(msg, patternErrorJet, sb)
            || matchBodyPattern(msg, patternErrorTomcat, sb)
            || matchBodyPattern(msg, patternErrorPHP, sb)) {
        // FIX: previously the attack and the matched evidence (sb.toString()) were
        // passed in swapped positions, inconsistent with the bingo(...) call above.
        bingo(Alert.RISK_MEDIUM, Alert.CONFIDENCE_LOW, "", param, attack, sb.toString(), msg);
        return true;
    }
    return false;
}
/** @return the default risk rating (medium) for alerts raised by this rule. */
@Override
public int getRisk() {
return Alert.RISK_MEDIUM;
}
/** @return CWE 472: External Control of Assumed-Immutable Web Parameter. */
@Override
public int getCweId() {
return 472;
}
/** @return WASC 20: Improper Input Handling. */
@Override
public int getWascId() {
return 20;
}
}
| apache-2.0 |
ern/elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java | 18536 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.security.authc.support.mapper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.ContextPreservingActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.xpack.core.security.ScrollHelper;
import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction;
import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequest;
import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest;
import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest;
import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName;
import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel;
import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames;
import org.elasticsearch.xpack.core.security.authc.support.CachingRealm;
import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static org.elasticsearch.action.DocWriteResponse.Result.CREATED;
import static org.elasticsearch.action.DocWriteResponse.Result.DELETED;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING;
import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS;
import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted;
import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed;
/**
* This store reads + writes {@link ExpressionRoleMapping role mappings} in an Elasticsearch
* {@link RestrictedIndicesNames#SECURITY_MAIN_ALIAS index}.
* <br>
* The store is responsible for all read and write operations as well as
* {@link #resolveRoles(UserData, ActionListener) resolving roles}.
* <p>
* No caching is done by this class, it is handled at a higher level and no polling for changes
* is done by this class. Modification operations make a best effort attempt to clear the cache
* on all nodes for the user that was modified.
*/
public class NativeRoleMappingStore implements UserRoleMapper {

    private static final Logger logger = LogManager.getLogger(NativeRoleMappingStore.class);

    static final String DOC_TYPE_FIELD = "doc_type";
    static final String DOC_TYPE_ROLE_MAPPING = "role-mapping";
    private static final String ID_PREFIX = DOC_TYPE_ROLE_MAPPING + "_";

    // Used when the outcome of a realm-cache refresh does not matter (index state changes).
    private static final ActionListener<Object> NO_OP_ACTION_LISTENER = new ActionListener<Object>() {
        @Override
        public void onResponse(Object o) {
            // nothing
        }

        @Override
        public void onFailure(Exception e) {
            // nothing
        }
    };

    private final Settings settings;
    private final Client client;
    private final SecurityIndexManager securityIndex;
    private final ScriptService scriptService;
    // Names of realms whose caches must be cleared whenever a mapping is modified.
    private final List<String> realmsToRefresh = new CopyOnWriteArrayList<>();

    public NativeRoleMappingStore(Settings settings, Client client, SecurityIndexManager securityIndex, ScriptService scriptService) {
        this.settings = settings;
        this.client = client;
        this.securityIndex = securityIndex;
        this.scriptService = scriptService;
    }

    /** Strips the {@code role-mapping_} prefix from a document id to recover the mapping name. */
    private String getNameFromId(String id) {
        assert id.startsWith(ID_PREFIX);
        return id.substring(ID_PREFIX.length());
    }

    /** Builds the document id for a mapping name by prepending the {@code role-mapping_} prefix. */
    private String getIdForName(String name) {
        return ID_PREFIX + name;
    }

    /**
     * Loads all mappings from the index.
     * <em>package private</em> for unit testing
     */
    protected void loadMappings(ActionListener<List<ExpressionRoleMapping>> listener) {
        if (securityIndex.isIndexUpToDate() == false) {
            listener.onFailure(new IllegalStateException(
                "Security index is not on the current version - the native realm will not be operational until " +
                "the upgrade API is run on the security index"));
            return;
        }
        final QueryBuilder query = QueryBuilders.termQuery(DOC_TYPE_FIELD, DOC_TYPE_ROLE_MAPPING);
        final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
        try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(SECURITY_ORIGIN)) {
            SearchRequest request = client.prepareSearch(SECURITY_MAIN_ALIAS)
                .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
                .setQuery(query)
                .setSize(1000)
                .setFetchSource(true)
                .request();
            // NOTE(review): this only reads the ignore_unavailable flag and discards the
            // boolean result - it does not modify the request. Confirm whether the intent
            // was to build IndicesOptions with ignoreUnavailable=true and set them here.
            request.indicesOptions().ignoreUnavailable();
            ScrollHelper.fetchAllByEntity(client, request,
                new ContextPreservingActionListener<>(supplier, ActionListener.wrap((Collection<ExpressionRoleMapping> mappings) ->
                    listener.onResponse(mappings.stream().filter(Objects::nonNull).collect(Collectors.toList())),
                    ex -> {
                        logger.error(new ParameterizedMessage("failed to load role mappings from index [{}] skipping all mappings.",
                            SECURITY_MAIN_ALIAS), ex);
                        listener.onResponse(Collections.emptyList());
                    })),
                doc -> buildMapping(getNameFromId(doc.getId()), doc.getSourceRef()));
        }
    }

    /**
     * Parses a single role-mapping document. Returns {@code null} (and logs a warning)
     * when the document cannot be parsed, so one bad document does not fail the load.
     */
    protected ExpressionRoleMapping buildMapping(String id, BytesReference source) {
        try (InputStream stream = source.streamInput();
             XContentParser parser = XContentType.JSON.xContent()
                 .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
            return ExpressionRoleMapping.parse(id, parser);
        } catch (Exception e) {
            logger.warn(new ParameterizedMessage("Role mapping [{}] cannot be parsed and will be skipped", id), e);
            return null;
        }
    }

    /**
     * Stores (create or update) a single mapping in the index
     */
    public void putRoleMapping(PutRoleMappingRequest request, ActionListener<Boolean> listener) {
        // Validate all templates before storing the role mapping
        for (TemplateRoleName templateRoleName : request.getRoleTemplates()) {
            templateRoleName.validate(scriptService);
        }
        modifyMapping(request.getName(), this::innerPutMapping, request, listener);
    }

    /**
     * Deletes a named mapping from the index
     */
    public void deleteRoleMapping(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) {
        modifyMapping(request.getName(), this::innerDeleteMapping, request, listener);
    }

    /**
     * Shared wrapper for write operations: checks the index version, delegates to the
     * given operation, and triggers a realm-cache refresh on success.
     */
    private <Request, Result> void modifyMapping(String name, CheckedBiConsumer<Request, ActionListener<Result>, Exception> inner,
                                                 Request request, ActionListener<Result> listener) {
        if (securityIndex.isIndexUpToDate() == false) {
            listener.onFailure(new IllegalStateException(
                "Security index is not on the current version - the native realm will not be operational until " +
                "the upgrade API is run on the security index"));
        } else {
            try {
                inner.accept(request, ActionListener.wrap(r -> refreshRealms(listener, r), listener::onFailure));
            } catch (Exception e) {
                logger.error(new ParameterizedMessage("failed to modify role-mapping [{}]", name), e);
                listener.onFailure(e);
            }
        }
    }

    private void innerPutMapping(PutRoleMappingRequest request, ActionListener<Boolean> listener) {
        final ExpressionRoleMapping mapping = request.getMapping();
        securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            final XContentBuilder xContentBuilder;
            try {
                xContentBuilder = mapping.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS, true);
            } catch (IOException e) {
                listener.onFailure(e);
                return;
            }
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                client.prepareIndex(SECURITY_MAIN_ALIAS).setId(getIdForName(mapping.getName()))
                    .setSource(xContentBuilder)
                    .setRefreshPolicy(request.getRefreshPolicy())
                    .request(),
                new ActionListener<IndexResponse>() {
                    @Override
                    public void onResponse(IndexResponse indexResponse) {
                        boolean created = indexResponse.getResult() == CREATED;
                        listener.onResponse(created);
                    }

                    @Override
                    public void onFailure(Exception e) {
                        logger.error(new ParameterizedMessage("failed to put role-mapping [{}]", mapping.getName()), e);
                        listener.onFailure(e);
                    }
                }, client::index);
        });
    }

    private void innerDeleteMapping(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) {
        // Freeze the index manager so that all checks below see one consistent state.
        final SecurityIndexManager frozenSecurityIndex = securityIndex.freeze();
        if (frozenSecurityIndex.indexExists() == false) {
            listener.onResponse(false);
        } else if (frozenSecurityIndex.isAvailable() == false) {
            // BUG FIX: this previously checked the live securityIndex, not the frozen
            // snapshot taken above, so the availability check and the unavailable-reason
            // could reflect different states.
            listener.onFailure(frozenSecurityIndex.getUnavailableReason());
        } else {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareDelete(SECURITY_MAIN_ALIAS, getIdForName(request.getName()))
                        .setRefreshPolicy(request.getRefreshPolicy())
                        .request(),
                    new ActionListener<DeleteResponse>() {
                        @Override
                        public void onResponse(DeleteResponse deleteResponse) {
                            boolean deleted = deleteResponse.getResult() == DELETED;
                            listener.onResponse(deleted);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            logger.error(new ParameterizedMessage("failed to delete role-mapping [{}]", request.getName()), e);
                            listener.onFailure(e);
                        }
                    }, client::delete);
            });
        }
    }

    /**
     * Retrieves one or more mappings from the index.
     * If <code>names</code> is <code>null</code> or {@link Set#isEmpty empty}, then this retrieves all mappings.
     * Otherwise it retrieves the specified mappings by name.
     */
    public void getRoleMappings(Set<String> names, ActionListener<List<ExpressionRoleMapping>> listener) {
        if (names == null || names.isEmpty()) {
            getMappings(listener);
        } else {
            getMappings(listener.delegateFailure((l, mappings) -> {
                final List<ExpressionRoleMapping> filtered = mappings.stream()
                    .filter(m -> names.contains(m.getName()))
                    .collect(Collectors.toList());
                l.onResponse(filtered);
            }));
        }
    }

    private void getMappings(ActionListener<List<ExpressionRoleMapping>> listener) {
        if (securityIndex.isAvailable()) {
            loadMappings(listener);
        } else {
            // An unavailable index is treated as "no mappings", not as an error.
            logger.info("The security index is not yet available - no role mappings can be loaded");
            if (logger.isDebugEnabled()) {
                logger.debug("Security Index [{}] [exists: {}] [available: {}]",
                    SECURITY_MAIN_ALIAS,
                    securityIndex.indexExists(),
                    securityIndex.isAvailable()
                );
            }
            listener.onResponse(Collections.emptyList());
        }
    }

    /**
     * Provides usage statistics for this store.
     * The resulting map contains the keys
     * <ul>
     * <li><code>size</code> - The total number of mappings stored in the index</li>
     * <li><code>enabled</code> - The number of mappings that are
     * {@link ExpressionRoleMapping#isEnabled() enabled}</li>
     * </ul>
     */
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        if (securityIndex.isAvailable() == false) {
            reportStats(listener, Collections.emptyList());
        } else {
            getMappings(ActionListener.wrap(mappings -> reportStats(listener, mappings), listener::onFailure));
        }
    }

    private void reportStats(ActionListener<Map<String, Object>> listener, List<ExpressionRoleMapping> mappings) {
        Map<String, Object> usageStats = new HashMap<>();
        usageStats.put("size", mappings.size());
        usageStats.put("enabled", mappings.stream().filter(ExpressionRoleMapping::isEnabled).count());
        listener.onResponse(usageStats);
    }

    /** Clears realm caches when the security index changes in a way that may affect mappings. */
    public void onSecurityIndexStateChange(SecurityIndexManager.State previousState, SecurityIndexManager.State currentState) {
        if (isMoveFromRedToNonRed(previousState, currentState)
            || isIndexDeleted(previousState, currentState)
            || Objects.equals(previousState.indexUUID, currentState.indexUUID) == false
            || previousState.isIndexUpToDate != currentState.isIndexUpToDate) {
            refreshRealms(NO_OP_ACTION_LISTENER, null);
        }
    }

    /**
     * Clears the caches of all registered realms (best effort), then forwards
     * {@code result} to the listener. When no realms are registered this is a no-op.
     */
    private <Result> void refreshRealms(ActionListener<Result> listener, Result result) {
        if (realmsToRefresh.isEmpty()) {
            listener.onResponse(result);
            return;
        }
        final String[] realmNames = this.realmsToRefresh.toArray(Strings.EMPTY_ARRAY);
        executeAsyncWithOrigin(client, SECURITY_ORIGIN, ClearRealmCacheAction.INSTANCE, new ClearRealmCacheRequest().realms(realmNames),
            ActionListener.wrap(
                response -> {
                    logger.debug((org.apache.logging.log4j.util.Supplier<?>) () -> new ParameterizedMessage(
                        "Cleared cached in realms [{}] due to role mapping change", Arrays.toString(realmNames)));
                    listener.onResponse(result);
                },
                ex -> {
                    logger.warn(new ParameterizedMessage("Failed to clear cache for realms [{}]", Arrays.toString(realmNames)), ex);
                    listener.onFailure(ex);
                }));
    }

    @Override
    public void resolveRoles(UserData user, ActionListener<Set<String>> listener) {
        getRoleMappings(null, ActionListener.wrap(
            mappings -> {
                final ExpressionModel model = user.asModel();
                // A role is granted when any enabled mapping's expression matches the user.
                final Set<String> roles = mappings.stream()
                    .filter(ExpressionRoleMapping::isEnabled)
                    .filter(m -> m.getExpression().match(model))
                    .flatMap(m -> {
                        final Set<String> roleNames = m.getRoleNames(scriptService, model);
                        logger.trace("Applying role-mapping [{}] to user-model [{}] produced role-names [{}]",
                            m.getName(), model, roleNames);
                        return roleNames.stream();
                    })
                    .collect(Collectors.toSet());
                logger.debug("Mapping user [{}] to roles [{}]", user, roles);
                listener.onResponse(roles);
            }, listener::onFailure
        ));
    }

    /**
     * Indicates that the provided realm should have its cache cleared if this store is updated
     * (that is, {@link #putRoleMapping(PutRoleMappingRequest, ActionListener)} or
     * {@link #deleteRoleMapping(DeleteRoleMappingRequest, ActionListener)} are called).
     * @see ClearRealmCacheAction
     */
    @Override
    public void refreshRealmOnChange(CachingRealm realm) {
        realmsToRefresh.add(realm.name());
    }
}
| apache-2.0 |
arturog8m/ocs | bundle/edu.gemini.spdb.reports.collection/src/main/java/edu/gemini/spdb/reports/IQueryManager.java | 699 | package edu.gemini.spdb.reports;
import java.util.List;
import edu.gemini.pot.spdb.IDBDatabaseService;
/**
* Interface for a service that can create and execute queries.
* @author rnorris
*/
public interface IQueryManager {
/**
* Creates a new IQuery on the specified table.
* @param table the table the new query will select from
* @return a new query
*/
IQuery createQuery(ITable table);
/**
* Runs the specified query on the specified database, returning a
* list of rows. The passed query must have been created by this
* query manager via createQuery().
* @param query a query created by this query manager
* @param dbs a database
* @return the rows produced by executing the query
*/
List<IRow> runQuery(IQuery query, IDBDatabaseService dbs);
}
| bsd-3-clause |
google/error-prone-javac | src/jdk.javadoc/share/classes/com/sun/tools/javadoc/main/WildcardTypeImpl.java | 4734 | /*
* Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.javadoc.main;
import com.sun.javadoc.*;
import com.sun.tools.javac.code.Symbol.ClassSymbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.util.List;
/**
* Implementation of <code>WildcardType</code>, which
* represents a wildcard type.
*
* <p><b>This is NOT part of any supported API.
* If you write code that depends on this, you do so at your own risk.
* This code and its internal interfaces are subject to change or
* deletion without notice.</b>
*
* @author Scott Seligman
* @since 1.5
*/
@Deprecated
public class WildcardTypeImpl extends AbstractTypeImpl implements WildcardType {

    WildcardTypeImpl(DocEnv env, Type.WildcardType type) {
        super(env, type);
    }

    /**
     * Returns the explicit upper bounds of this wildcard as written in its
     * <i>extends</i> clause, or an empty array when none were given.
     */
    public com.sun.javadoc.Type[] extendsBounds() {
        return TypeMaker.getTypes(env, getExtendsBounds((Type.WildcardType) type));
    }

    /**
     * Returns the explicit lower bounds of this wildcard as written in its
     * <i>super</i> clause, or an empty array when none were given.
     */
    public com.sun.javadoc.Type[] superBounds() {
        return TypeMaker.getTypes(env, getSuperBounds((Type.WildcardType) type));
    }

    /**
     * Returns the ClassDoc corresponding to the erasure of this wildcard type.
     */
    @Override
    public ClassDoc asClassDoc() {
        return env.getClassDoc((ClassSymbol) env.types.erasure(type).tsym);
    }

    @Override
    public WildcardType asWildcardType() {
        return this;
    }

    @Override
    public String typeName() {
        return "?";
    }

    @Override
    public String qualifiedTypeName() {
        return "?";
    }

    @Override
    public String simpleTypeName() {
        return "?";
    }

    @Override
    public String toString() {
        return wildcardTypeToString(env, (Type.WildcardType) type, true);
    }

    /**
     * Renders a wildcard type as "?" followed by its "extends" or "super"
     * clause, if any. No surrounding brackets are produced. Class names are
     * fully qualified when {@code full} is true.
     */
    static String wildcardTypeToString(DocEnv env,
                                       Type.WildcardType wildcard, boolean full) {
        if (env.legacyDoclet) {
            return TypeMaker.getTypeName(env.types.erasure(wildcard), full);
        }
        // Decide which clause applies: prefer "extends", fall back to "super".
        List<Type> bounds = getExtendsBounds(wildcard);
        String clause = " extends ";
        if (bounds.isEmpty()) {
            bounds = getSuperBounds(wildcard);
            clause = " super ";
        }
        StringBuilder result = new StringBuilder("?");
        if (bounds.nonEmpty()) {
            result.append(clause);
        }
        boolean needSeparator = false; // currently only one bound is allowed
        for (Type bound : bounds) {
            if (needSeparator) {
                result.append(" & ");
            }
            result.append(TypeMaker.getTypeString(env, bound, full));
            needSeparator = true;
        }
        return result.toString();
    }

    private static List<Type> getExtendsBounds(Type.WildcardType wild) {
        if (wild.isSuperBound()) {
            return List.nil();
        }
        return List.of(wild.type);
    }

    private static List<Type> getSuperBounds(Type.WildcardType wild) {
        if (wild.isExtendsBound()) {
            return List.nil();
        }
        return List.of(wild.type);
    }
}
| gpl-2.0 |
RodrigoRubino/DC-UFSCar-ES2-201601-Grupo-Brainstorm | src/main/java/net/sf/jabref/importer/EntryFromFileCreator.java | 6867 | /* Copyright (C) 2003-2015 JabRef contributors.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.importer;
import java.io.File;
import java.io.FileFilter;
import java.util.List;
import java.util.Optional;
import java.util.StringTokenizer;
import net.sf.jabref.Globals;
import net.sf.jabref.JabRefGUI;
import net.sf.jabref.external.ExternalFileType;
import net.sf.jabref.external.ExternalFileTypes;
import net.sf.jabref.gui.FileListEntry;
import net.sf.jabref.gui.FileListTableModel;
import net.sf.jabref.logic.util.io.FileUtil;
import net.sf.jabref.model.entry.BibEntry;
/**
* The interface EntryFromFileCreator does twice: <br>
* On the one hand, it defines a set of files, which it can deal with, on the
* other hand it provides the functionality to create a Bibtex entry out of a
* file. The interface extends the java.io.FileFilter to inherit a common way of
* defining file sets.
*
* @author Dan&Nosh
* @version 25.11.2008 | 23:39:03
*
*/
public abstract class EntryFromFileCreator implements FileFilter {

    protected final ExternalFileType externalFileType;

    private static final int MIN_PATH_TOKEN_LENGTH = 4;

    /**
     * Creates a new creator backed by the given external file type.
     * Subclasses must supply the {@link ExternalFileType} they build on.
     *
     * @param externalFileType the file type this creator handles
     */
    EntryFromFileCreator(ExternalFileType externalFileType) {
        this.externalFileType = externalFileType;
    }

    protected abstract Optional<BibEntry> createBibtexEntry(File f);

    /**
     * <p>
     * To support platform independence, a creator must define what types of
     * files it accepts on it's own.
     * </p>
     * <p>
     * Basically, accepting files which end with the file extension that is
     * described in the nested {@link #externalFileType} would work on windows
     * systems. This is also the recommended criterion, on which files should be
     * accepted.
     * </p>
     * <p>
     * However, defining what types of files this creator accepts, is a property
     * of <i>entry creators</i>, that is left to the user.
     * </p>
     */
    @Override
    public abstract boolean accept(File f);

    /**
     * Name of this import format.
     *
     * <p>
     * The name must be unique.
     * </p>
     *
     * @return format name, must be unique and not <code>null</code>
     */
    public abstract String getFormatName();

    /**
     * Builds a BibEntry describing the given file.
     *
     * @param f the file to describe; may be null or non-existent
     * @param addPathTokensAsKeywords whether path segments are stored as keywords
     * @return the created entry, or an empty Optional when no entry can be made
     */
    public Optional<BibEntry> createEntry(File f, boolean addPathTokensAsKeywords) {
        if ((f == null) || !f.exists()) {
            return Optional.empty();
        }
        Optional<BibEntry> created = createBibtexEntry(f);
        if (created.isPresent()) {
            BibEntry entry = created.get();
            if (addPathTokensAsKeywords) {
                appendToField(entry, "keywords", extractPathesToKeyWordsfield(f.getAbsolutePath()));
            }
            if (!entry.hasField("title")) {
                entry.setField("title", f.getName());
            }
            addFileInfo(entry, f);
        }
        return created;
    }

    /** Returns the ExternalFileType that is imported here */
    public ExternalFileType getExternalFileType() {
        return externalFileType;
    }

    /**
     * Turns the directory segments of an absolute path into a comma separated
     * keyword string in JabRef's format. The file name itself (the last
     * segment) and any segment shorter than MIN_PATH_TOKEN_LENGTH characters
     * are skipped.
     *
     * @param absolutePath the absolute path to a file
     * @return the keyword string, possibly empty
     */
    private static String extractPathesToKeyWordsfield(String absolutePath) {
        StringBuilder keywords = new StringBuilder();
        StringTokenizer tokenizer = new StringTokenizer(absolutePath, String.valueOf(File.separatorChar));
        // One-token lookahead: "current" is only processed once we know a
        // further token exists, which keeps the file name out of the keywords.
        String current = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : null;
        while (tokenizer.hasMoreTokens()) {
            String next = tokenizer.nextToken();
            if (current.length() >= MIN_PATH_TOKEN_LENGTH) {
                if (keywords.length() > 0) {
                    // TODO: find Jabref constant for delimter
                    keywords.append(',');
                }
                keywords.append(current);
            }
            current = next;
        }
        return keywords.toString();
    }

    // Attaches a file-list field entry pointing at the (possibly shortened) file path.
    private void addFileInfo(BibEntry entry, File file) {
        Optional<ExternalFileType> fileType = ExternalFileTypes.getInstance()
                .getExternalFileTypeByExt(externalFileType.getFieldName());
        List<String> fileDirectories = JabRefGUI.getMainFrame().getCurrentBasePanel().getBibDatabaseContext().getFileDirectory();
        File shortened = FileUtil.shortenFileName(file, fileDirectories);
        FileListEntry listEntry = new FileListEntry("", shortened.getPath(), fileType);
        FileListTableModel tableModel = new FileListTableModel();
        tableModel.addEntry(0, listEntry);
        entry.setField(Globals.FILE_FIELD, tableModel.getStringRepresentation());
    }

    // Appends a value to a field, creating the field if absent and avoiding duplicates.
    protected void appendToField(BibEntry entry, String field, String value) {
        if ((value == null) || value.isEmpty()) {
            return;
        }
        String existing = entry.getField(field);
        if (existing == null) {
            entry.setField(field, value);
        } else if (!existing.contains(value)) {
            // TODO: find Jabref constant for delimter
            entry.setField(field, existing + "," + value);
        }
    }

    // Merges the field data of every entry in the list into the target entry.
    protected void addEntrysToEntry(BibEntry entry, List<BibEntry> entrys) {
        if (entrys == null) {
            return;
        }
        for (BibEntry source : entrys) {
            addEntryDataToEntry(entry, source);
        }
    }

    // Appends every field of "e" to the corresponding field of "entry".
    protected void addEntryDataToEntry(BibEntry entry, BibEntry e) {
        for (String field : e.getFieldNames()) {
            appendToField(entry, field, e.getField(field));
        }
    }

    @Override
    public String toString() {
        if (externalFileType == null) {
            return "(undefined)";
        }
        return externalFileType.getName() + " (." + externalFileType.getExtension() + ")";
    }
}
| gpl-2.0 |
kkoop64/oStorybook | src/storybook/model/hbn/dao/SceneDAO.java | 918 | /*
Storybook: Open Source software for novelists and authors.
Copyright (C) 2008 - 2012 Martin Mustun
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package storybook.model.hbn.dao;
import storybook.model.hbn.entity.Scene;
import com.googlecode.genericdao.dao.hibernate.GenericDAO;
/** Generic DAO for {@link Scene} entities, keyed by their Long database id. */
public interface SceneDAO extends GenericDAO<Scene, Long> {
}
| gpl-3.0 |
xasx/wildfly | testsuite/integration/web/src/test/java/org/jboss/as/test/integration/web/session/SessionPersistenceTestCase.java | 3385 | /*
* JBoss, Home of Professional Open Source.
* Copyright (c) 2011, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.test.integration.web.session;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.jboss.arquillian.container.test.api.Deployer;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.jboss.as.test.shared.TestSuiteEnvironment;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
/**
*/
@RunWith(Arquillian.class)
@RunAsClient
@Ignore
public class SessionPersistenceTestCase {

    @ArquillianResource
    public Deployer deployer;

    /** Unmanaged deployment exercised by the test; deployed/undeployed explicitly. */
    @Deployment(name = "web", managed = false, testable = false)
    public static Archive<?> dependent() {
        return ShrinkWrap.create(WebArchive.class, "sessionPersistence.war")
                .addClasses(SessionTestServlet.class);
    }

    /**
     * Verifies that the session counter keeps incrementing across an
     * undeploy/redeploy cycle, i.e. that the session survives the restart.
     */
    @Test
    public void testLifeCycle() throws Exception {
        try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
            HttpGet request = new HttpGet("http://" + TestSuiteEnvironment.getServerAddress()
                    + ":8080/sessionPersistence/SessionPersistenceServlet");
            deployer.deploy("web");
            // First deployment: counter starts at 0 and increments per request.
            for (int expected = 0; expected <= 2; expected++) {
                assertEquals(String.valueOf(expected), runGet(request, httpClient));
            }
            deployer.undeploy("web");
            deployer.deploy("web");
            // After redeploy the counter must continue where it left off.
            for (int expected = 3; expected <= 4; expected++) {
                assertEquals(String.valueOf(expected), runGet(request, httpClient));
            }
        }
    }

    // Executes the GET request and returns the response body as a string.
    private String runGet(HttpGet get, HttpClient client) throws IOException {
        HttpResponse response = client.execute(get);
        return EntityUtils.toString(response.getEntity());
    }
}
| lgpl-2.1 |
xasx/wildfly | connector/src/main/java/org/jboss/as/connector/subsystems/jca/Attribute.java | 2735 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2012, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.connector.subsystems.jca;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* An Attribute.
* @author <a href="mailto:stefano.maestri@redhat.com">Stefano Maestri</a>
*/
/**
 * XML attribute names understood by the JCA subsystem parser. Each constant
 * carries the attribute's local name as it appears in the configuration schema;
 * {@link #UNKNOWN} is the fallback for unrecognized names.
 *
 * @author <a href="mailto:stefano.maestri@redhat.com">Stefano Maestri</a>
 */
public enum Attribute {
    /** Fallback for unrecognized attribute names; always the first constant. */
    UNKNOWN(null),
    ENABLED("enabled"),
    /**
     * fail-on-error attribute
     */
    FAIL_ON_ERROR("fail-on-error"),
    /**
     * fail-on-warn attribute
     */
    FAIL_ON_WARN("fail-on-warn"),
    SHORT_RUNNING_THREAD_POOL("short-running-thread-pool"),
    LONG_RUNNING_THREAD_POOL("long-running-thread-pool"),
    DEBUG("debug"),
    ERROR("error"),
    // NOTE: constant name carries a historical typo ("CONNECHIONS"); it is kept
    // as-is for source compatibility — the XML local name below is correct.
    IGNORE_UNKNOWN_CONNECHIONS("ignore-unknown-connections"),
    NAME("name"),
    WORKMANAGER("workmanager"),
    JGROUPS_STACK("jgroups-stack"),
    JGROUPS_CLUSTER("jgroups-cluster"),
    REQUEST_TIMEOUT("request-timeout");

    /** XML local name of the attribute; null only for {@link #UNKNOWN}. */
    private final String name;

    Attribute(final String name) {
        this.name = name;
    }

    /**
     * Get the local name of this element.
     * @return the local name, or null for {@link #UNKNOWN}
     */
    public String getLocalName() {
        return name;
    }

    /** Immutable lookup table from local name to constant (UNKNOWN has no entry). */
    private static final Map<String, Attribute> MAP;
    static {
        final Map<String, Attribute> map = new HashMap<>();
        for (Attribute element : values()) {
            final String localName = element.getLocalName();
            if (localName != null) {
                map.put(localName, element);
            }
        }
        // Wrap so the shared table can never be mutated after class init.
        MAP = Collections.unmodifiableMap(map);
    }

    /**
     * Looks up the constant for an XML local name.
     *
     * @param localName attribute name from the document; may be null
     * @return the matching constant, or {@link #UNKNOWN} if there is none
     */
    public static Attribute forName(String localName) {
        final Attribute element = MAP.get(localName);
        return element == null ? UNKNOWN : element;
    }

    @Override
    public String toString() {
        return getLocalName();
    }
}
| lgpl-2.1 |
MatthiasMann/EnderIO | src/main/java/crazypants/enderio/machine/painter/BlockItemPaintedFence.java | 936 | package crazypants.enderio.machine.painter;
import java.util.List;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import crazypants.enderio.EnderIO;
/**
 * Item form of the painted fence block. Sub-types are enabled so the damage
 * value (metadata) distinguishes the different paint sources.
 */
public class BlockItemPaintedFence extends ItemBlock {

  public BlockItemPaintedFence(Block block) {
    super(block);
    setHasSubtypes(true);
  }

  /** Convenience constructor bound to the EnderIO painted fence block. */
  public BlockItemPaintedFence() {
    this(EnderIO.blockPaintedFence);
  }

  /** Damage value maps straight through to block metadata. */
  @Override
  public int getMetadata(int damage) {
    return damage;
  }

  /** Appends the paint-source line to the item's tooltip (client only). */
  @Override
  @SideOnly(Side.CLIENT)
  public void addInformation(ItemStack stack, EntityPlayer player, List tooltip, boolean advanced) {
    super.addInformation(stack, player, tooltip, advanced);
    tooltip.add(PainterUtil.getTooltTipText(stack));
  }
}
| unlicense |
Drifftr/devstudio-tooling-bps | plugins/org.eclipse.bpel.ui.noEmbeddedEditors/src/org/eclipse/bpel/ui/actions/editpart/CreateOnAlarmAction.java | 2364 | /*******************************************************************************
* Copyright (c) 2005, 2012 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.bpel.ui.actions.editpart;
import org.eclipse.bpel.model.BPELPackage;
import org.eclipse.bpel.ui.BPELEditor;
import org.eclipse.bpel.ui.BPELUIPlugin;
import org.eclipse.bpel.ui.IBPELUIConstants;
import org.eclipse.bpel.ui.Messages;
import org.eclipse.bpel.ui.commands.CompoundCommand;
import org.eclipse.bpel.ui.commands.InsertInContainerCommand;
import org.eclipse.bpel.ui.commands.SetNameAndDirectEditCommand;
import org.eclipse.bpel.ui.factories.UIObjectFactoryProvider;
import org.eclipse.bpel.ui.util.ModelHelper;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.gef.EditPart;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.graphics.Image;
public class CreateOnAlarmAction extends AbstractAction {
public CreateOnAlarmAction(EditPart editPart) {
super(editPart);
}
public ImageDescriptor getIcon() {
return BPELUIPlugin.INSTANCE.getImageDescriptor(IBPELUIConstants.ICON_ACTION_ONALARM);
}
public Image getIconImg() {
return BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_ACTION_ONALARM);
}
public boolean onButtonPressed() {
CompoundCommand command = new CompoundCommand();
EObject child = UIObjectFactoryProvider.getInstance().getFactoryFor(
BPELPackage.eINSTANCE.getOnAlarm()).createInstance();
command.add(new InsertInContainerCommand((EObject)modelObject, child, null));
command.add(new SetNameAndDirectEditCommand(child, viewer));
BPELEditor bpelEditor = ModelHelper.getBPELEditor(modelObject);
bpelEditor.getCommandStack().execute(command);
return true;
}
public String getToolTip() {
return Messages.CreateOnAlarmAction_Add_OnAlarm_1;
}
@Override
public ImageDescriptor getDisabledIcon() { return ImageDescriptor.getMissingImageDescriptor(); }
@Override
public boolean isEnabled() { return true; }
} | apache-2.0 |
hequn8128/flink | flink-java/src/test/java/org/apache/flink/api/java/utils/AbstractParameterToolTest.java | 22786 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.java.utils;
import org.apache.flink.api.java.ClosureCleaner;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.InstantiationUtil;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
/**
* Base class for tests for {@link ParameterTool}.
*/
/**
 * Base class for tests for {@link ParameterTool}.
 *
 * <p>Subclasses provide the concrete {@link AbstractParameterTool} under test via
 * {@link #createParameterToolFromArgs(String[])}. The "unrequested" tests verify
 * that every parameter is reported as unrequested until its first access.
 */
public abstract class AbstractParameterToolTest {

	@Rule
	public TemporaryFolder tmp = new TemporaryFolder();

	@Rule
	public final ExpectedException exception = ExpectedException.none();

	// Test parser

	@Test
	public void testThrowExceptionIfParameterIsNotPrefixed() {
		exception.expect(IllegalArgumentException.class);
		exception.expectMessage("Error parsing arguments '[a]' on 'a'. Please prefix keys with -- or -.");
		createParameterToolFromArgs(new String[]{"a"});
	}

	@Test
	public void testNoVal() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-berlin"});
		Assert.assertEquals(1, parameter.getNumberOfParameters());
		Assert.assertTrue(parameter.has("berlin"));
	}

	@Test
	public void testNoValDouble() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"--berlin"});
		Assert.assertEquals(1, parameter.getNumberOfParameters());
		Assert.assertTrue(parameter.has("berlin"));
	}

	@Test
	public void testMultipleNoVal() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"--a", "--b", "--c", "--d", "--e", "--f"});
		Assert.assertEquals(6, parameter.getNumberOfParameters());
		Assert.assertTrue(parameter.has("a"));
		Assert.assertTrue(parameter.has("b"));
		Assert.assertTrue(parameter.has("c"));
		Assert.assertTrue(parameter.has("d"));
		Assert.assertTrue(parameter.has("e"));
		Assert.assertTrue(parameter.has("f"));
	}

	@Test
	public void testMultipleNoValMixed() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"--a", "-b", "-c", "-d", "--e", "--f"});
		Assert.assertEquals(6, parameter.getNumberOfParameters());
		Assert.assertTrue(parameter.has("a"));
		Assert.assertTrue(parameter.has("b"));
		Assert.assertTrue(parameter.has("c"));
		Assert.assertTrue(parameter.has("d"));
		Assert.assertTrue(parameter.has("e"));
		Assert.assertTrue(parameter.has("f"));
	}

	@Test
	public void testEmptyVal() {
		exception.expect(IllegalArgumentException.class);
		exception.expectMessage("The input [--a, -b, --] contains an empty argument");
		createParameterToolFromArgs(new String[]{"--a", "-b", "--"});
	}

	@Test
	public void testEmptyValShort() {
		exception.expect(IllegalArgumentException.class);
		exception.expectMessage("The input [--a, -b, -] contains an empty argument");
		createParameterToolFromArgs(new String[]{"--a", "-b", "-"});
	}

	// Test unrequested
	// Boolean

	@Test
	public void testUnrequestedBoolean() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-boolean", "true"});
		Assert.assertEquals(createHashSet("boolean"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertTrue(parameter.getBoolean("boolean"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertTrue(parameter.getBoolean("boolean"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedBooleanWithDefaultValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-boolean", "true"});
		Assert.assertEquals(createHashSet("boolean"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertTrue(parameter.getBoolean("boolean", false));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertTrue(parameter.getBoolean("boolean", false));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedBooleanWithMissingValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-boolean"});
		Assert.assertEquals(createHashSet("boolean"), parameter.getUnrequestedParameters());
		parameter.getBoolean("boolean");
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	// Byte

	@Test
	public void testUnrequestedByte() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-byte", "1"});
		Assert.assertEquals(createHashSet("byte"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(1, parameter.getByte("byte"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(1, parameter.getByte("byte"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedByteWithDefaultValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-byte", "1"});
		Assert.assertEquals(createHashSet("byte"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(1, parameter.getByte("byte", (byte) 0));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(1, parameter.getByte("byte", (byte) 0));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedByteWithMissingValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-byte"});
		Assert.assertEquals(createHashSet("byte"), parameter.getUnrequestedParameters());
		exception.expect(RuntimeException.class);
		exception.expectMessage("For input string: \"__NO_VALUE_KEY\"");
		parameter.getByte("byte");
	}

	// Short

	@Test
	public void testUnrequestedShort() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-short", "2"});
		Assert.assertEquals(createHashSet("short"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(2, parameter.getShort("short"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(2, parameter.getShort("short"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedShortWithDefaultValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-short", "2"});
		Assert.assertEquals(createHashSet("short"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(2, parameter.getShort("short", (short) 0));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(2, parameter.getShort("short", (short) 0));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedShortWithMissingValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-short"});
		Assert.assertEquals(createHashSet("short"), parameter.getUnrequestedParameters());
		exception.expect(RuntimeException.class);
		exception.expectMessage("For input string: \"__NO_VALUE_KEY\"");
		parameter.getShort("short");
	}

	// Int

	@Test
	public void testUnrequestedInt() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-int", "4"});
		Assert.assertEquals(createHashSet("int"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(4, parameter.getInt("int"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(4, parameter.getInt("int"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedIntWithDefaultValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-int", "4"});
		Assert.assertEquals(createHashSet("int"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(4, parameter.getInt("int", 0));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(4, parameter.getInt("int", 0));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedIntWithMissingValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-int"});
		Assert.assertEquals(createHashSet("int"), parameter.getUnrequestedParameters());
		exception.expect(RuntimeException.class);
		exception.expectMessage("For input string: \"__NO_VALUE_KEY\"");
		parameter.getInt("int");
	}

	// Long

	@Test
	public void testUnrequestedLong() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-long", "8"});
		Assert.assertEquals(createHashSet("long"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(8, parameter.getLong("long"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(8, parameter.getLong("long"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedLongWithDefaultValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-long", "8"});
		Assert.assertEquals(createHashSet("long"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(8, parameter.getLong("long", 0));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(8, parameter.getLong("long", 0));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedLongWithMissingValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-long"});
		Assert.assertEquals(createHashSet("long"), parameter.getUnrequestedParameters());
		exception.expect(RuntimeException.class);
		exception.expectMessage("For input string: \"__NO_VALUE_KEY\"");
		parameter.getLong("long");
	}

	// Float

	@Test
	public void testUnrequestedFloat() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-float", "4"});
		Assert.assertEquals(createHashSet("float"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(4.0, parameter.getFloat("float"), 0.00001);
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(4.0, parameter.getFloat("float"), 0.00001);
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedFloatWithDefaultValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-float", "4"});
		Assert.assertEquals(createHashSet("float"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(4.0, parameter.getFloat("float", 0.0f), 0.00001);
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(4.0, parameter.getFloat("float", 0.0f), 0.00001);
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedFloatWithMissingValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-float"});
		Assert.assertEquals(createHashSet("float"), parameter.getUnrequestedParameters());
		exception.expect(RuntimeException.class);
		exception.expectMessage("For input string: \"__NO_VALUE_KEY\"");
		parameter.getFloat("float");
	}

	// Double

	@Test
	public void testUnrequestedDouble() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-double", "8"});
		Assert.assertEquals(createHashSet("double"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(8.0, parameter.getDouble("double"), 0.00001);
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(8.0, parameter.getDouble("double"), 0.00001);
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedDoubleWithDefaultValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-double", "8"});
		Assert.assertEquals(createHashSet("double"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals(8.0, parameter.getDouble("double", 0.0), 0.00001);
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals(8.0, parameter.getDouble("double", 0.0), 0.00001);
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedDoubleWithMissingValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-double"});
		Assert.assertEquals(createHashSet("double"), parameter.getUnrequestedParameters());
		exception.expect(RuntimeException.class);
		exception.expectMessage("For input string: \"__NO_VALUE_KEY\"");
		parameter.getDouble("double");
	}

	// String

	@Test
	public void testUnrequestedString() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-string", "∞"});
		Assert.assertEquals(createHashSet("string"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals("∞", parameter.get("string"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals("∞", parameter.get("string"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedStringWithDefaultValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-string", "∞"});
		Assert.assertEquals(createHashSet("string"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals("∞", parameter.get("string", "0.0"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals("∞", parameter.get("string", "0.0"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedStringWithMissingValue() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-string"});
		Assert.assertEquals(createHashSet("string"), parameter.getUnrequestedParameters());
		parameter.get("string");
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	// Additional methods

	@Test
	public void testUnrequestedHas() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-boolean"});
		Assert.assertEquals(createHashSet("boolean"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertTrue(parameter.has("boolean"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertTrue(parameter.has("boolean"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedRequired() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-required", "∞"});
		Assert.assertEquals(createHashSet("required"), parameter.getUnrequestedParameters());

		// test parameter access
		Assert.assertEquals("∞", parameter.getRequired("required"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// test repeated access
		Assert.assertEquals("∞", parameter.getRequired("required"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedMultiple() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{"-boolean", "true", "-byte", "1",
			"-short", "2", "-int", "4", "-long", "8", "-float", "4.0", "-double", "8.0", "-string", "∞"});
		Assert.assertEquals(createHashSet("boolean", "byte", "short", "int", "long", "float", "double", "string"),
			parameter.getUnrequestedParameters());

		// each access shrinks the unrequested set by exactly the accessed key
		Assert.assertTrue(parameter.getBoolean("boolean"));
		Assert.assertEquals(createHashSet("byte", "short", "int", "long", "float", "double", "string"),
			parameter.getUnrequestedParameters());

		Assert.assertEquals(1, parameter.getByte("byte"));
		Assert.assertEquals(createHashSet("short", "int", "long", "float", "double", "string"),
			parameter.getUnrequestedParameters());

		Assert.assertEquals(2, parameter.getShort("short"));
		Assert.assertEquals(createHashSet("int", "long", "float", "double", "string"),
			parameter.getUnrequestedParameters());

		Assert.assertEquals(4, parameter.getInt("int"));
		Assert.assertEquals(createHashSet("long", "float", "double", "string"),
			parameter.getUnrequestedParameters());

		Assert.assertEquals(8, parameter.getLong("long"));
		Assert.assertEquals(createHashSet("float", "double", "string"),
			parameter.getUnrequestedParameters());

		Assert.assertEquals(4.0, parameter.getFloat("float"), 0.00001);
		Assert.assertEquals(createHashSet("double", "string"),
			parameter.getUnrequestedParameters());

		Assert.assertEquals(8.0, parameter.getDouble("double"), 0.00001);
		Assert.assertEquals(createHashSet("string"),
			parameter.getUnrequestedParameters());

		Assert.assertEquals("∞", parameter.get("string"));
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	@Test
	public void testUnrequestedUnknown() {
		AbstractParameterTool parameter = createParameterToolFromArgs(new String[]{});
		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());

		// accessing absent keys with defaults never populates the unrequested set
		Assert.assertTrue(parameter.getBoolean("boolean", true));
		Assert.assertEquals(0, parameter.getByte("byte", (byte) 0));
		Assert.assertEquals(0, parameter.getShort("short", (short) 0));
		Assert.assertEquals(0, parameter.getInt("int", 0));
		Assert.assertEquals(0, parameter.getLong("long", 0));
		Assert.assertEquals(0, parameter.getFloat("float", 0), 0.00001);
		Assert.assertEquals(0, parameter.getDouble("double", 0), 0.00001);
		Assert.assertEquals("0", parameter.get("string", "0"));

		Assert.assertEquals(Collections.emptySet(), parameter.getUnrequestedParameters());
	}

	/** Factory for the concrete parameter tool under test; subclasses override. */
	protected AbstractParameterTool createParameterToolFromArgs(String[] args) {
		return ParameterTool.fromArgs(args);
	}

	/** Convenience factory for a mutable {@link HashSet} from varargs. */
	@SafeVarargs
	protected static <T> Set<T> createHashSet(T... elements) {
		return new HashSet<>(Arrays.asList(elements));
	}

	/**
	 * Validates the given tool against the shared fixture expectations, both
	 * directly and after a serialization round-trip.
	 */
	protected void validate(AbstractParameterTool parameter) {
		ClosureCleaner.ensureSerializable(parameter);
		internalValidate(parameter);

		// -------- test behaviour after serialization ------------
		try {
			byte[] b = InstantiationUtil.serializeObject(parameter);
			final AbstractParameterTool copy = InstantiationUtil.deserializeObject(b, getClass().getClassLoader());
			internalValidate(copy);
		} catch (IOException | ClassNotFoundException e) {
			throw new RuntimeException(e);
		}
	}

	/** Shared assertions for the canonical fixture; also exercises type-specific extras. */
	private void internalValidate(AbstractParameterTool parameter) {
		Assert.assertEquals("myInput", parameter.getRequired("input"));
		Assert.assertEquals("myDefaultValue", parameter.get("output", "myDefaultValue"));
		Assert.assertNull(parameter.get("whatever"));
		Assert.assertEquals(15L, parameter.getLong("expectedCount", -1L));
		Assert.assertTrue(parameter.getBoolean("thisIsUseful", true));
		Assert.assertEquals(42, parameter.getByte("myDefaultByte", (byte) 42));
		Assert.assertEquals(42, parameter.getShort("myDefaultShort", (short) 42));

		if (parameter instanceof ParameterTool) {
			ParameterTool parameterTool = (ParameterTool) parameter;
			final Configuration config = parameterTool.getConfiguration();
			Assert.assertEquals(15L, config.getLong("expectedCount", -1L));

			final Properties props = parameterTool.getProperties();
			Assert.assertEquals("myInput", props.getProperty("input"));

			// -------- test the default file creation ------------
			try {
				final String pathToFile = tmp.newFile().getAbsolutePath();
				parameterTool.createPropertiesFile(pathToFile);
				final Properties defaultProps = new Properties();
				try (FileInputStream fis = new FileInputStream(pathToFile)) {
					defaultProps.load(fis);
				}

				Assert.assertEquals("myDefaultValue", defaultProps.get("output"));
				Assert.assertEquals("-1", defaultProps.get("expectedCount"));
				Assert.assertTrue(defaultProps.containsKey("input"));
			} catch (IOException e) {
				throw new RuntimeException(e);
			}
		} else if (parameter instanceof MultipleParameterTool) {
			MultipleParameterTool multipleParameterTool = (MultipleParameterTool) parameter;
			List<String> multiValues = Arrays.asList("multiValue1", "multiValue2");
			Assert.assertEquals(multiValues, multipleParameterTool.getMultiParameter("multi"));
			Assert.assertEquals(multiValues, multipleParameterTool.toMultiMap().get("multi"));
			// The last value is used.
			Assert.assertEquals("multiValue2", multipleParameterTool.toMap().get("multi"));
		}
	}
}
| apache-2.0 |
siosio/intellij-community | platform/platform-api/src/com/intellij/openapi/ui/TreeComboBox.java | 8911 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.ui;
import com.intellij.openapi.util.Iconable;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.ui.SimpleColoredComponent;
import com.intellij.ui.SimpleColoredRenderer;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.containers.Stack;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
public class TreeComboBox extends ComboBoxWithWidePopup {
private static final int INDENT = UIUtil.getTreeLeftChildIndent();
private TreeModel myTreeModel;
private final boolean myShowRootNode;
  /** Creates a combo box over {@code model}, showing the root node. */
  public TreeComboBox(@NotNull final TreeModel model) {
    this(model, true);
  }

  /** Creates a combo box over {@code model} with no default (empty-selection) text. */
  public TreeComboBox(@NotNull final TreeModel model, final boolean showRootNode) {
    this(model, showRootNode, null);
  }

  /**
   * Creates a combo box whose drop-down items are the tree's nodes in preorder.
   *
   * @param model        the tree model backing the drop-down
   * @param showRootNode whether the root node appears as a selectable item
   * @param defaultText  grayed text rendered when nothing is selected; may be null
   */
  public TreeComboBox(@NotNull final TreeModel model, final boolean showRootNode, final String defaultText) {
    myTreeModel = model;
    myShowRootNode = showRootNode;
    setModel(new TreeModelWrapper(myTreeModel, showRootNode));
    setRenderer(new TreeListCellRenderer(this, showRootNode, defaultText));
  }
  /** Replaces the backing model; the drop-down contents are rebuilt from it. */
  public void setTreeModel(@NotNull final TreeModel model, final boolean showRootNode) {
    myTreeModel = model;
    setModel(new TreeModelWrapper(model, showRootNode));
  }

  public TreeModel getTreeModel() {
    return myTreeModel;
  }

  /**
   * Builds a throwaway {@link JTree} over the same model (e.g. for measuring or
   * painting tree-style UI elsewhere).
   * NOTE(review): root visibility comes from the value captured at construction
   * (myShowRootNode is final), not from the last {@link #setTreeModel} call —
   * confirm that divergence is intended.
   */
  public JTree createFakeTree() {
    final JTree tree = new JTree(getTreeModel());
    tree.setRootVisible(myShowRootNode);
    return tree;
  }
private static final class TreeListCellRenderer extends SimpleColoredRenderer implements ListCellRenderer {
private static final Border SELECTION_PAINTER = (Border)UIManager.get("MenuItem.selectedBackgroundPainter");
private boolean mySelected;
private boolean myInList;
private final JComboBox myComboBox;
private boolean myChecked;
private boolean myEditable;
private final boolean myShowRootNode;
private final @NlsContexts.Label String myDefaultText;
private TreeListCellRenderer(@NotNull final JComboBox comboBox, final boolean showRootNode, @Nullable @NlsContexts.Label String defaultText) {
myComboBox = comboBox;
myShowRootNode = showRootNode;
myDefaultText = defaultText;
setOpaque(true);
}
private static Icon getValueIcon(final Object value, final int index) {
if (value instanceof CustomPresentation) {
return ((CustomPresentation)value).getIcon(index, 0);
}
if (value instanceof Iconable) {
return ((Iconable)value).getIcon(0);
}
return null;
}
private TreeModelWrapper getTreeModelWrapper() {
return (TreeModelWrapper)myComboBox.getModel();
}
/**
 * Renders one popup row (index >= 0) or the collapsed combo field itself
 * (index == -1), indenting popup rows according to their tree depth.
 */
@Override
public Component getListCellRendererComponent(final JList list,
                                              final Object value,
                                              final int index,
                                              final boolean isSelected,
                                              final boolean cellHasFocus) {
    clear();
    // index == -1 means we are painting the combo field, not a popup row.
    myInList = index >= 0;
    if (index >= 0) {
        Object obj1 = myComboBox.getItemAt(index);
        // Mark the row that corresponds to the currently selected item.
        myChecked = obj1 != null && obj1.equals(myComboBox.getSelectedItem());
    }
    else {
        myChecked = false;
    }
    int indent = 0;
    if (myInList) {
        // Indent by tree depth; a hidden root does not count toward the depth.
        final TreePath path = getTreeModelWrapper().getPathForRow(index);
        indent = (path.getPathCount() - 1 - (myShowRootNode ? 0 : 1)) * INDENT;
    }
    // NOTE(review): myEditable is read here but only refreshed a few lines
    // below, so this uses the value captured during the previous paint call —
    // confirm whether that one-call lag is intentional.
    setIpad(new Insets(1, !myInList || myEditable ? 5 : 5 + indent, 1, 5));
    setIcon(getValueIcon(value, index));
    setIconOpaque(true);
    myEditable = myComboBox.isEditable();
    setForeground(isSelected ? list.getSelectionForeground() : list.getForeground());
    setBackground(isSelected ? list.getSelectionBackground() : list.getBackground());
    if (value instanceof CustomPresentation) {
        // Custom values append their own text/attributes.
        ((CustomPresentation)value).append(this, index);
    } else {
        if (value == null) {
            if (index == -1 && myDefaultText != null) {
                // Empty selection in the combo field: show the placeholder.
                append(myDefaultText, SimpleTextAttributes.GRAY_ATTRIBUTES);
            } else {
                append("");
            }
        } else {
            //noinspection HardCodedStringLiteral
            append(value.toString());
        }
    }
    setSelected(isSelected);
    setFont(list.getFont());
    return this;
}
/** Remembers the selection state for the background painter. */
private void setSelected(final boolean selected) {
    mySelected = selected;
}
// Always paint our own background (the component is opaque).
@Override
protected boolean shouldPaintBackground() {
    return true;
}
}
/**
 * Adapts a {@link TreeModel} to the {@link ComboBoxModel} interface by
 * flattening the tree into a pre-order list of rows.
 */
private static final class TreeModelWrapper extends AbstractListModel implements ComboBoxModel {
    private final TreeModel myTreeModel;
    private Object mySelectedItem;
    private final boolean myShowRootNode;
    // Pre-order snapshot of the tree taken at construction time; the root is
    // omitted when it is not shown. All row-based lookups are answered from
    // this list, so structural model changes after construction are not
    // reflected (this was already true for getPathForRow()).
    private final List<TreeNode> myTreeModelAsList = new ArrayList<>();

    private TreeModelWrapper(@NotNull final TreeModel treeModel, final boolean showRootNode) {
        myTreeModel = treeModel;
        myShowRootNode = showRootNode;
        accumulateChildren((TreeNode) treeModel.getRoot(), myTreeModelAsList, showRootNode);
    }

    public TreeModel getTreeModel() {
        return myTreeModel;
    }

    @Override
    public void setSelectedItem(final Object obj) {
        // Fire only on an actual change to avoid needless repaints.
        if (mySelectedItem != null && !mySelectedItem.equals(obj) || mySelectedItem == null && obj != null) {
            mySelectedItem = obj;
            fireContentsChanged(this, -1, -1);
        }
    }

    /** Depth-first pre-order accumulation of {@code node} and all descendants. */
    private static void accumulateChildren(@NotNull final TreeNode node, @NotNull final List<? super TreeNode> list, final boolean showRoot) {
        if (showRoot || node.getParent() != null) list.add(node);
        final int count = node.getChildCount();
        for (int i = 0; i < count; i++) {
            accumulateChildren(node.getChildAt(i), list, showRoot);
        }
    }

    /** Builds the root-to-node path for the given visible row. */
    private TreePath getPathForRow(final int row) {
        TreeNode node = myTreeModelAsList.get(row);
        final List<TreeNode> path = new ArrayList<>();
        while (node != null) {
            path.add(0, node);
            node = node.getParent();
        }
        return new TreePath(path.toArray(new TreeNode[0]));
    }

    @Override
    public Object getSelectedItem() {
        return mySelectedItem;
    }

    @Override
    public int getSize() {
        // The snapshot already excludes a hidden root, so its size is exactly
        // the row count. Replaces a full pre-order walk on every call and
        // keeps getSize()/getElementAt() consistent with getPathForRow(),
        // which was already answered from this list.
        return myTreeModelAsList.size();
    }

    @Override
    public Object getElementAt(int index) {
        // Same pre-order sequence the enumeration-based lookup produced,
        // served in O(1) from the snapshot instead of re-walking the tree.
        return myTreeModelAsList.get(index);
    }
}
/**
 * Enumerates the direct children of a single node in model order.
 */
private static class ChildrenEnumeration implements Enumeration<Object> {
    private final TreeModel myTreeModel;
    private final Object myNode;
    // Index of the child returned by the previous nextElement() call.
    private int myIndex = -1;

    ChildrenEnumeration(@NotNull final TreeModel treeModel, @NotNull final Object node) {
        myTreeModel = treeModel;
        myNode = node;
    }

    @Override
    public boolean hasMoreElements() {
        return myIndex < myTreeModel.getChildCount(myNode) - 1;
    }

    @Override
    public Object nextElement() {
        if (!hasMoreElements()) {
            // Honor the Enumeration contract instead of asking the model
            // for an out-of-range child.
            throw new java.util.NoSuchElementException();
        }
        return myTreeModel.getChild(myNode, ++myIndex);
    }
}
/**
 * Enumerates all nodes of a {@link TreeModel} in depth-first pre-order,
 * starting at (and including) the root. A stack of per-level child
 * enumerations tracks the traversal position.
 */
private static class PreorderEnumeration implements Enumeration {
    private final TreeModel myTreeModel;
    // Stack of pending child enumerations; the top yields the next node.
    private final Stack<Enumeration> myStack;

    PreorderEnumeration(@NotNull final TreeModel treeModel) {
        myTreeModel = treeModel;
        myStack = new Stack<>();
        // Seed with a one-element enumeration yielding the root itself.
        myStack.push(Collections.enumeration(Collections.singleton(treeModel.getRoot())));
    }

    @Override
    public boolean hasMoreElements() {
        // Exhausted enumerations are popped eagerly in nextElement(), so the
        // top of a non-empty stack always has at least one element left.
        return !myStack.empty() &&
               myStack.peek().hasMoreElements();
    }

    @Override
    public Object nextElement() {
        Enumeration e = myStack.peek();
        Object node = e.nextElement();
        // Pop finished levels before descending so hasMoreElements() stays simple.
        if (!e.hasMoreElements()) {
            myStack.pop();
        }
        // Descend into the node's children (pre-order: children come next).
        Enumeration children = new ChildrenEnumeration(myTreeModel, node);
        if (children.hasMoreElements()) {
            myStack.push(children);
        }
        return node;
    }
}
/**
 * Allows combo box items to customize how they are rendered.
 */
public interface CustomPresentation {
    /** Appends this item's text (with attributes) to the renderer component. */
    void append(SimpleColoredComponent component, int index);

    /** Returns the icon for this item at the given list index, or null for none. */
    Icon getIcon(int index, @Iconable.IconFlags int flags);
}
}
| apache-2.0 |
elubow/titan | titan-core/src/main/java/com/thinkaurelius/titan/diskstorage/common/DistributedStoreManager.java | 9429 | package com.thinkaurelius.titan.diskstorage.common;
import com.google.common.base.Preconditions;
import com.thinkaurelius.titan.diskstorage.BackendException;
import com.thinkaurelius.titan.diskstorage.PermanentBackendException;
import com.thinkaurelius.titan.diskstorage.util.time.TimestampProvider;
import com.thinkaurelius.titan.diskstorage.configuration.Configuration;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.StoreTransaction;
import com.thinkaurelius.titan.diskstorage.util.time.TimestampProviders;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalUnit;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import static com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration.*;
/**
* Abstract class that handles configuration options shared by all distributed storage backends
*
* @author Matthias Broecheler (me@matthiasb.com)
*/
/**
 * Abstract base class that handles configuration options shared by all
 * distributed storage backends: host list, port, connection timeout, page
 * size, timestamp provider and optional credentials.
 *
 * @author Matthias Broecheler (me@matthiasb.com)
 */
public abstract class DistributedStoreManager extends AbstractStoreManager {

    protected final TimestampProvider times;

    /** How this JVM is deployed relative to the storage backend. */
    public enum Deployment {

        /**
         * Connects to storage backend over the network or some other connection with significant latency
         */
        REMOTE,

        /**
         * Connects to storage backend over localhost or some other connection with very low latency
         */
        LOCAL,

        /**
         * Embedded with storage backend and communicates inside the JVM
         */
        EMBEDDED
    }

    private static final Logger log = LoggerFactory.getLogger(DistributedStoreManager.class);

    // Used only to spread connections across configured hosts; no security requirement.
    private static final Random random = new Random();

    protected final String[] hostnames;
    protected final int port;
    protected final Duration connectionTimeoutMS;
    protected final int pageSize;

    // Both are null when authentication is disabled; see hasAuthentication().
    protected final String username;
    protected final String password;

    /**
     * Reads the shared connection settings from the configuration.
     *
     * @param storageConfig storage backend configuration
     * @param portDefault   port to use when no port is configured explicitly
     * @throws IllegalArgumentException if no hostname is configured
     */
    public DistributedStoreManager(Configuration storageConfig, int portDefault) {
        super(storageConfig);
        this.hostnames = storageConfig.get(STORAGE_HOSTS);
        Preconditions.checkArgument(hostnames.length > 0, "No hostname configured");
        if (storageConfig.has(STORAGE_PORT)) this.port = storageConfig.get(STORAGE_PORT);
        else this.port = portDefault;
        this.connectionTimeoutMS = storageConfig.get(CONNECTION_TIMEOUT);
        this.pageSize = storageConfig.get(PAGE_SIZE);
        this.times = storageConfig.get(TIMESTAMP_PROVIDER);
        if (storageConfig.has(AUTH_USERNAME)) {
            this.username = storageConfig.get(AUTH_USERNAME);
            this.password = storageConfig.get(AUTH_PASSWORD);
        } else {
            this.username = null;
            this.password = null;
        }
    }

    /**
     * Returns a randomly chosen host name. This is used to pick one host when multiple are configured.
     *
     * @return one of the configured host names, chosen uniformly at random
     */
    protected String getSingleHostname() {
        return hostnames[random.nextInt(hostnames.length)];
    }

    /**
     * Whether authentication is enabled for this storage backend.
     *
     * @return true if a username has been configured
     */
    public boolean hasAuthentication() {
        return username != null;
    }

    /**
     * Returns the default configured page size for this storage backend. The page size is used to determine
     * the number of records to request at a time when streaming result data.
     *
     * @return the configured page size
     */
    public int getPageSize() {
        return pageSize;
    }

    /*
     * TODO this should go away once we have a TitanConfig that encapsulates TimestampProvider
     */
    public TimestampProvider getTimestampProvider() {
        return times;
    }

    /**
     * Returns the {@link Deployment} mode of this connection to the storage backend.
     *
     * @return the deployment mode
     */
    public abstract Deployment getDeployment();

    @Override
    public String toString() {
        // Cap the hostname at 256 characters to keep the representation bounded.
        String hn = getSingleHostname();
        return hn.substring(0, Math.min(hn.length(), 256)) + ":" + port;
    }

    /**
     * Sleeps until the timestamp provider's clock has passed the mutation's
     * addition time, so that subsequent operations obtain strictly later
     * timestamps than the given write.
     *
     * @param txh      the transaction that performed the write (currently unused)
     * @param mustPass the timestamps that must lie in the past before returning
     * @throws BackendException if the wait is interrupted
     */
    protected void sleepAfterWrite(StoreTransaction txh, MaskedTimestamp mustPass) throws BackendException {
        assert mustPass.getDeletionTime(times) < mustPass.getAdditionTime(times);
        try {
            times.sleepPast(mustPass.getAdditionTimeInstant(times));
        } catch (InterruptedException e) {
            throw new PermanentBackendException("Unexpected interrupt", e);
        }
    }

    /**
     * Helper class to create the deletion and addition timestamps for a particular transaction.
     * It needs to be ensured that the deletion time is prior to the addition time since
     * some storage backends use the time to resolve conflicts.
     */
    public class MaskedTimestamp {

        // NOTE(review): this class never touches the enclosing instance and
        // could be a static nested class; left non-static to preserve source
        // and binary compatibility for existing callers.
        private final Instant t;

        public MaskedTimestamp(Instant commitTime) {
            Preconditions.checkNotNull(commitTime);
            this.t = commitTime;
        }

        public MaskedTimestamp(StoreTransaction txh) {
            this(txh.getConfiguration().getCommitTime());
        }

        /** Commit time with the lowest bit cleared; always precedes the addition time. */
        public long getDeletionTime(TimestampProvider times) {
            return times.getTime(t) & 0xFFFFFFFFFFFFFFFEL; // zero the LSB
        }

        /** Commit time with the lowest bit set; always follows the deletion time. */
        public long getAdditionTime(TimestampProvider times) {
            return (times.getTime(t) & 0xFFFFFFFFFFFFFFFEL) | 1L; // force the LSB to 1
        }

        public Instant getAdditionTimeInstant(TimestampProvider times) {
            return times.getTime(getAdditionTime(times));
        }
    }
}
| apache-2.0 |
Xylus/pinpoint | thrift/src/main/java/com/navercorp/pinpoint/io/header/InvalidHeaderException.java | 912 | /*
* Copyright 2018 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.io.header;
/**
* @author minwoo.jung
*/
/**
 * Thrown when a Pinpoint message header cannot be parsed or fails validation.
 */
public class InvalidHeaderException extends RuntimeException {

    // RuntimeException is Serializable; pin the ID so the serialized form
    // does not change with incidental recompilation.
    private static final long serialVersionUID = 1L;

    /**
     * @param message detail message describing the invalid header
     */
    public InvalidHeaderException(String message) {
        super(message);
    }

    /**
     * @param message detail message describing the invalid header
     * @param e       the underlying cause
     */
    public InvalidHeaderException(String message, Throwable e) {
        super(message, e);
    }
}
| apache-2.0 |
aahlenst/spring-boot | spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/properties/source/IterableConfigurationPropertySource.java | 2871 | /*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.context.properties.source;
import java.util.Iterator;
import java.util.function.Predicate;
import java.util.stream.Stream;
import org.springframework.boot.origin.OriginTrackedValue;
import org.springframework.util.StringUtils;
/**
* A {@link ConfigurationPropertySource} with a fully {@link Iterable} set of entries.
* Implementations of this interface <strong>must</strong> be able to iterate over all
* contained configuration properties. Any {@code non-null} result from
* {@link #getConfigurationProperty(ConfigurationPropertyName)} must also have an
* equivalent entry in the {@link #iterator() iterator}.
*
* @author Phillip Webb
* @author Madhura Bhave
* @since 2.0.0
* @see ConfigurationPropertyName
* @see OriginTrackedValue
* @see #getConfigurationProperty(ConfigurationPropertyName)
* @see #iterator()
* @see #stream()
*/
public interface IterableConfigurationPropertySource
        extends ConfigurationPropertySource, Iterable<ConfigurationPropertyName> {

    /**
     * Returns an iterator over every {@link ConfigurationPropertyName} managed by
     * this source, backed by {@link #stream()}.
     * @return an iterator (never {@code null})
     */
    @Override
    default Iterator<ConfigurationPropertyName> iterator() {
        return stream().iterator();
    }

    /**
     * Returns a sequential {@code Stream} over the {@link ConfigurationPropertyName
     * names} managed by this source.
     * @return a stream of names (never {@code null})
     */
    Stream<ConfigurationPropertyName> stream();

    /**
     * Searches all contained names for one that the given name is an ancestor of.
     */
    @Override
    default ConfigurationPropertyState containsDescendantOf(ConfigurationPropertyName name) {
        return ConfigurationPropertyState.search(this, (candidate) -> name.isAncestorOf(candidate));
    }

    /**
     * Returns a view of this source restricted to names matching the filter.
     */
    @Override
    default IterableConfigurationPropertySource filter(Predicate<ConfigurationPropertyName> filter) {
        return new FilteredIterableConfigurationPropertiesSource(this, filter);
    }

    /**
     * Returns a view of this source that also resolves the given aliases.
     */
    @Override
    default IterableConfigurationPropertySource withAliases(ConfigurationPropertyNameAliases aliases) {
        return new AliasedIterableConfigurationPropertySource(this, aliases);
    }

    /**
     * Returns a view of this source scoped beneath the given prefix; an empty
     * prefix leaves the source unchanged.
     */
    @Override
    default IterableConfigurationPropertySource withPrefix(String prefix) {
        if (StringUtils.hasText(prefix)) {
            return new PrefixedIterableConfigurationPropertySource(this, prefix);
        }
        return this;
    }
}
| apache-2.0 |
Sellegit/j2objc | guava/sources/com/google/common/net/HostSpecifier.java | 5951 | /*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.net;
import com.google.common.annotations.Beta;
import com.google.common.base.Preconditions;
import java.net.InetAddress;
import java.text.ParseException;
import javax.annotation.Nullable;
/**
* A syntactically valid host specifier, suitable for use in a URI.
* This may be either a numeric IP address in IPv4 or IPv6 notation, or a
* domain name.
*
* <p>Because this class is intended to represent host specifiers which can
* reasonably be used in a URI, the domain name case is further restricted to
* include only those domain names which end in a recognized public suffix; see
* {@link InternetDomainName#isPublicSuffix()} for details.
*
* <p>Note that no network lookups are performed by any {@code HostSpecifier}
* methods. No attempt is made to verify that a provided specifier corresponds
* to a real or accessible host. Only syntactic and pattern-based checks are
* performed.
*
* <p>If you know that a given string represents a numeric IP address, use
* {@link InetAddresses} to obtain and manipulate a
* {@link java.net.InetAddress} instance from it rather than using this class.
* Similarly, if you know that a given string represents a domain name, use
* {@link InternetDomainName} rather than this class.
*
* @author Craig Berry
* @since 5.0
*/
@Beta
public final class HostSpecifier {

    private final String canonicalForm;

    private HostSpecifier(String canonicalForm) {
        this.canonicalForm = canonicalForm;
    }

    /**
     * Returns a {@code HostSpecifier} built from {@code specifier}, which must
     * already be known to be valid: a domain name with a recognized public
     * suffix, an IPv4 literal, or an IPv6 literal with or without brackets.
     * For possibly-invalid input use {@link #from(String)} instead.
     *
     * @throws IllegalArgumentException if the specifier is not valid.
     */
    public static HostSpecifier fromValid(String specifier) {
        // Reject any specifier that carries a port and strip optional brackets
        // from IPv6 literals.
        final HostAndPort hostAndPort = HostAndPort.fromString(specifier);
        Preconditions.checkArgument(!hostAndPort.hasPort());
        final String host = hostAndPort.getHostText();

        // First attempt: treat the host as an IP literal. We build the address
        // (rather than using the is* checks) so the result can be put into
        // canonical form via InetAddresses.toUriString.
        InetAddress ipAddress = null;
        try {
            ipAddress = InetAddresses.forString(host);
        } catch (IllegalArgumentException e) {
            // Not an IPv4 or IPv6 literal; fall through to domain handling.
        }
        if (ipAddress != null) {
            return new HostSpecifier(InetAddresses.toUriString(ipAddress));
        }

        // Not an IP literal of any kind, so it must be a domain name or invalid.
        // TODO(user): different versions of this for different factories?
        final InternetDomainName domainName = InternetDomainName.from(host);
        if (domainName.hasPublicSuffix()) {
            return new HostSpecifier(domainName.name());
        }
        throw new IllegalArgumentException("Domain name does not have a recognized public suffix: "
            + host);
    }

    /**
     * Attempts to return a {@code HostSpecifier} for the given string, raising
     * a checked exception on failure. Prefer this over
     * {@link #fromValid(String)} whenever the input is not already known to be
     * valid.
     *
     * @throws ParseException if the specifier is not valid.
     */
    public static HostSpecifier from(String specifier) throws ParseException {
        try {
            return fromValid(specifier);
        } catch (IllegalArgumentException e) {
            // The IAE can originate at several points inside fromValid(), so
            // this method delegates to it rather than the other way around.
            ParseException parseException = new ParseException("Invalid host specifier: " + specifier, 0);
            parseException.initCause(e);
            throw parseException;
        }
    }

    /**
     * Tells whether {@code specifier} is valid per the rules documented on
     * {@link #fromValid(String)}.
     */
    public static boolean isValid(String specifier) {
        try {
            fromValid(specifier);
            return true;
        } catch (IllegalArgumentException e) {
            return false;
        }
    }

    @Override
    public boolean equals(@Nullable Object other) {
        if (this == other) {
            return true;
        }
        if (!(other instanceof HostSpecifier)) {
            return false;
        }
        return canonicalForm.equals(((HostSpecifier) other).canonicalForm);
    }

    @Override
    public int hashCode() {
        return canonicalForm.hashCode();
    }

    /**
     * Returns a URI-ready representation of the host: domain names are
     * normalized to lower case, and IPv6 literals carry brackets so the result
     * can be dropped into the host part of a URI.
     */
    @Override
    public String toString() {
        return canonicalForm;
    }
}
| apache-2.0 |
ollie314/spring-security | core/src/test/java/org/springframework/security/OtherTargetObject.java | 1784 | /*
* Copyright 2004, 2005, 2006 Acegi Technology Pty Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security;
/**
* Simply extends {@link TargetObject} so we have a different object to put configuration
* attributes against.
* <P>
* There is no different behaviour. We have to define each method so that
* <code>Class.getMethod(methodName, args)</code> returns a <code>Method</code>
* referencing this class rather than the parent class.
* </p>
* <P>
* We need to implement <code>ITargetObject</code> again because the
* <code>MethodDefinitionAttributes</code> only locates attributes on interfaces
* explicitly defined by the intercepted class (not the interfaces defined by its parent
* class or classes).
* </p>
*
* @author Ben Alex
*/
public class OtherTargetObject extends TargetObject implements ITargetObject {
// ~ Methods
// ========================================================================================================
public String makeLowerCase(String input) {
return super.makeLowerCase(input);
}
public String makeUpperCase(String input) {
return super.makeUpperCase(input);
}
public String publicMakeLowerCase(String input) {
return super.publicMakeLowerCase(input);
}
}
| apache-2.0 |
nat2013/netty | example/src/main/java/io/netty/example/http2/client/Http2SettingsHandler.java | 2229 | /*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.example.http2.client;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http2.Http2Settings;
import java.util.concurrent.TimeUnit;
/**
* Reads the first {@link Http2Settings} object and notifies a {@link ChannelPromise}
*/
/**
 * Reads the first {@link Http2Settings} object and notifies a {@link ChannelPromise}
 */
public class Http2SettingsHandler extends SimpleChannelInboundHandler<Http2Settings> {

    // Completed once the first SETTINGS frame arrives; assigned only in the
    // constructor, hence final.
    private final ChannelPromise promise;

    /**
     * Create new instance
     *
     * @param promise Promise object used to notify when first settings are received
     */
    public Http2SettingsHandler(ChannelPromise promise) {
        this.promise = promise;
    }

    /**
     * Wait for this handler to be added after the upgrade to HTTP/2, and for initial preface
     * handshake to complete.
     *
     * @param timeout Time to wait
     * @param unit {@link TimeUnit} for {@code timeout}
     * @throws Exception if timeout or other failure occurs
     */
    public void awaitSettings(long timeout, TimeUnit unit) throws Exception {
        if (!promise.awaitUninterruptibly(timeout, unit)) {
            throw new IllegalStateException("Timed out waiting for settings");
        }
        if (!promise.isSuccess()) {
            throw new RuntimeException(promise.cause());
        }
    }

    @Override
    protected void messageReceived(ChannelHandlerContext ctx, Http2Settings msg) throws Exception {
        promise.setSuccess();
        // Only care about the first settings message
        ctx.pipeline().remove(this);
    }
}
| apache-2.0 |
GlenRSmith/elasticsearch | server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java | 24737 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.persistent;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.NotMasterException;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.NodesShutdownMetadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractAsyncTask;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata.Assignment;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask;
import org.elasticsearch.persistent.decider.AssignmentDecision;
import org.elasticsearch.persistent.decider.EnableAssignmentDecider;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
/**
* Component that runs only on the master node and is responsible for assigning running tasks to nodes
*/
public class PersistentTasksClusterService implements ClusterStateListener, Closeable {
public static final Setting<TimeValue> CLUSTER_TASKS_ALLOCATION_RECHECK_INTERVAL_SETTING = Setting.timeSetting(
"cluster.persistent_tasks.allocation.recheck_interval",
TimeValue.timeValueSeconds(30),
TimeValue.timeValueSeconds(10),
Setting.Property.Dynamic,
Setting.Property.NodeScope
);
private static final Logger logger = LogManager.getLogger(PersistentTasksClusterService.class);
private final ClusterService clusterService;
private final PersistentTasksExecutorRegistry registry;
private final EnableAssignmentDecider enableDecider;
private final ThreadPool threadPool;
private final PeriodicRechecker periodicRechecker;
private final AtomicBoolean reassigningTasks = new AtomicBoolean(false);
/**
 * Creates the service. The cluster state listener that drives task
 * assignment is registered only on master-eligible nodes, and the periodic
 * recheck interval tracks its dynamic cluster setting.
 */
public PersistentTasksClusterService(
    Settings settings,
    PersistentTasksExecutorRegistry registry,
    ClusterService clusterService,
    ThreadPool threadPool
) {
    this.clusterService = clusterService;
    this.registry = registry;
    this.enableDecider = new EnableAssignmentDecider(settings, clusterService.getClusterSettings());
    this.threadPool = threadPool;
    // Periodically retries assignment of tasks that could not be assigned.
    this.periodicRechecker = new PeriodicRechecker(CLUSTER_TASKS_ALLOCATION_RECHECK_INTERVAL_SETTING.get(settings));
    if (DiscoveryNode.isMasterNode(settings)) {
        // Only the elected master assigns tasks, so only master-eligible
        // nodes need to observe cluster state changes.
        clusterService.addListener(this);
    }
    clusterService.getClusterSettings()
        .addSettingsUpdateConsumer(CLUSTER_TASKS_ALLOCATION_RECHECK_INTERVAL_SETTING, this::setRecheckInterval);
}
// visible for testing only
/** Updates the delay between reassignment rechecks (dynamic cluster setting). */
public void setRecheckInterval(TimeValue recheckInterval) {
    periodicRechecker.setInterval(recheckInterval);
}
// visible for testing only
/** Exposes the periodic rechecker so tests can inspect its schedule. */
PeriodicRechecker getPeriodicRechecker() {
    return periodicRechecker;
}
// Stops the periodic reassignment rechecks; the cluster state listener (if
// registered) is left to the cluster service's own lifecycle.
@Override
public void close() {
    periodicRechecker.close();
}
/**
* Creates a new persistent task on master node
*
* @param taskId the task's id
* @param taskName the task's name
* @param taskParams the task's parameters
* @param listener the listener that will be called when task is started
*/
public <Params extends PersistentTaskParams> void createPersistentTask(
    String taskId,
    String taskName,
    Params taskParams,
    ActionListener<PersistentTask<?>> listener
) {
    clusterService.submitStateUpdateTask("create persistent task", new ClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) {
            PersistentTasksCustomMetadata.Builder builder = builder(currentState);
            if (builder.hasTask(taskId)) {
                throw new ResourceAlreadyExistsException("task with id {" + taskId + "} already exist");
            }
            // Validate the params against the registered executor before
            // publishing anything to the cluster state.
            PersistentTasksExecutor<Params> taskExecutor = registry.getPersistentTaskExecutorSafe(taskName);
            taskExecutor.validate(taskParams, currentState);
            // The task may come back unassigned if no node currently qualifies.
            Assignment assignment = createAssignment(taskName, taskParams, currentState);
            return update(currentState, builder.addTask(taskId, taskName, taskParams, assignment));
        }

        @Override
        public void onFailure(String source, Exception e) {
            listener.onFailure(e);
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            PersistentTasksCustomMetadata tasks = newState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE);
            if (tasks != null) {
                PersistentTask<?> task = tasks.getTask(taskId);
                listener.onResponse(task);
                // If the task could not be assigned right away, make sure the
                // periodic rechecker is running so assignment is retried.
                if (task != null && task.isAssigned() == false && periodicRechecker.isScheduled() == false) {
                    periodicRechecker.rescheduleIfNecessary();
                }
            } else {
                listener.onResponse(null);
            }
        }
    });
}
/**
 * Marks a persistent task as completed, removing its record from the cluster state
 *
 * @param id the id of the persistent task
 * @param allocationId the expected allocation id of the persistent task
 * @param failure the exception that caused the task to fail, or null if the task completed successfully
 * @param listener the listener that will be called when the task's record is removed
 */
public void completePersistentTask(String id, long allocationId, Exception failure, ActionListener<PersistentTask<?>> listener) {
    final String source;
    if (failure != null) {
        logger.warn("persistent task " + id + " failed", failure);
        source = "finish persistent task (failed)";
    } else {
        source = "finish persistent task (success)";
    }
    clusterService.submitStateUpdateTask(source, new ClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) {
            PersistentTasksCustomMetadata.Builder tasksInProgress = builder(currentState);
            // Only remove the task if the allocation id still matches; a
            // mismatch means the task has been reassigned since the caller
            // last observed it.
            if (tasksInProgress.hasTask(id, allocationId)) {
                tasksInProgress.removeTask(id);
                return update(currentState, tasksInProgress);
            } else {
                if (tasksInProgress.hasTask(id)) {
                    logger.warn(
                        "The task [{}] with id [{}] was found but it has a different allocation id [{}], status is not updated",
                        PersistentTasksCustomMetadata.getTaskWithId(currentState, id).getTaskName(),
                        id,
                        allocationId
                    );
                } else {
                    logger.warn("The task [{}] wasn't found, status is not updated", id);
                }
                throw new ResourceNotFoundException("the task with id [" + id + "] and allocation id [" + allocationId + "] not found");
            }
        }

        @Override
        public void onFailure(String source, Exception e) {
            listener.onFailure(e);
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            // Using old state since in the new state the task is already gone
            listener.onResponse(PersistentTasksCustomMetadata.getTaskWithId(oldState, id));
        }
    });
}
/**
 * Removes the persistent task with the given id from the cluster state.
 *
 * @param id       the id of a persistent task
 * @param listener the listener that will be called when task is removed
 */
public void removePersistentTask(String id, ActionListener<PersistentTask<?>> listener) {
    clusterService.submitStateUpdateTask("remove persistent task", new ClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) {
            final PersistentTasksCustomMetadata.Builder taskBuilder = builder(currentState);
            if (taskBuilder.hasTask(id) == false) {
                throw new ResourceNotFoundException("the task with id {} doesn't exist", id);
            }
            return update(currentState, taskBuilder.removeTask(id));
        }

        @Override
        public void onFailure(String source, Exception e) {
            listener.onFailure(e);
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            // Using old state since in the new state the task is already gone
            listener.onResponse(PersistentTasksCustomMetadata.getTaskWithId(oldState, id));
        }
    });
}
/**
 * Update the state of a persistent task
 *
 * @param taskId           the id of a persistent task
 * @param taskAllocationId the expected allocation id of the persistent task; the state is
 *                         only updated if the task still carries this allocation id
 * @param taskState        new state
 * @param listener         the listener that will be called when the task state is updated
 */
public void updatePersistentTaskState(
    final String taskId,
    final long taskAllocationId,
    final PersistentTaskState taskState,
    final ActionListener<PersistentTask<?>> listener
) {
    clusterService.submitStateUpdateTask("update task state [" + taskId + "]", new ClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) {
            PersistentTasksCustomMetadata.Builder tasksInProgress = builder(currentState);
            if (tasksInProgress.hasTask(taskId, taskAllocationId)) {
                return update(currentState, tasksInProgress.updateTaskState(taskId, taskState));
            } else {
                // A mismatched allocation id means the update came from a stale task instance.
                if (tasksInProgress.hasTask(taskId)) {
                    logger.warn("trying to update state on task {} with unexpected allocation id {}", taskId, taskAllocationId);
                } else {
                    logger.warn("trying to update state on non-existing task {}", taskId);
                }
                throw new ResourceNotFoundException("the task with id {} and allocation id {} doesn't exist", taskId, taskAllocationId);
            }
        }

        @Override
        public void onFailure(String source, Exception e) {
            listener.onFailure(e);
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            listener.onResponse(PersistentTasksCustomMetadata.getTaskWithId(newState, taskId));
        }
    });
}
/**
 * This unassigns a task from any node, i.e. it is assigned to a {@code null} node with the provided reason.
 *
 * Since the assignment executor node is null, the {@link PersistentTasksClusterService} will attempt to reassign it to a valid
 * node quickly.
 *
 * @param taskId           the id of a persistent task
 * @param taskAllocationId the expected allocation id of the persistent task; the task is
 *                         only unassigned if it still carries this allocation id
 * @param reason           the reason for unassigning the task from any node
 * @param listener         the listener that will be called when task is unassigned
 */
public void unassignPersistentTask(
    final String taskId,
    final long taskAllocationId,
    final String reason,
    final ActionListener<PersistentTask<?>> listener
) {
    clusterService.submitStateUpdateTask("unassign persistent task from any node", new ClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) throws Exception {
            PersistentTasksCustomMetadata.Builder tasksInProgress = builder(currentState);
            if (tasksInProgress.hasTask(taskId, taskAllocationId)) {
                logger.trace("Unassigning task {} with allocation id {}", taskId, taskAllocationId);
                return update(currentState, tasksInProgress.reassignTask(taskId, unassignedAssignment(reason)));
            } else {
                throw new ResourceNotFoundException("the task with id {} and allocation id {} doesn't exist", taskId, taskAllocationId);
            }
        }

        @Override
        public void onFailure(String source, Exception e) {
            listener.onFailure(e);
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            // The task still exists in the new state (just unassigned), so read it from there.
            listener.onResponse(PersistentTasksCustomMetadata.getTaskWithId(newState, taskId));
        }
    });
}
/**
 * Creates a new {@link Assignment} for the given persistent task.
 *
 * @param taskName     the task's name
 * @param taskParams   the task's parameters
 * @param currentState the current {@link ClusterState}
 * @return a new {@link Assignment}; unassigned (with an explanatory reason) when the
 *         assignment decider forbids assignment or the executor finds no suitable node
 */
private <Params extends PersistentTaskParams> Assignment createAssignment(
    final String taskName,
    final Params taskParams,
    final ClusterState currentState
) {
    PersistentTasksExecutor<Params> persistentTasksExecutor = registry.getPersistentTaskExecutorSafe(taskName);

    // The cluster-wide decider can veto any assignment (e.g. assignment disabled).
    AssignmentDecision decision = enableDecider.canAssign();
    if (decision.getType() == AssignmentDecision.Type.NO) {
        return unassignedAssignment("persistent task [" + taskName + "] cannot be assigned [" + decision.getReason() + "]");
    }

    // Filter all nodes that are marked as shutting down, because we do not
    // want to assign a persistent task to a node that will shortly be
    // leaving the cluster
    final List<DiscoveryNode> candidateNodes = currentState.nodes()
        .getAllNodes()
        .stream()
        .filter(dn -> isNodeShuttingDown(currentState, dn.getId()) == false)
        .collect(Collectors.toList());
    // Task assignment should not rely on node order
    Randomness.shuffle(candidateNodes);

    final Assignment assignment = persistentTasksExecutor.getAssignment(taskParams, candidateNodes, currentState);
    assert assignment != null : "getAssignment() should always return an Assignment object, containing a node or a reason why not";
    // The executor only ever sees filtered candidates, so it must not pick a shutting-down node.
    assert (assignment.getExecutorNode() == null || isNodeShuttingDown(currentState, assignment.getExecutorNode()) == false)
        : "expected task ["
        + taskName
        + "] to be assigned to a node that is not marked as shutting down, but "
        + assignment.getExecutorNode()
        + " is currently marked as shutting down";
    return assignment;
}
/**
 * Returns true if the given node appears in the cluster's node-shutdown metadata,
 * regardless of which kind of shutdown was requested.
 */
static boolean isNodeShuttingDown(final ClusterState state, final String nodeId) {
    // Right now we make no distinction between the type of shutdown, but maybe in the future we might?
    return NodesShutdownMetadata.getShutdowns(state)
        .map(shutdowns -> shutdowns.getAllNodeMetadataMap().get(nodeId))
        .isPresent();
}
/**
 * On each cluster state change: when this node is the master, trigger a reassignment
 * round if the change warrants one; otherwise ensure the periodic recheck is stopped.
 */
@Override
public void clusterChanged(ClusterChangedEvent event) {
    if (event.localNodeMaster() == false) {
        // Only the elected master assigns persistent tasks; nothing to recheck here.
        periodicRechecker.cancel();
        return;
    }
    if (shouldReassignPersistentTasks(event)) {
        // We want to avoid a periodic check duplicating this work
        periodicRechecker.cancel();
        logger.trace("checking task reassignment for cluster state {}", event.state().getVersion());
        reassignPersistentTasks();
    }
}
/**
 * Submit a cluster state update to reassign any persistent tasks that need reassigning.
 * No-op if a reassignment round is already in flight (guarded by {@code reassigningTasks}).
 */
void reassignPersistentTasks() {
    if (this.reassigningTasks.compareAndSet(false, true) == false) {
        // Another reassignment round is already queued or running.
        return;
    }
    clusterService.submitStateUpdateTask("reassign persistent tasks", new ClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) {
            return reassignTasks(currentState);
        }

        @Override
        public void onFailure(String source, Exception e) {
            reassigningTasks.set(false);
            logger.warn("failed to reassign persistent tasks", e);
            if (e instanceof NotMasterException == false) {
                // There must be a task that's worth rechecking because there was one
                // that caused this method to be called and the method failed to assign it,
                // but only do this if the node is still the master
                periodicRechecker.rescheduleIfNecessary();
            }
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            reassigningTasks.set(false);
            if (isAnyTaskUnassigned(newState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE))) {
                // Some tasks could still not be assigned; keep retrying in the background.
                periodicRechecker.rescheduleIfNecessary();
            }
        }
    });
}
/**
 * Returns true if the cluster state change(s) require to reassign some persistent tasks. It can happen in the following
 * situations: a node left or is added, the routing table changed, the master node changed, the metadata changed or the
 * persistent tasks changed.
 */
boolean shouldReassignPersistentTasks(final ClusterChangedEvent event) {
    final PersistentTasksCustomMetadata tasks = event.state().getMetadata().custom(PersistentTasksCustomMetadata.TYPE);
    if (tasks == null) {
        // No persistent tasks exist, so nothing can need reassignment.
        return false;
    }

    boolean masterChanged = event.previousState().nodes().isLocalNodeElectedMaster() == false;

    if (persistentTasksChanged(event)
        || event.nodesChanged()
        || event.routingTableChanged()
        || event.metadataChanged()
        || masterChanged) {

        // Only report true if some task would actually receive a different assignment.
        for (PersistentTask<?> task : tasks.tasks()) {
            if (needsReassignment(task.getAssignment(), event.state().nodes())) {
                Assignment assignment = createAssignment(task.getTaskName(), task.getParams(), event.state());
                if (Objects.equals(assignment, task.getAssignment()) == false) {
                    return true;
                }
            }
        }
    }
    return false;
}
/**
 * Returns true if at least one persistent task currently has no assigned node.
 */
private boolean isAnyTaskUnassigned(final PersistentTasksCustomMetadata tasks) {
    if (tasks == null) {
        return false;
    }
    for (PersistentTask<?> task : tasks.tasks()) {
        if (task.getAssignment().isAssigned() == false) {
            return true;
        }
    }
    return false;
}
/**
 * Evaluates the cluster state and tries to assign tasks to nodes.
 *
 * @param currentState the cluster state to analyze
 * @return an updated version of the cluster state
 */
ClusterState reassignTasks(final ClusterState currentState) {
    ClusterState clusterState = currentState;

    final PersistentTasksCustomMetadata tasks = currentState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE);
    if (tasks != null) {
        logger.trace("reassigning {} persistent tasks", tasks.tasks().size());
        final DiscoveryNodes nodes = currentState.nodes();

        // We need to check if removed nodes were running any of the tasks and reassign them
        for (PersistentTask<?> task : tasks.tasks()) {
            if (needsReassignment(task.getAssignment(), nodes)) {
                Assignment assignment = createAssignment(task.getTaskName(), task.getParams(), clusterState);
                if (Objects.equals(assignment, task.getAssignment()) == false) {
                    logger.trace(
                        "reassigning task {} from node {} to node {}",
                        task.getId(),
                        task.getAssignment().getExecutorNode(),
                        assignment.getExecutorNode()
                    );
                    // Each reassignment builds on the state already updated by earlier iterations.
                    clusterState = update(clusterState, builder(clusterState).reassignTask(task.getId(), assignment));
                } else {
                    logger.trace("ignoring task {} because assignment is the same {}", task.getId(), assignment);
                }
            } else {
                logger.trace("ignoring task {} because it is still running", task.getId());
            }
        }
    }
    return clusterState;
}
/** Returns true if the persistent tasks metadata differs between the previous and the current cluster state **/
static boolean persistentTasksChanged(final ClusterChangedEvent event) {
    final String type = PersistentTasksCustomMetadata.TYPE;
    final Metadata.Custom current = event.state().metadata().custom(type);
    final Metadata.Custom previous = event.previousState().metadata().custom(type);
    return Objects.equals(current, previous) == false;
}
/** Returns true if the task is not assigned or is assigned to a node that no longer exists */
public static boolean needsReassignment(final Assignment assignment, final DiscoveryNodes nodes) {
    if (assignment.isAssigned() == false) {
        return true;
    }
    return nodes.nodeExists(assignment.getExecutorNode()) == false;
}
/** Creates a tasks-metadata builder seeded with the persistent tasks of the given cluster state. */
private static PersistentTasksCustomMetadata.Builder builder(ClusterState currentState) {
    final PersistentTasksCustomMetadata existingTasks = currentState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE);
    return PersistentTasksCustomMetadata.builder(existingTasks);
}
/**
 * Builds a new cluster state carrying the given tasks metadata, or returns the input
 * state unchanged when the builder recorded no modifications.
 */
private static ClusterState update(ClusterState currentState, PersistentTasksCustomMetadata.Builder tasksInProgress) {
    if (tasksInProgress.isChanged() == false) {
        // Avoid publishing a no-op cluster state update.
        return currentState;
    }
    final Metadata.Builder updatedMetadata = Metadata.builder(currentState.metadata())
        .putCustom(PersistentTasksCustomMetadata.TYPE, tasksInProgress.build());
    return ClusterState.builder(currentState).metadata(updatedMetadata).build();
}
/** Creates an {@link Assignment} with no executor node and the given explanatory reason. */
private static Assignment unassignedAssignment(String reason) {
    return new Assignment(null, reason);
}
/**
 * Class to periodically try to reassign unassigned persistent tasks.
 * Only performs work when the local node is the elected master.
 */
class PeriodicRechecker extends AbstractAsyncTask {

    PeriodicRechecker(TimeValue recheckInterval) {
        super(logger, threadPool, recheckInterval, false);
    }

    @Override
    protected boolean mustReschedule() {
        return true;
    }

    @Override
    public void runInternal() {
        if (clusterService.localNode().isMasterNode() == false) {
            // Non-master nodes never assign tasks, so there is nothing to do.
            return;
        }
        final ClusterState state = clusterService.state();
        logger.trace("periodic persistent task assignment check running for cluster state {}", state.getVersion());
        if (isAnyTaskUnassigned(state.getMetadata().custom(PersistentTasksCustomMetadata.TYPE))) {
            reassignPersistentTasks();
        }
    }

    @Override
    public String toString() {
        return "persistent_task_recheck";
    }
}
}
| apache-2.0 |
daedric/buck | src/com/facebook/buck/intellij/ideabuck/src/com/facebook/buck/intellij/ideabuck/file/BuckFileTypeFactory.java | 1614 | /*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.intellij.ideabuck.file;
import com.intellij.openapi.fileTypes.FileNameMatcherEx;
import com.intellij.openapi.fileTypes.FileTypeConsumer;
import com.intellij.openapi.fileTypes.FileTypeFactory;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtilRt;
/**
 * Registers the Buck file type with the IDE so that Buck build files are recognized.
 */
public class BuckFileTypeFactory extends FileTypeFactory {

  @Override
  public void createFileTypes(FileTypeConsumer fileTypeConsumer) {
    // Buck build files are matched by file name rather than by extension.
    final FileNameMatcherEx buildFileMatcher = new FileNameMatcherEx() {
      @Override
      public String getPresentableString() {
        return BuckFileUtil.getBuildFileName();
      }

      @Override
      public boolean acceptsCharSequence(CharSequence fileName) {
        final String buildFileName = BuckFileUtil.getBuildFileName();
        if (StringUtilRt.endsWithIgnoreCase(fileName, buildFileName)) {
          return true;
        }
        return Comparing.equal(fileName, buildFileName, true);
      }
    };
    fileTypeConsumer.consume(BuckFileType.INSTANCE, buildFileMatcher);
  }
}
| apache-2.0 |
motokito/jabref | src/main/java/net/sf/jabref/gui/search/HitOrMissComparator.java | 1601 | /* Copyright (C) 2003-2011 JabRef contributors.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.gui.search;
import java.util.Comparator;
import java.util.Objects;
import net.sf.jabref.model.entry.BibEntry;
import ca.odell.glazedlists.matchers.Matcher;
/**
 * This Comparator compares two objects based on whether none, one of them, or both
 * match a given Matcher. It is used to "float" group and search hits in the main table:
 * entries matched by the Matcher ("hits") are ordered before entries that are not ("misses").
 */
public class HitOrMissComparator implements Comparator<BibEntry> {

    private final Matcher<BibEntry> hitOrMiss;

    /**
     * @param hitOrMiss the matcher deciding whether an entry is a hit; must not be null
     */
    public HitOrMissComparator(Matcher<BibEntry> hitOrMiss) {
        this.hitOrMiss = Objects.requireNonNull(hitOrMiss);
    }

    @Override
    public int compare(BibEntry o1, BibEntry o2) {
        // hitOrMiss is guaranteed non-null by the constructor, so the former
        // "hitOrMiss == null" branch here was unreachable and has been removed.
        // Comparing o2's match result before o1's sorts hits (true) before misses (false).
        return Boolean.compare(hitOrMiss.matches(o2), hitOrMiss.matches(o1));
    }
}
| mit |
GradyD/smarthome | bundles/core/org.eclipse.smarthome.core.test/src/test/java/org/eclipse/smarthome/core/library/types/HSBTypeTest.java | 3788 | /**
* Copyright (c) 2014-2015 openHAB UG (haftungsbeschraenkt) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.smarthome.core.library.types;
import static org.junit.Assert.*;

import java.math.BigDecimal;
import java.math.RoundingMode;

import org.junit.Test;
/**
 * Tests for {@code HSBType}: equality and conversion between the HSB and RGB color models.
 *
 * @author Chris Jackson - added fromRGB() test
 */
public class HSBTypeTest {

    @Test
    public void testEquals() {
        // Identical HSB components must compare equal.
        HSBType hsb1 = new HSBType("53,86,1");
        HSBType hsb2 = new HSBType("53,86,1");
        assertTrue(hsb1.equals(hsb2));

        hsb1 = new HSBType("0,0,0");
        hsb2 = new HSBType("0,0,0");
        assertTrue(hsb1.equals(hsb2));
    }

    @Test
    public void testHsbToRgbConversion() {
        compareHsbToRgbValues("0,100,100", 255, 0, 0); // red
        compareHsbToRgbValues("360,100,100", 255, 0, 0); // red
        compareHsbToRgbValues("0,0,0", 0, 0, 0); // black
        compareHsbToRgbValues("0,0,100", 255, 255, 255); // white
        compareHsbToRgbValues("120,100,100", 0, 255, 0); // green
        compareHsbToRgbValues("240,100,100", 0, 0, 255); // blue
        compareHsbToRgbValues("229,37,62", 99, 110, 158); // blueish
        compareHsbToRgbValues("316,69,47", 119, 37, 97); // purple
        compareHsbToRgbValues("60,60,60", 153, 153, 61); // green
        compareHsbToRgbValues("300,100,40", 102, 0, 102);
    }

    /**
     * Converts a percent value (0-100) to the corresponding byte value (0-255),
     * rounding half up. Uses {@link RoundingMode#HALF_UP} instead of the
     * deprecated {@code BigDecimal.ROUND_HALF_UP} constant.
     */
    private int convertPercentToByte(PercentType percent) {
        return percent.value.multiply(BigDecimal.valueOf(255))
                .divide(BigDecimal.valueOf(100), 2, RoundingMode.HALF_UP).intValue();
    }

    // Asserts that the HSB string converts to exactly the expected RGB triple (0-255).
    private void compareHsbToRgbValues(String hsbValues, int red, int green, int blue) {
        HSBType hsb = new HSBType(hsbValues);

        System.out.println("HSB INPUT: " + hsbValues);
        System.out.println("RGB EXPECTED: " + red + "," + green + "," + blue);
        System.out.println("RGB ACTUAL (0-100): " + hsb.getRed() + "," + hsb.getGreen() + "," + hsb.getBlue());
        System.out.println("RGB ACTUAL (0-255): " + convertPercentToByte(hsb.getRed()) + ","
                + convertPercentToByte(hsb.getGreen()) + "," + convertPercentToByte(hsb.getBlue()) + "\n");

        assertEquals(red, convertPercentToByte(hsb.getRed()));
        assertEquals(green, convertPercentToByte(hsb.getGreen()));
        assertEquals(blue, convertPercentToByte(hsb.getBlue()));
    }

    @Test
    public void testRgbToHsbConversion() {
        compareRgbToHsbValues("0,100,100", 255, 0, 0); // red
        compareRgbToHsbValues("0,0,0", 0, 0, 0); // black
        compareRgbToHsbValues("0,0,100", 255, 255, 255); // white
        compareRgbToHsbValues("120,100,100", 0, 255, 0); // green
        compareRgbToHsbValues("240,100,100", 0, 0, 255); // blue
        compareRgbToHsbValues("60,60,60", 153, 153, 61); // green
        compareRgbToHsbValues("300,100,40", 102, 0, 102);
        compareRgbToHsbValues("228,37,61", 99, 110, 158); // blueish
        compareRgbToHsbValues("316,68,46", 119, 37, 97); // purple
    }

    // Asserts that the RGB triple converts back to exactly the expected HSB components.
    private void compareRgbToHsbValues(String hsbValues, int red, int green, int blue) {
        HSBType hsb = new HSBType(hsbValues);
        HSBType hsbRgb = HSBType.fromRGB(red, green, blue);

        System.out.println("HSB EXPECTED: " + hsbValues);
        System.out.println(
                "HSB ACTUAL : " + hsbRgb.getHue() + "," + hsbRgb.getSaturation() + "," + hsbRgb.getBrightness());

        assertEquals(hsb.getHue(), hsbRgb.getHue());
        assertEquals(hsb.getSaturation(), hsbRgb.getSaturation());
        assertEquals(hsb.getBrightness(), hsbRgb.getBrightness());
    }
}
| epl-1.0 |
md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jdi/ArrayType/componentSignature/componentsignature002.java | 12343 | /*
* Copyright (c) 2001, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package nsk.jdi.ArrayType.componentSignature;
import nsk.share.*;
import nsk.share.jpda.*;
import nsk.share.jdi.*;
import com.sun.jdi.*;
import java.util.*;
import java.io.*;
/**
* The test for the implementation of an object of the type <BR>
* ArrayType. <BR>
* <BR>
* The test checks up that results of the method <BR>
* <code>com.sun.jdi.ArrayType.componentSignature()</code> <BR>
* complies with its spec when a type is one of ReferenceType(s).<BR>
* <BR>
* The test checks up that the component signatures of <BR>
* ArrayType objects, corresponding in a debugger to <BR>
* the following in a debuggee: <BR>
* <BR>
* a class type array - "ClassForCheck_2 class3[]" field, <BR>
* an interface type array - "InterfaceForCheck iface[]" field, <BR>
* <BR>
* are as follows: <BR>
* "Lnsk/jdi/ArrayType/componentSignature/ClassForCheck_2;" <BR>
* "Lnsk/jdi/ArrayType/componentSignature/InterfaceForCheck;" <BR>
* <BR>
*/
public class componentsignature002 {

    //----------------------------------------------------- templete section

    // Standard JDI-testbase result codes; the harness adds PASS_BASE to the exit status.
    static final int PASSED = 0;
    static final int FAILED = 2;
    static final int PASS_BASE = 95;

    //----------------------------------------------------- templete parameters

    // Log-line prefixes: sHeader1 for section banners, sHeader2 for progress, sHeader3 for errors.
    static final String
    sHeader1 = "\n==> nsk/jdi/ArrayType/componentSignature/componentsignature002",
    sHeader2 = "--> componentsignature002: ",
    sHeader3 = "##> componentsignature002: ";

    //----------------------------------------------------- main method

    // Standalone entry point: delegates to run() and reports via the process exit status.
    public static void main (String argv[]) {
        int result = run(argv, System.out);
        System.exit(result + PASS_BASE);
    }

    // Harness entry point.
    public static int run (String argv[], PrintStream out) {
        return new componentsignature002().runThis(argv, out);
    }

    //-------------------------------------------------- log procedures

    private static boolean verbMode = false;

    private static Log logHandler;

    // Section banner message.
    private static void log1(String message) {
        logHandler.display(sHeader1 + message);
    }
    // Progress message.
    private static void log2(String message) {
        logHandler.display(sHeader2 + message);
    }
    // Error message (recorded as a complaint).
    private static void log3(String message) {
        logHandler.complain(sHeader3 + message);
    }

    // ************************************************ test parameters

    // Fully qualified name of the debuggee class to launch.
    private String debuggeeName =
        "nsk.jdi.ArrayType.componentSignature.componentsignature002a";

    // Package name in dotted form and the expected JNI-style signature prefix in slashed form.
    String mName = "nsk.jdi.ArrayType.componentSignature";
    String signaturePrefix = "nsk/jdi/ArrayType/componentSignature";

    //====================================================== test program

    static ArgumentHandler argsHandler;
    static int testExitCode = PASSED;

    //------------------------------------------------------ common section

    // Launches the debuggee, synchronizes with it over an IOPipe, runs the checks,
    // and returns PASSED or FAILED.
    private int runThis (String argv[], PrintStream out) {

        Debugee debuggee;

        argsHandler = new ArgumentHandler(argv);
        logHandler = new Log(out, argsHandler);

        Binder binder = new Binder(argsHandler, logHandler);

        if (argsHandler.verbose()) {
            debuggee = binder.bindToDebugee(debuggeeName + " -vbs"); // *** tp
        } else {
            debuggee = binder.bindToDebugee(debuggeeName); // *** tp
        }

        IOPipe pipe = new IOPipe(debuggee);

        debuggee.redirectStderr(out);
        log2("componentsignature002a debuggee launched");
        debuggee.resume();

        // Handshake: the debuggee reports "ready" once its test classes are loaded.
        String line = pipe.readln();
        if ((line == null) || !line.equals("ready")) {
            log3("signal received is not 'ready' but: " + line);
            return FAILED;
        } else {
            log2("'ready' recieved");
        }

        VirtualMachine vm = debuggee.VM();

        //------------------------------------------------------ testing section
        log1(" TESTING BEGINS");

        for (int i = 0; ; i++) {

            // Ask the debuggee for the next check; it answers "checkready" or "checkend".
            pipe.println("newcheck");
            line = pipe.readln();

            if (line.equals("checkend")) {
                log2(" : returned string is 'checkend'");
                break ;
            } else if (!line.equals("checkready")) {
                log3("ERROR: returned string is not 'checkready'");
                testExitCode = FAILED;
                break ;
            }

            log1("new check: #" + i);

            //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ variable part

            List listOfDebuggeeClasses = null;
            ReferenceType reftypeObj = null;
            int i2;

            for (i2 = 0; ; i2++) {

                // expresult: 0 = case passed, 1 = case failed, 2 = no more cases.
                int expresult = 0;

                log2("new check: #" + i2);

                switch (i2) {

                case 0 : // ClassType array
                    listOfDebuggeeClasses =
                        vm.classesByName(mName + ".ClassForCheck");
                    if (listOfDebuggeeClasses.size() != 1) {
                        log3("ERROR: for ClassType listOfDebuggeeClasses.size() != 1");
                        expresult = 1;
                        break ;
                    }
                    reftypeObj = (ReferenceType) listOfDebuggeeClasses.get(0);

                    Field classField = reftypeObj.fieldByName("class3");
                    Type classfieldType = null;
                    try {
                        classfieldType = classField.type();
                    } catch ( ClassNotLoadedException e ) {
                        log3("ERROR: classfieldType = classField.type();");
                        expresult =1;
                        break ;
                    }

                    // The component signature of a class-type array must be the
                    // JNI-style signature of the component class.
                    String classtypeComponentSignature =
                        ((ArrayType) classfieldType).componentSignature();
                    if (!classtypeComponentSignature.equals(
                            "L" + signaturePrefix + "/ClassForCheck_2;")) {
                        log3("ERROR: !classtypeComponentSignature.equals(namePrefix + " +
                            "'.ClassForCheck_2')" + " " + classtypeComponentSignature );
                        expresult = 1;
                        break;
                    }
                    break;

                case 1 : // InterfaceType array
                    listOfDebuggeeClasses =
                        vm.classesByName(mName + ".ClassForCheck");
                    if (listOfDebuggeeClasses.size() != 1) {
                        log3("ERROR: for InterfaceType listOfDebuggeeClasses.size() != 1");
                        expresult = 1;
                        break ;
                    }
                    reftypeObj = (ReferenceType) listOfDebuggeeClasses.get(0);

                    Field ifaceField = reftypeObj.fieldByName("iface");
                    Type ifacefieldType = null;
                    try {
                        ifacefieldType = ifaceField.type();
                    } catch ( ClassNotLoadedException e ) {
                        log3("ERROR: ifacefieldType = ifaceField.type();");
                        expresult =1;
                        break ;
                    }

                    // The component signature of an interface-type array must be the
                    // JNI-style signature of the component interface.
                    String interfacetypeComponentSignature =
                        ((ArrayType) ifacefieldType).componentSignature();
                    if (!interfacetypeComponentSignature.equals(
                            "L" + signaturePrefix + "/InterfaceForCheck;")) {
                        log3("ERROR: !interfacetypeComponentSignature.equals(namePrefix + " +
                            "/InterfaceForCheck')" + " " + interfacetypeComponentSignature );
                        expresult = 1;
                        break;
                    }
                    break;

                /*
                case 2 : // PrimitiveType array !!!!! throws ClassNotLoadedException
                    listOfDebuggeeClasses =
                        vm.classesByName(mName + ".ClassForCheck");
                    if (listOfDebuggeeClasses.size() != 1) {
                        expresult = 1;
                        log3("ERROR: for ArrayType listOfDebuggeeClasses.size() != 1");
                        break ;
                    }
                    reftypeObj = (ReferenceType) listOfDebuggeeClasses.get(0);

                    Field arrayField = reftypeObj.fieldByName("bl");
                    Type arrayfieldType = null;
                    try {
                        arrayfieldType = arrayField.type();
                    } catch ( ClassNotLoadedException e ) {
                        log3("ERROR: arrayfieldType = arrayField.type();");
                        expresult =1;
                        break ;
                    }

                    String arrayTypecomponentSignature =
                        ((ArrayType) arrayfieldType).componentSignature();
                    if (!arrayTypecomponentSignature.equals(
                            "L" + signaturePrefix + "/ClassForCheck_2;")) {
                        log3("ERROR: !arrayfieldTypeSignature.equals(namePrefix +" +
                            "'.ClassForCheck_2[]')" + " " + arrayTypecomponentSignature );
                        expresult = 1;
                        break;
                    }
                    break;
                */

                default: expresult = 2;
                    break ;
                }

                if (expresult == 2) {
                    log2(" test cases finished");
                    break ;
                } else if (expresult == 1) {
                    log3("ERROR: expresult != true; check # = " + i);
                    testExitCode = FAILED;
                }
            }
            //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        }
        log1(" TESTING ENDS");

        //-------------------------------------------------- test summary section
        //------------------------------------------------- standard end section

        pipe.println("quit");
        log2("waiting for the debuggee to finish ...");
        debuggee.waitFor();

        // The debuggee reports its own verdict through its exit status.
        int status = debuggee.getStatus();
        if (status != PASSED + PASS_BASE) {
            log3("debuggee returned UNEXPECTED exit status: " +
                status + " != PASS_BASE");
            testExitCode = FAILED;
        } else {
            log2("debuggee returned expected exit status: " +
                status + " == PASS_BASE");
        }

        if (testExitCode != PASSED) {
            System.out.println("TEST FAILED");
        }
        return testExitCode;
    }
}
| gpl-2.0 |
huquanbo/lirenewdemo | src/main/java/net/semanticmetadata/lire/imageanalysis/features/LireFeature.java | 3721 | /*
* This file is part of the LIRE project: http://lire-project.net
* LIRE is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* LIRE is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with LIRE; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* We kindly ask you to refer the any or one of the following publications in
* any publication mentioning or employing Lire:
*
* Lux Mathias, Savvas A. Chatzichristofis. Lire: Lucene Image Retrieval –
* An Extensible Java CBIR Library. In proceedings of the 16th ACM International
* Conference on Multimedia, pp. 1085-1088, Vancouver, Canada, 2008
* URL: http://doi.acm.org/10.1145/1459359.1459577
*
* Lux Mathias. Content Based Image Retrieval with LIRE. In proceedings of the
* 19th ACM International Conference on Multimedia, pp. 735-738, Scottsdale,
* Arizona, USA, 2011
* URL: http://dl.acm.org/citation.cfm?id=2072432
*
* Mathias Lux, Oge Marques. Visual Information Retrieval using Java and LIRE
* Morgan & Claypool, 2013
* URL: http://www.morganclaypool.com/doi/abs/10.2200/S00468ED1V01Y201301ICR025
*/
package net.semanticmetadata.lire.imageanalysis.features;
/**
* This is the basic interface for all content based features.
* Created by mlux on 28/05/2008.
*
* @author Mathias Lux, mathias@juggle.at
* @author Nektarios Anagnostopoulos, nek.anag@gmail.com
*/
public interface LireFeature extends FeatureVector {
    /**
     * Gives a descriptive name of the feature, i.e. a name to show up in benchmarks, menus, UIs, etc.
     * @return the name of the feature.
     */
    public String getFeatureName();

    /**
     * Returns the preferred field name for indexing.
     * @return the field name preferred for indexing in a Lucene index.
     */
    public String getFieldName();

    /**
     * Returns a compact byte[] based representation of the feature vector,
     * suitable for storage and for round-tripping through
     * {@link LireFeature#setByteArrayRepresentation(byte[])}.
     * @return a compact byte[] array containing the feature vector.
     * @see LireFeature#setByteArrayRepresentation(byte[])
     */
    public byte[] getByteArrayRepresentation();

    /**
     * Sets the feature vector values based on the byte[] data. Use
     * {@link LireFeature#getByteArrayRepresentation()}
     * to generate a compatible byte[] array.
     * @param featureData the byte[] data.
     * @see LireFeature#getByteArrayRepresentation()
     */
    public void setByteArrayRepresentation(byte[] featureData);

    /**
     * Sets the feature vector values based on a slice of the byte[] data.
     * Use {@link LireFeature#getByteArrayRepresentation()}
     * to generate a compatible byte[] array.
     * @param featureData the byte[] array containing the data.
     * @param offset the offset, i.e. where the feature vector starts.
     * @param length the length of the data representing the feature vector.
     * @see LireFeature#getByteArrayRepresentation()
     */
    public void setByteArrayRepresentation(byte[] featureData, int offset, int length);

    /**
     * The distance function for this type of feature.
     * @param feature the feature vector to compare the current instance to.
     * @return the distance (or dissimilarity) between the instance and the parameter.
     */
    double getDistance(LireFeature feature);
}
| gpl-2.0 |
lennartj/maven-plugins | maven-source-plugin/src/test/java/org/apache/maven/plugins/source/stubs/Project005Stub.java | 3152 | package org.apache.maven.plugins.source.stubs;
import static org.apache.maven.plugins.source.stubs.Project001Stub.readModelFromFile;
import java.io.File;
import java.util.List;
import org.apache.maven.model.Build;
import org.apache.maven.model.Model;
import org.apache.maven.model.Resource;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.plugin.testing.stubs.MavenProjectStub;
/**
* @author <a href="mailto:oching@exist.com">Maria Odea Ching</a>
*/
/**
 * Stub of the Maven project "project-005" used by the source-plugin unit tests.
 * Loads the test POM, copies its coordinates onto the stub, and attaches a
 * build section and a stub artifact.
 */
public class Project005Stub
    extends MavenProjectStub
{
    /** Build configuration returned by the overridden accessor. */
    private Build build;

    /** Main resources of the stubbed project. */
    private List<Resource> resources;

    /** Test resources of the stubbed project. */
    private List<Resource> testResources;

    public Project005Stub()
    {
        try
        {
            final Model pomModel =
                readModelFromFile( new File( getBasedir(), "target/test-classes/unit/project-005/pom.xml" ) );

            // Mirror the POM coordinates onto the stub.
            setModel( pomModel );
            setGroupId( pomModel.getGroupId() );
            setArtifactId( pomModel.getArtifactId() );
            setVersion( pomModel.getVersion() );
            setName( pomModel.getName() );
            setUrl( pomModel.getUrl() );
            setPackaging( pomModel.getPackaging() );

            // Build section pointing into the test output area.
            final Build stubBuild = new Build();
            stubBuild.setFinalName( getArtifactId() + "-" + getVersion() );
            stubBuild.setDirectory( getBasedir() + "/target/test/unit/project-005/target" );
            setBuild( stubBuild );

            // Attach a snapshot jar artifact stub.
            final SourcePluginArtifactStub artifactStub =
                new SourcePluginArtifactStub( getGroupId(), getArtifactId(), getVersion(), getPackaging(), null );
            artifactStub.setArtifactHandler( new DefaultArtifactHandlerStub() );
            artifactStub.setType( "jar" );
            artifactStub.setBaseVersion( "1.0-SNAPSHOT" );
            setArtifact( artifactStub );
        }
        catch ( Exception e )
        {
            // Test stub: report the problem but keep the (partially initialised) stub usable.
            e.printStackTrace();
        }
    }

    public Build getBuild()
    {
        return build;
    }

    public void setBuild( Build build )
    {
        this.build = build;
    }

    public List<Resource> getResources()
    {
        return resources;
    }

    public void setResources( List<Resource> resources )
    {
        this.resources = resources;
    }

    public List<Resource> getTestResources()
    {
        return testResources;
    }

    public void setTestResources( List<Resource> testResources )
    {
        this.testResources = testResources;
    }
}
| apache-2.0 |
DT9/osmdroid | OpenStreetMapViewer/src/org/osmdroid/samples/SampleLoader.java | 2645 | // Created by plusminus on 18:23:13 - 03.10.2008
package org.osmdroid.samples;
import java.util.ArrayList;
import android.app.ListActivity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.ListView;
/**
 * Launcher list for the osmdroid sample activities: shows one row per sample
 * and starts the matching activity when a row is clicked.
 */
public class SampleLoader extends ListActivity {

    // ===========================================================
    // Constants
    // ===========================================================

    /** Sample activities, indexed in the same order as the list rows. */
    private static final Class<?>[] SAMPLE_ACTIVITIES = {
            SampleExtensive.class,
            SampleWithMinimapItemizedoverlay.class,
            SampleWithMinimapZoomcontrols.class,
            SampleWithTilesOverlay.class,
            SampleWithTilesOverlayAndCustomTileSource.class
    };

    // ===========================================================
    // Methods from SuperClass/Interfaces
    // ===========================================================

    /** Called when the activity is first created. */
    @Override
    public void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Row titles, one per entry of SAMPLE_ACTIVITIES.
        final ArrayList<String> sampleTitles = new ArrayList<String>();
        sampleTitles.add("OSMapView with Minimap, ZoomControls, Animations, Scale Bar and MyLocationOverlay");
        sampleTitles.add("OSMapView with ItemizedOverlay");
        sampleTitles.add("OSMapView with Minimap and ZoomControls");
        sampleTitles.add("Sample with tiles overlay");
        sampleTitles.add("Sample with tiles overlay and custom tile source");

        this.setListAdapter(new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1,
                sampleTitles));
    }

    /** Launches the sample that corresponds to the clicked row. */
    @Override
    protected void onListItemClick(final ListView l, final View v, final int position, final long id) {
        if (position >= 0 && position < SAMPLE_ACTIVITIES.length) {
            this.startActivity(new Intent(this, SAMPLE_ACTIVITIES[position]));
        }
    }
}
| apache-2.0 |
CesarPantoja/jena | jena-arq/src/main/java/org/apache/jena/sparql/engine/optimizer/StatsMatcher.java | 16160 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.engine.optimizer;
import static org.apache.jena.sparql.engine.optimizer.reorder.PatternElements.* ;
import java.io.OutputStream ;
import java.util.ArrayList ;
import java.util.HashMap ;
import java.util.List ;
import java.util.Map ;
import org.apache.jena.atlas.io.IndentedWriter ;
import org.apache.jena.atlas.logging.Log ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.Triple ;
import org.apache.jena.sparql.ARQException ;
import org.apache.jena.sparql.core.Var ;
import org.apache.jena.sparql.engine.optimizer.reorder.PatternTriple ;
import org.apache.jena.sparql.graph.NodeConst ;
import org.apache.jena.sparql.sse.Item ;
import org.apache.jena.sparql.sse.ItemException ;
import org.apache.jena.sparql.sse.ItemList ;
import org.apache.jena.sparql.sse.SSE ;
import org.slf4j.Logger ;
import org.slf4j.LoggerFactory ;
/** Stats format:<pre>(stats
* (meta ...)
* ((S P O) weight)
* (<predicate uri> weight)
* )</pre>
* where <code>S</code>, <code>P</code>, <code>O</code> is a URI, variable,
* literal or one of the words <code>ANY</code> (matches anything),
* <code>VAR</code> (matches a variable), <code>TERM</code> (matches a
* fixed URI, or literal), <code>URI</code>, <code>BNODE</code>,
* <code>LITERAL</code> (matches one of these types).
*/
public final class StatsMatcher
{
    private static Logger log = LoggerFactory.getLogger(StatsMatcher.class) ;

    /** Tag of the top-level stats list in the SSE file. */
    public static final String STATS = "stats" ;
    /** Tag of the optional metadata sublist. */
    public static final String META = "meta" ;
    /** Key (inside the meta list) that carries the total triple count. */
    public static final String COUNT = "count" ;
    /** Symbol supplying the default weight for fixed-but-unlisted predicates. */
    public static final Item OTHER = Item.createSymbol("other") ;

    // Sentinel weight meaning "no pattern matched".
    private static double NOMATCH = -1 ;

    /** Per-candidate bookkeeping: the weight plus counters by match quality. */
    private static class Match
    {
        double weight = NOMATCH ;
        int exactMatches = 0 ;      // slots matched by an identical node
        int termMatches = 0 ;       // slots matched by TERM/URI/LITERAL/BNODE
        int varMatches = 0 ;        // slots matched by VAR
        int anyMatches = 0 ;        // slots matched by ANY
    }

    // General structure: all registered patterns, in insertion order.
    protected List<Pattern> patterns = new ArrayList<>() ;
    // Map keyed by P for faster lookup (if no P available, we'll use the full list).
    protected Map<Item, List<Pattern>> mapPatterns = new HashMap<>() ;
    // Default behaviour: weight used for a fixed predicate with no matching pattern.
    double DefaultMatch = NOMATCH ;
    // Total triple count from the (meta (count N)) entry; -1 when absent.
    long count = -1 ;

    /** Create an empty matcher; patterns are added via the add* methods. */
    public StatsMatcher() {}

    /**
     * Create a matcher from a stats file in SSE syntax.
     *
     * @param filename name of a file holding a (stats ...) expression.
     * @throws ARQException if the file content is not a stats expression.
     */
    public StatsMatcher(String filename)
    {
        try {
            Item stats = SSE.readFile(filename) ;
            if ( stats.isNil() )
            {
                Log.warn(this, "Empty stats file: "+filename) ;
                return ;
            }
            if ( !stats.isTagged(STATS) )
                throw new ARQException("Not a stats file: "+filename) ;
            init(stats) ;
        } catch (ItemException ex)
        { // Debug
            throw ex ;
        }
    }

    /** Create a matcher from an already parsed (stats ...) item. */
    public StatsMatcher(Item stats)
    { init(stats) ; }

    /** Walk the stats list: optional (meta ...) entry first, then one entry per pattern. */
    private void init(Item stats)
    {
        if ( !stats.isTagged(STATS) )
            throw new ARQException("Not a tagged '"+STATS+"'") ;
        ItemList list = stats.getList().cdr();      // Skip tag
        if ( list.car().isTagged(META) )
        {
            // Process the meta tag.
            Item elt1 = list.car();
            list = list.cdr();      // Move list on
            // Get count.
            Item x = Item.find(elt1.getList(), COUNT) ;
            if ( x != null )
                count = x.getList().get(1).asInteger() ;
        }
        while (!list.isEmpty())
        {
            Item elt = list.car() ;
            list = list.cdr();
            onePattern(elt) ;
        }
    }

    /**
     * Process one stats entry. The entry is either a predicate abbreviation
     * (<uri> weight), a special symbol entry (other/TERM/VAR/ANY), or a full
     * ((S P O) weight) pattern.
     */
    private void onePattern(Item elt)
    {
        Item pat = elt.getList().get(0) ;

        if (pat.isNode())
        {
            // (<uri> weight)
            Node n = pat.getNode() ;
            if (!n.isURI())
            {
                log.warn("Not a preicate URI: " + pat.toString()) ;
                return ;
            }
            addAbbreviation(elt) ;
        }
        else if (pat.isSymbol())
        {
            // (other weight) sets the default weight for unlisted predicates.
            if ( pat.equals(OTHER) )
            {
                double d = elt.getList().get(1).getDouble() ;
                DefaultMatch = d ;
                return ;
            }

            if ( pat.equals(BNODE) || pat.equals(LITERAL) )
            {
                // Predicates can never be blank nodes or literals.
                log.warn("Not a match for a predicate URI: " + pat.toString()) ;
                return ;
            }
            if ( pat.equals(TERM) || pat.equals(VAR) || pat.equals(ANY) )
                addAbbreviation(elt) ;
            else
            {
                log.warn("Not understood: " + pat) ;
                return ;
            }
        }
        else if (pat.isList() && pat.getList().size() == 3)
        {
            // It's of the form ((S P O) weight)
            Item w = elt.getList().get(1) ;
            Pattern pattern = new Pattern(((Number)(w.getNode().getLiteralValue())).doubleValue(),
                                          intern(pat.getList().get(0)), intern(pat.getList().get(1)),
                                          intern(pat.getList().get(2))) ;
            addPattern(pattern) ;
        }
        else
        {
            log.warn("Unrecognized pattern: " + pat) ;
        }
    }

    /**
     * Expand a single-predicate entry (<uri> weight) into the derived
     * (S P ?), (? P O) and (? P ?) patterns, using small-graph heuristics
     * when the total triple count is low.
     */
    private void addAbbreviation(Item elt)
    {
        Item predicateTerm = elt.getList().get(0) ;
        // Single node - it's a predicate abbreviate.
        double numProp = elt.getList().get(1).getDouble() ;

        if ( count < 100 )
            addPatternsSmall(predicateTerm, numProp) ;
        else
            addPatterns(predicateTerm, numProp) ;
    }

    // Knowing ?PO is quite important - it ranges from IFP (1) to
    // rdf:type rdf:Resource (potentially everything).
    public static final double weightSP = 2 ;
    public static final double weightPO = 10 ;
    public static final double weightTypeO = 1000 ;   // ? rdf:type <Object> -- Avoid as can be very, very bad.

    // Variants of the above used for small graphs (less than ~100 triples).
    public static final double weightSP_small = 2 ;
    public static final double weightPO_small = 4 ;
    public static final double weightTypeO_small = 40 ;

    /** Add patterns based solely on the predicate count and some guessing */
    public void addPatterns(Node predicate, double numProp)
    {
        addPatterns(Item.createNode(predicate), numProp) ;
    }

    /** Add patterns based solely on the predicate count and some guessing for a small graph
     * (less than a few thousand triples)
     */
    public void addPatternsSmall(Node predicate, double numProp)
    {
        addPatternsSmall(Item.createNode(predicate), numProp) ;
    }

    private void addPatterns(Item predicate, double numProp)
    {
        double wSP = weightSP ;
        double wPO = weightPO ;

        // A slot weight can never exceed the total count for the predicate.
        wPO = Math.min(numProp, wPO) ;
        wSP = Math.min(numProp, wSP) ;

        if ( NodeConst.nodeRDFType.equals(predicate.getNode()) )
            // ? rdf:type <Object> -- Avoid as can be very, very bad.
            wPO = weightTypeO ;

        addPatterns(predicate, numProp, wSP, wPO) ;
    }

    private void addPatternsSmall(Item predicate, double numProp)
    {
        double wSP = weightSP_small ;
        double wPO = weightPO_small ;

        // A slot weight can never exceed the total count for the predicate.
        wPO = Math.min(numProp, wPO) ;
        wSP = Math.min(numProp, wSP) ;

        if ( predicate.isNode() && NodeConst.nodeRDFType.equals(predicate.getNode()) )
            wPO = weightTypeO_small ;

        addPatterns(predicate, numProp, wSP, wPO) ;
    }

    // Register the three derived patterns for one predicate.
    private void addPatterns(Item predicate, double wP, double wSP, double wPO)
    {
        addPattern(new Pattern(wSP, TERM, predicate, ANY)) ;    // S, P, ? : approx weight
        addPattern(new Pattern(wPO, ANY, predicate, TERM)) ;    // ?, P, O : approx weight
        addPattern(new Pattern(wP, ANY, predicate, ANY)) ;      // ?, P, ?
    }

    /** Add a pattern, keeping both the flat list and the by-predicate index in step. */
    public void addPattern(Pattern pattern)
    {
        // Check for named variables which should not appear in a Pattern
        check(pattern) ;
        patterns.add(pattern) ;
        List<Pattern> entry = mapPatterns.get(pattern.predItem) ;
        if ( entry == null )
        {
            entry = new ArrayList<>() ;
            mapPatterns.put(pattern.predItem, entry ) ;
        }
        entry.add(pattern) ;
    }

//    public void addPattern(Triple triple)
//    {
//        if ( triple.getSubject().isVariable() )
//        {
//            // PO, P and O
//        }
//        else
//        {
//            //SPO, SP and SO
//        }
//        throw new NotImplementedException("StatsMatcher.addPattern") ;
//    }

    // Reject patterns that use concrete named variables (use the VAR symbol instead).
    private static void check(Pattern pattern)
    {
        check(pattern.subjItem) ;
        check(pattern.predItem) ;
        check(pattern.objItem) ;
    }

    private static void check(Item item)
    {
        if ( Var.isVar(item.getNode()) )
            throw new ARQException("Explicit variable used in a pattern (use VAR): "+item.getNode()) ;
    }

    // Canonicalise the well-known symbols so identity / map lookups work.
    private Item intern(Item item)
    {
        if ( item.sameSymbol(ANY.getSymbol()) )     return ANY ;
        if ( item.sameSymbol(VAR.getSymbol()) )     return VAR ;
        if ( item.sameSymbol(TERM.getSymbol()) )    return TERM ;
        if ( item.sameSymbol(URI.getSymbol()) )     return URI ;
        if ( item.sameSymbol(LITERAL.getSymbol()) ) return LITERAL ;
        if ( item.sameSymbol(BNODE.getSymbol()) )   return BNODE ;
        return item ;
    }

    /** Match a concrete triple. */
    public double match(Triple t)
    {
        return match(Item.createNode(t.getSubject()),
                     Item.createNode(t.getPredicate()),
                     Item.createNode(t.getObject())) ;
    }

    /** Match an abstracted triple pattern. */
    public double match(PatternTriple pTriple)
    {
        return match(pTriple.subject, pTriple.predicate, pTriple.object) ;
    }

    /** Return the matching weight for the first triple match found,
     *  else apply default value for fixed, unknown predicate,
     *  else return NOMATCH
     */
    public double match(Item subj, Item pred, Item obj)
    {
        double m = matchWorker(subj, pred, obj) ;
        if ( m == NOMATCH && pred.isNodeURI() )
            m = DefaultMatch ;
        //System.out.println("("+subj+" "+pred+" "+obj+") => "+m) ;
        return m ;
    }

    // Dispatch to the pattern chains that can possibly match the predicate slot.
    private double matchWorker(Item subj, Item pred, Item obj)
    {
        if ( isSet(subj) && isSet(pred) && isSet(obj) )
            // A set of triples ...
            return 1.0 ;

        // A predicate can be :
        //   A URI - search on that URI, the TERM and ANY chains.
        //   A variable - search on that VAR and ANY chains.

        if ( pred.isNodeURI() )
        {
            double w = NOMATCH ;
            w = search(pred, subj, pred, obj, w) ;
            w = search(TERM, subj, pred, obj, w) ;  //??
            w = search(ANY, subj, pred, obj, w) ;   //??
            return w ;
        }

        if ( pred.isVar() )
        {
            double w = NOMATCH ;
            w = search(VAR, subj, pred, obj, w) ;
            w = search(ANY, subj, pred, obj, w) ;
            return w ;
        }

        if ( pred.equals(TERM) )
        {
            double w = NOMATCH ;
            w = search(TERM, subj, pred, obj, w) ;
            w = search(ANY, subj, pred, obj, w) ;
            return w ;
        }

        if ( pred.equals(ANY) )
        {
            throw new ARQException("Predicate is ANY") ;
            //double w = matchLinear(patterns, subj, pred, obj) ;
//            double w = NOMATCH ;
//            w = search(VAR, subj, pred, obj, w) ;
//            w = search(ANY, subj, pred, obj, w) ;
//            return w ;
        }

        throw new ARQException("Unidentified predicate: "+pred+" in ("+subj+" "+pred+" "+obj+")") ;
        //return matchLinear(subj, pred, obj) ;
    }

    // Scan the chain for 'key' and keep the better (smaller) weight.
    private double search(Item key, Item subj, Item pred, Item obj, double oldMin)
    {
        List<Pattern> entry = mapPatterns.get(key) ;
        if ( entry == null )
            return oldMin ;
        double w = matchLinear(entry, subj, pred, obj) ;
        return minPos(w, oldMin) ;
    }

    //Minimum respecting NOMATCH for "not known"
    private static double minPos(double x, double y)
    {
        if ( x == NOMATCH ) return y ;
        if ( y == NOMATCH ) return x ;
        return Math.min(x, y) ;
    }

    // First matching pattern wins; pattern order therefore matters.
    private static double matchLinear(List<Pattern> patterns, Item subj, Item pred, Item obj)
    {
        for ( Pattern pattern : patterns )
        {
            Match match = new Match() ;
            if ( ! matchNode(subj, pattern.subjItem, match) )
                continue ;
            if ( ! matchNode(pred, pattern.predItem, match) )
                continue ;
            if ( ! matchNode(obj, pattern.objItem, match) )
                continue ;
            // First match.
            return pattern.weight ;
        }
        return NOMATCH ;
    }

    /**
     * Match one slot: 'node' is the incoming value, 'item' is the pattern slot.
     * The counters in 'details' record the quality of the match.
     */
    private static boolean matchNode(Item node, Item item, Match details)
    {
        if ( isAny(item) )
        {
            details.anyMatches ++ ;
            return true ;
        }

        if ( isAnyVar(item) )
        {
            details.varMatches ++ ;
            return true ;
        }

        if ( node.isSymbol() )
        {
            //TERM in the thing to be matched means something concrete will be there.
            if ( node.equals(TERM) )
            {
                if ( item.equals(TERM) )
                {
                    details.termMatches ++ ;
                    return true ;
                }
                // Does not match LITERAL, URI, BNODE and VAR/ANY were done above.
                return false ;
            }
            throw new ARQException("StatsMatcher: unexpected slot type: "+node) ;
        }

        if ( ! node.isNode() )
            return false ;
        Node n = node.getNode() ;
        if ( n.isConcrete() )
        {
            if ( item.isNode() && item.getNode().equals(n) )
            {
                details.exactMatches ++ ;
                return true ;
            }

            if ( isAnyTerm(item) )
            {
                details.termMatches ++ ;
                return true ;
            }

            if ( isAnyURI(item) && n.isURI() )
            {
                details.termMatches ++ ;
                return true ;
            }
            if ( isAnyLiteral(item) && n.isLiteral() )
            {
                details.termMatches ++ ;
                return true ;
            }
            if ( isAnyBNode(item) && n.isBlank() )
            {
                details.termMatches ++ ;
                return true ;
            }
        }
        return false ;
    }

    @Override
    public String toString()
    {
        String $ = "" ;
        for ( Pattern p : patterns )
            $ = $+p+"\n" ;
        return $ ;
    }

    /** Write the matcher back out in the (stats ...) SSE format. */
    public void printSSE(OutputStream ps)
    {
        IndentedWriter out = new IndentedWriter(ps) ;
        out.println("(stats") ;
        out.incIndent() ;
        for ( Pattern p : patterns )
        {
            p.output(out) ;
            out.println();
        }
        out.decIndent() ;
        out.println(")") ;
        out.flush();
    }
}
| apache-2.0 |
fabiofumarola/elasticsearch | src/main/java/org/elasticsearch/action/index/IndexAction.java | 1417 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.index;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.Client;
/**
*/
/**
 * Transport-layer definition of the index action. Stateless; a single shared
 * instance is exposed via {@link #INSTANCE}.
 */
public class IndexAction extends Action<IndexRequest, IndexResponse, IndexRequestBuilder> {

    /** Wire-level name the action is registered under. */
    public static final String NAME = "index";

    /** Shared singleton; the constructor is private so this is the only instance. */
    public static final IndexAction INSTANCE = new IndexAction();

    private IndexAction() {
        super(NAME);
    }

    /** Creates an empty response object to be filled in by the transport layer. */
    @Override
    public IndexResponse newResponse() {
        return new IndexResponse();
    }

    /** Creates a request builder bound to the given client. */
    @Override
    public IndexRequestBuilder newRequestBuilder(Client client) {
        return new IndexRequestBuilder(client);
    }
}
| apache-2.0 |
michaelrice/asterisk-java | src/main/java/org/asteriskjava/manager/response/ExtensionStateResponse.java | 2457 | /*
* Copyright 2004-2006 Stefan Reuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.asteriskjava.manager.response;
/**
* Response to an {@link org.asteriskjava.manager.action.ExtensionStateAction}.
*
* @author srt
* @version $Id$
* @see org.asteriskjava.manager.action.ExtensionStateAction
*/
public class ExtensionStateResponse extends ManagerResponse
{
    private static final long serialVersionUID = -2044248427247227390L;

    // Extension whose state was queried.
    private String exten;
    // Dialplan context the extension belongs to.
    private String context;
    // Hint assigned to the extension, if any.
    private String hint;
    // Numeric extension state as reported by Asterisk.
    private Integer status;

    /**
     * Returns the extension whose state was queried.
     *
     * @return the extension.
     */
    public String getExten()
    {
        return exten;
    }

    /**
     * Sets the extension whose state was queried.
     *
     * @param exten the extension.
     */
    public void setExten(String exten)
    {
        this.exten = exten;
    }

    /**
     * Returns the dialplan context of the extension.
     *
     * @return the context.
     */
    public String getContext()
    {
        return context;
    }

    /**
     * Sets the dialplan context of the extension.
     *
     * @param context the context.
     */
    public void setContext(String context)
    {
        this.context = context;
    }

    /**
     * Returns the hint assigned to the extension.
     *
     * @return the hint.
     */
    public String getHint()
    {
        return hint;
    }

    /**
     * Sets the hint assigned to the extension.
     *
     * @param hint the hint.
     */
    public void setHint(String hint)
    {
        this.hint = hint;
    }

    /**
     * Returns the numeric extension state.
     *
     * @return the state.
     */
    public Integer getStatus()
    {
        return status;
    }

    /**
     * Sets the numeric extension state.
     *
     * @param status the state.
     */
    public void setStatus(Integer status)
    {
        this.status = status;
    }

    @Override
    public String toString()
    {
        // StringBuilder instead of the legacy synchronized StringBuffer:
        // no shared-state concern here and the rendered text is unchanged.
        final StringBuilder sb = new StringBuilder(getClass().getName() + ": ");
        sb.append("actionId='").append(getActionId()).append("'; ");
        sb.append("message='").append(getMessage()).append("'; ");
        sb.append("response='").append(getResponse()).append("'; ");
        sb.append("uniqueId='").append(getUniqueId()).append("'; ");
        sb.append("exten='").append(getExten()).append("'; ");
        sb.append("context='").append(getContext()).append("'; ");
        sb.append("hint='").append(getHint()).append("'; ");
        sb.append("status='").append(getStatus()).append("'; ");
        sb.append("systemHashcode=").append(System.identityHashCode(this));
        return sb.toString();
    }
}
| apache-2.0 |
medicayun/medicayundicom | dcm4jboss-all/trunk/dcm4jboss-wado/src/java/org/dcm4chex/wado/web/WADOServlet.java | 8697 | /* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2003-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
* Franz Willer <franz.willer@gwi-ag.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.wado.web;
import java.io.IOException;
import java.io.OutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dcm4chex.archive.util.RequestedFrameNumbersOutOfRangeException;
import org.dcm4chex.wado.common.BasicRequestObject;
import org.dcm4chex.wado.common.WADORequestObject;
import org.dcm4chex.wado.common.WADOResponseObject;
/**
* @author franz.willer
*
* TODO To change the template for this generated type comment go to
* Window - Preferences - Java - Code Style - Code Templates
*/
public class WADOServlet extends HttpServlet {

    /** Holds the WADOServiceDelegate instance used to fetch WADO objects. */
    private static WADOServiceDelegate delegate;

    /** serialVersionUID because super class is serializable. */
    private static final long serialVersionUID = 3257008748022085682L;

    private static Logger log = Logger.getLogger( WADOServlet.class.getName() );

    /**
     * Initialize the WADOServiceDelegator.
     * <p>
     * Set the name of the MBean from servlet init param.
     */
    public void init() {
        delegate = new WADOServiceDelegate();
        delegate.init( getServletConfig() );
    }

    /**
     * Handles the POST request in the doGet method.
     *
     * @param request The http request.
     * @param response The http response.
     */
    public void doPost( HttpServletRequest request, HttpServletResponse response ){
        doGet( request, response);
    }

    /**
     * Handles the GET request: parses the WADO parameters, fetches the object
     * via the delegate and streams it (or an error / redirect) to the client.
     *
     * @param request The http request.
     * @param response The http response.
     */
    public void doGet( HttpServletRequest request, HttpServletResponse response ){
        long twrsp1 = System.currentTimeMillis();     // start of total response timing
        log.info(request.getRemoteAddr()+" - WADO URL:"+request.getRequestURI()+"?"+request.getQueryString());
        BasicRequestObject reqObject = RequestObjectFactory.getRequestObject( request );
        if ( reqObject == null || ! (reqObject instanceof WADORequestObject) ) {
            // NOTE(review): parsing is retried once with the identical request;
            // looks redundant unless the factory is stateful -- confirm intent.
            reqObject = RequestObjectFactory.getRequestObject( request );
            if ( reqObject == null ) {
                sendError( response, HttpServletResponse.SC_BAD_REQUEST, "Not A WADO URL" );
                return;
            }
        }
        int iErr = reqObject.checkRequest();
        if ( iErr < 0 ) {
            sendError( response, HttpServletResponse.SC_BAD_REQUEST, reqObject.getErrorMsg() );//required params missing or invalid!
            return;
        }
        long twget1 = System.currentTimeMillis();     // start of object-fetch timing
        WADOResponseObject respObject = delegate.getWADOObject( (WADORequestObject)reqObject );
        long twget2 = System.currentTimeMillis();     // end of object-fetch timing
        int returnCode = respObject.getReturnCode();
        if ( returnCode == HttpServletResponse.SC_OK ) {
            sendWADOFile( response, respObject );
        } else if ( returnCode == HttpServletResponse.SC_TEMPORARY_REDIRECT ) {
            try {
                response.sendRedirect( respObject.getErrorMessage() ); //error message contains redirect host.
            } catch (IOException e) {
                sendError( response, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error: cant send redirect to client! Redirect to host "+respObject.getErrorMessage()+" failed!" );
            }
        } else {
            sendError( response, returnCode, respObject.getErrorMessage() );
        }
        long twrsp2 = System.currentTimeMillis();     // end of total response timing
        if (log.isDebugEnabled())
            log.debug("TimesToDeliverDICOMObject "+((WADORequestObject)reqObject).getObjectUID()+
                    ", with requesttype: "+respObject.getContentType()+" to "+((WADORequestObject)reqObject).getRemoteAddr()+
                    " took total [ms]: " +(twrsp2 -twrsp1)+", FetchObject took [ms]: "+(twget2 -twget1)+
                    ", Transmission took [ms]: "+((twrsp2 -twrsp1)-(twget2-twget1)));
    }

    /**
     * Send an error response with given response code and message to the client.
     * <p>
     * It is recommended that this method is only called once per request!<br>
     * Otherwise an IllegalStateException will be thrown!
     *
     * @param response The HttpServletResponse of the request.
     * @param errCode One of the response codes defined in HttpServletResponse.
     * @param msg A description for the reason to send the error.
     */
    private void sendError( HttpServletResponse response, int errCode, String msg ) {
        try {
            response.sendError( errCode, msg );
        } catch (IOException e) {
            log.error("Cant perform sendError( "+errCode+", "+msg+" )! reason:"+e.getMessage(), e );
        }
    }

    /**
     * Send the retrieved file to the client.
     * <p>
     * Sets the content type as defined in the WADOResponseObject object.
     *
     * @param response the response to stream the object into.
     * @param respObject the fetched WADO object to deliver.
     */
    private void sendWADOFile( HttpServletResponse response, WADOResponseObject respObject ) {
        response.setHeader("Expires","0");//disables client side caching!!!
        log.debug("sendResponse:"+respObject);
        try {
            if ( respObject != null ) {
                log.info("send WADO response: "+respObject.getContentType());
                response.setContentType( respObject.getContentType() );
                long len = respObject.length();
                if ( len != -1 )
                    // only set the length when it is known up front
                    response.setContentLength((int)len);
                final String errMsg = "Exception while writing WADO response to client! reason:";
                try {
                    log.debug("respObject execute");
                    OutputStream respStream = response.getOutputStream();
                    try {
                        respObject.execute(respStream);
                    } finally {
                        // always release the servlet output stream
                        respStream.close();
                    }
                } catch ( RequestedFrameNumbersOutOfRangeException e ) {
                    sendError(response, HttpServletResponse.SC_BAD_REQUEST,
                            "Error: Requested Frame Numbers Out of Range");
                } catch ( IOException ioe) {
                    // client dropped the connection: log quietly, everything else loudly
                    if(ioe.toString().startsWith("ClientAbortException:")) {
                        log.debug(errMsg + ioe);
                    } else {
                        log.error(errMsg + ioe.getMessage(), ioe);
                    }
                } catch ( Exception e ) {
                    log.error(errMsg+e.getMessage(), e );
                }
            }
        } catch ( Exception x ) {
            log.error("Exception handling WADO, will send error response", x);
            sendError( response, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, x.getMessage() );
        }
    }
}
| apache-2.0 |
karouani/javasimon | core/src/test/java/org/javasimon/IncrementalSimonsPurgerTest.java | 5088 | package org.javasimon;
import org.mockito.ArgumentMatcher;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import static org.mockito.Mockito.*;
/**
* @author <a href="mailto:ivan.mushketyk@gmail.com">Ivan Mushketyk</a>
*/
public class IncrementalSimonsPurgerTest {
private ScheduledExecutorService executorService;
private ScheduledFuture scheduledFuture;
@BeforeMethod
public void beforeMethod() {
executorService = mock(ScheduledExecutorService.class);
scheduledFuture = mock(ScheduledFuture.class);
when(executorService.scheduleWithFixedDelay(any(Runnable.class), anyLong(), anyLong(), any(TimeUnit.class)))
.thenReturn(scheduledFuture);
}
@Test(dataProvider = "managersDataProvider")
public void testPeriodicalIncrementalSimonsPurge(Manager manager) {
IncrementalSimonsPurger incrementalSimonsPurger = new IncrementalSimonsPurger(manager, executorService);
long duration = 1;
TimeUnit timeUnit = TimeUnit.SECONDS;
incrementalSimonsPurger.start(duration, timeUnit);
verify(executorService).scheduleWithFixedDelay(
argThat(new PurgerRunnableMatcher(manager)), eq(duration), eq(duration), eq(timeUnit));
}
private class PurgerRunnableMatcher extends ArgumentMatcher<Runnable> {
private final Manager expectedManager;
public PurgerRunnableMatcher(Manager expectedManager) {
this.expectedManager = expectedManager;
}
@Override
public boolean matches(Object o) {
IncrementalSimonsPurger.PurgerRunnable purger = (IncrementalSimonsPurger.PurgerRunnable) o;
return purger.getManager() == expectedManager;
}
}
@Test
public void testCancel() {
EnabledManager manager = new EnabledManager();
IncrementalSimonsPurger incrementalSimonsPurger = new IncrementalSimonsPurger(manager, executorService);
long duration = 1;
TimeUnit timeUnit = TimeUnit.SECONDS;
incrementalSimonsPurger.start(duration, timeUnit);
incrementalSimonsPurger.cancel();
verify(scheduledFuture).cancel(false);
}
@Test(expectedExceptions = IllegalStateException.class)
public void testCancelWithoutStart() {
EnabledManager manager = new EnabledManager();
IncrementalSimonsPurger incrementalSimonsPurger = new IncrementalSimonsPurger(manager, executorService);
incrementalSimonsPurger.cancel();
}
@Test(expectedExceptions = IllegalStateException.class)
public void testStartWithoutCancel() {
EnabledManager manager = new EnabledManager();
IncrementalSimonsPurger incrementalSimonsPurger = new IncrementalSimonsPurger(manager, executorService);
incrementalSimonsPurger.start(1, TimeUnit.SECONDS);
incrementalSimonsPurger.start(1, TimeUnit.SECONDS);
}
@Test(expectedExceptions = IllegalStateException.class)
public void testDoubleCancel() {
EnabledManager manager = new EnabledManager();
IncrementalSimonsPurger incrementalSimonsPurger = new IncrementalSimonsPurger(manager, executorService);
incrementalSimonsPurger.start(1, TimeUnit.SECONDS);
incrementalSimonsPurger.cancel();
incrementalSimonsPurger.cancel();
}
@DataProvider(name = "managersProvider")
public Object[][] managersDataProvider() {
return new Object[][] {
{new EnabledManager()},
{new SwitchingManager()}
};
}
@Test(dataProvider = "managersProvider")
public void testPurging(Manager manager) {
    // Create a stopwatch with an incremental sample and record one split.
    Stopwatch watch = manager.getStopwatch("stopwatch");
    watch.sampleIncrement("key");
    watch.start().stop();

    // Run the purger with a threshold one second in the future, so the
    // sample touched "in the past" is eligible for removal.
    long threshold = manager.milliTime() + 1000;
    new IncrementalSimonsPurger.PurgerRunnable(manager, threshold).run();

    // stopIncrementalSampling returns false once the purger removed the key.
    Assert.assertFalse(watch.stopIncrementalSampling("key"));
}
@Test
public void testDaemonThreadFactoryCreatesDaemonThread() {
    Thread created = new IncrementalSimonsPurger.DaemonThreadFactory()
        .newThread(mock(Runnable.class));
    // Purger threads must be daemons so they never keep the JVM alive.
    Assert.assertTrue(created.isDaemon());
}
@Test
public void testDaemonThreadFactorySetName() {
    Thread created = new IncrementalSimonsPurger.DaemonThreadFactory()
        .newThread(mock(Runnable.class));
    // A fresh factory numbers its threads starting at 1.
    Assert.assertEquals(created.getName(), "javasimon-simonsPurger-1");
}
@Test
public void testDaemonThreadFactoryThreadNameChanges() {
    IncrementalSimonsPurger.DaemonThreadFactory factory =
        new IncrementalSimonsPurger.DaemonThreadFactory();
    Runnable task = mock(Runnable.class);
    // The per-factory counter increments with every thread created.
    Assert.assertEquals(factory.newThread(task).getName(), "javasimon-simonsPurger-1");
    Assert.assertEquals(factory.newThread(task).getName(), "javasimon-simonsPurger-2");
}
}
| bsd-3-clause |
liufeiit/tulip | net/src/test/java/io/netty/example/http/websocketx/client/package-info.java | 930 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
* <p>This is an example web service client.
* <p>To run this example, you must first start
* {@link io.netty.example.http.websocketx.server.WebSocketServer} and
* then {@link io.netty.example.http.websocketx.client.WebSocketClient}.
*/
package io.netty.example.http.websocketx.client;
| mit |
jtnord/jenkins | core/src/test/java/hudson/util/SecretTest.java | 3085 | /*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.util;
import jenkins.model.Jenkins;
import junit.framework.TestCase;
import java.security.SecureRandom;
import hudson.Util;
/**
 * Tests encryption round-tripping, XStream serialization, and backward
 * compatibility of {@link Secret}.
 *
 * @author Kohsuke Kawaguchi
 */
public class SecretTest extends TestCase {

    /** Installs a random 256-bit master key so each run encrypts with a fresh secret. */
    @Override protected void setUp() throws Exception {
        SecureRandom sr = new SecureRandom();
        byte[] random = new byte[32];
        sr.nextBytes(random);
        Secret.SECRET = Util.toHexString(random);
    }

    /** Clears the test key so other tests see the default key resolution. */
    @Override protected void tearDown() throws Exception {
        Secret.SECRET = null;
    }

    public void testEncrypt() {
        Secret secret = Secret.fromString("abc");
        assertEquals("abc", secret.getPlainText());

        // make sure we got some encryption going
        System.out.println(secret.getEncryptedValue());
        assertFalse("abc".equals(secret.getEncryptedValue()));

        // can we round trip?
        assertEquals(secret, Secret.fromString(secret.getEncryptedValue()));
    }

    public void testDecrypt() {
        assertEquals("abc", Secret.toString(Secret.fromString("abc")));
    }

    public void testSerialization() {
        Secret s = Secret.fromString("Mr.Hudson");
        String xml = Jenkins.XSTREAM.toXML(s);
        // the serialized form must carry the ciphertext, never the plaintext
        assertFalse(xml, xml.contains(s.getPlainText()));
        assertTrue(xml, xml.contains(s.getEncryptedValue()));
        Object o = Jenkins.XSTREAM.fromXML(xml);
        assertEquals(xml, o, s);
    }

    public static class Foo {
        Secret password;
    }

    /**
     * Makes sure the serialization form is backward compatible with String.
     */
    public void testCompatibilityFromString() {
        String tagName = Foo.class.getName().replace("$", "_-");
        String xml = "<" + tagName + "><password>secret</password></" + tagName + ">";
        Foo foo = new Foo();
        Jenkins.XSTREAM.fromXML(xml, foo);
        assertEquals("secret", Secret.toString(foo.password));
    }
}
| mit |
philomatic/smarthome | bundles/core/org.eclipse.smarthome.core/src/main/java/org/eclipse/smarthome/core/events/Event.java | 1211 | /**
* Copyright (c) 2014-2017 by the respective copyright holders.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.smarthome.core.events;
/**
 * {@link Event} objects are delivered by the {@link EventPublisher} through the Eclipse SmartHome event bus.
 * The callback interface {@link EventSubscriber} can be implemented in order to receive such events.
 *
 * @author Stefan Bußweiler - Initial contribution
 */
public interface Event {
/**
 * Gets the event type.
 *
 * The type identifies the concrete kind of event (presumably used by
 * subscribers to select the events they are interested in — see
 * {@link EventSubscriber}).
 *
 * @return the event type
 */
String getType();
/**
 * Gets the topic of an event.
 *
 * @return the event topic
 */
String getTopic();
/**
 * Gets the payload as a serialized string.
 *
 * @return the serialized event
 */
String getPayload();
/**
 * Gets the name of the source identifying the sender.
 *
 * @return the name of the source
 */
String getSource();
}
| epl-1.0 |
md-5/jdk10 | src/java.xml/share/classes/com/sun/org/apache/xml/internal/serialize/XML11Serializer.java | 20683 | /*
* Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Sep 14, 2000:
// Fixed problem with namespace handling. Contributed by
// David Blondeau <blondeau@intalio.com>
// Sep 14, 2000:
// Fixed serializer to report IO exception directly, instead at
// the end of document processing.
// Reported by Patrick Higgins <phiggins@transzap.com>
// Aug 21, 2000:
// Fixed bug in startDocument not calling prepare.
// Reported by Mikael Staldal <d96-mst-ingen-reklam@d.kth.se>
// Aug 21, 2000:
// Added ability to omit DOCTYPE declaration.
package com.sun.org.apache.xml.internal.serialize;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import com.sun.org.apache.xerces.internal.dom.DOMMessageFormatter;
import com.sun.org.apache.xerces.internal.util.NamespaceSupport;
import com.sun.org.apache.xerces.internal.util.SymbolTable;
import com.sun.org.apache.xerces.internal.util.XML11Char;
import com.sun.org.apache.xerces.internal.util.XMLChar;
import org.w3c.dom.DOMError;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
/**
* Implements an XML serializer supporting both DOM and SAX pretty
* serializing. For usage instructions see {@link Serializer}.
* <p>
* If an output stream is used, the encoding is taken from the
* output format (defaults to <tt>UTF-8</tt>). If a writer is
* used, make sure the writer uses the same encoding (if applies)
* as specified in the output format.
* <p>
* The serializer supports both DOM and SAX. SAX serializing is done by firing
* SAX events and using the serializer as a document handler. DOM serializing is done
* by calling {@link #serialize(Document)} or by using DOM Level 3
* {@link org.w3c.dom.ls.LSSerializer} and
* serializing with {@link org.w3c.dom.ls.LSSerializer#write},
* {@link org.w3c.dom.ls.LSSerializer#writeToString}.
* <p>
* If an I/O exception occurs while serializing, the serializer
* will not throw an exception directly, but only throw it
* at the end of serializing (either DOM or SAX's {@link
* org.xml.sax.DocumentHandler#endDocument}.
* <p>
* For elements that are not specified as whitespace preserving,
* the serializer will potentially break long text lines at space
* boundaries, indent lines, and serialize elements on separate
* lines. Line terminators will be regarded as spaces, and
* spaces at beginning of line will be stripped.
*
* @author <a href="mailto:arkin@intalio.com">Assaf Arkin</a>
* @author Rahul Srivastava
* @author Elena Litani IBM
* @see Serializer
*
* @deprecated As of JDK 9, Xerces 2.9.0, Xerces DOM L3 Serializer implementation
* is replaced by that of Xalan. Main class
* {@link com.sun.org.apache.xml.internal.serialize.DOMSerializerImpl} is replaced
* by {@link com.sun.org.apache.xml.internal.serializer.dom3.LSSerializerImpl}.
*/
@Deprecated
public class XML11Serializer
extends XMLSerializer {
//
// constants
//
protected static final boolean DEBUG = false;
//
// data
//
//
// DOM Level 3 implementation: variables intialized in DOMSerializerImpl
//
/** stores namespaces in scope */
protected NamespaceSupport fNSBinder;
/** stores all namespace bindings on the current element */
protected NamespaceSupport fLocalNSBinder;
/** symbol table for serialization */
protected SymbolTable fSymbolTable;
// is node dom level 1 node?
protected boolean fDOML1 = false;
// counter for new prefix names
protected int fNamespaceCounter = 1;
protected final static String PREFIX = "NS";
/**
* Controls whether namespace fixup should be performed during
* the serialization.
* NOTE: if this field is set to true the following
* fields need to be initialized: fNSBinder, fLocalNSBinder, fSymbolTable,
* XMLSymbols.EMPTY_STRING, fXmlSymbol, fXmlnsSymbol, fNamespaceCounter.
*/
protected boolean fNamespaces = false;
/**
* Constructs a new serializer. The serializer cannot be used without
* calling {@link #setOutputCharStream} or {@link #setOutputByteStream}
* first.
*/
public XML11Serializer() {
super( );
_format.setVersion("1.1");
}
/**
* Constructs a new serializer. The serializer cannot be used without
* calling {@link #setOutputCharStream} or {@link #setOutputByteStream}
* first.
*/
public XML11Serializer( OutputFormat format ) {
super( format );
_format.setVersion("1.1");
}
/**
* Constructs a new serializer that writes to the specified writer
* using the specified output format. If <tt>format</tt> is null,
* will use a default output format.
*
* @param writer The writer to use
* @param format The output format to use, null for the default
*/
public XML11Serializer( Writer writer, OutputFormat format ) {
super( writer, format );
_format.setVersion("1.1");
}
/**
* Constructs a new serializer that writes to the specified output
* stream using the specified output format. If <tt>format</tt>
* is null, will use a default output format.
*
* @param output The output stream to use
* @param format The output format to use, null for the default
*/
public XML11Serializer( OutputStream output, OutputFormat format ) {
super( output, format != null ? format : new OutputFormat( Method.XML, null, false ) );
_format.setVersion("1.1");
}
//-----------------------------------------//
// SAX content handler serializing methods //
//-----------------------------------------//
public void characters( char[] chars, int start, int length )
throws SAXException
{
ElementState state;
try {
state = content();
// Check if text should be print as CDATA section or unescaped
// based on elements listed in the output format (the element
// state) or whether we are inside a CDATA section or entity.
if ( state.inCData || state.doCData ) {
int saveIndent;
// Print a CDATA section. The text is not escaped, but ']]>'
// appearing in the code must be identified and dealt with.
// The contents of a text node is considered space preserving.
if ( ! state.inCData ) {
_printer.printText( "<![CDATA[" );
state.inCData = true;
}
saveIndent = _printer.getNextIndent();
_printer.setNextIndent( 0 );
char ch;
final int end = start + length;
for ( int index = start; index < end; ++index ) {
ch = chars[index];
if ( ch == ']' && index + 2 < end &&
chars[ index + 1 ] == ']' && chars[ index + 2 ] == '>' ) {
_printer.printText("]]]]><![CDATA[>");
index +=2;
continue;
}
if (!XML11Char.isXML11Valid(ch)) {
// check if it is surrogate
if (++index < end) {
surrogates(ch, chars[index], true);
}
else {
fatalError("The character '"+ch+"' is an invalid XML character");
}
continue;
}
if ( _encodingInfo.isPrintable(ch) && XML11Char.isXML11ValidLiteral(ch)) {
_printer.printText(ch);
}
else {
// The character is not printable -- split CDATA section
_printer.printText("]]>&#x");
_printer.printText(Integer.toHexString(ch));
_printer.printText(";<![CDATA[");
}
}
_printer.setNextIndent( saveIndent );
}
else {
int saveIndent;
if ( state.preserveSpace ) {
// If preserving space then hold of indentation so no
// excessive spaces are printed at line breaks, escape
// the text content without replacing spaces and print
// the text breaking only at line breaks.
saveIndent = _printer.getNextIndent();
_printer.setNextIndent( 0 );
printText( chars, start, length, true, state.unescaped );
_printer.setNextIndent( saveIndent );
}
else {
printText( chars, start, length, false, state.unescaped );
}
}
}
catch ( IOException except ) {
throw new SAXException( except );
}
}
//
// overwrite printing functions to make sure serializer prints out valid XML
//
protected void printEscaped( String source ) throws IOException {
int length = source.length();
for ( int i = 0 ; i < length ; ++i ) {
int ch = source.charAt(i);
if (!XML11Char.isXML11Valid(ch)) {
if (++i <length) {
surrogates(ch, source.charAt(i), false);
}
else {
fatalError("The character '"+(char)ch+"' is an invalid XML character");
}
continue;
}
if (ch == '\n' || ch == '\r' || ch == '\t' || ch == 0x0085 || ch == 0x2028) {
printHex(ch);
}
else if (ch == '<') {
_printer.printText("<");
}
else if (ch == '&') {
_printer.printText("&");
}
else if (ch == '"') {
_printer.printText(""");
}
else if ((ch >= ' ' && _encodingInfo.isPrintable((char) ch))) {
_printer.printText((char) ch);
}
else {
printHex(ch);
}
}
}
protected final void printCDATAText(String text) throws IOException {
int length = text.length();
char ch;
for (int index = 0; index < length; ++index) {
ch = text.charAt(index);
if (ch == ']'
&& index + 2 < length
&& text.charAt(index + 1) == ']'
&& text.charAt(index + 2) == '>') { // check for ']]>'
if (fDOMErrorHandler != null){
// REVISIT: this means that if DOM Error handler is not registered we don't report any
// fatal errors and might serialize not wellformed document
if ((features & DOMSerializerImpl.SPLITCDATA) == 0
&& (features & DOMSerializerImpl.WELLFORMED) == 0) {
// issue fatal error
String msg =
DOMMessageFormatter.formatMessage(
DOMMessageFormatter.SERIALIZER_DOMAIN,
"EndingCDATA",
null);
modifyDOMError(
msg,
DOMError.SEVERITY_FATAL_ERROR,
null, fCurrentNode);
boolean continueProcess =
fDOMErrorHandler.handleError(fDOMError);
if (!continueProcess) {
throw new IOException();
}
} else {
// issue warning
String msg =
DOMMessageFormatter.formatMessage(
DOMMessageFormatter.SERIALIZER_DOMAIN,
"SplittingCDATA",
null);
modifyDOMError(
msg,
DOMError.SEVERITY_WARNING,
null, fCurrentNode);
fDOMErrorHandler.handleError(fDOMError);
}
}
// split CDATA section
_printer.printText("]]]]><![CDATA[>");
index += 2;
continue;
}
if (!XML11Char.isXML11Valid(ch)) {
// check if it is surrogate
if (++index < length) {
surrogates(ch, text.charAt(index), true);
}
else {
fatalError("The character '" + ch + "' is an invalid XML character");
}
continue;
}
if (_encodingInfo.isPrintable(ch)
&& XML11Char.isXML11ValidLiteral(ch)) {
_printer.printText(ch);
}
else {
// The character is not printable -- split CDATA section
_printer.printText("]]>&#x");
_printer.printText(Integer.toHexString(ch));
_printer.printText(";<![CDATA[");
}
}
}
// note that this "int" should, in all cases, be a char.
// REVISIT: make it a char...
protected final void printXMLChar( int ch ) throws IOException {
if (ch == '\r' || ch == 0x0085 || ch == 0x2028) {
printHex(ch);
}
else if ( ch == '<') {
_printer.printText("<");
}
else if (ch == '&') {
_printer.printText("&");
}
else if (ch == '>'){
// character sequence "]]>" can't appear in content, therefore
// we should escape '>'
_printer.printText(">");
}
else if ( _encodingInfo.isPrintable((char)ch) && XML11Char.isXML11ValidLiteral(ch)) {
_printer.printText((char)ch);
}
else {
printHex(ch);
}
}
protected final void surrogates(int high, int low, boolean inContent) throws IOException{
if (XMLChar.isHighSurrogate(high)) {
if (!XMLChar.isLowSurrogate(low)) {
//Invalid XML
fatalError("The character '"+(char)low+"' is an invalid XML character");
}
else {
int supplemental = XMLChar.supplemental((char)high, (char)low);
if (!XML11Char.isXML11Valid(supplemental)) {
//Invalid XML
fatalError("The character '"+(char)supplemental+"' is an invalid XML character");
}
else {
if (inContent && content().inCData) {
_printer.printText("]]>&#x");
_printer.printText(Integer.toHexString(supplemental));
_printer.printText(";<![CDATA[");
}
else {
printHex(supplemental);
}
}
}
}
else {
fatalError("The character '"+(char)high+"' is an invalid XML character");
}
}
protected void printText( String text, boolean preserveSpace, boolean unescaped )
throws IOException {
int index;
char ch;
int length = text.length();
if ( preserveSpace ) {
// Preserving spaces: the text must print exactly as it is,
// without breaking when spaces appear in the text and without
// consolidating spaces. If a line terminator is used, a line
// break will occur.
for ( index = 0 ; index < length ; ++index ) {
ch = text.charAt( index );
if (!XML11Char.isXML11Valid(ch)) {
// check if it is surrogate
if (++index <length) {
surrogates(ch, text.charAt(index), true);
} else {
fatalError("The character '"+ch+"' is an invalid XML character");
}
continue;
}
if ( unescaped && XML11Char.isXML11ValidLiteral(ch)) {
_printer.printText( ch );
}
else {
printXMLChar( ch );
}
}
}
else {
// Not preserving spaces: print one part at a time, and
// use spaces between parts to break them into different
// lines. Spaces at beginning of line will be stripped
// by printing mechanism. Line terminator is treated
// no different than other text part.
for ( index = 0 ; index < length ; ++index ) {
ch = text.charAt( index );
if (!XML11Char.isXML11Valid(ch)) {
// check if it is surrogate
if (++index <length) {
surrogates(ch, text.charAt(index), true);
} else {
fatalError("The character '"+ch+"' is an invalid XML character");
}
continue;
}
if ( unescaped && XML11Char.isXML11ValidLiteral(ch) ) {
_printer.printText( ch );
}
else {
printXMLChar( ch );
}
}
}
}
protected void printText( char[] chars, int start, int length,
boolean preserveSpace, boolean unescaped ) throws IOException {
if ( preserveSpace ) {
// Preserving spaces: the text must print exactly as it is,
// without breaking when spaces appear in the text and without
// consolidating spaces. If a line terminator is used, a line
// break will occur.
while ( length-- > 0 ) {
char ch = chars[start++];
if (!XML11Char.isXML11Valid(ch)) {
// check if it is surrogate
if ( length-- > 0) {
surrogates(ch, chars[start++], true);
} else {
fatalError("The character '"+ch+"' is an invalid XML character");
}
continue;
}
if ( unescaped && XML11Char.isXML11ValidLiteral(ch)) {
_printer.printText( ch );
}
else {
printXMLChar( ch );
}
}
}
else {
// Not preserving spaces: print one part at a time, and
// use spaces between parts to break them into different
// lines. Spaces at beginning of line will be stripped
// by printing mechanism. Line terminator is treated
// no different than other text part.
while ( length-- > 0 ) {
char ch = chars[start++];
if (!XML11Char.isXML11Valid(ch)) {
// check if it is surrogate
if ( length-- > 0) {
surrogates(ch, chars[start++], true);
} else {
fatalError("The character '"+ch+"' is an invalid XML character");
}
continue;
}
if ( unescaped && XML11Char.isXML11ValidLiteral(ch)) {
_printer.printText( ch );
}
else {
printXMLChar( ch );
}
}
}
}
public boolean reset() {
super.reset();
return true;
}
}
| gpl-2.0 |
squawk-mirror/squawk | ssl/src/com/sun/spot/security/implementation/RSASig.java | 9291 | /*
* Copyright 2006-2008 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER
*
* This code is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2
* only, as published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License version 2 for more details (a copy is
* included in the LICENSE file that accompanied this code).
*
* You should have received a copy of the GNU General Public License
* version 2 along with this work; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA
*
* Please contact Sun Microsystems, Inc., 16 Network Circle, Menlo
* Park, CA 94025 or visit www.sun.com if you need additional
* information or have any questions.
*/
package com.sun.spot.security.implementation;
import com.sun.midp.pki.Utils;
import com.sun.spot.security.GeneralSecurityException;
import com.sun.spot.security.InvalidKeyException;
import com.sun.spot.security.MessageDigest;
import com.sun.spot.security.NoSuchAlgorithmException;
import com.sun.spot.security.PrivateKey;
import com.sun.spot.security.PublicKey;
import com.sun.spot.security.SignatureException;
import com.sun.spotx.crypto.Cipher;
import com.sun.spotx.crypto.NoSuchPaddingException;
/**
 * Implements RSA signatures: the message digest is prefixed with the DER
 * prefix (algorithm OID) and then run through the RSA cipher (encrypt with
 * the private key to sign, decrypt with the public key to verify).
 */
final class RSASig {

    /** Current algorithm. */
    String alg;

    /** Message digest used to hash the data being signed or verified. */
    MessageDigest md = null;

    /** RSA cipher used to transform the padded digest. */
    Cipher c = null;

    /** Current key; {@code null} means the object is not initialized yet. */
    RSAKey k = null;

    /** DER prefix (signature-algorithm OID) prepended to the digest. */
    byte[] prefix;

    /**
     * Constructs an RSA signature object that uses the specified
     * signature algorithm.
     *
     * @param sigPrefix Prefix for the signature
     * @param messageDigest Message digest for the signature
     *
     * @exception NoSuchAlgorithmException if RSA is
     * not available in the caller's environment.
     */
    RSASig(byte[] sigPrefix, MessageDigest messageDigest)
            throws NoSuchAlgorithmException {
        prefix = sigPrefix;
        md = messageDigest;
        try {
            c = Cipher.getInstance("RSA");
        } catch (NoSuchPaddingException e) {
            // we used the default mode and padding; this should not happen
            throw new NoSuchAlgorithmException();
        }
    }

    /**
     * Gets the byte-length of the signature.
     *
     * @return the byte-length of the signature produced by this object,
     *         or 0 if no key has been set yet
     */
    public int getLength() {
        if (k == null) {
            return 0;
        }
        // the signature length equals the modulus length in bytes
        return k.getModulusLen();
    }

    /**
     * Initializes the <CODE>RSASig</CODE> object with the appropriate
     * <CODE>Key</CODE> for signature verification.
     *
     * @param theKey the key object to use for verification
     *
     * @exception InvalidKeyException if the key type is inconsistent
     * with the mode or signature implementation.
     */
    public void initVerify(PublicKey theKey) throws InvalidKeyException {
        if (!(theKey instanceof RSAPublicKey)) {
            throw new InvalidKeyException();
        }
        // verification decrypts the signature with the public key
        c.init(Cipher.DECRYPT_MODE, theKey);
        k = (RSAKey) theKey;
    }

    /**
     * Initializes the <CODE>RSASig</CODE> object with the appropriate
     * <CODE>Key</CODE> for signature creation.
     *
     * @param theKey the key object to use for signing
     *
     * @exception InvalidKeyException if the key type is inconsistent
     * with the mode or signature implementation.
     */
    public void initSign(PrivateKey theKey) throws InvalidKeyException {
        if (!(theKey instanceof RSAPrivateKey)) {
            throw new InvalidKeyException();
        }
        // signing encrypts the padded digest with the private key
        c.init(Cipher.ENCRYPT_MODE, theKey);
        k = (RSAKey) theKey;
    }

    /**
     * Accumulates a signature of the input data. When this method is used,
     * temporary storage of intermediate results is required. This method
     * should only be used if all the input data required for the signature
     * is not available in one byte array. The sign() or verify() method is
     * recommended whenever possible.
     *
     * @param inBuf the input buffer of data to be signed
     * @param inOff starting offset within the input buffer for data to
     * be signed
     * @param inLen the byte length of data to be signed
     *
     * @exception SignatureException if this signature object is not
     * initialized properly.
     */
    public void update(byte[] inBuf, int inOff, int inLen)
            throws SignatureException {
        if (k == null) {
            throw new SignatureException("Illegal State");
        }
        md.update(inBuf, inOff, inLen);
    }

    /**
     * Generates the signature of all/last input data. A call to this
     * method also resets this signature object to the state it was in
     * when previously initialized via a call to init(). That is, the
     * object is reset and available to sign another message.
     *
     * @param sigBuf the output buffer to store signature data
     * @param sigOff starting offset within the output buffer at which
     * to begin signature data
     * @param sigLen max byte length of signature data
     *
     * @return number of bytes of signature output in sigBuf
     *
     * @exception SignatureException if this signature object is not
     * initialized properly, or len is less than the actual signature
     */
    public int sign(byte[] sigBuf, int sigOff, int sigLen)
            throws SignatureException {
        if (k == null || !(k instanceof RSAPrivateKey)) {
            throw new SignatureException("Illegal State");
        }
        if (sigLen < k.getModulusLen()) {
            throw new SignatureException("Buffer too short");
        }
        byte[] data = new byte[prefix.length + md.getDigestLength()];
        // Include the OID of signing algorithm in padding
        System.arraycopy(prefix, 0, data, 0, prefix.length);
        try {
            md.digest(data, prefix.length, md.getDigestLength());
            /*
             * we can cast to a short because a private key encryption is
             * is less than the key length, which is a short.
             */
            return c.doFinal(data, 0, data.length, sigBuf, sigOff);
        } catch (GeneralSecurityException ce) {
            throw new SignatureException(ce.getMessage());
        }
    }

    /**
     * Verifies the signature of all/last input data against the passed
     * in signature. A call to this method also resets this signature
     * object to the state it was in when previously initialized via a
     * call to init(). That is, the object is reset and available to
     * verify another message.
     *
     * @param sigBuf the input buffer containing signature data
     * @param sigOff starting offset within the sigBuf where signature
     * data begins
     * @param sigLen byte length of signature data
     *
     * @return true if signature verifies, false otherwise
     *
     * @exception SignatureException if this signature object is not
     * initialized properly, or the passed-in signature is improperly
     * encoded or of the wrong type, etc.
     */
    public boolean verify(byte[] sigBuf, int sigOff, int sigLen)
            throws SignatureException {
        if (k == null || !(k instanceof RSAPublicKey)) {
            throw new SignatureException("Illegal State");
        }
        byte[] res = null;
        int val;
        byte[] digest = new byte[md.getDigestLength()];
        try {
            md.digest(digest, 0, digest.length);
            res = new byte[k.getModulusLen()];
            val = c.doFinal(sigBuf, sigOff, sigLen, res, 0);
        } catch (IllegalArgumentException iae) {
            throw new SignatureException(iae.getMessage());
        } catch (GeneralSecurityException e) {
            // decryption failure means the signature cannot be valid
            System.out.println("RSASig.verify() caught " + e +
                               " returning false");
            return false;
        }
        // recovered data must be exactly prefix + digest
        int size = prefix.length + md.getDigestLength();
        if (val != size) {
            return false;
        }
        // Match the prefix corresponding to the signature algorithm
        for (int i = 0; i < prefix.length; i++) {
            if (res[i] != prefix[i]) {
                return false;
            }
        }
        // Then match the digest itself
        for (int i = prefix.length; i < size; i++) {
            if (res[i] != digest[i - prefix.length]) {
                return false;
            }
        }
        return true;
    }
}
| gpl-2.0 |
sics-sse/moped | squawk/cldc-native-declarations/src/com/sun/squawk/platform/posix/natives/NetDB.java | 4543 | /*
* Copyright 2004-2008 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER
*
* This code is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2
* only, as published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License version 2 for more details (a copy is
* included in the LICENSE file that accompanied this code).
*
* You should have received a copy of the GNU General Public License
* version 2 along with this work; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA
*
* Please contact Sun Microsystems, Inc., 16 Network Circle, Menlo
* Park, CA 94025 or visit www.sun.com if you need additional
* information or have any questions.
*/
package com.sun.squawk.platform.posix.natives;
import com.sun.cldc.jna.*;
/**
 * Java wrapper around {@code #include <netdb.h>}: exposes
 * {@code gethostbyname()} and {@code h_errno} through the SPOT native
 * library-loading layer.
 */
@Includes("<netdb.h>")
public interface NetDB extends Library {
NetDB INSTANCE = (NetDB)
Native.loadLibrary("RTLD",
NetDB.class);
/**
* The gethostbyname() function returns a HostEnt structure describing an internet host referenced by name.
*
* @param name the host name
* @return the address of struct hostent, or null on error
*/
hostent gethostbyname(String name);
/** Authoritative Answer Host not found.
* @see #h_errno() */
int HOST_NOT_FOUND = IMPORT;
/** Non-Authoritative Host not found, or SERVERFAIL.
* @see #h_errno() */
int TRY_AGAIN = IMPORT;
/** Non recoverable errors, FORMERR, REFUSED, NOTIMP.
* @see #h_errno() */
int NO_RECOVERY = IMPORT;
/** Valid name, no data record of requested type.
* @see #h_errno() */
int NO_DATA = IMPORT;
/** Return error code for last call to gethostbyname() or gethostbyaddr().
* @return one of the error codes defined in this class.
*/
@GlobalVar
int h_errno();
/** Java mirror of C struct hostent:
struct hostent {
char *h_name; official name of host
char **h_aliases; alias list
int h_addrtype; host address type
int h_length; length of address
char **h_addr_list; list of addresses from name server
};
#define h_addr h_addr_list[0] address, for backward compatibility
NOTE(review): the field offsets used in read() (8, 12, 16) only match this
struct layout when pointers are 4 bytes wide (32-bit ABI) — confirm for the
target platform.
*/
public static class hostent extends Structure {
public String h_name; /* official name of host */
public int h_addrtype; /* host address type */
public int h_length; /* length of address */
public int[] h_addr_list; /* list of addresses from name server, as 4-byte values */
// Populates the Java fields from the native struct memory.
public void read() {
final int MAX_ADDRS = 16;
Pointer p = getPointer();
// offset 0: h_name (char*); h_aliases at offset 4 is skipped
h_name = p.getPointer(0, 1024).getString(0);
h_addrtype = p.getInt(8);
h_length = p.getInt(12);
if (h_length != 4) {
// only 4-byte (IPv4) addresses are handled below
System.err.println("WARNING: Unexpected h_length value");
}
Pointer adrlist = p.getPointer(16, MAX_ADDRS * 4);
// NOTE(review): the System.out prints below look like leftover debug
// output — consider removing or routing through a logger.
System.out.println("in read(). Buffer: " + p);
System.out.println(" name: " + h_name);
System.out.println(" h_addrtype: " + h_addrtype);
System.out.println(" adrlist: " + adrlist);
// h_addr_list is NULL-terminated; collect up to MAX_ADDRS entries
Pointer[] addrPtrs = new Pointer[MAX_ADDRS];
int count = 0;
for (int i = 0; i < MAX_ADDRS; i++) {
Pointer addrPtr = adrlist.getPointer(i * 4, h_length);
if (addrPtr == null) {
break;
}
addrPtrs[i] = addrPtr;
count++;
}
System.out.println(" adrlist count: " + count);
// dereference each entry into a 4-byte address value
h_addr_list = new int[count];
for (int i = 0; i < count; i++) {
int addr = addrPtrs[i].getInt(0);
System.err.println(" addr " + addr);
h_addr_list[i] = addr;
}
}
// Struct is read-only from Java; nothing is written back to native memory.
public void write() {
}
} /* HostEnt */
}
| gpl-2.0 |
UnlimitedFreedom/UF-WorldEdit | worldedit-core/src/main/java/com/sk89q/worldedit/world/registry/SimpleState.java | 1950 | /*
* WorldEdit, a Minecraft world manipulation toolkit
* Copyright (C) sk89q <http://www.sk89q.com>
* Copyright (C) WorldEdit team and contributors
*
* This program is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.sk89q.worldedit.world.registry;
import com.sk89q.worldedit.blocks.BaseBlock;
import javax.annotation.Nullable;
import java.util.Collections;
import java.util.Map;
class SimpleState implements State {

    private Byte dataMask;
    private Map<String, SimpleStateValue> values;

    /** Read-only view of every value this state can take, keyed by value name. */
    @Override
    public Map<String, SimpleStateValue> valueMap() {
        return Collections.unmodifiableMap(values);
    }

    /**
     * Finds the first value whose pattern is set on the given block,
     * or {@code null} when none of them match.
     */
    @Nullable
    @Override
    public StateValue getValue(BaseBlock block) {
        return values.values().stream()
                .filter(candidate -> candidate.isSet(block))
                .findFirst()
                .orElse(null);
    }

    /** Data mask for this state, defaulting to the low nibble (0xF) when unspecified. */
    byte getDataMask() {
        if (dataMask == null) {
            return 0xF;
        }
        return dataMask;
    }

    /** @return true if at least one value carries a direction. */
    @Override
    public boolean hasDirection() {
        return values.values().stream()
                .anyMatch(candidate -> candidate.getDirection() != null);
    }

    /** Re-links each deserialized value back to its owning state. */
    void postDeserialization() {
        values.values().forEach(value -> value.setState(this));
    }

}
| gpl-3.0 |
ingokegel/intellij-community | platform/indexing-api/src/com/intellij/util/indexing/SingleEntryIndexer.java | 1452 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.indexing;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.testFramework.LightVirtualFile;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.Map;
/**
 * Simplifies API and ensures that data key will always be equal to virtual file id
 *
 * @author Eugene Zhuravlev
 */
@ApiStatus.OverrideOnly
public abstract class SingleEntryIndexer<V> implements DataIndexer<Integer, V, FileContent>{
  private final boolean myAcceptNullValues;

  protected SingleEntryIndexer(boolean acceptNullValues) {
    myAcceptNullValues = acceptNullValues;
  }

  /**
   * Maps the file content to a single-entry map keyed by the file's id
   * (or -1 for an in-memory {@link LightVirtualFile}); an empty map is
   * returned when the computed value is null and nulls are not accepted.
   */
  @Override
  @NotNull
  public final Map<Integer, V> map(@NotNull FileContent inputData) {
    V value = computeValue(inputData);
    if (value != null || myAcceptNullValues) {
      VirtualFile file = inputData.getFile();
      int key;
      if (file instanceof LightVirtualFile) {
        key = -1;
      }
      else {
        key = Math.abs(FileBasedIndex.getFileId(file));
      }
      return Collections.singletonMap(key, value);
    }
    return Collections.emptyMap();
  }

  /** Computes the value to associate with the file, or null when there is nothing to index. */
  protected abstract @Nullable V computeValue(@NotNull FileContent inputData);

  @ApiStatus.Internal
  public boolean isAcceptNullValues() {
    return myAcceptNullValues;
  }
}
| apache-2.0 |
signed/intellij-community | platform/lang-impl/src/com/intellij/codeInsight/problems/WolfTheProblemSolverImpl.java | 17244 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.problems;
import com.intellij.codeInsight.daemon.impl.*;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightInfoHolder;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.FileStatusListener;
import com.intellij.openapi.vcs.FileStatusManager;
import com.intellij.openapi.vfs.*;
import com.intellij.problems.Problem;
import com.intellij.problems.WolfTheProblemSolver;
import com.intellij.psi.*;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Project-level registry of files that currently contain highlighting problems
 * ("red waves"). Problems are recorded via {@link #reportProblems} /
 * {@link #weHaveGotProblems}, re-verified lazily by re-running highlighting in
 * {@link #startCheckingIfVincentSolvedProblemsYet}, and broadcast to registered
 * {@link ProblemListener}s as they appear, change and disappear.
 *
 * @author cdr
 */
public class WolfTheProblemSolverImpl extends WolfTheProblemSolver {
  private final Map<VirtualFile, ProblemFileInfo> myProblems = new THashMap<>(); // guarded by myProblems
  // Files scheduled for background re-highlighting to check whether their problems are gone; guarded by myCheckingQueue.
  private final Collection<VirtualFile> myCheckingQueue = new THashSet<>(10);
  private final Project myProject;
  private final List<ProblemListener> myProblemListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  // Conditions deciding which files are eligible for problem tracking; lazily extended from FILTER_EP_NAME (see isToBeHighlighted).
  private final List<Condition<VirtualFile>> myFilters = ContainerUtil.createLockFreeCopyOnWriteList();
  private boolean myFiltersLoaded = false;
  // Multicaster that forwards each event to every registered listener.
  private final ProblemListener fireProblemListeners = new ProblemListener() {
    @Override
    public void problemsAppeared(@NotNull VirtualFile file) {
      for (final ProblemListener problemListener : myProblemListeners) {
        problemListener.problemsAppeared(file);
      }
    }
    @Override
    public void problemsChanged(@NotNull VirtualFile file) {
      for (final ProblemListener problemListener : myProblemListeners) {
        problemListener.problemsChanged(file);
      }
    }
    @Override
    public void problemsDisappeared(@NotNull VirtualFile file) {
      for (final ProblemListener problemListener : myProblemListeners) {
        problemListener.problemsDisappeared(file);
      }
    }
  };
  // Removes the file from both the problem map and the checking queue and,
  // if it was actually tracked, fires problemsDisappeared outside the locks.
  private void doRemove(@NotNull VirtualFile problemFile) {
    ProblemFileInfo old;
    synchronized (myProblems) {
      old = myProblems.remove(problemFile);
    }
    synchronized (myCheckingQueue) {
      myCheckingQueue.remove(problemFile);
    }
    if (old != null) {
      // firing outside lock
      fireProblemListeners.problemsDisappeared(problemFile);
    }
  }
  // Per-file state: the collected problems plus whether any of them is a syntax error.
  private static class ProblemFileInfo {
    private final Collection<Problem> problems = new THashSet<>();
    private boolean hasSyntaxErrors;
    public boolean equals(@Nullable final Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      final ProblemFileInfo that = (ProblemFileInfo)o;
      return hasSyntaxErrors == that.hasSyntaxErrors && problems.equals(that.problems);
    }
    public int hashCode() {
      int result = problems.hashCode();
      result = 31 * result + (hasSyntaxErrors ? 1 : 0);
      return result;
    }
  }
  // Subscribes to PSI changes (to clear the stale syntax-error flag), VFS
  // deletions/moves (to drop tracked files) and VCS file-status changes
  // (to re-evaluate which files are still eligible for tracking).
  public WolfTheProblemSolverImpl(@NotNull Project project,
                                  @NotNull PsiManager psiManager,
                                  @NotNull VirtualFileManager virtualFileManager) {
    myProject = project;
    PsiTreeChangeListener changeListener = new PsiTreeChangeAdapter() {
      @Override
      public void childAdded(@NotNull PsiTreeChangeEvent event) {
        childrenChanged(event);
      }
      @Override
      public void childRemoved(@NotNull PsiTreeChangeEvent event) {
        childrenChanged(event);
      }
      @Override
      public void childReplaced(@NotNull PsiTreeChangeEvent event) {
        childrenChanged(event);
      }
      @Override
      public void childMoved(@NotNull PsiTreeChangeEvent event) {
        childrenChanged(event);
      }
      @Override
      public void propertyChanged(@NotNull PsiTreeChangeEvent event) {
        childrenChanged(event);
      }
      @Override
      public void childrenChanged(@NotNull PsiTreeChangeEvent event) {
        clearSyntaxErrorFlag(event);
      }
    };
    psiManager.addPsiTreeChangeListener(changeListener);
    VirtualFileListener virtualFileListener = new VirtualFileListener() {
      @Override
      public void fileDeleted(@NotNull final VirtualFileEvent event) {
        onDeleted(event.getFile());
      }
      @Override
      public void fileMoved(@NotNull final VirtualFileMoveEvent event) {
        onDeleted(event.getFile());
      }
      private void onDeleted(@NotNull final VirtualFile file) {
        if (file.isDirectory()) {
          // Children of the removed directory become invalid; sweep the whole map.
          clearInvalidFiles();
        }
        else {
          doRemove(file);
        }
      }
    };
    virtualFileManager.addVirtualFileListener(virtualFileListener, myProject);
    FileStatusManager fileStatusManager = FileStatusManager.getInstance(myProject);
    if (fileStatusManager != null) { //tests?
      fileStatusManager.addFileStatusListener(new FileStatusListener() {
        @Override
        public void fileStatusesChanged() {
          clearInvalidFiles();
        }
        @Override
        public void fileStatusChanged(@NotNull VirtualFile virtualFile) {
          fileStatusesChanged();
        }
      });
    }
  }
  // Drops every tracked file that became invalid or is no longer eligible per the filters.
  private void clearInvalidFiles() {
    VirtualFile[] files;
    synchronized (myProblems) {
      files = VfsUtilCore.toVirtualFileArray(myProblems.keySet());
    }
    for (VirtualFile problemFile : files) {
      if (!problemFile.isValid() || !isToBeHighlighted(problemFile)) {
        doRemove(problemFile);
      }
    }
  }
  // Any PSI change may have fixed the syntax error, so reset the flag and let
  // the next check re-discover it if it is still there.
  private void clearSyntaxErrorFlag(@NotNull final PsiTreeChangeEvent event) {
    PsiFile file = event.getFile();
    if (file == null) return;
    VirtualFile virtualFile = file.getVirtualFile();
    if (virtualFile == null) return;
    synchronized (myProblems) {
      ProblemFileInfo info = myProblems.get(virtualFile);
      if (info != null) {
        info.hasSyntaxErrors = false;
      }
    }
  }
  // Re-highlights every queued file and un-tracks the ones whose problems are gone,
  // reporting progress proportionally to the files' lengths.
  public void startCheckingIfVincentSolvedProblemsYet(@NotNull ProgressIndicator progress,
                                                      @NotNull ProgressableTextEditorHighlightingPass pass)
    throws ProcessCanceledException {
    if (!myProject.isOpen()) return;
    List<VirtualFile> files;
    synchronized (myCheckingQueue) {
      files = new ArrayList<>(myCheckingQueue);
    }
    // (rough approx number of PSI elements = file length/2) * (visitor count = 2 usually)
    long progressLimit = files.stream().filter(VirtualFile::isValid).mapToLong(VirtualFile::getLength).sum();
    pass.setProgressLimit(progressLimit);
    for (final VirtualFile virtualFile : files) {
      progress.checkCanceled();
      if (virtualFile == null) break;
      if (!virtualFile.isValid() || orderVincentToCleanTheCar(virtualFile, progress)) {
        doRemove(virtualFile);
      }
      if (virtualFile.isValid()) {
        pass.advanceProgress(virtualFile.getLength());
      }
    }
  }
  // returns true if car has been cleaned, i.e. the file no longer has problems
  // (or is no longer eligible) and should be un-tracked by the caller.
  private boolean orderVincentToCleanTheCar(@NotNull final VirtualFile file,
                                            @NotNull final ProgressIndicator progressIndicator) throws ProcessCanceledException {
    if (!isToBeHighlighted(file)) {
      clearProblems(file);
      return true; // file is going to be red waved no more
    }
    if (hasSyntaxErrors(file)) {
      // optimization: it's no use anyway to try clean the file with syntax errors, only changing the file itself can help
      return false;
    }
    if (myProject.isDisposed()) return false;
    if (willBeHighlightedAnyway(file)) return false;
    final PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
    if (psiFile == null) return false;
    final Document document = FileDocumentManager.getInstance().getDocument(file);
    if (document == null) return false;
    final AtomicReference<HighlightInfo> error = new AtomicReference<>();
    final AtomicBoolean hasErrorElement = new AtomicBoolean();
    try {
      // Run a full highlighting pass that aborts (via PCE) on the first ERROR found:
      // one error is enough to know the file is still problematic.
      GeneralHighlightingPass pass = new GeneralHighlightingPass(myProject, psiFile, document, 0, document.getTextLength(),
                                                                false, new ProperTextRange(0, document.getTextLength()), null, HighlightInfoProcessor.getEmpty()) {
        @Override
        protected HighlightInfoHolder createInfoHolder(@NotNull final PsiFile file) {
          return new HighlightInfoHolder(file) {
            @Override
            public boolean add(@Nullable HighlightInfo info) {
              if (info != null && info.getSeverity() == HighlightSeverity.ERROR) {
                error.set(info);
                hasErrorElement.set(myHasErrorElement);
                throw new ProcessCanceledException();
              }
              return super.add(info);
            }
          };
        }
      };
      pass.collectInformation(progressIndicator);
    }
    catch (ProcessCanceledException e) {
      if (error.get() != null) {
        // Found an error: refresh the stored problems for this file.
        ProblemImpl problem = new ProblemImpl(file, error.get(), hasErrorElement.get());
        reportProblems(file, Collections.singleton(problem));
      }
      return false;
    }
    // Pass completed without hitting an ERROR: the file is clean now.
    clearProblems(file);
    return true;
  }
  @Override
  public boolean hasSyntaxErrors(final VirtualFile file) {
    synchronized (myProblems) {
      ProblemFileInfo info = myProblems.get(file);
      return info != null && info.hasSyntaxErrors;
    }
  }
  private boolean willBeHighlightedAnyway(final VirtualFile file) {
    // opened in some editor, and hence will be highlighted automatically sometime later
    FileEditor[] selectedEditors = FileEditorManager.getInstance(myProject).getSelectedEditors();
    for (FileEditor editor : selectedEditors) {
      if (!(editor instanceof TextEditor)) continue;
      Document document = ((TextEditor)editor).getEditor().getDocument();
      PsiFile psiFile = PsiDocumentManager.getInstance(myProject).getCachedPsiFile(document);
      if (psiFile == null) continue;
      if (Comparing.equal(file, psiFile.getVirtualFile())) return true;
    }
    return false;
  }
  @Override
  public boolean hasProblemFilesBeneath(@NotNull Condition<VirtualFile> condition) {
    if (!myProject.isOpen()) return false;
    synchronized (myProblems) {
      if (!myProblems.isEmpty()) {
        for (VirtualFile problemFile : myProblems.keySet()) {
          if (problemFile.isValid() && condition.value(problemFile)) return true;
        }
      }
      return false;
    }
  }
  @Override
  public boolean hasProblemFilesBeneath(@NotNull final Module scope) {
    return hasProblemFilesBeneath(virtualFile -> ModuleUtilCore.moduleContainsFile(scope, virtualFile, false));
  }
  @Override
  public void addProblemListener(@NotNull ProblemListener listener) {
    myProblemListeners.add(listener);
  }
  @Override
  public void addProblemListener(@NotNull final ProblemListener listener, @NotNull Disposable parentDisposable) {
    addProblemListener(listener);
    Disposer.register(parentDisposable, new Disposable() {
      @Override
      public void dispose() {
        removeProblemListener(listener);
      }
    });
  }
  @Override
  public void removeProblemListener(@NotNull ProblemListener listener) {
    myProblemListeners.remove(listener);
  }
  @Override
  public void registerFileHighlightFilter(@NotNull final Condition<VirtualFile> filter, @NotNull Disposable parentDisposable) {
    myFilters.add(filter);
    Disposer.register(parentDisposable, new Disposable() {
      @Override
      public void dispose() {
        myFilters.remove(filter);
      }
    });
  }
  @Override
  public void queue(VirtualFile suspiciousFile) {
    if (!isToBeHighlighted(suspiciousFile)) return;
    doQueue(suspiciousFile);
  }
  private void doQueue(@NotNull VirtualFile suspiciousFile) {
    synchronized (myCheckingQueue) {
      myCheckingQueue.add(suspiciousFile);
    }
  }
  @Override
  public boolean isProblemFile(VirtualFile virtualFile) {
    synchronized (myProblems) {
      return myProblems.containsKey(virtualFile);
    }
  }
  // A file is tracked only if at least one registered filter accepts it.
  // Extension-point filters are loaded lazily on first use.
  private boolean isToBeHighlighted(@Nullable VirtualFile virtualFile) {
    if (virtualFile == null) return false;
    synchronized (myFilters) {
      if (!myFiltersLoaded) {
        myFiltersLoaded = true;
        myFilters.addAll(Arrays.asList(Extensions.getExtensions(FILTER_EP_NAME, myProject)));
      }
    }
    for (final Condition<VirtualFile> filter : myFilters) {
      ProgressManager.checkCanceled();
      if (filter.value(virtualFile)) {
        return true;
      }
    }
    return false;
  }
  @Override
  public void weHaveGotProblems(@NotNull final VirtualFile virtualFile, @NotNull List<Problem> problems) {
    if (problems.isEmpty()) return;
    if (!isToBeHighlighted(virtualFile)) return;
    weHaveGotNonIgnorableProblems(virtualFile, problems);
  }
  // Adds problems without consulting the highlight filters; fires
  // problemsAppeared only when the file was not tracked before.
  @Override
  public void weHaveGotNonIgnorableProblems(@NotNull VirtualFile virtualFile, @NotNull List<Problem> problems) {
    if (problems.isEmpty()) return;
    boolean fireListener = false;
    synchronized (myProblems) {
      ProblemFileInfo storedProblems = myProblems.get(virtualFile);
      if (storedProblems == null) {
        storedProblems = new ProblemFileInfo();
        myProblems.put(virtualFile, storedProblems);
        fireListener = true;
      }
      storedProblems.problems.addAll(problems);
    }
    doQueue(virtualFile);
    if (fireListener) {
      fireProblemListeners.problemsAppeared(virtualFile);
    }
  }
  @Override
  public void clearProblems(@NotNull VirtualFile virtualFile) {
    doRemove(virtualFile);
  }
  // Builds a Problem for an external error report (file + 1-based line/column + message lines);
  // returns null for directories and binary files.
  @Override
  public Problem convertToProblem(@Nullable final VirtualFile virtualFile,
                                  final int line,
                                  final int column,
                                  @NotNull final String[] message) {
    if (virtualFile == null || virtualFile.isDirectory() || virtualFile.getFileType().isBinary()) return null;
    HighlightInfo info = ReadAction.compute(() -> {
      TextRange textRange = getTextRange(virtualFile, line, column);
      String description = StringUtil.join(message, "\n");
      return HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(textRange).descriptionAndTooltip(description).create();
    });
    if (info == null) return null;
    return new ProblemImpl(virtualFile, info, false);
  }
  // Replaces the file's stored problems wholesale and fires appeared/changed accordingly.
  @Override
  public void reportProblems(@NotNull final VirtualFile file, @NotNull Collection<Problem> problems) {
    if (problems.isEmpty()) {
      clearProblems(file);
      return;
    }
    if (!isToBeHighlighted(file)) return;
    boolean hasProblemsBefore;
    boolean fireChanged;
    synchronized (myProblems) {
      final ProblemFileInfo oldInfo = myProblems.remove(file);
      hasProblemsBefore = oldInfo != null;
      ProblemFileInfo newInfo = new ProblemFileInfo();
      myProblems.put(file, newInfo);
      for (Problem problem : problems) {
        newInfo.problems.add(problem);
        newInfo.hasSyntaxErrors |= ((ProblemImpl)problem).isSyntaxOnly();
      }
      fireChanged = hasProblemsBefore && !oldInfo.equals(newInfo);
    }
    doQueue(file);
    if (!hasProblemsBefore) {
      fireProblemListeners.problemsAppeared(file);
    }
    else if (fireChanged) {
      fireProblemListeners.problemsChanged(file);
    }
  }
  // Builds a zero-length range at the given 1-based line/column (clamped to the document).
  // NOTE(review): FileDocumentManager.getDocument() can return null (e.g. for files with
  // no associated document) — looks like this would NPE; confirm callers pre-filter.
  @NotNull
  private static TextRange getTextRange(@NotNull final VirtualFile virtualFile, int line, final int column) {
    Document document = FileDocumentManager.getInstance().getDocument(virtualFile);
    if (line > document.getLineCount()) line = document.getLineCount();
    line = line <= 0 ? 0 : line - 1;
    int offset = document.getLineStartOffset(line) + (column <= 0 ? 0 : column - 1);
    return new TextRange(offset, offset);
  }
}
| apache-2.0 |
mcgilman/nifi | nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/SiteToSiteResource.java | 14751 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.web.api;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.Authorization;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.nifi.authorization.Authorizer;
import org.apache.nifi.authorization.RequestAction;
import org.apache.nifi.authorization.resource.Authorizable;
import org.apache.nifi.authorization.user.NiFiUserUtils;
import org.apache.nifi.cluster.coordination.ClusterCoordinator;
import org.apache.nifi.cluster.coordination.node.NodeWorkload;
import org.apache.nifi.cluster.protocol.NodeIdentifier;
import org.apache.nifi.remote.HttpRemoteSiteListener;
import org.apache.nifi.remote.PeerDescription;
import org.apache.nifi.remote.PeerDescriptionModifier;
import org.apache.nifi.remote.VersionNegotiator;
import org.apache.nifi.remote.client.http.TransportProtocolVersionNegotiator;
import org.apache.nifi.remote.exception.BadRequestException;
import org.apache.nifi.remote.protocol.SiteToSiteTransportProtocol;
import org.apache.nifi.remote.protocol.http.HttpHeaders;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.web.NiFiServiceFacade;
import org.apache.nifi.web.api.dto.ControllerDTO;
import org.apache.nifi.web.api.dto.remote.PeerDTO;
import org.apache.nifi.web.api.entity.ControllerEntity;
import org.apache.nifi.web.api.entity.PeersEntity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* RESTful endpoint for managing a SiteToSite connection.
*/
@Path("/site-to-site")
@Api(
value = "/site-to-site",
description = "Provide access to site to site with this NiFi"
)
public class SiteToSiteResource extends ApplicationResource {
private static final Logger logger = LoggerFactory.getLogger(SiteToSiteResource.class);
private NiFiServiceFacade serviceFacade;
private ClusterCoordinator clusterCoordinator;
private Authorizer authorizer;
private final ResponseCreator responseCreator = new ResponseCreator();
private final VersionNegotiator transportProtocolVersionNegotiator = new TransportProtocolVersionNegotiator(1);
private final HttpRemoteSiteListener transactionManager;
private final PeerDescriptionModifier peerDescriptionModifier;
public SiteToSiteResource(final NiFiProperties nifiProperties) {
transactionManager = HttpRemoteSiteListener.getInstance(nifiProperties);
peerDescriptionModifier = new PeerDescriptionModifier(nifiProperties);
}
/**
* Authorizes access to Site To Site details.
* <p>
* Note: Protected for testing purposes
*/
protected void authorizeSiteToSite() {
serviceFacade.authorizeAccess(lookup -> {
final Authorizable siteToSite = lookup.getSiteToSite();
siteToSite.authorize(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser());
});
}
/**
* Returns the details of this NiFi.
*
* @return A controllerEntity.
*/
@GET
@Consumes(MediaType.WILDCARD)
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(
value = "Returns the details about this NiFi necessary to communicate via site to site",
response = ControllerEntity.class,
authorizations = {
@Authorization(value = "Read - /site-to-site")
}
)
@ApiResponses(
value = {
@ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
@ApiResponse(code = 401, message = "Client could not be authenticated."),
@ApiResponse(code = 403, message = "Client is not authorized to make this request."),
@ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
}
)
public Response getSiteToSiteDetails(@Context HttpServletRequest req) {
authorizeSiteToSite();
// get the controller dto
final ControllerDTO controller = serviceFacade.getSiteToSiteDetails();
// Alter s2s port.
final boolean modificationNeededRaw = peerDescriptionModifier.isModificationNeeded(SiteToSiteTransportProtocol.RAW);
final boolean modificationNeededHttp = peerDescriptionModifier.isModificationNeeded(SiteToSiteTransportProtocol.HTTP);
if (modificationNeededRaw || modificationNeededHttp) {
final PeerDescription source = getSourcePeerDescription(req);
final Boolean isSiteToSiteSecure = controller.isSiteToSiteSecure();
final String siteToSiteHostname = getSiteToSiteHostname(req);
final Map<String, String> httpHeaders = getHttpHeaders(req);
if (modificationNeededRaw) {
final PeerDescription rawTarget = new PeerDescription(siteToSiteHostname, controller.getRemoteSiteListeningPort(), isSiteToSiteSecure);
final PeerDescription modifiedRawTarget = peerDescriptionModifier.modify(source, rawTarget,
SiteToSiteTransportProtocol.RAW, PeerDescriptionModifier.RequestType.SiteToSiteDetail, new HashMap<>(httpHeaders));
controller.setRemoteSiteListeningPort(modifiedRawTarget.getPort());
}
if (modificationNeededHttp) {
final PeerDescription httpTarget = new PeerDescription(siteToSiteHostname, controller.getRemoteSiteHttpListeningPort(), isSiteToSiteSecure);
final PeerDescription modifiedHttpTarget = peerDescriptionModifier.modify(source, httpTarget,
SiteToSiteTransportProtocol.HTTP, PeerDescriptionModifier.RequestType.SiteToSiteDetail, new HashMap<>(httpHeaders));
controller.setRemoteSiteHttpListeningPort(modifiedHttpTarget.getPort());
if (!controller.isSiteToSiteSecure() && modifiedHttpTarget.isSecure()) {
// In order to enable TLS terminate at the reverse proxy server, even if NiFi itself is not secured, introduce the endpoint as secure.
controller.setSiteToSiteSecure(true);
}
}
}
// build the response entity
final ControllerEntity entity = new ControllerEntity();
entity.setController(controller);
if (isEmpty(req.getHeader(HttpHeaders.PROTOCOL_VERSION))) {
// This indicates the client uses older NiFi version,
// which strictly read JSON properties and fail with unknown properties.
// Convert result entity so that old version clients can understand.
logger.debug("Converting result to provide backward compatibility...");
controller.setRemoteSiteHttpListeningPort(null);
}
// generate the response
return noCache(Response.ok(entity)).build();
}
private PeerDescription getSourcePeerDescription(@Context HttpServletRequest req) {
return new PeerDescription(req.getRemoteHost(), req.getRemotePort(), req.isSecure());
}
private Map<String, String> getHttpHeaders(@Context HttpServletRequest req) {
final Map<String, String> headers = new HashMap<>();
final Enumeration<String> headerNames = req.getHeaderNames();
while (headerNames.hasMoreElements()) {
final String name = headerNames.nextElement();
headers.put(name, req.getHeader(name));
}
return headers;
}
/**
* Returns the available Peers and its status of this NiFi.
*
* @return A peersEntity.
*/
@GET
@Path("/peers")
@Consumes(MediaType.WILDCARD)
@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
@ApiOperation(
value = "Returns the available Peers and its status of this NiFi",
response = PeersEntity.class,
authorizations = {
@Authorization(value = "Read - /site-to-site")
}
)
@ApiResponses(
value = {
@ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."),
@ApiResponse(code = 401, message = "Client could not be authenticated."),
@ApiResponse(code = 403, message = "Client is not authorized to make this request."),
@ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.")
}
)
public Response getPeers(@Context HttpServletRequest req) {
authorizeSiteToSite();
if (!properties.isSiteToSiteHttpEnabled()) {
return responseCreator.httpSiteToSiteIsNotEnabledResponse();
}
final Integer transportProtocolVersion;
try {
transportProtocolVersion = negotiateTransportProtocolVersion(req, transportProtocolVersionNegotiator);
} catch (BadRequestException e) {
return responseCreator.badRequestResponse(e);
}
final List<PeerDTO> peers = new ArrayList<>();
final PeerDescription source = getSourcePeerDescription(req);
final boolean modificationNeeded = peerDescriptionModifier.isModificationNeeded(SiteToSiteTransportProtocol.HTTP);
final Map<String, String> headers = modificationNeeded ? getHttpHeaders(req) : null;
if (properties.isNode()) {
try {
final Map<NodeIdentifier, NodeWorkload> clusterWorkload = clusterCoordinator.getClusterWorkload();
clusterWorkload.forEach((nodeId, workload) -> {
final String siteToSiteHostname = nodeId.getSiteToSiteAddress() == null ? nodeId.getApiAddress() : nodeId.getSiteToSiteAddress();
final int siteToSitePort = nodeId.getSiteToSiteHttpApiPort() == null ? nodeId.getApiPort() : nodeId.getSiteToSiteHttpApiPort();
PeerDescription target = new PeerDescription(siteToSiteHostname, siteToSitePort, nodeId.isSiteToSiteSecure());
if (modificationNeeded) {
target = peerDescriptionModifier.modify(source, target,
SiteToSiteTransportProtocol.HTTP, PeerDescriptionModifier.RequestType.Peers, new HashMap<>(headers));
}
final PeerDTO peer = new PeerDTO();
peer.setHostname(target.getHostname());
peer.setPort(target.getPort());
peer.setSecure(target.isSecure());
peer.setFlowFileCount(workload.getFlowFileCount());
peers.add(peer);
});
} catch (IOException e) {
throw new RuntimeException("Failed to retrieve cluster workload due to " + e, e);
}
} else {
// Standalone mode.
final PeerDTO peer = new PeerDTO();
final String siteToSiteHostname = getSiteToSiteHostname(req);
PeerDescription target = new PeerDescription(siteToSiteHostname,
properties.getRemoteInputHttpPort(), properties.isSiteToSiteSecure());
if (modificationNeeded) {
target = peerDescriptionModifier.modify(source, target,
SiteToSiteTransportProtocol.HTTP, PeerDescriptionModifier.RequestType.Peers, new HashMap<>(headers));
}
peer.setHostname(target.getHostname());
peer.setPort(target.getPort());
peer.setSecure(target.isSecure());
peer.setFlowFileCount(0); // doesn't matter how many FlowFiles we have, because we're the only host.
peers.add(peer);
}
final PeersEntity entity = new PeersEntity();
entity.setPeers(peers);
return noCache(setCommonHeaders(Response.ok(entity), transportProtocolVersion, transactionManager)).build();
}
private String getSiteToSiteHostname(final HttpServletRequest req) {
// Private IP address or hostname may not be accessible from client in some environments.
// So, use the value defined in nifi.properties instead when it is defined.
final String remoteInputHost = properties.getRemoteInputHost();
String localName;
try {
// Get local host name using InetAddress if available, same as RAW socket does.
localName = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
if (logger.isDebugEnabled()) {
logger.debug("Failed to get local host name using InetAddress.", e);
}
localName = req.getLocalName();
}
return isEmpty(remoteInputHost) ? localName : remoteInputHost;
}
// setters
/**
 * Setter for the {@code NiFiServiceFacade} dependency.
 * NOTE(review): presumably invoked once by the dependency-injection container
 * during wiring (see the "setters" section marker above) -- confirm against the
 * application context before calling at request time.
 */
public void setServiceFacade(final NiFiServiceFacade serviceFacade) {
this.serviceFacade = serviceFacade;
}
/**
 * Setter for the {@code Authorizer} dependency.
 * NOTE(review): presumably container-injected like the other setters here -- confirm.
 */
public void setAuthorizer(Authorizer authorizer) {
this.authorizer = authorizer;
}
/**
 * Setter for the {@code ClusterCoordinator} dependency. Forwards to the
 * superclass and additionally keeps a local reference so this class can use
 * the coordinator directly (e.g. for cluster-aware peer listing).
 */
@Override
public void setClusterCoordinator(final ClusterCoordinator clusterCoordinator) {
super.setClusterCoordinator(clusterCoordinator);
this.clusterCoordinator = clusterCoordinator;
}
}
| apache-2.0 |
thomasdarimont/keycloak | core/src/main/java/org/keycloak/OAuth2Constants.java | 4671 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public interface OAuth2Constants {

    // --- Core OAuth 2.0 / OIDC request and response parameter names (RFC 6749) ---
    String CODE = "code";
    String TOKEN = "token";
    String CLIENT_ID = "client_id";
    String CLIENT_SECRET = "client_secret";
    String ERROR = "error";
    String ERROR_DESCRIPTION = "error_description";
    String REDIRECT_URI = "redirect_uri";
    String DISPLAY = "display";
    String SCOPE = "scope";
    String STATE = "state";
    String GRANT_TYPE = "grant_type";
    String RESPONSE_TYPE = "response_type";
    String ACCESS_TOKEN = "access_token";
    String TOKEN_TYPE = "token_type";
    String EXPIRES_IN = "expires_in";
    String ID_TOKEN = "id_token";
    String REFRESH_TOKEN = "refresh_token";
    String LOGOUT_TOKEN = "logout_token";

    // --- Grant types and resource-owner credentials ---
    String AUTHORIZATION_CODE = "authorization_code";
    String IMPLICIT = "implicit";
    String USERNAME = "username";
    String PASSWORD = "password";
    String CLIENT_CREDENTIALS = "client_credentials";

    // Client authentication by assertion:
    // https://tools.ietf.org/html/draft-ietf-oauth-assertions-01#page-5
    String CLIENT_ASSERTION_TYPE = "client_assertion_type";
    String CLIENT_ASSERTION = "client_assertion";
    // https://tools.ietf.org/html/draft-jones-oauth-jwt-bearer-03#section-2.2
    String CLIENT_ASSERTION_TYPE_JWT = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer";

    // http://openid.net/specs/openid-connect-core-1_0.html#OfflineAccess
    String OFFLINE_ACCESS = "offline_access";
    // http://openid.net/specs/openid-connect-core-1_0.html#AuthRequest
    String SCOPE_OPENID = "openid";
    // Standard OIDC scope values:
    // http://openid.net/specs/openid-connect-core-1_0.html#ScopeClaims
    String SCOPE_PROFILE = "profile";
    String SCOPE_EMAIL = "email";
    String SCOPE_ADDRESS = "address";
    String SCOPE_PHONE = "phone";

    // --- Additional OIDC authentication-request parameters ---
    String UI_LOCALES_PARAM = "ui_locales";
    String PROMPT = "prompt";
    String ACR_VALUES = "acr_values";
    String MAX_AGE = "max_age";

    // OIDC Session Management
    String SESSION_STATE = "session_state";

    String JWT = "JWT";

    // PKCE parameters: https://tools.ietf.org/html/rfc7636#section-6.1
    String CODE_VERIFIER = "code_verifier";
    String CODE_CHALLENGE = "code_challenge";
    String CODE_CHALLENGE_METHOD = "code_challenge_method";
    // PKCE challenge methods: https://tools.ietf.org/html/rfc7636#section-6.2.2
    String PKCE_METHOD_PLAIN = "plain";
    String PKCE_METHOD_S256 = "S256";

    // Token exchange: https://tools.ietf.org/html/rfc8693#section-2.1
    String TOKEN_EXCHANGE_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:token-exchange";
    String AUDIENCE = "audience";
    String RESOURCE = "resource";
    String REQUESTED_SUBJECT = "requested_subject";
    String SUBJECT_TOKEN = "subject_token";
    String SUBJECT_TOKEN_TYPE = "subject_token_type";
    String ACTOR_TOKEN = "actor_token";
    String ACTOR_TOKEN_TYPE = "actor_token_type";
    String REQUESTED_TOKEN_TYPE = "requested_token_type";
    String ISSUED_TOKEN_TYPE = "issued_token_type";
    String REQUESTED_ISSUER = "requested_issuer";
    String SUBJECT_ISSUER = "subject_issuer";
    String ACCESS_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token";
    String REFRESH_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:refresh_token";
    String JWT_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:jwt";
    String ID_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:id_token";
    String SAML2_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:saml2";

    // User-Managed Access
    String UMA_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:uma-ticket";

    // Device authorization grant:
    // https://tools.ietf.org/html/draft-ietf-oauth-device-flow-15#section-3.4
    String DEVICE_CODE_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:device_code";
    String DEVICE_CODE = "device_code";

    // Client-initiated backchannel authentication (CIBA)
    String CIBA_GRANT_TYPE = "urn:openid:params:grant-type:ciba";

    String DISPLAY_CONSOLE = "console";
    String INTERVAL = "interval";
    String USER_CODE = "user_code";

    // JARM response parameter:
    // https://openid.net/specs/openid-financial-api-jarm-ID1.html
    String RESPONSE = "response";
}
| apache-2.0 |
goodwinnk/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/mergeIfAnd/beforeIfNoBlock.java | 245 | // "Merge nested 'if's" "true"
class Test {
public static void main(String[] args) {
i<caret>f(args.length > 0/*comment1*/)
// comment3
if(args[/*comment2*/0].equals("foo")) {
System.out.println("oops");
}
}
}
| apache-2.0 |
sumanth232/presto | presto-benchmark/src/main/java/com/facebook/presto/benchmark/AbstractOperatorBenchmark.java | 6024 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.benchmark;
import com.facebook.presto.Session;
import com.facebook.presto.execution.TaskId;
import com.facebook.presto.execution.TaskStateMachine;
import com.facebook.presto.memory.MemoryPool;
import com.facebook.presto.memory.MemoryPoolId;
import com.facebook.presto.memory.QueryContext;
import com.facebook.presto.operator.Driver;
import com.facebook.presto.operator.OperatorFactory;
import com.facebook.presto.operator.TaskContext;
import com.facebook.presto.operator.TaskStats;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.testing.LocalQueryRunner;
import com.facebook.presto.util.CpuTimer;
import com.facebook.presto.util.CpuTimer.CpuDuration;
import com.google.common.collect.ImmutableMap;
import io.airlift.units.DataSize;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import static com.facebook.presto.spi.type.TimeZoneKey.UTC_KEY;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.airlift.units.DataSize.Unit.BYTE;
import static io.airlift.units.DataSize.Unit.GIGABYTE;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.util.Locale.ENGLISH;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* Abstract template for benchmarks that want to test the performance of an Operator.
*/
public abstract class AbstractOperatorBenchmark
extends AbstractBenchmark
{
// Supplies the executor and operator factories used to build the benchmarked pipeline.
protected final LocalQueryRunner localQueryRunner;
/**
 * @param localQueryRunner runner used to create operators; must not be null
 * @param benchmarkName name reported with the results
 * @param warmupIterations iterations run before measurement begins
 * @param measuredIterations iterations included in the reported results
 */
protected AbstractOperatorBenchmark(
LocalQueryRunner localQueryRunner,
String benchmarkName,
int warmupIterations,
int measuredIterations)
{
super(benchmarkName, warmupIterations, measuredIterations);
this.localQueryRunner = checkNotNull(localQueryRunner, "localQueryRunner is null");
}
// Convenience factory: table scan over the named table projecting the given columns.
protected OperatorFactory createTableScanOperator(int operatorId, String tableName, String... columnNames)
{
return localQueryRunner.createTableScanOperator(operatorId, tableName, columnNames);
}
// Convenience factory: hash projection over rows of the given types.
protected OperatorFactory createHashProjectOperator(int operatorId, List<Type> types)
{
return localQueryRunner.createHashProjectOperator(operatorId, types);
}
/** Subclasses build the pipeline (as Drivers) to be benchmarked against the given task context. */
protected abstract List<Driver> createDrivers(TaskContext taskContext);
/**
 * Drives every driver to completion, round-robin, on the calling thread.
 * The loop exits once a full pass over all drivers makes no progress,
 * i.e. every driver reports isFinished().
 */
protected void execute(TaskContext taskContext)
{
List<Driver> drivers = createDrivers(taskContext);
boolean done = false;
while (!done) {
boolean processed = false;
for (Driver driver : drivers) {
if (!driver.isFinished()) {
driver.process();
processed = true;
}
}
done = !processed;
}
}
/**
 * Runs one benchmark iteration: builds a fresh synthetic session and task
 * context, executes the drivers under a CPU timer, and returns the collected
 * timing and throughput metrics keyed by metric name.
 */
@Override
protected Map<String, Long> runOnce()
{
// Fixed synthetic session; the optimize-hash-generation system property is
// explicitly enabled for this run.
Session session = Session.builder()
.setUser("user")
.setSource("source")
.setCatalog("catalog")
.setSchema("schema")
.setTimeZoneKey(UTC_KEY)
.setLocale(ENGLISH)
.setSystemProperties(ImmutableMap.of("optimizer.optimize-hash-generation", "true"))
.build();
ExecutorService executor = localQueryRunner.getExecutor();
// 1 GB memory pool and 256 MB query/task limits for the synthetic task.
MemoryPool memoryPool = new MemoryPool(new MemoryPoolId("test"), new DataSize(1, GIGABYTE), false);
TaskContext taskContext = new QueryContext(false, new DataSize(256, MEGABYTE), memoryPool, executor)
.addTaskContext(new TaskStateMachine(new TaskId("query", "stage", "task"), executor),
session,
new DataSize(256, MEGABYTE),
new DataSize(1, MEGABYTE),
false,
false);
// Time only the execution of the drivers, not the setup above.
CpuTimer cpuTimer = new CpuTimer();
execute(taskContext);
CpuDuration executionTime = cpuTimer.elapsedTime();
// Pull raw input/output counters from the task stats and derive rates.
TaskStats taskStats = taskContext.getTaskStats();
long inputRows = taskStats.getRawInputPositions();
long inputBytes = taskStats.getRawInputDataSize().toBytes();
long outputRows = taskStats.getOutputPositions();
long outputBytes = taskStats.getOutputDataSize().toBytes();
double inputMegaBytes = new DataSize(inputBytes, BYTE).getValue(MEGABYTE);
return ImmutableMap.<String, Long>builder()
// legacy computed values
.put("elapsed_millis", executionTime.getWall().toMillis())
.put("input_rows_per_second", (long) (inputRows / executionTime.getWall().getValue(SECONDS)))
.put("output_rows_per_second", (long) (outputRows / executionTime.getWall().getValue(SECONDS)))
.put("input_megabytes", (long) inputMegaBytes)
.put("input_megabytes_per_second", (long) (inputMegaBytes / executionTime.getWall().getValue(SECONDS)))
.put("wall_nanos", executionTime.getWall().roundTo(NANOSECONDS))
.put("cpu_nanos", executionTime.getCpu().roundTo(NANOSECONDS))
.put("user_nanos", executionTime.getUser().roundTo(NANOSECONDS))
.put("input_rows", inputRows)
.put("input_bytes", inputBytes)
.put("output_rows", outputRows)
.put("output_bytes", outputBytes)
.build();
}
}
| apache-2.0 |
barneykim/pinpoint | web/src/main/java/com/navercorp/pinpoint/web/service/stat/AgentStatChartService.java | 1012 | /*
* Copyright 2016 Naver Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.service.stat;
import com.navercorp.pinpoint.web.util.TimeWindow;
import com.navercorp.pinpoint.web.vo.stat.chart.StatChart;
import java.util.List;
/**
* @author HyunGil Jeong
*/
public interface AgentStatChartService {
/**
 * Builds a chart of the given agent's collected statistics over the given time window.
 *
 * @param agentId id of the agent whose statistics are charted
 * @param timeWindow time range and sampling granularity for the chart
 * @return the populated chart
 */
StatChart selectAgentChart(String agentId, TimeWindow timeWindow);
/**
 * Builds multiple charts for the given agent over the given time window.
 * NOTE(review): how the list is partitioned (e.g. one chart per stat type)
 * depends on the implementation -- confirm against concrete implementations.
 */
List<StatChart> selectAgentChartList(String agentId, TimeWindow timeWindow);
}
| apache-2.0 |
xiaohanz/softcontroller | third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java | 2393 | /*
* Created on Jul 12, 2007
*
* Copyright (c) 2007, the JUNG Project and the Regents of the University
* of California
* All rights reserved.
*
* This software is open-source under the BSD license; see either
* "license.txt" or
* http://jung.sourceforge.net/license.txt for a description.
*/
package edu.uci.ics.jung.algorithms.scoring.util;
import java.util.Collection;
import org.apache.commons.collections15.Transformer;
import edu.uci.ics.jung.algorithms.scoring.HITS;
/**
* Methods for assigning values (to be interpreted as prior probabilities) to vertices in the context
* of random-walk-based scoring algorithms.
*/
public class ScoringUtils
{
    /**
     * Assigns a probability of 1/<code>roots.size()</code> to each of the elements of
     * <code>roots</code>, and 0 to every other vertex.
     * @param <V> the vertex type
     * @param roots the vertices to be assigned nonzero prior probabilities
     * @return a Transformer mapping each element of <code>roots</code> to a uniform
     *         nonzero prior probability and every other vertex to 0
     */
    public static <V> Transformer<V, Double> getUniformRootPrior(final Collection<V> roots)
    {
        return new Transformer<V, Double>()
        {
            public Double transform(V input)
            {
                // size() is read on each call, so the prior tracks any later
                // mutation of 'roots' (same behavior as the original alias).
                return roots.contains(input) ? 1.0 / roots.size() : 0.0;
            }
        };
    }

    /**
     * Returns a Transformer that assigns hub and authority values of
     * 1/<code>roots.size()</code> to each element of <code>roots</code>, and
     * (0, 0) scores to every other vertex.
     * @param <V> the vertex type
     * @param roots the vertices to be assigned nonzero scores
     * @return a Transformer that assigns uniform prior hub/authority probabilities to each root
     */
    public static <V> Transformer<V, HITS.Scores> getHITSUniformRootPrior(final Collection<V> roots)
    {
        return new Transformer<V, HITS.Scores>()
        {
            public HITS.Scores transform(V input)
            {
                if (roots.contains(input))
                    return new HITS.Scores(1.0 / roots.size(), 1.0 / roots.size());
                return new HITS.Scores(0.0, 0.0);
            }
        };
    }
}
| epl-1.0 |
developermakj/cloudgram | TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierRelativeLayout.java | 3098 | /*
* This is the source code of Telegram for Android v. 1.3.2.
* It is licensed under GNU GPL v. 2 or later.
* You should have received a copy of the license in this archive (see LICENSE).
*
* Copyright Nikolai Kudashov, 2013.
*/
package org.telegram.ui.Components;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.view.View;
import android.widget.RelativeLayout;
import org.telegram.android.AndroidUtilities;
import org.telegram.messenger.FileLog;
public class SizeNotifierRelativeLayout extends RelativeLayout {

    /** Receives the soft-keyboard height computed on each layout pass. */
    public interface SizeNotifierRelativeLayoutDelegate {
        void onSizeChanged(int keyboardHeight);
    }

    /** Notified from {@link #onLayout} with the current keyboard height; may be null. */
    public SizeNotifierRelativeLayoutDelegate delegate;

    // Scratch rect reused on every layout pass to avoid per-frame allocation.
    private Rect rect = new Rect();
    private Drawable backgroundDrawable;

    public SizeNotifierRelativeLayout(Context context) {
        super(context);
    }

    public SizeNotifierRelativeLayout(Context context, android.util.AttributeSet attrs) {
        super(context, attrs);
    }

    public SizeNotifierRelativeLayout(Context context, android.util.AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    /** Loads and installs a background image from a drawable resource; failures are logged only. */
    public void setBackgroundImage(int resourceId) {
        try {
            backgroundDrawable = getResources().getDrawable(resourceId);
        } catch (Throwable e) {
            FileLog.e("tmessages", e);
        }
    }

    /** Installs an already-loaded drawable as the background image. */
    public void setBackgroundImage(Drawable bitmap) {
        backgroundDrawable = bitmap;
    }

    public Drawable getBackgroundImage() {
        return backgroundDrawable;
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        super.onLayout(changed, l, t, r, b);
        if (delegate == null) {
            return;
        }
        // Keyboard height = (screen height minus status bar and view insets)
        // minus the height of the currently visible display frame.
        final View root = getRootView();
        final int usableHeight = root.getHeight() - AndroidUtilities.statusBarHeight - AndroidUtilities.getViewInset(root);
        getWindowVisibleDisplayFrame(rect);
        delegate.onSizeChanged(usableHeight - (rect.bottom - rect.top));
    }

    @Override
    protected void onDraw(Canvas canvas) {
        if (backgroundDrawable == null) {
            super.onDraw(canvas);
            return;
        }
        // Scale uniformly so the image covers the whole view, then center it.
        final float scale = Math.max(
                (float) getMeasuredWidth() / (float) backgroundDrawable.getIntrinsicWidth(),
                (float) getMeasuredHeight() / (float) backgroundDrawable.getIntrinsicHeight());
        final int drawWidth = (int) Math.ceil(backgroundDrawable.getIntrinsicWidth() * scale);
        final int drawHeight = (int) Math.ceil(backgroundDrawable.getIntrinsicHeight() * scale);
        final int left = (getMeasuredWidth() - drawWidth) / 2;
        final int top = (getMeasuredHeight() - drawHeight) / 2;
        backgroundDrawable.setBounds(left, top, left + drawWidth, top + drawHeight);
        backgroundDrawable.draw(canvas);
    }
}
| gpl-2.0 |
simleo/openmicroscopy | components/server/src/ome/services/query/PojosGetUserImagesQueryDefinition.java | 1413 | /*
* Copyright 2006 University of Dundee. All rights reserved.
* Use is subject to license terms supplied in LICENSE.txt
*/
package ome.services.query;
import java.sql.SQLException;
import ome.model.core.Image;
import ome.parameters.Parameters;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.Session;
// Hibernate query definition that loads Images (with pixels and pixels-type
// eagerly left-joined), restricted to an owner/group via enabled filters.
public class PojosGetUserImagesQueryDefinition extends Query {
static Definitions defs = new Definitions();
public PojosGetUserImagesQueryDefinition(Parameters parameters) {
super(defs, parameters);
}
/**
 * Builds the Criteria: distinct Image roots with pixels and pixelsType
 * left-joined so they are fetched in the same query.
 */
@Override
protected void buildQuery(Session session) throws HibernateException,
SQLException {
// TODO copied from PojosGetImages refactor
Criteria c = session.createCriteria(Image.class);
// DISTINCT_ROOT_ENTITY collapses the row multiplication caused by the joins.
c.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
Criteria pix = c.createCriteria("pixels", LEFT_JOIN);
pix.createCriteria("pixelsType", LEFT_JOIN);
//pix.createCriteria("pixelsDimensions", LEFT_JOIN);
// endTODO
setCriteria(c);
}
// Restricts results to the requested owner or group using Hibernate filters.
@Override
protected void enableFilters(Session session) {
ownerOrGroupFilters(session, new String[] { Image.OWNER_FILTER },
new String[] { Image.GROUP_FILTER });
}
}
// select i from Image i
// #bottomUpHierarchy()
// where
// #imagelist()
// #filters()
// #typeExperimenter()
| gpl-2.0 |
FauxFaux/jdk9-jaxws | src/jdk.xml.bind/share/classes/com/sun/tools/internal/xjc/reader/internalizer/NamespaceContextImpl.java | 3723 | /*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
* THIS FILE WAS MODIFIED BY SUN MICROSYSTEMS, INC.
*/
package com.sun.tools.internal.xjc.reader.internalizer;
import java.util.Iterator;
import javax.xml.XMLConstants;
import javax.xml.namespace.NamespaceContext;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
/**
* Implements {@link NamespaceContext} by looking at the in-scope
* namespace binding of a DOM element.
*
* @author Kohsuke Kawaguchi
*/
final class NamespaceContextImpl implements NamespaceContext {
    private final Element e;

    public NamespaceContextImpl(Element e) {
        this.e = e;
    }

    /**
     * Resolves {@code prefix} against the namespace declarations in scope at the
     * wrapped element, walking up the ancestor chain.
     *
     * <p>Fix: the previous implementation always returned {@code ""} for the
     * default prefix, even when an in-scope {@code xmlns="..."} declaration had
     * been found; per the {@link NamespaceContext} contract the bound URI must be
     * returned, and {@code ""} only when the default namespace is unbound.
     *
     * @param prefix the prefix to look up; must not be null
     * @return the bound namespace URI; {@code ""} for an unbound default prefix;
     *         {@code null} for an unbound non-default prefix
     * @throws IllegalArgumentException if {@code prefix} is null (per the contract)
     */
    public String getNamespaceURI(String prefix) {
        if (prefix == null)
            throw new IllegalArgumentException("prefix is null");
        if (prefix.equals("xml"))
            return XMLConstants.XML_NS_URI;

        final String prefixColon = prefix + ':';
        for (Node parent = e; parent != null; parent = parent.getParentNode()) {
            int type = parent.getNodeType();
            // Only walk through elements and entity references, as before.
            if (type != Node.ELEMENT_NODE && type != Node.ENTITY_REFERENCE_NODE)
                break;
            if (type == Node.ELEMENT_NODE) {
                // A namespace-aware element whose own name uses this prefix
                // directly carries the URI we are after.
                if (parent.getNodeName().startsWith(prefixColon))
                    return parent.getNamespaceURI();
                NamedNodeMap attrs = parent.getAttributes();
                for (int i = 0; i < attrs.getLength(); i++) {
                    Node attr = attrs.item(i);
                    String aname = attr.getNodeName();
                    boolean isPrefixDecl = aname.startsWith("xmlns:");
                    if (isPrefixDecl || aname.equals("xmlns")) {
                        String declaredPrefix = isPrefixDecl ? aname.substring("xmlns:".length()) : "";
                        if (declaredPrefix.equals(prefix))
                            return attr.getNodeValue();
                    }
                }
            }
        }
        // Unbound: "" for the default namespace, null for any other prefix.
        return prefix.equals("") ? "" : null;
    }

    /** Not needed by the internalizer; intentionally unsupported. */
    public String getPrefix(String namespaceURI) {
        throw new UnsupportedOperationException();
    }

    /** Not needed by the internalizer; intentionally unsupported. */
    public Iterator getPrefixes(String namespaceURI) {
        throw new UnsupportedOperationException();
    }
}
| gpl-2.0 |
MjAbuz/exist | extensions/security/openid/src/org/exist/security/realm/openid/OpenIDRealm.java | 2649 | /*
* eXist Open Source Native XML Database
* Copyright (C) 2010-2011 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id$
*/
package org.exist.security.realm.openid;
import org.exist.EXistException;
import org.exist.config.Configuration;
import org.exist.config.ConfigurationException;
import org.exist.config.annotation.ConfigurationClass;
import org.exist.config.annotation.ConfigurationFieldAsAttribute;
import org.exist.security.AbstractRealm;
import org.exist.security.Account;
import org.exist.security.AuthenticationException;
import org.exist.security.Group;
import org.exist.security.PermissionDeniedException;
import org.exist.security.Subject;
import org.exist.security.internal.SecurityManagerImpl;
/**
* OpenID realm.
*
* @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
*
*/
@ConfigurationClass("realm") //TODO: id = OpenID
public class OpenIDRealm extends AbstractRealm {
// Mutable global set by the constructor; the last-created realm wins.
// NOTE(review): presumably accessed by the OpenID servlet integration -- confirm callers.
public static OpenIDRealm instance = null;
@ConfigurationFieldAsAttribute("id")
public static String ID = "OpenID";
@ConfigurationFieldAsAttribute("version")
public final static String version = "1.0";
public OpenIDRealm(SecurityManagerImpl sm, Configuration config) {
super(sm, config);
instance = this;
}
@Override
public String getId() {
return ID;
}
/**
 * Always returns null: password-style authentication is not performed here.
 * NOTE(review): OpenID login is presumably handled out-of-band (redirect flow)
 * rather than through this method -- confirm before relying on it.
 */
@Override
public Subject authenticate(final String accountName, Object credentials) throws AuthenticationException {
return null;
}
// Account/group management is not supported by this realm; all mutating
// operations below report failure by returning false.
@Override
public boolean deleteAccount(Account account) throws PermissionDeniedException, EXistException, ConfigurationException {
return false;
}
@Override
public boolean updateGroup(Group group) throws PermissionDeniedException, EXistException, ConfigurationException {
return false;
}
@Override
public boolean deleteGroup(Group group) throws PermissionDeniedException, EXistException, ConfigurationException {
return false;
}
}
| lgpl-2.1 |
dev4mobile/u2020 | src/main/java/com/jakewharton/u2020/data/api/oauth/OauthManager.java | 2784 | package com.jakewharton.u2020.data.api.oauth;
import android.content.Intent;
import android.net.Uri;
import com.f2prateek.rx.preferences.Preference;
import com.jakewharton.u2020.data.IntentFactory;
import com.squareup.moshi.Moshi;
import com.squareup.okhttp.FormEncodingBuilder;
import com.squareup.okhttp.HttpUrl;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;
import java.io.IOException;
import javax.inject.Inject;
import javax.inject.Singleton;
import timber.log.Timber;
/**
 * Handles the GitHub OAuth web flow: building the authorize-page intent and
 * exchanging the returned authorization code for an access token, which is
 * persisted into the injected preference.
 *
 * SECURITY NOTE(review): the client secret is embedded in source below; a
 * shipped client cannot keep it confidential. Acceptable for a sample app --
 * confirm this is intended before reuse.
 */
@Singleton public final class OauthManager {
private static final String CLIENT_ID = "5793abe5bcb6d90f0240";
private static final String CLIENT_SECRET = "81a35659c60fc376629432a51fd81e5c66a8dace";
private final IntentFactory intentFactory;
private final OkHttpClient client;
private final Moshi moshi;
private final Preference<String> accessToken;
@Inject public OauthManager(IntentFactory intentFactory, OkHttpClient client, Moshi moshi,
@AccessToken Preference<String> accessToken) {
this.intentFactory = intentFactory;
this.client = client;
this.moshi = moshi;
this.accessToken = accessToken;
}
/** Returns an intent that opens GitHub's OAuth authorize page for this client id. */
public Intent createLoginIntent() {
HttpUrl authorizeUrl = HttpUrl.parse("https://github.com/login/oauth/authorize") //
.newBuilder() //
.addQueryParameter("client_id", CLIENT_ID) //
.build();
return intentFactory.createUrlIntent(authorizeUrl.toString());
}
/**
 * Completes the flow from the OAuth redirect URI: extracts the "code" query
 * parameter, exchanges it for an access token, and stores the token.
 * Missing data/code means the redirect was not an OAuth callback; nothing happens.
 * NOTE(review): performs a synchronous HTTP call on the calling thread --
 * confirm callers invoke this off the main thread.
 */
public void handleResult(Uri data) {
if (data == null) return;
String code = data.getQueryParameter("code");
if (code == null) return;
try {
// Trade our code for an access token.
Request request = new Request.Builder() //
.url("https://github.com/login/oauth/access_token") //
.header("Accept", "application/json") //
.post(new FormEncodingBuilder() //
.add("client_id", CLIENT_ID) //
.add("client_secret", CLIENT_SECRET) //
.add("code", code) //
.build()) //
.build();
Response response = client.newCall(request).execute();
if (response.isSuccessful()) {
AccessTokenResponse accessTokenResponse =
moshi.adapter(AccessTokenResponse.class).fromJson(response.body().string());
// Only persist when the response actually carried a token.
if (accessTokenResponse != null && accessTokenResponse.access_token != null) {
accessToken.set(accessTokenResponse.access_token);
}
}
} catch (IOException e) {
// Best-effort: a failed exchange is logged, leaving the stored token unchanged.
Timber.w(e, "Failed to get access token.");
}
}
// JSON model for the token-exchange response; field name matches the wire format.
private static final class AccessTokenResponse {
public final String access_token;
// NOTE(review): the 'scope' parameter is accepted but unused -- presumably
// present for Moshi's constructor matching; confirm before removing.
private AccessTokenResponse(String access_token, String scope) {
this.access_token = access_token;
}
}
}
| apache-2.0 |
siosio/intellij-community | plugins/InspectionGadgets/InspectionGadgetsAnalysis/src/com/siyeh/ig/threading/ThreadLocalNotStaticFinalInspection.java | 2128 | /*
* Copyright 2009-2015 Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.threading;
import com.intellij.psi.*;
import com.intellij.psi.util.InheritanceUtil;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.fixes.MakeFieldStaticFinalFix;
import org.jetbrains.annotations.NotNull;
public class ThreadLocalNotStaticFinalInspection extends BaseInspection {
@NotNull
@Override
protected String buildErrorString(Object... infos) {
return InspectionGadgetsBundle.message("thread.local.not.static.final.problem.descriptor");
}
@Override
protected InspectionGadgetsFix buildFix(Object... infos) {
return MakeFieldStaticFinalFix.buildFix((PsiField)infos[0]);
}
@Override
public BaseInspectionVisitor buildVisitor() {
return new ThreadLocalNotStaticFinalVisitor();
}
private static class ThreadLocalNotStaticFinalVisitor extends BaseInspectionVisitor {
@Override
public void visitField(PsiField field) {
super.visitField(field);
final PsiType type = field.getType();
if (!InheritanceUtil.isInheritor(type, "java.lang.ThreadLocal")) {
return;
}
final PsiModifierList modifierList = field.getModifierList();
if (modifierList == null) {
return;
}
if (modifierList.hasModifierProperty(PsiModifier.STATIC) &&
modifierList.hasModifierProperty(PsiModifier.FINAL)) {
return;
}
registerFieldError(field, field);
}
}
} | apache-2.0 |
sanyaade-g2g-repos/orientdb | core/src/main/java/com/orientechnologies/orient/core/index/hashindex/local/ODirectoryFirstPage.java | 2396 | /*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.index.hashindex.local;
import com.orientechnologies.common.serialization.types.OIntegerSerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.storage.cache.OCacheEntry;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OWALChangesTree;
import java.io.IOException;
/**
* @author Andrey Lomakin (a.lomakin-at-orientechnologies.com)
* @since 5/14/14
*/
public class ODirectoryFirstPage extends ODirectoryPage {
// On-disk layout of the first directory page: a tree-size int, then a
// tombstone int, then the node items. Offsets are cumulative; changing them
// changes the persistent format.
private static final int TREE_SIZE_OFFSET = NEXT_FREE_POSITION;
private static final int TOMBSTONE_OFFSET = TREE_SIZE_OFFSET + OIntegerSerializer.INT_SIZE;
private static final int ITEMS_OFFSET = TOMBSTONE_OFFSET + OIntegerSerializer.INT_SIZE;
// How many directory nodes fit in the remainder of a disk-cache page after the header.
public static final int NODES_PER_PAGE = (OGlobalConfiguration.DISK_CACHE_PAGE_SIZE.getValueAsInteger() * 1024 - ITEMS_OFFSET)
/ OHashTableDirectory.BINARY_LEVEL_SIZE;
public ODirectoryFirstPage(OCacheEntry cacheEntry, OWALChangesTree changesTree, OCacheEntry entry) {
super(cacheEntry, changesTree, entry);
}
// Persists the number of nodes in the directory tree.
public void setTreeSize(int treeSize) throws IOException {
setIntValue(TREE_SIZE_OFFSET, treeSize);
}
public int getTreeSize() {
return getIntValue(TREE_SIZE_OFFSET);
}
// Persists the head of the free-node (tombstone) chain.
public void setTombstone(int tombstone) throws IOException {
setIntValue(TOMBSTONE_OFFSET, tombstone);
}
public int getTombstone() {
return getIntValue(TOMBSTONE_OFFSET);
}
// The first page's items start after its extra header fields, unlike later pages.
@Override
protected int getItemsOffset() {
return ITEMS_OFFSET;
}
}
| apache-2.0 |
ingokegel/intellij-community | plugins/maven/src/main/java/org/jetbrains/idea/maven/project/actions/EditSettingsAction.java | 1482 | /*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.project.actions;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.Project;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.project.MavenProjectBundle;
import org.jetbrains.idea.maven.utils.actions.MavenAction;
import org.jetbrains.idea.maven.utils.actions.MavenActionUtil;
public class EditSettingsAction extends MavenAction {
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
showSettingsFor(MavenActionUtil.getProject(e.getDataContext()));
}
protected static void showSettingsFor(@Nullable Project project) {
ShowSettingsUtil.getInstance().showSettingsDialog(project, MavenProjectBundle.message("configurable.MavenSettings.display.name"));
}
} | apache-2.0 |
siosio/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/addCatchBlock/afterIncompleteTry.java | 204 | // "Add 'catch' clause(s)" "true"
class Foo {
void test(String s) {
try {
System.out.println(s);
} catch (Exception e) {
e.printStackTrace();
}
}
}
| apache-2.0 |