gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package gui;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.net.URL;
import java.util.Random;
import javax.imageio.ImageIO;
import javax.swing.*;
import javax.swing.text.AbstractDocument;
import ttable.LevelProgeny;
/**
* The class LGame, a populated BackgroundPanel.
*
* @author Yaqzan Ali
* @author Taylor Calder
* @version 0.1
*/
@SuppressWarnings("serial")
public class LGame extends BackgroundPanel implements KeyListener{

	/** The level progeny holding the level details. */
	private LevelProgeny level;

	/** Seconds placed on the bomb timer when the level starts. */
	private static final int BOMB_TIME = 50;

	/** Score needed to defuse the bomb and win the level. */
	private static final int SCORE_NEEDED = 20000;

	/** Points awarded for a right answer, before the multiplier is applied. */
	private static final int BASIC_SCORE = 500;

	private JLabel lblBombTimer;   // The label for the bomb timer
	private JLabel lblNum1;        // Label holding the first number
	private JLabel lblNum2;        // Label holding second number
	private JLabel lblx;           // Label holding "x"
	private JLabel lblScore;       // Label holding the score
	private JLabel lblScoreNeeded; // Label holding the required score
	private JLabel lblMultiplier;  // Label holding the multiplier
	private JLabel lblBombLit;     // Picture of the bomb
	private JLabel lblBombFrz;     // Picture of the frozen bomb
	private JTextField txtAnswer;  // Answer field
	private Random rnd;            // Random number generator
	private Timer tmrBomb;         // Bomb timer (counts down toward losing)
	private Timer tmrDelay;        // Delay timer (bomb is frozen while it runs)
	private int answer;            // The answer to the current question
	private DelayTimer delay;      // ActionListener driving the delay countdown
	private int score;             // The player's current score
	private int multiplier;        // The current score multiplier
	private JProgressBar bar;      // Progress bar toward SCORE_NEEDED

	/**
	 * Instantiates an LGame instance: builds the layout, starts the bomb
	 * timer and displays the first question.
	 *
	 * @param level the level being played
	 */
	public LGame(LevelProgeny level) {
		// Calls superclass constructor to create the background panel
		super(0, new GridBagLayout());
		this.level = level;

		// Initialize components and game state
		lblBombTimer = new JLabel();
		lblScore = new JLabel();
		lblNum1 = new JLabel();
		lblNum2 = new JLabel();
		lblx = new JLabel(" x ");
		lblMultiplier = new JLabel();
		lblScoreNeeded = new JLabel();
		txtAnswer = new JTextField();
		tmrBomb = new Timer(1000, new Listener(this));
		delay = new DelayTimer(this);
		tmrDelay = new Timer(1000, delay);
		score = 0;
		multiplier = 1;
		bar = new JProgressBar(0, SCORE_NEEDED);
		rnd = new Random();

		GridBagConstraints c = new GridBagConstraints();

		// Score
		c.gridx = 0;
		c.gridy = 3;
		c.gridwidth = 2;
		c.anchor = GridBagConstraints.LINE_START;
		add(lblScore, c);
		lblScore.setFont(Controller.getFont().deriveFont(Font.BOLD, 40));
		lblScore.setText("" + score);
		lblScore.setForeground(Color.green);

		// Numbers
		c.gridx = 1;
		c.gridwidth = 1;
		c.gridy = 0;
		c.anchor = GridBagConstraints.LINE_END;
		add(lblNum1, c);
		lblNum1.setFont(Controller.getFont().deriveFont(Font.BOLD, 200));
		c.gridx = 4;
		c.anchor = GridBagConstraints.LINE_START;
		add(lblNum2, c);
		lblNum2.setFont(Controller.getFont().deriveFont(Font.BOLD, 200));
		// Generate the first question (the original also called this earlier,
		// before the labels were laid out; a single call here is sufficient).
		newQuestion();

		// Multiplication sign
		c.gridwidth = 2;
		c.anchor = GridBagConstraints.CENTER;
		c.gridx = 2;
		add(lblx, c);
		lblx.setFont(Controller.getFont().deriveFont(Font.BOLD, 80));

		// Textfield
		c.fill = GridBagConstraints.HORIZONTAL;
		c.gridy = 1;
		add(txtAnswer, c);
		txtAnswer.setFont(Controller.getFont().deriveFont(Font.BOLD, 60));
		txtAnswer.addKeyListener(this);
		// Limit the answer field to 3 characters.
		((AbstractDocument) txtAnswer.getDocument()).setDocumentFilter(new DocumentLengthFilter(3));
		txtAnswer.requestFocus();

		// Progress Bar
		c.gridy = 3;
		c.ipady = 20;
		c.insets = new Insets(10, 0, 0, 0);
		bar.setValue(0);
		bar.setStringPainted(true);
		add(bar, c);
		bar.setForeground(Color.green);

		// Timer
		c.ipady = 0;
		c.gridwidth = 1;
		c.gridx = 2;
		c.gridy = 2;
		c.anchor = GridBagConstraints.LINE_START;
		add(lblBombTimer, c);
		Font fntBomb = new Font("Stencil", Font.BOLD, 40);
		lblBombTimer.setFont(fntBomb);
		lblBombTimer.setForeground(Color.red);
		tmrBomb.start();

		// Bomb pictures -- fall back to plain text labels if the images
		// cannot be fetched.
		// NOTE(review): fetching images over the network in the constructor
		// blocks the EDT; consider loading them asynchronously.
		try {
			Image img = ImageIO.read(new URL("http://jbaron6.cs2212.ca/img/bomb_lit.png"));
			lblBombLit = new JLabel(new ImageIcon(img));
		} catch (Exception e) {
			lblBombLit = new JLabel("Ticking...");
		}
		try {
			Image img = ImageIO.read(new URL("http://jbaron6.cs2212.ca/img/bomb_frozen.png"));
			lblBombFrz = new JLabel(new ImageIcon(img));
		} catch (Exception e) {
			lblBombFrz = new JLabel("Frozen");
		}
		c.anchor = GridBagConstraints.LINE_END;
		c.gridx = 3;
		add(lblBombLit, c);
		add(lblBombFrz, c);
		lblBombFrz.setVisible(false);

		// Multiplier (hidden until a streak starts). Use the same "Nx"
		// format as calculateScore()/keyReleased(); the original
		// inconsistently used "xN" here.
		c.gridx = 4;
		c.gridy = 1;
		c.anchor = GridBagConstraints.LINE_START;
		add(lblMultiplier, c);
		lblMultiplier.setFont(Controller.getFont().deriveFont(Font.BOLD, 90));
		lblMultiplier.setText(multiplier + "x");
		lblMultiplier.setVisible(false);

		// Score needed
		c.gridy = 3;
		c.gridx = 4;
		c.gridwidth = 2;
		add(lblScoreNeeded, c);
		lblScoreNeeded.setFont(Controller.getFont().deriveFont(Font.BOLD, 30));
		lblScoreNeeded.setText("Need " + SCORE_NEEDED);
	}

	/**
	 * Generates a new question: one factor is the level number, the other is
	 * random in 1..11, and the two are shown in random order.
	 */
	public void newQuestion(){
		int currentQ = rnd.nextInt(11) + 1;
		int level = this.level.getLevelNumber();
		answer = currentQ * level;
		// Randomize which side the level number appears on.
		if (rnd.nextInt(2) > 0) {
			lblNum1.setText("" + currentQ);
			lblNum2.setText("" + level);
		}
		else {
			lblNum1.setText("" + level);
			lblNum2.setText("" + currentQ);
		}
	}

	/**
	 * Sets the time left for the bomb timer; at zero the level is lost.
	 *
	 * @param time the time left, in seconds
	 */
	public void setBombTime(int time) {
		// Zero-pad the seconds so e.g. 9 displays as "0:09" rather than "0:9".
		lblBombTimer.setText(String.format("0:%02d", time));
		if (time == 0) {
			tmrBomb.stop();
			Controller.setScreen(new ScoreReportL(false, level.getLevelNumber()));
		}
	}

	/**
	 * Adds the (multiplied) points for a correct answer, updates the score
	 * displays, and ends the level if the required score is reached.
	 */
	public void calculateScore(){
		score = score + (BASIC_SCORE * multiplier);
		lblScore.setText("" + score);
		lblMultiplier.setText(multiplier + "x");
		if (score >= SCORE_NEEDED){
			tmrBomb.stop();
			tmrDelay.stop();
			Controller.setScreen(new ScoreReportL(true, level.getLevelNumber()));
		}
		bar.setValue(score);
	}

	/**
	 * Gets the starting amount of time on the bomb.
	 *
	 * @return BOMB_TIME the starting time, in seconds
	 */
	public static int getBombTime(){
		return BOMB_TIME;
	}

	/**
	 * Gets the bomb timer.
	 *
	 * @return tmrBomb the bomb's timer
	 */
	public Timer getBombTimer(){
		return tmrBomb;
	}

	/**
	 * Gets the delay timer.
	 *
	 * @return tmrDelay the delay timer
	 */
	public Timer getDelayTimer(){
		return tmrDelay;
	}

	/**
	 * Gets the bomb countdown label.
	 *
	 * @return the bomb label
	 */
	public JLabel getBombLabel(){
		return lblBombTimer;
	}

	/**
	 * Sets the current score multiplier.
	 *
	 * @param i the multiplier
	 */
	public void setMultiplier(int i){
		multiplier = i;
	}

	/**
	 * Gets the multiplier label.
	 *
	 * @return the JLabel
	 */
	public JLabel getMultiplierLabel(){
		return lblMultiplier;
	}

	/**
	 * Gets the label that displays a lit bomb.
	 *
	 * @return the JLabel
	 */
	public JLabel getBombLit(){
		return lblBombLit;
	}

	/**
	 * Gets the label that displays a frozen bomb.
	 *
	 * @return the JLabel
	 */
	public JLabel getBombFrz(){
		return lblBombFrz;
	}

	@Override
	public void keyPressed(KeyEvent arg0) {
		// Intentionally empty: answers are checked on key release.
	}

	@Override
	/**
	 * Check for correct answer when the player is not pressing a key.
	 */
	public void keyReleased(KeyEvent e) {
		try{
			int text = Integer.valueOf(txtAnswer.getText());
			char c = e.getKeyChar();
			// Check if key press was a number or backspace
			// (VK_BACK_SPACE == 8 == '\b', so the char comparison works).
			if(Character.isDigit(c) || (c == KeyEvent.VK_BACK_SPACE)){
				// If right answer
				if (text == answer){
					txtAnswer.setText("");
					calculateScore();
					newQuestion();
					// If not already delayed: freeze the bomb and start the delay.
					if(tmrBomb.isRunning()){
						tmrBomb.stop();
						delay.start();
						tmrDelay.start();
						lblBombTimer.setForeground(Color.black);
						lblBombLit.setVisible(false);
						lblBombFrz.setVisible(true);
					}else{
						// If delay is already on, extend it and grow the streak.
						delay.increaseTime();
						multiplier += 1;
						lblMultiplier.setText(multiplier + "x");
						lblMultiplier.setVisible(true);
					}
				}
			}
		} catch (Exception ignored){
			// An empty or non-numeric answer field is not an error;
			// simply wait for more input.
		}
	}

	@Override
	public void keyTyped(KeyEvent e) {
		char c = e.getKeyChar();
		// Prevents all keys from working except numbers and backspace
		if(!(Character.isDigit(c) || (c == KeyEvent.VK_BACK_SPACE))){
			e.consume();
		}
	}

	/*
	 * (non-Javadoc)
	 * @see gui.BackgroundPanel#close()
	 */
	@Override
	public void close() {
		// Stop both timers so this panel can be garbage collected.
		tmrBomb.stop();
		tmrDelay.stop();
	}
}
/**
 * The delay timer, an ActionListener fired once per second while the bomb is
 * frozen; when the delay runs out it restores the bomb countdown and resets
 * the multiplier.
 *
 * @author Yaqzan
 */
class DelayTimer implements ActionListener{

	/** The game whose bomb this delay suspends. */
	private final LGame lgame;

	/** Seconds remaining on the current delay. */
	private int time;

	/** Length of a fresh delay, in seconds. */
	private static final int DELAY_TIME = 5;

	/**
	 * Instantiates the delay with a full countdown.
	 *
	 * @param game the game this delay belongs to
	 */
	public DelayTimer(LGame game) {
		this.lgame = game;
		this.time = DELAY_TIME;
	}

	@Override
	public void actionPerformed(ActionEvent e) {
		if (time > 0) {
			time--;
		}else{
			// Delay expired: resume the bomb and reset the streak state.
			lgame.getDelayTimer().stop();
			lgame.getBombTimer().start();
			lgame.getBombLabel().setForeground(Color.red);
			lgame.setMultiplier(1);
			lgame.getMultiplierLabel().setVisible(false);
			lgame.getBombLit().setVisible(true);
			lgame.getBombFrz().setVisible(false);
		}
	}

	/**
	 * Increase the time of the delay by half a full delay
	 * (integer division: DELAY_TIME/2 == 2 seconds).
	 */
	public void increaseTime(){
		this.time += (DELAY_TIME / 2);
	}

	/**
	 * Start the delay with a full countdown.
	 */
	public void start(){
		this.time = DELAY_TIME;
	}
}
/**
 * The bomb countdown, an ActionListener fired once per second by the bomb
 * timer; it decrements the remaining time and pushes it to the game.
 */
class Listener implements ActionListener {

	/** The game whose bomb this listener counts down. */
	private final LGame lgame;

	/** The number of seconds remaining. */
	private int timeRemaining;

	/**
	 * Instantiates the Timer Action.
	 *
	 * @param game the game whose bomb timer this listener drives
	 */
	public Listener(LGame game) {
		this.lgame = game;
		this.timeRemaining = LGame.getBombTime();
		// Show the starting time immediately instead of waiting one tick.
		game.setBombTime(timeRemaining);
	}

	/**
	 * Set the amount of time remaining for this timer.
	 *
	 * @param time the new amount of time remaining, in seconds
	 */
	public void setBombTime(int time) {
		timeRemaining = time;
	}

	/*
	 * (non-Javadoc)
	 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
	 */
	@Override
	public void actionPerformed(ActionEvent e) {
		// Count down one second per tick, never below zero; the game stops
		// this timer once it is told the time has reached zero.
		if (timeRemaining > 0) {
			timeRemaining--;
		}
		lgame.setBombTime(timeRemaining);
	}
}
| |
package com.suscipio_solutions.consecro_mud.Abilities.Spells;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability;
import com.suscipio_solutions.consecro_mud.Areas.interfaces.Area;
import com.suscipio_solutions.consecro_mud.CharClasses.interfaces.CharClass;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CMMsg;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CharStats;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CMTableRow;
import com.suscipio_solutions.consecro_mud.Common.interfaces.PhyStats;
import com.suscipio_solutions.consecro_mud.Items.interfaces.ClanItem;
import com.suscipio_solutions.consecro_mud.Items.interfaces.Coins;
import com.suscipio_solutions.consecro_mud.Items.interfaces.ImmortalOnly;
import com.suscipio_solutions.consecro_mud.Items.interfaces.Item;
import com.suscipio_solutions.consecro_mud.Items.interfaces.PackagedItems;
import com.suscipio_solutions.consecro_mud.Locales.interfaces.Room;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.Races.interfaces.Race;
import com.suscipio_solutions.consecro_mud.core.CMClass;
import com.suscipio_solutions.consecro_mud.core.CMLib;
import com.suscipio_solutions.consecro_mud.core.CMParms;
import com.suscipio_solutions.consecro_mud.core.CMProps;
import com.suscipio_solutions.consecro_mud.core.CMSecurity;
import com.suscipio_solutions.consecro_mud.core.CMath;
import com.suscipio_solutions.consecro_mud.core.Directions;
import com.suscipio_solutions.consecro_mud.core.Log;
import com.suscipio_solutions.consecro_mud.core.interfaces.Environmental;
import com.suscipio_solutions.consecro_mud.core.interfaces.ItemPossessor;
import com.suscipio_solutions.consecro_mud.core.interfaces.Physical;
@SuppressWarnings({"unchecked","rawtypes"})
public class Spell_Wish extends Spell
{
/** @return the unique class identifier of this ability. */
@Override public String ID() { return "Spell_Wish"; }

// Display name, localized once at class-load time and cached.
private final static String localizedName = CMLib.lang().L("Wish");

/** @return the localized display name of this spell. */
@Override public String name() { return localizedName; }

/** @return 0 -- the target code for this spell (see Ability.canTargetCode). */
@Override protected int canTargetCode(){return 0;}

/** @return classification: a spell in the alteration domain. */
@Override public int classificationCode(){return Ability.ACODE_SPELL|Ability.DOMAIN_ALTERATION;}

/** @return enchantment quality: indifferent. */
@Override public int enchantQuality(){return Ability.QUALITY_INDIFFERENT;}

/** @return abstract quality: indifferent (neither beneficial nor malicious). */
@Override public int abstractQuality(){ return Ability.QUALITY_INDIFFERENT;}

/** @return flags: this ability cannot be ordered. */
@Override public long flags(){return Ability.FLAG_NOORDERING;}

/** @return the mana override: this spell always costs all available mana. */
@Override protected int overrideMana(){return Ability.COST_ALL;}
/**
 * Considers a candidate thing for a wished-for object match. The candidate
 * is accepted (added to foundAll) when it is not immortal-only, not a clan
 * item, not on land the mob lacks privileges to, and matches the ID/name of
 * the first-found thing (if any).
 *
 * @param mob        the wishing mob (used for land-privilege checks)
 * @param E          the candidate; null is ignored
 * @param foundAll   accumulates every accepted candidate
 * @param foundThang the first accepted candidate so far, or null
 * @return the first accepted candidate (E if foundThang was null and E was
 *         accepted, otherwise foundThang unchanged)
 */
protected Physical maybeAdd(MOB mob, Physical E, Vector foundAll, Physical foundThang)
{
	// Guard before any use of E; the original resolved E's room first,
	// relying on roomLocation() tolerating a null argument.
	if(E==null)
		return foundThang;
	final Room R=CMLib.map().roomLocation(E);
	if((!(E instanceof ImmortalOnly))
	&&(!(E instanceof ClanItem))
	&&((R==null)||(CMLib.law().getLandTitle(R)==null)||(CMLib.law().doesHavePriviledgesHere(mob, R)))
	&&((foundThang==null)
		||((foundThang.ID().equals(E.ID()))&&(foundThang.name().equals(E.name())))))
	{
		if(foundThang==null)
			foundThang=E;
		foundAll.addElement(E);
	}
	return foundThang;
}
/**
 * Teleports the target (a MOB or an Item) to the given room, showing the
 * appropriate departure/arrival messages. Targets that are neither MOBs nor
 * Items are silently ignored.
 *
 * @param mob    the wishing mob (its location is the departure room)
 * @param here   the destination room
 * @param target the thing to move
 */
private void bringThangHere(MOB mob, Room here, Physical target)
{
	if(target instanceof MOB)
	{
		mob.location().show((MOB)target,null,CMMsg.MSG_OK_VISUAL,L("<S-NAME> teleport(s) to @x1.",here.displayText()));
		here.bringMobHere((MOB)target,false);
		// Only announce the arrival if the move actually succeeded.
		if(here.isInhabitant((MOB)target))
			here.show((MOB)target,null,CMMsg.MSG_OK_VISUAL,L("<S-NAME> appear(s) out of nowhere."));
	}
	else
	if(target instanceof Item)
	{
		final Item item=(Item)target;
		mob.location().show(mob,target,CMMsg.MSG_OK_VISUAL,L("<T-NAME> is teleported to @x1!",here.displayText()));
		// Detach the item from whoever/whatever holds it before moving it.
		item.unWear();
		item.setContainer(null);
		item.removeFromOwnerContainer();
		here.addItem(item,ItemPossessor.Expire.Player_Drop);
		mob.location().show(mob,target,CMMsg.MSG_OK_VISUAL,L("<T-NAME> appears out of the Java Plane!"));
	}
}
/**
 * Charges the mob the experience cost of a wish, optionally draining a
 * point of constitution as well.
 *
 * @param mob     the wishing mob; null is silently ignored
 * @param expLoss the base experience cost (adjusted, then capped at the
 *                mob's current experience)
 * @param conLoss true to also drain one point of base constitution
 */
public void wishDrain(MOB mob, int expLoss, boolean conLoss)
{
	if(mob==null) return;
	// Scale the cost, then cap it so experience cannot go negative.
	expLoss=getXPCOSTAdjustment(mob,expLoss);
	if(expLoss > mob.getExperience())
		expLoss=mob.getExperience();
	CMLib.leveler().postExperience(mob,null,null,-expLoss,false);
	if(conLoss)
	{
		mob.tell(L("Your wish drains you of @x1 experience points and a point of constitution.",""+(expLoss)));
		// Lower current constitution and the max-constitution adjustment;
		// presumably this makes the loss permanent -- TODO confirm semantics
		// of STAT_MAX_CONSTITUTION_ADJ.
		mob.baseCharStats().setStat(CharStats.STAT_CONSTITUTION,mob.baseCharStats().getStat(CharStats.STAT_CONSTITUTION)-1);
		mob.baseCharStats().setStat(CharStats.STAT_MAX_CONSTITUTION_ADJ,mob.baseCharStats().getStat(CharStats.STAT_MAX_CONSTITUTION_ADJ)-1);
		mob.recoverCharStats();
		// Wishing yourself down to zero constitution is fatal.
		if(mob.charStats().getStat(CharStats.STAT_CONSTITUTION)<1)
			CMLib.combat().postDeath(mob,mob,null);
	}
	else
		mob.tell(L("Your wish drains @x1 experience points.",""+(expLoss)));
}
/**
 * Ages the given mob by invoking the Chant_SpeedAging ability on it with an
 * ability code of 65536; does nothing if that ability class is unavailable.
 *
 * @param mob the mob to age
 */
public void age(MOB mob)
{
	final Ability agingChant=CMClass.getAbility("Chant_SpeedAging");
	if(agingChant==null)
		return;
	agingChant.setAbilityCode(65536);
	agingChant.invoke(mob,mob,true,0);
}
@Override
public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
{
if(mob.isMonster())
{
mob.location().show(mob,null,CMMsg.MSG_NOISE,L("<S-NAME> sigh(s)."));
CMLib.commands().postSay(mob,null,L("My wishes never seem to come true."),false,false);
return false;
}
String myWish=CMParms.combine(commands,0);
if(((!auto)&&(mob.phyStats().level()<20))||(mob.charStats().getStat(CharStats.STAT_CONSTITUTION)<2))
{
mob.tell(L("You are too weak to wish."));
return false;
}
if(myWish.toUpperCase().trim().startsWith("FOR ")) myWish=myWish.trim().substring(3);
if(myWish.length()==0)
{
mob.tell(L("What would you like to wish for?"));
return false;
}
if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
return false;
int baseLoss=25;
final CMMsg msg=CMClass.getMsg(mob,null,this,verbalCastCode(mob,null,auto),L("^S<S-NAME> wish(es) for '@x1'!!^?",myWish));
final boolean success=proficiencyCheck(mob,0,auto);
if(!success)
{
baseLoss=getXPCOSTAdjustment(mob,baseLoss);
CMLib.leveler().postExperience(mob,null,null,-baseLoss,false);
beneficialWordsFizzle(mob,null,L("<S-NAME> wish(es) for '@x1', but the spell fizzles.",myWish));
return false;
}
else
if(mob.location().okMessage(mob,msg))
{
// cast wish bless were cast on me
// cast wish to have restoration cast on me
// cast wish to cast bless on me
// cast wish to cast disintegrate on orc
// cast wish to cast geas on orc to kill bob
Log.sysOut("Wish",mob.Name()+" wished for "+myWish+".");
mob.location().send(mob,msg);
final StringBuffer wish=new StringBuffer(myWish);
for(int i=0;i<wish.length();i++)
if(!Character.isLetterOrDigit(wish.charAt(i)))
wish.setCharAt(i,' ');
myWish=wish.toString().trim().toUpperCase();
final Vector wishV=CMParms.parse(myWish);
myWish=" "+myWish+" ";
if(wishV.size()==0)
{
baseLoss=getXPCOSTAdjustment(mob,baseLoss);
CMLib.leveler().postExperience(mob,null,null,-baseLoss,false);
beneficialWordsFizzle(mob,null,L("<S-NAME> make(s) a wish comes true! Nothing happens!"));
return false;
}
// do locate object first.. its the most likely
String objectWish=myWish;
final String[] redundantStarts={"CREATE","TO CREATE","ANOTHER","THERE WAS","I HAD","I COULD HAVE","MAY I HAVE","CAN I HAVE","CAN YOU","CAN I","MAKE","TO MAKE","GIVE","ME","TO HAVE","TO GET","A NEW","SOME MORE","MY OWN","A","PLEASE","THE","I OWNED"};
final String[] redundantEnds={"TO APPEAR","OF MY OWN","FOR ME","BE","CREATED","PLEASE","HERE"};
int i=0;
while(i<redundantStarts.length)
{
if(objectWish.startsWith(" "+redundantStarts[i]+" "))
{ objectWish=objectWish.substring(1+redundantStarts[i].length()); i=-1;}
i++;
}
i=0;
while(i<redundantEnds.length)
{
if(objectWish.endsWith(" "+redundantEnds[i]+" "))
{ objectWish=objectWish.substring(0,objectWish.length()-(1+redundantEnds[i].length())); i=-1;}i++;}
String goldWish=objectWish.toUpperCase();
objectWish=objectWish.toLowerCase().trim();
final String[] redundantGoldStarts={"A PILE OF","A STACK OF","PILE OF","STACK OF"};
i=0;
while(i<redundantGoldStarts.length)
{
if(goldWish.startsWith(" "+redundantGoldStarts[i]+" "))
{ goldWish=goldWish.substring(1+redundantGoldStarts[i].length()); i=-1;}
i++;
}
final Vector goldCheck=CMParms.parse(goldWish.trim().toLowerCase());
if((goldCheck.size()>1)
&&(CMath.isNumber((String)goldCheck.firstElement()))
&&(CMath.s_int((String)goldCheck.firstElement())>0)
&&(CMLib.english().matchAnyCurrencySet(CMParms.combine(goldCheck,1))!=null))
{
final Coins newItem=(Coins)CMClass.getItem("StdCoins");
newItem.setCurrency(CMLib.english().matchAnyCurrencySet(CMParms.combine(goldCheck,1)));
newItem.setDenomination(CMLib.english().matchAnyDenomination(newItem.getCurrency(),CMParms.combine(goldCheck,1)));
final long goldCoins=CMath.s_long((String)goldCheck.firstElement());
newItem.setNumberOfCoins(goldCoins);
int experienceRequired=Math.max((int)Math.round(CMath.div(newItem.getTotalValue(),10.0)),0);
while((experienceRequired > mob.getExperience())
&& (experienceRequired > 0)
&& (newItem.getNumberOfCoins() > 1))
{
final int difference=experienceRequired-mob.getExperience();
final double diffPct=CMath.div(difference, experienceRequired);
long numCoinsToLose=Math.round(CMath.mul(diffPct, newItem.getNumberOfCoins()));
if(numCoinsToLose<1) numCoinsToLose=1;
newItem.setNumberOfCoins(newItem.getNumberOfCoins()-numCoinsToLose);
experienceRequired=Math.max((int)Math.round(CMath.div(newItem.getTotalValue(),10.0)),0);
}
newItem.setContainer(null);
newItem.wearAt(0);
newItem.recoverPhyStats();
mob.location().addItem(newItem,ItemPossessor.Expire.Player_Drop);
mob.location().showHappens(CMMsg.MSG_OK_ACTION,L("Suddenly, @x1 drops from the sky.",newItem.name()));
mob.location().recoverRoomStats();
wishDrain(mob,(baseLoss+experienceRequired),false);
return true;
}
final Vector thangsFound=new Vector();
Physical foundThang=null;
final Physical P=mob.location().fetchFromRoomFavorItems(null,objectWish);
foundThang=maybeAdd(mob,P,thangsFound,foundThang);
try
{
final List<Environmental> items=new LinkedList<Environmental>();
items.addAll(CMLib.map().findRoomItems(CMLib.map().rooms(), mob,objectWish,true,10));
items.addAll(CMLib.map().findInhabitants(CMLib.map().rooms(), mob,objectWish,10));
items.addAll(CMLib.map().findInventory(CMLib.map().rooms(), mob,objectWish,10));
items.addAll(CMLib.map().findShopStock(CMLib.map().rooms(), mob,objectWish,10));
for(final Environmental O : items)
if(O instanceof Physical)
foundThang=maybeAdd(mob,((Physical)O),thangsFound,foundThang);
}catch(final NoSuchElementException nse){}
if(foundThang instanceof PackagedItems)
foundThang = ((PackagedItems)foundThang).getItem();
if((thangsFound.size()>0)&&(foundThang!=null))
{
// yea, we get to DO something!
int experienceRequired=100*(foundThang.phyStats().level()-1);
if(foundThang instanceof MOB)
{
final MOB foundMOB=(MOB)foundThang;
MOB newMOB;
final boolean isPlayer=foundMOB.playerStats()!=null;
if(isPlayer && (!foundMOB.isMonster()) && CMLib.flags().isInTheGame(foundMOB, true))
{
newMOB=foundMOB;
mob.location().bringMobHere(newMOB, false);
}
else
{
if(isPlayer)
{
newMOB=CMClass.getMOB("GenMOB");
newMOB.setName(L("CopyOf@x1",foundThang.Name()));
newMOB.setDisplayText(((MOB) foundThang).displayText(mob));
newMOB.setDescription(foundThang.description());
}
else
newMOB=(MOB)foundMOB.copyOf();
newMOB.setStartRoom(null);
newMOB.setLocation(mob.location());
newMOB.recoverCharStats();
newMOB.recoverPhyStats();
newMOB.recoverMaxState();
newMOB.resetToMaxState();
newMOB.bringToLife(mob.location(),true);
}
newMOB.location().showOthers(newMOB,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> appears!"));
mob.location().show(mob,null,CMMsg.MSG_OK_ACTION,L("Suddenly, @x1 instantiates from the Java Plane.",newMOB.name()));
newMOB.setFollowing(mob);
if(experienceRequired<=0)
experienceRequired=0;
wishDrain(mob,(baseLoss+experienceRequired),false);
return true;
}
else
if((foundThang instanceof Item)
&&(!(foundThang instanceof ImmortalOnly))
&&(!(foundThang instanceof ClanItem))
&&(!CMath.bset(foundThang.phyStats().sensesMask(), PhyStats.SENSE_ITEMNOWISH)))
{
final Item newItem=(Item)foundThang.copyOf();
experienceRequired+=newItem.value();
if(experienceRequired>mob.getExpPrevLevel())
newItem.setContainer(null);
newItem.wearAt(0);
mob.location().addItem(newItem,ItemPossessor.Expire.Player_Drop);
mob.location().showHappens(CMMsg.MSG_OK_ACTION,L("Suddenly, @x1 drops from the sky.",newItem.name()));
mob.location().recoverRoomStats();
if(experienceRequired<=0)
experienceRequired=0;
wishDrain(mob,(baseLoss+experienceRequired),false);
return true;
}
}
// anything else may refer to another person or item
Physical target=null;
String possName=((String)wishV.elementAt(0)).trim();
if(wishV.size()>2)
{
possName=CMParms.combine(wishV,0,2);
target=mob.location().fetchFromRoomFavorMOBs(null,possName);
if(target==null) target=mob.findItem(possName);
if(target==null) possName=((String)wishV.elementAt(0)).trim();
}
if(target==null) target=mob.location().fetchFromRoomFavorMOBs(null,possName);
if(target==null) target=mob.findItem(possName);
if((target==null)
||(possName.equalsIgnoreCase("FOR"))
||(possName.equalsIgnoreCase("TO"))
||(possName.equalsIgnoreCase("BE"))
||(possName.equalsIgnoreCase("WOULD"))
||(possName.equalsIgnoreCase("A"))
||(possName.equalsIgnoreCase("THE"))
||(possName.equalsIgnoreCase("AN"))
||(possName.equalsIgnoreCase("I")))
{
if(possName.equalsIgnoreCase("I"))
{
wishV.removeElementAt(0);
myWish=" "+CMParms.combine(wishV,0).toUpperCase()+" ";
}
target=mob;
}
else
{
wishV.removeElementAt(0);
myWish=" "+CMParms.combine(wishV,0).toUpperCase().trim()+" ";
}
if(target instanceof PackagedItems)
target = ((PackagedItems)target).getItem();
if((target instanceof ImmortalOnly)
||(target instanceof ClanItem))
target=null;
if((target!=null)
&&(target!=mob)
&&(target instanceof MOB)
&&(!((MOB)target).isMonster())
&&(!mob.mayIFight((MOB)target)))
{
mob.tell(L("You cannot cast wish on @x1 until @x2 permits you. You must both toggle your playerkill flags on.",target.name(mob),mob.charStats().heshe()));
return false;
}
// a wish for recall
if((myWish.startsWith(" TO BE RECALLED "))
||(myWish.startsWith(" TO RECALL "))
||(myWish.startsWith(" RECALL "))
||(myWish.startsWith(" BE RECALLED "))
||(myWish.startsWith(" WAS RECALLED "))
||(myWish.startsWith(" WOULD RECALL "))
||(myWish.endsWith(" WAS RECALLED "))
||(myWish.endsWith(" WOULD RECALL "))
||(myWish.endsWith(" TO RECALL "))
||(myWish.endsWith(" BE RECALLED "))
||(myWish.endsWith(" RECALL ")&&(!myWish.endsWith(" OF RECALL "))))
{
Room recallRoom=mob.getStartRoom();
if((recallRoom==null)&&(target instanceof MOB)&&(((MOB)target).getStartRoom()!=null))
recallRoom=((MOB)target).getStartRoom();
if(recallRoom!=null)
{
wishDrain(mob,baseLoss,false);
bringThangHere(mob,recallRoom,target);
return true;
}
}
// a wish for death or destruction
if((myWish.startsWith(" TO DIE "))
||(myWish.startsWith(" TO BE DESTROYED "))
||(myWish.startsWith(" TO CROAK "))
||(myWish.startsWith(" WAS DEAD "))
||(myWish.startsWith(" WAS GONE "))
||(myWish.startsWith(" WOULD GO AWAY "))
||(myWish.startsWith(" WAS BANISHED "))
||(myWish.startsWith(" WOULD DIE "))
||(myWish.startsWith(" WOULD BE DEAD "))
||(myWish.startsWith(" WAS DESTROYED "))
||(myWish.startsWith(" DEATH "))
||(myWish.startsWith(" FOR DEATH "))
||(myWish.startsWith(" DESTRUCTION "))
||(myWish.startsWith(" TO BE BANISHED "))
||(myWish.startsWith(" TO BE DEAD "))
||(myWish.startsWith(" TO BE GONE "))
||(myWish.startsWith(" TO DISAPPEAR "))
||(myWish.startsWith(" TO VANISH "))
||(myWish.startsWith(" TO BE INVISIBLE "))
||(myWish.startsWith(" TO GO AWAY "))
||(myWish.startsWith(" TO GO TO HELL ")))
{
if(target instanceof Item)
{
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 quietly vanishes.",target.name()));
((Item)target).destroy();
}
else
if(target instanceof MOB)
{
final int exp=mob.getExperience();
CMLib.combat().postDeath(mob,(MOB)target,null);
if((!CMSecurity.isDisabled(CMSecurity.DisFlag.EXPERIENCE))
&&!mob.charStats().getCurrentClass().expless()
&&!mob.charStats().getMyRace().expless()
&&(mob.getExperience()>exp))
baseLoss=mob.getExperience()-exp;
}
wishDrain(mob,baseLoss*2,false);
return true;
}
// a wish for movement
String locationWish=myWish;
final String[] redundantStarts2={"TO GO TO",
"TO TELEPORT TO",
"TO TRANSPORT TO",
"TO TRANSFER TO",
"TO PORTAL TO",
"WOULD TELEPORT TO",
"WOULD TRANSPORT TO",
"WOULD TRANSFER TO",
"WOULD PORTAL TO",
"WOULD GO TO",
"TO PORTAL TO",
"TO BE TELEPORTED TO",
"TO BE TRANSPORTED TO",
"TO BE TRANSFERRED TO",
"TO BE PORTALLED TO",
"TO BE PORTALED TO",
"TO BE TELEPORTED",
"TO BE TRANSPORTED",
"TO BE TRANSFERRED",
"TO BE PORTALLED",
"TO BE PORTALED",
"TO APPEAR IN ",
"TO BE IN",
"TO APPEAR AT",
"TO BE AT",
"TO GO",
"TO MOVE TO",
"TO MOVE",
"TO BE AT",
"TO BE IN",
"TO BE",
"TO TRAVEL",
"TO WALK TO",
"TO WALK",
"TO TRAVEL TO",
"TO GOTO",
"TELEPORTATION TO",
"TRANSPORTED TO",
"TELEPORTED TO",
"TRANSFERRED TO",
"WAS TRANSPORTED TO",
"WAS TELEPORTED TO",
"WAS TRANSFERRED TO",
"TELEPORT",
"GO",
"GO TO",
"GOTO",
"TRANSFER",
"PORTAL",
"TELEPORTATION"};
final String[] redundantEnds2={"IMMEDIATELY","PLEASE","NOW","AT ONCE"};
boolean validStart=false;
i=0;
while(i<redundantStarts2.length)
{
if(locationWish.startsWith(" "+redundantStarts2[i]+" "))
{
validStart=true;
locationWish=locationWish.substring(1+redundantStarts2[i].length());
i=-1;
}
i++;
}
i=0;
while(i<redundantEnds2.length)
{
if(locationWish.endsWith(" "+redundantEnds2[i]+" "))
{
locationWish=locationWish.substring(0,locationWish.length()-(1+redundantEnds2[i].length()));
i=-1;
}
i++;
}
// a wish for teleportation
if(validStart)
{
Room newRoom=null;
final int dir=Directions.getGoodDirectionCode((String)wishV.lastElement());
if(dir>=0)
newRoom=mob.location().getRoomInDir(dir);
if(newRoom==null)
{
try
{
final List<Room> rooms=CMLib.map().findRooms(CMLib.map().rooms(), mob, locationWish.trim(), true, 10);
if(rooms.size()>0)
newRoom=rooms.get(CMLib.dice().roll(1,rooms.size(),-1));
}catch(final NoSuchElementException nse){}
}
if(newRoom!=null)
{
bringThangHere(mob,newRoom,target);
newRoom.show(mob, null, CMMsg.MSG_OK_VISUAL, L("<S-NAME> appears!"));
wishDrain(mob,baseLoss,false);
return true;
}
}
// temporary stat changes
if((target instanceof MOB)
&&((myWish.indexOf(" MORE ")>=0)
||(myWish.indexOf(" HIGHER ")>=0)
||(myWish.indexOf(" BIGGER ")>=0)
||(myWish.indexOf(" TO HAVE ")>=0)))
{
final MOB tm=(MOB)target;
if((myWish.indexOf("HIT POINT")>=0)&&(tm.curState().getHitPoints()<tm.maxState().getHitPoints()))
{
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is healthier!",target.name()));
tm.curState().setHitPoints(tm.maxState().getHitPoints());
wishDrain(mob,baseLoss,false);
return true;
}
else
if(myWish.indexOf("HIT POINT")>=0)
{
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is healthier!",target.name()));
tm.baseState().setHitPoints(tm.baseState().getHitPoints()+2);
tm.recoverMaxState();
wishDrain(mob,baseLoss,true);
return true;
}
if((myWish.indexOf("MANA")>=0)&&(tm.curState().getMana()<tm.maxState().getMana()))
{
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 has more mana!",target.name()));
tm.curState().setMana(tm.maxState().getMana());
wishDrain(mob,baseLoss,false);
return true;
}
else
if(myWish.indexOf("MANA")>=0)
{
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 has more mana!",target.name()));
tm.baseState().setMana(tm.baseState().getMana()+2);
tm.recoverMaxState();
wishDrain(mob,baseLoss,true);
return true;
}
if((myWish.indexOf("MOVE")>=0)&&(tm.curState().getMovement()<tm.maxState().getMovement()))
{
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 has more move points!",target.name()));
tm.curState().setMovement(tm.maxState().getMovement());
wishDrain(mob,baseLoss,false);
return true;
}
else
if(myWish.indexOf("MOVE")>=0)
{
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 has more move points!",target.name()));
tm.baseState().setMovement(tm.baseState().getMovement()+5);
tm.recoverMaxState();
wishDrain(mob,baseLoss,true);
return true;
}
}
if((target instanceof MOB)
&&(((MOB)target).charStats().getStat(CharStats.STAT_GENDER)!='M')
&&((myWish.indexOf(" BECOME ")>=0)
||(myWish.indexOf(" TURN INTO ")>=0)
||(myWish.indexOf(" CHANGE")>=0)
||(myWish.indexOf(" BE A")>=0)
||(myWish.indexOf(" WAS A")>=0)
||(myWish.indexOf(" TRANSFORM")>=0))
&&((myWish.indexOf(" MALE ")>=0)
||(myWish.indexOf(" MAN ")>=0)
||(myWish.indexOf(" BOY ")>=0)))
{
wishDrain(mob,baseLoss,true);
((MOB)target).baseCharStats().setStat(CharStats.STAT_GENDER,'M');
((MOB)target).recoverCharStats();
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is now male!",target.name()));
return true;
}
if((target instanceof MOB)
&&((myWish.indexOf(" BECOME ")>=0)
||(myWish.indexOf(" WAS ")>=0))
&&((myWish.indexOf(" LIGHTER ")>=0)
||(myWish.indexOf(" LOSE WEIGHT ")>=0)))
{
wishDrain(mob,baseLoss,true);
int weight=((MOB)target).basePhyStats().weight();
weight-=50;
if(weight<=0) weight=1;
((MOB)target).basePhyStats().setWeight(weight);
((MOB)target).recoverPhyStats();
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is now lighter!",target.name()));
return true;
}
if((target instanceof MOB)
&&((myWish.indexOf(" BECOME ")>=0)
||(myWish.indexOf(" WAS ")>=0))
&&((myWish.indexOf(" HEAVIER ")>=0)
||(myWish.indexOf(" GAIN WEIGHT ")>=0)))
{
wishDrain(mob,baseLoss,true);
int weight=((MOB)target).basePhyStats().weight();
weight+=50;
((MOB)target).basePhyStats().setWeight(weight);
((MOB)target).recoverPhyStats();
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is now heavier!",target.name()));
return true;
}
if((target instanceof MOB)
&&((myWish.indexOf(" EXP ")>=0)
||(myWish.indexOf(" EXPERIENCE ")>=0)))
{
int x=myWish.indexOf(" EXP");
final String wsh=myWish.substring(0,x).trim();
x=wsh.lastIndexOf(' ');
int amount=25;
if((x>=0)&&(CMath.isNumber(wsh.substring(x).trim())))
amount=CMath.s_int(wsh.substring(x).trim());
if((amount*4)>mob.getExperience())
amount=mob.getExperience()/4;
CMLib.leveler().postExperience(mob,null,null,-(amount*4),false);
mob.tell(L("Your wish has drained you of @x1 experience points.",""+(amount*4)));
CMLib.leveler().postExperience((MOB)target,null,null,amount,false);
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 gains experience!",target.name()));
return true;
}
if((target!=null)
&&((myWish.indexOf(" LOWER ")>=0)||(myWish.indexOf(" LOSE ")>=0)||(myWish.indexOf(" GAIN ")>=0)||(myWish.indexOf(" HIGHER ")>=0)||(myWish.indexOf(" WAS ")>=0)||(myWish.indexOf(" WOULD BE ")>=0)||(myWish.indexOf(" WOULD BECOME ")>=0)||(myWish.indexOf(" BECAME ")>=0))
&&((myWish.indexOf(" LEVEL ")>=0)||(myWish.indexOf(" LEVELS ")>=0))
&&(!CMSecurity.isDisabled(CMSecurity.DisFlag.LEVELS)))
{
int level=0;
if(myWish.indexOf(" LOWER ")>=0)
level=-1;
else
if(myWish.indexOf(" HIGHER" )>=0)
level=1;
else
if(myWish.indexOf(" GAIN ")>=0)
{
level=1;
final Vector<String> V=CMParms.parse(myWish);
for(int i2=1;i2<V.size();i2++)
{
if(V.elementAt(i2).equalsIgnoreCase("LEVELS"))
{
final String s=V.elementAt(i2-1);
if(CMath.isNumber(s)
&&((CMath.s_int(s)!=0)||(s.equalsIgnoreCase("0"))))
{
level=CMath.s_int(s);
break;
}
}
}
}
else
if(myWish.indexOf(" LOSE" )>=0)
{
level=-1;
final Vector<String> V=CMParms.parse(myWish);
for(int i2=1;i2<V.size();i2++)
{
if(V.elementAt(i2).equalsIgnoreCase("LEVELS"))
{
final String s=V.elementAt(i2);
if(CMath.isNumber(s)
&&((CMath.s_int(s)!=0)||(s.equalsIgnoreCase("0"))))
{
level=-CMath.s_int(s);
break;
}
}
}
}
else
{
final Vector<String> V=CMParms.parse(myWish);
for(int i2=0;i2<V.size()-1;i2++)
{
if(V.elementAt(i2).equalsIgnoreCase("LEVEL"))
{
final String s=V.elementAt(i2+1);
if(CMath.isNumber(s)
&&((CMath.s_int(s)!=0)||(s.equalsIgnoreCase("0"))))
{
level=CMath.s_int(s)-target.basePhyStats().level();
break;
}
}
}
}
if(level!=0)
{
int levelsLost=level;
if(levelsLost<0) levelsLost=levelsLost*-1;
int levelsGained=levelsLost;
levelsLost*=4;
if(levelsLost>=mob.basePhyStats().level())
{
levelsLost=mob.basePhyStats().level()-1;
levelsGained=levelsLost/4;
if(level>0) level=levelsGained;
else level=-levelsGained;
}
final int newLevel=target.basePhyStats().level()+level;
if(target instanceof MOB)
{
if(((newLevel>CMProps.getIntVar(CMProps.Int.LASTPLAYERLEVEL))
||(((MOB)target).charStats().getCurrentClass().leveless())
||(((MOB)target).charStats().isLevelCapped(((MOB)target).charStats().getCurrentClass()))
||(((MOB)target).charStats().getMyRace().leveless()))
&&(newLevel>target.basePhyStats().level()))
{
wishDrain(mob,baseLoss,false);
mob.tell(L("That's beyond your power, but you lost exp even for trying."));
return false;
}
}
if(target instanceof MOB)
{
final MOB MT=(MOB)target;
if(level>0)
{
for(int i2=0;i2<levelsGained;i2++)
{
CMLib.leveler().level(MT);
MT.recoverPhyStats();
MT.setExperience(CMLib.leveler().getLevelExperience(MT.basePhyStats().level()-1));
}
}
else
while(MT.basePhyStats().level()>newLevel)
{
CMLib.leveler().unLevel(MT);
MT.setExperience(CMLib.leveler().getLevelExperience(MT.basePhyStats().level()-1));
MT.recoverPhyStats();
}
}
else
{
target.basePhyStats().setLevel(newLevel);
target.recoverPhyStats();
}
wishDrain(mob,baseLoss*levelsLost,true);
if((mob!=target)||(level>0))
for(int i2=0;i2<levelsLost;i2++)
{
CMLib.leveler().unLevel(mob);
mob.setExperience(CMLib.leveler().getLevelExperience(mob.basePhyStats().level()-1));
}
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is now level @x2!",target.name(),""+target.phyStats().level()));
}
return true;
}
if((target instanceof MOB)
&&((myWish.indexOf(" BECOME ")>=0)
||(myWish.indexOf(" WAS ")>=0))
&&((myWish.indexOf(" SMALL ")>=0)
||(myWish.indexOf(" SHORT ")>=0)
||(myWish.indexOf(" LITTLE ")>=0)))
{
wishDrain(mob,baseLoss,true);
int weight=((MOB)target).basePhyStats().height();
weight-=12;
if(weight<=0) weight=5;
((MOB)target).basePhyStats().setHeight(weight);
((MOB)target).recoverPhyStats();
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is now shorter!",target.name()));
return true;
}
if((target instanceof MOB)
&&((myWish.indexOf(" BECOME ")>=0)
||(myWish.indexOf(" WAS ")>=0))
&&((myWish.indexOf(" LARGE ")>=0)
||(myWish.indexOf(" BIG ")>=0)
||(myWish.indexOf(" TALL ")>=0)))
{
wishDrain(mob,baseLoss,true);
int weight=((MOB)target).basePhyStats().height();
weight+=12;
((MOB)target).basePhyStats().setHeight(weight);
((MOB)target).recoverPhyStats();
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is now taller!",target.name()));
return true;
}
if((target instanceof MOB)
&&(((MOB)target).charStats().getStat(CharStats.STAT_GENDER)!='F')
&&((myWish.indexOf(" BECOME ")>=0)
||(myWish.indexOf(" TURN INTO ")>=0)
||(myWish.indexOf(" CHANGE")>=0)
||(myWish.indexOf(" BE A")>=0)
||(myWish.indexOf(" WAS A")>=0)
||(myWish.indexOf(" TRANSFORM")>=0))
&&((myWish.indexOf(" FEMALE ")>=0)
||(myWish.indexOf(" WOMAN ")>=0)
||(myWish.indexOf(" GIRL ")>=0)))
{
wishDrain(mob,baseLoss,true);
((MOB)target).baseCharStats().setStat(CharStats.STAT_GENDER,'F');
((MOB)target).recoverCharStats();
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is now female!",target.name()));
return true;
}
// change race
if((target instanceof MOB)
&&((myWish.indexOf(" BECOME ")>=0)
||(myWish.indexOf(" TURN INTO ")>=0)
||(myWish.indexOf(" CHANGE")>=0)
||(myWish.indexOf(" BE A")>=0)
||(myWish.indexOf(" WAS A")>=0)
||(myWish.indexOf(" TRANSFORM")>=0)))
{
final Race R=CMClass.findRace((String)wishV.lastElement());
if((R!=null)
&& (CMath.bset(R.availabilityCode(),Area.THEME_FANTASY))
&&(!R.ID().equalsIgnoreCase("StdRace"))
&&(!R.ID().equalsIgnoreCase("Unique")))
{
if(!((MOB)target).isMonster())
{
baseLoss+=500;
CMLib.leveler().unLevel(mob);
mob.setExperience(CMLib.leveler().getLevelExperience(mob.basePhyStats().level()-1));
}
wishDrain(mob,baseLoss,true);
final int oldCat=((MOB)target).baseCharStats().ageCategory();
((MOB)target).baseCharStats().setMyRace(R);
((MOB)target).baseCharStats().getMyRace().startRacing(((MOB)target),true);
((MOB)target).baseCharStats().getMyRace().setHeightWeight(((MOB)target).basePhyStats(),(char)((MOB)target).baseCharStats().getStat(CharStats.STAT_GENDER));
((MOB)target).recoverCharStats();
((MOB)target).recoverPhyStats();
CMLib.utensils().confirmWearability((MOB)target);
if(!((MOB)target).isMonster())
((MOB)target).baseCharStats().setStat(CharStats.STAT_AGE,R.getAgingChart()[oldCat]);
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is now a @x2!",target.name(),R.name()));
return true;
}
}
// change class
if((target instanceof MOB)
&&((myWish.indexOf(" BECOME ")>=0)
||(myWish.indexOf(" TURN INTO ")>=0)
||(myWish.indexOf(" CHANGE")>=0)
||(myWish.indexOf(" LEARN TO BE")>=0)
||(myWish.indexOf(" BE A")>=0)
||(myWish.indexOf(" WAS A")>=0)
||(myWish.indexOf(" TRANSFORM")>=0)))
{
final CharClass C=CMClass.findCharClass((String)wishV.lastElement());
if((C!=null)&&(CMath.bset(C.availabilityCode(),Area.THEME_FANTASY)))
{
final CharClass oldC=mob.baseCharStats().getCurrentClass();
baseLoss+=1000;
wishDrain(mob,baseLoss,true);
CMLib.leveler().unLevel(mob);
CMLib.leveler().unLevel(mob);
CMLib.leveler().unLevel(mob);
mob.setExperience(CMLib.leveler().getLevelExperience(mob.basePhyStats().level()-1));
final StringBuffer str=new StringBuffer("");
for(final int trait: CharStats.CODES.BASECODES())
{
final int newVal=C.maxStatAdjustments()[trait];
final int amountToLose=oldC.maxStatAdjustments()[trait]-newVal;
if((amountToLose>0)&&(mob.baseCharStats().getStat(trait)>amountToLose))
{
mob.baseCharStats().setStat(trait,mob.baseCharStats().getStat(trait)-amountToLose);
str.append(L("\n\rYou lost @x1 points of @x2.",""+amountToLose,CharStats.CODES.DESC(trait).toLowerCase()));
}
}
mob.tell(str.toString()+"\n\r");
((MOB)target).baseCharStats().setCurrentClass(C);
if((!((MOB)target).isMonster())&&(((MOB)target).soulMate()==null))
CMLib.coffeeTables().bump(target,CMTableRow.STAT_CLASSCHANGE);
((MOB)target).baseCharStats().getCurrentClass().startCharacter((MOB)target,false,true);
((MOB)target).recoverCharStats();
((MOB)target).recoverPhyStats();
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 is now a @x2!",target.name(),C.name(((MOB)target).baseCharStats().getCurrentClassLevel())));
return true;
}
}
// gaining new abilities!
if(target instanceof MOB)
{
int code=-1;
int x=myWish.indexOf(" KNOW "); if((x>=0)&&(x+5>code)) code=x+5;
x=myWish.indexOf(" KNEW "); if((x>=0)&&(x+5>code)) code=x+5;
x=myWish.indexOf(" LEARN "); if((x>=0)&&(x+6>code)) code=x+6;
x=myWish.indexOf(" COULD "); if((x>=0)&&(x+6>code)) code=x+6;
x=myWish.indexOf(" GAIN "); if((x>=0)&&(x+5>code)) code=x+5;
x=myWish.indexOf(" BE TAUGHT "); if((x>=0)&&(x+10>code)) code=x+10;
x=myWish.indexOf(" HOW TO "); if((x>=0)&&(x+7>code)) code=x+7;
x=myWish.indexOf(" ABLE TO "); if((x>=0)&&(x+8>code)) code=x+8;
x=myWish.indexOf(" CAST "); if((x>=0)&&(x+5>code)) code=x+5;
x=myWish.indexOf(" SING "); if((x>=0)&&(x+5>code)) code=x+5;
x=myWish.indexOf(" PRAY FOR "); if((x>=0)&&(x+9>code)) code=x+9;
if((code>=0)&&(code<myWish.length()))
{
final MOB tm=(MOB)target;
Ability A=CMClass.findAbility(myWish.substring(code).trim());
if((A!=null)
&&(CMLib.ableMapper().lowestQualifyingLevel(A.ID())>0)
&&(!CMath.bset(A.classificationCode(),Ability.DOMAIN_IMMORTAL)))
{
if(CMLib.ableMapper().lowestQualifyingLevel(A.ID())>=25)
{
baseLoss=getXPCOSTAdjustment(mob,baseLoss);
CMLib.leveler().postExperience(mob,null,null,-baseLoss,false);
mob.tell(L("Your wish has drained you of @x1 experience points, but that is beyond your wishing ability.",""+baseLoss));
return false;
}
if(tm.fetchAbility(A.ID())!=null)
{
baseLoss=getXPCOSTAdjustment(mob,baseLoss);
A=tm.fetchAbility(A.ID());
CMLib.leveler().postExperience(mob,null,null,-baseLoss,false);
mob.tell(L("Your wish has drained you of @x1 experience points.",""+baseLoss));
}
else
{
tm.addAbility(A);
baseLoss+=500;
wishDrain(mob,baseLoss,true);
CMLib.leveler().unLevel(mob);
CMLib.leveler().unLevel(mob);
mob.setExperience(CMLib.leveler().getLevelExperience(mob.basePhyStats().level()-1));
}
A=tm.fetchAbility(A.ID());
A.setProficiency(100);
A.autoInvocation(tm);
mob.location().show(mob,null,CMMsg.MSG_OK_VISUAL,L("@x1 now knows @x2!",target.name(),A.name()));
final Ability A2=tm.fetchEffect(A.ID());
if(A2!=null) A2.setProficiency(100);
return true;
}
}
}
// attributes will be hairy
int foundAttribute=-1;
for(final int attributes : CharStats.CODES.ALLCODES())
{
if(CMLib.english().containsString(myWish,CharStats.CODES.DESC(attributes)))
{ foundAttribute=attributes; break;}
}
if(myWish.indexOf("STRONG")>=0)
foundAttribute=CharStats.STAT_STRENGTH;
if(myWish.indexOf(" INTELLIGEN")>=0)
foundAttribute=CharStats.STAT_INTELLIGENCE;
if(myWish.indexOf(" SMART")>=0)
foundAttribute=CharStats.STAT_INTELLIGENCE;
if(myWish.indexOf(" WISE")>=0)
foundAttribute=CharStats.STAT_WISDOM;
if(myWish.indexOf(" FAST")>=0)
foundAttribute=CharStats.STAT_DEXTERITY;
if(myWish.indexOf(" DEXTROUS")>=0)
foundAttribute=CharStats.STAT_DEXTERITY;
if(myWish.indexOf(" HEALTH")>=0)
foundAttribute=CharStats.STAT_CONSTITUTION;
if(myWish.indexOf(" PRETTY")>=0)
foundAttribute=CharStats.STAT_CHARISMA;
if(myWish.indexOf(" NICE")>=0)
foundAttribute=CharStats.STAT_CHARISMA;
if(myWish.indexOf(" PRETTIER")>=0)
foundAttribute=CharStats.STAT_CHARISMA;
if((myWish.indexOf("RESIST")>=0)
||(myWish.indexOf("IMMUN")>=0))
{
for(final int saveStat : CharStats.CODES.SAVING_THROWS())
if(myWish.indexOf(" "+CharStats.CODES.DESC(saveStat))>=0)
foundAttribute=saveStat;
if(foundAttribute<0)
for(final int saveStat : CharStats.CODES.SAVING_THROWS())
if(myWish.indexOf(" "+CharStats.CODES.NAME(saveStat))>=0)
foundAttribute=saveStat;
if(myWish.indexOf(" PARALY")>=0)
foundAttribute=CharStats.STAT_SAVE_PARALYSIS;
if(myWish.indexOf(" FIRE")>=0)
foundAttribute=CharStats.STAT_SAVE_FIRE;
if(myWish.indexOf(" FLAMES")>=0)
foundAttribute=CharStats.STAT_SAVE_FIRE;
if(myWish.indexOf(" COLD")>=0)
foundAttribute=CharStats.STAT_SAVE_COLD;
if(myWish.indexOf(" FROST")>=0)
foundAttribute=CharStats.STAT_SAVE_COLD;
if(myWish.indexOf(" GAS")>=0)
foundAttribute=CharStats.STAT_SAVE_GAS;
if(myWish.indexOf(" ACID")>=0)
foundAttribute=CharStats.STAT_SAVE_ACID;
if(myWish.indexOf(" SPELL ")>=0)
foundAttribute=CharStats.STAT_SAVE_MAGIC;
if(myWish.indexOf(" TRAPS ")>=0)
foundAttribute=CharStats.STAT_SAVE_TRAPS;
if(myWish.indexOf(" SPELLS ")>=0)
foundAttribute=CharStats.STAT_SAVE_MAGIC;
if(myWish.indexOf(" SONGS")>=0)
foundAttribute=CharStats.STAT_SAVE_MIND;
if(myWish.indexOf(" CHARMS")>=0)
foundAttribute=CharStats.STAT_SAVE_MIND;
if(myWish.indexOf(" ELECTRI")>=0)
foundAttribute=CharStats.STAT_SAVE_ELECTRIC;
if(myWish.indexOf(" POISON")>=0)
foundAttribute=CharStats.STAT_SAVE_POISON;
if(myWish.indexOf(" DEATH")>=0)
foundAttribute=CharStats.STAT_SAVE_UNDEAD;
if(myWish.indexOf(" DISEASE")>=0)
foundAttribute=CharStats.STAT_SAVE_DISEASE;
if(myWish.indexOf(" PLAGUE")>=0)
foundAttribute=CharStats.STAT_SAVE_DISEASE;
if(myWish.indexOf(" COLDS ")>=0)
foundAttribute=CharStats.STAT_SAVE_DISEASE;
if(myWish.indexOf(" SICK")>=0)
foundAttribute=CharStats.STAT_SAVE_DISEASE;
if(myWish.indexOf(" UNDEAD")>=0)
foundAttribute=CharStats.STAT_SAVE_UNDEAD;
if(myWish.indexOf(" EVIL")>=0)
foundAttribute=CharStats.STAT_SAVE_UNDEAD;
}
if((foundAttribute>=0)
&&(target instanceof MOB)
&&((myWish.indexOf(" LESS ")>=0)
||(myWish.indexOf(" LOWER ")>=0)
||(myWish.indexOf(" LESS RESIST")>=0)
||(myWish.indexOf(" LESS IMMUN")>=0)
||(myWish.indexOf(" NO RESIST")>=0)
||(myWish.indexOf(" NO IMMUN")>=0)
||(myWish.indexOf(" LOSE ")>=0)))
{
if(CharStats.CODES.isBASE(foundAttribute))
baseLoss-=1000;
else
baseLoss-=10;
wishDrain(mob,baseLoss,true);
if(foundAttribute<=6)
((MOB)target).baseCharStats().setStat(foundAttribute,((MOB)target).baseCharStats().getStat(foundAttribute)-1);
else
((MOB)target).baseCharStats().setStat(foundAttribute,((MOB)target).baseCharStats().getStat(foundAttribute)-33);
((MOB)target).recoverCharStats();
mob.recoverCharStats();
mob.location().show(mob,null,CMMsg.MSG_OK_ACTION,L("@x1 has lost @x2.",target.name(),CharStats.CODES.DESC(foundAttribute).toLowerCase()));
return true;
}
if((foundAttribute>=0)
&&(target instanceof MOB)
&&((myWish.indexOf(" MORE ")>=0)
||(myWish.indexOf(" HIGHER ")>=0)
||(myWish.indexOf("RESIST")>=0)
||(myWish.indexOf("IMMUN")>=0)
||(myWish.indexOf(" BIGGER ")>=0)
||(myWish.indexOf(" TO HAVE ")>=0)
||(myWish.indexOf(" GAIN ")>=0)
||(myWish.indexOf(" WAS ")>=0)
||(myWish.indexOf(" TO BE ")>=0)))
{
switch(foundAttribute)
{
case CharStats.STAT_CHARISMA:
case CharStats.STAT_CONSTITUTION:
case CharStats.STAT_DEXTERITY:
case CharStats.STAT_INTELLIGENCE:
case CharStats.STAT_STRENGTH:
case CharStats.STAT_WISDOM:
{
int trainsRequired=CMLib.login().getTrainingCost(mob, foundAttribute, true);
if(trainsRequired<0)
trainsRequired=-trainsRequired;
if(trainsRequired>100) trainsRequired=100;
baseLoss+=((CMLib.leveler().getLevelExperienceJustThisLevel(mob.basePhyStats().level())/5)*(1+trainsRequired));
break;
}
default:
baseLoss+=10;
break;
}
wishDrain(mob,baseLoss,true);
if(!CMSecurity.isDisabled(CMSecurity.DisFlag.LEVELS))
{
CMLib.leveler().unLevel(mob);
mob.setExperience(CMLib.leveler().getLevelExperience(mob.basePhyStats().level()-1));
}
if(foundAttribute<=6)
((MOB)target).baseCharStats().setStat(foundAttribute,((MOB)target).baseCharStats().getStat(foundAttribute)+1);
else
((MOB)target).baseCharStats().setStat(foundAttribute,((MOB)target).baseCharStats().getStat(foundAttribute)+10);
mob.recoverCharStats();
((MOB)target).recoverCharStats();
mob.location().show(mob,null,CMMsg.MSG_OK_ACTION,L("@x1 has gained @x2.",target.name(),CharStats.CODES.DESC(foundAttribute).toLowerCase()));
return true;
}
baseLoss=getXPCOSTAdjustment(mob,baseLoss);
CMLib.leveler().postExperience(mob,null,null,-baseLoss,false);
Log.sysOut("Wish",mob.Name()+" unsuccessfully wished for '"+CMParms.combine(commands,0)+"'");
mob.tell(L("Your attempted wish has cost you @x1 experience points, but it did not come true. You might try rewording your wish next time.",""+baseLoss));
return false;
}
return success;
}
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2011-19 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.corecomponents;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.awt.Image;
import java.awt.Toolkit;
import java.lang.ref.SoftReference;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import javax.swing.SwingUtilities;
import javax.swing.Timer;
import org.apache.commons.lang3.StringUtils;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.FilterNode;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.SortCriterion;
import static org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.loadSortCriteria;
import org.sleuthkit.autopsy.coreutils.ImageUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
/**
* Wraps around original data result children nodes of the passed in parent
* node, and creates filter nodes for the supported children nodes, adding the
* thumbnail. If original nodes are lazy loaded, this will support lazy loading.
* We add a page node hierarchy to divide children nodes into "pages".
*
* Filter-node like class, but adds additional hierarchy (pages) as parents of
* the filtered nodes.
*/
class ThumbnailViewChildren extends Children.Keys<Integer> {

    private static final Logger logger = Logger.getLogger(ThumbnailViewChildren.class.getName());

    @NbBundle.Messages("ThumbnailViewChildren.progress.cancelling=(Cancelling)")
    private static final String CANCELLING_POSTIX = Bundle.ThumbnailViewChildren_progress_cancelling();

    /** Maximum number of thumbnail nodes grouped under a single page node. */
    static final int IMAGES_PER_PAGE = 200;

    // Pool that generates thumbnails off the EDT; at most three loaders run concurrently.
    private final ExecutorService executor = Executors.newFixedThreadPool(3,
            new ThreadFactoryBuilder().setNameFormat("Thumbnail-Loader-%d").build());

    // Outstanding thumbnail-load tasks, retained so cancelLoadingThumbnails() can cancel them in bulk.
    private final List<ThumbnailViewNode.ThumbnailLoadTask> tasks = new ArrayList<>();

    // The node whose children are being wrapped with thumbnails.
    private final Node parent;

    // The supported child nodes of parent, partitioned into IMAGES_PER_PAGE-sized pages.
    private final List<List<Node>> pages = new ArrayList<>();

    // Current thumbnail edge size in pixels; mutable via setThumbsSize().
    private int thumbSize;

    /**
     * The constructor
     *
     * @param parent    The node which is the parent of this children.
     * @param thumbSize The height and/or width of the thumbnails in pixels.
     */
    ThumbnailViewChildren(Node parent, int thumbSize) {
        super(true); //support lazy loading
        this.parent = parent;
        this.thumbSize = thumbSize;
    }

    /**
     * Collects the supported (thumbnail-able) children of the parent node,
     * sorts them by the persisted sort criteria, partitions them into pages,
     * and publishes the page indices as this Children's keys.
     */
    @Override
    protected void addNotify() {
        super.addNotify();
        /*
         * TODO: When lazy loading of original nodes is fixed, we should be
         * asking the datamodel for the children instead and not counting the
         * children nodes (which might not be preloaded at this point).
         */
        // get list of supported children sorted by persisted criteria
        final List<Node> suppContent
                = Stream.of(parent.getChildren().getNodes())
                        .filter(ThumbnailViewChildren::isSupported)
                        .sorted(getComparator())
                        .collect(Collectors.toList());
        if (suppContent.isEmpty()) {
            //if there are no images, there is nothing more to do
            return;
        }
        //divide the supported content into buckets
        pages.addAll(Lists.partition(suppContent, IMAGES_PER_PAGE));
        //the keys are just the indices into the pages list.
        setKeys(IntStream.range(0, pages.size()).boxed().collect(Collectors.toList()));
    }

    /**
     * Get a comparator for the child nodes loaded from the persisted sort
     * criteria. The comparator is a composite one that applies all the sort
     * criteria at once.
     *
     * @return A Comparator used to sort the child nodes.
     */
    private synchronized Comparator<Node> getComparator() {
        Comparator<Node> comp = (node1, node2) -> 0; //everything is equal.
        if (!(parent instanceof TableFilterNode)) {
            // No persisted criteria are available for other parent types.
            return comp;
        } else {
            List<SortCriterion> sortCriteria = loadSortCriteria((TableFilterNode) parent);
            /*
             * Make a comparator that will sort the nodes.
             *
             * Map each SortCriterion to a Comparator<Node> and then collapse
             * them to a single comparator that uses the next subsequent
             * Comparator to break ties.
             */
            return sortCriteria.stream()
                    .map(this::getCriterionComparator)
                    .collect(Collectors.reducing(Comparator::thenComparing))
                    .orElse(comp); // default to unordered if nothing is persisted
        }
    }

    /**
     * Make a comparator from the given criterion. The comparator compares Nodes
     * according to the value of the property specified in the SortCriterion.
     *
     *
     * @param criterion The criterion to make a comparator for.
     *
     * @return The comparator for the given criterion.
     */
    private Comparator<Node> getCriterionComparator(SortCriterion criterion) {
        @SuppressWarnings("unchecked")
        Comparator<Node> c = Comparator.comparing(node -> getPropertyValue(node, criterion.getProperty()),
                Comparator.nullsFirst(Comparator.naturalOrder()));// Null values go first, unless reversed below.
        switch (criterion.getSortOrder()) {
            case DESCENDING:
            case UNSORTED:
                // UNSORTED is treated the same as DESCENDING here.
                return c.reversed();
            case ASCENDING:
            default:
                return c;
        }
    }

    /**
     * Get the value of the given property from the given node.
     *
     * @param node The node to get the value from.
     * @param prop The property to get the value of.
     *
     * @return The value of the property in the node, or null if the property is
     *         not found or its value cannot be read.
     */
    @SuppressWarnings("rawtypes")
    private Comparable getPropertyValue(Node node, Node.Property<?> prop) {
        for (Node.PropertySet ps : node.getPropertySets()) {
            for (Node.Property<?> p : ps.getProperties()) {
                if (p.equals(prop)) {
                    try {
                        if (p.getValue() instanceof Comparable) {
                            return (Comparable) p.getValue();
                        } else {
                            //if the value is not comparable use its string representation
                            return p.getValue().toString();
                        }
                    } catch (IllegalAccessException | InvocationTargetException ex) {
                        logger.log(Level.WARNING, "Error getting value for thumbnail children", ex);
                    }
                }
            }
        }
        return null;
    }

    /**
     * Drops the cached page partitions when this Children is removed from
     * display.
     */
    @Override
    protected void removeNotify() {
        super.removeNotify();
        pages.clear();
    }

    /**
     * Creates the page node for the given page index key.
     */
    @Override
    protected Node[] createNodes(Integer pageNum) {
        return new Node[]{new ThumbnailPageNode(pageNum, pages.get(pageNum))};
    }

    /**
     * Whether the given node can be shown as a thumbnail: it must have an
     * AbstractFile in its lookup and ImageUtils must support thumbnailing it.
     */
    private static boolean isSupported(Node node) {
        if (node != null) {
            Content content = node.getLookup().lookup(AbstractFile.class);
            if (content != null) {
                return ImageUtils.thumbnailSupported(content);
            }
        }
        return false;
    }

    /**
     * Updates the thumbnail size and propagates it to every thumbnail node on
     * every currently created page.
     */
    public void setThumbsSize(int thumbSize) {
        this.thumbSize = thumbSize;
        for (Node page : getNodes()) {
            for (Node node : page.getChildren().getNodes()) {
                ((ThumbnailViewNode) node).setThumbSize(thumbSize);
            }
        }
    }

    /**
     * Cancels all outstanding thumbnail-load tasks and shuts the loader pool
     * down. After this call, loadThumbnail() returns null (the pool is not
     * restarted).
     */
    synchronized void cancelLoadingThumbnails() {
        tasks.forEach(task -> task.cancel(Boolean.TRUE));
        executor.shutdownNow();
        tasks.clear();
    }

    /**
     * Submits a thumbnail-load task for the given node to the loader pool.
     *
     * @return The submitted task, or null if the pool has been shut down.
     */
    private synchronized ThumbnailViewNode.ThumbnailLoadTask loadThumbnail(ThumbnailViewNode node) {
        if (executor.isShutdown() == false) {
            ThumbnailViewNode.ThumbnailLoadTask task = node.new ThumbnailLoadTask();
            tasks.add(task);
            executor.submit(task);
            return task;
        } else {
            return null;
        }
    }

    /**
     * Node that wraps around original node and adds the thumbnail representing
     * the image/video.
     */
    private class ThumbnailViewNode extends FilterNode {

        private final Logger logger = Logger.getLogger(ThumbnailViewNode.class.getName());

        // Animated spinner shown while the real thumbnail is being generated.
        private final Image waitingIcon = Toolkit.getDefaultToolkit().createImage(ThumbnailViewNode.class.getResource("/org/sleuthkit/autopsy/images/working_spinner.gif")); //NOI18N

        // Soft cache of the generated thumbnail; may be reclaimed under memory pressure.
        private SoftReference<Image> thumbCache = null;

        private int thumbSize;

        // The file this node represents; null if the wrapped node has no AbstractFile.
        private final Content content;

        // In-flight load task for this node's thumbnail, if any.
        private ThumbnailLoadTask thumbTask;

        // Fires icon-change events while the spinner is showing so it repaints.
        private Timer waitSpinnerTimer;

        /**
         * The constructor
         *
         * @param wrappedNode The original node that this Node wraps.
         * @param thumbSize   The height and/or width of the thumbnail in pixels.
         */
        private ThumbnailViewNode(Node wrappedNode, int thumbSize) {
            super(wrappedNode, FilterNode.Children.LEAF);
            this.thumbSize = thumbSize;
            this.content = this.getLookup().lookup(AbstractFile.class);
        }

        @Override
        public String getDisplayName() {
            // Keep names short so they fit under the thumbnail.
            return StringUtils.abbreviate(super.getDisplayName(), 18);
        }

        /**
         * Returns the cached thumbnail if available; otherwise kicks off an
         * asynchronous load (once) and returns the wait-spinner image in the
         * meantime, driving repaints via a Swing Timer until the load
         * completes.
         */
        @Override
        @NbBundle.Messages({"# {0} - file name",
            "ThumbnailViewNode.progressHandle.text=Generating thumbnail for {0}"})
        synchronized public Image getIcon(int type) {
            if (content == null) {
                return ImageUtils.getDefaultThumbnail();
            }
            if (thumbCache != null) {
                Image thumbnail = thumbCache.get();
                if (thumbnail != null) {
                    return thumbnail;
                }
            }
            if (thumbTask == null) {
                // NOTE(review): loadThumbnail() may return null if the pool was
                // already shut down; in that case the spinner keeps showing.
                thumbTask = loadThumbnail(ThumbnailViewNode.this);
            }
            if (waitSpinnerTimer == null) {
                // 1 ms repeating timer: repeatedly fires icon-change events so
                // the animated spinner GIF keeps repainting.
                waitSpinnerTimer = new Timer(1, actionEvent -> fireIconChange());
                waitSpinnerTimer.start();
            }
            return waitingIcon;
        }

        /**
         * Changes the thumbnail size: invalidates the cache and cancels any
         * in-flight load so the next getIcon() call regenerates at the new
         * size.
         */
        synchronized void setThumbSize(int iconSize) {
            this.thumbSize = iconSize;
            thumbCache = null;
            if (thumbTask != null) {
                thumbTask.cancel(true);
                thumbTask = null;
            }
        }

        /**
         * FutureTask that generates this node's thumbnail on the loader pool,
         * reporting progress through a NetBeans ProgressHandle.
         */
        private class ThumbnailLoadTask extends FutureTask<Image> {

            private final ProgressHandle progressHandle;
            private final String progressText;

            // Tracked separately from super.isCancelled() so a cancel requested
            // before the task starts is still honored in done().
            private boolean cancelled = false;

            ThumbnailLoadTask() {
                super(new Callable<Image>() { //Does not work as lambda expression in dependent projects in IDE
                    public Image call() {
                        return ImageUtils.getThumbnail(content, thumbSize);
                    }
                });
                //super(() -> ImageUtils.getThumbnail(content, thumbSize));
                progressText = Bundle.ThumbnailViewNode_progressHandle_text(content.getName());
                progressHandle = ProgressHandleFactory.createSystemHandle(progressText);
                progressHandle.setInitialDelay(500);
                progressHandle.start();
            }

            @Override
            synchronized public boolean cancel(boolean mayInterrupt) {
                cancelled = true;
                progressHandle.suspend(progressText + " " + CANCELLING_POSTIX);
                return super.cancel(mayInterrupt);
            }

            @Override
            synchronized public boolean isCancelled() {
                // Combine the local flag with FutureTask's own cancellation state.
                return cancelled || super.isCancelled();
            }

            /**
             * Finishes the progress handle and, on the EDT, stops the spinner
             * timer and publishes the generated thumbnail into the soft cache.
             */
            @Override
            synchronized protected void done() {
                progressHandle.finish();
                SwingUtilities.invokeLater(() -> {
                    if (waitSpinnerTimer != null) {
                        waitSpinnerTimer.stop();
                        waitSpinnerTimer = null;
                    }
                    try {
                        if (isCancelled() == false) {
                            thumbCache = new SoftReference<>(get());
                            fireIconChange();
                        }
                    } catch (CancellationException ex) {
                        //Task was cancelled, do nothing
                    } catch (InterruptedException | ExecutionException ex) {
                        if (false == (ex.getCause() instanceof CancellationException)) {
                            logger.log(Level.SEVERE, "Error getting thumbnail icon for " + content.getName(), ex); //NON-NLS
                        }
                    }
                });
            }
        }
    }

    /**
     * Node representing a page of thumbnails, a parent of image nodes, with a
     * name showing children range
     */
    private class ThumbnailPageNode extends AbstractNode {

        private ThumbnailPageNode(Integer pageNum, List<Node> childNodes) {
            super(new ThumbnailPageNodeChildren(childNodes), Lookups.singleton(pageNum));
            setName(Integer.toString(pageNum + 1));
            // Display name shows the 1-based range of children on this page, e.g. "201-400".
            int from = 1 + (pageNum * IMAGES_PER_PAGE);
            int to = from + ((ThumbnailPageNodeChildren) getChildren()).getChildCount() - 1;
            setDisplayName(from + "-" + to);
            this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/Folder-icon.png"); //NON-NLS
        }
    }

    /**
     * Children.Keys implementation which uses nodes as keys, and wraps them in
     * ThumbnailViewNodes as the child nodes.
     *
     */
    private class ThumbnailPageNodeChildren extends Children.Keys<Node> {

        /*
         * wrapped original nodes
         */
        private List<Node> keyNodes = null;

        ThumbnailPageNodeChildren(List<Node> keyNodes) {
            super(true); //support lazy loading
            this.keyNodes = keyNodes;
        }

        @Override
        protected void addNotify() {
            super.addNotify();
            setKeys(keyNodes);
        }

        @Override
        protected void removeNotify() {
            super.removeNotify();
            setKeys(Collections.emptyList());
        }

        int getChildCount() {
            return keyNodes.size();
        }

        /**
         * Wraps each original node in a ThumbnailViewNode at the current size.
         */
        @Override
        protected Node[] createNodes(Node wrapped) {
            if (wrapped != null) {
                final ThumbnailViewNode thumb = new ThumbnailViewNode(wrapped, thumbSize);
                return new Node[]{thumb};
            } else {
                return new Node[]{};
            }
        }
    }
}
| |
package com.orientechnologies.orient.core.storage.index.sbtree.singlevalue.v3;
import com.orientechnologies.common.exception.OException;
import com.orientechnologies.common.exception.OHighLevelException;
import com.orientechnologies.common.io.OFileUtils;
import com.orientechnologies.common.serialization.types.OUTF8Serializer;
import com.orientechnologies.common.util.ORawPair;
import com.orientechnologies.orient.core.db.ODatabaseInternal;
import com.orientechnologies.orient.core.db.ODatabaseSession;
import com.orientechnologies.orient.core.db.OrientDB;
import com.orientechnologies.orient.core.db.OrientDBConfig;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage;
import com.orientechnologies.orient.core.storage.impl.local.paginated.atomicoperations.OAtomicOperationsManager;
import java.io.File;
import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.stream.Stream;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class CellBTreeSingleValueV3TestIT {
private OAtomicOperationsManager atomicOperationsManager;
private CellBTreeSingleValueV3<String> singleValueTree;
private OrientDB orientDB;
private String dbName;
/**
 * Per-test setup: creates a fresh plocal database under the build directory,
 * instantiates the single-value b-tree on its paginated storage, and creates
 * the tree inside an atomic operation.
 */
@Before
public void before() throws Exception {
    // Keep each run isolated: the database lives under the build directory,
    // in a folder named after this test class.
    final String baseDir =
        System.getProperty("buildDirectory", ".")
            + File.separator
            + CellBTreeSingleValueV3TestIT.class.getSimpleName();
    dbName = "localSingleBTreeTest";
    // Wipe any leftovers from a previous (possibly crashed) run.
    OFileUtils.deleteRecursively(new File(baseDir, dbName));
    orientDB = new OrientDB("plocal:" + baseDir, OrientDBConfig.builder().build());
    orientDB.execute(
        "create database " + dbName + " plocal users ( admin identified by 'admin' role admin)");
    // Grab the low-level paginated storage from a short-lived session; the
    // b-tree under test is built directly on top of it.
    final OAbstractPaginatedStorage storage;
    try (ODatabaseSession session = orientDB.open(dbName, "admin", "admin")) {
        storage = (OAbstractPaginatedStorage) ((ODatabaseInternal<?>) session).getStorage();
    }
    singleValueTree = new CellBTreeSingleValueV3<>("singleBTree", ".sbt", ".nbt", storage);
    atomicOperationsManager = storage.getAtomicOperationsManager();
    // Creating the tree must itself happen inside an atomic (WAL-backed) operation.
    atomicOperationsManager.executeInsideAtomicOperation(
        null,
        atomicOperation ->
            singleValueTree.create(atomicOperation, OUTF8Serializer.INSTANCE, null, 1, null));
}
/**
 * Per-test teardown: drops the test database and closes the embedded OrientDB
 * context so runs do not leak files or handles between test methods.
 */
@After
public void afterMethod() {
    orientDB.drop(dbName);
    orientDB.close();
}
/**
 * Inserts one million sequential keys in batches, executing every batch twice:
 * the first attempt is deliberately rolled back, the second is committed. The
 * tree must contain exactly the committed data, its first/last keys must track
 * the (lexicographic) String ordering, and lookups outside the inserted range
 * must miss.
 */
@Test
public void testKeyPut() throws Exception {
    final int keysCount = 1_000_000;
    final int rollbackInterval = 100;
    final String[] lastKey = new String[1];
    for (int batch = 0; batch < keysCount / rollbackInterval; batch++) {
        for (int attempt = 0; attempt < 2; attempt++) {
            final int batchIndex = batch;
            final boolean commit = attempt == 1;
            try {
                atomicOperationsManager.executeInsideAtomicOperation(
                    null,
                    atomicOperation -> {
                        for (int offset = 0; offset < rollbackInterval; offset++) {
                            final int id = batchIndex * rollbackInterval + offset;
                            final String key = Integer.toString(id);
                            singleValueTree.put(atomicOperation, key, new ORecordId(id % 32000, id));
                            if (commit) {
                                if (id % 100_000 == 0) {
                                    System.out.printf("%d items loaded out of %d%n", id, keysCount);
                                }
                                // Track the lexicographically greatest committed key;
                                // the tree orders its String keys the same way.
                                if (lastKey[0] == null || key.compareTo(lastKey[0]) > 0) {
                                    lastKey[0] = key;
                                }
                            }
                        }
                        if (!commit) {
                            // First pass of each batch: force a rollback.
                            throw new RollbackException();
                        }
                    });
            } catch (RollbackException ignore) {
                // Expected on the rollback pass of every batch.
            }
        }
        // After each committed batch the boundary keys must be consistent.
        Assert.assertEquals("0", singleValueTree.firstKey());
        Assert.assertEquals(lastKey[0], singleValueTree.lastKey());
    }
    // Every committed key must be present and map to the expected rid.
    for (int i = 0; i < keysCount; i++) {
        Assert.assertEquals(
            i + " key is absent",
            new ORecordId(i % 32000, i),
            singleValueTree.get(Integer.toString(i)));
        if (i % 100_000 == 0) {
            System.out.printf("%d items tested out of %d%n", i, keysCount);
        }
    }
    // Keys that were never inserted must not be found.
    for (int i = keysCount; i < 2 * keysCount; i++) {
        Assert.assertNull(singleValueTree.get(Integer.toString(i)));
    }
}
@Test
public void testKeyPutRandomUniform() throws Exception {
final NavigableSet<String> keys = new TreeSet<>();
final Random random = new Random();
final int keysCount = 1_000_000;
final int rollbackRange = 100;
while (keys.size() < keysCount) {
for (int n = 0; n < 2; n++) {
final int rollbackCounter = n;
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
for (int i = 0; i < rollbackRange; i++) {
int val = random.nextInt(Integer.MAX_VALUE);
String key = Integer.toString(val);
singleValueTree.put(atomicOperation, key, new ORecordId(val % 32000, val));
if (rollbackCounter == 1) {
keys.add(key);
}
Assert.assertEquals(singleValueTree.get(key), new ORecordId(val % 32000, val));
}
if (rollbackCounter == 0) {
throw new RollbackException();
}
});
} catch (RollbackException ignore) {
}
}
}
Assert.assertEquals(singleValueTree.firstKey(), keys.first());
Assert.assertEquals(singleValueTree.lastKey(), keys.last());
for (String key : keys) {
final int val = Integer.parseInt(key);
Assert.assertEquals(singleValueTree.get(key), new ORecordId(val % 32000, val));
}
}
@Test
public void testKeyPutRandomGaussian() throws Exception {
NavigableSet<String> keys = new TreeSet<>();
long seed = System.currentTimeMillis();
System.out.println("testKeyPutRandomGaussian seed : " + seed);
Random random = new Random(seed);
final int keysCount = 1_000_000;
final int rollbackRange = 100;
while (keys.size() < keysCount) {
for (int n = 0; n < 2; n++) {
final int rollbackCounter = n;
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
for (int i = 0; i < rollbackRange; i++) {
int val;
do {
val = (int) (random.nextGaussian() * Integer.MAX_VALUE / 2 + Integer.MAX_VALUE);
} while (val < 0);
String key = Integer.toString(val);
singleValueTree.put(atomicOperation, key, new ORecordId(val % 32000, val));
if (rollbackCounter == 1) {
keys.add(key);
}
Assert.assertEquals(singleValueTree.get(key), new ORecordId(val % 32000, val));
}
if (rollbackCounter == 0) {
throw new RollbackException();
}
});
} catch (RollbackException ignore) {
}
}
}
Assert.assertEquals(singleValueTree.firstKey(), keys.first());
Assert.assertEquals(singleValueTree.lastKey(), keys.last());
for (String key : keys) {
int val = Integer.parseInt(key);
Assert.assertEquals(singleValueTree.get(key), new ORecordId(val % 32000, val));
}
}
@Test
public void testKeyDeleteRandomUniform() throws Exception {
final int keysCount = 1_000_000;
NavigableSet<String> keys = new TreeSet<>();
for (int i = 0; i < keysCount; i++) {
String key = Integer.toString(i);
final int k = i;
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation ->
singleValueTree.put(atomicOperation, key, new ORecordId(k % 32000, k)));
keys.add(key);
}
final int rollbackInterval = 10;
Iterator<String> keysIterator = keys.iterator();
while (keysIterator.hasNext()) {
String key = keysIterator.next();
if (Integer.parseInt(key) % 3 == 0) {
atomicOperationsManager.executeInsideAtomicOperation(
null, atomicOperation -> singleValueTree.remove(atomicOperation, key));
keysIterator.remove();
}
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
int rollbackCounter = 0;
final Iterator<String> keysDeletionIterator = keys.tailSet(key, false).iterator();
while (keysDeletionIterator.hasNext() && rollbackCounter < rollbackInterval) {
String keyToDelete = keysDeletionIterator.next();
rollbackCounter++;
singleValueTree.remove(atomicOperation, keyToDelete);
}
throw new RollbackException();
});
} catch (RollbackException ignore) {
}
}
Assert.assertEquals(singleValueTree.firstKey(), keys.first());
Assert.assertEquals(singleValueTree.lastKey(), keys.last());
for (String key : keys) {
int val = Integer.parseInt(key);
if (val % 3 == 0) {
Assert.assertNull(singleValueTree.get(key));
} else {
Assert.assertEquals(singleValueTree.get(key), new ORecordId(val % 32000, val));
}
}
}
@Test
public void testKeyDeleteRandomGaussian() throws Exception {
NavigableSet<String> keys = new TreeSet<>();
final int keysCount = 1_000_000;
long seed = System.currentTimeMillis();
System.out.println("testKeyDeleteRandomGaussian seed : " + seed);
Random random = new Random(seed);
while (keys.size() < keysCount) {
int val = (int) (random.nextGaussian() * Integer.MAX_VALUE / 2 + Integer.MAX_VALUE);
if (val < 0) {
continue;
}
String key = Integer.toString(val);
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation ->
singleValueTree.put(atomicOperation, key, new ORecordId(val % 32000, val)));
keys.add(key);
Assert.assertEquals(singleValueTree.get(key), new ORecordId(val % 32000, val));
}
Iterator<String> keysIterator = keys.iterator();
final int rollbackInterval = 10;
while (keysIterator.hasNext()) {
String key = keysIterator.next();
if (Integer.parseInt(key) % 3 == 0) {
atomicOperationsManager.executeInsideAtomicOperation(
null, atomicOperation -> singleValueTree.remove(atomicOperation, key));
keysIterator.remove();
}
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
int rollbackCounter = 0;
final Iterator<String> keysDeletionIterator = keys.tailSet(key, false).iterator();
while (keysDeletionIterator.hasNext() && rollbackCounter < rollbackInterval) {
String keyToDelete = keysDeletionIterator.next();
rollbackCounter++;
singleValueTree.remove(atomicOperation, keyToDelete);
}
throw new RollbackException();
});
} catch (RollbackException ignore) {
}
}
Assert.assertEquals(singleValueTree.firstKey(), keys.first());
Assert.assertEquals(singleValueTree.lastKey(), keys.last());
for (String key : keys) {
int val = Integer.parseInt(key);
if (val % 3 == 0) {
Assert.assertNull(singleValueTree.get(key));
} else {
Assert.assertEquals(singleValueTree.get(key), new ORecordId(val % 32000, val));
}
}
}
@Test
public void testKeyDelete() throws Exception {
final int keysCount = 1_000_000;
for (int i = 0; i < keysCount; i++) {
final int k = i;
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation ->
singleValueTree.put(
atomicOperation, Integer.toString(k), new ORecordId(k % 32000, k)));
}
final int rollbackInterval = 100;
for (int i = 0; i < keysCount / rollbackInterval; i++) {
for (int n = 0; n < 2; n++) {
final int rollbackCounter = n;
final int iterationsCounter = i;
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
for (int j = 0; j < rollbackInterval; j++) {
final int key = iterationsCounter * rollbackInterval + j;
if (key % 3 == 0) {
Assert.assertEquals(
singleValueTree.remove(atomicOperation, Integer.toString(key)),
new ORecordId(key % 32000, key));
}
}
if (rollbackCounter == 0) {
throw new RollbackException();
}
});
} catch (RollbackException ignore) {
}
}
}
for (int i = 0; i < keysCount; i++) {
if (i % 3 == 0) {
Assert.assertNull(singleValueTree.get(Integer.toString(i)));
} else {
Assert.assertEquals(singleValueTree.get(Integer.toString(i)), new ORecordId(i % 32000, i));
}
}
}
@Test
public void testKeyAddDelete() throws Exception {
final int keysCount = 1_000_000;
for (int i = 0; i < keysCount; i++) {
final int key = i;
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation ->
singleValueTree.put(
atomicOperation, Integer.toString(key), new ORecordId(key % 32000, key)));
Assert.assertEquals(singleValueTree.get(Integer.toString(i)), new ORecordId(i % 32000, i));
}
final int rollbackInterval = 100;
for (int i = 0; i < keysCount / rollbackInterval; i++) {
for (int n = 0; n < 2; n++) {
final int rollbackCounter = n;
final int iterationsCounter = i;
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
for (int j = 0; j < rollbackInterval; j++) {
final int key = iterationsCounter * rollbackInterval + j;
if (key % 3 == 0) {
Assert.assertEquals(
singleValueTree.remove(atomicOperation, Integer.toString(key)),
new ORecordId(key % 32000, key));
}
if (key % 2 == 0) {
singleValueTree.put(
atomicOperation,
Integer.toString(keysCount + key),
new ORecordId((keysCount + key) % 32000, keysCount + key));
}
}
if (rollbackCounter == 0) {
throw new RollbackException();
}
});
} catch (RollbackException ignore) {
}
}
}
for (int i = 0; i < keysCount; i++) {
if (i % 3 == 0) {
Assert.assertNull(singleValueTree.get(Integer.toString(i)));
} else {
Assert.assertEquals(singleValueTree.get(Integer.toString(i)), new ORecordId(i % 32000, i));
}
if (i % 2 == 0) {
Assert.assertEquals(
singleValueTree.get(Integer.toString(keysCount + i)),
new ORecordId((keysCount + i) % 32000, keysCount + i));
}
}
}
@Test
public void testKeyAddDeleteAll() throws Exception {
for (int iterations = 0; iterations < 4; iterations++) {
System.out.println("testKeyAddDeleteAll : iteration " + iterations);
final int keysCount = 1_000_000;
for (int i = 0; i < keysCount; i++) {
final int key = i;
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation ->
singleValueTree.put(
atomicOperation, Integer.toString(key), new ORecordId(key % 32000, key)));
Assert.assertEquals(singleValueTree.get(Integer.toString(i)), new ORecordId(i % 32000, i));
}
for (int i = 0; i < keysCount; i++) {
final int key = i;
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
Assert.assertEquals(
singleValueTree.remove(atomicOperation, Integer.toString(key)),
new ORecordId(key % 32000, key));
if (key > 0 && key % 100_000 == 0) {
for (int keyToVerify = 0; keyToVerify < keysCount; keyToVerify++) {
if (keyToVerify > key) {
Assert.assertEquals(
new ORecordId(keyToVerify % 32000, keyToVerify),
singleValueTree.get(Integer.toString(keyToVerify)));
} else {
Assert.assertNull(singleValueTree.get(Integer.toString(keyToVerify)));
}
}
}
});
}
for (int i = 0; i < keysCount; i++) {
Assert.assertNull(singleValueTree.get(Integer.toString(i)));
}
singleValueTree.assertFreePages(atomicOperationsManager.getCurrentOperation());
}
}
@Test
public void testKeyAddDeleteHalf() throws Exception {
final int keysCount = 1_000_000;
for (int i = 0; i < keysCount / 2; i++) {
final int key = i;
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation ->
singleValueTree.put(
atomicOperation, Integer.toString(key), new ORecordId(key % 32000, key)));
Assert.assertEquals(singleValueTree.get(Integer.toString(i)), new ORecordId(i % 32000, i));
}
for (int iterations = 0; iterations < 4; iterations++) {
System.out.println("testKeyAddDeleteHalf : iteration " + iterations);
for (int i = 0; i < keysCount / 2; i++) {
final int key = i + (iterations + 1) * keysCount / 2;
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation ->
singleValueTree.put(
atomicOperation, Integer.toString(key), new ORecordId(key % 32000, key)));
Assert.assertEquals(
singleValueTree.get(Integer.toString(key)), new ORecordId(key % 32000, key));
}
final int offset = iterations * (keysCount / 2);
for (int i = 0; i < keysCount / 2; i++) {
final int key = i + offset;
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation ->
Assert.assertEquals(
singleValueTree.remove(atomicOperation, Integer.toString(key)),
new ORecordId(key % 32000, key)));
}
final int start = (iterations + 1) * (keysCount / 2);
for (int i = 0; i < (iterations + 2) * keysCount / 2; i++) {
if (i < start) {
Assert.assertNull(singleValueTree.get(Integer.toString(i)));
} else {
Assert.assertEquals(
new ORecordId(i % 32000, i), singleValueTree.get(Integer.toString(i)));
}
}
singleValueTree.assertFreePages(atomicOperationsManager.getCurrentOperation());
}
}
@Test
public void testKeyCursor() throws Exception {
final int keysCount = 1_000_000;
NavigableMap<String, ORID> keyValues = new TreeMap<>();
final long seed = System.nanoTime();
System.out.println("testKeyCursor: " + seed);
Random random = new Random(seed);
final int rollbackInterval = 100;
int printCounter = 0;
while (keyValues.size() < keysCount) {
for (int n = 0; n < 2; n++) {
final int rollbackCounter = n;
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
for (int j = 0; j < rollbackInterval; j++) {
int val = random.nextInt(Integer.MAX_VALUE);
String key = Integer.toString(val);
singleValueTree.put(atomicOperation, key, new ORecordId(val % 32000, val));
if (rollbackCounter == 1) {
keyValues.put(key, new ORecordId(val % 32000, val));
}
}
if (rollbackCounter == 0) {
throw new RollbackException();
}
});
} catch (RollbackException ignore) {
}
}
if (keyValues.size() > printCounter * 100_000) {
System.out.println(keyValues.size() + " entries were added.");
printCounter++;
}
}
Assert.assertEquals(singleValueTree.firstKey(), keyValues.firstKey());
Assert.assertEquals(singleValueTree.lastKey(), keyValues.lastKey());
final Iterator<String> indexIterator;
try (Stream<String> stream = singleValueTree.keyStream()) {
indexIterator = stream.iterator();
for (String entryKey : keyValues.keySet()) {
final String indexKey = indexIterator.next();
Assert.assertEquals(entryKey, indexKey);
}
}
}
@Test
public void testIterateEntriesMajor() throws Exception {
final int keysCount = 1_000_000;
NavigableMap<String, ORID> keyValues = new TreeMap<>();
final long seed = System.nanoTime();
System.out.println("testIterateEntriesMajor: " + seed);
final Random random = new Random(seed);
final int rollbackInterval = 100;
int printCounter = 0;
while (keyValues.size() < keysCount) {
for (int n = 0; n < 2; n++) {
final int rollbackCounter = n;
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
for (int j = 0; j < rollbackInterval; j++) {
int val = random.nextInt(Integer.MAX_VALUE);
String key = Integer.toString(val);
singleValueTree.put(atomicOperation, key, new ORecordId(val % 32000, val));
if (rollbackCounter == 1) {
keyValues.put(key, new ORecordId(val % 32000, val));
}
}
if (rollbackCounter == 0) {
throw new RollbackException();
}
});
} catch (RollbackException ignore) {
}
}
if (keyValues.size() > printCounter * 100_000) {
System.out.println(keyValues.size() + " entries were added.");
printCounter++;
}
}
assertIterateMajorEntries(keyValues, random, true, true);
assertIterateMajorEntries(keyValues, random, false, true);
assertIterateMajorEntries(keyValues, random, true, false);
assertIterateMajorEntries(keyValues, random, false, false);
Assert.assertEquals(singleValueTree.firstKey(), keyValues.firstKey());
Assert.assertEquals(singleValueTree.lastKey(), keyValues.lastKey());
}
@Test
public void testIterateEntriesMinor() throws Exception {
final int keysCount = 1_000_000;
NavigableMap<String, ORID> keyValues = new TreeMap<>();
final long seed = System.nanoTime();
System.out.println("testIterateEntriesMinor: " + seed);
final Random random = new Random(seed);
final int rollbackInterval = 100;
int printCounter = 0;
while (keyValues.size() < keysCount) {
for (int n = 0; n < 2; n++) {
final int rollbackCounter = n;
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
for (int j = 0; j < rollbackInterval; j++) {
int val = random.nextInt(Integer.MAX_VALUE);
String key = Integer.toString(val);
singleValueTree.put(atomicOperation, key, new ORecordId(val % 32000, val));
if (rollbackCounter == 1) {
keyValues.put(key, new ORecordId(val % 32000, val));
}
}
if (rollbackCounter == 0) {
throw new RollbackException();
}
});
} catch (RollbackException ignore) {
}
}
if (keyValues.size() > printCounter * 100_000) {
System.out.println(keyValues.size() + " entries were added.");
printCounter++;
}
}
assertIterateMinorEntries(keyValues, random, true, true);
assertIterateMinorEntries(keyValues, random, false, true);
assertIterateMinorEntries(keyValues, random, true, false);
assertIterateMinorEntries(keyValues, random, false, false);
Assert.assertEquals(singleValueTree.firstKey(), keyValues.firstKey());
Assert.assertEquals(singleValueTree.lastKey(), keyValues.lastKey());
}
@Test
public void testIterateEntriesBetween() throws Exception {
final int keysCount = 1_000_000;
NavigableMap<String, ORID> keyValues = new TreeMap<>();
final Random random = new Random();
final int rollbackInterval = 100;
int printCounter = 0;
while (keyValues.size() < keysCount) {
for (int n = 0; n < 2; n++) {
final int rollbackCounter = n;
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
for (int j = 0; j < rollbackInterval; j++) {
int val = random.nextInt(Integer.MAX_VALUE);
String key = Integer.toString(val);
singleValueTree.put(atomicOperation, key, new ORecordId(val % 32000, val));
if (rollbackCounter == 1) {
keyValues.put(key, new ORecordId(val % 32000, val));
}
}
if (rollbackCounter == 0) {
throw new RollbackException();
}
});
} catch (RollbackException ignore) {
}
}
if (keyValues.size() > printCounter * 100_000) {
System.out.println(keyValues.size() + " entries were added.");
printCounter++;
}
}
assertIterateBetweenEntries(keyValues, random, true, true, true);
assertIterateBetweenEntries(keyValues, random, true, false, true);
assertIterateBetweenEntries(keyValues, random, false, true, true);
assertIterateBetweenEntries(keyValues, random, false, false, true);
assertIterateBetweenEntries(keyValues, random, true, true, false);
assertIterateBetweenEntries(keyValues, random, true, false, false);
assertIterateBetweenEntries(keyValues, random, false, true, false);
assertIterateBetweenEntries(keyValues, random, false, false, false);
Assert.assertEquals(singleValueTree.firstKey(), keyValues.firstKey());
Assert.assertEquals(singleValueTree.lastKey(), keyValues.lastKey());
}
@Test
public void testIterateEntriesBetweenString() throws Exception {
final int keysCount = 10;
final NavigableMap<String, ORID> keyValues = new TreeMap<>();
final Random random = new Random();
try {
atomicOperationsManager.executeInsideAtomicOperation(
null,
atomicOperation -> {
for (int j = 0; j < keysCount; j++) {
final String key = "name" + j;
final int val = random.nextInt(Integer.MAX_VALUE);
final int clusterId = val % 32000;
singleValueTree.put(atomicOperation, key, new ORecordId(clusterId, val));
System.out.println("Added key=" + key + ", value=" + val);
keyValues.put(key, new ORecordId(clusterId, val));
}
});
} catch (final RollbackException ignore) {
Assert.fail();
}
assertIterateBetweenEntriesNonRandom("name5", keyValues, true, true, true, 5);
Assert.assertEquals(singleValueTree.firstKey(), keyValues.firstKey());
Assert.assertEquals(singleValueTree.lastKey(), keyValues.lastKey());
}
private void assertIterateMajorEntries(
NavigableMap<String, ORID> keyValues,
Random random,
boolean keyInclusive,
boolean ascSortOrder) {
String[] keys = new String[keyValues.size()];
int index = 0;
for (String key : keyValues.keySet()) {
keys[index] = key;
index++;
}
for (int i = 0; i < 100; i++) {
final int fromKeyIndex = random.nextInt(keys.length);
String fromKey = keys[fromKeyIndex];
if (random.nextBoolean()) {
fromKey =
fromKey.substring(0, fromKey.length() - 2) + (fromKey.charAt(fromKey.length() - 1) - 1);
}
final Iterator<ORawPair<String, ORID>> indexIterator;
try (Stream<ORawPair<String, ORID>> stream =
singleValueTree.iterateEntriesMajor(fromKey, keyInclusive, ascSortOrder)) {
indexIterator = stream.iterator();
Iterator<Map.Entry<String, ORID>> iterator;
if (ascSortOrder) {
iterator = keyValues.tailMap(fromKey, keyInclusive).entrySet().iterator();
} else {
iterator =
keyValues
.descendingMap()
.subMap(keyValues.lastKey(), true, fromKey, keyInclusive)
.entrySet()
.iterator();
}
while (iterator.hasNext()) {
final ORawPair<String, ORID> indexEntry = indexIterator.next();
final Map.Entry<String, ORID> entry = iterator.next();
Assert.assertEquals(indexEntry.first, entry.getKey());
Assert.assertEquals(indexEntry.second, entry.getValue());
}
//noinspection ConstantConditions
Assert.assertFalse(iterator.hasNext());
Assert.assertFalse(indexIterator.hasNext());
}
}
}
private void assertIterateMinorEntries(
NavigableMap<String, ORID> keyValues,
Random random,
boolean keyInclusive,
boolean ascSortOrder) {
String[] keys = new String[keyValues.size()];
int index = 0;
for (String key : keyValues.keySet()) {
keys[index] = key;
index++;
}
for (int i = 0; i < 100; i++) {
int toKeyIndex = random.nextInt(keys.length);
String toKey = keys[toKeyIndex];
if (random.nextBoolean()) {
toKey = toKey.substring(0, toKey.length() - 2) + (toKey.charAt(toKey.length() - 1) + 1);
}
final Iterator<ORawPair<String, ORID>> indexIterator;
try (Stream<ORawPair<String, ORID>> stream =
singleValueTree.iterateEntriesMinor(toKey, keyInclusive, ascSortOrder)) {
indexIterator = stream.iterator();
Iterator<Map.Entry<String, ORID>> iterator;
if (ascSortOrder) {
iterator = keyValues.headMap(toKey, keyInclusive).entrySet().iterator();
} else {
iterator = keyValues.headMap(toKey, keyInclusive).descendingMap().entrySet().iterator();
}
while (iterator.hasNext()) {
ORawPair<String, ORID> indexEntry = indexIterator.next();
Map.Entry<String, ORID> entry = iterator.next();
Assert.assertEquals(indexEntry.first, entry.getKey());
Assert.assertEquals(indexEntry.second, entry.getValue());
}
//noinspection ConstantConditions
Assert.assertFalse(iterator.hasNext());
Assert.assertFalse(indexIterator.hasNext());
}
}
}
private void assertIterateBetweenEntries(
NavigableMap<String, ORID> keyValues,
Random random,
boolean fromInclusive,
boolean toInclusive,
boolean ascSortOrder) {
String[] keys = new String[keyValues.size()];
int index = 0;
for (String key : keyValues.keySet()) {
keys[index] = key;
index++;
}
for (int i = 0; i < 100; i++) {
int fromKeyIndex = random.nextInt(keys.length);
int toKeyIndex = random.nextInt(keys.length);
if (fromKeyIndex > toKeyIndex) {
toKeyIndex = fromKeyIndex;
}
String fromKey = keys[fromKeyIndex];
String toKey = keys[toKeyIndex];
if (random.nextBoolean()) {
fromKey =
fromKey.substring(0, fromKey.length() - 2) + (fromKey.charAt(fromKey.length() - 1) - 1);
}
if (random.nextBoolean()) {
toKey = toKey.substring(0, toKey.length() - 2) + (toKey.charAt(toKey.length() - 1) + 1);
}
if (fromKey.compareTo(toKey) > 0) {
fromKey = toKey;
}
final Iterator<ORawPair<String, ORID>> indexIterator;
try (Stream<ORawPair<String, ORID>> stream =
singleValueTree.iterateEntriesBetween(
fromKey, fromInclusive, toKey, toInclusive, ascSortOrder)) {
indexIterator = stream.iterator();
Iterator<Map.Entry<String, ORID>> iterator;
if (ascSortOrder) {
iterator =
keyValues.subMap(fromKey, fromInclusive, toKey, toInclusive).entrySet().iterator();
} else {
iterator =
keyValues
.descendingMap()
.subMap(toKey, toInclusive, fromKey, fromInclusive)
.entrySet()
.iterator();
}
while (iterator.hasNext()) {
ORawPair<String, ORID> indexEntry = indexIterator.next();
Assert.assertNotNull(indexEntry);
Map.Entry<String, ORID> mapEntry = iterator.next();
Assert.assertEquals(indexEntry.first, mapEntry.getKey());
Assert.assertEquals(indexEntry.second, mapEntry.getValue());
}
//noinspection ConstantConditions
Assert.assertFalse(iterator.hasNext());
Assert.assertFalse(indexIterator.hasNext());
}
}
}
private void assertIterateBetweenEntriesNonRandom(
final String fromKey,
final NavigableMap<String, ORID> keyValues,
final boolean fromInclusive,
final boolean toInclusive,
final boolean ascSortOrder,
final int startFrom) {
String[] keys = new String[keyValues.size()];
int index = 0;
for (final String key : keyValues.keySet()) {
keys[index] = key;
index++;
}
for (int i = startFrom; i < keyValues.size(); i++) {
final String toKey = keys[i];
final Iterator<ORawPair<String, ORID>> indexIterator;
try (final Stream<ORawPair<String, ORID>> stream =
singleValueTree.iterateEntriesBetween(
fromKey, fromInclusive, toKey, toInclusive, ascSortOrder)) {
indexIterator = stream.iterator();
Assert.assertTrue(indexIterator.hasNext());
}
}
}
static final class RollbackException extends OException implements OHighLevelException {
@SuppressWarnings("WeakerAccess")
public RollbackException() {
this("");
}
@SuppressWarnings("WeakerAccess")
public RollbackException(String message) {
super(message);
}
@SuppressWarnings("unused")
public RollbackException(RollbackException exception) {
super(exception);
}
}
}
| |
package com.suelake.habbo.moderation;
import java.util.Calendar;
import java.util.Date;
import java.util.Vector;
import com.blunk.Environment;
import com.blunk.Log;
import com.blunk.util.TimeHelper;
import com.suelake.habbo.HabboHotel;
import com.suelake.habbo.access.UserAccessEntry;
import com.suelake.habbo.communication.CommunicationHandler;
import com.suelake.habbo.communication.ServerMessage;
/**
* ModerationCenter is where authorized Users perform their moderation tasks such as banning and kicking Users and replying Call for Helps.
*
* @author Nillus
*/
public class ModerationCenter
{
@SuppressWarnings("unchecked")
private Class m_moderationBanClass;
private int m_callCounter;
private Vector<CallForHelp> m_pendingCalls;
public ModerationCenter()
{
ModerationBan sample = (ModerationBan)HabboHotel.getDataObjectFactory().newObject("ModerationBan");
if (sample != null)
{
m_moderationBanClass = sample.getClass();
}
m_callCounter = 0;
m_pendingCalls = new Vector<CallForHelp>();
}
public ModerationBan getBan(int ID)
{
ModerationBan ban = this.newModerationBan();
ban.ID = ID;
// Load and validate the ban
if (Environment.getDatabase().load(ban))
{
if (this.banIsValid(ban))
{
return ban;
}
}
// Not found / expired
return null;
}
public ModerationBan getIpBan(String ip)
{
ModerationBan ban = this.newModerationBan();
ban.ip = ip;
// Load and validate the ban
if (Environment.getDatabase().load(ban))
{
if (this.banIsValid(ban))
{
return ban;
}
}
// Not found / expired
return null;
}
public ModerationBan getUserBan(int userID)
{
ModerationBan ban = this.newModerationBan();
ban.userID = userID;
// Load and validate the ban
if (Environment.getDatabase().load(ban))
{
if (this.banIsValid(ban))
{
return ban;
}
}
// Not found / expired
return null;
}
private boolean banIsValid(ModerationBan ban)
{
// Ban expired?
if (new Date(TimeHelper.getTime()).after(ban.expiresAt))
{
// Try to delete ban from the system
if (this.deleteBan(ban))
{
Log.info("ModerationCenter: ban #" + ban.ID + " expired. [user ID: " + ban.userID + ", IP address: " + ban.ip + "]");
// Ban expired aka not valid anymore!
return false;
}
}
// Ban is still valid!
return true;
}
public boolean deleteBan(ModerationBan ban)
{
if (ban != null)
{
return Environment.getDatabase().delete(ban);
}
else
{
return false;
}
}
public ModerationBan setUserBan(int userID, boolean banIP, int hours, String reason, int issuerID)
{
// Get users last access entry
UserAccessEntry lastAccessEntry = HabboHotel.getAccessControl().getLatestAccessEntry(userID);
if (lastAccessEntry != null)
{
// Delete old user ban (if exists)
ModerationBan ban = this.getUserBan(userID);
if (ban != null)
{
this.deleteBan(ban);
}
// Delete old ip ban (if exists)
if (banIP)
{
ban = this.getIpBan(lastAccessEntry.ip);
if (ban != null)
{
this.deleteBan(ban);
}
}
// Create the new ban
ban = this.newModerationBan();
ban.userID = userID;
ban.ip = (banIP) ? lastAccessEntry.ip : null;
ban.appliedBy = issuerID;
ban.reason = reason;
// Work out expiration etc
Calendar calendar = Calendar.getInstance();
ban.appliedAt = calendar.getTime();
if (hours > 0)
{
calendar.add(Calendar.HOUR, hours);
}
else
{
calendar.add(Calendar.YEAR, 10);
}
ban.expiresAt = calendar.getTime();
// Insert in in the Database
if (Environment.getDatabase().insert(ban))
{
if (ban.ip == null)
{
// Disconnect and notify the user
CommunicationHandler client = HabboHotel.getGameClients().getClientOfUser(ban.userID);
if (client != null)
{
client.sendBan(ban);
client.stop("user is banned");
}
}
else
{
// Disconnect and notify the users on this ip
Vector<CommunicationHandler> clients = HabboHotel.getGameClients().getClientsWithIpAddress(ban.ip);
for (CommunicationHandler client : clients)
{
if (client != null)
{
client.sendBan(ban);
client.stop("user IP is banned");
}
}
}
return ban;
}
}
// Ban failed!
return null;
}
public CallForHelp createCallForHelp()
{
return new CallForHelp(++m_callCounter);
}
public void submitCallForHelp(CallForHelp call)
{
// Add to pending calls!
synchronized (m_pendingCalls)
{
m_pendingCalls.add(call);
}
// Broadcast to helpers
ServerMessage notify = new ServerMessage("CRYFORHELP");
notify.appendObject(call);
this.broadcastToHelpers(notify);
}
public boolean pickCallForHelp(int callID, String picker)
{
CallForHelp call = this.getPendingCall(callID);
if (call != null)
{
// Answered
synchronized (m_pendingCalls)
{
m_pendingCalls.remove(call);
}
// Notify moderators call is picked
ServerMessage notify = new ServerMessage("PICKED_CRY");
notify.appendNewArgument(picker);
notify.appendNewArgument(ModerationCenter.craftChatlogUrl(callID));
this.broadcastToHelpers(notify);
// Picked
return true;
}
// Already picked / does not exist
return false;
}
public void broadcastToHelpers(ServerMessage msg)
{
byte minimumRole = HabboHotel.getAccessControl().getMinimumRoleForUserRight("can_answer_cfh");
Vector<CommunicationHandler> receivers = HabboHotel.getGameClients().getClientsWithUserRole(minimumRole);
for (CommunicationHandler comm : receivers)
{
comm.sendMessage(msg);
}
}
public CallForHelp getPendingCall(int callID)
{
synchronized (m_pendingCalls)
{
for (CallForHelp call : m_pendingCalls)
{
if (call.ID == callID)
{
return call;
}
}
}
return null;
}
public int clearPendingCalls()
{
synchronized (m_pendingCalls)
{
int amount = m_pendingCalls.size();
m_pendingCalls.clear();
return amount;
}
}
public Vector<CallForHelp> getPendingCalls()
{
return m_pendingCalls;
}
public static String craftChatlogUrl(int callID)
{
return "/chatlog.php?id=" + callID;
}
public static int parseCallID(String chatlogUrl)
{
try
{
return Integer.parseInt(chatlogUrl.substring("/chatlog.php?id=".length()));
}
catch (Exception ex)
{
return -1;
}
}
/**
* Creates a new instance of the ModerationBan DataObject implementation class and returns it.
*/
public ModerationBan newModerationBan()
{
try
{
return (ModerationBan)m_moderationBanClass.newInstance();
}
catch (InstantiationException ex)
{
ex.printStackTrace();
}
catch (IllegalAccessException ex)
{
ex.printStackTrace();
}
return null;
}
}
| |
/*
* Copyright (C) 2014 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.graph;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.graph.GraphConstants.NODE_NOT_IN_GRAPH;
import static java.util.Objects.requireNonNull;
import com.google.common.annotations.Beta;
import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.collect.Maps;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import javax.annotation.CheckForNull;
/**
* Static utility methods for {@link Graph}, {@link ValueGraph}, and {@link Network} instances.
*
* @author James Sexton
* @author Joshua O'Madadhain
* @since 20.0
*/
@Beta
@ElementTypesAreNonnullByDefault
public final class Graphs {
  private Graphs() {}
  // Graph query methods
  /**
   * Returns true if {@code graph} has at least one cycle. A cycle is defined as a non-empty subset
   * of edges in a graph arranged to form a path (a sequence of adjacent outgoing edges) starting
   * and ending with the same node.
   *
   * <p>This method will detect any non-empty cycle, including self-loops (a cycle of length 1).
   */
  public static <N> boolean hasCycle(Graph<N> graph) {
    int numEdges = graph.edges().size();
    if (numEdges == 0) {
      return false; // An edge-free graph is acyclic by definition.
    }
    if (!graph.isDirected() && numEdges >= graph.nodes().size()) {
      return true; // Optimization for the undirected case: at least one cycle must exist.
    }
    Map<Object, NodeVisitState> visitedNodes =
        Maps.newHashMapWithExpectedSize(graph.nodes().size());
    for (N node : graph.nodes()) {
      if (subgraphHasCycle(graph, visitedNodes, node, null)) {
        return true;
      }
    }
    return false;
  }
  /**
   * Returns true if {@code network} has at least one cycle. A cycle is defined as a non-empty
   * subset of edges in a graph arranged to form a path (a sequence of adjacent outgoing edges)
   * starting and ending with the same node.
   *
   * <p>This method will detect any non-empty cycle, including self-loops (a cycle of length 1).
   */
  public static boolean hasCycle(Network<?, ?> network) {
    // In a directed graph, parallel edges cannot introduce a cycle in an acyclic graph.
    // However, in an undirected graph, any parallel edge induces a cycle in the graph.
    if (!network.isDirected()
        && network.allowsParallelEdges()
        && network.edges().size() > network.asGraph().edges().size()) {
      return true;
    }
    return hasCycle(network.asGraph());
  }
  /**
   * Performs a traversal of the nodes reachable from {@code node}. If we ever reach a node we've
   * already visited (following only outgoing edges and without reusing edges), we know there's a
   * cycle in the graph.
   */
  private static <N> boolean subgraphHasCycle(
      Graph<N> graph,
      Map<Object, NodeVisitState> visitedNodes,
      N node,
      @CheckForNull N previousNode) {
    NodeVisitState state = visitedNodes.get(node);
    if (state == NodeVisitState.COMPLETE) {
      return false; // This node and its reachable set were already fully explored; no cycle there.
    }
    if (state == NodeVisitState.PENDING) {
      return true; // We returned to a node that is still on the DFS stack: a cycle.
    }
    visitedNodes.put(node, NodeVisitState.PENDING);
    for (N nextNode : graph.successors(node)) {
      if (canTraverseWithoutReusingEdge(graph, nextNode, previousNode)
          && subgraphHasCycle(graph, visitedNodes, nextNode, node)) {
        return true;
      }
    }
    visitedNodes.put(node, NodeVisitState.COMPLETE);
    return false;
  }
  /**
   * Determines whether an edge has already been used during traversal. In the directed case a cycle
   * is always detected before reusing an edge, so no special logic is required. In the undirected
   * case, we must take care not to "backtrack" over an edge (i.e. going from A to B and then going
   * from B to A).
   */
  private static boolean canTraverseWithoutReusingEdge(
      Graph<?> graph, Object nextNode, @CheckForNull Object previousNode) {
    if (graph.isDirected() || !Objects.equal(previousNode, nextNode)) {
      return true;
    }
    // This falls into the undirected A->B->A case. The Graph interface does not support parallel
    // edges, so this traversal would require reusing the undirected AB edge.
    return false;
  }
  /**
   * Returns the transitive closure of {@code graph}. The transitive closure of a graph is another
   * graph with an edge connecting node A to node B if node B is {@link #reachableNodes(Graph,
   * Object) reachable} from node A.
   *
   * <p>This is a "snapshot" based on the current topology of {@code graph}, rather than a live view
   * of the transitive closure of {@code graph}. In other words, the returned {@link Graph} will not
   * be updated after modifications to {@code graph}.
   */
  // TODO(b/31438252): Consider potential optimizations for this algorithm.
  public static <N> Graph<N> transitiveClosure(Graph<N> graph) {
    MutableGraph<N> transitiveClosure = GraphBuilder.from(graph).allowsSelfLoops(true).build();
    // Every node is, at a minimum, reachable from itself. Since the resulting transitive closure
    // will have no isolated nodes, we can skip adding nodes explicitly and let putEdge() do it.
    if (graph.isDirected()) {
      // Note: works for both directed and undirected graphs, but we only use in the directed case.
      for (N node : graph.nodes()) {
        for (N reachableNode : reachableNodes(graph, node)) {
          transitiveClosure.putEdge(node, reachableNode);
        }
      }
    } else {
      // An optimization for the undirected case: for every node B reachable from node A,
      // node A and node B have the same reachability set.
      Set<N> visitedNodes = new HashSet<>();
      for (N node : graph.nodes()) {
        if (!visitedNodes.contains(node)) {
          Set<N> reachableNodes = reachableNodes(graph, node);
          visitedNodes.addAll(reachableNodes);
          int pairwiseMatch = 1; // start at 1 to include self-loops
          for (N nodeU : reachableNodes) {
            for (N nodeV : Iterables.limit(reachableNodes, pairwiseMatch++)) {
              transitiveClosure.putEdge(nodeU, nodeV);
            }
          }
        }
      }
    }
    return transitiveClosure;
  }
  /**
   * Returns the set of nodes that are reachable from {@code node}. Node B is defined as reachable
   * from node A if there exists a path (a sequence of adjacent outgoing edges) starting at node A
   * and ending at node B. Note that a node is always reachable from itself via a zero-length path.
   *
   * <p>This is a "snapshot" based on the current topology of {@code graph}, rather than a live view
   * of the set of nodes reachable from {@code node}. In other words, the returned {@link Set} will
   * not be updated after modifications to {@code graph}.
   *
   * @throws IllegalArgumentException if {@code node} is not present in {@code graph}
   */
  public static <N> Set<N> reachableNodes(Graph<N> graph, N node) {
    checkArgument(graph.nodes().contains(node), NODE_NOT_IN_GRAPH, node);
    return ImmutableSet.copyOf(Traverser.forGraph(graph).breadthFirst(node));
  }
  // Graph mutation methods
  // Graph view methods
  /**
   * Returns a view of {@code graph} with the direction (if any) of every edge reversed. All other
   * properties remain intact, and further updates to {@code graph} will be reflected in the view.
   */
  public static <N> Graph<N> transpose(Graph<N> graph) {
    if (!graph.isDirected()) {
      return graph; // the transpose of an undirected graph is an identical graph
    }
    if (graph instanceof TransposedGraph) {
      return ((TransposedGraph<N>) graph).graph; // transposing twice yields the original graph
    }
    return new TransposedGraph<>(graph);
  }
  /**
   * Returns a view of {@code graph} with the direction (if any) of every edge reversed. All other
   * properties remain intact, and further updates to {@code graph} will be reflected in the view.
   */
  public static <N, V> ValueGraph<N, V> transpose(ValueGraph<N, V> graph) {
    if (!graph.isDirected()) {
      return graph; // the transpose of an undirected graph is an identical graph
    }
    if (graph instanceof TransposedValueGraph) {
      return ((TransposedValueGraph<N, V>) graph).graph; // transposing twice yields the original
    }
    return new TransposedValueGraph<>(graph);
  }
  /**
   * Returns a view of {@code network} with the direction (if any) of every edge reversed. All other
   * properties remain intact, and further updates to {@code network} will be reflected in the view.
   */
  public static <N, E> Network<N, E> transpose(Network<N, E> network) {
    if (!network.isDirected()) {
      return network; // the transpose of an undirected network is an identical network
    }
    if (network instanceof TransposedNetwork) {
      return ((TransposedNetwork<N, E>) network).network; // transposing twice yields the original
    }
    return new TransposedNetwork<>(network);
  }
  /** Returns {@code endpoints} with source and target swapped, or unchanged if unordered. */
  static <N> EndpointPair<N> transpose(EndpointPair<N> endpoints) {
    if (endpoints.isOrdered()) {
      return EndpointPair.ordered(endpoints.target(), endpoints.source());
    }
    return endpoints;
  }
  // NOTE: this should work as long as the delegate graph's implementation of edges() (like that of
  // AbstractGraph) derives its behavior from calling successors().
  private static class TransposedGraph<N> extends ForwardingGraph<N> {
    private final Graph<N> graph;
    TransposedGraph(Graph<N> graph) {
      this.graph = graph;
    }
    @Override
    Graph<N> delegate() {
      return graph;
    }
    @Override
    public Set<N> predecessors(N node) {
      return delegate().successors(node); // transpose
    }
    @Override
    public Set<N> successors(N node) {
      return delegate().predecessors(node); // transpose
    }
    @Override
    public Set<EndpointPair<N>> incidentEdges(N node) {
      return new IncidentEdgeSet<N>(this, node) {
        @Override
        public Iterator<EndpointPair<N>> iterator() {
          // Swap each edge's endpoints so the view reports transposed edges.
          return Iterators.transform(
              delegate().incidentEdges(node).iterator(),
              edge -> EndpointPair.of(delegate(), edge.nodeV(), edge.nodeU()));
        }
      };
    }
    @Override
    public int inDegree(N node) {
      return delegate().outDegree(node); // transpose
    }
    @Override
    public int outDegree(N node) {
      return delegate().inDegree(node); // transpose
    }
    @Override
    public boolean hasEdgeConnecting(N nodeU, N nodeV) {
      return delegate().hasEdgeConnecting(nodeV, nodeU); // transpose
    }
    @Override
    public boolean hasEdgeConnecting(EndpointPair<N> endpoints) {
      return delegate().hasEdgeConnecting(transpose(endpoints));
    }
  }
  // NOTE: this should work as long as the delegate graph's implementation of edges() (like that of
  // AbstractValueGraph) derives its behavior from calling successors().
  private static class TransposedValueGraph<N, V> extends ForwardingValueGraph<N, V> {
    private final ValueGraph<N, V> graph;
    TransposedValueGraph(ValueGraph<N, V> graph) {
      this.graph = graph;
    }
    @Override
    ValueGraph<N, V> delegate() {
      return graph;
    }
    @Override
    public Set<N> predecessors(N node) {
      return delegate().successors(node); // transpose
    }
    @Override
    public Set<N> successors(N node) {
      return delegate().predecessors(node); // transpose
    }
    @Override
    public int inDegree(N node) {
      return delegate().outDegree(node); // transpose
    }
    @Override
    public int outDegree(N node) {
      return delegate().inDegree(node); // transpose
    }
    @Override
    public boolean hasEdgeConnecting(N nodeU, N nodeV) {
      return delegate().hasEdgeConnecting(nodeV, nodeU); // transpose
    }
    @Override
    public boolean hasEdgeConnecting(EndpointPair<N> endpoints) {
      return delegate().hasEdgeConnecting(transpose(endpoints));
    }
    @Override
    public Optional<V> edgeValue(N nodeU, N nodeV) {
      return delegate().edgeValue(nodeV, nodeU); // transpose
    }
    @Override
    public Optional<V> edgeValue(EndpointPair<N> endpoints) {
      return delegate().edgeValue(transpose(endpoints));
    }
    @Override
    @CheckForNull
    public V edgeValueOrDefault(N nodeU, N nodeV, @CheckForNull V defaultValue) {
      return delegate().edgeValueOrDefault(nodeV, nodeU, defaultValue); // transpose
    }
    @Override
    @CheckForNull
    public V edgeValueOrDefault(EndpointPair<N> endpoints, @CheckForNull V defaultValue) {
      return delegate().edgeValueOrDefault(transpose(endpoints), defaultValue);
    }
  }
  private static class TransposedNetwork<N, E> extends ForwardingNetwork<N, E> {
    private final Network<N, E> network;
    TransposedNetwork(Network<N, E> network) {
      this.network = network;
    }
    @Override
    Network<N, E> delegate() {
      return network;
    }
    @Override
    public Set<N> predecessors(N node) {
      return delegate().successors(node); // transpose
    }
    @Override
    public Set<N> successors(N node) {
      return delegate().predecessors(node); // transpose
    }
    @Override
    public int inDegree(N node) {
      return delegate().outDegree(node); // transpose
    }
    @Override
    public int outDegree(N node) {
      return delegate().inDegree(node); // transpose
    }
    @Override
    public Set<E> inEdges(N node) {
      return delegate().outEdges(node); // transpose
    }
    @Override
    public Set<E> outEdges(N node) {
      return delegate().inEdges(node); // transpose
    }
    @Override
    public EndpointPair<N> incidentNodes(E edge) {
      EndpointPair<N> endpointPair = delegate().incidentNodes(edge);
      return EndpointPair.of(network, endpointPair.nodeV(), endpointPair.nodeU()); // transpose
    }
    @Override
    public Set<E> edgesConnecting(N nodeU, N nodeV) {
      return delegate().edgesConnecting(nodeV, nodeU); // transpose
    }
    @Override
    public Set<E> edgesConnecting(EndpointPair<N> endpoints) {
      return delegate().edgesConnecting(transpose(endpoints));
    }
    @Override
    public Optional<E> edgeConnecting(N nodeU, N nodeV) {
      return delegate().edgeConnecting(nodeV, nodeU); // transpose
    }
    @Override
    public Optional<E> edgeConnecting(EndpointPair<N> endpoints) {
      return delegate().edgeConnecting(transpose(endpoints));
    }
    @Override
    @CheckForNull
    public E edgeConnectingOrNull(N nodeU, N nodeV) {
      return delegate().edgeConnectingOrNull(nodeV, nodeU); // transpose
    }
    @Override
    @CheckForNull
    public E edgeConnectingOrNull(EndpointPair<N> endpoints) {
      return delegate().edgeConnectingOrNull(transpose(endpoints));
    }
    @Override
    public boolean hasEdgeConnecting(N nodeU, N nodeV) {
      return delegate().hasEdgeConnecting(nodeV, nodeU); // transpose
    }
    @Override
    public boolean hasEdgeConnecting(EndpointPair<N> endpoints) {
      return delegate().hasEdgeConnecting(transpose(endpoints));
    }
  }
  // Graph copy methods
  /**
   * Returns the subgraph of {@code graph} induced by {@code nodes}. This subgraph is a new graph
   * that contains all of the nodes in {@code nodes}, and all of the {@link Graph#edges() edges}
   * from {@code graph} for which both nodes are contained by {@code nodes}.
   *
   * @throws IllegalArgumentException if any element in {@code nodes} is not a node in the graph
   */
  public static <N> MutableGraph<N> inducedSubgraph(Graph<N> graph, Iterable<? extends N> nodes) {
    MutableGraph<N> subgraph =
        (nodes instanceof Collection)
            ? GraphBuilder.from(graph).expectedNodeCount(((Collection<?>) nodes).size()).build()
            : GraphBuilder.from(graph).build();
    for (N node : nodes) {
      subgraph.addNode(node);
    }
    for (N node : subgraph.nodes()) {
      for (N successorNode : graph.successors(node)) {
        if (subgraph.nodes().contains(successorNode)) {
          subgraph.putEdge(node, successorNode);
        }
      }
    }
    return subgraph;
  }
  /**
   * Returns the subgraph of {@code graph} induced by {@code nodes}. This subgraph is a new graph
   * that contains all of the nodes in {@code nodes}, and all of the {@link Graph#edges() edges}
   * (and associated edge values) from {@code graph} for which both nodes are contained by {@code
   * nodes}.
   *
   * @throws IllegalArgumentException if any element in {@code nodes} is not a node in the graph
   */
  public static <N, V> MutableValueGraph<N, V> inducedSubgraph(
      ValueGraph<N, V> graph, Iterable<? extends N> nodes) {
    MutableValueGraph<N, V> subgraph =
        (nodes instanceof Collection)
            ? ValueGraphBuilder.from(graph)
                .expectedNodeCount(((Collection<?>) nodes).size())
                .build()
            : ValueGraphBuilder.from(graph).build();
    for (N node : nodes) {
      subgraph.addNode(node);
    }
    for (N node : subgraph.nodes()) {
      for (N successorNode : graph.successors(node)) {
        if (subgraph.nodes().contains(successorNode)) {
          // requireNonNull is safe because the endpoint pair comes from the graph.
          subgraph.putEdgeValue(
              node,
              successorNode,
              requireNonNull(graph.edgeValueOrDefault(node, successorNode, null)));
        }
      }
    }
    return subgraph;
  }
  /**
   * Returns the subgraph of {@code network} induced by {@code nodes}. This subgraph is a new graph
   * that contains all of the nodes in {@code nodes}, and all of the {@link Network#edges() edges}
   * from {@code network} for which the {@link Network#incidentNodes(Object) incident nodes} are
   * both contained by {@code nodes}.
   *
   * @throws IllegalArgumentException if any element in {@code nodes} is not a node in the graph
   */
  public static <N, E> MutableNetwork<N, E> inducedSubgraph(
      Network<N, E> network, Iterable<? extends N> nodes) {
    MutableNetwork<N, E> subgraph =
        (nodes instanceof Collection)
            ? NetworkBuilder.from(network)
                .expectedNodeCount(((Collection<?>) nodes).size())
                .build()
            : NetworkBuilder.from(network).build();
    for (N node : nodes) {
      subgraph.addNode(node);
    }
    for (N node : subgraph.nodes()) {
      for (E edge : network.outEdges(node)) {
        N successorNode = network.incidentNodes(edge).adjacentNode(node);
        if (subgraph.nodes().contains(successorNode)) {
          subgraph.addEdge(node, successorNode, edge);
        }
      }
    }
    return subgraph;
  }
  /** Creates a mutable copy of {@code graph} with the same nodes and edges. */
  public static <N> MutableGraph<N> copyOf(Graph<N> graph) {
    MutableGraph<N> copy = GraphBuilder.from(graph).expectedNodeCount(graph.nodes().size()).build();
    for (N node : graph.nodes()) {
      copy.addNode(node);
    }
    for (EndpointPair<N> edge : graph.edges()) {
      copy.putEdge(edge.nodeU(), edge.nodeV());
    }
    return copy;
  }
  /** Creates a mutable copy of {@code graph} with the same nodes, edges, and edge values. */
  public static <N, V> MutableValueGraph<N, V> copyOf(ValueGraph<N, V> graph) {
    MutableValueGraph<N, V> copy =
        ValueGraphBuilder.from(graph).expectedNodeCount(graph.nodes().size()).build();
    for (N node : graph.nodes()) {
      copy.addNode(node);
    }
    for (EndpointPair<N> edge : graph.edges()) {
      // requireNonNull is safe because the endpoint pair comes from the graph.
      copy.putEdgeValue(
          edge.nodeU(),
          edge.nodeV(),
          requireNonNull(graph.edgeValueOrDefault(edge.nodeU(), edge.nodeV(), null)));
    }
    return copy;
  }
  /** Creates a mutable copy of {@code network} with the same nodes and edges. */
  public static <N, E> MutableNetwork<N, E> copyOf(Network<N, E> network) {
    MutableNetwork<N, E> copy =
        NetworkBuilder.from(network)
            .expectedNodeCount(network.nodes().size())
            .expectedEdgeCount(network.edges().size())
            .build();
    for (N node : network.nodes()) {
      copy.addNode(node);
    }
    for (E edge : network.edges()) {
      EndpointPair<N> endpointPair = network.incidentNodes(edge);
      copy.addEdge(endpointPair.nodeU(), endpointPair.nodeV(), edge);
    }
    return copy;
  }
  /** Validates that {@code value} is non-negative, returning it for convenient chaining. */
  @CanIgnoreReturnValue
  static int checkNonNegative(int value) {
    checkArgument(value >= 0, "Not true that %s is non-negative.", value);
    return value;
  }
  /** Validates that {@code value} is non-negative, returning it for convenient chaining. */
  @CanIgnoreReturnValue
  static long checkNonNegative(long value) {
    checkArgument(value >= 0, "Not true that %s is non-negative.", value);
    return value;
  }
  /** Validates that {@code value} is strictly positive, returning it for convenient chaining. */
  @CanIgnoreReturnValue
  static int checkPositive(int value) {
    checkArgument(value > 0, "Not true that %s is positive.", value);
    return value;
  }
  /** Validates that {@code value} is strictly positive, returning it for convenient chaining. */
  @CanIgnoreReturnValue
  static long checkPositive(long value) {
    checkArgument(value > 0, "Not true that %s is positive.", value);
    return value;
  }
  /**
   * An enum representing the state of a node during DFS. {@code PENDING} means that the node is on
   * the stack of the DFS, while {@code COMPLETE} means that the node and all its successors have
   * been already explored. Any node that has not been explored will not have a state at all.
   */
  private enum NodeVisitState {
    PENDING,
    COMPLETE
  }
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.features.python;
import com.facebook.buck.core.build.buildable.context.BuildableContext;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.build.execution.context.StepExecutionContext;
import com.facebook.buck.core.filesystems.AbsPath;
import com.facebook.buck.core.filesystems.PathWrapper;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.CustomHashedBuckOutLinking;
import com.facebook.buck.core.model.OutputLabel;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleParams;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.rules.attr.HasRuntimeDeps;
import com.facebook.buck.core.rules.common.BuildableSupport;
import com.facebook.buck.core.rules.impl.AbstractBuildRuleWithDeclaredAndExtraDeps;
import com.facebook.buck.core.rules.tool.BinaryBuildRule;
import com.facebook.buck.core.sourcepath.ForwardingBuildTargetSourcePath;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter;
import com.facebook.buck.core.test.rule.ExternalTestRunnerRule;
import com.facebook.buck.core.test.rule.ExternalTestRunnerTestSpec;
import com.facebook.buck.core.test.rule.TestRule;
import com.facebook.buck.core.toolchain.tool.Tool;
import com.facebook.buck.io.filesystem.BuildCellRelativePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.ProjectFilesystemUtils;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.test.TestCaseSummary;
import com.facebook.buck.test.TestResultSummary;
import com.facebook.buck.test.TestResults;
import com.facebook.buck.test.TestRunningOptions;
import com.facebook.buck.util.Memoizer;
import com.facebook.buck.util.json.ObjectMappers;
import com.facebook.buck.util.stream.RichStream;
import com.facebook.buck.util.types.Pair;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.SortedSet;
import java.util.concurrent.Callable;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
@SuppressWarnings("PMD.TestClassWithoutTestCases")
public class PythonTest extends AbstractBuildRuleWithDeclaredAndExtraDeps
    implements TestRule,
        HasRuntimeDeps,
        ExternalTestRunnerRule,
        BinaryBuildRule,
        CustomHashedBuckOutLinking {
  // Mutable: replaced via updateBuildRuleResolver() during action-graph updates.
  private BuildRuleResolver ruleResolver;
  private final Supplier<? extends SortedSet<BuildRule>> originalDeclaredDeps;
  private final Supplier<? extends SortedSet<BuildRule>> originalExtraDeps;
  private final Function<BuildRuleResolver, ImmutableMap<String, Arg>> envSupplier;
  // Caches the env map computed from envSupplier so it is only resolved once.
  private final Memoizer<ImmutableMap<String, Arg>> env = new Memoizer<>();
  private final PythonBinary binary;
  private final ImmutableSet<String> labels;
  private final Optional<Long> testRuleTimeoutMs;
  private final ImmutableSet<String> contacts;
  private final boolean withDownwardApi;
  private final ImmutableList<Pair<Float, ImmutableSet<Path>>> neededCoverage;
  private final ImmutableSet<SourcePath> additionalCoverageTargets;
  private PythonTest(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      BuildRuleResolver ruleResolver,
      Supplier<? extends SortedSet<BuildRule>> originalDeclaredDeps,
      Supplier<? extends SortedSet<BuildRule>> originalExtraDeps,
      Function<BuildRuleResolver, ImmutableMap<String, Arg>> envSupplier,
      PythonBinary binary,
      ImmutableSet<String> labels,
      ImmutableList<Pair<Float, ImmutableSet<Path>>> neededCoverage,
      ImmutableSet<SourcePath> additionalCoverageTargets,
      Optional<Long> testRuleTimeoutMs,
      ImmutableSet<String> contacts,
      boolean withDownwardApi) {
    super(buildTarget, projectFilesystem, params);
    this.ruleResolver = ruleResolver;
    this.originalDeclaredDeps = originalDeclaredDeps;
    this.originalExtraDeps = originalExtraDeps;
    this.envSupplier = envSupplier;
    this.binary = binary;
    this.labels = labels;
    this.neededCoverage = neededCoverage;
    this.additionalCoverageTargets = additionalCoverageTargets;
    this.testRuleTimeoutMs = testRuleTimeoutMs;
    this.contacts = contacts;
    this.withDownwardApi = withDownwardApi;
  }
  /**
   * Creates a {@link PythonTest}, re-parenting the rule's declared deps onto the test binary while
   * preserving the original declared/extra deps for {@link #getRuntimeDeps(BuildRuleResolver)}.
   */
  public static PythonTest from(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      BuildRuleResolver ruleResolver,
      Function<BuildRuleResolver, ImmutableMap<String, Arg>> env,
      PythonBinary binary,
      ImmutableSet<String> labels,
      ImmutableList<Pair<Float, ImmutableSet<Path>>> neededCoverage,
      ImmutableSet<SourcePath> additionalCoverageTargets,
      Optional<Long> testRuleTimeoutMs,
      ImmutableSet<String> contacts,
      boolean withDownwardApi) {
    return new PythonTest(
        buildTarget,
        projectFilesystem,
        params.withDeclaredDeps(ImmutableSortedSet.of(binary)).withoutExtraDeps(),
        ruleResolver,
        params.getDeclaredDeps(),
        params.getExtraDeps(),
        env,
        binary,
        labels,
        neededCoverage,
        additionalCoverageTargets,
        testRuleTimeoutMs,
        contacts,
        withDownwardApi);
  }
  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {
    // The test rule itself builds nothing; the underlying binary rule does the work.
    return ImmutableList.of();
  }
  @Override
  public SourcePath getSourcePathToOutput() {
    return ForwardingBuildTargetSourcePath.of(getBuildTarget(), binary.getSourcePathToOutput());
  }
  @Override
  public ImmutableList<Step> runTests(
      StepExecutionContext executionContext,
      TestRunningOptions options,
      BuildContext buildContext,
      TestReportingCallback testReportingCallback) {
    return new ImmutableList.Builder<Step>()
        .addAll(
            MakeCleanDirectoryStep.of(
                BuildCellRelativePath.fromCellRelativePath(
                    buildContext.getBuildCellRootPath(),
                    getProjectFilesystem(),
                    getPathToTestOutputDirectory())))
        .add(
            new PythonRunTestsStep(
                getProjectFilesystem().getRootPath(),
                getBuildTarget().getFullyQualifiedName(),
                binary
                    .getExecutableCommand(OutputLabel.defaultLabel())
                    .getCommandPrefix(buildContext.getSourcePathResolver()),
                getMergedEnv(buildContext.getSourcePathResolver()),
                options.getTestSelectorList(),
                testRuleTimeoutMs,
                getProjectFilesystem().resolve(getPathToTestOutputResult()),
                ProjectFilesystemUtils.relativize(
                    getProjectFilesystem().getRootPath(), buildContext.getBuildCellRootPath()),
                withDownwardApi))
        .build();
  }
  /** Merges the binary's executable environment with this rule's configured env overrides. */
  private ImmutableMap<String, String> getMergedEnv(SourcePathResolverAdapter pathResolver) {
    return new ImmutableMap.Builder<String, String>()
        .putAll(
            binary.getExecutableCommand(OutputLabel.defaultLabel()).getEnvironment(pathResolver))
        .putAll(Arg.stringify(getEnv(), pathResolver))
        .build();
  }
  private ImmutableMap<String, Arg> getEnv() {
    return env.get(() -> envSupplier.apply(ruleResolver));
  }
  @Override
  public ImmutableSet<String> getContacts() {
    return contacts;
  }
  @Override
  public Path getPathToTestOutputDirectory() {
    return BuildTargetPaths.getGenPath(
            getProjectFilesystem().getBuckPaths(), getBuildTarget(), "__test_%s_output__")
        .getPath();
  }
  private Path getPathToTestOutputResult() {
    return getPathToTestOutputDirectory().resolve("results.json");
  }
  @Override
  public ImmutableSet<String> getLabels() {
    return labels;
  }
  @Override
  public Callable<TestResults> interpretTestResults(
      StepExecutionContext executionContext,
      SourcePathResolverAdapter pathResolver,
      boolean isUsingTestSelectors) {
    return () -> {
      // Fail with a clear diagnostic (rather than a bare NoSuchElementException) if the test
      // runner never produced the results file.
      String resultsFileContents =
          getProjectFilesystem()
              .readFileIfItExists(getPathToTestOutputResult())
              .orElseThrow(
                  () ->
                      new IllegalStateException(
                          "Python test results file not found: " + getPathToTestOutputResult()));
      TestResultSummary[] testResultSummaries =
          ObjectMappers.readValue(resultsFileContents, TestResultSummary[].class);
      return TestResults.of(
          getBuildTarget(),
          ImmutableList.of(
              new TestCaseSummary(
                  getBuildTarget().getFullyQualifiedName(),
                  ImmutableList.copyOf(testResultSummaries))),
          contacts,
          labels.stream().map(Object::toString).collect(ImmutableSet.toImmutableSet()));
    };
  }
  @Override
  public boolean runTestSeparately() {
    return false;
  }
  // A python test rule is actually just a {@link NoopBuildRuleWithDeclaredAndExtraDeps} which
  // contains a reference to
  // a {@link PythonBinary} rule, which is the actual test binary. Therefore, we *need* this
  // rule around to run this test, so model this via the {@link HasRuntimeDeps} interface.
  @Override
  public Stream<BuildTarget> getRuntimeDeps(BuildRuleResolver buildRuleResolver) {
    return RichStream.from(originalDeclaredDeps.get())
        .concat(originalExtraDeps.get().stream())
        .map(BuildRule::getBuildTarget)
        .concat(binary.getRuntimeDeps(buildRuleResolver))
        .concat(
            BuildableSupport.getDeps(
                    binary.getExecutableCommand(OutputLabel.defaultLabel()), buildRuleResolver)
                .map(BuildRule::getBuildTarget));
  }
  @Override
  public boolean supportsStreamingTests() {
    return false;
  }
  @VisibleForTesting
  protected PythonBinary getBinary() {
    return binary;
  }
  protected ImmutableSet<SourcePath> getAdditionalCoverageTargets() {
    return additionalCoverageTargets;
  }
  @Override
  public Tool getExecutableCommand(OutputLabel outputLabel) {
    return binary.getExecutableCommand(OutputLabel.defaultLabel());
  }
  @Override
  public ExternalTestRunnerTestSpec getExternalTestRunnerSpec(
      StepExecutionContext executionContext,
      TestRunningOptions testRunningOptions,
      BuildContext buildContext) {
    Tool executable = binary.getExecutableCommand(OutputLabel.defaultLabel());
    List<AbsPath> requiredPaths = new ArrayList<>();
    // Extract the in-place components link tree from the command and add it to required paths so
    // that external runners know to ship it remotely.
    BuildableSupport.deriveInputs(executable)
        .map(buildContext.getSourcePathResolver()::getAbsolutePath)
        .forEach(requiredPaths::add);
    // Extract any required paths from env.
    for (Arg arg : getEnv().values()) {
      BuildableSupport.deriveInputs(arg)
          .map(buildContext.getSourcePathResolver()::getAbsolutePath)
          .forEach(requiredPaths::add);
    }
    return ExternalTestRunnerTestSpec.builder()
        .setCwd(getProjectFilesystem().getRootPath().getPath())
        .setTarget(getBuildTarget())
        .setType("pyunit")
        .setNeededCoverage(neededCoverage)
        .addAllCommand(executable.getCommandPrefix(buildContext.getSourcePathResolver()))
        .putAllEnv(getMergedEnv(buildContext.getSourcePathResolver()))
        .addAllLabels(getLabels())
        .addAllContacts(getContacts())
        .addAllAdditionalCoverageTargets(
            buildContext.getSourcePathResolver().getAllAbsolutePaths(getAdditionalCoverageTargets())
                .stream()
                .map(PathWrapper::getPath)
                .collect(ImmutableList.toImmutableList()))
        .setRequiredPaths(
            requiredPaths.stream()
                .map(PathWrapper::getPath)
                .collect(ImmutableList.toImmutableList()))
        .build();
  }
  @Override
  public void updateBuildRuleResolver(BuildRuleResolver ruleResolver) {
    this.ruleResolver = ruleResolver;
  }
  @Override
  public boolean supportsHashedBuckOutHardLinking() {
    return binary.supportsHashedBuckOutHardLinking();
  }
}
| |
/**
* Copyright 2016 Yahoo Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yahoo.pulsar.client.api;
import static org.apache.bookkeeper.test.PortManager.nextFreePort;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import javax.naming.AuthenticationException;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import org.apache.bookkeeper.test.PortManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.yahoo.pulsar.broker.PulsarService;
import com.yahoo.pulsar.broker.ServiceConfiguration;
import com.yahoo.pulsar.broker.authentication.AuthenticationDataSource;
import com.yahoo.pulsar.broker.authentication.AuthenticationProvider;
import com.yahoo.pulsar.broker.loadbalance.LoadManager;
import com.yahoo.pulsar.broker.loadbalance.impl.SimpleResourceUnit;
import com.yahoo.pulsar.broker.namespace.NamespaceService;
import com.yahoo.pulsar.client.api.ProducerConfiguration.MessageRoutingMode;
import com.yahoo.pulsar.client.impl.auth.AuthenticationTls;
import com.yahoo.pulsar.common.naming.DestinationName;
import com.yahoo.pulsar.common.naming.ServiceUnitId;
import com.yahoo.pulsar.common.policies.data.ClusterData;
import com.yahoo.pulsar.common.policies.data.PropertyAdmin;
import com.yahoo.pulsar.common.util.SecurityUtility;
import com.yahoo.pulsar.discovery.service.DiscoveryService;
import com.yahoo.pulsar.discovery.service.server.ServiceConfig;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
/**
 * Integration tests for broker topic-lookup behavior:
 * multi-broker lookup redirection, cross-cluster redirection, partitioned-topic
 * lookup, HTTPS (TLS) web-service lookup redirection, and discovery-service
 * binary-protocol lookup (plain, TLS, and with authentication/authorization).
 *
 * <p>Several tests inject Mockito spies of the {@code LoadManager} into the
 * private {@code NamespaceService.loadManager} field via reflection, so the
 * ordering of broker startup, spy creation, and field injection is significant.
 */
public class BrokerServiceLookupTest extends ProducerConsumerBase {
    private static final Logger log = LoggerFactory.getLogger(BrokerServiceLookupTest.class);

    /**
     * Starts broker-1 (via the base class) and creates a Pulsar client pointed
     * at its binary service URL, with client stats reporting disabled.
     */
    @BeforeMethod
    @Override
    protected void setup() throws Exception {
        super.init();
        com.yahoo.pulsar.client.api.ClientConfiguration clientConf = new com.yahoo.pulsar.client.api.ClientConfiguration();
        clientConf.setStatsInterval(0, TimeUnit.SECONDS);
        URI brokerServiceUrl = new URI("pulsar://localhost:" + BROKER_PORT);
        pulsarClient = PulsarClient.create(brokerServiceUrl.toString(), clientConf);
        super.producerBaseSetup();
    }

    /** Tears down brokers, clients, and mocked ZooKeeper state after each test. */
    @AfterMethod
    @Override
    protected void cleanup() throws Exception {
        super.internalCleanup();
    }

    /**
     * Usecase: Multiple Broker => Lookup Redirection test
     *
     * 1. Broker1 is a leader
     * 2. Lookup request reaches to Broker2 which redirects to leader (Broker1) with authoritative = false
     * 3. Leader (Broker1) finds out least loaded broker as Broker2 and redirects request to Broker2 with authoritative = true
     * 4. Broker2 receives final request to own a bundle with authoritative = true and client connects to Broker2
     *
     * @throws Exception
     */
    @Test
    public void testMultipleBrokerLookup() throws Exception {
        log.info("-- Starting {} test --", methodName);

        /**** start broker-2 ****/
        ServiceConfiguration conf2 = new ServiceConfiguration();
        conf2.setBrokerServicePort(PortManager.nextFreePort());
        conf2.setBrokerServicePortTls(PortManager.nextFreePort());
        conf2.setWebServicePort(PortManager.nextFreePort());
        conf2.setWebServicePortTls(PortManager.nextFreePort());
        conf2.setAdvertisedAddress("localhost");
        conf2.setClusterName(conf.getClusterName());
        PulsarService pulsar2 = startBroker(conf2);
        // publish both brokers' load reports so either can be chosen as least-loaded
        pulsar.getLoadManager().get().writeLoadReportOnZookeeper();
        pulsar2.getLoadManager().get().writeLoadReportOnZookeeper();

        LoadManager loadManager1 = spy(pulsar.getLoadManager().get());
        LoadManager loadManager2 = spy(pulsar2.getLoadManager().get());
        // inject the spies into the private NamespaceService.loadManager field
        Field loadManagerField = NamespaceService.class.getDeclaredField("loadManager");
        loadManagerField.setAccessible(true);

        // mock: redirect request to leader [2]
        doReturn(true).when(loadManager2).isCentralized();
        loadManagerField.set(pulsar2.getNamespaceService(), new AtomicReference<>(loadManager2));

        // mock: return Broker2 as a Least-loaded broker when leader receives request [3]
        doReturn(true).when(loadManager1).isCentralized();
        SimpleResourceUnit resourceUnit = new SimpleResourceUnit(pulsar2.getWebServiceAddress(), null);
        doReturn(resourceUnit).when(loadManager1).getLeastLoaded(any(ServiceUnitId.class));
        loadManagerField.set(pulsar.getNamespaceService(), new AtomicReference<>(loadManager1));
        /**** started broker-2 ****/

        URI brokerServiceUrl = new URI("pulsar://localhost:" + conf2.getBrokerServicePort());
        PulsarClient pulsarClient2 = PulsarClient.create(brokerServiceUrl.toString(), new ClientConfiguration());

        // load namespace-bundle by calling Broker2
        Consumer consumer = pulsarClient2.subscribe("persistent://my-property/use/my-ns/my-topic1", "my-subscriber-name",
                new ConsumerConfiguration());
        Producer producer = pulsarClient.createProducer("persistent://my-property/use/my-ns/my-topic1", new ProducerConfiguration());
        for (int i = 0; i < 10; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        Message msg = null;
        Set<String> messageSet = Sets.newHashSet();
        for (int i = 0; i < 10; i++) {
            msg = consumer.receive(5, TimeUnit.SECONDS);
            String receivedMessage = new String(msg.getData());
            log.debug("Received message: [{}]", receivedMessage);
            String expectedMessage = "my-message-" + i;
            testMessageOrderAndDuplicates(messageSet, receivedMessage, expectedMessage);
        }

        // Acknowledge the consumption of all messages at once
        consumer.acknowledgeCumulative(msg);
        consumer.close();
        producer.close();
        pulsarClient2.close();
        pulsar2.close();
        loadManager1 = null;
        loadManager2 = null;
    }

    /**
     * Usecase: Redirection due to different cluster
     * 1. Broker1 runs on cluster: "use" and Broker2 runs on cluster: "use2"
     * 2. Broker1 receives "use2" cluster request => Broker1 reads "/clusters" from global-zookeeper and
     *    redirects request to Broker2 which serves "use2"
     * 3. Broker2 receives redirect request and owns the namespace bundle
     *
     * @throws Exception
     */
    @Test
    public void testMultipleBrokerDifferentClusterLookup() throws Exception {
        log.info("-- Starting {} test --", methodName);

        /**** start broker-2 ****/
        final String newCluster = "use2";
        final String property = "my-property2";
        ServiceConfiguration conf2 = new ServiceConfiguration();
        conf2.setBrokerServicePort(PortManager.nextFreePort());
        conf2.setBrokerServicePortTls(PortManager.nextFreePort());
        conf2.setWebServicePort(PortManager.nextFreePort());
        conf2.setWebServicePortTls(PortManager.nextFreePort());
        conf2.setAdvertisedAddress("localhost");
        conf2.setClusterName(newCluster); // Broker2 serves newCluster
        String broker2ServiceUrl = "pulsar://localhost:" + conf2.getBrokerServicePort();
        // register the new cluster, a property allowed on it, and a namespace in it
        admin.clusters().createCluster(newCluster, new ClusterData("http://127.0.0.1:" + BROKER_WEBSERVICE_PORT, null, broker2ServiceUrl, null));
        admin.properties().createProperty(property,
                new PropertyAdmin(Lists.newArrayList("appid1", "appid2"), Sets.newHashSet(newCluster)));
        admin.namespaces().createNamespace(property + "/" + newCluster + "/my-ns");

        PulsarService pulsar2 = startBroker(conf2);
        pulsar.getLoadManager().get().writeLoadReportOnZookeeper();
        pulsar2.getLoadManager().get().writeLoadReportOnZookeeper();

        URI brokerServiceUrl = new URI(broker2ServiceUrl);
        PulsarClient pulsarClient2 = PulsarClient.create(brokerServiceUrl.toString(), new ClientConfiguration());

        // enable authorization: so, broker can validate cluster and redirect if finds different cluster
        pulsar.getConfiguration().setAuthorizationEnabled(true);
        // restart broker with authorization enabled: it initializes the AuthorizationManager
        stopBroker();
        startBroker();

        LoadManager loadManager2 = spy(pulsar2.getLoadManager().get());
        Field loadManagerField = NamespaceService.class.getDeclaredField("loadManager");
        loadManagerField.setAccessible(true);

        // mock: return Broker2 as a Least-loaded broker when leader receives request
        doReturn(true).when(loadManager2).isCentralized();
        SimpleResourceUnit resourceUnit = new SimpleResourceUnit(pulsar2.getWebServiceAddress(), null);
        doReturn(resourceUnit).when(loadManager2).getLeastLoaded(any(ServiceUnitId.class));
        loadManagerField.set(pulsar.getNamespaceService(), new AtomicReference<>(loadManager2));
        /**** started broker-2 ****/

        // load namespace-bundle by calling Broker2
        Consumer consumer = pulsarClient.subscribe("persistent://my-property2/use2/my-ns/my-topic1", "my-subscriber-name",
                new ConsumerConfiguration());
        Producer producer = pulsarClient2.createProducer("persistent://my-property2/use2/my-ns/my-topic1", new ProducerConfiguration());
        for (int i = 0; i < 10; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        Message msg = null;
        Set<String> messageSet = Sets.newHashSet();
        for (int i = 0; i < 10; i++) {
            msg = consumer.receive(5, TimeUnit.SECONDS);
            String receivedMessage = new String(msg.getData());
            log.debug("Received message: [{}]", receivedMessage);
            String expectedMessage = "my-message-" + i;
            testMessageOrderAndDuplicates(messageSet, receivedMessage, expectedMessage);
        }

        // Acknowledge the consumption of all messages at once
        consumer.acknowledgeCumulative(msg);
        consumer.close();
        producer.close();

        // disable authorization
        pulsar.getConfiguration().setAuthorizationEnabled(false);
        pulsarClient2.close();
        pulsar2.close();
        loadManager2 = null;
    }

    /**
     * Create a partitioned topic and let it be served by multiple brokers, which requires
     * a. tcp partitioned-metadata-lookup
     * b. multiple topic-lookup
     * c. partitioned producer-consumer
     *
     * @throws Exception
     */
    @Test
    public void testPartitionTopicLookup() throws Exception {
        log.info("-- Starting {} test --", methodName);

        int numPartitions = 8;
        DestinationName dn = DestinationName.get("persistent://my-property/use/my-ns/my-partitionedtopic1");

        ConsumerConfiguration conf = new ConsumerConfiguration();
        conf.setSubscriptionType(SubscriptionType.Exclusive);

        admin.persistentTopics().createPartitionedTopic(dn.toString(), numPartitions);

        /**** start broker-2 ****/
        ServiceConfiguration conf2 = new ServiceConfiguration();
        conf2.setBrokerServicePort(PortManager.nextFreePort());
        conf2.setBrokerServicePortTls(PortManager.nextFreePort());
        conf2.setWebServicePort(PortManager.nextFreePort());
        conf2.setWebServicePortTls(PortManager.nextFreePort());
        conf2.setAdvertisedAddress("localhost");
        conf2.setClusterName(pulsar.getConfiguration().getClusterName());
        PulsarService pulsar2 = startBroker(conf2);
        pulsar.getLoadManager().get().writeLoadReportOnZookeeper();
        pulsar2.getLoadManager().get().writeLoadReportOnZookeeper();

        LoadManager loadManager1 = spy(pulsar.getLoadManager().get());
        LoadManager loadManager2 = spy(pulsar2.getLoadManager().get());
        Field loadManagerField = NamespaceService.class.getDeclaredField("loadManager");
        loadManagerField.setAccessible(true);

        // mock: return Broker2 as a Least-loaded broker when leader receives request
        doReturn(true).when(loadManager1).isCentralized();
        loadManagerField.set(pulsar.getNamespaceService(), new AtomicReference<>(loadManager1));

        // mock: redirect request to leader
        doReturn(true).when(loadManager2).isCentralized();
        loadManagerField.set(pulsar2.getNamespaceService(), new AtomicReference<>(loadManager2));
        /**** broker-2 started ****/

        ProducerConfiguration producerConf = new ProducerConfiguration();
        producerConf.setMessageRoutingMode(MessageRoutingMode.RoundRobinPartition);
        Producer producer = pulsarClient.createProducer(dn.toString(), producerConf);
        Consumer consumer = pulsarClient.subscribe(dn.toString(), "my-partitioned-subscriber", conf);

        for (int i = 0; i < 20; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        Message msg = null;
        Set<String> messageSet = Sets.newHashSet();
        for (int i = 0; i < 20; i++) {
            msg = consumer.receive(5, TimeUnit.SECONDS);
            Assert.assertNotNull(msg, "Message should not be null");
            consumer.acknowledge(msg);
            String receivedMessage = new String(msg.getData());
            log.debug("Received message: [{}]", receivedMessage);
            Assert.assertTrue(messageSet.add(receivedMessage), "Message " + receivedMessage + " already received");
        }

        producer.close();
        consumer.unsubscribe();
        consumer.close();
        admin.persistentTopics().deletePartitionedTopic(dn.toString());
        pulsar2.close();
        loadManager2 = null;

        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * 1. Start broker1 and broker2 with tls enable
     * 2. Hit HTTPS lookup url at broker2 which redirects to HTTPS broker1
     *
     * @throws Exception
     */
    @Test
    public void testWebserviceServiceTls() throws Exception {
        log.info("-- Starting {} test --", methodName);
        final String TLS_SERVER_CERT_FILE_PATH = "./src/test/resources/certificate/server.crt";
        final String TLS_SERVER_KEY_FILE_PATH = "./src/test/resources/certificate/server.key";
        final String TLS_CLIENT_CERT_FILE_PATH = "./src/test/resources/certificate/client.crt";
        final String TLS_CLIENT_KEY_FILE_PATH = "./src/test/resources/certificate/client.key";

        /**** start broker-2 ****/
        ServiceConfiguration conf2 = new ServiceConfiguration();
        conf2.setBrokerServicePort(PortManager.nextFreePort());
        conf2.setBrokerServicePortTls(PortManager.nextFreePort());
        conf2.setWebServicePort(PortManager.nextFreePort());
        conf2.setWebServicePortTls(PortManager.nextFreePort());
        conf2.setAdvertisedAddress("localhost");
        conf2.setTlsAllowInsecureConnection(true);
        conf2.setTlsEnabled(true);
        conf2.setTlsCertificateFilePath(TLS_SERVER_CERT_FILE_PATH);
        conf2.setTlsKeyFilePath(TLS_SERVER_KEY_FILE_PATH);
        conf2.setClusterName(conf.getClusterName());
        PulsarService pulsar2 = startBroker(conf2);

        // restart broker1 with tls enabled
        conf.setTlsAllowInsecureConnection(true);
        conf.setTlsEnabled(true);
        conf.setTlsCertificateFilePath(TLS_SERVER_CERT_FILE_PATH);
        conf.setTlsKeyFilePath(TLS_SERVER_KEY_FILE_PATH);
        stopBroker();
        startBroker();

        pulsar.getLoadManager().get().writeLoadReportOnZookeeper();
        pulsar2.getLoadManager().get().writeLoadReportOnZookeeper();

        LoadManager loadManager1 = spy(pulsar.getLoadManager().get());
        LoadManager loadManager2 = spy(pulsar2.getLoadManager().get());
        Field loadManagerField = NamespaceService.class.getDeclaredField("loadManager");
        loadManagerField.setAccessible(true);

        // mock: redirect request to leader [2]
        doReturn(true).when(loadManager2).isCentralized();
        loadManagerField.set(pulsar2.getNamespaceService(), new AtomicReference<>(loadManager2));

        // mock: return Broker2 as a Least-loaded broker when leader receives
        // request [3]
        doReturn(true).when(loadManager1).isCentralized();
        SimpleResourceUnit resourceUnit = new SimpleResourceUnit(pulsar2.getWebServiceAddress(), null);
        doReturn(resourceUnit).when(loadManager1).getLeastLoaded(any(ServiceUnitId.class));
        loadManagerField.set(pulsar.getNamespaceService(), new AtomicReference<>(loadManager1));
        /**** started broker-2 ****/

        URI brokerServiceUrl = new URI("pulsar://localhost:" + conf2.getBrokerServicePort());
        PulsarClient pulsarClient2 = PulsarClient.create(brokerServiceUrl.toString(), new ClientConfiguration());

        final String lookupResourceUrl = "/lookup/v2/destination/persistent/my-property/use/my-ns/my-topic1";

        // set client cert_key file: build an SSLContext that presents the client
        // certificate and trusts any server certificate (insecure, test-only)
        KeyManager[] keyManagers = null;
        Certificate[] tlsCert = SecurityUtility.loadCertificatesFromPemFile(TLS_CLIENT_CERT_FILE_PATH);
        PrivateKey tlsKey = SecurityUtility.loadPrivateKeyFromPemFile(TLS_CLIENT_KEY_FILE_PATH);
        KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
        ks.load(null, null);
        ks.setKeyEntry("private", tlsKey, "".toCharArray(), tlsCert);
        KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        kmf.init(ks, "".toCharArray());
        keyManagers = kmf.getKeyManagers();
        TrustManager[] trustManagers = InsecureTrustManagerFactory.INSTANCE.getTrustManagers();
        SSLContext sslCtx = SSLContext.getInstance("TLS");
        sslCtx.init(keyManagers, trustManagers, new SecureRandom());
        // NOTE(review): mutates process-wide default socket factory; affects other HTTPS tests in the same JVM
        HttpsURLConnection.setDefaultSSLSocketFactory(sslCtx.getSocketFactory());

        // hit broker2 url
        URLConnection con = new URL(pulsar2.getWebServiceAddressTls() + lookupResourceUrl).openConnection();
        log.info("orignal url: {}", con.getURL());
        con.connect();
        log.info("connected url: {} ", con.getURL());
        // assert connect-url: broker2-https
        Assert.assertEquals(con.getURL().getPort(), conf2.getWebServicePortTls());
        InputStream is = con.getInputStream();
        // assert redirect-url: broker1-https only
        log.info("redirected url: {}", con.getURL());
        Assert.assertEquals(con.getURL().getPort(), conf.getWebServicePortTls());
        is.close();

        pulsarClient2.close();
        pulsar2.close();
        loadManager1 = null;
        loadManager2 = null;
    }

    /**
     * Discovery-Service lookup over binary-protocol
     * 1. Start discovery service
     * 2. start broker
     * 3. Create Producer/Consumer: by calling Discovery service for partitionedMetadata and topic lookup
     *
     * @throws Exception
     */
    @Test
    public void testDiscoveryLookup() throws Exception {
        // (1) start discovery service
        ServiceConfig config = new ServiceConfig();
        config.setServicePort(nextFreePort());
        config.setBindOnLocalhost(true);
        DiscoveryService discoveryService = spy(new DiscoveryService(config));
        doReturn(mockZooKeeperClientFactory).when(discoveryService).getZooKeeperClientFactory();
        discoveryService.start();

        // (2) lookup using discovery service
        final String discoverySvcUrl = discoveryService.getServiceUrl();
        ClientConfiguration clientConfig = new ClientConfiguration();
        PulsarClient pulsarClient2 = PulsarClient.create(discoverySvcUrl, clientConfig);
        Consumer consumer = pulsarClient2.subscribe("persistent://my-property2/use2/my-ns/my-topic1", "my-subscriber-name",
                new ConsumerConfiguration());
        Producer producer = pulsarClient2.createProducer("persistent://my-property2/use2/my-ns/my-topic1", new ProducerConfiguration());
        for (int i = 0; i < 10; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        Message msg = null;
        Set<String> messageSet = Sets.newHashSet();
        for (int i = 0; i < 10; i++) {
            msg = consumer.receive(5, TimeUnit.SECONDS);
            String receivedMessage = new String(msg.getData());
            log.debug("Received message: [{}]", receivedMessage);
            String expectedMessage = "my-message-" + i;
            testMessageOrderAndDuplicates(messageSet, receivedMessage, expectedMessage);
        }

        // Acknowledge the consumption of all messages at once
        consumer.acknowledgeCumulative(msg);
        consumer.close();
        producer.close();
    }

    /**
     * Verify discovery-service binary-proto lookup using tls
     *
     * @throws Exception
     */
    @Test
    public void testDiscoveryLookupTls() throws Exception {
        final String TLS_SERVER_CERT_FILE_PATH = "./src/test/resources/certificate/server.crt";
        final String TLS_SERVER_KEY_FILE_PATH = "./src/test/resources/certificate/server.key";
        final String TLS_CLIENT_CERT_FILE_PATH = "./src/test/resources/certificate/client.crt";
        final String TLS_CLIENT_KEY_FILE_PATH = "./src/test/resources/certificate/client.key";

        // (1) restart broker1 with tls enabled
        conf.setTlsAllowInsecureConnection(true);
        conf.setTlsEnabled(true);
        conf.setTlsCertificateFilePath(TLS_SERVER_CERT_FILE_PATH);
        conf.setTlsKeyFilePath(TLS_SERVER_KEY_FILE_PATH);
        stopBroker();
        startBroker();

        // (2) start discovery service
        ServiceConfig config = new ServiceConfig();
        config.setServicePort(nextFreePort());
        config.setServicePortTls(nextFreePort());
        config.setTlsEnabled(true);
        config.setBindOnLocalhost(true);
        config.setTlsCertificateFilePath(TLS_SERVER_CERT_FILE_PATH);
        config.setTlsKeyFilePath(TLS_SERVER_KEY_FILE_PATH);
        DiscoveryService discoveryService = spy(new DiscoveryService(config));
        doReturn(mockZooKeeperClientFactory).when(discoveryService).getZooKeeperClientFactory();
        discoveryService.start();

        // (3) lookup using discovery service over TLS with client-cert auth
        final String discoverySvcUrl = discoveryService.getServiceUrlTls();
        ClientConfiguration clientConfig = new ClientConfiguration();
        Map<String, String> authParams = new HashMap<>();
        authParams.put("tlsCertFile", TLS_CLIENT_CERT_FILE_PATH);
        authParams.put("tlsKeyFile", TLS_CLIENT_KEY_FILE_PATH);
        Authentication auth = new AuthenticationTls();
        auth.configure(authParams);
        clientConfig.setAuthentication(auth);
        clientConfig.setUseTls(true);
        clientConfig.setTlsAllowInsecureConnection(true);

        PulsarClient pulsarClient2 = PulsarClient.create(discoverySvcUrl, clientConfig);
        Consumer consumer = pulsarClient2.subscribe("persistent://my-property2/use2/my-ns/my-topic1", "my-subscriber-name",
                new ConsumerConfiguration());
        Producer producer = pulsarClient2.createProducer("persistent://my-property2/use2/my-ns/my-topic1", new ProducerConfiguration());
        for (int i = 0; i < 10; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        Message msg = null;
        Set<String> messageSet = Sets.newHashSet();
        for (int i = 0; i < 10; i++) {
            msg = consumer.receive(5, TimeUnit.SECONDS);
            String receivedMessage = new String(msg.getData());
            log.debug("Received message: [{}]", receivedMessage);
            String expectedMessage = "my-message-" + i;
            testMessageOrderAndDuplicates(messageSet, receivedMessage, expectedMessage);
        }

        // Acknowledge the consumption of all messages at once
        consumer.acknowledgeCumulative(msg);
        consumer.close();
        producer.close();
    }

    /**
     * Verifies that lookup through the discovery service succeeds when both
     * authentication and authorization are enabled and the provider accepts
     * the client (returns role "appid1").
     */
    @Test
    public void testDiscoveryLookupAuthAndAuthSuccess() throws Exception {
        // (1) start discovery service
        ServiceConfig config = new ServiceConfig();
        config.setServicePort(nextFreePort());
        config.setBindOnLocalhost(true);
        // add Authentication Provider
        Set<String> providersClassNames = Sets.newHashSet(MockAuthenticationProvider.class.getName());
        config.setAuthenticationProviders(providersClassNames);
        // enable authentication and authorization
        config.setAuthenticationEnabled(true);
        config.setAuthorizationEnabled(true);
        DiscoveryService discoveryService = spy(new DiscoveryService(config));
        doReturn(mockZooKeeperClientFactory).when(discoveryService).getZooKeeperClientFactory();
        discoveryService.start();

        // (2) lookup using discovery service
        final String discoverySvcUrl = discoveryService.getServiceUrl();
        ClientConfiguration clientConfig = new ClientConfiguration();
        // set authentication data: a minimal no-op Authentication whose method name
        // matches the mock provider registered above
        clientConfig.setAuthentication(new Authentication() {
            @Override
            public void close() throws IOException {
            }

            @Override
            public String getAuthMethodName() {
                return "auth";
            }

            @Override
            public AuthenticationDataProvider getAuthData() throws PulsarClientException {
                return new AuthenticationDataProvider() {
                };
            }

            @Override
            public void configure(Map<String, String> authParams) {
            }

            @Override
            public void start() throws PulsarClientException {
            }
        });
        PulsarClient pulsarClient = PulsarClient.create(discoverySvcUrl, clientConfig);
        Consumer consumer = pulsarClient.subscribe("persistent://my-property/use2/my-ns/my-topic1",
                "my-subscriber-name", new ConsumerConfiguration());
        Producer producer = pulsarClient.createProducer("persistent://my-property/use2/my-ns/my-topic1",
                new ProducerConfiguration());
        for (int i = 0; i < 10; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        Message msg = null;
        Set<String> messageSet = Sets.newHashSet();
        for (int i = 0; i < 10; i++) {
            msg = consumer.receive(5, TimeUnit.SECONDS);
            String receivedMessage = new String(msg.getData());
            log.debug("Received message: [{}]", receivedMessage);
            String expectedMessage = "my-message-" + i;
            testMessageOrderAndDuplicates(messageSet, receivedMessage, expectedMessage);
        }

        // Acknowledge the consumption of all messages at once
        consumer.acknowledgeCumulative(msg);
        consumer.close();
        producer.close();
    }

    /**
     * Verifies that lookup through the discovery service fails when the
     * configured authentication provider rejects every client.
     */
    @Test
    public void testDiscoveryLookupAuthenticationFailure() throws Exception {
        // (1) start discovery service
        ServiceConfig config = new ServiceConfig();
        config.setServicePort(nextFreePort());
        config.setBindOnLocalhost(true);
        // set Authentication provider which fails authentication
        Set<String> providersClassNames = Sets.newHashSet(MockAuthenticationProviderFail.class.getName());
        config.setAuthenticationProviders(providersClassNames);
        // enable authentication
        config.setAuthenticationEnabled(true);
        config.setAuthorizationEnabled(true);
        DiscoveryService discoveryService = spy(new DiscoveryService(config));
        doReturn(mockZooKeeperClientFactory).when(discoveryService).getZooKeeperClientFactory();
        discoveryService.start();

        // (2) lookup using discovery service
        final String discoverySvcUrl = discoveryService.getServiceUrl();
        ClientConfiguration clientConfig = new ClientConfiguration();
        // set authentication data (no-op Authentication; the provider fails regardless)
        clientConfig.setAuthentication(new Authentication() {
            @Override
            public void close() throws IOException {
            }

            @Override
            public String getAuthMethodName() {
                return "auth";
            }

            @Override
            public AuthenticationDataProvider getAuthData() throws PulsarClientException {
                return new AuthenticationDataProvider() {
                };
            }

            @Override
            public void configure(Map<String, String> authParams) {
            }

            @Override
            public void start() throws PulsarClientException {
            }
        });
        PulsarClient pulsarClient = PulsarClient.create(discoverySvcUrl, clientConfig);
        try {
            pulsarClient.subscribe("persistent://my-property/use2/my-ns/my-topic1", "my-subscriber-name",
                    new ConsumerConfiguration());
            Assert.fail("should have failed due to authentication");
        } catch (PulsarClientException e) {
            // Ok: expected
        }
    }

    /**
     * Verifies that lookup through the discovery service fails with a
     * LookupException when authentication succeeds but authorization fails
     * (the provider returns a role with no permissions).
     */
    @Test
    public void testDiscoveryLookupAuthorizationFailure() throws Exception {
        // (1) start discovery service
        ServiceConfig config = new ServiceConfig();
        config.setServicePort(nextFreePort());
        config.setBindOnLocalhost(true);
        // set Authentication provider which returns "invalid" appid so, authorization fails
        Set<String> providersClassNames = Sets.newHashSet(MockAuthorizationProviderFail.class.getName());
        config.setAuthenticationProviders(providersClassNames);
        // enable authentication
        config.setAuthenticationEnabled(true);
        config.setAuthorizationEnabled(true);
        DiscoveryService discoveryService = spy(new DiscoveryService(config));
        doReturn(mockZooKeeperClientFactory).when(discoveryService).getZooKeeperClientFactory();
        discoveryService.start();

        // (2) lookup using discovery service
        final String discoverySvcUrl = discoveryService.getServiceUrl();
        ClientConfiguration clientConfig = new ClientConfiguration();
        // set authentication data (no-op Authentication; authorization is what fails here)
        clientConfig.setAuthentication(new Authentication() {
            @Override
            public void close() throws IOException {
            }

            @Override
            public String getAuthMethodName() {
                return "auth";
            }

            @Override
            public AuthenticationDataProvider getAuthData() throws PulsarClientException {
                return new AuthenticationDataProvider() {
                };
            }

            @Override
            public void configure(Map<String, String> authParams) {
            }

            @Override
            public void start() throws PulsarClientException {
            }
        });
        PulsarClient pulsarClient = PulsarClient.create(discoverySvcUrl, clientConfig);
        try {
            pulsarClient.subscribe("persistent://my-property/use2/my-ns/my-topic1", "my-subscriber-name",
                    new ConsumerConfiguration());
            Assert.fail("should have failed due to authentication");
        } catch (PulsarClientException e) {
            // Ok: expected
            Assert.assertTrue(e instanceof PulsarClientException.LookupException);
        }
    }

    /**** helper classes ****/

    /** Authentication provider that accepts every client as role "appid1". */
    public static class MockAuthenticationProvider implements AuthenticationProvider {
        @Override
        public void close() throws IOException {
        }

        @Override
        public void initialize(ServiceConfiguration config) throws IOException {
        }

        @Override
        public String getAuthMethodName() {
            return "auth";
        }

        @Override
        public String authenticate(AuthenticationDataSource authData) throws AuthenticationException {
            return "appid1";
        }
    }

    /** Authentication provider that rejects every client. */
    public static class MockAuthenticationProviderFail extends MockAuthenticationProvider {
        @Override
        public String authenticate(AuthenticationDataSource authData) throws AuthenticationException {
            throw new AuthenticationException("authentication failed");
        }
    }

    /** Authentication provider that returns an unauthorized role so authorization fails. */
    public static class MockAuthorizationProviderFail extends MockAuthenticationProvider {
        @Override
        public String authenticate(AuthenticationDataSource authData) throws AuthenticationException {
            return "invalid";
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.pig.sampling;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.fail;
import java.io.IOException;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.testng.annotations.Test;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.sampling.VarOptItemsSketch;
/**
 * Unit tests for the {@code DataToVarOptSketch} Pig UDF: constructor argument
 * validation, accumulate/getValue/cleanup execution, degenerate inputs to
 * {@code exec()}, and output-schema validation.
 */
@SuppressWarnings("javadoc")
public class DataToVarOptSketchTest {

    /**
     * Valid constructor forms (no-arg, k, k + weight index) must succeed;
     * a negative k or a negative weight index must be rejected.
     */
    @Test
    @SuppressWarnings("unused")
    public void checkConstructors() {
        // these three should work
        DataToVarOptSketch udf = new DataToVarOptSketch();
        assertNotNull(udf);
        udf = new DataToVarOptSketch("255");
        assertNotNull(udf);
        udf = new DataToVarOptSketch("123", "0");
        assertNotNull(udf);

        try {
            new DataToVarOptSketch("-1");
            fail("Accepted negative k");
        } catch (final IllegalArgumentException e) {
            // expected
        }

        try {
            new DataToVarOptSketch("-1", "3");
            fail("Accepted negative k");
        } catch (final IllegalArgumentException e) {
            // expected
        }

        try {
            new DataToVarOptSketch("10", "-1");
            // Fixed message: this guard is about the negative weight index, not k
            fail("Accepted negative weight index");
        } catch (final IllegalArgumentException e) {
            // expected
        }
    }

    /**
     * Accumulates the same bag twice (staying in exact mode, since 2 * (k/2 - 1)
     * updates < k) and verifies the serialized sketch round-trips to a sketch
     * equal to a directly-built reference sketch.
     */
    @Test
    public void checkExecution() {
        final int k = 10;
        final DataToVarOptSketch udf = new DataToVarOptSketch(Integer.toString(k), "0");

        final DataBag inputBag = BagFactory.getInstance().newDefaultBag();
        final Tuple inputTuple = TupleFactory.getInstance().newTuple(1);

        // calling accumulate() twice, but keep in exact mode so reference sketch has same values
        try {
            final VarOptItemsSketch<Tuple> sketch = VarOptItemsSketch.newInstance(k);
            for (int i = 1; i < (k / 2); ++i) {
                final Tuple t = TupleFactory.getInstance().newTuple(3);
                t.set(0, 1.0 * i); // weight lives at index 0, matching the UDF's weight index
                t.set(1, i);
                t.set(2, -i);
                inputBag.add(t);
                sketch.update(t, 1.0 * i);
                sketch.update(t, 1.0 * i); // since calling accumulate() twice later
            }
            inputTuple.set(0, inputBag);

            assertNull(udf.getValue());
            udf.accumulate(inputTuple);
            udf.accumulate(inputTuple);
            final DataByteArray outBytes = udf.getValue();
            udf.cleanup();
            // cleanup() must reset accumulated state
            assertNull(udf.getValue());

            final VarOptItemsSketch<Tuple> result = VarOptItemsSketch.heapify(Memory.wrap(outBytes.get()),
                    new ArrayOfTuplesSerDe());
            assertNotNull(result);
            VarOptCommonAlgebraicTest.compareResults(result, sketch);
        } catch (final IOException e) {
            fail("Unexpected exception");
        }
    }

    /** exec() must return null for null input, an empty tuple, and a null field. */
    @Test
    public void degenerateExecInput() {
        final DataToVarOptSketch udf = new DataToVarOptSketch();

        try {
            assertNull(udf.exec(null));
            assertNull(udf.exec(TupleFactory.getInstance().newTuple(0)));

            final Tuple in = TupleFactory.getInstance().newTuple(1);
            in.set(0, null);
            assertNull(udf.exec(in));
        } catch (final IOException e) {
            fail("Unexpected exception");
        }
    }

    /**
     * A bag of tuples whose weight column (index 1 or 2) is DOUBLE or FLOAT
     * must produce a single BYTEARRAY output field.
     */
    @Test
    public void validOutputSchemaTest() throws IOException {
        DataToVarOptSketch udf = new DataToVarOptSketch("5", "1");

        final Schema recordSchema = new Schema();
        recordSchema.add(new Schema.FieldSchema("field1", DataType.CHARARRAY));
        recordSchema.add(new Schema.FieldSchema("field2", DataType.DOUBLE));
        recordSchema.add(new Schema.FieldSchema("field3", DataType.FLOAT));
        final Schema tupleSchema = new Schema();
        tupleSchema.add(new Schema.FieldSchema("record", recordSchema, DataType.TUPLE));

        final Schema inputSchema = new Schema();
        inputSchema.add(new Schema.FieldSchema("data", tupleSchema, DataType.BAG));

        Schema output = udf.outputSchema(inputSchema);
        assertEquals(output.size(), 1);
        assertEquals(output.getField(0).type, DataType.BYTEARRAY);

        // use the float as a weight instead
        udf = new DataToVarOptSketch("5", "2");
        output = udf.outputSchema(inputSchema);
        assertEquals(output.size(), 1);
        assertEquals(output.getField(0).type, DataType.BYTEARRAY);
    }

    /**
     * outputSchema() must reject a null schema, an empty schema, a non-weight
     * type in the weight column, and a TUPLE where a BAG is expected.
     */
    @Test
    public void badOutputSchemaTest() throws IOException {
        final Schema recordSchema = new Schema();
        recordSchema.add(new Schema.FieldSchema("field1", DataType.CHARARRAY));
        recordSchema.add(new Schema.FieldSchema("field2", DataType.DOUBLE));
        recordSchema.add(new Schema.FieldSchema("field3", DataType.INTEGER));
        final Schema tupleSchema = new Schema();
        tupleSchema.add(new Schema.FieldSchema("record", recordSchema, DataType.TUPLE));

        final Schema inputSchema = new Schema();
        inputSchema.add(new Schema.FieldSchema("data", tupleSchema, DataType.BAG));

        final DataToVarOptSketch udf = new DataToVarOptSketch("5", "0");

        // degenerate input schemas
        try {
            udf.outputSchema(null);
            fail("Accepted null schema");
        } catch (final IllegalArgumentException e) {
            // expected
        }

        try {
            udf.outputSchema(new Schema());
            fail("Accepted empty schema");
        } catch (final IllegalArgumentException e) {
            // expected
        }

        // expecting weight in element 0 (based on constructor arg), but field1 is CHARARRAY
        try {
            udf.outputSchema(inputSchema);
            fail("Accepted non-weight value in weightIndex column");
        } catch (final IllegalArgumentException e) {
            // expected
        }

        // passing in Tuple instead of DataBag
        try {
            udf.outputSchema(tupleSchema);
            fail("Accepted Tuple instead of DataBag");
        } catch (final IllegalArgumentException e) {
            // expected
        }
    }
}
| |
/*
* Copyright (c) 2015, salesforce.com, inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the
* following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.dataloader.ui;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.salesforce.dataloader.ConfigTestBase;
import com.salesforce.dataloader.client.SimplePost;
import com.salesforce.dataloader.client.SimplePostFactory;
import com.salesforce.dataloader.config.Config;
import com.salesforce.dataloader.exception.ParameterLoadException;
import com.salesforce.dataloader.model.OAuthToken;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import static org.mockito.Mockito.*;
import java.io.*;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.function.Function;
/**
* Created by rmazzeo on 12/9/15.
*/
public class OAuthSecretFlowTests extends ConfigTestBase {
private SimplePost mockSimplePost;
private Config config;
private ArrayList<String> existingOAuthEnvironments;
private String oauthServer;
private String oauthClientId;
private String oauthRedirectUri;
private String existingEndPoint;
private Function<SimplePostFactory.Criteria, SimplePost> existingConstructor;
@Before
public void testSetup(){
config = getController().getConfig();
existingOAuthEnvironments = config.getStrings(Config.OAUTH_ENVIRONMENTS);
existingEndPoint = config.getString(Config.ENDPOINT);
oauthServer = "http://OAUTH_PARTIAL_SERVER";
oauthClientId = "CLIENTID";
oauthRedirectUri = "REDIRECTURI";
mockSimplePost = mock(SimplePost.class);
config.setValue(Config.OAUTH_ENVIRONMENTS, "Testing");
config.setOAuthEnvironmentString("Testing", Config.OAUTH_PARTIAL_SERVER, oauthServer);
config.setOAuthEnvironmentString("Testing", Config.OAUTH_PARTIAL_CLIENTID, oauthClientId);
config.setOAuthEnvironmentString("Testing", Config.OAUTH_PARTIAL_REDIRECTURI, oauthRedirectUri);
config.setOAuthEnvironment("Testing");
existingConstructor = SimplePostFactory.getConstructor();
SimplePostFactory.setConstructor(c -> mockSimplePost);
}
@After
public void testCleanup(){
config.setValue(Config.OAUTH_ENVIRONMENTS, existingOAuthEnvironments.toArray(new String[0]));
config.setValue(Config.ENDPOINT, existingEndPoint);
SimplePostFactory.setConstructor(existingConstructor);
}
@Test
public void testGetStartUrl(){
try {
String expected = "http://OAUTH_PARTIAL_SERVER/services/oauth2/authorize?response_type=code&display=popup&client_id=CLIENTID&redirect_uri=REDIRECTURI";
String actual = OAuthSecretFlow.getStartUrlImpl(config);
Assert.assertEquals( "OAuth Token Flow returned the wrong url", expected, actual);
} catch (UnsupportedEncodingException e) {
Assert.fail("could not get start url" + e.toString());
}
}
@Test
public void testInvalidInitialReponseUrl(){
try {
String expected = null;
String actual = OAuthSecretFlow.OAuthSecretBrowserListener.handleInitialUrl( "http://OAUTH_PARTIAL_SERVER/services/oauth2/authorize?doit=1");
Assert.assertEquals("OAuthToken should not have handled this", expected, actual);
} catch (URISyntaxException e) {
Assert.fail("Could not handle the url:" + e.toString());
}
}
@Test
public void testValidInitialResponseUrl(){
try {
String expected = "TOKEN";
String actual = OAuthSecretFlow.OAuthSecretBrowserListener.handleInitialUrl( "http://OAUTH_PARTIAL_SERVER/services/oauth2/authorize?code=TOKEN");
Assert.assertEquals("OAuthToken should not have handled this", expected, actual);
} catch (URISyntaxException e) {
Assert.fail("Could not handle the url:" + e.toString());
}
}
@Test
public void testValidSecondResponseAccessToken(){
try {
Gson gson = new GsonBuilder()
.setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
.create();
OAuthToken token = new OAuthToken();
token.setAccessToken("ACCESS");
String jsonToken = gson.toJson(token);
InputStream input = new ByteArrayInputStream(jsonToken.getBytes(StandardCharsets.UTF_8));
when(mockSimplePost.getInput()).thenAnswer(i -> input);
when(mockSimplePost.isSuccessful()).thenReturn(true);
SimplePost simplePost = OAuthSecretFlow.OAuthSecretBrowserListener.handleSecondPost("simplePost", config);
String expected = "ACCESS";
String actual = config.getString(Config.OAUTH_ACCESSTOKEN);
when(mockSimplePost.isSuccessful()).thenReturn(true);
Assert.assertEquals("Access token was not set", expected, actual);
} catch (ParameterLoadException | IOException e) {
Assert.fail("Could not handle second request:" + e.toString());
}
}
@Test
public void testValidSecondResponseRefreshToken(){
try {
Gson gson = new GsonBuilder()
.setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
.create();
OAuthToken token = new OAuthToken();
token.setRefreshToken("REFRESHTOKEN");
String jsonToken = gson.toJson(token);
InputStream input = new ByteArrayInputStream(jsonToken.getBytes(StandardCharsets.UTF_8));
when(mockSimplePost.getInput()).thenAnswer(i -> input);
when(mockSimplePost.isSuccessful()).thenReturn(true);
SimplePost simplePost = OAuthSecretFlow.OAuthSecretBrowserListener.handleSecondPost("simplePost", config);
String expected = "REFRESHTOKEN";
String actual = config.getString(Config.OAUTH_REFRESHTOKEN);
Assert.assertEquals("Access token was not set", expected, actual);
} catch (ParameterLoadException | IOException e) {
Assert.fail("Could not handle second request:" + e.toString());
}
}
@Test
public void testValidSecondResponseInstanceUrl(){
try {
Gson gson = new GsonBuilder()
.setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
.create();
OAuthToken token = new OAuthToken();
token.setInstanceUrl("http://INSTANCEURL");
String jsonToken = gson.toJson(token);
InputStream input = new ByteArrayInputStream(jsonToken.getBytes(StandardCharsets.UTF_8));
when(mockSimplePost.getInput()).thenAnswer(i -> input);
when(mockSimplePost.isSuccessful()).thenReturn(true);
SimplePost simplePost = OAuthSecretFlow.OAuthSecretBrowserListener.handleSecondPost("simplePost", config);
String expected = "http://INSTANCEURL";
String actual = config.getString(Config.ENDPOINT);
Assert.assertEquals("Access token was not set", expected, actual);
} catch (ParameterLoadException | IOException e) {
Assert.fail("Could not handle second request:" + e.toString());
}
}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 University of California San Diego
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*
*/
package org.broad.igv.util;
/*
* The MIT License (MIT)
*
* Copyright (c) 2007-2015 Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
import org.broad.igv.logging.*;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * An interval tree: a red-black tree of {@link Interval}s keyed by low
 * endpoint, with each node augmented by the minimum low endpoint and maximum
 * high endpoint of its subtree, enabling pruned overlap search. Follows the
 * presentation in CLR ("Introduction to Algorithms",
 * Cormen/Leiserson/Rivest).
 *
 * @param <T> value type carried by the stored intervals
 */
public class IntervalTree<T> {

    private static Logger logger = LogManager.getLogger(IntervalTree.class);

    // When true, insert() throws UnsupportedOperationException.
    boolean immutable = false;

    IntervalTree.Node root;

    // Shared sentinel used in place of null children/parents.
    IntervalTree.Node NIL = IntervalTree.Node.NIL;

    /** Creates an empty, mutable tree. */
    public IntervalTree() {
        this.root = NIL;
    }

    /**
     * Creates an empty tree.
     *
     * @param immutable if true, subsequent insert() calls are rejected
     */
    public IntervalTree(boolean immutable) {
        this.immutable = immutable;
        this.root = NIL;
    }

    /**
     * Inserts an interval into the tree.
     *
     * @param interval the interval to add
     * @throws UnsupportedOperationException if the tree was built as immutable
     */
    public void insert(Interval<T> interval) {
        if (immutable) {
            throw new java.lang.UnsupportedOperationException("Tree is immutable. Inserts not allowed");
        }
        IntervalTree.Node node = new IntervalTree.Node(interval);
        insert(node);
    }

    /**
     * Returns all stored intervals that overlap the query range, as a list.
     *
     * @param start low endpoint of the query interval
     * @param end   high endpoint of the query interval
     * @return all overlapping intervals; an empty list if the tree is empty
     */
    public List<Interval<T>> findOverlapping(int start, int end) {

        // Third Interval argument is its associated value; 0 is a placeholder
        // since only the endpoints matter for searching.
        Interval searchInterval = new Interval(start, end, 0);

        if (root().isNull()) {
            return Collections.emptyList();
        }

        List<Interval<T>> results = new ArrayList();
        searchAll(searchInterval, root(), results);
        return results;
    }

    public String toString() {
        return root().toString();
    }

    // Recursively collects every interval in the subtree at 'node' that
    // overlaps 'interval', pruning subtrees via the min/max annotations.
    private List<Interval<T>> searchAll(Interval interval, IntervalTree.Node node, List<Interval<T>> results) {

        if (node.interval.overlaps(interval)) {
            results.add(node.interval);
        }

        // The left subtree can only match if its max high endpoint reaches
        // the query's low endpoint.
        if (!node.left.isNull() && node.left.max >= interval.getLow()) {
            searchAll(interval, node.left, results);
        }

        // The right subtree can only match if its min low endpoint does not
        // exceed the query's high endpoint.
        if (!node.right.isNull() && node.right.min <= interval.getHigh()) {
            searchAll(interval, node.right, results);
        }

        return results;
    }

    /**
     * Return all intervals in tree.
     * TODO: an iterator would be more efficient.
     *
     * @return every stored interval, collected in pre-order
     */
    public List<Interval<T>> getIntervals() {
        if (root().isNull()) {
            return Collections.emptyList();
        }
        List<Interval<T>> results = new ArrayList(size());
        getAll(root, results);
        return results;
    }

    // Pre-order traversal accumulating every interval in the subtree.
    private List<Interval<T>> getAll(IntervalTree.Node node, List<Interval<T>> results) {

        results.add(node.interval);
        if (!node.left.isNull()) {
            getAll(node.left, results);
        }
        if (!node.right.isNull()) {
            getAll(node.right, results);
        }
        return results;
    }

    /**
     * Recomputes the true maximum high endpoint of the subtree from scratch,
     * ignoring the cached {@code max} fields. Used for testing only.
     *
     * @param node subtree root
     * @return maximum high endpoint, or Integer.MIN_VALUE for an empty subtree
     */
    private int getRealMax(IntervalTree.Node node) {
        if (node.isNull())
            return Integer.MIN_VALUE;
        int leftMax = getRealMax(node.left);
        int rightMax = getRealMax(node.right);
        int nodeHigh = (node.interval).getHigh();

        int max1 = (leftMax > rightMax ? leftMax : rightMax);
        return (max1 > nodeHigh ? max1 : nodeHigh);
    }

    /**
     * Recomputes the true minimum low endpoint of the subtree from scratch,
     * ignoring the cached {@code min} fields. Used for testing only.
     *
     * @param node subtree root
     * @return minimum low endpoint, or Integer.MAX_VALUE for an empty subtree
     */
    private int getRealMin(IntervalTree.Node node) {
        if (node.isNull())
            return Integer.MAX_VALUE;

        int leftMin = getRealMin(node.left);
        int rightMin = getRealMin(node.right);
        int nodeLow = (node.interval).getLow();

        int min1 = (leftMin < rightMin ? leftMin : rightMin);
        return (min1 < nodeLow ? min1 : nodeLow);
    }

    // Inserts the node as in a plain BST, then restores the red-black
    // invariants by recoloring and rotating up the tree (standard CLR
    // RB-insert fixup).
    private void insert(IntervalTree.Node x) {
        assert (x != null);
        assert (!x.isNull());

        treeInsert(x);
        x.color = IntervalTree.Node.RED;
        while (x != this.root && x.parent.color == IntervalTree.Node.RED) {
            if (x.parent == x.parent.parent.left) {
                IntervalTree.Node y = x.parent.parent.right;
                if (y.color == IntervalTree.Node.RED) {
                    // Case 1: red uncle -- recolor and continue from grandparent.
                    x.parent.color = IntervalTree.Node.BLACK;
                    y.color = IntervalTree.Node.BLACK;
                    x.parent.parent.color = IntervalTree.Node.RED;
                    x = x.parent.parent;
                } else {
                    if (x == x.parent.right) {
                        // Case 2: reduce to case 3 with a left rotation.
                        x = x.parent;
                        this.leftRotate(x);
                    }
                    // Case 3: recolor and rotate the grandparent.
                    x.parent.color = IntervalTree.Node.BLACK;
                    x.parent.parent.color = IntervalTree.Node.RED;
                    this.rightRotate(x.parent.parent);
                }
            } else {
                // Mirror image of the branch above, with left/right exchanged.
                IntervalTree.Node y = x.parent.parent.left;
                if (y.color == IntervalTree.Node.RED) {
                    x.parent.color = IntervalTree.Node.BLACK;
                    y.color = IntervalTree.Node.BLACK;
                    x.parent.parent.color = IntervalTree.Node.RED;
                    x = x.parent.parent;
                } else {
                    if (x == x.parent.left) {
                        x = x.parent;
                        this.rightRotate(x);
                    }
                    x.parent.color = IntervalTree.Node.BLACK;
                    x.parent.parent.color = IntervalTree.Node.RED;
                    this.leftRotate(x.parent.parent);
                }
            }
        }
        this.root.color = IntervalTree.Node.BLACK;
    }

    private IntervalTree.Node root() {
        return this.root;
    }

    // Leftmost (lowest-keyed) node of the subtree.
    private IntervalTree.Node minimum(IntervalTree.Node node) {
        assert (node != null);
        assert (!node.isNull());

        while (!node.left.isNull()) {
            node = node.left;
        }
        return node;
    }

    // Rightmost (highest-keyed) node of the subtree.
    private IntervalTree.Node maximum(IntervalTree.Node node) {
        assert (node != null);
        assert (!node.isNull());

        while (!node.right.isNull()) {
            node = node.right;
        }
        return node;
    }

    // In-order successor of x; returns NIL if x is the maximum.
    private IntervalTree.Node successor(IntervalTree.Node x) {
        assert (x != null);
        assert (!x.isNull());

        if (!x.right.isNull()) {
            return this.minimum(x.right);
        }

        // Walk up until we leave a right subtree.
        IntervalTree.Node y = x.parent;
        while ((!y.isNull()) && x == y.right) {
            x = y;
            y = y.parent;
        }
        return y;
    }

    // In-order predecessor of x; returns NIL if x is the minimum.
    private IntervalTree.Node predecessor(IntervalTree.Node x) {
        assert (x != null);
        assert (!x.isNull());

        if (!x.left.isNull()) {
            return this.maximum(x.left);
        }

        // Walk up until we leave a left subtree.
        IntervalTree.Node y = x.parent;
        while ((!y.isNull()) && x == y.left) {
            x = y;
            y = y.parent;
        }
        return y;
    }

    // Standard left rotation; afterwards refreshes the min/max annotations
    // from x upward.
    private void leftRotate(IntervalTree.Node x) {
        IntervalTree.Node y = x.right;
        x.right = y.left;
        if (y.left != NIL) {
            y.left.parent = x;
        }
        y.parent = x.parent;
        if (x.parent == NIL) {
            this.root = y;
        } else {
            if (x.parent.left == x) {
                x.parent.left = y;
            } else {
                x.parent.right = y;
            }
        }
        y.left = x;
        x.parent = y;

        applyUpdate(x);
        // No need to call applyUpdate on y: y is now an ancestor of x, so
        // applyUpdate(x) refreshes it while walking up.
    }

    // Standard right rotation; afterwards refreshes the min/max annotations
    // from x upward.
    private void rightRotate(IntervalTree.Node x) {
        IntervalTree.Node y = x.left;
        x.left = y.right;
        if (y.right != NIL) {
            y.right.parent = x;
        }
        y.parent = x.parent;
        if (x.parent == NIL) {
            this.root = y;
        } else {
            if (x.parent.right == x) {
                x.parent.right = y;
            } else {
                x.parent.left = y;
            }
        }
        y.right = x;
        x.parent = y;

        applyUpdate(x);
        // No need to call applyUpdate on y: y is now an ancestor of x, so
        // applyUpdate(x) refreshes it while walking up.
    }

    /**
     * Note: Does not maintain RB constraints, this is done post insert
     *
     * @param x the node to insert, BST-style, keyed on the interval's low endpoint
     */
    private void treeInsert(IntervalTree.Node x) {
        IntervalTree.Node node = this.root;
        IntervalTree.Node y = NIL;
        while (node != NIL) {
            y = node;
            if (x.interval.getLow() <= node.interval.getLow()) {
                node = node.left;
            } else {
                node = node.right;
            }
        }
        x.parent = y;

        if (y == NIL) {
            this.root = x;
            x.left = x.right = NIL;
        } else {
            if (x.interval.getLow() <= y.interval.getLow()) {
                y.left = x;
            } else {
                y.right = x;
            }
        }

        this.applyUpdate(x);
    }

    // Applies the statistic update on the node and its ancestors.
    private void applyUpdate(IntervalTree.Node node) {
        while (!node.isNull()) {
            this.update(node);
            node = node.parent;
        }
    }

    // Recomputes a node's min/max annotations from its children and its own
    // interval endpoints.
    // Note: this method is called millions of times and is optimized for speed, or as optimized as java allows.
    private void update(IntervalTree.Node node) {

        int nodeMax = node.left.max > node.right.max ? node.left.max : node.right.max;
        int intervalHigh = node.interval.high;
        node.max = nodeMax > intervalHigh ? nodeMax : intervalHigh;

        int nodeMin = node.left.min < node.right.min ? node.left.min : node.right.min;
        int intervalLow = node.interval.low;
        node.min = nodeMin < intervalLow ? nodeMin : intervalLow;
    }

    /**
     * Returns the number of nodes in the tree.
     */
    public int size() {
        return _size(this.root);
    }

    private int _size(IntervalTree.Node node) {
        if (node.isNull())
            return 0;
        return 1 + _size(node.left) + _size(node.right);
    }

    // RB invariant: every red node has two black children.
    private boolean allRedNodesFollowConstraints(IntervalTree.Node node) {
        if (node.isNull())
            return true;

        if (node.color == IntervalTree.Node.BLACK) {
            return (allRedNodesFollowConstraints(node.left) &&
                    allRedNodesFollowConstraints(node.right));
        }

        // At this point, we know we're on a RED node.
        return (node.left.color == IntervalTree.Node.BLACK &&
                node.right.color == IntervalTree.Node.BLACK &&
                allRedNodesFollowConstraints(node.left) &&
                allRedNodesFollowConstraints(node.right));
    }

    // Check that both ends are equally balanced in terms of black height.
    private boolean isBalancedBlackHeight(IntervalTree.Node node) {
        if (node.isNull())
            return true;
        return (blackHeight(node.left) == blackHeight(node.right) &&
                isBalancedBlackHeight(node.left) &&
                isBalancedBlackHeight(node.right));
    }

    // The black height of a node should be left/right equal.
    private int blackHeight(IntervalTree.Node node) {
        if (node.isNull())
            return 0;

        int leftBlackHeight = blackHeight(node.left);
        if (node.color == IntervalTree.Node.BLACK) {
            return leftBlackHeight + 1;
        } else {
            return leftBlackHeight;
        }
    }

    /**
     * Test code: make sure that the tree has all the properties
     * defined by Red Black trees and interval trees
     * <p/>
     * o. Root is black.
     * <p/>
     * o. NIL is black.
     * <p/>
     * o. Red nodes have black children.
     * <p/>
     * o. Every path from root to leaves contains the same number of
     * black nodes.
     * <p/>
     * o. getMax(node) is the maximum of any interval rooted at that node.
     * <p/>
     * This code is expensive, and only meant to be used for
     * assertions and testing.
     */
    public boolean isValid() {
        if (this.root.color != IntervalTree.Node.BLACK) {
            logger.warn("root color is wrong");
            return false;
        }
        if (NIL.color != IntervalTree.Node.BLACK) {
            logger.warn("NIL color is wrong");
            return false;
        }
        if (allRedNodesFollowConstraints(this.root) == false) {
            logger.warn("red node doesn't follow constraints");
            return false;
        }
        if (isBalancedBlackHeight(this.root) == false) {
            logger.warn("black height unbalanced");
            return false;
        }

        return hasCorrectMaxFields(this.root) &&
                hasCorrectMinFields(this.root);
    }

    // Cached max annotations must agree with a from-scratch recomputation.
    private boolean hasCorrectMaxFields(IntervalTree.Node node) {
        if (node.isNull())
            return true;
        return (getRealMax(node) == (node.max) &&
                hasCorrectMaxFields(node.left) &&
                hasCorrectMaxFields(node.right));
    }

    // Cached min annotations must agree with a from-scratch recomputation.
    private boolean hasCorrectMinFields(IntervalTree.Node node) {
        if (node.isNull())
            return true;
        return (getRealMin(node) == (node.min) &&
                hasCorrectMinFields(node.left) &&
                hasCorrectMinFields(node.right));
    }

    /** A tree node: stored interval plus augmentation and RB bookkeeping. */
    static class Node {
        public static boolean BLACK = false;
        public static boolean RED = true;

        Interval interval;

        // Augmentation: smallest low endpoint (min) and largest high
        // endpoint (max) over this node's entire subtree; used for pruning.
        int min;
        int max;

        IntervalTree.Node left;
        IntervalTree.Node right;

        // Color and parent are used for inserts. If tree is immutable these are not required (no requirement
        // to store these persistently).
        boolean color;
        IntervalTree.Node parent;

        // Sentinel constructor: identity values for min/max so update()
        // works on leaves.
        private Node() {
            this.max = Integer.MIN_VALUE;
            this.min = Integer.MAX_VALUE;
        }

        // Serializes the node as four ints: low, high, min, max.
        public void store(DataOutputStream dos) throws IOException {
            dos.writeInt(interval.getLow());
            dos.writeInt(interval.getHigh());
            dos.writeInt(min);
            dos.writeInt(max);
        }

        public Node(Interval interval) {
            this();
            this.parent = NIL;
            this.left = NIL;
            this.right = NIL;
            this.interval = interval;
            this.color = RED;
        }

        // Shared sentinel; all of its links point back to itself.
        static IntervalTree.Node NIL;

        static {
            NIL = new IntervalTree.Node();
            NIL.color = BLACK;
            NIL.parent = NIL;
            NIL.left = NIL;
            NIL.right = NIL;
        }

        public boolean isNull() {
            return this == NIL;
        }

        public String toString() {
            if (this == NIL) {
                return "nil";
            }
            StringBuffer buf = new StringBuffer();
            _toString(buf);
            return buf.toString();
        }

        // Pre-order dump of "interval -> leftChildInterval, rightChildInterval" lines.
        public void _toString(StringBuffer buf) {
            if (this == NIL) {
                buf.append("nil");
                return;
            }
            buf.append(this.interval + " -> " + this.left.interval + ", " + this.right.interval);
            buf.append("\n");
            this.left._toString(buf);
            this.right._toString(buf);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.common.to;
import java.util.HashSet;
import java.util.Set;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.syncope.common.AbstractBaseBean;
import org.apache.syncope.common.types.ConnConfProperty;
import org.apache.syncope.common.types.PropagationMode;
import org.apache.syncope.common.types.TraceLevel;
/**
 * Transfer object describing an external resource: the connector backing it,
 * its attribute mappings, propagation settings, policies and trace levels.
 */
@XmlRootElement(name = "resource")
@XmlType
public class ResourceTO extends AbstractBaseBean {

    private static final long serialVersionUID = -9193551354041698963L;

    /**
     * The resource identifier is the name.
     */
    private String name;

    /**
     * The resource type is identified by the associated connector.
     */
    private Long connectorId;

    /**
     * Convenience information: display name for the connector id.
     */
    private String connectorDisplayName;

    // Attribute mapping; "u" prefix presumably means user-side -- confirm
    // against the mapping consumers.
    private MappingTO umapping;

    // Attribute mapping; "r" prefix presumably means role-side -- confirm
    // against the mapping consumers.
    private MappingTO rmapping;

    private boolean propagationPrimary;

    private int propagationPriority;

    private boolean randomPwdIfNotProvided;

    private PropagationMode propagationMode;

    private boolean enforceMandatoryCondition;

    // Trace levels for the create/update/delete/sync operations; all default
    // to TraceLevel.ALL (see constructor).
    private TraceLevel createTraceLevel;

    private TraceLevel updateTraceLevel;

    private TraceLevel deleteTraceLevel;

    private TraceLevel syncTraceLevel;

    // Policy references by id; null means no policy assigned.
    private Long passwordPolicy;

    private Long accountPolicy;

    private Long syncPolicy;

    private Set<ConnConfProperty> connConfProperties;

    // Sync tokens; "u"/"r" prefixes mirror umapping/rmapping above.
    private String usyncToken;

    private String rsyncToken;

    private String propagationActionsClassName;

    /**
     * Default constructor; initializes the property set and sensible
     * defaults (two-phase propagation, priority 0, full tracing).
     */
    public ResourceTO() {
        super();

        connConfProperties = new HashSet<ConnConfProperty>();
        propagationMode = PropagationMode.TWO_PHASES;
        propagationPriority = 0;

        createTraceLevel = TraceLevel.ALL;
        updateTraceLevel = TraceLevel.ALL;
        deleteTraceLevel = TraceLevel.ALL;
        syncTraceLevel = TraceLevel.ALL;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public boolean isEnforceMandatoryCondition() {
        return enforceMandatoryCondition;
    }

    public void setEnforceMandatoryCondition(boolean enforceMandatoryCondition) {
        this.enforceMandatoryCondition = enforceMandatoryCondition;
    }

    public Long getConnectorId() {
        return connectorId;
    }

    public void setConnectorId(Long connectorId) {
        this.connectorId = connectorId;
    }

    public String getConnectorDisplayName() {
        return connectorDisplayName;
    }

    public void setConnectorDisplayName(String connectorDisplayName) {
        this.connectorDisplayName = connectorDisplayName;
    }

    public MappingTO getUmapping() {
        return umapping;
    }

    public void setUmapping(MappingTO umapping) {
        this.umapping = umapping;
    }

    public MappingTO getRmapping() {
        return rmapping;
    }

    public void setRmapping(MappingTO rmapping) {
        this.rmapping = rmapping;
    }

    public boolean isPropagationPrimary() {
        return propagationPrimary;
    }

    public void setPropagationPrimary(boolean propagationPrimary) {
        this.propagationPrimary = propagationPrimary;
    }

    public int getPropagationPriority() {
        return propagationPriority;
    }

    public void setPropagationPriority(int propagationPriority) {
        this.propagationPriority = propagationPriority;
    }

    public boolean isRandomPwdIfNotProvided() {
        return randomPwdIfNotProvided;
    }

    public void setRandomPwdIfNotProvided(boolean randomPwdIfNotProvided) {
        this.randomPwdIfNotProvided = randomPwdIfNotProvided;
    }

    public PropagationMode getPropagationMode() {
        return propagationMode;
    }

    public void setPropagationMode(PropagationMode propagationMode) {
        this.propagationMode = propagationMode;
    }

    public TraceLevel getCreateTraceLevel() {
        return createTraceLevel;
    }

    public void setCreateTraceLevel(TraceLevel createTraceLevel) {
        this.createTraceLevel = createTraceLevel;
    }

    public TraceLevel getDeleteTraceLevel() {
        return deleteTraceLevel;
    }

    public void setDeleteTraceLevel(TraceLevel deleteTraceLevel) {
        this.deleteTraceLevel = deleteTraceLevel;
    }

    public TraceLevel getUpdateTraceLevel() {
        return updateTraceLevel;
    }

    public void setUpdateTraceLevel(TraceLevel updateTraceLevel) {
        this.updateTraceLevel = updateTraceLevel;
    }

    public Long getPasswordPolicy() {
        return passwordPolicy;
    }

    public void setPasswordPolicy(Long passwordPolicy) {
        this.passwordPolicy = passwordPolicy;
    }

    public Long getAccountPolicy() {
        return accountPolicy;
    }

    public void setAccountPolicy(Long accountPolicy) {
        this.accountPolicy = accountPolicy;
    }

    public Long getSyncPolicy() {
        return syncPolicy;
    }

    public void setSyncPolicy(Long syncPolicy) {
        this.syncPolicy = syncPolicy;
    }

    public Set<ConnConfProperty> getConnConfProperties() {
        return connConfProperties;
    }

    // NOTE(review): setter name does not follow the getConnConfProperties()
    // getter (JavaBean pair would be setConnConfProperties); renaming would
    // break existing callers and serialization, so it is kept as-is.
    public void setConnectorConfigurationProperties(final Set<ConnConfProperty> connConfProperties) {
        this.connConfProperties = connConfProperties;
    }

    public TraceLevel getSyncTraceLevel() {
        return syncTraceLevel;
    }

    public void setSyncTraceLevel(final TraceLevel syncTraceLevel) {
        this.syncTraceLevel = syncTraceLevel;
    }

    public String getUsyncToken() {
        return usyncToken;
    }

    public void setUsyncToken(final String syncToken) {
        this.usyncToken = syncToken;
    }

    public String getRsyncToken() {
        return rsyncToken;
    }

    public void setRsyncToken(final String syncToken) {
        this.rsyncToken = syncToken;
    }

    public String getPropagationActionsClassName() {
        return propagationActionsClassName;
    }

    public void setPropagationActionsClassName(final String propagationActionsClassName) {
        this.propagationActionsClassName = propagationActionsClassName;
    }
}
| |
// Copyright 2005 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry.services.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.hivemind.ApplicationRuntimeException;
import org.apache.hivemind.ErrorLog;
import org.apache.hivemind.Location;
import org.apache.hivemind.test.HiveMindTestCase;
/**
* Tests for {@link org.apache.tapestry.services.impl.InfrastructureImpl}.
*
* @author Howard M. Lewis Ship
* @since 4.0
*/
public class TestInfrastructure extends HiveMindTestCase
{
private static class DeferredObjectFixture implements DeferredObject
{
private Object _object;
private Location _location;
public DeferredObjectFixture(Object object, Location location)
{
_object = object;
_location = location;
}
public Location getLocation()
{
return _location;
}
public Object getObject()
{
return _object;
}
}
private InfrastructureContribution newContribution(String propertyName, String mode,
Object object)
{
return newContribution(propertyName, mode, object, null);
}
private InfrastructureContribution newContribution(String propertyName, String mode,
Object object, Location location)
{
DeferredObject deferred = new DeferredObjectFixture(object, location);
InfrastructureContribution ic = new InfrastructureContribution();
ic.setDeferredObject(deferred);
ic.setProperty(propertyName);
ic.setMode(mode);
ic.setLocation(location);
return ic;
}
public void testGetPropertyUninitialized()
{
InfrastructureImpl infra = new InfrastructureImpl();
try
{
infra.getProperty("foo");
unreachable();
}
catch (IllegalStateException ex)
{
assertEquals(ImplMessages.infrastructureNotInitialized(), ex.getMessage());
}
}
public void testGetNullProperty()
{
InfrastructureImpl infra = new InfrastructureImpl();
infra.setNormalContributions(Collections.EMPTY_LIST);
infra.setOverrideContributions(Collections.EMPTY_LIST);
infra.initialize("test");
try
{
infra.getProperty("fred");
unreachable();
}
catch (ApplicationRuntimeException ex)
{
assertEquals(ImplMessages.missingInfrastructureProperty("fred"), ex.getMessage());
}
}
public void testReinitalize()
{
InfrastructureImpl infra = new InfrastructureImpl();
infra.setNormalContributions(Collections.EMPTY_LIST);
infra.setOverrideContributions(Collections.EMPTY_LIST);
infra.initialize("ONE");
try
{
infra.initialize("TWO");
unreachable();
}
catch (IllegalStateException ex)
{
assertEquals(ImplMessages.infrastructureAlreadyInitialized("TWO", "ONE"), ex
.getMessage());
}
}
/**
* Test that a contribution for a mode quietly overrides a contribution for the same property
* that does not specify a mode.
*/
public void testModeOverridesNonMode()
{
Object fredModal = new Object();
Object plainFred = new Object();
InfrastructureImpl infra = new InfrastructureImpl();
List l = new ArrayList();
l.add(newContribution("fred", "bedrock", fredModal));
l.add(newContribution("fred", null, plainFred));
infra.setNormalContributions(l);
infra.setOverrideContributions(Collections.EMPTY_LIST);
infra.initialize("bedrock");
assertSame(fredModal, infra.getProperty("fred"));
}
public void testWrongModeIgnored()
{
Object fredModal = new Object();
Object wrongFred = new Object();
InfrastructureImpl infra = new InfrastructureImpl();
List l = new ArrayList();
l.add(newContribution("fred", "bedrock", fredModal));
l.add(newContribution("fred", "shale", wrongFred));
infra.setNormalContributions(l);
infra.setOverrideContributions(Collections.EMPTY_LIST);
infra.initialize("bedrock");
assertSame(fredModal, infra.getProperty("fred"));
}
/**
* Test that override contributions trump contributions from the normal path.
*/
public void testOverrides()
{
Object normalFred = new Object();
Object overrideFred = new Object();
InfrastructureImpl infra = new InfrastructureImpl();
infra.setNormalContributions(Collections.singletonList(newContribution(
"fred",
null,
normalFred)));
infra.setOverrideContributions(Collections.singletonList(newContribution(
"fred",
null,
overrideFred)));
infra.initialize("bedrock");
assertSame(overrideFred, infra.getProperty("fred"));
}
/**
 * Two contributions for the same property and mode: the first one wins,
 * and the conflict is reported through the error log.
 */
public void testDuplicate()
{
ErrorLog log = (ErrorLog) newMock(ErrorLog.class);
Location l1 = fabricateLocation(99);
Location l2 = fabricateLocation(132);
Object fredModal = new Object();
Object duplicateFred = new Object();
List l = new ArrayList();
l.add(newContribution("fred", "bedrock", fredModal, l1));
InfrastructureContribution conflict = newContribution("fred", "bedrock", duplicateFred, l2);
l.add(conflict);
// Train the mock: the duplicate must be reported against the second
// contribution's location, referencing the first (winning) location.
log.error(ImplMessages.duplicateInfrastructureContribution(conflict, l1), l2, null);
replayControls();
InfrastructureImpl infra = new InfrastructureImpl();
infra.setNormalContributions(l);
infra.setOverrideContributions(Collections.EMPTY_LIST);
infra.setErrorLog(log);
infra.initialize("bedrock");
// The first-registered contribution wins over the duplicate.
assertSame(fredModal, infra.getProperty("fred"));
verifyControls();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql2rel;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelCollations;
import org.apache.calcite.rel.RelDistribution;
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.core.Calc;
import org.apache.calcite.rel.core.CorrelationId;
import org.apache.calcite.rel.core.Exchange;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.core.Join;
import org.apache.calcite.rel.core.JoinRelType;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.RelFactories;
import org.apache.calcite.rel.core.SetOp;
import org.apache.calcite.rel.core.Sort;
import org.apache.calcite.rel.core.SortExchange;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rel.logical.LogicalTableFunctionScan;
import org.apache.calcite.rel.logical.LogicalTableModify;
import org.apache.calcite.rel.logical.LogicalValues;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeImpl;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexCorrelVariable;
import org.apache.calcite.rex.RexDynamicParam;
import org.apache.calcite.rex.RexFieldAccess;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexPermuteInputsShuttle;
import org.apache.calcite.rex.RexProgram;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.rex.RexVisitor;
import org.apache.calcite.sql.SqlExplainFormat;
import org.apache.calcite.sql.SqlExplainLevel;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.util.Bug;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.ReflectUtil;
import org.apache.calcite.util.ReflectiveVisitor;
import org.apache.calcite.util.Util;
import org.apache.calcite.util.mapping.IntPair;
import org.apache.calcite.util.mapping.Mapping;
import org.apache.calcite.util.mapping.MappingType;
import org.apache.calcite.util.mapping.Mappings;
import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/**
* Transformer that walks over a tree of relational expressions, replacing each
* {@link RelNode} with a 'slimmed down' relational expression that projects
* only the columns required by its consumer.
*
* <p>Uses multi-methods to fire the right rule for each type of relational
* expression. This allows the transformer to be extended without having to
* add a new method to RelNode, and without requiring a collection of rule
* classes scattered to the four winds.
*
* <p>REVIEW: jhyde, 2009/7/28: Is sql2rel the correct package for this class?
* Trimming fields is not an essential part of SQL-to-Rel translation, and
* arguably belongs in the optimization phase. But this transformer does not
* obey the usual pattern for planner rules; it is difficult to do so, because
* each {@link RelNode} needs to return a different set of fields after
* trimming.
*
* <p>TODO: Change 2nd arg of the {@link #trimFields} method from BitSet to
* Mapping. Sometimes it helps the consumer if you return the columns in a
* particular order. For instance, it may avoid a project at the top of the
* tree just for reordering. Could ease the transition by writing methods that
* convert BitSet to Mapping and vice versa.
*/
public class RelFieldTrimmer implements ReflectiveVisitor {
//~ Static fields/initializers ---------------------------------------------
//~ Instance fields --------------------------------------------------------
private final ReflectUtil.MethodDispatcher<TrimResult> trimFieldsDispatcher;
private final RelBuilder relBuilder;
//~ Constructors -----------------------------------------------------------
/**
 * Creates a RelFieldTrimmer.
 *
 * @param validator Validator (currently discarded; retained in the
 *                  signature because it may be useful one day)
 * @param relBuilder Builder used to construct the slimmed-down relational
 *                   expressions produced by the trimming rules
 */
public RelFieldTrimmer(@Nullable SqlValidator validator, RelBuilder relBuilder) {
Util.discard(validator); // may be useful one day
this.relBuilder = relBuilder;
// Multi-method dispatcher: routes each RelNode subtype to the most
// specific "trimFields" overload declared on this class or a subclass.
@SuppressWarnings("argument.type.incompatible")
ReflectUtil.MethodDispatcher<TrimResult> dispatcher =
ReflectUtil.createMethodDispatcher(
TrimResult.class,
this,
"trimFields",
RelNode.class,
ImmutableBitSet.class,
Set.class);
this.trimFieldsDispatcher = dispatcher;
}
/**
 * Creates a RelFieldTrimmer from individual relational-factory instances,
 * bundling them into a {@link RelBuilder} internally.
 *
 * @deprecated Use {@link #RelFieldTrimmer(SqlValidator, RelBuilder)}
 */
@Deprecated // to be removed before 2.0
public RelFieldTrimmer(@Nullable SqlValidator validator,
RelOptCluster cluster,
RelFactories.ProjectFactory projectFactory,
RelFactories.FilterFactory filterFactory,
RelFactories.JoinFactory joinFactory,
RelFactories.SortFactory sortFactory,
RelFactories.AggregateFactory aggregateFactory,
RelFactories.SetOpFactory setOpFactory) {
this(validator,
RelBuilder.proto(projectFactory, filterFactory, joinFactory,
sortFactory, aggregateFactory, setOpFactory)
.create(cluster, null));
}
//~ Methods ----------------------------------------------------------------
/**
 * Trims unused fields from a relational expression.
 *
 * <p>We presume that all fields of the relational expression are wanted by
 * its consumer, so only trim fields that are not used within the tree.
 *
 * @param root Root node of relational expression
 * @return Trimmed relational expression
 */
public RelNode trim(RelNode root) {
  final int fieldCount = root.getRowType().getFieldCount();
  final Set<RelDataTypeField> noExtraFields = Collections.emptySet();
  final TrimResult trimmed =
      dispatchTrimFields(root, ImmutableBitSet.range(fieldCount),
          noExtraFields);
  // Every root field counts as used, so trimming must not renumber them.
  if (!trimmed.right.isIdentity()) {
    throw new IllegalArgumentException();
  }
  if (SqlToRelConverter.SQL2REL_LOGGER.isDebugEnabled()) {
    SqlToRelConverter.SQL2REL_LOGGER.debug(
        RelOptUtil.dumpPlan("Plan after trimming unused fields",
            trimmed.left, SqlExplainFormat.TEXT,
            SqlExplainLevel.EXPPLAN_ATTRIBUTES));
  }
  return trimmed.left;
}
/**
 * Trims the fields of an input relational expression.
 *
 * @param rel Relational expression
 * @param input Input relational expression, whose fields to trim
 * @param fieldsUsed Bitmap of fields needed by the consumer
 * @param extraFields Fields that the consumer requires on demand, beyond
 *                    those in the input's declared row type
 * @return New relational expression and its field mapping
 */
protected TrimResult trimChild(
RelNode rel,
RelNode input,
final ImmutableBitSet fieldsUsed,
Set<RelDataTypeField> extraFields) {
final ImmutableBitSet.Builder fieldsUsedBuilder = fieldsUsed.rebuild();
// Fields that define the collation cannot be discarded.
final RelMetadataQuery mq = rel.getCluster().getMetadataQuery();
final ImmutableList<RelCollation> collations = mq.collations(input);
if (collations != null) {
for (RelCollation collation : collations) {
for (RelFieldCollation fieldCollation : collation.getFieldCollations()) {
fieldsUsedBuilder.set(fieldCollation.getFieldIndex());
}
}
}
// Correlating variables are a means for other relational expressions to use
// fields.
for (final CorrelationId correlation : rel.getVariablesSet()) {
rel.accept(
new CorrelationReferenceFinder() {
@Override protected RexNode handle(RexFieldAccess fieldAccess) {
final RexCorrelVariable v =
(RexCorrelVariable) fieldAccess.getReferenceExpr();
if (v.id.equals(correlation)) {
// A field reachable through this correlation must survive trimming.
fieldsUsedBuilder.set(fieldAccess.getField().getIndex());
}
return fieldAccess;
}
});
}
return dispatchTrimFields(input, fieldsUsedBuilder.build(), extraFields);
}
/**
 * Trims a child relational expression, then adds back a dummy project to
 * restore the fields that were removed.
 *
 * <p>Sounds pointless? It causes unused fields to be removed
 * further down the tree (towards the leaves), but it ensures that the
 * consuming relational expression continues to see the same fields.
 *
 * @param rel Relational expression
 * @param input Input relational expression, whose fields to trim
 * @param fieldsUsed Bitmap of fields needed by the consumer
 * @param extraFields Fields that the consumer requires on demand
 * @return New relational expression and its field mapping
 */
protected TrimResult trimChildRestore(
RelNode rel,
RelNode input,
ImmutableBitSet fieldsUsed,
Set<RelDataTypeField> extraFields) {
TrimResult trimResult = trimChild(rel, input, fieldsUsed, extraFields);
// Identity mapping: nothing was trimmed, so there is nothing to restore.
if (trimResult.right.isIdentity()) {
return trimResult;
}
final RelDataType rowType = input.getRowType();
List<RelDataTypeField> fieldList = rowType.getFieldList();
final List<RexNode> exprList = new ArrayList<>();
final List<String> nameList = rowType.getFieldNames();
RexBuilder rexBuilder = rel.getCluster().getRexBuilder();
assert trimResult.right.getSourceCount() == fieldList.size();
// Rebuild the original field list: surviving fields become references into
// the trimmed child; removed fields are padded with zero literals.
for (int i = 0; i < fieldList.size(); i++) {
int source = trimResult.right.getTargetOpt(i);
RelDataTypeField field = fieldList.get(i);
exprList.add(
source < 0
? rexBuilder.makeZeroLiteral(field.getType())
: rexBuilder.makeInputRef(field.getType(), source));
}
relBuilder.push(trimResult.left)
.project(exprList, nameList);
return result(relBuilder.build(),
Mappings.createIdentity(fieldList.size()));
}
/**
 * Invokes {@link #trimFields}, or the appropriate method for the type
 * of the rel parameter, using multi-method dispatch.
 *
 * @param rel Relational expression
 * @param fieldsUsed Bitmap of fields needed by the consumer
 * @param extraFields Fields that the consumer requires on demand
 * @return New relational expression and its field mapping
 */
protected final TrimResult dispatchTrimFields(
RelNode rel,
ImmutableBitSet fieldsUsed,
Set<RelDataTypeField> extraFields) {
final TrimResult trimResult =
trimFieldsDispatcher.invoke(rel, fieldsUsed, extraFields);
final RelNode newRel = trimResult.left;
final Mapping mapping = trimResult.right;
final int fieldCount = rel.getRowType().getFieldCount();
// The mapping's source side must cover every field of the original rel.
assert mapping.getSourceCount() == fieldCount
: "source: " + mapping.getSourceCount() + " != " + fieldCount;
final int newFieldCount = newRel.getRowType().getFieldCount();
// The trimmed rel's field count is the mapping's targets plus any extra
// (on-demand) fields that were requested.
assert mapping.getTargetCount() + extraFields.size() == newFieldCount
|| Bug.TODO_FIXED
: "target: " + mapping.getTargetCount()
+ " + " + extraFields.size()
+ " != " + newFieldCount;
if (Bug.TODO_FIXED) {
assert newFieldCount > 0 : "rel has no fields after trim: " + rel;
}
// If the rule produced an equivalent rel, prefer the original instance.
if (newRel.equals(rel)) {
return result(rel, mapping);
}
return trimResult;
}
/**
 * Wraps a trimmed relational expression and its field mapping in a
 * {@link TrimResult}, first rewriting any field accesses on correlation
 * variables declared by {@code r} so they refer to the trimmed
 * (re-numbered) row type.
 *
 * @param r Trimmed relational expression
 * @param mapping Mapping from the original fields to the fields of {@code r}
 * @return Trim result pairing {@code r} with {@code mapping}
 */
protected TrimResult result(RelNode r, final Mapping mapping) {
final RexBuilder rexBuilder = relBuilder.getRexBuilder();
for (final CorrelationId correlation : r.getVariablesSet()) {
r = r.accept(
new CorrelationReferenceFinder() {
@Override protected RexNode handle(RexFieldAccess fieldAccess) {
final RexCorrelVariable v =
(RexCorrelVariable) fieldAccess.getReferenceExpr();
// Only rewrite accesses to this correlation whose variable still
// carries the pre-trim field count (i.e. not yet rewritten).
if (v.id.equals(correlation)
&& v.getType().getFieldCount() == mapping.getSourceCount()) {
final int old = fieldAccess.getField().getIndex();
final int new_ = mapping.getTarget(old);
// Rebuild the correlation variable's row type from the
// surviving fields, in target order.
final RelDataTypeFactory.Builder typeBuilder =
relBuilder.getTypeFactory().builder();
for (int target : Util.range(mapping.getTargetCount())) {
typeBuilder.add(
v.getType().getFieldList().get(mapping.getSource(target)));
}
final RexNode newV =
rexBuilder.makeCorrel(typeBuilder.build(), v.id);
if (old != new_) {
return rexBuilder.makeFieldAccess(newV, new_);
}
}
return fieldAccess;
}
});
}
return new TrimResult(r, mapping);
}
/**
 * Visit method, per {@link org.apache.calcite.util.ReflectiveVisitor}.
 *
 * <p>This method is invoked reflectively, so there may not be any apparent
 * calls to it. The class (or derived classes) may contain overloads of
 * this method with more specific types for the {@code rel} parameter.
 *
 * <p>Returns a pair: the relational expression created, and the mapping
 * between the original fields and the fields of the newly created
 * relational expression.
 *
 * @param rel Relational expression
 * @param fieldsUsed Fields needed by the consumer
 * @return relational expression and mapping
 */
public TrimResult trimFields(
    RelNode rel,
    ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields) {
  // Fallback rule: no trimming knowledge for this node type, so hand the
  // expression back untouched with an identity mapping.
  Util.discard(fieldsUsed);
  final int fieldCount = rel.getRowType().getFieldCount();
  return result(rel, Mappings.createIdentity(fieldCount));
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalCalc}.
 */
public TrimResult trimFields(
Calc calc,
ImmutableBitSet fieldsUsed,
Set<RelDataTypeField> extraFields) {
final RexProgram rexProgram = calc.getProgram();
// Expand local refs so projects and condition reference input fields directly.
final List<RexNode> projs = Util.transform(rexProgram.getProjectList(),
rexProgram::expandLocalRef);
RexNode conditionExpr = null;
if (rexProgram.getCondition() != null) {
final List<RexNode> filter = Util.transform(
ImmutableList.of(
rexProgram.getCondition()), rexProgram::expandLocalRef);
assert filter.size() == 1;
conditionExpr = filter.get(0);
}
final RelDataType rowType = calc.getRowType();
final int fieldCount = rowType.getFieldCount();
final RelNode input = calc.getInput();
// Collect the input fields referenced by the used projects and condition.
final Set<RelDataTypeField> inputExtraFields =
new HashSet<>(extraFields);
RelOptUtil.InputFinder inputFinder =
new RelOptUtil.InputFinder(inputExtraFields);
for (Ord<RexNode> ord : Ord.zip(projs)) {
if (fieldsUsed.get(ord.i)) {
ord.e.accept(inputFinder);
}
}
if (conditionExpr != null) {
conditionExpr.accept(inputFinder);
}
ImmutableBitSet inputFieldsUsed = inputFinder.build();
// Create input with trimmed columns.
TrimResult trimResult =
trimChild(calc, input, inputFieldsUsed, inputExtraFields);
RelNode newInput = trimResult.left;
final Mapping inputMapping = trimResult.right;
// If the input is unchanged, and we need to project all columns,
// there's nothing we can do.
if (newInput == input
&& fieldsUsed.cardinality() == fieldCount) {
return result(calc, Mappings.createIdentity(fieldCount));
}
// Some parts of the system can't handle rows with zero fields, so
// pretend that one field is used.
if (fieldsUsed.cardinality() == 0 && rexProgram.getCondition() == null) {
return dummyProject(fieldCount, newInput);
}
// Build new project expressions, and populate the mapping.
final List<RexNode> newProjects = new ArrayList<>();
final RexVisitor<RexNode> shuttle =
new RexPermuteInputsShuttle(
inputMapping, newInput);
final Mapping mapping =
Mappings.create(
MappingType.INVERSE_SURJECTION,
fieldCount,
fieldsUsed.cardinality());
for (Ord<RexNode> ord : Ord.zip(projs)) {
if (fieldsUsed.get(ord.i)) {
mapping.set(ord.i, newProjects.size());
RexNode newProjectExpr = ord.e.accept(shuttle);
newProjects.add(newProjectExpr);
}
}
final RelDataType newRowType =
RelOptUtil.permute(calc.getCluster().getTypeFactory(), rowType,
mapping);
final RelNode newInputRelNode = relBuilder.push(newInput).build();
RexNode newConditionExpr = null;
if (conditionExpr != null) {
newConditionExpr = conditionExpr.accept(shuttle);
}
// Reassemble the program against the trimmed input and permuted row type.
final RexProgram newRexProgram = RexProgram.create(newInputRelNode.getRowType(),
newProjects, newConditionExpr, newRowType.getFieldNames(),
newInputRelNode.getCluster().getRexBuilder());
final Calc newCalc = calc.copy(calc.getTraitSet(), newInputRelNode, newRexProgram);
return result(newCalc, mapping);
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalProject}.
 */
public TrimResult trimFields(
Project project,
ImmutableBitSet fieldsUsed,
Set<RelDataTypeField> extraFields) {
final RelDataType rowType = project.getRowType();
final int fieldCount = rowType.getFieldCount();
final RelNode input = project.getInput();
// Which fields are required from the input?
final Set<RelDataTypeField> inputExtraFields =
new LinkedHashSet<>(extraFields);
RelOptUtil.InputFinder inputFinder =
new RelOptUtil.InputFinder(inputExtraFields);
// Only project expressions the consumer wants contribute input usage.
for (Ord<RexNode> ord : Ord.zip(project.getProjects())) {
if (fieldsUsed.get(ord.i)) {
ord.e.accept(inputFinder);
}
}
ImmutableBitSet inputFieldsUsed = inputFinder.build();
// Create input with trimmed columns.
TrimResult trimResult =
trimChild(project, input, inputFieldsUsed, inputExtraFields);
RelNode newInput = trimResult.left;
final Mapping inputMapping = trimResult.right;
// If the input is unchanged, and we need to project all columns,
// there's nothing we can do.
if (newInput == input
&& fieldsUsed.cardinality() == fieldCount) {
return result(project, Mappings.createIdentity(fieldCount));
}
// Some parts of the system can't handle rows with zero fields, so
// pretend that one field is used.
if (fieldsUsed.cardinality() == 0) {
return dummyProject(fieldCount, newInput, project);
}
// Build new project expressions, and populate the mapping.
final List<RexNode> newProjects = new ArrayList<>();
final RexVisitor<RexNode> shuttle =
new RexPermuteInputsShuttle(
inputMapping, newInput);
final Mapping mapping =
Mappings.create(
MappingType.INVERSE_SURJECTION,
fieldCount,
fieldsUsed.cardinality());
for (Ord<RexNode> ord : Ord.zip(project.getProjects())) {
if (fieldsUsed.get(ord.i)) {
mapping.set(ord.i, newProjects.size());
RexNode newProjectExpr = ord.e.accept(shuttle);
newProjects.add(newProjectExpr);
}
}
final RelDataType newRowType =
RelOptUtil.permute(project.getCluster().getTypeFactory(), rowType,
mapping);
relBuilder.push(newInput);
relBuilder.project(newProjects, newRowType.getFieldNames());
// Carry any hints from the original project over to the rebuilt one.
final RelNode newProject = RelOptUtil.propagateRelHints(project, relBuilder.build());
return result(newProject, mapping);
}
/** Creates a project with a dummy column, to protect the parts of the system
 * that cannot handle a relational expression with no columns.
 *
 * <p>No hint propagation is performed; see
 * {@link #dummyProject(int, RelNode, RelNode)} for the variant that
 * propagates hints from a source node.
 *
 * @param fieldCount Number of fields in the original relational expression
 * @param input Trimmed input
 * @return Dummy project
 */
protected TrimResult dummyProject(int fieldCount, RelNode input) {
return dummyProject(fieldCount, input, null);
}
/** Creates a project with a dummy column, to protect the parts of the system
 * that cannot handle a relational expression with no columns.
 *
 * @param fieldCount Number of fields in the original relational expression
 * @param input Trimmed input
 * @param originalRelNode Source RelNode for hint propagation (or null if no propagation needed)
 * @return Dummy project
 */
protected TrimResult dummyProject(int fieldCount, RelNode input,
    @Nullable RelNode originalRelNode) {
  final RelOptCluster cluster = input.getCluster();
  final Mapping mapping =
      Mappings.create(MappingType.INVERSE_SURJECTION, fieldCount, 1);
  if (input.getRowType().getFieldCount() == 1) {
    // Input already has one field (and may in fact be a dummy project we
    // created for the child). We can't do better.
    return result(input, mapping);
  }
  final RexLiteral zero =
      cluster.getRexBuilder().makeExactLiteral(BigDecimal.ZERO);
  RelNode project = relBuilder.push(input)
      .project(ImmutableList.of(zero), ImmutableList.of("DUMMY"))
      .build();
  if (originalRelNode != null) {
    project = RelOptUtil.propagateRelHints(originalRelNode, project);
  }
  return result(project, mapping);
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalFilter}.
 */
public TrimResult trimFields(
Filter filter,
ImmutableBitSet fieldsUsed,
Set<RelDataTypeField> extraFields) {
final RelDataType rowType = filter.getRowType();
final int fieldCount = rowType.getFieldCount();
final RexNode conditionExpr = filter.getCondition();
final RelNode input = filter.getInput();
// We use the fields used by the consumer, plus any fields used in the
// filter.
final Set<RelDataTypeField> inputExtraFields =
new LinkedHashSet<>(extraFields);
RelOptUtil.InputFinder inputFinder =
new RelOptUtil.InputFinder(inputExtraFields, fieldsUsed);
conditionExpr.accept(inputFinder);
final ImmutableBitSet inputFieldsUsed = inputFinder.build();
// Create input with trimmed columns.
TrimResult trimResult =
trimChild(filter, input, inputFieldsUsed, inputExtraFields);
RelNode newInput = trimResult.left;
final Mapping inputMapping = trimResult.right;
// If the input is unchanged, and we need to project all columns,
// there's nothing we can do.
if (newInput == input
&& fieldsUsed.cardinality() == fieldCount) {
return result(filter, Mappings.createIdentity(fieldCount));
}
// Rewrite the condition so its input refs match the trimmed child.
final RexVisitor<RexNode> shuttle =
new RexPermuteInputsShuttle(inputMapping, newInput);
RexNode newConditionExpr =
conditionExpr.accept(shuttle);
// Build new filter with trimmed input and condition.
relBuilder.push(newInput)
.filter(filter.getVariablesSet(), newConditionExpr);
// The result has the same mapping as the input gave us. Sometimes we
// return fields that the consumer didn't ask for, because the filter
// needs them for its condition.
return result(relBuilder.build(), inputMapping);
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.core.Sort}.
 */
public TrimResult trimFields(
Sort sort,
ImmutableBitSet fieldsUsed,
Set<RelDataTypeField> extraFields) {
final RelDataType rowType = sort.getRowType();
final int fieldCount = rowType.getFieldCount();
final RelCollation collation = sort.getCollation();
final RelNode input = sort.getInput();
// We use the fields used by the consumer, plus any fields used as sort
// keys.
final ImmutableBitSet.Builder inputFieldsUsed = fieldsUsed.rebuild();
for (RelFieldCollation field : collation.getFieldCollations()) {
inputFieldsUsed.set(field.getFieldIndex());
}
// Create input with trimmed columns.
final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
TrimResult trimResult =
trimChild(sort, input, inputFieldsUsed.build(), inputExtraFields);
RelNode newInput = trimResult.left;
final Mapping inputMapping = trimResult.right;
// If the input is unchanged, and we need to project all columns,
// there's nothing we can do.
if (newInput == input
&& inputMapping.isIdentity()
&& fieldsUsed.cardinality() == fieldCount) {
return result(sort, Mappings.createIdentity(fieldCount));
}
// leave the Sort unchanged in case we have dynamic limits
if (sort.offset instanceof RexDynamicParam
|| sort.fetch instanceof RexDynamicParam) {
return result(sort, inputMapping);
}
relBuilder.push(newInput);
final int offset =
sort.offset == null ? 0 : RexLiteral.intValue(sort.offset);
final int fetch =
sort.fetch == null ? -1 : RexLiteral.intValue(sort.fetch);
// Re-express the collation in terms of the trimmed input's fields.
final ImmutableList<RexNode> fields =
relBuilder.fields(RexUtil.apply(inputMapping, collation));
relBuilder.sortLimit(offset, fetch, fields);
// The result has the same mapping as the input gave us. Sometimes we
// return fields that the consumer didn't ask for, because the sort
// needs them for its keys.
return result(relBuilder.build(), inputMapping);
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.core.Exchange}.
 */
public TrimResult trimFields(
    Exchange exchange,
    ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields) {
  final int fieldCount = exchange.getRowType().getFieldCount();
  final RelDistribution distribution = exchange.getDistribution();
  final RelNode input = exchange.getInput();

  // The distribution keys must survive trimming, in addition to whatever
  // the consumer asked for.
  final ImmutableBitSet.Builder requiredFields = fieldsUsed.rebuild();
  distribution.getKeys().forEach(requiredFields::set);

  // Create input with trimmed columns.
  final Set<RelDataTypeField> noExtraFields = Collections.emptySet();
  final TrimResult childResult =
      trimChild(exchange, input, requiredFields.build(), noExtraFields);
  final RelNode newInput = childResult.left;
  final Mapping inputMapping = childResult.right;

  // Nothing changed and every column is needed: keep the node as-is.
  if (newInput == input
      && inputMapping.isIdentity()
      && fieldsUsed.cardinality() == fieldCount) {
    return result(exchange, Mappings.createIdentity(fieldCount));
  }

  // Rebuild the exchange over the trimmed input, with re-numbered keys.
  relBuilder.push(newInput)
      .exchange(distribution.apply(inputMapping));
  return result(relBuilder.build(), inputMapping);
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.core.SortExchange}.
 */
public TrimResult trimFields(
SortExchange sortExchange,
ImmutableBitSet fieldsUsed,
Set<RelDataTypeField> extraFields) {
final RelDataType rowType = sortExchange.getRowType();
final int fieldCount = rowType.getFieldCount();
final RelCollation collation = sortExchange.getCollation();
final RelDistribution distribution = sortExchange.getDistribution();
final RelNode input = sortExchange.getInput();
// We use the fields used by the consumer, plus any fields used as sort
// or distribution keys.
final ImmutableBitSet.Builder inputFieldsUsed = fieldsUsed.rebuild();
for (RelFieldCollation field : collation.getFieldCollations()) {
inputFieldsUsed.set(field.getFieldIndex());
}
for (int keyIndex : distribution.getKeys()) {
inputFieldsUsed.set(keyIndex);
}
// Create input with trimmed columns.
final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
TrimResult trimResult =
trimChild(sortExchange, input, inputFieldsUsed.build(), inputExtraFields);
RelNode newInput = trimResult.left;
final Mapping inputMapping = trimResult.right;
// If the input is unchanged, and we need to project all columns,
// there's nothing we can do.
if (newInput == input
&& inputMapping.isIdentity()
&& fieldsUsed.cardinality() == fieldCount) {
return result(sortExchange, Mappings.createIdentity(fieldCount));
}
relBuilder.push(newInput);
// Re-express collation and distribution against the trimmed input.
RelCollation newCollation = RexUtil.apply(inputMapping, collation);
RelDistribution newDistribution = distribution.apply(inputMapping);
relBuilder.sortExchange(newDistribution, newCollation);
return result(relBuilder.build(), inputMapping);
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalJoin}.
 */
public TrimResult trimFields(
Join join,
ImmutableBitSet fieldsUsed,
Set<RelDataTypeField> extraFields) {
// The join's row is [system fields, left fields, right fields].
final int fieldCount = join.getSystemFieldList().size()
+ join.getLeft().getRowType().getFieldCount()
+ join.getRight().getRowType().getFieldCount();
final RexNode conditionExpr = join.getCondition();
final int systemFieldCount = join.getSystemFieldList().size();
// Add in fields used in the condition.
final Set<RelDataTypeField> combinedInputExtraFields =
new LinkedHashSet<>(extraFields);
RelOptUtil.InputFinder inputFinder =
new RelOptUtil.InputFinder(combinedInputExtraFields, fieldsUsed);
conditionExpr.accept(inputFinder);
final ImmutableBitSet fieldsUsedPlus = inputFinder.build();
// If no system fields are used, we can remove them.
int systemFieldUsedCount = 0;
for (int i = 0; i < systemFieldCount; ++i) {
if (fieldsUsed.get(i)) {
++systemFieldUsedCount;
}
}
final int newSystemFieldCount;
if (systemFieldUsedCount == 0) {
newSystemFieldCount = 0;
} else {
newSystemFieldCount = systemFieldCount;
}
int offset = systemFieldCount;
int changeCount = 0;
int newFieldCount = newSystemFieldCount;
final List<RelNode> newInputs = new ArrayList<>(2);
final List<Mapping> inputMappings = new ArrayList<>();
final List<Integer> inputExtraFieldCounts = new ArrayList<>();
// Trim each input (left, then right) against the slice of fieldsUsedPlus
// that falls inside that input's field range.
for (RelNode input : join.getInputs()) {
final RelDataType inputRowType = input.getRowType();
final int inputFieldCount = inputRowType.getFieldCount();
// Compute required mapping.
ImmutableBitSet.Builder inputFieldsUsed = ImmutableBitSet.builder();
for (int bit : fieldsUsedPlus) {
if (bit >= offset && bit < offset + inputFieldCount) {
inputFieldsUsed.set(bit - offset);
}
}
// If there are system fields, we automatically use the
// corresponding field in each input.
inputFieldsUsed.set(0, newSystemFieldCount);
// FIXME: We ought to collect extra fields for each input
// individually. For now, we assume that just one input has
// on-demand fields.
Set<RelDataTypeField> inputExtraFields =
RelDataTypeImpl.extra(inputRowType) == null
? Collections.emptySet()
: combinedInputExtraFields;
inputExtraFieldCounts.add(inputExtraFields.size());
TrimResult trimResult =
trimChild(join, input, inputFieldsUsed.build(), inputExtraFields);
newInputs.add(trimResult.left);
if (trimResult.left != input) {
++changeCount;
}
final Mapping inputMapping = trimResult.right;
inputMappings.add(inputMapping);
// Move offset to point to start of next input.
offset += inputFieldCount;
newFieldCount +=
inputMapping.getTargetCount() + inputExtraFields.size();
}
// Stitch the per-input mappings into one mapping over the join's full row.
Mapping mapping =
Mappings.create(
MappingType.INVERSE_SURJECTION,
fieldCount,
newFieldCount);
for (int i = 0; i < newSystemFieldCount; ++i) {
mapping.set(i, i);
}
offset = systemFieldCount;
int newOffset = newSystemFieldCount;
for (int i = 0; i < inputMappings.size(); i++) {
Mapping inputMapping = inputMappings.get(i);
for (IntPair pair : inputMapping) {
mapping.set(pair.source + offset, pair.target + newOffset);
}
offset += inputMapping.getSourceCount();
newOffset += inputMapping.getTargetCount()
+ inputExtraFieldCounts.get(i);
}
if (changeCount == 0
&& mapping.isIdentity()) {
return result(join, Mappings.createIdentity(join.getRowType().getFieldCount()));
}
// Build new join.
final RexVisitor<RexNode> shuttle =
new RexPermuteInputsShuttle(
mapping, newInputs.get(0), newInputs.get(1));
RexNode newConditionExpr =
conditionExpr.accept(shuttle);
relBuilder.push(newInputs.get(0));
relBuilder.push(newInputs.get(1));
switch (join.getJoinType()) {
case SEMI:
case ANTI:
// For SemiJoins and AntiJoins only map fields from the left-side
if (join.getJoinType() == JoinRelType.SEMI) {
relBuilder.semiJoin(newConditionExpr);
} else {
relBuilder.antiJoin(newConditionExpr);
}
// The semi/anti join's output row is only the left input (plus system
// fields), so rebuild the mapping from the left mapping alone.
Mapping inputMapping = inputMappings.get(0);
mapping = Mappings.create(MappingType.INVERSE_SURJECTION,
join.getRowType().getFieldCount(),
newSystemFieldCount + inputMapping.getTargetCount());
for (int i = 0; i < newSystemFieldCount; ++i) {
mapping.set(i, i);
}
offset = systemFieldCount;
newOffset = newSystemFieldCount;
for (IntPair pair : inputMapping) {
mapping.set(pair.source + offset, pair.target + newOffset);
}
break;
default:
relBuilder.join(join.getJoinType(), newConditionExpr);
}
// Preserve the original join's hints on the rebuilt node.
relBuilder.hints(join.getHints());
return result(relBuilder.build(), mapping);
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.core.SetOp} (Only UNION ALL is supported).
 *
 * @param setOp       set operation whose fields are to be trimmed
 * @param fieldsUsed  output fields the consumer actually reads
 * @param extraFields extra fields to be propagated to each input
 * @return trimmed set operation together with the mapping from the old
 *         output fields to the new ones
 */
public TrimResult trimFields(
    SetOp setOp,
    ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields) {
  final RelDataType rowType = setOp.getRowType();
  final int fieldCount = rowType.getFieldCount();
  // Trim fields only for UNION ALL.
  //
  // UNION | INTERSECT | INTERSECT ALL | EXCEPT | EXCEPT ALL
  // all have comparison between branches.
  // They can not be trimmed because the comparison needs
  // complete fields.
  if (!(setOp.kind == SqlKind.UNION && setOp.all)) {
    return result(setOp, Mappings.createIdentity(fieldCount));
  }
  int changeCount = 0;
  // Fennel abhors an empty row type, so pretend that the parent rel
  // wants the last field. (The last field is the least likely to be a
  // system field.)
  if (fieldsUsed.isEmpty()) {
    fieldsUsed = ImmutableBitSet.of(rowType.getFieldCount() - 1);
  }
  // Compute the desired field mapping. Give the consumer the fields they
  // want, in the order that they appear in the bitset.
  final Mapping mapping = createMapping(fieldsUsed, fieldCount);
  // Create input with trimmed columns.
  for (RelNode input : setOp.getInputs()) {
    TrimResult trimResult =
        trimChild(setOp, input, fieldsUsed, extraFields);
    // We want "mapping", the input gave us "inputMapping", compute
    // "remaining" mapping.
    //    |                   |                |
    //    |---------------- mapping ---------->|
    //    |-- inputMapping -->|                |
    //    |                   |-- remaining -->|
    //
    // For instance, suppose we have columns [a, b, c, d],
    // the consumer asked for mapping = [b, d],
    // and the transformed input has columns inputMapping = [d, a, b].
    // remaining will permute [b, d] to [d, a, b].
    Mapping remaining = Mappings.divide(mapping, trimResult.right);
    // Create a projection; does nothing if remaining is identity.
    relBuilder.push(trimResult.left);
    relBuilder.permute(remaining);
    // permute() may leave the input unchanged on the stack when the
    // mapping is identity, so compare against what is actually there.
    if (input != relBuilder.peek()) {
      ++changeCount;
    }
  }
  // If the input is unchanged, and we need to project all columns,
  // there's nothing to do.
  if (changeCount == 0
      && mapping.isIdentity()) {
    // Pop (and discard) the inputs pushed above; the original setOp
    // is returned as-is.
    for (@SuppressWarnings("unused") RelNode input : setOp.getInputs()) {
      relBuilder.build();
    }
    return result(setOp, mapping);
  }
  switch (setOp.kind) {
  case UNION:
    relBuilder.union(setOp.all, setOp.getInputs().size());
    break;
  case INTERSECT:
    relBuilder.intersect(setOp.all, setOp.getInputs().size());
    break;
  case EXCEPT:
    // EXCEPT is binary by definition.
    assert setOp.getInputs().size() == 2;
    relBuilder.minus(setOp.all);
    break;
  default:
    throw new AssertionError("unknown setOp " + setOp);
  }
  return result(relBuilder.build(), mapping);
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalAggregate}.
 *
 * @param aggregate   aggregate whose fields are to be trimmed
 * @param fieldsUsed  output fields the consumer actually reads
 * @param extraFields extra fields to be propagated (unused here; the
 *                    input is trimmed with an empty extra-field set)
 * @return trimmed aggregate together with the mapping from the old
 *         output fields to the new ones
 */
public TrimResult trimFields(
    Aggregate aggregate,
    ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields) {
  // Fields:
  //
  // | sys fields | group fields | indicator fields | agg functions |
  //
  // Two kinds of trimming:
  //
  // 1. If agg rel has system fields but none of these are used, create an
  // agg rel with no system fields.
  //
  // 2. If aggregate functions are not used, remove them.
  //
  // But group and indicator fields stay, even if they are not used.
  final RelDataType rowType = aggregate.getRowType();
  // Compute which input fields are used.
  // 1. group fields are always used
  final ImmutableBitSet.Builder inputFieldsUsed =
      aggregate.getGroupSet().rebuild();
  // 2. agg functions
  for (AggregateCall aggCall : aggregate.getAggCallList()) {
    inputFieldsUsed.addAll(aggCall.getArgList());
    // FILTER clause argument, if present.
    if (aggCall.filterArg >= 0) {
      inputFieldsUsed.set(aggCall.filterArg);
    }
    // WITHIN DISTINCT keys, if present.
    if (aggCall.distinctKeys != null) {
      inputFieldsUsed.addAll(aggCall.distinctKeys);
    }
    // Fields referenced by the call's ORDER BY (WITHIN GROUP) collation.
    inputFieldsUsed.addAll(RelCollations.ordinals(aggCall.collation));
  }
  // Create input with trimmed columns.
  final RelNode input = aggregate.getInput();
  final Set<RelDataTypeField> inputExtraFields = Collections.emptySet();
  final TrimResult trimResult =
      trimChild(aggregate, input, inputFieldsUsed.build(), inputExtraFields);
  final RelNode newInput = trimResult.left;
  final Mapping inputMapping = trimResult.right;
  // We have to return group keys and (if present) indicators.
  // So, pretend that the consumer asked for them.
  final int groupCount = aggregate.getGroupSet().cardinality();
  fieldsUsed =
      fieldsUsed.union(ImmutableBitSet.range(groupCount));
  // If the input is unchanged, and we need to project all columns,
  // there's nothing to do.
  if (input == newInput
      && fieldsUsed.equals(ImmutableBitSet.range(rowType.getFieldCount()))) {
    return result(aggregate,
        Mappings.createIdentity(rowType.getFieldCount()));
  }
  // Which agg calls are used by our consumer?
  // Agg-call outputs start right after the group fields (offset j).
  int j = groupCount;
  int usedAggCallCount = 0;
  for (int i = 0; i < aggregate.getAggCallList().size(); i++) {
    if (fieldsUsed.get(j++)) {
      ++usedAggCallCount;
    }
  }
  // Offset due to the number of system fields having changed.
  Mapping mapping =
      Mappings.create(
          MappingType.INVERSE_SURJECTION,
          rowType.getFieldCount(),
          groupCount + usedAggCallCount);
  // Rewrite the group sets in terms of the trimmed input's ordinals.
  final ImmutableBitSet newGroupSet =
      Mappings.apply(inputMapping, aggregate.getGroupSet());
  final ImmutableList<ImmutableBitSet> newGroupSets =
      ImmutableList.copyOf(
          Util.transform(aggregate.getGroupSets(),
              input1 -> Mappings.apply(inputMapping, input1)));
  // Populate mapping of where to find the fields. System, group key and
  // indicator fields first.
  for (j = 0; j < groupCount; j++) {
    mapping.set(j, j);
  }
  // Now create new agg calls, and populate mapping for them.
  relBuilder.push(newInput);
  final List<RelBuilder.AggCall> newAggCallList = new ArrayList<>();
  j = groupCount;
  for (AggregateCall aggCall : aggregate.getAggCallList()) {
    if (fieldsUsed.get(j)) {
      // Kept call: its new ordinal follows the group fields and the
      // calls already kept.
      mapping.set(j, groupCount + newAggCallList.size());
      newAggCallList.add(relBuilder.aggregateCall(aggCall, inputMapping));
    }
    ++j;
  }
  if (newAggCallList.isEmpty() && newGroupSet.isEmpty()) {
    // Add a dummy call if all the column fields have been trimmed
    mapping = Mappings.create(
        MappingType.INVERSE_SURJECTION,
        mapping.getSourceCount(),
        1);
    newAggCallList.add(relBuilder.count(false, "DUMMY"));
  }
  final RelBuilder.GroupKey groupKey = relBuilder.groupKey(newGroupSet, newGroupSets);
  relBuilder.aggregate(groupKey, newAggCallList);
  // Carry the original aggregate's hints over to the rebuilt node.
  final RelNode newAggregate = RelOptUtil.propagateRelHints(aggregate, relBuilder.build());
  return result(newAggregate, mapping);
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalTableModify}.
 *
 * <p>A table modification consumes every column of its input, so the
 * consumer's requested fields are ignored and all columns are kept.
 *
 * @param modifier    table-modification node to process
 * @param fieldsUsed  ignored; all columns are always projected
 * @param extraFields ignored for the same reason
 * @return the (possibly re-parented) modifier with an identity mapping
 */
public TrimResult trimFields(
    LogicalTableModify modifier,
    ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields) {
  // The consumer's wishes are irrelevant here: modification needs all columns.
  Util.discard(fieldsUsed);
  final int fieldCount = modifier.getRowType().getFieldCount();
  final RelNode child = modifier.getInput();
  final int childFieldCount = child.getRowType().getFieldCount();
  // Trim the child while demanding every one of its fields, with no extras.
  final Set<RelDataTypeField> noExtraFields = Collections.emptySet();
  final TrimResult childResult =
      trimChild(modifier, child,
          ImmutableBitSet.range(childFieldCount), noExtraFields);
  final RelNode trimmedChild = childResult.left;
  final Mapping childMapping = childResult.right;
  if (!childMapping.isIdentity()) {
    // We asked for all fields. Can't believe that the child decided
    // to permute them!
    throw new AssertionError(
        "Expected identity mapping, got " + childMapping);
  }
  final LogicalTableModify newModifier =
      trimmedChild == child
          ? modifier
          : modifier.copy(modifier.getTraitSet(),
              Collections.singletonList(trimmedChild));
  assert newModifier.getClass() == modifier.getClass();
  // Always project all fields.
  return result(newModifier, Mappings.createIdentity(fieldCount));
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalTableFunctionScan}.
 *
 * <p>Table-function scans keep all of their fields; each input is merely
 * restored to its full column set, and an identity mapping is returned.
 *
 * @param tabFun      table-function scan to process
 * @param fieldsUsed  ignored; all fields are always projected
 * @param extraFields ignored for the same reason
 * @return the (possibly re-parented) scan with an identity mapping
 */
public TrimResult trimFields(
    LogicalTableFunctionScan tabFun,
    ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields) {
  final int fieldCount = tabFun.getRowType().getFieldCount();
  // Restore every input to its full set of columns, with no extras.
  final List<RelNode> restoredInputs = new ArrayList<>();
  for (RelNode child : tabFun.getInputs()) {
    final ImmutableBitSet allChildFields =
        ImmutableBitSet.range(child.getRowType().getFieldCount());
    final Set<RelDataTypeField> noExtraFields = Collections.emptySet();
    final TrimResult childResult =
        trimChildRestore(tabFun, child, allChildFields, noExtraFields);
    assert childResult.right.isIdentity();
    restoredInputs.add(childResult.left);
  }
  LogicalTableFunctionScan newTabFun = tabFun;
  if (!tabFun.getInputs().equals(restoredInputs)) {
    newTabFun = tabFun.copy(tabFun.getTraitSet(), restoredInputs,
        tabFun.getCall(), tabFun.getElementType(), tabFun.getRowType(),
        tabFun.getColumnMappings());
  }
  assert newTabFun.getClass() == tabFun.getClass();
  // Always project all fields.
  return result(newTabFun, Mappings.createIdentity(fieldCount));
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalValues}.
 *
 * @param values      values node to trim
 * @param fieldsUsed  output fields the consumer actually reads
 * @param extraFields extra fields requested by the consumer (unused)
 * @return trimmed values node together with the field mapping
 */
public TrimResult trimFields(
    LogicalValues values,
    ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields) {
  final RelDataType rowType = values.getRowType();
  final int fieldCount = rowType.getFieldCount();
  // Zero-column records are illegal, so a consumer that asks for no fields
  // at all still gets one: the last field, which is unlikely to be a
  // system field.
  if (fieldsUsed.isEmpty()) {
    fieldsUsed = ImmutableBitSet.range(fieldCount - 1, fieldCount);
  }
  // Nothing to trim when every field is wanted.
  if (fieldsUsed.equals(ImmutableBitSet.range(fieldCount))) {
    return result(values, Mappings.createIdentity(fieldCount));
  }
  // Keep only the requested literals in each tuple, preserving tuple order.
  final ImmutableList.Builder<ImmutableList<RexLiteral>> trimmedTuples =
      ImmutableList.builder();
  for (ImmutableList<RexLiteral> tuple : values.getTuples()) {
    final ImmutableList.Builder<RexLiteral> trimmedTuple =
        ImmutableList.builder();
    for (int field : fieldsUsed) {
      trimmedTuple.add(tuple.get(field));
    }
    trimmedTuples.add(trimmedTuple.build());
  }
  final Mapping mapping = createMapping(fieldsUsed, fieldCount);
  // Permute the row type to match the surviving fields.
  final RelDataType trimmedRowType =
      RelOptUtil.permute(values.getCluster().getTypeFactory(), rowType,
          mapping);
  final LogicalValues trimmedValues =
      LogicalValues.create(values.getCluster(), trimmedRowType,
          trimmedTuples.build());
  return result(trimmedValues, mapping);
}
/**
 * Builds an {@link MappingType#INVERSE_SURJECTION} mapping that sends each
 * used source field to a contiguous target ordinal, in bit-set order.
 *
 * @param fieldsUsed source field ordinals to keep
 * @param fieldCount total number of source fields
 * @return mapping from {@code fieldCount} sources onto the used fields
 */
protected Mapping createMapping(ImmutableBitSet fieldsUsed, int fieldCount) {
  final Mapping mapping =
      Mappings.create(MappingType.INVERSE_SURJECTION, fieldCount,
          fieldsUsed.cardinality());
  int target = 0;
  for (int source : fieldsUsed) {
    mapping.set(source, target++);
  }
  return mapping;
}
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalTableScan}.
 *
 * @param tableAccessRel table scan to trim
 * @param fieldsUsed     output fields the consumer actually reads
 * @param extraFields    extra fields requested by the consumer
 * @return trimmed scan (or a projection over it) plus the field mapping
 */
public TrimResult trimFields(
    final TableScan tableAccessRel,
    ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields) {
  final int fieldCount = tableAccessRel.getRowType().getFieldCount();
  if (fieldsUsed.equals(ImmutableBitSet.range(fieldCount))
      && extraFields.isEmpty()) {
    // if there is nothing to project or if we are projecting everything
    // then no need to introduce another RelNode
    return trimFields(
        (RelNode) tableAccessRel, fieldsUsed, extraFields);
  }
  // Let the table implement the projection itself (it may push it down).
  final RelNode newTableAccessRel =
      tableAccessRel.project(fieldsUsed, extraFields, relBuilder);
  // Some parts of the system can't handle rows with zero fields, so
  // pretend that one field is used.
  if (fieldsUsed.cardinality() == 0) {
    RelNode input = newTableAccessRel;
    if (input instanceof Project) {
      // The table has implemented the project in the obvious way - by
      // creating project with 0 fields. Strip it away, and create our own
      // project with one field.
      Project project = (Project) input;
      if (project.getRowType().getFieldCount() == 0) {
        input = project.getInput();
      }
    }
    return dummyProject(fieldCount, input);
  }
  final Mapping mapping = createMapping(fieldsUsed, fieldCount);
  return result(newTableAccessRel, mapping);
}
//~ Inner Classes ----------------------------------------------------------
/**
 * Result of an attempt to trim columns from a relational expression.
 *
 * <p>The mapping describes where to find the columns wanted by the parent
 * of the current relational expression.
 *
 * <p>The mapping is a
 * {@link org.apache.calcite.util.mapping.Mappings.SourceMapping}, which means
 * that no column can be used more than once, and some columns are not used.
 * {@code columnsUsed.getSource(i)} returns the source of the i'th output
 * field.
 *
 * <p>For example, consider the mapping for a relational expression that
 * has 4 output columns but only two are being used. The mapping
 * {2 &rarr; 1, 3 &rarr; 0} would give the following behavior:
 *
 * <ul>
 * <li>columnsUsed.getSourceCount() returns 4
 * <li>columnsUsed.getTargetCount() returns 2
 * <li>columnsUsed.getSource(0) returns 3
 * <li>columnsUsed.getSource(1) returns 2
 * <li>columnsUsed.getSource(2) throws IndexOutOfBounds
 * <li>columnsUsed.getTargetOpt(3) returns 0
 * <li>columnsUsed.getTargetOpt(0) returns -1
 * </ul>
 */
protected static class TrimResult extends Pair<RelNode, Mapping> {
  /**
   * Creates a TrimResult.
   *
   * @param left New relational expression
   * @param right Mapping of fields onto original fields
   */
  public TrimResult(RelNode left, Mapping right) {
    super(left, right);
    // The mapping must yield exactly as many fields as the new rel
    // exposes; otherwise downstream permutations would be malformed.
    assert right.getTargetCount() == left.getRowType().getFieldCount()
        : "rowType: " + left.getRowType() + ", mapping: " + right;
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package freemarker.core;
import freemarker.template.Configuration;
import freemarker.template.Template;
/**
 * A class that allows one to associate custom data with a {@link Configuration}, a {@link Template}, or
 * {@link Environment}.
 *
 * <p>This API has similar approach to that of {@link ThreadLocal} (which allows one to associate
 * custom data with a thread). With an example:</p>
 *
 * <pre>
 * // The object identity itself will serve as the attribute identifier; there's no attribute name String:
 * public static final CustomAttribute MY_ATTR = new CustomAttribute(CustomAttribute.SCOPE_CONFIGURATION);
 * ...
 * // Set the attribute in this particular Configuration object:
 * MY_ATTR.set(myAttrValue, cfg);
 * ...
 * // Read the attribute from this particular Configuration object:
 * myAttrValue = MY_ATTR.get(cfg);
 * </pre>
 */
// [2.4] Use generics; type parameter used for the type of the stored value
public class CustomAttribute {

    /**
     * Constant used in the constructor specifying that this attribute is {@link Environment}-scoped.
     */
    public static final int SCOPE_ENVIRONMENT = 0;

    /**
     * Constant used in the constructor specifying that this attribute is {@link Template}-scoped.
     */
    public static final int SCOPE_TEMPLATE = 1;

    /**
     * Constant used in the constructor specifying that this attribute is {@link Configuration}-scoped.
     */
    public static final int SCOPE_CONFIGURATION = 2;

    // We use an internal key instead of 'this' so that malicious subclasses
    // overriding equals() and hashCode() can't gain access to other attribute
    // values. That's also the reason why get() and set() are marked final.
    private final Object key = new Object();
    private final int scope;

    /**
     * Creates a new custom attribute with the specified scope
     * @param scope one of <tt>SCOPE_</tt> constants.
     */
    public CustomAttribute(int scope) {
        if (scope != SCOPE_ENVIRONMENT &&
            scope != SCOPE_TEMPLATE &&
            scope != SCOPE_CONFIGURATION) {
            throw new IllegalArgumentException();
        }
        this.scope = scope;
    }

    /**
     * This method is invoked when {@link #get()} is invoked without
     * {@link #set(Object)} being invoked before it to define the value in the
     * current scope. Override it to create the attribute value on-demand.
     * @return the initial value for the custom attribute. By default returns null.
     */
    protected Object create() {
        return null;
    }

    /**
     * Gets the attribute from the appropriate scope that's accessible through the specified {@link Environment}. If
     * the attribute has {@link #SCOPE_ENVIRONMENT} scope, it will be get from the given {@link Environment} directly.
     * If the attribute has {@link #SCOPE_TEMPLATE} scope, it will be get from the parent of the given
     * {@link Environment} (that is, from {@link Environment#getParent()}) directly. If the attribute has
     * {@link #SCOPE_CONFIGURATION} scope, it will be get from {@link Environment#getConfiguration()}.
     *
     * @throws NullPointerException
     *             If {@code env} is null
     *
     * @return The value of the attribute (possibly {@code null}), or {@code null} if the attribute doesn't exist.
     *
     * @since 2.3.22
     */
    public final Object get(Environment env) {
        return getScopeConfigurable(env).getCustomAttribute(key, this);
    }

    /**
     * Same as {@link #get(Environment)}, but uses {@link Environment#getCurrentEnvironment()} to fill the 2nd argument.
     *
     * @throws IllegalStateException
     *             If there is no current {@link Environment}, which is usually the case when the current thread isn't
     *             processing a template.
     */
    public final Object get() {
        return getScopeConfigurable(getRequiredCurrentEnvironment()).getCustomAttribute(key, this);
    }

    /**
     * Gets the value of a {@link Template}-scope attribute from the given {@link Template}.
     *
     * @throws UnsupportedOperationException
     *             If this custom attribute has different scope than {@link #SCOPE_TEMPLATE}.
     * @throws NullPointerException
     *             If {@code template} is null
     */
    public final Object get(Template template) {
        if (scope != SCOPE_TEMPLATE) {
            throw new UnsupportedOperationException("This is not a template-scope attribute");
        }
        return ((Configurable) template).getCustomAttribute(key, this);
    }

    /**
     * Same as {@link #get(Template)}, but applies to a {@link TemplateConfiguration}.
     *
     * @since 2.3.24
     */
    public Object get(TemplateConfiguration templateConfiguration) {
        if (scope != SCOPE_TEMPLATE) {
            throw new UnsupportedOperationException("This is not a template-scope attribute");
        }
        return templateConfiguration.getCustomAttribute(key, this);
    }

    /**
     * Gets the value of a {@link Configuration}-scope attribute from the given {@link Configuration}.
     *
     * @throws UnsupportedOperationException
     *             If this custom attribute has different scope than {@link #SCOPE_CONFIGURATION}.
     * @throws NullPointerException
     *             If {@code cfg} is null
     *
     * @since 2.3.22
     */
    public final Object get(Configuration cfg) {
        if (scope != SCOPE_CONFIGURATION) {
            // Fixed message: this previously said "template-scope", copied
            // from the Template-scoped getter; compare set(Object, Configuration).
            throw new UnsupportedOperationException("This is not a configuration-scope attribute");
        }
        return ((Configurable) cfg).getCustomAttribute(key, this);
    }

    /**
     * Sets the attribute inside the appropriate scope that's accessible through the specified {@link Environment}. If
     * the attribute has {@link #SCOPE_ENVIRONMENT} scope, it will be set in the given {@link Environment} directly. If
     * the attribute has {@link #SCOPE_TEMPLATE} scope, it will be set in the parent of the given {@link Environment}
     * (that is, in {@link Environment#getParent()}) directly. If the attribute has {@link #SCOPE_CONFIGURATION} scope,
     * it will be set in {@link Environment#getConfiguration()}.
     *
     * @param value
     *            The new value of the attribute. Can be {@code null}.
     *
     * @throws NullPointerException
     *             If {@code env} is null
     *
     * @since 2.3.22
     */
    public final void set(Object value, Environment env) {
        getScopeConfigurable(env).setCustomAttribute(key, value);
    }

    /**
     * Same as {@link #set(Object, Environment)}, but uses {@link Environment#getCurrentEnvironment()} to fill the 2nd
     * argument.
     *
     * @throws IllegalStateException
     *             If there is no current {@link Environment}, which is usually the case when the current thread isn't
     *             processing a template.
     */
    public final void set(Object value) {
        getScopeConfigurable(getRequiredCurrentEnvironment()).setCustomAttribute(key, value);
    }

    /**
     * Sets the value of a {@link Template}-scope attribute in the given {@link Template}.
     *
     * @param value
     *            The new value of the attribute. Can be {@code null}.
     *
     * @throws UnsupportedOperationException
     *             If this custom attribute has different scope than {@link #SCOPE_TEMPLATE}.
     * @throws NullPointerException
     *             If {@code template} is null
     */
    public final void set(Object value, Template template) {
        if (scope != SCOPE_TEMPLATE) {
            throw new UnsupportedOperationException("This is not a template-scope attribute");
        }
        ((Configurable) template).setCustomAttribute(key, value);
    }

    /**
     * Same as {@link #set(Object, Template)}, but applicable to a {@link TemplateConfiguration}.
     *
     * @since 2.3.24
     */
    public final void set(Object value, TemplateConfiguration templateConfiguration) {
        if (scope != SCOPE_TEMPLATE) {
            throw new UnsupportedOperationException("This is not a template-scope attribute");
        }
        templateConfiguration.setCustomAttribute(key, value);
    }

    /**
     * Sets the value of a {@link Configuration}-scope attribute in the given {@link Configuration}.
     *
     * @param value
     *            The new value of the attribute. Can be {@code null}.
     *
     * @throws UnsupportedOperationException
     *             If this custom attribute has different scope than {@link #SCOPE_CONFIGURATION}.
     * @throws NullPointerException
     *             If {@code cfg} is null
     *
     * @since 2.3.22
     */
    public final void set(Object value, Configuration cfg) {
        if (scope != SCOPE_CONFIGURATION) {
            throw new UnsupportedOperationException("This is not a configuration-scope attribute");
        }
        ((Configurable) cfg).setCustomAttribute(key, value);
    }

    /** Returns the current thread's {@link Environment}, or throws if there is none. */
    private Environment getRequiredCurrentEnvironment() {
        Environment c = Environment.getCurrentEnvironment();
        if (c == null) {
            throw new IllegalStateException("No current environment");
        }
        return c;
    }

    /** Resolves this attribute's scope to the matching {@link Configurable} in the chain rooted at {@code env}. */
    private Configurable getScopeConfigurable(Environment env) throws Error {
        switch (scope) {
            case SCOPE_ENVIRONMENT:
                return env;
            case SCOPE_TEMPLATE:
                // The template is the parent of the environment.
                return env.getParent();
            case SCOPE_CONFIGURATION:
                // The configuration is the grandparent: env -> template -> configuration.
                return env.getParent().getParent();
            default:
                // The constructor guarantees scope is one of the three constants.
                throw new BugException();
        }
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
/**
 * <p>
 * Describes a private IP address.
 * </p>
 */
public class InstancePrivateIpAddress implements Serializable, Cloneable {

    /** The private IP address of the network interface. */
    private String privateIpAddress;

    /** The private DNS name. */
    private String privateDnsName;

    /** Whether this IP address is the primary private IP address of the network interface. */
    private Boolean primary;

    /** The association information for an Elastic IP address for the network interface. */
    private InstanceNetworkInterfaceAssociation association;

    /**
     * Sets the private IP address of the network interface.
     *
     * @param privateIpAddress
     *        The private IP address of the network interface.
     */
    public void setPrivateIpAddress(String privateIpAddress) {
        this.privateIpAddress = privateIpAddress;
    }

    /**
     * Returns the private IP address of the network interface.
     *
     * @return The private IP address of the network interface.
     */
    public String getPrivateIpAddress() {
        return this.privateIpAddress;
    }

    /**
     * Fluent variant of {@link #setPrivateIpAddress(String)}.
     *
     * @param privateIpAddress
     *        The private IP address of the network interface.
     * @return This object, so that method calls can be chained together.
     */
    public InstancePrivateIpAddress withPrivateIpAddress(String privateIpAddress) {
        setPrivateIpAddress(privateIpAddress);
        return this;
    }

    /**
     * Sets the private DNS name.
     *
     * @param privateDnsName
     *        The private DNS name.
     */
    public void setPrivateDnsName(String privateDnsName) {
        this.privateDnsName = privateDnsName;
    }

    /**
     * Returns the private DNS name.
     *
     * @return The private DNS name.
     */
    public String getPrivateDnsName() {
        return this.privateDnsName;
    }

    /**
     * Fluent variant of {@link #setPrivateDnsName(String)}.
     *
     * @param privateDnsName
     *        The private DNS name.
     * @return This object, so that method calls can be chained together.
     */
    public InstancePrivateIpAddress withPrivateDnsName(String privateDnsName) {
        setPrivateDnsName(privateDnsName);
        return this;
    }

    /**
     * Sets whether this IP address is the primary private IP address of the
     * network interface.
     *
     * @param primary
     *        Whether this IP address is the primary private IP address.
     */
    public void setPrimary(Boolean primary) {
        this.primary = primary;
    }

    /**
     * Returns whether this IP address is the primary private IP address of
     * the network interface.
     *
     * @return Whether this IP address is the primary private IP address.
     */
    public Boolean getPrimary() {
        return this.primary;
    }

    /**
     * Fluent variant of {@link #setPrimary(Boolean)}.
     *
     * @param primary
     *        Whether this IP address is the primary private IP address.
     * @return This object, so that method calls can be chained together.
     */
    public InstancePrivateIpAddress withPrimary(Boolean primary) {
        setPrimary(primary);
        return this;
    }

    /**
     * Alias of {@link #getPrimary()}, following the boolean-accessor naming
     * convention.
     *
     * @return Whether this IP address is the primary private IP address.
     */
    public Boolean isPrimary() {
        return this.primary;
    }

    /**
     * Sets the association information for an Elastic IP address for the
     * network interface.
     *
     * @param association
     *        The Elastic IP association information.
     */
    public void setAssociation(InstanceNetworkInterfaceAssociation association) {
        this.association = association;
    }

    /**
     * Returns the association information for an Elastic IP address for the
     * network interface.
     *
     * @return The Elastic IP association information.
     */
    public InstanceNetworkInterfaceAssociation getAssociation() {
        return this.association;
    }

    /**
     * Fluent variant of {@link #setAssociation(InstanceNetworkInterfaceAssociation)}.
     *
     * @param association
     *        The Elastic IP association information.
     * @return This object, so that method calls can be chained together.
     */
    public InstancePrivateIpAddress withAssociation(
            InstanceNetworkInterfaceAssociation association) {
        setAssociation(association);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null properties are included.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getPrivateIpAddress() != null) {
            sb.append("PrivateIpAddress: ").append(getPrivateIpAddress()).append(",");
        }
        if (getPrivateDnsName() != null) {
            sb.append("PrivateDnsName: ").append(getPrivateDnsName()).append(",");
        }
        if (getPrimary() != null) {
            sb.append("Primary: ").append(getPrimary()).append(",");
        }
        if (getAssociation() != null) {
            sb.append("Association: ").append(getAssociation());
        }
        return sb.append("}").toString();
    }

    /** Two instances are equal when all four properties are equal (null-safe). */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof InstancePrivateIpAddress)) {
            // Also covers obj == null.
            return false;
        }
        InstancePrivateIpAddress that = (InstancePrivateIpAddress) obj;
        return java.util.Objects.equals(getPrivateIpAddress(), that.getPrivateIpAddress())
                && java.util.Objects.equals(getPrivateDnsName(), that.getPrivateDnsName())
                && java.util.Objects.equals(getPrimary(), that.getPrimary())
                && java.util.Objects.equals(getAssociation(), that.getAssociation());
    }

    /** Hash code consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based, null-as-zero accumulation
        // as the hand-rolled loop it replaces, so values are unchanged.
        return java.util.Objects.hash(getPrivateIpAddress(), getPrivateDnsName(),
                getPrimary(), getAssociation());
    }

    @Override
    public InstancePrivateIpAddress clone() {
        try {
            return (InstancePrivateIpAddress) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
package main.java.CucumberTableExpander;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
public class CucumberTableExpander {
// Delimiters for different Scenarios on the Cucumber Feature File
private static final String SCENARIO_DELIMITERS = "Scenario";
//Delimiters for specific Scenarios with tables
private static final String SCENARIO_OUTLINE_DELIMITER = "Outline";
//Delimiters for separate table value lines from definition
private static final String SCENARIO_TABLE_SEPARATOR = "Examples:|Scenarios:";
// **************************************************************
// * Cucumber Table Expander. (Armando Sanchez Medina) *
// * - Input : String containing Cucumber Feature File *
// * - Output : Cucumber feature with all table scenarios *
// * returned as one scenario replacing variables *
// * with values from the table. Possible returns *
// * as : *
// *         1) Single String with all Scenarios concatenated. *
// * 2) List of Strings. *
// **************************************************************
private String feature;
private StringBuilder result = new StringBuilder();
private ArrayList<String> listOutput = new ArrayList<String>();
// Values for delimiters
// Delimiters for different Scenarios on the Cucumber Feature File
//private static String scenarioDelimiters = "Scenario" ;
// Delimiters for separate table value lines from definition
// private static ArrayList<String> tableDelimiters = (ArrayList<String>) Arrays.asList("Examples:","Scenarios:");
// // Delimiters for separate table values/headers
// private static ArrayList<String> valueDelimiters = (ArrayList<String>) Arrays.asList("|");
/**
 * Parses the given Cucumber feature text, expanding every
 * "Scenario Outline" via {@link CucumberTableOutline} and keeping plain
 * scenarios verbatim. Results are accumulated both as one concatenated
 * string ({@code feature}) and as a list with one entry per scenario
 * ({@code listOutput}).
 *
 * @param featureFile the raw text of a Cucumber feature file
 */
public CucumberTableExpander(String featureFile) {
	// Split on the literal keyword "Scenario". Index 0 holds everything
	// before the first scenario (e.g. the Feature header) and is skipped.
	String[] scenarios = featureFile.split(SCENARIO_DELIMITERS);
	for (int i = 1; i < scenarios.length; i++) {
		// Separate Scenario Outlines from regular scenarios
		if (scenarios[i].trim().startsWith(SCENARIO_OUTLINE_DELIMITER+":")) {
			// Remove Outline word
			// NOTE(review): replace() strips every "Outline:" occurrence in
			// the chunk, not only the leading keyword — confirm table text
			// can never legitimately contain that token.
			scenarios[i] = scenarios[i].trim().replace(SCENARIO_OUTLINE_DELIMITER+":", "");
			// Separate Scenario definitions from table with containing
			// values
			// NOTE(review): the "Scenarios:" alternative of the separator
			// regex can never match here — any "Scenarios:" text was already
			// cut apart by the "Scenario" split above, so effectively only
			// "Examples:" separates definition from table. Verify intended.
			String[] parts = scenarios[i].trim().split(
					SCENARIO_TABLE_SEPARATOR);// OTS
			// Instantiate outline class to work with definition and list of
			// values
			CucumberTableOutline outline = new CucumberTableOutline(parts);
			this.result.append(outline.getOutlineOutput());
			this.listOutput.addAll(outline.getSeveralScenariosResult());
		} else {
			// Plain scenario: re-attach the "Scenario" keyword that the
			// split consumed and keep the body verbatim.
			StringBuilder noFeatureScenario = new StringBuilder();
			noFeatureScenario.append("\n"+SCENARIO_DELIMITERS)
					.append(scenarios[i].trim()).append("\n");
			this.result.append(noFeatureScenario);
			this.listOutput.add(noFeatureScenario.toString());
		}
	}
	// scenarios outlines n-times with table values.
	this.feature = this.result.toString();
}
public String getFeatureExpanded() {
if (this.feature.isEmpty()) {
return "No Cucumber feature file provided";
} else {
return this.feature;
}
}
public ArrayList<String> getFeatureExpandedAsList() {
if (this.listOutput.isEmpty()) {
return null;
} else {
return this.listOutput;
}
}
protected class CucumberTableOutline {
private ArrayList<String> outlineLine = new ArrayList<String>();
private ArrayList<CucumberTableHeaderElement> headers = new ArrayList<CucumberTableHeaderElement>();
private ArrayList<String> headersNames = new ArrayList<String>();
private StringBuilder outlineOutputWork = new StringBuilder();
private StringBuilder outlineOutput = new StringBuilder();
private ArrayList<String> severalScenariosResult = new ArrayList<String>();
public CucumberTableOutline(String[] parts) {
addFeatureContent(parts);
}
public String getOutlineLine(int id) {
return this.outlineLine.get(id);
}
public void addToOutlineLine(String outlineLineEntry) {
this.outlineLine.add(outlineLineEntry);
}
public void addOutlineLines(String[] outlineLineEntries) {
// Read the Outline Definition and find one variable name on each
// line.
for (int j = 0; j < outlineLineEntries.length; j++) {
String outlineLineValue = outlineLineEntries[j].toString();
this.outlineOutputWork.append(outlineLineValue).append("\n");
// Find the variable on the line.
if (outlineLineValue.contains("<")) {
String variableName = outlineLineValue
.substring(outlineLineValue.indexOf("<") + 1);
variableName = variableName.substring(0,
variableName.indexOf(">"));
this.headersNames.add(variableName.trim());
}
this.outlineLine.add(outlineLineEntries[j].toString());
}
}
public int getLength() {
return this.outlineLine.size();
}
private void addScenarioValues(String[] scenarioOnTable) {
// Read variable names from the first line of the table
String[] variableNamesOnValues = scenarioOnTable[0].trim().split(
"\\|");// OTS
for (int i = 0; i < variableNamesOnValues.length; i++) {
if (!variableNamesOnValues[i].isEmpty()) {
// Include Header element to the Scenario
int position = this.headersNames
.indexOf(variableNamesOnValues[i].trim());
CucumberTableHeaderElement headerElement = new CucumberTableHeaderElement();
headerElement.setHeaderPosition(position);
headerElement
.setHeaderName(variableNamesOnValues[i].trim());
headerElement.setHeaderPositionOnTable(i);
this.headers.add(headerElement);
}
}
// TODO : Remove Empty variable lines (Leading delimiters)
int indexOfHeader;
for (int j = 1; j < scenarioOnTable.length; j++) {
if (!scenarioOnTable[j].isEmpty()) {
indexOfHeader = 0;
String ScenarioGenerated = outlineOutputWork.toString();
// Divide Each Value for variable
String[] variableValue = scenarioOnTable[j].trim().split(
"\\|");
for (int k = 0; k < variableValue.length; k++) {
if ((!variableValue[k].isEmpty())&&(indexOfHeader < headers.size())) {
ScenarioGenerated = ScenarioGenerated.replace(
this.headers.get(indexOfHeader)
.getHeaderNameWithSymbols().trim(),
variableValue[k].trim());
indexOfHeader++;
}
}
this.outlineOutput.append("\n").append(SCENARIO_DELIMITERS+": ")
.append(ScenarioGenerated);
// NEW ROW WITH DATA on ScenarioGenerated
this.severalScenariosResult.add("\n"+SCENARIO_DELIMITERS+": "
+ ScenarioGenerated);
}
}
}
private void addFeatureContent(String[] parts) {
// Divide lines of Scenarios
this.addOutlineLines(parts[0].trim().split("\n"));// OTS
// Separate each line on the table
ArrayList<String> scenarioontableList = new ArrayList<String>(
Arrays.asList(parts[1].trim().split("\n")));// OTS
for (Iterator<String> iteration = scenarioontableList
.listIterator(); iteration.hasNext();) {
String elementToCheck = iteration.next();
if (elementToCheck.startsWith("#")) {
iteration.remove();
}
}
String[] arrayOfScenario = new String[scenarioontableList.size()];
for (int i = 0; i < scenarioontableList.size(); i++) {
arrayOfScenario[i] = scenarioontableList.get(i);
}
this.addScenarioValues(arrayOfScenario);
}
public String getOutlineOutput() {
return outlineOutput.toString();
}
public ArrayList<String> getSeveralScenariosResult() {
return severalScenariosResult;
}
}
protected class CucumberTableHeaderElement {
private String headerName;
private int headerPosition;
private int headerPositionOnTable;
public CucumberTableHeaderElement() {
}
public String getHeaderName() {
return headerName;
}
public String getHeaderNameWithSymbols() {
return "<" + headerName + ">";
}
public void setHeaderName(String headerName) {
this.headerName = headerName;
}
public int getHeaderPosition() {
return headerPosition;
}
public void setHeaderPosition(int headerPosition) {
this.headerPosition = headerPosition;
}
public int getHeaderPositionOnTable() {
return headerPositionOnTable;
}
public void setHeaderPositionOnTable(int headerPositionOnTable) {
this.headerPositionOnTable = headerPositionOnTable;
}
}
}
| |
package com.francelabs.datafari.ldap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import com.francelabs.datafari.utils.ObfuscationTool;
/**
 * Configuration holder for a single LDAP/Active Directory realm: connection
 * credentials, URL, domain name, user bases and the filters used to locate
 * users. The connection password is always kept in ManifoldCF-obfuscated form
 * internally.
 */
public class LdapRealm {

    private String connectionName;
    // Always stored obfuscated (ManifoldCF obfuscation).
    private String connectionPassword;
    private String connectionURL;
    private String domainName;
    private final List<String> userBases;
    private String userFilter;
    private String userSearchAttribute;

    private static final Logger logger = LogManager.getLogger(LdapRealm.class);

    /**
     * Builds a realm from its JSON representation (inverse of {@link #toJson()}).
     *
     * @param jsonConf JSON configuration; must contain the connection name,
     *                 password, URL, domain name and user bases. The user filter
     *                 and user search attribute fall back to the LdapUtils
     *                 defaults when absent.
     */
    public LdapRealm(final JSONObject jsonConf) {
        connectionName = jsonConf.get(LdapConfig.ATTR_CONNECTION_NAME).toString();
        connectionPassword = jsonConf.get(LdapConfig.ATTR_CONNECTION_PW).toString();
        connectionURL = jsonConf.get(LdapConfig.ATTR_CONNECTION_URL).toString();
        domainName = jsonConf.get(LdapConfig.ATTR_DOMAIN_NAME).toString();
        final JSONArray juserBases = (JSONArray) jsonConf.get(LdapConfig.ATTR_USER_BASE);
        // JSONArray extends the raw ArrayList, so the (String[]) cast is required.
        final String[] ub = (String[]) juserBases.toArray(new String[0]);
        userBases = new ArrayList<>();
        userBases.addAll(Arrays.asList(ub));
        if (jsonConf.containsKey(LdapConfig.ATTR_USER_FILTER)) {
            userFilter = jsonConf.get(LdapConfig.ATTR_USER_FILTER).toString();
        } else {
            userFilter = LdapUtils.baseFilter;
        }
        if (jsonConf.containsKey(LdapConfig.ATTR_USER_SEARCH_ATTRIBUTE)) {
            userSearchAttribute = jsonConf.get(LdapConfig.ATTR_USER_SEARCH_ATTRIBUTE).toString();
        } else {
            userSearchAttribute = LdapUtils.baseSearchAttribute;
        }
    }

    /**
     * LdapRealm constructor.
     *
     * @param connectionName the username that will be used to connect to the Active Directory
     * @param connectionPassword the password of the provided connectionName
     * @param isClearPassword set to true if the connectionPassword has not already been obfuscated by the ManifoldCF obfuscation method, false otherwise
     * @param connectionURL the connection URL to reach the Active Directory (ldap://host:port)
     * @param userBases the list of userBases
     * @param userFilter the LDAP filter to use in order to find users in the user bases
     * @param userSearchAttribute the LDAP attribute to use in order to find a specific user (based on a unique identifier like the samaccount)
     * @param domainName the domain name (eg francelabs.com)
     */
    public LdapRealm(final String connectionName, final String connectionPassword, final boolean isClearPassword, final String connectionURL, final List<String> userBases, final String userFilter,
            final String userSearchAttribute, final String domainName) {
        this.connectionName = connectionName;
        this.connectionPassword = obfuscateIfNeeded(connectionPassword, isClearPassword);
        this.connectionURL = connectionURL;
        this.domainName = domainName;
        this.userBases = userBases;
        this.userFilter = userFilter;
        this.userSearchAttribute = userSearchAttribute;
    }

    /**
     * LdapRealm constructor.
     *
     * @param connectionName the username that will be used to connect to the Active Directory
     * @param connectionPassword the password of the provided connectionName
     * @param isClearPassword set to true if the connectionPassword has not already been obfuscated by the ManifoldCF obfuscation method, false otherwise
     * @param connectionURL the connection URL to reach the Active Directory (ldap://host:port)
     * @param domainSuffix currently unused; kept for signature compatibility
     * @param authenticationProtocol currently unused; kept for signature compatibility
     * @param userSubtree currently unused; kept for signature compatibility
     * @param userFilter the LDAP filter to use in order to find users in the user bases
     * @param userSearchAttribute the LDAP attribute to use in order to find a specific user (based on a unique identifier like the samaccount)
     * @param domainName the domain name (eg francelabs.com)
     */
    public LdapRealm(final String connectionName, final String connectionPassword, final boolean isClearPassword, final String connectionURL, final String domainSuffix,
            final String authenticationProtocol, final String userSubtree, final String userFilter, final String userSearchAttribute, final String domainName) {
        this.connectionName = connectionName;
        this.connectionPassword = obfuscateIfNeeded(connectionPassword, isClearPassword);
        this.connectionURL = connectionURL;
        this.domainName = domainName;
        // NOTE(review): domainSuffix, authenticationProtocol and userSubtree are
        // accepted but never stored — confirm whether this is intentional.
        this.userBases = new ArrayList<>();
        this.userFilter = userFilter;
        this.userSearchAttribute = userSearchAttribute;
    }

    /**
     * Obfuscates the given password with the ManifoldCF obfuscation method when
     * it is provided in clear text; returns it unchanged otherwise, or when
     * obfuscation fails (the failure is logged).
     */
    private static String obfuscateIfNeeded(final String password, final boolean isClearPassword) {
        if (!isClearPassword) {
            return password;
        }
        try {
            return ObfuscationTool.obfuscate(password);
        } catch (final Exception e) {
            // Security fix: do not write the clear-text password to the log
            // (the original logged its value here).
            logger.error("MCF obfuscation error for password", e);
            return password;
        }
    }

    /** Serializes this realm to JSON (inverse of the JSON constructor). */
    @SuppressWarnings("unchecked") // json.simple's JSONObject is a raw Map
    public JSONObject toJson() {
        final JSONObject json = new JSONObject();
        json.put(LdapConfig.ATTR_CONNECTION_URL, connectionURL);
        json.put(LdapConfig.ATTR_CONNECTION_NAME, connectionName);
        json.put(LdapConfig.ATTR_CONNECTION_PW, connectionPassword);
        json.put(LdapConfig.ATTR_DOMAIN_NAME, domainName);
        json.put(LdapConfig.ATTR_USER_FILTER, userFilter);
        json.put(LdapConfig.ATTR_USER_SEARCH_ATTRIBUTE, userSearchAttribute);
        json.put(LdapConfig.ATTR_USER_BASE, userBases);
        return json;
    }

    public void addUserBase(final String userBase) {
        this.userBases.add(userBase);
    }

    public void clearUserBases() {
        this.userBases.clear();
    }

    public void addAllUserBases(final List<String> userBases) {
        this.userBases.addAll(userBases);
    }

    public String getConnectionName() {
        return connectionName;
    }

    /**
     * @return the clear-text password, or the stored (obfuscated) value when
     *         deobfuscation fails
     */
    public String getDeobfuscatedConnectionPassword() {
        try {
            return ObfuscationTool.deobfuscate(connectionPassword);
        } catch (final Exception e) {
            logger.error("MCF deobfuscation error for password");
            return connectionPassword;
        }
    }

    public String getObfuscatedConnectionPassword() {
        return connectionPassword;
    }

    public String getConnectionURL() {
        return connectionURL;
    }

    public List<String> getUserBases() {
        return userBases;
    }

    public void setConnectionName(final String connectionName) {
        this.connectionName = connectionName;
    }

    /** Stores the given clear-text password in obfuscated form ("" on failure). */
    public void setConnectionPassword(final String connectionPassword) {
        try {
            this.connectionPassword = ObfuscationTool.obfuscate(connectionPassword);
        } catch (final Exception e) {
            logger.error("Unable to obfuscate the password", e);
            this.connectionPassword = "";
        }
    }

    public void setConnectionURL(final String connectionURL) {
        this.connectionURL = connectionURL;
    }

    public String getUserFilter() {
        return userFilter;
    }

    public void setUserFilter(final String userFilter) {
        this.userFilter = userFilter;
    }

    public String getUserSearchAttribute() {
        return userSearchAttribute;
    }

    public void setUserSearchAttribute(final String userSearchAttribute) {
        this.userSearchAttribute = userSearchAttribute;
    }

    public String getDomainName() {
        return domainName;
    }

    public void setDomainName(final String domainName) {
        this.domainName = domainName;
    }
}
| |
package cz.metacentrum.perun.webgui.json.groupsManager;
import com.google.gwt.cell.client.Cell;
import com.google.gwt.cell.client.CheckboxCell;
import com.google.gwt.cell.client.FieldUpdater;
import com.google.gwt.cell.client.ValueUpdater;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.cellview.client.ColumnSortEvent.ListHandler;
import com.google.gwt.user.cellview.client.Header;
import com.google.gwt.user.cellview.client.RowStyles;
import com.google.gwt.user.client.ui.*;
import com.google.gwt.view.client.DefaultSelectionEventManager;
import com.google.gwt.view.client.ListDataProvider;
import com.google.gwt.view.client.MultiSelectionModel;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.resources.LargeIcons;
import cz.metacentrum.perun.webgui.client.resources.PerunEntity;
import cz.metacentrum.perun.webgui.client.resources.SmallIcons;
import cz.metacentrum.perun.webgui.client.resources.TableSorter;
import cz.metacentrum.perun.webgui.json.*;
import cz.metacentrum.perun.webgui.json.keyproviders.GeneralKeyProvider;
import cz.metacentrum.perun.webgui.model.PerunError;
import cz.metacentrum.perun.webgui.model.RichGroup;
import cz.metacentrum.perun.webgui.widgets.AjaxLoaderImage;
import cz.metacentrum.perun.webgui.widgets.Confirm;
import cz.metacentrum.perun.webgui.widgets.CustomButton;
import cz.metacentrum.perun.webgui.widgets.PerunTable;
import cz.metacentrum.perun.webgui.widgets.UnaccentMultiWordSuggestOracle;
import cz.metacentrum.perun.webgui.widgets.cells.CustomClickableInfoCellWithImageResource;
import cz.metacentrum.perun.webgui.widgets.cells.PerunCheckboxCell;
import java.util.ArrayList;
import java.util.Comparator;
/**
* Ajax query to get all groups in VO
*
* @author Vaclav Mach <374430@mail.muni.cz>
* @author Pavel Zlamal <256627@mail.muni.cz>
* @author Ondrej Velisek <ondrejvelisek@gmail.com>
*/
public class GetAllRichGroups implements JsonCallback, JsonCallbackTable<RichGroup>, JsonCallbackOracle<RichGroup> {

    // session
    private PerunWebSession session = PerunWebSession.getInstance();
    // VO id
    private int voId;
    // attribute names which we want to get (null when the (id, events)
    // constructor was used — see retrieveData())
    private ArrayList<String> attrNames;
    // JSON URL
    static private final String JSON_URL = "groupsManager/getAllRichGroupsWithAttributesByNames";
    // Selection model
    final MultiSelectionModel<RichGroup> selectionModel = new MultiSelectionModel<RichGroup>(new GeneralKeyProvider<RichGroup>());
    // RichGroups table data provider
    private ListDataProvider<RichGroup> dataProvider = new ListDataProvider<RichGroup>();
    // RichGroups table
    private PerunTable<RichGroup> table;
    // RichGroups table list
    private ArrayList<RichGroup> list = new ArrayList<RichGroup>();
    // External events
    private JsonCallbackEvents events = new JsonCallbackEvents();
    // Table field updater
    private FieldUpdater<RichGroup, String> tableFieldUpdater;
    // loader image
    private AjaxLoaderImage loaderImage = new AjaxLoaderImage();
    // oracle
    private UnaccentMultiWordSuggestOracle oracle = new UnaccentMultiWordSuggestOracle(":");
    // backup of the full (unfiltered) list used by filterTable()
    private ArrayList<RichGroup> fullBackup = new ArrayList<RichGroup>();
    // checkable core groups
    private boolean coreGroupsCheckable = false;
    private boolean checkable = true;

    /**
     * Creates a new callback
     *
     * @param id ID of VO for which we want groups for
     * @param attrNames Attribute names which we want to get
     */
    public GetAllRichGroups(int id, ArrayList<String> attrNames) {
        this.voId = id;
        this.attrNames = attrNames;
    }

    /**
     * Creates a new callback
     *
     * @param id ID of VO for which we want groups for
     * @param events external events
     */
    public GetAllRichGroups(int id, JsonCallbackEvents events) {
        this.voId = id;
        this.events = events;
    }

    /**
     * Returns table with groups in hierarchical structure and with custom field updater
     *
     * @param fu Custom field updater
     * @return table widget
     */
    public CellTable<RichGroup> getTable(FieldUpdater<RichGroup, String> fu) {
        this.tableFieldUpdater = fu;
        return this.getTable();
    }

    /**
     * Returns an empty table (no data retrieval) with custom field updater.
     *
     * @param fu Custom field updater
     * @return table widget
     */
    public CellTable<RichGroup> getEmptyTable(FieldUpdater<RichGroup, String> fu) {
        this.tableFieldUpdater = fu;
        return this.getEmptyTable();
    }

    /**
     * Returns the table and triggers data retrieval.
     *
     * @return table widget
     */
    public CellTable<RichGroup> getTable() {
        // retrieve data
        retrieveData();
        return getEmptyTable();
    }

    /**
     * Returns table with groups in hierarchical structure and with custom field updater
     *
     * @return table widget
     */
    public CellTable<RichGroup> getEmptyTable() {
        // Table data provider.
        dataProvider = new ListDataProvider<RichGroup>(list);
        // Cell table
        table = new PerunTable<RichGroup>(list);
        table.setHyperlinksAllowed(false);
        // Connect the table to the data provider.
        dataProvider.addDataDisplay(table);
        // Sorting
        ListHandler<RichGroup> columnSortHandler = new ListHandler<RichGroup>(dataProvider.getList());
        table.addColumnSortHandler(columnSortHandler);
        // table selection
        table.setSelectionModel(selectionModel, DefaultSelectionEventManager.<RichGroup> createCheckboxManager());
        // set empty content & loader
        table.setEmptyTableWidget(loaderImage);
        if (!session.isVoAdmin(voId)) {
            loaderImage.setEmptyResultMessage("You are not manager of any group in this VO.");
        } else {
            loaderImage.setEmptyResultMessage("VO has no groups.");
        }
        Column<RichGroup, RichGroup> checkBoxColumn = new Column<RichGroup, RichGroup>(
                new PerunCheckboxCell<RichGroup>(true, false, coreGroupsCheckable)) {
            @Override
            public RichGroup getValue(RichGroup object) {
                // Get the value from the selection model.
                object.setChecked(selectionModel.isSelected(object));
                return object;
            }
        };
        // updates the columns size
        table.setColumnWidth(checkBoxColumn, 40.0, Unit.PX);
        // Add the columns
        // Checkbox column header
        CheckboxCell cb = new CheckboxCell();
        Header<Boolean> checkBoxHeader = new Header<Boolean>(cb) {
            public Boolean getValue() {
                return false;//return true to see a checked checkbox.
            }
        };
        checkBoxHeader.setUpdater(new ValueUpdater<Boolean>() {
            public void update(Boolean value) {
                // sets selected to all, if value = true, unselect otherwise
                for(RichGroup obj : list){
                    if (!obj.isCoreGroup()) {
                        selectionModel.setSelected(obj, value);
                    }
                }
            }
        });
        if (checkable) {
            table.addColumn(checkBoxColumn,checkBoxHeader);
        }
        table.addIdColumn("Group ID", tableFieldUpdater);
        // Add a synchronization clicable icon column.
        final Column<RichGroup, RichGroup> syncColumn = new Column<RichGroup, RichGroup>(
                new CustomClickableInfoCellWithImageResource("click")) {
            @Override
            public RichGroup getValue(RichGroup object) {
                return object;
            }
            @Override
            public String getCellStyleNames(Cell.Context context, RichGroup object) {
                if (tableFieldUpdater != null) {
                    return super.getCellStyleNames(context, object) + " pointer image-hover";
                } else {
                    return super.getCellStyleNames(context, object);
                }
            }
        };
        syncColumn.setFieldUpdater(new FieldUpdater<RichGroup, RichGroup>() {
            @Override
            public void update(int index, final RichGroup object, RichGroup value) {
                GetEntityById get = new GetEntityById(PerunEntity.RICH_GROUP, object.getId(), new JsonCallbackEvents() {
                    @Override
                    public void onFinished(JavaScriptObject jso) {
                        final RichGroup object = jso.cast();
                        String name, syncEnabled, syncInterval, syncTimestamp, syncSuccessTimestamp, syncState, authGroup;
                        name = object.getName();
                        if (object.isSyncEnabled()) {
                            syncEnabled = "enabled";
                        } else {
                            syncEnabled = "disabled";
                        }
                        if (object.getSynchronizationInterval() == null) {
                            syncInterval = "N/A";
                        } else {
                            if (JsonUtils.checkParseInt(object.getSynchronizationInterval())) {
                                // interval is stored in 5-minute ticks; show hours,
                                // or minutes when below one hour
                                int time = Integer.parseInt(object.getSynchronizationInterval()) * 5 / 60;
                                if (time == 0) {
                                    time = Integer.parseInt(object.getSynchronizationInterval()) * 5;
                                    syncInterval = time + " minute(s)";
                                } else {
                                    syncInterval = time + " hour(s)";
                                }
                            } else {
                                syncInterval = object.getSynchronizationInterval();
                            }
                        }
                        if (object.getLastSynchronizationState() == null) {
                            if (object.getLastSuccessSynchronizationTimestamp() != null) {
                                syncState = "OK";
                            } else {
                                syncState = "Not synced yet";
                            }
                        } else {
                            // raw sync-state details are shown to Perun admins only
                            if (session.isPerunAdmin()) {
                                syncState = object.getLastSynchronizationState();
                            } else {
                                syncState = "Internal Error";
                            }
                        }
                        if (object.getLastSynchronizationTimestamp() == null) {
                            syncTimestamp = "N/A";
                        } else {
                            syncTimestamp = object.getLastSynchronizationTimestamp().split("\\.")[0];
                        }
                        if (object.getLastSuccessSynchronizationTimestamp() == null) {
                            syncSuccessTimestamp = "N/A";
                        } else {
                            syncSuccessTimestamp = object.getLastSuccessSynchronizationTimestamp().split("\\.")[0];
                        }
                        if (object.getAuthoritativeGroup() != null && object.getAuthoritativeGroup().equals("1")) {
                            authGroup = "Yes";
                        } else {
                            authGroup = "No";
                        }
                        String html = "Group name: <b>"+ SafeHtmlUtils.fromString(name).asString()+"</b><br>";
                        html += "Synchronization: <b>"+SafeHtmlUtils.fromString(syncEnabled).asString()+"</b><br>";
                        if (object.isSyncEnabled()) {
                            html += "Last sync. state: <b>"+SafeHtmlUtils.fromString(syncState).asString()+"</b><br>";
                            html += "Last sync. timestamp: <b>"+SafeHtmlUtils.fromString(syncTimestamp).asString()+"</b><br>";
                            html += "Last successful sync. timestamp: <b>"+SafeHtmlUtils.fromString(syncSuccessTimestamp).asString()+"</b><br>";
                            html += "Sync. Interval: <b>"+SafeHtmlUtils.fromString(syncInterval).asString()+"</b><br>";
                            html += "Authoritative group: <b>"+SafeHtmlUtils.fromString(authGroup).asString()+"</b><br>";
                        }
                        FlexTable layout = new FlexTable();
                        layout.setWidget(0, 0, new HTML("<p>" + new Image(LargeIcons.INSTANCE.informationIcon())));
                        layout.setHTML(0, 1, "<p style=\"line-height: 1.2;\">" + html);
                        layout.getFlexCellFormatter().setAlignment(0, 0, HasHorizontalAlignment.ALIGN_LEFT, HasVerticalAlignment.ALIGN_TOP);
                        layout.getFlexCellFormatter().setAlignment(0, 1, HasHorizontalAlignment.ALIGN_LEFT, HasVerticalAlignment.ALIGN_TOP);
                        layout.getFlexCellFormatter().setStyleName(0, 0, "alert-box-image");
                        final CustomButton okButton = new CustomButton("Force synchronization", SmallIcons.INSTANCE.arrowRefreshIcon());
                        okButton.addClickHandler(new ClickHandler() {
                            @Override
                            public void onClick(ClickEvent event) {
                                ForceGroupSynchronization call = new ForceGroupSynchronization(JsonCallbackEvents.disableButtonEvents(okButton));
                                call.synchronizeGroup(object.getId());
                            }
                        });
                        okButton.setVisible(object.isSyncEnabled());
                        if (!session.isVoAdmin(object.getVoId()) && !session.isGroupAdmin(object.getId())) okButton.setEnabled(false);
                        final Confirm c = new Confirm("Group synchronization info", layout, okButton, null, true);
                        c.setHideOnButtonClick(false);
                        c.setCancelIcon(SmallIcons.INSTANCE.acceptIcon());
                        c.setCancelButtonText("OK");
                        c.setCancelClickHandler(new ClickHandler() {
                            @Override
                            public void onClick(ClickEvent event) {
                                c.hide();
                            }
                        });
                        c.show();
                    }
                });
                get.retrieveData();
            }
        });
        syncColumn.setSortable(true);
        columnSortHandler.setComparator(syncColumn, new Comparator<RichGroup>() {
            @Override
            public int compare(RichGroup o1, RichGroup o2) {
                if (o1 != null && o2 != null) {
                    int o1val = 0;
                    int o2val = 0;
                    if (o1.isSyncEnabled()) o1val = 5;
                    if (o2.isSyncEnabled()) o2val = 5;
                    if (o1.getAuthoritativeGroup() != null && o1.getAuthoritativeGroup().equalsIgnoreCase("1")) o1val = o1val + 3;
                    if (o2.getAuthoritativeGroup() != null && o2.getAuthoritativeGroup().equalsIgnoreCase("1")) o2val = o2val + 3;
                    return o1val - o2val;
                }
                return 0;
            }
        });
        table.addColumn(syncColumn, "Sync");
        table.setColumnWidth(syncColumn, "70px");
        // set row styles based on: isCoreGroup()
        table.setRowStyles(new RowStyles<RichGroup>(){
            public String getStyleNames(RichGroup row, int rowIndex) {
                if (row.isCoreGroup()) {
                    return "bold";
                }
                return "";
            }
        });
        table.addNameColumn(tableFieldUpdater);
        table.addDescriptionColumn(tableFieldUpdater);
        return table;
    }

    /**
     * Sorts table by objects Name
     */
    public void sortTable() {
        list = new TableSorter<RichGroup>().sortByName(getList());
        dataProvider.flush();
        dataProvider.refresh();
    }

    /**
     * Add object as new row to table
     *
     * @param object RichGroup to be added as new row
     */
    public void addToTable(RichGroup object) {
        list.add(object);
        oracle.add(object.getName());
        dataProvider.flush();
        dataProvider.refresh();
    }

    /**
     * Removes object as row from table
     *
     * @param object RichGroup to be removed as row
     */
    public void removeFromTable(RichGroup object) {
        list.remove(object);
        selectionModel.getSelectedSet().remove(object);
        dataProvider.flush();
        dataProvider.refresh();
    }

    /**
     * Clear all table content
     */
    public void clearTable(){
        loaderImage.loadingStart();
        list.clear();
        fullBackup.clear();
        oracle.clear();
        selectionModel.clear();
        dataProvider.flush();
        dataProvider.refresh();
    }

    /**
     * Clears list of selected items
     */
    public void clearTableSelectedSet(){
        selectionModel.clear();
    }

    /**
     * Return selected items from list
     *
     * @return return list of checked items
     */
    public ArrayList<RichGroup> getTableSelectedList(){
        return JsonUtils.setToList(selectionModel.getSelectedSet());
    }

    /**
     * Called, when an error occurs
     */
    public void onError(PerunError error) {
        session.getUiElements().setLogErrorText("Error while loading all groups.");
        loaderImage.loadingError(error);
        events.onError(error);
    }

    /**
     * Called, when loading starts
     */
    public void onLoadingStart() {
        session.getUiElements().setLogText("Loading all groups started.");
        events.onLoadingStart();
    }

    /**
     * Called, when operation finishes successfully.
     */
    public void onFinished(JavaScriptObject jso) {
        clearTable();
        // core groups go first; the rest keep the order delivered by RPC
        for (RichGroup g : JsonUtils.<RichGroup>jsoAsList(jso)) {
            if (g.isCoreGroup()) {
                insertToTable(0, g);
            } else {
                addToTable(g);
            }
        }
        //sortTable(); groups are sorted from RPC
        session.getUiElements().setLogText("Groups loaded: " + list.size());
        events.onFinished(jso);
        loaderImage.loadingFinished();
    }

    /** Inserts a group at the given index and registers its name in the oracle. */
    public void insertToTable(int index, RichGroup object) {
        list.add(index, object);
        oracle.add(object.getName());
        dataProvider.flush();
        dataProvider.refresh();
    }

    public void setEditable(boolean editable) {
        // TODO Auto-generated method stub
    }

    public void setCheckable(boolean checkable) {
        this.checkable = checkable;
    }

    /** Replaces the table content with the given list. */
    public void setList(ArrayList<RichGroup> list) {
        clearTable();
        this.list.addAll(list);
        for (RichGroup g : list) {
            oracle.add(g.getName());
        }
        dataProvider.flush();
        dataProvider.refresh();
    }

    public ArrayList<RichGroup> getList() {
        return this.list;
    }

    public UnaccentMultiWordSuggestOracle getOracle(){
        return this.oracle;
    }

    /** Filters rows by group name (prefix match, or match after a ":" separator). */
    public void filterTable(String text){
        // store list only for first time
        // FIX: the original tested "fullBackup.isEmpty() || fullBackup == null";
        // the null check was dead (the field is initialized at declaration) and
        // in the wrong order anyway, so it is dropped.
        if (fullBackup.isEmpty()) {
            fullBackup.addAll(list);
        }
        // always clear selected items
        selectionModel.clear();
        list.clear();
        if (text.equalsIgnoreCase("")) {
            list.addAll(fullBackup);
        } else {
            for (RichGroup grp : fullBackup){
                // store facility by filter
                if (grp.getName().toLowerCase().startsWith(text.toLowerCase()) ||
                        grp.getName().toLowerCase().contains(":"+text.toLowerCase())) {
                    list.add(grp);
                }
            }
        }
        if (list.isEmpty() && !text.isEmpty()) {
            loaderImage.setEmptyResultMessage("No group matching '"+text+"' found.");
        } else {
            if (!session.isVoAdmin(voId)) {
                loaderImage.setEmptyResultMessage("You are not manager of any group in this VO.");
            } else {
                loaderImage.setEmptyResultMessage("VO has no groups.");
            }
        }
        dataProvider.flush();
        dataProvider.refresh();
        loaderImage.loadingFinished();
    }

    public void setOracle(UnaccentMultiWordSuggestOracle oracle) {
        this.oracle = oracle;
    }

    /** Starts the RPC call that loads the groups. */
    public void retrieveData() {
        String param = "vo="+this.voId;
        // FIX: attrNames is never assigned by the (id, events) constructor, so
        // the original threw a NullPointerException here in that case.
        if (this.attrNames != null && !this.attrNames.isEmpty()) {
            // parse list
            for (String attrName : this.attrNames) {
                param += "&attrNames[]="+attrName;
            }
        }
        JsonClient js = new JsonClient();
        js.retrieveData(JSON_URL, param, this);
    }

    public void setCoreGroupsCheckable(boolean checkable) {
        coreGroupsCheckable = checkable;
    }

    public void setEvents(JsonCallbackEvents events) {
        this.events = events;
    }

    public void setVoId(int voId) {
        this.voId = voId;
    }

    public MultiSelectionModel<RichGroup> getSelectionModel() {
        return this.selectionModel;
    }
}
| |
package com.sherncsuk.mymusicmanager.Activities;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.view.Menu;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
import com.sherncsuk.mymusicmanager.DataStructures.Filestate;
import com.sherncsuk.mymusicmanager.DataStructures.Message;
import com.sherncsuk.mymusicmanager.R;
import com.sherncsuk.mymusicmanager.Utils.FileUtil;
import com.sherncsuk.mymusicmanager.Utils.NetworkingUtil;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.net.Socket;
public class MainActivity extends Activity {
//Network Connection Stuffz
private Socket sock;
private OutputStream out;
private DataInputStream inputStream;
private boolean connected = false;
private Context currentContext;
//Utilities
private NetworkingUtil networkingUtil;
private FileUtil fileUtil;
@Override
protected void onCreate(Bundle savedInstanceState) {
    // Standard activity bootstrap: inflate the layout first, then set up the
    // helpers the sync actions depend on.
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    networkingUtil = new NetworkingUtil();
    // Opens the server connection; presumably sets sock/out/inputStream and
    // the connected flag via this activity's setters — TODO confirm in NetworkingUtil.
    networkingUtil.connect(this);
    fileUtil = new FileUtil();
    currentContext = getApplicationContext();
}
@Override
protected void onStop() {
    // Tear down the server session before the activity stops; leave() is a
    // no-op when not connected.
    leave(null);
    super.onStop();
}
@Override
protected void onRestart() {
    // Re-establish the server connection that was dropped in onStop().
    networkingUtil.connect(this);
    super.onRestart();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.main, menu);
    return true;
}
/**
 * Requests the list of files currently stored on the server and displays the
 * returned filenames. Does nothing when there is no active connection.
 *
 * @param v the view that triggered the action
 */
public void list(View v) {
    if (!connected) {
        return;
    }
    networkingUtil.sendInitialMessage(sock, Message.MessageType.LIST);
    networkingUtil.recieveFilenames(this, sock);
}
/**
 * Sends this device's current file IDs to the server and displays the names
 * of the files the server has that this device lacks. Does nothing when not
 * connected.
 *
 * @param v the view that triggered the action
 */
public void diff(View v) {
    if (!connected) {
        return;
    }
    networkingUtil.sendInitialMessage(sock, Message.MessageType.DIFF);
    Filestate deviceState = fileUtil.updateFiles(currentContext);
    networkingUtil.sendIDs(deviceState, sock);
    networkingUtil.recieveFilenames(this, sock);
}
/**
 * Downloads the files this device is missing from the server and saves them
 * into the app's external music directory. Does nothing when not connected.
 *
 * @param v the view that triggered the action
 */
@TargetApi(Build.VERSION_CODES.FROYO)
public void pull(View v) {
    if (!connected) {
        return;
    }
    networkingUtil.sendInitialMessage(sock, Message.MessageType.PULL);
    File musicDir = currentContext.getExternalFilesDir(Environment.DIRECTORY_MUSIC);
    Filestate deviceState = fileUtil.updateFiles(currentContext);
    networkingUtil.sendIDs(deviceState, sock);
    networkingUtil.receiveMusicFiles(this, musicDir, sock, Message.MessageType.PULL);
}
/**
 * Prompts for a size cap (in MB) and pulls the most popular songs from the
 * server up to that many bytes. Does nothing when not connected.
 *
 * @param v the view that triggered the action
 */
@TargetApi(Build.VERSION_CODES.FROYO)
public void cap(View v){
    if(connected){
        AlertDialog.Builder alert = new AlertDialog.Builder(this);
        alert.setTitle("Cap");
        alert.setMessage("Enter Cap in MB");
        // Set an EditText view to get user input
        final EditText input = new EditText(this);
        alert.setView(input);
        alert.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                double capMb;
                try {
                    capMb = Double.parseDouble(input.getText().toString().trim());
                } catch (NumberFormatException e) {
                    // FIX: non-numeric input used to crash the app with an
                    // unhandled NumberFormatException; ignore the request instead.
                    return;
                }
                // convert MB to bytes (1 MB = 1048576 bytes)
                int numBytes = (int)(capMb * 1048576);
                networkingUtil.sendInitialMessage(sock, Message.MessageType.CAP, numBytes);
                File currDirectory = currentContext.getExternalFilesDir(Environment.DIRECTORY_MUSIC);
                Filestate currState = fileUtil.updateFiles(currentContext);
                networkingUtil.sendIDs(currState, sock);
                networkingUtil.receiveMusicFiles(MainActivity.this, currDirectory, sock, Message.MessageType.CAP);
            }
        });
        alert.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                // Canceled.
            }
        });
        alert.show();
    }
}
/**
 * Leaves the session: notifies the server, then releases the streams and
 * the socket. Safe to call when already disconnected (no-op).
 *
 * @param v the view that triggered this action (may be null when invoked
 *          from lifecycle callbacks such as onStop)
 */
public void leave(View v){
    if (connected) {
        networkingUtil.sendInitialMessage(sock, Message.MessageType.LEAVE);
        // Close each resource independently so a failure closing one does
        // not leak the others; streams are closed before the socket.
        try {
            inputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            sock.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        connected = false;
        updateConnectedLabel(connected);
    }
}
/**
 * Re-establishes the server session if it has been dropped; does nothing
 * when already connected.
 *
 * @param v the view that triggered this action (unused)
 */
public void reconnect(View v) {
    if (connected) {
        return;
    }
    networkingUtil.connect(this);
}
/**
 * Refreshes the connection-state label so it matches the given state.
 *
 * @param connected true if a server session is currently established
 */
public void updateConnectedLabel(boolean connected) {
    final TextView stateLabel = (TextView) findViewById(R.id.connectedState);
    stateLabel.setText(connected ? R.string.connected : R.string.not_connected);
}
/**
 * Standard getters and setters
 */
/** @return the socket of the current server session */
public Socket getSock() {
return sock;
}
/** @param sock the socket of the current server session */
public void setSock(Socket sock) {
this.sock = sock;
}
/** @param out the stream used to send data to the server */
public void setOut(OutputStream out) {
this.out = out;
}
/** @param inputStream the stream used to read server responses */
public void setInputStream(DataInputStream inputStream) {
this.inputStream = inputStream;
}
/** @param connected whether a server session is currently established */
public void setConnected(boolean connected) {
this.connected = connected;
}
/** @return whether a server session is currently established */
public boolean getConnected() {
return connected;
}
}
| |
package mil.nga.giat.geowave.format.geotools.vector;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import mil.nga.giat.geowave.adapter.vector.FeatureDataAdapter;
import mil.nga.giat.geowave.core.index.ByteArrayId;
import mil.nga.giat.geowave.core.ingest.GeoWaveData;
import mil.nga.giat.geowave.core.store.CloseableIterator;
import mil.nga.giat.geowave.core.store.adapter.WritableDataAdapter;
import mil.nga.giat.geowave.core.store.data.visibility.GlobalVisibilityHandler;
import mil.nga.giat.geowave.format.geotools.vector.RetypingVectorDataPlugin.RetypingVectorDataSource;
import org.apache.log4j.Logger;
import org.geoserver.feature.RetypingFeatureCollection;
import org.geotools.data.DataStore;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.opengis.feature.IllegalAttributeException;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.feature.type.Name;
import org.opengis.filter.Filter;
import org.opengis.filter.identity.FeatureId;
/**
* This is a wrapper for a GeoTools SimpleFeatureCollection as a convenience to
* ingest it into GeoWave by translating a list of SimpleFeatureCollection to a
* closeable iterator of GeoWaveData
*/
public class SimpleFeatureGeoWaveWrapper implements
CloseableIterator<GeoWaveData<SimpleFeature>>
{
private final static Logger LOGGER = Logger.getLogger(SimpleFeatureGeoWaveWrapper.class);
/**
 * Iterates over a single SimpleFeatureCollection, optionally retyping each
 * feature and applying the configured filter, and wraps every accepted
 * feature as a GeoWaveData instance.
 */
private class InternalIterator implements
CloseableIterator<GeoWaveData<SimpleFeature>>
{
// underlying GeoTools iterator over the wrapped collection
private final SimpleFeatureIterator featureIterator;
// adapter attached to every feature produced by this iterator
private final WritableDataAdapter<SimpleFeature> dataAdapter;
// retyping source; stays null when no retyping plugin applies
private RetypingVectorDataSource source = null;
// only features accepted by this filter are emitted
private final Filter filter;
// builder for retyped features; stays null when no retyping is performed
private SimpleFeatureBuilder builder = null;
// one-element cache of the next accepted feature (peeked by hasNext())
private GeoWaveData<SimpleFeature> currentData = null;
public InternalIterator(
final SimpleFeatureCollection featureCollection,
final String visibility,
final Filter filter ) {
this.filter = filter;
featureIterator = featureCollection.features();
final SimpleFeatureType originalSchema = featureCollection.getSchema();
SimpleFeatureType retypedSchema = originalSchema;
// when a retyping plugin is configured and provides a source for this
// schema, prepare a builder that translates features into the new schema
if (retypingPlugin != null) {
source = retypingPlugin.getRetypingSource(originalSchema);
if (source != null) {
retypedSchema = source.getRetypedSimpleFeatureType();
builder = new SimpleFeatureBuilder(
retypedSchema);
}
}
// a null or empty visibility means no visibility handler is attached
if ((visibility == null) || visibility.isEmpty()) {
dataAdapter = new FeatureDataAdapter(
retypedSchema);
}
else {
dataAdapter = new FeatureDataAdapter(
retypedSchema,
new GlobalVisibilityHandler(
visibility));
}
}
@Override
public boolean hasNext() {
if (currentData == null) {
// return a flag indicating if we find more data that matches
// the filter, essentially peeking and caching the result
return nextData();
}
return true;
}
@Override
public GeoWaveData<SimpleFeature> next() {
if (currentData == null) {
// get the next data that matches the filter
nextData();
}
// return that data and set the current data to null
// NOTE: when exhausted this returns null rather than throwing
// NoSuchElementException (currentData stays null if nextData() fails)
final GeoWaveData<SimpleFeature> retVal = currentData;
currentData = null;
return retVal;
}
/**
 * Translates a feature from the original schema into the retyped schema,
 * copying each attribute through the retyping source, preserving user
 * data, and keeping (or retyping) the feature ID.
 */
private SimpleFeature retype(
final SimpleFeature original )
throws IllegalAttributeException {
final SimpleFeatureType target = builder.getFeatureType();
for (int i = 0; i < target.getAttributeCount(); i++) {
final AttributeDescriptor attributeType = target.getDescriptor(i);
Object value = null;
// only copy attributes that also exist in the original schema;
// missing attributes are left null
if (original.getFeatureType().getDescriptor(
attributeType.getName()) != null) {
final Name name = attributeType.getName();
value = source.retypeAttributeValue(
original.getAttribute(name),
name);
}
builder.add(value);
}
String featureId = source.getFeatureId(original);
if (featureId == null) {
// a null ID will default to use the original
final FeatureId id = RetypingFeatureCollection.reTypeId(
original.getIdentifier(),
original.getFeatureType(),
target);
featureId = id.getID();
}
final SimpleFeature retyped = builder.buildFeature(featureId);
retyped.getUserData().putAll(
original.getUserData());
return retyped;
}
/**
 * Advances to the next feature accepted by the filter (features are
 * retyped before the filter is evaluated) and caches it in currentData.
 *
 * @return true if a matching feature was found, false when exhausted
 */
private synchronized boolean nextData() {
SimpleFeature nextAcceptedFeature;
do {
if (!featureIterator.hasNext()) {
return false;
}
nextAcceptedFeature = featureIterator.next();
if (builder != null) {
nextAcceptedFeature = retype(nextAcceptedFeature);
}
}
while (!filter.evaluate(nextAcceptedFeature));
currentData = new GeoWaveData<SimpleFeature>(
dataAdapter,
primaryIndexId,
nextAcceptedFeature);
return true;
}
@Override
public void remove() {}
@Override
public void close()
throws IOException {
featureIterator.close();
}
}
// remaining collections to iterate; entries are removed as they are consumed
private final List<SimpleFeatureCollection> featureCollections;
// index id attached to every produced GeoWaveData
private final ByteArrayId primaryIndexId;
// iterator over the collection currently being consumed (null until first use)
private InternalIterator currentIterator = null;
private final String visibility;
// data store backing the collections; disposed on close (may be null)
private final DataStore dataStore;
private final RetypingVectorDataPlugin retypingPlugin;
private final Filter filter;
/**
 * Creates the wrapper.
 *
 * @param featureCollections collections to ingest; this list is consumed
 *        (entries removed) as iteration proceeds
 * @param primaryIndexId index id attached to each produced GeoWaveData
 * @param visibility global visibility expression, or null/empty for none
 * @param dataStore data store to dispose on close (may be null)
 * @param retypingPlugin optional plugin used to retype features (may be null)
 * @param filter only features accepted by this filter are emitted
 */
public SimpleFeatureGeoWaveWrapper(
final List<SimpleFeatureCollection> featureCollections,
final ByteArrayId primaryIndexId,
final String visibility,
final DataStore dataStore,
final RetypingVectorDataPlugin retypingPlugin,
final Filter filter ) {
this.featureCollections = featureCollections;
this.visibility = visibility;
this.primaryIndexId = primaryIndexId;
this.dataStore = dataStore;
this.retypingPlugin = retypingPlugin;
this.filter = filter;
}
@Override
public boolean hasNext() {
if ((currentIterator == null) || !currentIterator.hasNext()) {
// return a flag indicating if we find another iterator that hasNext
return nextIterator();
}
// currentIterator has next
return true;
}
/**
 * Closes the current iterator (if any) and advances to the next collection
 * with at least one matching feature, removing each consumed collection
 * from the backing list.
 *
 * @return true if a non-empty iterator was found
 */
private synchronized boolean nextIterator() {
if (currentIterator != null) {
try {
currentIterator.close();
}
catch (final IOException e) {
LOGGER.warn(
"Cannot close feature iterator",
e);
}
}
final Iterator<SimpleFeatureCollection> it = featureCollections.iterator();
while (it.hasNext()) {
final SimpleFeatureCollection collection = it.next();
final InternalIterator featureIt = new InternalIterator(
collection,
visibility,
filter);
it.remove();
if (!featureIt.hasNext()) {
// empty (or fully filtered) collection: close it and keep looking
try {
featureIt.close();
}
catch (final IOException e) {
LOGGER.warn(
"Cannot close feature iterator",
e);
}
}
else {
currentIterator = featureIt;
return true;
}
}
return false;
}
@Override
public GeoWaveData<SimpleFeature> next() {
if ((currentIterator == null) || !currentIterator.hasNext()) {
if (nextIterator()) {
return currentIterator.next();
}
// exhausted: returns null rather than throwing NoSuchElementException
return null;
}
return currentIterator.next();
}
@Override
public void remove() {
if (currentIterator != null) {
// this isn't really implemented anyway and should not be called
currentIterator.remove();
}
}
@Override
public void close()
throws IOException {
if (currentIterator != null) {
currentIterator.close();
}
if (dataStore != null) {
dataStore.dispose();
}
}
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.conduits;
import io.undertow.UndertowLogger;
import io.undertow.UndertowOptions;
import io.undertow.server.OpenListener;
import io.undertow.util.WorkerUtils;
import org.xnio.Buffers;
import org.xnio.ChannelListeners;
import org.xnio.IoUtils;
import org.xnio.Options;
import org.xnio.StreamConnection;
import org.xnio.XnioExecutor;
import org.xnio.channels.StreamSourceChannel;
import org.xnio.conduits.AbstractStreamSinkConduit;
import org.xnio.conduits.StreamSinkConduit;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.util.concurrent.TimeUnit;
/**
* Wrapper for write timeout. This should always be the first wrapper applied to the underlying channel.
*
* @author Stuart Douglas
* @see org.xnio.Options#WRITE_TIMEOUT
*/
public final class WriteTimeoutStreamSinkConduit extends AbstractStreamSinkConduit<StreamSinkConduit> {
// key of the currently scheduled timeout task; null when none is pending.
// NOTE(review): not volatile — it appears to be intended to be accessed
// from the I/O/write thread only; confirm before touching from elsewhere.
private XnioExecutor.Key handle;
private final StreamConnection connection;
// absolute time (ms since epoch) after which the write is considered
// timed out; -1 means no deadline is armed
private volatile long expireTime = -1;
// provides the Undertow options (used for IDLE_TIMEOUT)
private final OpenListener openListener;
private static final int FUZZ_FACTOR = 50; //we add 50ms to the timeout to make sure the underlying channel has actually timed out
// task that fires when the deadline passes: either re-schedules itself
// (deadline was bumped) or closes the connection and notifies listeners
private final Runnable timeoutCommand = new Runnable() {
@Override
public void run() {
handle = null;
if (expireTime == -1) {
// deadline has been cleared since this task was scheduled
return;
}
long current = System.currentTimeMillis();
if (current < expireTime) {
//timeout has been bumped, re-schedule
handle = WorkerUtils.executeAfter(getWriteThread(),timeoutCommand, (expireTime - current) + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
return;
}
UndertowLogger.REQUEST_LOGGER.tracef("Timing out channel %s due to inactivity", connection.getSinkChannel());
IoUtils.safeClose(connection);
// wake any resumed reads/writes so their listeners observe the close
if (connection.getSourceChannel().isReadResumed()) {
ChannelListeners.invokeChannelListener(connection.getSourceChannel(), connection.getSourceChannel().getReadListener());
}
if (connection.getSinkChannel().isWriteResumed()) {
ChannelListeners.invokeChannelListener(connection.getSinkChannel(), connection.getSinkChannel().getWriteListener());
}
}
};
/**
 * Creates the conduit.
 *
 * @param delegate the next conduit in the chain
 * @param connection the connection being written to
 * @param openListener supplies the Undertow options (for IDLE_TIMEOUT)
 */
public WriteTimeoutStreamSinkConduit(final StreamSinkConduit delegate, StreamConnection connection, OpenListener openListener) {
super(delegate);
this.connection = connection;
this.openListener = openListener;
}
/**
 * Called after every write attempt: pushes the deadline forward, or closes
 * the connection if the deadline has already passed.
 *
 * @param ret number of bytes the write attempt reported
 * @throws ClosedChannelException if the write timeout has expired
 */
private void handleWriteTimeout(final long ret) throws IOException {
if (!connection.isOpen()) {
return;
}
if (ret == 0 && handle != null) {
// nothing was written and a timeout task is already pending
return;
}
Integer timeout = getTimeout();
if (timeout == null || timeout <= 0) {
return;
}
long currentTime = System.currentTimeMillis();
long expireTimeVar = expireTime;
if (expireTimeVar != -1 && currentTime > expireTimeVar) {
IoUtils.safeClose(connection);
throw new ClosedChannelException();
}
// bump the deadline relative to now
expireTime = currentTime + timeout;
}
@Override
public int write(final ByteBuffer src) throws IOException {
int ret = super.write(src);
handleWriteTimeout(ret);
return ret;
}
@Override
public long write(final ByteBuffer[] srcs, final int offset, final int length) throws IOException {
long ret = super.write(srcs, offset, length);
handleWriteTimeout(ret);
return ret;
}
@Override
public int writeFinal(ByteBuffer src) throws IOException {
int ret = super.writeFinal(src);
handleWriteTimeout(ret);
// all data flushed: cancel any pending timeout task
if(!src.hasRemaining()) {
if(handle != null) {
handle.remove();
handle = null;
}
}
return ret;
}
@Override
public long writeFinal(ByteBuffer[] srcs, int offset, int length) throws IOException {
long ret = super.writeFinal(srcs, offset, length);
handleWriteTimeout(ret);
// all data flushed: cancel any pending timeout task
if(!Buffers.hasRemaining(srcs)) {
if(handle != null) {
handle.remove();
handle = null;
}
}
return ret;
}
@Override
public long transferFrom(final FileChannel src, final long position, final long count) throws IOException {
long ret = super.transferFrom(src, position, count);
handleWriteTimeout(ret);
return ret;
}
@Override
public long transferFrom(final StreamSourceChannel source, final long count, final ByteBuffer throughBuffer) throws IOException {
long ret = super.transferFrom(source, count, throughBuffer);
handleWriteTimeout(ret);
return ret;
}
@Override
public void awaitWritable() throws IOException {
Integer timeout = getTimeout();
if (timeout != null && timeout > 0) {
// bound the wait by the configured timeout (plus fuzz)
super.awaitWritable(timeout + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
} else {
super.awaitWritable();
}
}
@Override
public void awaitWritable(long time, TimeUnit timeUnit) throws IOException {
Integer timeout = getTimeout();
if (timeout != null && timeout > 0) {
long millis = timeUnit.toMillis(time);
// never wait longer than the configured timeout (plus fuzz)
super.awaitWritable(Math.min(millis, timeout + FUZZ_FACTOR), TimeUnit.MILLISECONDS);
} else {
super.awaitWritable(time, timeUnit);
}
}
/**
 * Resolves the effective timeout in milliseconds: WRITE_TIMEOUT from the
 * channel options, falling back to (or capped by the minimum with) the
 * IDLE_TIMEOUT from the Undertow options. May return null or a
 * non-positive value when no timeout applies.
 */
private Integer getTimeout() {
Integer timeout = 0;
try {
timeout = connection.getSourceChannel().getOption(Options.WRITE_TIMEOUT);
} catch (IOException ignore) {
// should never happen, ignoring
}
Integer idleTimeout = openListener.getUndertowOptions().get(UndertowOptions.IDLE_TIMEOUT);
if ((timeout == null || timeout <= 0) && idleTimeout != null) {
timeout = idleTimeout;
} else if (timeout != null && idleTimeout != null && idleTimeout > 0) {
timeout = Math.min(timeout, idleTimeout);
}
return timeout;
}
@Override
public void terminateWrites() throws IOException {
super.terminateWrites();
// writes are finished: cancel any pending timeout task
if(handle != null) {
handle.remove();
handle = null;
}
}
@Override
public void truncateWrites() throws IOException {
super.truncateWrites();
// writes are finished: cancel any pending timeout task
if(handle != null) {
handle.remove();
handle = null;
}
}
@Override
public void resumeWrites() {
super.resumeWrites();
handleResumeTimeout();
}
@Override
public void suspendWrites() {
super.suspendWrites();
// no writes expected while suspended: cancel any pending timeout task
XnioExecutor.Key handle = this.handle;
if(handle != null) {
handle.remove();
this.handle = null;
}
}
@Override
public void wakeupWrites() {
super.wakeupWrites();
handleResumeTimeout();
}
/**
 * (Re)arms the deadline when writes are resumed, scheduling the timeout
 * task on the connection's I/O thread if one is not already pending.
 */
private void handleResumeTimeout() {
Integer timeout = getTimeout();
if (timeout == null || timeout <= 0) {
return;
}
long currentTime = System.currentTimeMillis();
expireTime = currentTime + timeout;
XnioExecutor.Key key = handle;
if (key == null) {
handle = connection.getIoThread().executeAfter(timeoutCommand, timeout, TimeUnit.MILLISECONDS);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.installer.core.impl;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.sling.installer.api.InstallableResource;
import org.apache.sling.installer.api.event.InstallationListener;
import org.apache.sling.installer.api.tasks.RegisteredResource;
import org.apache.sling.installer.api.tasks.TransformationResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Persistent list of RegisteredResource, used by installer to
* keep track of all registered resources
*/
public class PersistentResourceList {
/** Serialization version. */
private static final int VERSION = 2;
/** Entity id for restart active bundles. */
public static final String RESTART_ACTIVE_BUNDLES_TYPE = "org.apache.sling.installer.core.restart.bundles";
public static final String RESTART_ACTIVE_BUNDLES_ID = "org.apache.sling.installer.core.restart.bundles";
public static final String RESTART_ACTIVE_BUNDLES_ENTITY_ID = RESTART_ACTIVE_BUNDLES_TYPE + ':' + RESTART_ACTIVE_BUNDLES_ID;
/** The logger */
private final Logger logger = LoggerFactory.getLogger(this.getClass());
/**
 * Map of registered resource sets.
 * The key of the map is the entity id of the registered resource.
 * The value is a set containing all registered resources for the
 * same entity. Usually this is just one resource per entity.
 */
private final Map<String, EntityResourceList> data;
/** The persistence file. */
private final File dataFile;
/** All untransformed resources. */
private final List<RegisteredResource> untransformedResources;
/** Listener notified about installation events; passed on to each group. */
private final InstallationListener listener;
/**
 * Creates the list, restoring previously persisted state from the data
 * file when present. Any failure during restore is logged and the list
 * starts empty instead of failing.
 *
 * @param dataFile file used to persist and restore the state
 * @param listener installation listener propagated to resource groups
 */
@SuppressWarnings("unchecked")
public PersistentResourceList(final File dataFile, final InstallationListener listener) {
this.dataFile = dataFile;
this.listener = listener;
Map<String, EntityResourceList> restoredData = null;
List<RegisteredResource> unknownList = null;
if ( dataFile.exists() ) {
ObjectInputStream ois = null;
try {
// NOTE(review): native Java deserialization of the installer's own
// data file - the file is assumed to be trusted local state
ois = new ObjectInputStream(new BufferedInputStream(new FileInputStream(dataFile)));
final int version = ois.readInt();
if ( version > 0 && version <= VERSION ) {
restoredData = (Map<String, EntityResourceList>)ois.readObject();
// the untransformed resource list is only read for the current version
if ( version == VERSION ) {
unknownList = (List<RegisteredResource>)ois.readObject();
}
} else {
logger.warn("Unknown version for persistent resource list: {}", version);
}
logger.debug("Restored resource list: {}", restoredData);
logger.debug("Restored unknown resource list: {}", unknownList);
} catch (final Exception e) {
logger.warn("Unable to restore data, starting with empty list (" + e.getMessage() + ")", e);
restoredData = null;
unknownList = null;
} finally {
if (ois != null) {
try {
ois.close();
} catch (final IOException ignore) {
// ignore
}
}
}
}
data = restoredData != null ? restoredData : new HashMap<String, EntityResourceList>();
this.untransformedResources = unknownList != null ? unknownList : new ArrayList<RegisteredResource>();
this.updateCache();
// update resource ids
for(final Map.Entry<String, EntityResourceList> entry : this.data.entrySet()) {
final EntityResourceList erl = entry.getValue();
erl.setResourceId(entry.getKey());
erl.setListener(listener);
}
// check for special resources
if ( this.getEntityResourceList(RESTART_ACTIVE_BUNDLES_ENTITY_ID) == null ) {
final RegisteredResource rr = this.addOrUpdate(new InternalResource("$sling-installer$",
RESTART_ACTIVE_BUNDLES_ID,
null,
new Hashtable<String, Object>(),
InstallableResource.TYPE_PROPERTIES, "1",
null, null, null));
final TransformationResult result = new TransformationResult();
result.setId(RESTART_ACTIVE_BUNDLES_ID);
result.setResourceType(RESTART_ACTIVE_BUNDLES_TYPE);
this.transform(rr, new TransformationResult[] {result});
}
}
/**
 * Update the url to digest cache
 * (covers both the grouped resources and the untransformed ones)
 */
private void updateCache() {
for(final EntityResourceList group : this.data.values()) {
for(final RegisteredResource rr : group.getResources()) {
if ( ((RegisteredResourceImpl)rr).hasDataFile() ) {
FileDataStore.SHARED.updateDigestCache(rr.getURL(), ((RegisteredResourceImpl)rr).getDataFile(), rr.getDigest());
}
}
}
for(final RegisteredResource rr : this.untransformedResources ) {
if ( ((RegisteredResourceImpl)rr).hasDataFile() ) {
FileDataStore.SHARED.updateDigestCache(rr.getURL(), ((RegisteredResourceImpl)rr).getDataFile(), rr.getDigest());
}
}
}
/**
 * Persist the current state
 * (failures are logged, never propagated)
 */
public void save() {
try {
final ObjectOutputStream oos = new ObjectOutputStream(new BufferedOutputStream(new FileOutputStream(dataFile)));
try {
oos.writeInt(VERSION);
oos.writeObject(data);
oos.writeObject(untransformedResources);
logger.debug("Persisted resource list.");
} finally {
oos.close();
}
} catch (final Exception e) {
logger.warn("Unable to save persistent list: " + e.getMessage(), e);
}
}
/** @return the entity ids of all registered resource groups (live view) */
public Collection<String> getEntityIds() {
return this.data.keySet();
}
/**
 * Add or update an installable resource.
 * An existing resource is matched by URL plus digest; otherwise a new
 * registered resource is created and filed.
 * @param input The installable resource
 * @return the registered resource, or null if processing failed
 */
public RegisteredResource addOrUpdate(final InternalResource input) {
// first check untransformed resource if there are resources with the same url and digest
for(final RegisteredResource rr : this.untransformedResources ) {
if ( rr.getURL().equals(input.getURL()) && ( rr.getDigest().equals(input.getDigest())) ) {
// if we found the resource we can return after updating
((RegisteredResourceImpl)rr).update(input);
return rr;
}
}
// installed resources are next
for(final EntityResourceList group : this.data.values()) {
for(final RegisteredResource rr : group.getResources()) {
if ( rr.getURL().equals(input.getURL()) && ( rr.getDigest().equals(input.getDigest()))) {
// if we found the resource we can return after updating
((RegisteredResourceImpl)rr).update(input);
return rr;
}
}
}
try {
// not seen before: create a new registered resource and file it
final RegisteredResourceImpl registeredResource = RegisteredResourceImpl.create(input);
this.checkInstallable(registeredResource);
return registeredResource;
} catch (final IOException ioe) {
logger.warn("Ignoring resource. Error during processing of " + input.getURL(), ioe);
return null;
}
}
/**
 * Check if the provided installable resource is already installable (has a
 * known resource type)
 * Resources with a known type are filed into their entity group; plain
 * file/properties resources go into the untransformed list.
 */
private void checkInstallable(final RegisteredResourceImpl input) {
if ( !InstallableResource.TYPE_FILE.equals(input.getType())
&& !InstallableResource.TYPE_PROPERTIES.equals(input.getType()) ) {
EntityResourceList t = this.data.get(input.getEntityId());
if (t == null) {
// first resource for this entity: create its group
t = new EntityResourceList(input.getEntityId(), this.listener);
this.data.put(input.getEntityId(), t);
}
t.addOrUpdate(input);
} else {
// check if there is an old resource and remove it first
if ( this.untransformedResources.contains(input) ) {
this.untransformedResources.remove(input);
}
this.untransformedResources.add(input);
}
}
/**
 * Get the list of untransformed resources = resources without resource type
 * (the live internal list, not a copy)
 */
public List<RegisteredResource> getUntransformedResources() {
return this.untransformedResources;
}
/**
 * Remove a resource by url.
 * Check all resource groups and the list of untransformed resources.
 * @param url The url to remove
 */
public void remove(final String url) {
// iterate over all resource groups and remove resources
// with the given url
for(final EntityResourceList group : this.data.values()) {
group.remove(url);
}
// iterate over untransformed resources and remove
// the resource with that url
final Iterator<RegisteredResource> i = this.untransformedResources.iterator();
while ( i.hasNext() ) {
final RegisteredResource rr = i.next();
if ( rr.getURL().equals(url) ) {
((RegisteredResourceImpl)rr).cleanup();
i.remove();
break;
}
}
}
/**
 * Get the resource group for an entity id.
 * Falls back to matching a group's full alias when no group is registered
 * under the id itself.
 */
public EntityResourceList getEntityResourceList(final String entityId) {
EntityResourceList erl = this.data.get(entityId);
if ( erl == null ) {
for(final EntityResourceList group : this.data.values()) {
if ( entityId.equals(group.getFullAlias()) ) {
erl = group;
break;
}
}
}
return erl;
}
/**
 * Compact the internal state and remove empty groups.
 * @return <code>true</code> if another cycle should be started.
 */
public boolean compact() {
boolean startNewCycle = false;
final Iterator<Map.Entry<String, EntityResourceList>> i = this.data.entrySet().iterator();
while ( i.hasNext() ) {
final Map.Entry<String, EntityResourceList> entry = i.next();
startNewCycle |= entry.getValue().compact();
if ( entry.getValue().isEmpty() ) {
i.remove();
}
}
return startNewCycle;
}
/**
 * Transform an unknown resource to a registered one
 * Each valid transformation result produces a clone of the resource which
 * is then filed via checkInstallable.
 */
public void transform(final RegisteredResource resource,
final TransformationResult[] result) {
// remove resource from unknown list
this.untransformedResources.remove(resource);
try {
for(int i=0; i<result.length; i++) {
// check the result
final TransformationResult tr = result[i];
if ( tr == null ) {
logger.warn("Ignoring null result for {}", resource);
continue;
}
if ( tr.getResourceType() != null && tr.getId() == null) {
logger.error("Result for {} contains new resource type {} but no unique id!",
resource, tr.getResourceType());
continue;
}
final RegisteredResourceImpl clone = (RegisteredResourceImpl)((RegisteredResourceImpl)resource).clone(result[i]);
this.checkInstallable(clone);
}
} catch (final IOException ioe) {
logger.warn("Ignoring resource. Error during processing of " + resource, ioe);
}
}
/**
 * Check if the id is a special id and should not be included in the info report
 */
public boolean isSpecialEntityId(final String id) {
return RESTART_ACTIVE_BUNDLES_ENTITY_ID.equals(id);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.statistic;
import org.apache.calcite.materialize.SqlStatisticProvider;
import org.apache.calcite.plan.Contexts;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptSchema;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.plan.ViewExpanders;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.rel2sql.RelToSqlConverter;
import org.apache.calcite.rel.rel2sql.SqlImplementor;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.util.Util;
import com.google.common.cache.CacheBuilder;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import javax.sql.DataSource;
import static java.util.Objects.requireNonNull;
/**
* Implementation of {@link SqlStatisticProvider} that generates and executes
* SQL queries.
*/
public class QuerySqlStatisticProvider implements SqlStatisticProvider {
/** Instance that uses SQL to compute statistics,
* does not log SQL statements,
* and caches up to 1,024 results for up to 30 minutes.
* (That period should be sufficient for the
* duration of Calcite's tests, and many other purposes.) */
public static final SqlStatisticProvider SILENT_CACHING_INSTANCE =
new CachingSqlStatisticProvider(
new QuerySqlStatisticProvider(sql -> { }),
CacheBuilder.newBuilder().expireAfterAccess(30, TimeUnit.MINUTES)
.maximumSize(1_024).build());
/** As {@link #SILENT_CACHING_INSTANCE} but prints SQL statements to
* {@link System#out}. */
public static final SqlStatisticProvider VERBOSE_CACHING_INSTANCE =
new CachingSqlStatisticProvider(
new QuerySqlStatisticProvider(sql -> System.out.println(sql + ":")),
CacheBuilder.newBuilder().expireAfterAccess(30, TimeUnit.MINUTES)
.maximumSize(1_024).build());
private final Consumer<String> sqlConsumer;
/** Creates a QuerySqlStatisticProvider.
*
* @param sqlConsumer Called when each SQL statement is generated
*/
public QuerySqlStatisticProvider(Consumer<String> sqlConsumer) {
this.sqlConsumer = requireNonNull(sqlConsumer, "sqlConsumer");
}
@Override public double tableCardinality(RelOptTable table) {
final SqlDialect dialect = table.unwrapOrThrow(SqlDialect.class);
final DataSource dataSource = table.unwrapOrThrow(DataSource.class);
return withBuilder(
(cluster, relOptSchema, relBuilder) -> {
// Generate:
// SELECT COUNT(*) FROM `EMP`
relBuilder.push(table.toRel(ViewExpanders.simpleContext(cluster)))
.aggregate(relBuilder.groupKey(), relBuilder.count());
final String sql = toSql(relBuilder.build(), dialect);
try (Connection connection = dataSource.getConnection();
Statement statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(sql)) {
if (!resultSet.next()) {
throw new AssertionError("expected exactly 1 row: " + sql);
}
final double cardinality = resultSet.getDouble(1);
if (resultSet.next()) {
throw new AssertionError("expected exactly 1 row: " + sql);
}
return cardinality;
} catch (SQLException e) {
throw handle(e, sql);
}
});
}
@Override public boolean isForeignKey(RelOptTable fromTable, List<Integer> fromColumns,
RelOptTable toTable, List<Integer> toColumns) {
final SqlDialect dialect = fromTable.unwrapOrThrow(SqlDialect.class);
final DataSource dataSource = fromTable.unwrapOrThrow(DataSource.class);
return withBuilder(
(cluster, relOptSchema, relBuilder) -> {
// EMP(DEPTNO) is a foreign key to DEPT(DEPTNO) if the following
// query returns 0:
//
// SELECT COUNT(*) FROM (
// SELECT deptno FROM `EMP` WHERE deptno IS NOT NULL
// MINUS
// SELECT deptno FROM `DEPT`)
final RelOptTable.ToRelContext toRelContext =
ViewExpanders.simpleContext(cluster);
relBuilder.push(fromTable.toRel(toRelContext))
.filter(fromColumns.stream()
.map(column ->
relBuilder.isNotNull(relBuilder.field(column)))
.collect(Collectors.toList()))
.project(relBuilder.fields(fromColumns))
.push(toTable.toRel(toRelContext))
.project(relBuilder.fields(toColumns))
.minus(false, 2)
.aggregate(relBuilder.groupKey(), relBuilder.count());
final String sql = toSql(relBuilder.build(), dialect);
try (Connection connection = dataSource.getConnection();
Statement statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(sql)) {
if (!resultSet.next()) {
throw new AssertionError("expected exactly 1 row: " + sql);
}
final int count = resultSet.getInt(1);
if (resultSet.next()) {
throw new AssertionError("expected exactly 1 row: " + sql);
}
return count == 0;
} catch (SQLException e) {
throw handle(e, sql);
}
});
}
/**
 * Returns whether the given columns are a unique key of {@code table},
 * determined by executing a query against the backing JDBC data source.
 *
 * <p>The collection of columns ['DEPTNO'] is a key for 'EMP' if the
 * following query returns no rows:
 *
 * <pre>
 *   SELECT 1
 *   FROM `EMP`
 *   GROUP BY `DEPTNO`
 *   HAVING COUNT(*) &gt; 1
 * </pre>
 */
@Override public boolean isKey(RelOptTable table, List<Integer> columns) {
  final SqlDialect dialect = table.unwrapOrThrow(SqlDialect.class);
  final DataSource dataSource = table.unwrapOrThrow(DataSource.class);
  return withBuilder(
      (cluster, relOptSchema, relBuilder) -> {
        final RelOptTable.ToRelContext toRelContext =
            ViewExpanders.simpleContext(cluster);
        // Group by the candidate key and keep only duplicated groups.
        relBuilder.push(table.toRel(toRelContext))
            .aggregate(relBuilder.groupKey(relBuilder.fields(columns)),
                relBuilder.count())
            .filter(
                relBuilder.call(SqlStdOperatorTable.GREATER_THAN,
                    Util.last(relBuilder.fields()), relBuilder.literal(1)));
        final String sql = toSql(relBuilder.build(), dialect);
        try (Connection connection = dataSource.getConnection();
            Statement statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery(sql)) {
          // No duplicate groups => the columns are unique, i.e. a key.
          return !resultSet.next();
        } catch (SQLException e) {
          throw handle(e, sql);
        }
      });
}
/**
 * Wraps a {@link SQLException} raised while running a statistics query,
 * keeping it as the cause and embedding the offending SQL in the message.
 */
private static RuntimeException handle(SQLException e, String sql) {
  final String message = "Error while executing SQL for statistics: " + sql;
  return new RuntimeException(message, e);
}
/**
 * Converts a relational expression into a SQL string in the given dialect.
 *
 * <p>Every generated statement is also handed to {@code sqlConsumer},
 * which lets callers (e.g. tests) observe the SQL issued for statistics.
 */
protected String toSql(RelNode rel, SqlDialect dialect) {
  final RelToSqlConverter converter = new RelToSqlConverter(dialect);
  SqlImplementor.Result result = converter.visitRoot(rel);
  final SqlNode sqlNode = result.asStatement();
  final String sql = sqlNode.toSqlString(dialect).getSql();
  sqlConsumer.accept(sql);
  return sql;
}
/**
 * Creates a throw-away planner cluster and {@link RelBuilder}, invokes
 * {@code action} with them, and returns the action's result.
 */
private static <R> R withBuilder(BuilderAction<R> action) {
  return Frameworks.withPlanner(
      (cluster, relOptSchema, rootSchema) -> {
        final RelBuilder relBuilder =
            RelBuilder.proto(Contexts.of()).create(cluster, relOptSchema);
        return action.apply(cluster, relOptSchema, relBuilder);
      });
}
/** Performs an action with a {@link RelBuilder}.
 *
 * @param <R> Result type */
@FunctionalInterface
private interface BuilderAction<R> {
  /** Applies this action to the given cluster, schema and builder,
   * returning the computed result. */
  R apply(RelOptCluster cluster, RelOptSchema relOptSchema,
      RelBuilder relBuilder);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.channels.ClosedChannelException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DF;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.server.datanode.DataStorage;
import org.apache.hadoop.hdfs.server.datanode.DatanodeUtil;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.CloseableReferenceCount;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Timer;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
* The underlying volume used to store replica.
*
* It uses the {@link FsDatasetImpl} object for synchronization.
*/
@InterfaceAudience.Private
@VisibleForTesting
public class FsVolumeImpl implements FsVolumeSpi {
public static final Logger LOG =
LoggerFactory.getLogger(FsVolumeImpl.class);
private static final ObjectWriter WRITER =
new ObjectMapper().writerWithDefaultPrettyPrinter();
private static final ObjectReader READER =
new ObjectMapper().reader(BlockIteratorState.class);
private final FsDatasetImpl dataset;
private final String storageID;
private final StorageType storageType;
private final Map<String, BlockPoolSlice> bpSlices
= new ConcurrentHashMap<String, BlockPoolSlice>();
private final File currentDir; // <StorageDirectory>/current
private final DF usage;
private final long reserved;
private CloseableReferenceCount reference = new CloseableReferenceCount();
// Disk space reserved for blocks (RBW or Re-replicating) open for write.
private AtomicLong reservedForReplicas;
private long recentReserved = 0;
// Capacity configured. This is useful when we want to
// limit the visible capacity for tests. If negative, then we just
// query from the filesystem.
protected volatile long configuredCapacity;
/**
* Per-volume worker pool that processes new blocks to cache.
* The maximum number of workers per volume is bounded (configurable via
* dfs.datanode.fsdatasetcache.max.threads.per.volume) to limit resource
* contention.
*/
protected ThreadPoolExecutor cacheExecutor;
/**
 * Creates a volume rooted at {@code currentDir}
 * ({@code <StorageDirectory>/current}).
 *
 * <p>Disk-usage monitoring ({@link DF}) is attached to the parent of
 * {@code currentDir}, i.e. the storage directory itself.
 *
 * @throws IOException if disk-usage monitoring cannot be initialized
 */
FsVolumeImpl(FsDatasetImpl dataset, String storageID, File currentDir,
    Configuration conf, StorageType storageType) throws IOException {
  this.dataset = dataset;
  this.storageID = storageID;
  // Bytes reserved for non-HDFS use (dfs.datanode.du.reserved).
  this.reserved = conf.getLong(
      DFSConfigKeys.DFS_DATANODE_DU_RESERVED_KEY,
      DFSConfigKeys.DFS_DATANODE_DU_RESERVED_DEFAULT);
  this.reservedForReplicas = new AtomicLong(0L);
  this.currentDir = currentDir;
  File parent = currentDir.getParentFile();
  this.usage = new DF(parent, conf);
  this.storageType = storageType;
  // Negative means "query the filesystem"; see getCapacity().
  this.configuredCapacity = -1;
  cacheExecutor = initializeCacheExecutor(parent);
}
/**
 * Builds the bounded per-volume worker pool that processes new blocks to
 * cache, or returns null when caching does not apply.
 *
 * @param parent the storage directory (parent of {@code current/}), used
 *               only to name the worker threads
 * @return the executor, or null for transient storage or when the dataset
 *         has no datanode (test usage)
 */
protected ThreadPoolExecutor initializeCacheExecutor(File parent) {
  if (storageType.isTransient()) {
    // RAM-backed storage: nothing to cache.
    return null;
  }
  if (dataset.datanode == null) {
    // FsVolumeImpl is used in test.
    return null;
  }
  final int maxNumThreads = dataset.datanode.getConf().getInt(
      DFSConfigKeys.DFS_DATANODE_FSDATASETCACHE_MAX_THREADS_PER_VOLUME_KEY,
      DFSConfigKeys.DFS_DATANODE_FSDATASETCACHE_MAX_THREADS_PER_VOLUME_DEFAULT);
  ThreadFactory workerFactory = new ThreadFactoryBuilder()
      .setDaemon(true)
      .setNameFormat("FsVolumeImplWorker-" + parent.toString() + "-%d")
      .build();
  ThreadPoolExecutor executor = new ThreadPoolExecutor(
      1, maxNumThreads,
      60, TimeUnit.SECONDS,
      new LinkedBlockingQueue<Runnable>(),
      workerFactory);
  // Let even the core thread exit when idle; caching work is bursty.
  executor.allowCoreThreadTimeOut(true);
  return executor;
}
/**
 * Logs (at TRACE) a reference-count change {@code op} together with the
 * current stack, suppressing output for a few hot read-only accessors
 * that would otherwise flood the log.
 */
private void printReferenceTraceInfo(String op) {
  final StackTraceElement[] stack = Thread.currentThread().getStackTrace();
  for (StackTraceElement ste : stack) {
    switch (ste.getMethodName()) {
    case "getDfsUsed":
    case "getBlockPoolUsed":
    case "getAvailable":
    case "getVolumeMap":
      // Frequently-called accessors: skip tracing entirely.
      return;
    default:
      break;
    }
  }
  FsDatasetImpl.LOG.trace("Reference count: " + op + " " + this + ": " +
      this.reference.getReferenceCount());
  // Reuse the stack captured above instead of walking the stack a second
  // time (the original called Thread.currentThread().getStackTrace() twice).
  FsDatasetImpl.LOG.trace(Joiner.on("\n").join(stack));
}
/**
 * Increase the reference count. The caller must increase the reference count
 * before issuing IOs.
 *
 * @throws ClosedChannelException if the volume is already closed.
 */
private void reference() throws ClosedChannelException {
  this.reference.reference();
  if (FsDatasetImpl.LOG.isTraceEnabled()) {
    printReferenceTraceInfo("incr");
  }
}
/**
 * Decrease the reference count.
 *
 * <p>At DEBUG level, a stack trace is logged when the count is about to go
 * non-positive, which would indicate an unbalanced unreference.
 */
private void unreference() {
  if (FsDatasetImpl.LOG.isTraceEnabled()) {
    // NOTE(review): "desc" looks like a typo for "decr", but it is kept
    // as-is because trace output may be grepped for this tag.
    printReferenceTraceInfo("desc");
  }
  if (FsDatasetImpl.LOG.isDebugEnabled()) {
    if (reference.getReferenceCount() <= 0) {
      FsDatasetImpl.LOG.debug("Decrease reference count <= 0 on " + this +
          Joiner.on("\n").join(Thread.currentThread().getStackTrace()));
    }
  }
  checkReference();
  this.reference.unreference();
}
/**
 * A closeable handle that keeps the volume's reference count raised while
 * open; closing it releases exactly one reference.
 */
private static class FsVolumeReferenceImpl implements FsVolumeReference {
  private FsVolumeImpl volume;
  FsVolumeReferenceImpl(FsVolumeImpl volume) throws ClosedChannelException {
    this.volume = volume;
    volume.reference();
  }
  /**
   * Decreases the reference count. Idempotent: only the first close
   * releases the reference.
   * @throws IOException it never throws IOException.
   */
  @Override
  public void close() throws IOException {
    if (volume != null) {
      volume.unreference();
      volume = null; // guard against double release
    }
  }
  /** Returns the referenced volume, or null once closed. */
  @Override
  public FsVolumeSpi getVolume() {
    return this.volume;
  }
}
/** Obtains a closeable handle that pins this volume open for IO. */
@Override
public FsVolumeReference obtainReference() throws ClosedChannelException {
  return new FsVolumeReferenceImpl(this);
}
/** Asserts that at least one reference is currently held. */
private void checkReference() {
  Preconditions.checkState(reference.getReferenceCount() > 0);
}
/** Returns the current reference count (test use only). */
@VisibleForTesting
int getReferenceCount() {
  return this.reference.getReferenceCount();
}
/**
 * Close this volume and stop all dataxceiver threads using it.
 * @throws IOException if the volume is already closed.
 */
void setClosed() throws IOException {
  try {
    this.reference.setClosed();
    dataset.stopAllDataxceiverThreads(this);
  } catch (ClosedChannelException e) {
    throw new IOException("The volume has already closed.", e);
  }
}
/**
 * Check whether this volume has successfully been closed, i.e. no
 * outstanding references remain.
 */
boolean checkClosed() {
  if (this.reference.getReferenceCount() > 0) {
    if (FsDatasetImpl.LOG.isDebugEnabled()) {
      FsDatasetImpl.LOG.debug(String.format(
          "The reference count for %s is %d, wait to be 0.",
          this, reference.getReferenceCount()));
    }
    return false;
  }
  return true;
}
/** Returns the volume's {@code <StorageDirectory>/current} directory. */
File getCurrentDir() {
  return currentDir;
}
/** Returns the replica-being-written directory of the block pool. */
File getRbwDir(String bpid) throws IOException {
  return getBlockPoolSlice(bpid).getRbwDir();
}
/** Returns the lazy-persist directory of the block pool. */
File getLazyPersistDir(String bpid) throws IOException {
  return getBlockPoolSlice(bpid).getLazypersistDir();
}
/** Returns the temporary-file directory of the block pool. */
File getTmpDir(String bpid) throws IOException {
  return getBlockPoolSlice(bpid).getTmpDir();
}
/** Accounts for a deleted block file; on RAM disk also frees locked memory. */
void onBlockFileDeletion(String bpid, long value) {
  decDfsUsedAndNumBlocks(bpid, value, true);
  if (isTransientStorage()) {
    dataset.releaseLockedMemory(value, true);
  }
}
/** Accounts for a deleted meta file (block count is unchanged). */
void onMetaFileDeletion(String bpid, long value) {
  decDfsUsedAndNumBlocks(bpid, value, false);
}
/**
 * Decrements the pool's used-space counter by {@code value} and, when a
 * block file (not merely a meta file) was deleted, its block count too.
 */
private void decDfsUsedAndNumBlocks(String bpid, long value,
    boolean blockFileDeleted) {
  // Synchronized on the dataset, matching all other usage-counter updates.
  synchronized(dataset) {
    BlockPoolSlice bp = bpSlices.get(bpid);
    if (bp != null) {
      bp.decDfsUsed(value);
      if (blockFileDeleted) {
        bp.decrNumBlocks();
      }
    }
  }
}
/** Increments the pool's used-space counter and block count. */
void incDfsUsedAndNumBlocks(String bpid, long value) {
  synchronized (dataset) {
    BlockPoolSlice bp = bpSlices.get(bpid);
    if (bp != null) {
      bp.incDfsUsed(value);
      bp.incrNumBlocks();
    }
  }
}
/** Increments the pool's used-space counter only. */
void incDfsUsed(String bpid, long value) {
  synchronized(dataset) {
    BlockPoolSlice bp = bpSlices.get(bpid);
    if (bp != null) {
      bp.incDfsUsed(value);
    }
  }
}
/** Returns the total bytes of HDFS data across all block pools. */
@VisibleForTesting
public long getDfsUsed() throws IOException {
  long dfsUsed = 0;
  synchronized(dataset) {
    for(BlockPoolSlice s : bpSlices.values()) {
      dfsUsed += s.getDfsUsed();
    }
  }
  return dfsUsed;
}
/** Returns the bytes used by a single block pool. */
long getBlockPoolUsed(String bpid) throws IOException {
  return getBlockPoolSlice(bpid).getDfsUsed();
}
/**
 * Returns the configured capacity if one was injected (tests); otherwise
 * the filesystem capacity minus the space reserved for non-HDFS use.
 *
 * @return the unreserved number of bytes on this volume; never negative.
 */
@VisibleForTesting
public long getCapacity() {
  if (configuredCapacity >= 0) {
    // A non-negative value was set via setCapacityForTesting().
    return configuredCapacity;
  }
  final long unreserved = usage.getCapacity() - reserved;
  return Math.max(unreserved, 0);
}
/**
 * This function MUST NOT be used outside of tests.
 *
 * @param capacity capacity to report from {@code getCapacity()}; a
 *                 negative value restores filesystem-based reporting
 */
@VisibleForTesting
public void setCapacityForTesting(long capacity) {
  this.configuredCapacity = capacity;
}
/**
 * Calculates the bytes still usable for HDFS data on this volume: the
 * smaller of (capacity - used - replica reservation) and the filesystem's
 * free space less the non-HDFS and replica reservations.
 *
 * @return the available number of bytes left in this filesystem; may be zero.
 */
@Override
public long getAvailable() throws IOException {
  final long unusedCapacity =
      getCapacity() - getDfsUsed() - reservedForReplicas.get();
  final long fsFree = usage.getAvailable() - reserved
      - reservedForReplicas.get();
  final long remaining = Math.min(unusedCapacity, fsFree);
  return Math.max(remaining, 0);
}
/** Returns the bytes currently reserved for in-flight replica writes. */
@VisibleForTesting
public long getReservedForReplicas() {
  return reservedForReplicas.get();
}
/** Returns the size of the most recent reservation (test use only). */
@VisibleForTesting
long getRecentReserved() {
  return recentReserved;
}
/** Returns the bytes reserved for non-HDFS use on this volume. */
long getReserved(){
  return reserved;
}
/**
 * Returns the slice for the given block pool.
 *
 * @throws IOException if the block pool has not been added to this volume
 */
BlockPoolSlice getBlockPoolSlice(String bpid) throws IOException {
  BlockPoolSlice bp = bpSlices.get(bpid);
  if (bp == null) {
    throw new IOException("block pool " + bpid + " is not found");
  }
  return bp;
}
/** Returns the storage directory path (parent of {@code current/}). */
@Override
public String getBasePath() {
  return currentDir.getParent();
}
@Override
public boolean isTransientStorage() {
  return storageType.isTransient();
}
/** Returns the absolute path of the block pool's directory. */
@Override
public String getPath(String bpid) throws IOException {
  return getBlockPoolSlice(bpid).getDirectory().getAbsolutePath();
}
@Override
public File getFinalizedDir(String bpid) throws IOException {
  return getBlockPoolSlice(bpid).getFinalizedDir();
}
/**
 * Returns a snapshot array of the currently active block pool IDs.
 * (A freshly allocated array, so callers may modify it freely.)
 */
@Override
public String[] getBlockPoolList() {
  return bpSlices.keySet().toArray(new String[bpSlices.keySet().size()]);
}
/**
 * Temporary files. They get moved to the finalized block directory when
 * the block is finalized.
 *
 * <p>Replica space is reserved up front and released again if the file
 * cannot be created.
 */
File createTmpFile(String bpid, Block b) throws IOException {
  checkReference();
  reserveSpaceForReplica(b.getNumBytes());
  try {
    return getBlockPoolSlice(bpid).createTmpFile(b);
  } catch (IOException exception) {
    // Undo the reservation; the replica was never materialized.
    releaseReservedSpace(b.getNumBytes());
    throw exception;
  }
}
/** Reserves space for a replica that is about to be written. */
@Override
public void reserveSpaceForReplica(long bytesToReserve) {
  if (bytesToReserve != 0) {
    reservedForReplicas.addAndGet(bytesToReserve);
    recentReserved = bytesToReserve; // test-observable bookkeeping
  }
}
/** Releases previously reserved replica space, clamping at zero. */
@Override
public void releaseReservedSpace(long bytesToRelease) {
  if (bytesToRelease != 0) {
    long oldReservation, newReservation;
    // CAS loop: concurrent reserve/release calls must not lose updates.
    do {
      oldReservation = reservedForReplicas.get();
      newReservation = oldReservation - bytesToRelease;
      if (newReservation < 0) {
        // Failsafe, this should never occur in practice, but if it does we
        // don't want to start advertising more space than we have available.
        newReservation = 0;
      }
    } while (!reservedForReplicas.compareAndSet(oldReservation,
        newReservation));
  }
}
/** Releases locked memory; a no-op on non-transient storage. */
@Override
public void releaseLockedMemory(long bytesToRelease) {
  if (isTransientStorage()) {
    dataset.releaseLockedMemory(bytesToRelease, false);
  }
}
/** Accepts the {@code subdir*} children of a block directory. */
private enum SubdirFilter implements FilenameFilter {
  INSTANCE;
  @Override
  public boolean accept(File dir, String name) {
    return name.startsWith("subdir");
  }
}
/** Accepts block files ({@code blk_*}) but not their {@code .meta} files. */
private enum BlockFileFilter implements FilenameFilter {
  INSTANCE;
  @Override
  public boolean accept(File dir, String name) {
    return !name.endsWith(".meta") &&
        name.startsWith(Block.BLOCK_FILE_PREFIX);
  }
}
/**
 * Returns the first element of the sorted list {@code arr} that is
 * strictly greater than {@code prev}, or the first element when
 * {@code prev} is null, or null when no such element exists.
 */
@VisibleForTesting
public static String nextSorted(List<String> arr, String prev) {
  int start = 0;
  if (prev != null) {
    final int pos = Collections.binarySearch(arr, prev);
    // Found: advance past it. Not found: binarySearch returns
    // (-(insertion point) - 1), so decode the insertion point.
    start = (pos >= 0) ? pos + 1 : -1 - pos;
  }
  return (start < arr.size()) ? arr.get(start) : null;
}
/**
 * JSON-serializable snapshot of a {@code BlockIteratorImpl}'s position,
 * written by {@code save()} and restored by {@code load()}.
 */
private static class BlockIteratorState {
  BlockIteratorState() {
    lastSavedMs = iterStartMs = Time.now();
    curFinalizedDir = null;
    curFinalizedSubDir = null;
    curEntry = null;
    atEnd = false;
  }
  // The wall-clock ms since the epoch at which this iterator was last saved.
  @JsonProperty
  private long lastSavedMs;
  // The wall-clock ms since the epoch at which this iterator was created.
  @JsonProperty
  private long iterStartMs;
  // First-level finalized subdir currently being scanned, or null.
  @JsonProperty
  private String curFinalizedDir;
  // Second-level finalized subdir currently being scanned, or null.
  @JsonProperty
  private String curFinalizedSubDir;
  // Last block-file name returned from the current subdir, or null.
  @JsonProperty
  private String curEntry;
  // True once the iterator has exhausted all subdirectories.
  @JsonProperty
  private boolean atEnd;
}
/**
* A BlockIterator implementation for FsVolumeImpl.
*/
private class BlockIteratorImpl implements FsVolumeSpi.BlockIterator {
private final File bpidDir;
private final String name;
private final String bpid;
private long maxStalenessMs = 0;
private List<String> cache;
private long cacheMs;
private BlockIteratorState state;
/** Creates an iterator positioned at the start of block pool {@code bpid}. */
BlockIteratorImpl(String bpid, String name) {
  this.bpidDir = new File(currentDir, bpid);
  this.name = name;
  this.bpid = bpid;
  rewind();
}
/**
 * Get the next subdirectory within the block pool slice.
 *
 * <p>Also invalidates the cached entry listing, since advancing to a new
 * subdirectory makes the cached entries of the previous one stale.
 *
 * @return The next subdirectory within the block pool slice, or
 * null if there are no more.
 */
private String getNextSubDir(String prev, File dir)
    throws IOException {
  List<String> children =
      IOUtils.listDirectory(dir, SubdirFilter.INSTANCE);
  cache = null;
  cacheMs = 0;
  if (children.size() == 0) {
    LOG.trace("getNextSubDir({}, {}): no subdirectories found in {}",
        storageID, bpid, dir.getAbsolutePath());
    return null;
  }
  // Sort so that "next after prev" is well-defined via binary search.
  Collections.sort(children);
  String nextSubDir = nextSorted(children, prev);
  if (nextSubDir == null) {
    LOG.trace("getNextSubDir({}, {}): no more subdirectories found in {}",
        storageID, bpid, dir.getAbsolutePath());
  } else {
    LOG.trace("getNextSubDir({}, {}): picking next subdirectory {} " +
        "within {}", storageID, bpid, nextSubDir, dir.getAbsolutePath());
  }
  return nextSubDir;
}
/** Advances to the next first-level subdir under {@code current/finalized}. */
private String getNextFinalizedDir() throws IOException {
  File dir = Paths.get(
      bpidDir.getAbsolutePath(), "current", "finalized").toFile();
  return getNextSubDir(state.curFinalizedDir, dir);
}
/**
 * Advances to the next second-level subdir within the current first-level
 * subdir, or returns null if no first-level subdir is selected.
 */
private String getNextFinalizedSubDir() throws IOException {
  if (state.curFinalizedDir == null) {
    return null;
  }
  File dir = Paths.get(
      bpidDir.getAbsolutePath(), "current", "finalized",
      state.curFinalizedDir).toFile();
  return getNextSubDir(state.curFinalizedSubDir, dir);
}
/**
 * Lists (and caches) the sorted block-file names of the current subdir.
 *
 * <p>The cached listing is reused until it is older than
 * {@code maxStalenessMs}, after which it is refreshed from disk.
 *
 * @return the sorted entries, or null when the subdir is unset or empty
 */
private List<String> getSubdirEntries() throws IOException {
  if (state.curFinalizedSubDir == null) {
    return null; // There are no entries in the null subdir.
  }
  long now = Time.monotonicNow();
  if (cache != null) {
    long delta = now - cacheMs;
    if (delta < maxStalenessMs) {
      return cache;
    } else {
      LOG.trace("getSubdirEntries({}, {}): purging entries cache for {} " +
          "after {} ms.", storageID, bpid, state.curFinalizedSubDir, delta);
      cache = null;
    }
  }
  File dir = Paths.get(bpidDir.getAbsolutePath(), "current", "finalized",
      state.curFinalizedDir, state.curFinalizedSubDir).toFile();
  List<String> entries =
      IOUtils.listDirectory(dir, BlockFileFilter.INSTANCE);
  if (entries.size() == 0) {
    // Normalize "empty" to null so callers have a single sentinel.
    entries = null;
  } else {
    Collections.sort(entries);
  }
  if (entries == null) {
    LOG.trace("getSubdirEntries({}, {}): no entries found in {}",
        storageID, bpid, dir.getAbsolutePath());
  } else {
    LOG.trace("getSubdirEntries({}, {}): listed {} entries in {}",
        storageID, bpid, entries.size(), dir.getAbsolutePath());
  }
  cache = entries;
  cacheMs = now;
  return cache;
}
/**
 * Get the next block.<p/>
 *
 * Each volume has a hierarchical structure.<p/>
 *
 * <code>
 * BPID B0
 *   finalized/
 *     subdir0
 *       subdir0
 *         blk_000
 *         blk_001
 *       ...
 *     subdir1
 *       subdir0
 *         ...
 *   rbw/
 * </code>
 *
 * When we run out of entries at one level of the structure, we search
 * progressively higher levels. For example, when we run out of blk_
 * entries in a subdirectory, we search for the next subdirectory.
 * And so on.
 */
@Override
public ExtendedBlock nextBlock() throws IOException {
  if (state.atEnd) {
    return null;
  }
  try {
    while (true) {
      List<String> entries = getSubdirEntries();
      if (entries != null) {
        state.curEntry = nextSorted(entries, state.curEntry);
        if (state.curEntry == null) {
          LOG.trace("nextBlock({}, {}): advancing from {} to next " +
              "subdirectory.", storageID, bpid, state.curFinalizedSubDir);
        } else {
          ExtendedBlock block =
              new ExtendedBlock(bpid, Block.filename2id(state.curEntry));
          // Sanity check: a block must live in the subdir derived from its
          // id; misplaced blocks are logged and skipped, not returned.
          File expectedBlockDir = DatanodeUtil.idToBlockDir(
              new File("."), block.getBlockId());
          File actualBlockDir = Paths.get(".",
              state.curFinalizedDir, state.curFinalizedSubDir).toFile();
          if (!expectedBlockDir.equals(actualBlockDir)) {
            LOG.error("nextBlock({}, {}): block id {} found in invalid " +
                "directory. Expected directory: {}. " +
                "Actual directory: {}", storageID, bpid,
                block.getBlockId(), expectedBlockDir.getPath(),
                actualBlockDir.getPath());
            continue;
          }
          LOG.trace("nextBlock({}, {}): advancing to {}",
              storageID, bpid, block);
          return block;
        }
      }
      // Current subdir exhausted: advance to the next second-level subdir,
      // then (when those run out) the next first-level dir; when both are
      // exhausted, the iteration is complete.
      state.curFinalizedSubDir = getNextFinalizedSubDir();
      if (state.curFinalizedSubDir == null) {
        state.curFinalizedDir = getNextFinalizedDir();
        if (state.curFinalizedDir == null) {
          state.atEnd = true;
          return null;
        }
      }
    }
  } catch (IOException e) {
    // Mark the iterator finished so callers do not spin on a bad disk;
    // rewind() can be used to start over.
    state.atEnd = true;
    LOG.error("nextBlock({}, {}): I/O error", storageID, bpid, e);
    throw e;
  }
}
/** Returns true once the iterator has passed the last block. */
@Override
public boolean atEnd() {
  return state.atEnd;
}
/** Resets the iterator to the beginning and drops any cached listing. */
@Override
public void rewind() {
  cache = null;
  cacheMs = 0;
  state = new BlockIteratorState();
}
/**
 * Atomically persists the iterator state: serialize to a temporary file,
 * then rename it over the cursor file.
 *
 * @throws IOException if serialization or the atomic rename fails
 */
@Override
public void save() throws IOException {
  state.lastSavedMs = Time.now();
  boolean success = false;
  try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
      new FileOutputStream(getTempSaveFile(), false), "UTF-8"))) {
    WRITER.writeValue(writer, state);
    success = true;
  } finally {
    if (!success) {
      // Best-effort cleanup of the partial temp file. BUG FIX: the
      // condition was inverted -- it logged "error deleting" when
      // File.delete() returned true (success) and stayed silent on
      // failure. Log only when the delete actually fails.
      if (!getTempSaveFile().delete()) {
        LOG.debug("save({}, {}): error deleting temporary file.",
            storageID, bpid);
      }
    }
  }
  Files.move(getTempSaveFile().toPath(), getSaveFile().toPath(),
      StandardCopyOption.ATOMIC_MOVE);
  if (LOG.isTraceEnabled()) {
    LOG.trace("save({}, {}): saved {}", storageID, bpid,
        WRITER.writeValueAsString(state));
  }
}
/** Restores the iterator position from the saved cursor file. */
public void load() throws IOException {
  File file = getSaveFile();
  this.state = READER.readValue(file);
  LOG.trace("load({}, {}): loaded iterator {} from {}: {}", storageID,
      bpid, name, file.getAbsoluteFile(),
      WRITER.writeValueAsString(state));
}
/** Returns the persistent cursor file for this iterator. */
File getSaveFile() {
  return new File(bpidDir, name + ".cursor");
}
/** Returns the temp file written before the atomic rename in save(). */
File getTempSaveFile() {
  return new File(bpidDir, name + ".cursor.tmp");
}
/** Sets how long a cached directory listing may be reused. */
@Override
public void setMaxStalenessMs(long maxStalenessMs) {
  this.maxStalenessMs = maxStalenessMs;
}
@Override
public void close() throws IOException {
  // No action needed for this volume implementation.
}
@Override
public long getIterStartMs() {
  return state.iterStartMs;
}
@Override
public long getLastSavedMs() {
  return state.lastSavedMs;
}
@Override
public String getBlockPoolId() {
  return bpid;
}
}
/** Creates a fresh iterator over the block pool's finalized blocks. */
@Override
public BlockIterator newBlockIterator(String bpid, String name) {
  return new BlockIteratorImpl(bpid, name);
}
/** Creates an iterator and restores its position from the saved cursor. */
@Override
public BlockIterator loadBlockIterator(String bpid, String name)
    throws IOException {
  BlockIteratorImpl iter = new BlockIteratorImpl(bpid, name);
  iter.load();
  return iter;
}
@Override
public FsDatasetSpi getDataset() {
  return dataset;
}
/**
 * RBW files. They get moved to the finalized block directory when
 * the block is finalized.
 *
 * <p>Replica space is reserved up front and released again on failure.
 */
File createRbwFile(String bpid, Block b) throws IOException {
  checkReference();
  reserveSpaceForReplica(b.getNumBytes());
  try {
    return getBlockPoolSlice(bpid).createRbwFile(b);
  } catch (IOException exception) {
    releaseReservedSpace(b.getNumBytes());
    throw exception;
  }
}
/**
 * Moves a finished replica into the finalized directory.
 *
 * @param bytesReserved Space that was reserved during
 * block creation. Now that the block is being finalized we
 * can free up this space.
 * @return the block file in its finalized location
 * @throws IOException if the move fails
 */
File addFinalizedBlock(String bpid, Block b, File f, long bytesReserved)
    throws IOException {
  releaseReservedSpace(bytesReserved);
  return getBlockPoolSlice(bpid).addFinalizedBlock(b, f);
}
/** Returns the caching executor, or null (see initializeCacheExecutor). */
Executor getCacheExecutor() {
  return cacheExecutor;
}
/** Checks every block pool slice's directories for disk errors. */
void checkDirs() throws DiskErrorException {
  // TODO:FEDERATION valid synchronization
  for(BlockPoolSlice s : bpSlices.values()) {
    s.checkDirs();
  }
}
/** Populates the replica map from every block pool on this volume. */
void getVolumeMap(ReplicaMap volumeMap,
    final RamDiskReplicaTracker ramDiskReplicaMap)
    throws IOException {
  for(BlockPoolSlice s : bpSlices.values()) {
    s.getVolumeMap(volumeMap, ramDiskReplicaMap);
  }
}
/** Populates the replica map from a single block pool. */
void getVolumeMap(String bpid, ReplicaMap volumeMap,
    final RamDiskReplicaTracker ramDiskReplicaMap)
    throws IOException {
  getBlockPoolSlice(bpid).getVolumeMap(volumeMap, ramDiskReplicaMap);
}
/** Returns the total number of blocks across all block pools. */
long getNumBlocks() {
  long numBlocks = 0;
  for (BlockPoolSlice s : bpSlices.values()) {
    numBlocks += s.getNumOfBlocks();
  }
  return numBlocks;
}
@Override
public String toString() {
  return currentDir.getAbsolutePath();
}
/** Shuts down the cache executor and every block pool slice. */
void shutdown() {
  if (cacheExecutor != null) {
    cacheExecutor.shutdown();
  }
  Set<Entry<String, BlockPoolSlice>> set = bpSlices.entrySet();
  for (Entry<String, BlockPoolSlice> entry : set) {
    // null: no block list to persist on shutdown.
    entry.getValue().shutdown(null);
  }
}
/**
 * Adds a {@link BlockPoolSlice} for block pool {@code bpid} using a
 * freshly created {@link Timer}.
 */
void addBlockPool(String bpid, Configuration conf) throws IOException {
  addBlockPool(bpid, conf, null);
}
/**
 * Adds a {@link BlockPoolSlice} for block pool {@code bpid}.
 *
 * @param timer timer handed to the slice; when null a new Timer is created
 */
void addBlockPool(String bpid, Configuration conf, Timer timer)
    throws IOException {
  final File bpdir = new File(currentDir, bpid);
  final Timer effectiveTimer = (timer == null) ? new Timer() : timer;
  final BlockPoolSlice bp =
      new BlockPoolSlice(bpid, this, bpdir, conf, effectiveTimer);
  bpSlices.put(bpid, bp);
}
/** Shuts down one block pool slice, persisting the given block list. */
void shutdownBlockPool(String bpid, BlockListAsLongs blocksListsAsLongs) {
  BlockPoolSlice bp = bpSlices.get(bpid);
  if (bp != null) {
    bp.shutdown(blocksListsAsLongs);
  }
  bpSlices.remove(bpid);
}
/**
 * Returns true when the block pool directory holds no block data: the
 * finalized tree contains no files and the rbw directory is empty.
 */
boolean isBPDirEmpty(String bpid) throws IOException {
  File volumeCurrentDir = this.getCurrentDir();
  File bpDir = new File(volumeCurrentDir, bpid);
  File bpCurrentDir = new File(bpDir, DataStorage.STORAGE_DIR_CURRENT);
  File finalizedDir = new File(bpCurrentDir,
      DataStorage.STORAGE_DIR_FINALIZED);
  File rbwDir = new File(bpCurrentDir, DataStorage.STORAGE_DIR_RBW);
  if (finalizedDir.exists() && !DatanodeUtil.dirNoFilesRecursive(
      finalizedDir)) {
    return false;
  }
  if (rbwDir.exists() && FileUtil.list(rbwDir).length != 0) {
    return false;
  }
  return true;
}
/**
 * Deletes the directory tree of block pool {@code bpid}.
 *
 * <p>With {@code force} the whole tree is removed unconditionally.
 * Otherwise deletion only succeeds when the pool holds no block data:
 * rbw must already be empty (a plain delete fails otherwise) and the
 * finalized/lazypersist trees must contain no files.
 */
void deleteBPDirectories(String bpid, boolean force) throws IOException {
  File volumeCurrentDir = this.getCurrentDir();
  File bpDir = new File(volumeCurrentDir, bpid);
  if (!bpDir.isDirectory()) {
    // nothing to be deleted
    return;
  }
  File tmpDir = new File(bpDir, DataStorage.STORAGE_DIR_TMP);
  File bpCurrentDir = new File(bpDir, DataStorage.STORAGE_DIR_CURRENT);
  File finalizedDir = new File(bpCurrentDir,
      DataStorage.STORAGE_DIR_FINALIZED);
  File lazypersistDir = new File(bpCurrentDir,
      DataStorage.STORAGE_DIR_LAZY_PERSIST);
  File rbwDir = new File(bpCurrentDir, DataStorage.STORAGE_DIR_RBW);
  if (force) {
    FileUtil.fullyDelete(bpDir);
  } else {
    if (!rbwDir.delete()) {
      throw new IOException("Failed to delete " + rbwDir);
    }
    if (!DatanodeUtil.dirNoFilesRecursive(finalizedDir) ||
        !FileUtil.fullyDelete(finalizedDir)) {
      throw new IOException("Failed to delete " + finalizedDir);
    }
    if (lazypersistDir.exists() &&
        ((!DatanodeUtil.dirNoFilesRecursive(lazypersistDir) ||
            !FileUtil.fullyDelete(lazypersistDir)))) {
      throw new IOException("Failed to delete " + lazypersistDir);
    }
    // tmp contents are disposable; best-effort delete, result ignored.
    FileUtil.fullyDelete(tmpDir);
    for (File f : FileUtil.listFiles(bpCurrentDir)) {
      if (!f.delete()) {
        throw new IOException("Failed to delete " + f);
      }
    }
    if (!bpCurrentDir.delete()) {
      throw new IOException("Failed to delete " + bpCurrentDir);
    }
    for (File f : FileUtil.listFiles(bpDir)) {
      if (!f.delete()) {
        throw new IOException("Failed to delete " + f);
      }
    }
    if (!bpDir.delete()) {
      throw new IOException("Failed to delete " + bpDir);
    }
  }
}
@Override
public String getStorageID() {
  return storageID;
}
@Override
public StorageType getStorageType() {
  return storageType;
}
/** Describes this volume as a NORMAL DatanodeStorage for reports. */
DatanodeStorage toDatanodeStorage() {
  return new DatanodeStorage(storageID, DatanodeStorage.State.NORMAL, storageType);
}
}
| |
/*******************************************************************************
* Copyright 2013 EMBL-EBI
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.sf.cram;
import cipheronly.CipherOutputStream_256;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.converters.FileConverter;
import htsjdk.samtools.CRAMFileWriter;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMFileReader;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SAMSequenceRecord;
import htsjdk.samtools.ValidationStringency;
import htsjdk.samtools.cram.build.ContainerFactory;
import htsjdk.samtools.cram.build.CramIO;
import htsjdk.samtools.cram.build.CramNormalizer;
import htsjdk.samtools.cram.build.Sam2CramRecordFactory;
import htsjdk.samtools.cram.common.CramVersions;
import htsjdk.samtools.cram.lossy.QualityScorePreservation;
import htsjdk.samtools.cram.ref.ReferenceTracks;
import htsjdk.samtools.cram.structure.Container;
import htsjdk.samtools.cram.structure.ContainerIO;
import htsjdk.samtools.cram.structure.CramCompressionRecord;
import htsjdk.samtools.cram.structure.CramHeader;
import htsjdk.samtools.cram.structure.Slice;
import htsjdk.samtools.util.Log;
import net.sf.cram.common.Utils;
import net.sf.cram.ref.ReferenceSource;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
public class Bam2Cram {
private static Log log = Log.getInstance(Bam2Cram.class);
public static final String COMMAND = "cram";
/**
 * Parses a colon-delimited list of two-character SAM tag names (for
 * example {@code "OQ:XA"}) into a sorted set.
 *
 * @param tags colon-delimited tag names; may be null or empty
 * @return sorted set of tag names; empty when {@code tags} is null/empty
 * @throws RuntimeException if any chunk is not exactly two characters
 */
private static Set<String> tagsNamesToSet(String tags) {
	Set<String> set = new TreeSet<String>();
	if (tags == null || tags.length() == 0)
		return set;
	String[] chunks = tags.split(":");
	for (String s : chunks) {
		if (s.length() != 2)
			// BUG FIX: message said "column delimited"; the delimiter is a colon.
			throw new RuntimeException("Expecting colon delimited tags names but got: '" + tags + "'");
		set.add(s);
	}
	return set;
}
/**
 * Updates per-position reference coverage and mismatch counters for the
 * given aligned records.
 *
 * <p>Every CIGAR element that consumes reference bases increments the
 * coverage track for each covered position; for aligned elements
 * (M/X/EQ) each read base is compared to the reference base at the
 * corresponding position and mismatches are counted.
 */
public static void updateTracks(List<SAMRecord> samRecords, ReferenceTracks tracks) {
	for (SAMRecord samRecord : samRecords) {
		if (samRecord.getReadBases().length == 0) continue;
		if (samRecord.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START) {
			int refPos = samRecord.getAlignmentStart();
			int readPos = 0;
			for (CigarElement ce : samRecord.getCigar().getCigarElements()) {
				if (ce.getOperator().consumesReferenceBases()) {
					for (int i = 0; i < ce.getLength(); i++)
						tracks.addCoverage(refPos + i, 1);
				}
				switch (ce.getOperator()) {
				case M:
				case X:
				case EQ:
					// BUG FIX: the loop previously started at i = readPos while
					// also indexing with readPos + i, double-counting the read
					// offset (wrong bases compared; comparisons skipped entirely
					// once readPos >= element length). Offset within the element
					// must start at 0.
					for (int i = 0; i < ce.getLength(); i++) {
						byte readBase = samRecord.getReadBases()[readPos + i];
						byte refBase = tracks.baseAt(refPos + i);
						if (readBase != refBase)
							tracks.addMismatches(refPos + i, 1);
					}
					break;
				default:
					break;
				}
				// Advance read/reference cursors per CIGAR semantics.
				readPos += ce.getOperator().consumesReadBases() ? ce.getLength() : 0;
				refPos += ce.getOperator().consumesReferenceBases() ? ce.getLength() : 0;
			}
		}
	}
}
/**
 * Converts a batch of SAM records (all belonging to the same reference
 * sequence) into CRAM compression records: translates each record via
 * {@link Sam2CramRecordFactory}, updates the reference tracks, applies the
 * quality-score preservation policy, and links mate pairs when the input is
 * coordinate sorted.
 *
 * @param samRecords      records to convert; must be non-empty
 * @param header          CRAM header providing the SAM header and version
 * @param ref             reference bases for the records' sequence
 * @param tracks          coverage/mismatch tracks, updated as a side effect
 * @param preservation    quality score preservation policy, applied per record
 * @param captureAllTags  capture every optional tag if true
 * @param captureTags     colon-delimited tag names to capture
 * @param ignoreTags      colon-delimited tag names to drop
 * @return the converted records, in input order
 */
public static List<CramCompressionRecord> convert(List<SAMRecord> samRecords, CramHeader header, byte[] ref,
ReferenceTracks tracks, QualityScorePreservation preservation, boolean captureAllTags, String captureTags,
String ignoreTags) {
int sequenceId = samRecords.get(0).getReferenceIndex();
String sequenceName = samRecords.get(0).getReferenceName();
log.debug(String.format("Writing %d records for sequence %d, %s", samRecords.size(), sequenceId, sequenceName));
// Factory performing the per-record SAM -> CRAM translation.
Sam2CramRecordFactory f = new Sam2CramRecordFactory(ref, header.getSamFileHeader(), header.getVersion());
f.captureAllTags = captureAllTags;
f.captureTags.addAll(tagsNamesToSet(captureTags));
f.ignoreTags.addAll(tagsNamesToSet(ignoreTags));
List<CramCompressionRecord> cramRecords = new ArrayList<CramCompressionRecord>();
int prevAlStart = samRecords.get(0).getAlignmentStart();
int index = 0;
// Fold the batch into the coverage/mismatch tracks first; the nano timers
// here and below only feed the timing log line at the end.
long tracksNanos = System.nanoTime();
updateTracks(samRecords, tracks);
tracksNanos = System.nanoTime() - tracksNanos;
long createNanos = System.nanoTime();
for (SAMRecord samRecord : samRecords) {
CramCompressionRecord cramRecord = f.createCramRecord(samRecord);
// index is 1-based within this batch.
cramRecord.index = ++index;
// Alignment start is kept both as a delta to the previous record and absolutely.
cramRecord.alignmentDelta = samRecord.getAlignmentStart() - prevAlStart;
cramRecord.alignmentStart = samRecord.getAlignmentStart();
prevAlStart = samRecord.getAlignmentStart();
cramRecords.add(cramRecord);
preservation.addQualityScores(samRecord, cramRecord, tracks);
}
// Heuristic sanity check: too many read features per base usually means
// the records were aligned against a different reference.
if (f.getBaseCount() < 3 * f.getFeatureCount())
log.warn("Abnormally high number of mismatches, possibly wrong reference.");
createNanos = System.nanoTime() - createNanos;
{
long mateNanos = System.nanoTime();
if (header.getSamFileHeader().getSortOrder() == SAMFileHeader.SortOrder.coordinate) {
// mating:
// Chain records of the same template together by read name; primary
// and secondary alignments are mated independently of each other.
Map<String, CramCompressionRecord> primaryMateMap = new TreeMap<String, CramCompressionRecord>();
Map<String, CramCompressionRecord> secondaryMateMap = new TreeMap<String, CramCompressionRecord>();
for (CramCompressionRecord r : cramRecords) {
if (!r.isMultiFragment()) {
// Single-fragment templates can never be mated.
r.setDetached(true);
r.setHasMateDownStream(false);
r.recordsToNextFragment = -1;
r.next = null;
r.previous = null;
} else {
String name = r.readName;
Map<String, CramCompressionRecord> mateMap = r.isSecondaryAlignment() ? secondaryMateMap : primaryMateMap;
CramCompressionRecord mate = mateMap.get(name);
if (mate == null) {
// First fragment seen under this read name; park it as the chain head.
mateMap.put(name, r);
} else {
// Append r at the tail of the existing chain and record the
// distance (in records) from the tail to r.
CramCompressionRecord prev = mate;
while (prev.next != null)
prev = prev.next;
prev.recordsToNextFragment = r.index - prev.index - 1;
prev.next = r;
r.previous = prev;
r.previous.setHasMateDownStream(true);
r.setHasMateDownStream(false);
r.setDetached(false);
r.previous.setDetached(false);
}
}
}
// mark unpredictable reads as detached:
// Walk each chain from its head (previous == null, next != null); if the
// mate flags / template size cannot be reproduced from the chain alone,
// the records must carry explicit mate information (detached).
for (CramCompressionRecord r : cramRecords) {
if (r.next == null || r.previous != null)
continue;
CramCompressionRecord last = r;
while (last.next != null)
last = last.next;
if (r.isFirstSegment() && last.isLastSegment()) {
final int templateLength = Utils.computeInsertSize(r, last);
if (r.templateSize == templateLength) {
// Middle fragments are expected to carry the negated template length.
last = r.next;
while (last.next != null) {
if (last.templateSize != -templateLength)
break;
last = last.next;
}
if (last.templateSize != -templateLength)
detach(r);
}
// NOTE(review): when r.templateSize != templateLength the chain is
// left attached; it looks like detach(r) may be intended here — confirm.
} else
detach(r);
}
// Fragments whose mates never appeared in this batch are detached.
for (CramCompressionRecord r : primaryMateMap.values()) {
if (r.next != null)
continue;
r.setDetached(true);
r.setHasMateDownStream(false);
r.recordsToNextFragment = -1;
r.next = null;
r.previous = null;
}
for (CramCompressionRecord r : secondaryMateMap.values()) {
if (r.next != null)
continue;
r.setDetached(true);
r.setHasMateDownStream(false);
r.recordsToNextFragment = -1;
r.next = null;
r.previous = null;
}
} else {
// Not coordinate sorted: mate prediction is impossible, detach everything.
for (CramCompressionRecord r : cramRecords) {
r.setDetached(true);
}
}
mateNanos = System.nanoTime() - mateNanos;
log.info(String.format("create: tracks %dms, records %dms, mating %dms.", tracksNanos / 1000000,
createNanos / 1000000, mateNanos / 1000000));
}
return cramRecords;
}
/**
 * Marks the given record and every record reachable through {@code next} as
 * detached mate-wise: explicit mate info, no downstream mate, no distance to
 * the next fragment.
 */
private static void detach(CramCompressionRecord cramRecord) {
    for (CramCompressionRecord r = cramRecord; r != null; r = r.next) {
        r.setDetached(true);
        r.setHasMateDownStream(false);
        r.recordsToNextFragment = -1;
    }
}
/**
 * Prints the tool version followed by the JCommander usage text to stdout.
 */
private static void printUsage(JCommander jc) {
    StringBuilder usage = new StringBuilder("\n");
    jc.usage(usage);
    System.out.println("Version " + Bam2Cram.class.getPackage().getImplementationVersion());
    System.out.println(usage.toString());
}
/**
 * Opens the destination stream for CRAM output: a buffered file stream when a
 * file is given, otherwise STDOUT; optionally wrapped in an AES cipher stream.
 *
 * @param outputCramFile destination file, or null for STDOUT
 * @param encrypt        whether to wrap the stream in a cipher stream
 * @param pass           encryption password; used only when encrypt is true
 * @return the stream to write the CRAM output to
 * @throws FileNotFoundException if the output file cannot be created
 */
private static OutputStream openOutputStream(File outputCramFile, boolean encrypt, char[] pass) throws FileNotFoundException {
    OutputStream os;
    if (outputCramFile != null) {
        FileOutputStream fos = new FileOutputStream(outputCramFile);
        os = new BufferedOutputStream(fos);
    } else {
        // Fix: log message typo ("writint" -> "writing").
        log.warn("No output file, writing to STDOUT.");
        os = System.out;
    }

    if (encrypt) {
        CipherOutputStream_256 cos = new CipherOutputStream_256(os, pass, 128);
        os = cos.getCipherOutputStream();
    }
    return os;
}
/**
 * Command-line entry point: converts a BAM file (or SAM/BAM on stdin) into a
 * CRAM file (or stdout). Records are buffered per reference sequence and
 * flushed into CRAM containers whenever the sequence changes or the buffer
 * reaches the configured container size.
 *
 * Fixes relative to the previous version:
 * - NPE on empty stdin input: bamFile.getName() was called with a null
 *   bamFile; now guarded like the non-empty path ("STDIN").
 * - The output stream opening (and cipher wrapping) logic was duplicated
 *   inline; it now reuses openOutputStream().
 * - "parameteres" typo in the parse-failure message.
 */
public static void main(String[] args) throws IOException, IllegalArgumentException, IllegalAccessException,
        NoSuchAlgorithmException {
    Params params = new Params();
    JCommander jc = new JCommander(params);
    try {
        jc.parse(args);
    } catch (Exception e) {
        System.out.println("Failed to parse parameters, detailed message below: ");
        System.out.println(e.getMessage());
        System.out.println();
        System.out.println("See usage: -h");
        System.exit(1);
    }

    if (args.length == 0 || params.help) {
        printUsage(jc);
        System.exit(1);
    }

    Log.setGlobalLogLevel(params.logLevel);

    if (params.referenceFasta == null)
        log.warn("No reference file specified, remote access over internet may be used to download public sequences. ");
    ReferenceSource referenceSource = new ReferenceSource(params.referenceFasta);

    // Read the encryption password from the console before opening any streams.
    char[] pass = null;
    if (params.encrypt) {
        if (System.console() == null)
            throw new RuntimeException("Cannot access console.");
        pass = System.console().readPassword();
    }

    File bamFile = params.bamFile;
    SAMFileReader.setDefaultValidationStringency(ValidationStringency.SILENT);
    SAMFileReader samFileReader = null;
    if (params.bamFile == null) {
        log.warn("No input file, reading from input...");
        samFileReader = new SAMFileReader(System.in);
    } else
        samFileReader = new SAMFileReader(bamFile);

    SAMFileHeader samFileHeader = samFileReader.getFileHeader().clone();
    SAMSequenceRecord samSequenceRecord = null;
    List<SAMRecord> samRecords = new ArrayList<SAMRecord>(params.maxSliceSize);

    int prevSeqId = SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX;
    SAMRecordIterator iterator = samFileReader.iterator();
    if (!iterator.hasNext()) {
        // Empty input: still emit a valid CRAM file (header + EOF marker).
        log.debug("No records found, writing out empty cram file...");
        // Fix: bamFile is null when reading from stdin; previously this NPE'd.
        CramHeader h = new CramHeader(CramVersions.CRAM_v3,
                bamFile == null ? "STDIN" : bamFile.getName(), samFileHeader);
        OutputStream os = openOutputStream(params.outputCramFile, params.encrypt, pass);
        CramIO.writeCramHeader(h, os);
        CramIO.issueEOF(h.getVersion(), os);
        os.close();
        return;
    }

    {
        // Prime the buffer with the first record and remember its sequence.
        String seqName = null;
        SAMRecord samRecord = iterator.next();
        if (samRecord == null)
            throw new RuntimeException("No records found.");
        seqName = samRecord.getReferenceName();
        prevSeqId = samRecord.getReferenceIndex();
        samRecords.add(samRecord);

        if (SAMRecord.NO_ALIGNMENT_REFERENCE_NAME.equals(seqName))
            samSequenceRecord = null;
        else
            samSequenceRecord = samFileHeader.getSequence(seqName);
    }

    QualityScorePreservation preservation;
    if (params.losslessQS)
        preservation = new QualityScorePreservation("*40");
    else
        preservation = new QualityScorePreservation(params.qsSpec);

    byte[] ref = null;
    ReferenceTracks tracks = null;
    if (samSequenceRecord == null) {
        // Unmapped records: no reference bases to track.
        ref = new byte[0];
        tracks = new ReferenceTracks(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX, SAMRecord.NO_ALIGNMENT_REFERENCE_NAME,
                ref);
    } else {
        ref = referenceSource.getReferenceBases(samSequenceRecord, true);
        log.debug(String.format("Creating tracks for reference: name=%s, length=%d.\n",
                samSequenceRecord.getSequenceName(), ref.length));
        tracks = new ReferenceTracks(samSequenceRecord.getSequenceIndex(), samSequenceRecord.getSequenceName(), ref);
    }

    // Fix: reuse the helper instead of duplicating the stream-opening and
    // cipher-wrapping logic inline.
    OutputStream os = openOutputStream(params.outputCramFile, params.encrypt, pass);

    // Normalize the SAM header (MD5s, URIs, @PG line) before writing it out.
    FixBAMFileHeader fixBAMFileHeader = new FixBAMFileHeader(referenceSource);
    fixBAMFileHeader.setConfirmMD5(params.confirmMD5);
    fixBAMFileHeader.setInjectURI(params.injectURI);
    fixBAMFileHeader.setIgnoreMD5Mismatch(params.ignoreMD5Mismatch);
    try {
        fixBAMFileHeader.fixSequences(samFileHeader.getSequenceDictionary().getSequences());
    } catch (FixBAMFileHeader.MD5MismatchError e) {
        log.error(e.getMessage());
        System.exit(1);
    }
    fixBAMFileHeader.addCramtoolsPG(samFileHeader);

    CramHeader h = new CramHeader(CramVersions.CRAM_v3,
            params.bamFile == null ? "STDIN" : params.bamFile.getName(), samFileHeader);
    long offset = CramIO.writeCramHeader(h, os);

    long bases = 0;

    ContainerFactory cf = new ContainerFactory(samFileHeader, params.maxSliceSize);
    do {
        // Stop early if STDOUT is broken (e.g. a downstream pipe was closed).
        if (params.outputCramFile == null && System.out.checkError())
            return;

        if (!iterator.hasNext())
            break;
        SAMRecord samRecord = iterator.next();

        // Flush the buffer when the reference changes or the container is full.
        if (samRecord.getReferenceIndex() != prevSeqId || samRecords.size() >= params.maxContainerSize) {
            long convertNanos = 0;
            if (!samRecords.isEmpty()) {
                convertNanos = System.nanoTime();
                List<CramCompressionRecord> records = convert(samRecords, h, ref, tracks, preservation,
                        params.captureAllTags, params.captureTags, params.ignoreTags);
                convertNanos = System.nanoTime() - convertNanos;
                samRecords.clear();

                Container container = cf.buildContainer(records);
                for (Slice s : container.slices) {
                    s.setRefMD5(ref);
                }
                records.clear();
                long len = ContainerIO.writeContainer(h.getVersion(), container, os);
                container.offset = offset;
                offset += len;

                log.info(String
                        .format("CONTAINER WRITE TIMES: records build time %dms, header build time %dms, slices build time %dms, io time %dms.",
                                convertNanos / 1000000, container.buildHeaderTime / 1000000,
                                container.buildSlicesTime / 1000000, container.writeTime / 1000000));
            }
        }

        if (prevSeqId != samRecord.getReferenceIndex()) {
            // Switch reference sequence and rebuild the tracks for it.
            if (samRecord.getReferenceIndex() != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
                samSequenceRecord = samFileHeader.getSequence(samRecord.getReferenceName());
                ref = referenceSource.getReferenceBases(samSequenceRecord, true);
                tracks = new ReferenceTracks(samSequenceRecord.getSequenceIndex(),
                        samSequenceRecord.getSequenceName(), ref);
            } else {
                ref = new byte[] {};
                tracks = new ReferenceTracks(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
                        SAMRecord.NO_ALIGNMENT_REFERENCE_NAME, ref);
            }
            prevSeqId = samRecord.getReferenceIndex();
        }

        samRecords.add(samRecord);
        bases += samRecord.getReadLength();

        if (params.maxRecords-- < 1)
            break;
    } while (iterator.hasNext());

    { // Flush whatever is left in the buffer (same logic as above; candidate for extraction).
        if (!samRecords.isEmpty()) {
            List<CramCompressionRecord> records = convert(samRecords, h, ref, tracks, preservation,
                    params.captureAllTags, params.captureTags, params.ignoreTags);
            samRecords.clear();
            Container container = cf.buildContainer(records);
            for (Slice s : container.slices)
                s.setRefMD5(ref);
            records.clear();
            ContainerIO.writeContainer(h.getVersion(), container, os);
            log.info(String.format(
                    "CONTAINER WRITE TIMES: header build time %dms, slices build time %dms, io time %dms.",
                    container.buildHeaderTime / 1000000, container.buildSlicesTime / 1000000,
                    container.writeTime / 1000000));
        }
    }

    iterator.close();
    samFileReader.close();

    if (params.addEOF)
        CramIO.issueEOF(h.getVersion(), os);
    os.close();

    if (params.outputCramFile != null)
        log.info(String.format("Compression: %.2f b/b.", (8f * params.outputCramFile.length() / bases)));
}
/**
 * Command-line parameters for the BAM-to-CRAM converter, parsed by JCommander.
 * Field defaults are the effective defaults of the tool.
 */
@Parameters(commandDescription = "BAM to CRAM converter. ")
static class Params {
    @Parameter(names = { "-l", "--log-level" }, description = "Change log level: DEBUG, INFO, WARNING, ERROR.", converter = CramTools.LevelConverter.class)
    Log.LogLevel logLevel = Log.LogLevel.ERROR;

    @Parameter(names = { "--input-bam-file", "-I" }, converter = FileConverter.class, description = "Path to a BAM file to be converted to CRAM. Omit if standard input (pipe).")
    File bamFile;

    @Parameter(names = { "--reference-fasta-file", "-R" }, converter = FileConverter.class, description = "The reference fasta file, uncompressed and indexed (.fai file, use 'samtools faidx'). ")
    File referenceFasta;

    @Parameter(names = { "--output-cram-file", "-O" }, converter = FileConverter.class, description = "The path for the output CRAM file. Omit if standard output (pipe).")
    File outputCramFile = null;

    @Parameter(names = { "--max-records" }, description = "Stop after compressing this many records. ")
    long maxRecords = Long.MAX_VALUE;

    // Positional (unnamed) arguments: optional list of sequence names.
    @Parameter
    List<String> sequences;

    @Parameter(names = { "-h", "--help" }, description = "Print help and quit")
    boolean help = false;

    @Parameter(names = { "--max-slice-size" }, hidden = true)
    int maxSliceSize = 10000;

    @Parameter(names = { "--max-container-size" }, hidden = true)
    int maxContainerSize = 10000;

    @Parameter(names = { "--preserve-read-names", "-n" }, description = "Preserve all read names.")
    boolean preserveReadNames = false;

    // Fix: the description wrongly claimed this option overwrites itself
    // ('--lossless-quality-score'); it overrides the lossy spec below.
    @Parameter(names = { "--lossless-quality-score", "-Q" }, description = "Preserve all quality scores. Overrides '--lossy-quality-score-spec'.")
    boolean losslessQS = false;

    @Parameter(names = { "--lossy-quality-score-spec", "-L" }, description = "A string specifying what quality scores should be preserved.")
    String qsSpec = "";

    @Parameter(names = { "--encrypt" }, description = "Encrypt the CRAM file.")
    boolean encrypt = false;

    @Parameter(names = { "--ignore-tags" }, description = "Ignore the tags listed, for example 'OQ:XA:XB'")
    String ignoreTags = "";

    @Parameter(names = { "--capture-tags" }, description = "Capture the tags listed, for example 'OQ:XA:XB'")
    String captureTags = "";

    @Parameter(names = { "--capture-all-tags" }, description = "Capture all tags.")
    boolean captureAllTags = false;

    @Parameter(names = { "--input-is-sam" }, description = "Input is in SAM format.")
    boolean inputIsSam = false;

    @Parameter(names = { "--inject-sq-uri" }, description = "Inject or change the @SQ:UR header fields to point to ENA reference service. ")
    public boolean injectURI = false;

    @Parameter(names = { "--ignore-md5-mismatch" }, description = "Fail on MD5 mismatch if true, or correct (overwrite) the checksums and continue if false.")
    public boolean ignoreMD5Mismatch = false;

    @Deprecated
    @Parameter(names = { "--issue-eof-marker" }, description = "Append the EOF marker to the end of the output file/stream.", hidden = true, arity = 1)
    public boolean addEOF = true;

    @Parameter(names = { "--confirm-md5" }, description = "Confirm MD5 checksums of the reference sequences.", hidden = true, arity = 1)
    public boolean confirmMD5 = true;
}
}
| |
package com.bangalore.barcamp.fragment;
import java.net.MalformedURLException;
import java.net.URL;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.widget.ShareActionProvider;
import android.text.Html;
import android.text.method.LinkMovementMethod;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.TextView;
import com.bangalore.barcamp.BCBSharedPrefUtils;
import com.bangalore.barcamp.BCBUtils;
import com.bangalore.barcamp.R;
import com.bangalore.barcamp.SessionAttendingUpdateService;
import com.bangalore.barcamp.activity.BCBFragmentActionbarActivity;
import com.bangalore.barcamp.activity.ShareActivity;
import com.bangalore.barcamp.data.BarcampBangalore;
import com.bangalore.barcamp.data.BarcampData;
import com.bangalore.barcamp.data.Session;
import com.bangalore.barcamp.data.Slot;
import com.bangalore.barcamp.utils.BCBFragmentUtils;
import com.bangalore.barcamp.widgets.CircularImageView;
import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;
public class SessionDetailsFragment extends BCBFragmentBaseClass {
public final static String EXTRA_SESSION_POSITION = "session_position";
public final static String EXTRA_SLOT_POS = "slotPosition";
public static final String EXTRA_SESSION_ID = "sessionID";
private static final int SHOW_ERROR_DIALOG = 100;
private MenuItem mShareItem;
private Session session;
private ShareActionProvider mShareActionProvider;
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // This fragment contributes its own items (the share action) to the options menu.
    setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
        Bundle savedInstanceState) {
    // Fix: inflate against the container with attachToRoot=false so the
    // layout's root LayoutParams are resolved; inflating with a null parent
    // silently discards them.
    return inflater.inflate(R.layout.session_details, container, false);
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
    super.onCreateOptionsMenu(menu, inflater);
    inflater.inflate(R.menu.session_details, menu);
    mShareItem = menu.findItem(R.id.share_session);
    mShareActionProvider = (ShareActionProvider) MenuItemCompat
            .getActionProvider(mShareItem);
    // Fix: getActionProvider() may return null (e.g. action view missing);
    // previously this NPE'd.
    if (mShareActionProvider != null) {
        mShareActionProvider.setShareIntent(getDefaultIntent());
    }
    Log.e("OptionsMenu", "options menu created");
}
/**
 * Builds the plain-text ACTION_SEND intent used by the share action provider.
 * The share text is populated only once the session has been resolved.
 */
private Intent getDefaultIntent() {
    Intent shareIntent = new Intent(Intent.ACTION_SEND);
    shareIntent.setType("text/plain");
    if (session == null) {
        return shareIntent;
    }
    String shareText = "I am attending session "
            + session.title + " by " + session.presenter + " @"
            + session.location + " between " + session.time;
    shareIntent.putExtra(Intent.EXTRA_TEXT, shareText);
    return shareIntent;
}
@Override
public void onPrepareOptionsMenu(Menu menu) {
    super.onPrepareOptionsMenu(menu);
    Log.e("OptionsMenu", "onPrepareOptionsMenu called");
    // The share item is only available once onCreateOptionsMenu has run.
    if (mShareItem == null) {
        Log.e("OptionsMenu", "options menu not found");
    } else {
        mShareItem.setVisible(true);
    }
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Only the share action is handled here; everything else goes to the base class.
    if (item.getItemId() != R.id.share_session) {
        return super.onOptionsItemSelected(item);
    }
    Intent shareIntent = new Intent(getActivity(), ShareActivity.class);
    shareIntent.putExtra(ShareActivity.SHARE_STRING,
            "I am attending session " + session.title + " by "
                    + session.presenter + " @" + session.location
                    + " between " + session.time);
    startActivity(shareIntent);
    return true;
}
/**
 * Binds the session selected via intent extras to the detail views, wires the
 * "attending" checkbox (alarm + server sync) and the location click handler,
 * and reports a screen view to analytics.
 *
 * Fix: the session-ID mismatch branch now returns after finish(); previously
 * execution fell through and kept populating views for a stale session.
 */
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    View view = getView();
    BarcampData data = ((BarcampBangalore) getActivity()
            .getApplicationContext()).getBarcampData();
    if (data == null) {
        finish();
        return;
    }
    // Resolve the session from the slot/session positions passed in the intent.
    final Slot slot = data.slotsArray.get(getIntent().getIntExtra(
            EXTRA_SLOT_POS, 0));
    session = slot.sessionsArray.get(getIntent().getIntExtra(
            EXTRA_SESSION_POSITION, 0));
    String id = getIntent().getStringExtra(EXTRA_SESSION_ID);
    if (id != null && !id.equals(session.id)) {
        // The schedule changed since this link was created: warn and bail out.
        ShowErrorDialogFragment.newInstance(R.string.error_title).show(
                getFragmentManager(), "dialog");
        finish();
        return; // Fix: do not keep populating views after finish().
    }
    ((TextView) view.findViewById(R.id.title)).setText(session.title);
    ((TextView) view.findViewById(R.id.time)).setText(session.time);
    ((TextView) view.findViewById(R.id.location)).setText(session.location);
    ((TextView) view.findViewById(R.id.presenter)).setText("By "
            + session.presenter);
    ((TextView) view.findViewById(R.id.presenter))
            .setBackground(new SpeakerNameBackgroundDrawable(session.color));
    ((TextView) view.findViewById(R.id.description)).setText(Html
            .fromHtml(session.description));
    ((TextView) view.findViewById(R.id.description))
            .setMovementMethod(LinkMovementMethod.getInstance());
    try {
        ((CircularImageView) view.findViewById(R.id.authorImage))
                .setImageURL(new URL(session.photo));
    } catch (MalformedURLException e) {
        // A bad photo URL only means no avatar; the rest of the screen still works.
        e.printStackTrace();
    }
    // Checkbox reflects (and toggles) whether an alarm is set for this session.
    CheckBox checkBox = (CheckBox) view.findViewById(R.id.checkBox1);
    checkBox.setChecked(BCBSharedPrefUtils.getAlarmSettingsForID(
            getActivity(), session.id) == BCBSharedPrefUtils.ALARM_SET);
    checkBox.setOnCheckedChangeListener(new OnCheckedChangeListener() {

        @Override
        public void onCheckedChanged(CompoundButton buttonView,
                boolean isChecked) {
            if (isChecked) {
                BCBUtils.setAlarmForSession(getActivity()
                        .getApplicationContext(), slot, session,
                        getIntent().getIntExtra(EXTRA_SLOT_POS, 0),
                        getIntent().getIntExtra(EXTRA_SESSION_POSITION, 0));
                Tracker t = ((BarcampBangalore) getActivity()
                        .getApplication()).getTracker();
                t.send(new HitBuilders.EventBuilder()
                        .setCategory("ui_action").setAction("checkbox")
                        .setLabel("Set Alarm").build());
            } else {
                BCBUtils.removeSessionFromSchedule(getActivity()
                        .getApplicationContext(), session.id, getIntent()
                        .getIntExtra(EXTRA_SLOT_POS, 0), getIntent()
                        .getIntExtra(EXTRA_SESSION_POSITION, 0));
                Tracker t = ((BarcampBangalore) getActivity()
                        .getApplication()).getTracker();
                t.send(new HitBuilders.EventBuilder()
                        .setCategory("ui_action").setAction("checkbox")
                        .setLabel("Remove Alarm").build());
            }
            // Sync the attending state with the backend.
            Intent newIntent = new Intent(getActivity()
                    .getApplicationContext(),
                    SessionAttendingUpdateService.class);
            newIntent.putExtra(SessionAttendingUpdateService.SESSION_ID,
                    session.id);
            newIntent.putExtra(SessionAttendingUpdateService.IS_ATTENDING,
                    isChecked ? "true" : "false");
            getActivity().startService(newIntent);
        }
    });
    view.findViewById(R.id.location_layout).setOnClickListener(
            new LocationClickListener());

    setHasOptionsMenu(true);
    if (mShareActionProvider != null) {
        mShareActionProvider.setShareIntent(getDefaultIntent());
    }

    // Report this screen to analytics.
    Tracker t = ((BarcampBangalore) getActivity().getApplication())
            .getTracker();
    t.setScreenName(this.getClass().getName());
    t.send(new HitBuilders.AppViewBuilder().build());
}
/**
 * Opens the LocationFragment for this session's venue when the location row
 * is tapped; the host activity performs the actual fragment transaction.
 */
private final class LocationClickListener implements OnClickListener {

    @Override
    public void onClick(View v) {
        Intent locationIntent = new Intent(getActivity().getApplicationContext(),
                LocationFragment.class);
        locationIntent.putExtra(BCBFragmentUtils.CLASS_NAME, LocationFragment.class);
        locationIntent.putExtra(LocationFragment.LOCATION_EXTRA, session.location);
        BCBFragmentActionbarActivity hostActivity = (BCBFragmentActionbarActivity) getActivity();
        hostActivity.callForFunction(
                BCBFragmentActionbarActivity.START_FRAGMENT, locationIntent);
    }
}
/**
 * Simple error dialog shown when the linked session no longer matches the
 * current schedule.
 *
 * Fix: newInstance() stores a title string-resource id under "title", but
 * onCreateDialog() previously ignored it and hard-coded the title; the
 * argument is now honored (with the old literal as fallback).
 */
public static class ShowErrorDialogFragment extends DialogFragment {

    public static ShowErrorDialogFragment newInstance(int title) {
        ShowErrorDialogFragment frag = new ShowErrorDialogFragment();
        Bundle args = new Bundle();
        args.putInt("title", title);
        frag.setArguments(args);
        return frag;
    }

    private AlertDialog alertDialog;

    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity())
                .setMessage(
                        "Error!!! Session got changed. Please check schedule again.")
                .setPositiveButton("OK",
                        new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog,
                                    int which) {
                                dialog.dismiss();
                            }
                        });
        int titleRes = getArguments() != null ? getArguments().getInt("title") : 0;
        if (titleRes != 0) {
            builder.setTitle(titleRes);
        } else {
            builder.setTitle("Error!!!");
        }
        alertDialog = builder.create();
        return alertDialog;
    }
}
/**
 * Background for the presenter label: a rounded rectangle in the session's
 * color whose right edge is squared off by overdrawing a plain rectangle.
 */
class SpeakerNameBackgroundDrawable extends Drawable {

    private Paint mPainter;

    SpeakerNameBackgroundDrawable(String color) {
        mPainter = new Paint();
        mPainter.setAntiAlias(true);
        mPainter.setColor(Color.parseColor(color));
    }

    @Override
    public void draw(Canvas canvas) {
        float density = SessionDetailsFragment.this.getActivity()
                .getResources().getDisplayMetrics().density;
        // Fix: copy the bounds before mutating. getBounds() returns the
        // drawable's internal Rect; shrinking it in place corrupted the
        // drawable's own bounds a little more on every draw pass.
        Rect rect = new Rect(getBounds());
        RectF rectF = new RectF(rect);
        canvas.drawRoundRect(rectF, 5 * density, 5 * density, mPainter);
        // Square off the rightmost 5dp... actually cover all but the last 5px-scaled strip.
        rect.right = (int) (rect.right - (5 * density));
        canvas.drawRect(rect, mPainter);
    }

    @Override
    public void setAlpha(int alpha) {
        // No-op: the paint's alpha is fixed by the session color.
    }

    @Override
    public void setColorFilter(ColorFilter cf) {
        // No-op: color filtering is not supported.
    }

    @Override
    public int getOpacity() {
        // 0 == PixelFormat.UNKNOWN, kept for compatibility.
        // NOTE(review): PixelFormat.TRANSLUCENT may be more appropriate — confirm.
        return 0;
    }
}
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
 * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.core.dao.impl.device;
import org.wso2.carbon.device.mgt.common.Device;
import org.wso2.carbon.device.mgt.common.PaginationRequest;
import org.wso2.carbon.device.mgt.core.dao.DeviceManagementDAOException;
import org.wso2.carbon.device.mgt.core.dao.DeviceManagementDAOFactory;
import org.wso2.carbon.device.mgt.core.dao.impl.AbstractDeviceDAOImpl;
import org.wso2.carbon.device.mgt.core.dao.util.DeviceManagementDAOUtil;
import java.sql.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* This class holds the generic implementation of DeviceDAO which can be used to support ANSI db syntax.
*/
public class GenericDeviceDAOImpl extends AbstractDeviceDAOImpl {
/**
 * Returns one page of enrolled (non-removed) devices for the tenant, applying
 * the optional filters carried by the request (type, name, owner, ownership,
 * status, model, last-update timestamp).
 *
 * The SQL is built incrementally; each optional predicate sets a matching
 * is*Provided flag, and the parameter binding below follows exactly the same
 * order in which the predicates were appended. Keep the two in sync.
 *
 * NOTE(review): despite the class being the "generic ANSI" DAO, the query
 * uses MySQL-style "LIMIT ?,?" pagination and a hard-coded cross-database
 * table reference (WSO2MobileAndroid_DB.AD_DEVICE) — confirm portability.
 *
 * @param request pagination window plus optional filter values
 * @param tenantId tenant to scope the query to
 * @return the matching devices (possibly empty page)
 * @throws DeviceManagementDAOException on any SQL error
 */
@Override
public List<Device> getDevices(PaginationRequest request, int tenantId)
throws DeviceManagementDAOException {
Connection conn;
PreparedStatement stmt = null;
ResultSet rs = null;
List<Device> devices = null;
// Extract each optional filter; the flag is set only when the matching
// predicate is actually appended to the SQL below.
String deviceType = request.getDeviceType();
boolean isDeviceTypeProvided = false;
String deviceName = request.getDeviceName();
boolean isDeviceNameProvided = false;
String owner = request.getOwner();
boolean isOwnerProvided = false;
String ownership = request.getOwnership();
boolean isOwnershipProvided = false;
String status = request.getStatus();
boolean isStatusProvided = false;
Date since = request.getSince();
boolean isSinceProvided = false;
String model = request.getModel();
boolean isModelProvided = false;
try {
conn = this.getConnection();
/*String sql = "SELECT d1.ID AS DEVICE_ID, d1.DESCRIPTION, d1.NAME AS DEVICE_NAME, d1.DEVICE_TYPE, " +
"d1.DEVICE_IDENTIFICATION, e.OWNER, e.OWNERSHIP, e.STATUS, e.DATE_OF_LAST_UPDATE, " +
"e.DATE_OF_ENROLMENT, e.ID AS ENROLMENT_ID FROM DM_ENROLMENT e, (SELECT d.ID, d.DESCRIPTION, " +
"d.NAME, d.DEVICE_IDENTIFICATION, t.NAME AS DEVICE_TYPE " +
"FROM DM_DEVICE d, DM_DEVICE_TYPE t ";*/
// Base query: inner select joins device, type and (Android-specific,
// cross-database) AD_DEVICE for the model column; outer join adds enrolment.
String sql = "SELECT d1.ID AS DEVICE_ID, d1.DESCRIPTION, d1.NAME AS DEVICE_NAME, d1.DEVICE_TYPE, " +
"d1.DEVICE_IDENTIFICATION, e.OWNER, e.OWNERSHIP, e.STATUS, e.DATE_OF_LAST_UPDATE, " +
"e.DATE_OF_ENROLMENT, e.ID AS ENROLMENT_ID FROM DM_ENROLMENT e, (SELECT d.ID, d.DESCRIPTION, " +
"d.NAME, d.DEVICE_IDENTIFICATION, t.NAME AS DEVICE_TYPE ,a.DEVICE_MODEL " +
"FROM DM_DEVICE d, DM_DEVICE_TYPE t ,WSO2MobileAndroid_DB.AD_DEVICE a ";
//Add the query to filter active devices on timestamp
if (since != null) {
sql = sql + ", DM_DEVICE_DETAIL dt";
isSinceProvided = true;
}
sql = sql + " WHERE DEVICE_TYPE_ID = t.ID AND d.DEVICE_IDENTIFICATION = a.DEVICE_ID AND d.TENANT_ID = ?";
//Add query for last updated timestamp
if (isSinceProvided) {
sql = sql + " AND dt.DEVICE_ID = d.ID AND dt.UPDATE_TIMESTAMP > ?";
}
//Add the query for device-type
if (deviceType != null && !deviceType.isEmpty()) {
sql = sql + " AND t.NAME = ?";
isDeviceTypeProvided = true;
}
//Add the query for device-name
if (deviceName != null && !deviceName.isEmpty()) {
sql = sql + " AND d.NAME LIKE ?";
isDeviceNameProvided = true;
}
sql = sql + ") d1 WHERE d1.ID = e.DEVICE_ID AND e.STATUS != 'REMOVED' AND TENANT_ID = ?";
// Model filter; the sentinel value "all" (case-insensitive) disables it.
if(model!=null && !model.equalsIgnoreCase("all")) {
sql = sql + " AND d1.DEVICE_MODEL like ? ";
isModelProvided = true;
}
//Add the query for ownership
if (ownership != null && !ownership.isEmpty()) {
sql = sql + " AND e.OWNERSHIP = ?";
isOwnershipProvided = true;
}
//Add the query for owner
if (owner != null && !owner.isEmpty()) {
sql = sql + " AND e.OWNER LIKE ?";
isOwnerProvided = true;
}
//Add the query for status
if (status != null && !status.isEmpty()) {
sql = sql + " AND e.STATUS = ?";
isStatusProvided = true;
}
// NOTE(review): "LIMIT ?,?" is MySQL pagination syntax, not ANSI SQL.
sql = sql + " LIMIT ?,?";
stmt = conn.prepareStatement(sql);
// Bind parameters in the exact order the predicates were appended above.
stmt.setInt(1, tenantId);
int paramIdx = 2;
if (isSinceProvided) {
// NOTE(review): binds epoch millis via setLong — assumes UPDATE_TIMESTAMP
// is stored as a numeric millisecond value, not a SQL TIMESTAMP; confirm.
stmt.setLong(paramIdx++, since.getTime());
}
if (isDeviceTypeProvided) {
stmt.setString(paramIdx++, request.getDeviceType());
}
if (isDeviceNameProvided) {
// Prefix match on device name.
stmt.setString(paramIdx++, request.getDeviceName() + "%");
}
stmt.setInt(paramIdx++, tenantId);
if(isModelProvided) {
// NOTE(review): bound without wildcards, so LIKE behaves as equality here.
stmt.setString(paramIdx++, request.getModel());
}
if (isOwnershipProvided) {
stmt.setString(paramIdx++, request.getOwnership());
}
if (isOwnerProvided) {
// Prefix match on owner.
stmt.setString(paramIdx++, request.getOwner() + "%");
}
if (isStatusProvided) {
stmt.setString(paramIdx++, request.getStatus());
}
// Pagination window: offset, row count.
stmt.setInt(paramIdx++, request.getStartIndex());
stmt.setInt(paramIdx, request.getRowCount());
rs = stmt.executeQuery();
devices = new ArrayList<>();
while (rs.next()) {
Device device = DeviceManagementDAOUtil.loadDevice(rs);
devices.add(device);
}
} catch (SQLException e) {
throw new DeviceManagementDAOException("Error occurred while retrieving information of all " +
"registered devices", e);
} finally {
DeviceManagementDAOUtil.cleanupResources(stmt, rs);
}
return devices;
}
/**
 * Returns one page of devices of the given type for the tenant, paginated
 * with a MySQL-style LIMIT offset/count window.
 *
 * @param request carries the device type and the pagination window
 * @param tenantId tenant to scope the query to
 * @return the matching devices (possibly an empty page)
 * @throws DeviceManagementDAOException on any SQL error
 */
@Override
public List<Device> getDevicesByType(PaginationRequest request, int tenantId)
        throws DeviceManagementDAOException {
    PreparedStatement stmt = null;
    ResultSet rs = null;
    List<Device> devices = null;
    String sql = "SELECT d1.ID AS DEVICE_ID, d1.DESCRIPTION, d1.NAME AS DEVICE_NAME, d1.DEVICE_TYPE, " +
            "d1.DEVICE_IDENTIFICATION, e.OWNER, e.OWNERSHIP, e.STATUS, e.DATE_OF_LAST_UPDATE, " +
            "e.DATE_OF_ENROLMENT, e.ID AS ENROLMENT_ID FROM DM_ENROLMENT e, (SELECT d.ID, d.DESCRIPTION, " +
            "d.NAME, d.DEVICE_IDENTIFICATION, t.NAME AS DEVICE_TYPE FROM DM_DEVICE d, " +
            "DM_DEVICE_TYPE t WHERE DEVICE_TYPE_ID = t.ID AND t.NAME = ? " +
            "AND d.TENANT_ID = ?) d1 WHERE d1.ID = e.DEVICE_ID AND TENANT_ID = ? LIMIT ?,?";
    try {
        Connection conn = this.getConnection();
        stmt = conn.prepareStatement(sql);
        stmt.setString(1, request.getDeviceType());
        stmt.setInt(2, tenantId);
        stmt.setInt(3, tenantId);
        // Pagination window: offset, row count.
        stmt.setInt(4, request.getStartIndex());
        stmt.setInt(5, request.getRowCount());
        rs = stmt.executeQuery();
        devices = new ArrayList<>();
        while (rs.next()) {
            devices.add(DeviceManagementDAOUtil.loadDevice(rs));
        }
    } catch (SQLException e) {
        throw new DeviceManagementDAOException("Error occurred while listing devices for type '" + request.getDeviceType() + "'", e);
    } finally {
        DeviceManagementDAOUtil.cleanupResources(stmt, rs);
    }
    return devices;
}
/**
 * Retrieves one page of devices enrolled by the given user within the tenant.
 *
 * @param request  pagination request carrying the owner, start index and row count
 * @param tenantId id of the tenant whose devices are listed
 * @return the matching page of devices (empty when none match)
 * @throws DeviceManagementDAOException when the underlying query fails
 */
@Override
public List<Device> getDevicesOfUser(PaginationRequest request, int tenantId)
        throws DeviceManagementDAOException {
    Connection conn;
    PreparedStatement stmt = null;
    // Fix: the ResultSet was previously a try-local and never handed to
    // cleanupResources, leaking a cursor on every call.
    ResultSet rs = null;
    List<Device> devices = new ArrayList<>();
    try {
        conn = this.getConnection();
        String sql = "SELECT e1.OWNER, e1.OWNERSHIP, e1.ENROLMENT_ID, e1.DEVICE_ID, e1.STATUS, e1.DATE_OF_LAST_UPDATE," +
                " e1.DATE_OF_ENROLMENT, d.DESCRIPTION, d.NAME AS DEVICE_NAME, d.DEVICE_IDENTIFICATION, t.NAME " +
                "AS DEVICE_TYPE FROM DM_DEVICE d, (SELECT e.OWNER, e.OWNERSHIP, e.ID AS ENROLMENT_ID, " +
                "e.DEVICE_ID, e.STATUS, e.DATE_OF_LAST_UPDATE, e.DATE_OF_ENROLMENT FROM DM_ENROLMENT e WHERE " +
                "e.TENANT_ID = ? AND e.OWNER = ?) e1, DM_DEVICE_TYPE t WHERE d.ID = e1.DEVICE_ID " +
                "AND t.ID = d.DEVICE_TYPE_ID LIMIT ?,?";
        stmt = conn.prepareStatement(sql);
        stmt.setInt(1, tenantId);
        stmt.setString(2, request.getOwner());
        stmt.setInt(3, request.getStartIndex());
        stmt.setInt(4, request.getRowCount());
        rs = stmt.executeQuery();
        while (rs.next()) {
            Device device = DeviceManagementDAOUtil.loadDevice(rs);
            devices.add(device);
        }
    } catch (SQLException e) {
        throw new DeviceManagementDAOException("Error occurred while fetching the list of devices belongs to '" +
                request.getOwner() + "'", e);
    } finally {
        // Close both the statement and the result set.
        DeviceManagementDAOUtil.cleanupResources(stmt, rs);
    }
    return devices;
}
/**
 * Retrieves one page of devices whose name starts with
 * {@code request.getDeviceName()} within the tenant.
 *
 * @param request  pagination request carrying the name prefix, start index and row count
 * @param tenantId id of the tenant whose devices are listed
 * @return the matching page of devices (empty when none match)
 * @throws DeviceManagementDAOException when the underlying query fails
 */
@Override
public List<Device> getDevicesByName(PaginationRequest request, int tenantId)
        throws DeviceManagementDAOException {
    Connection conn;
    PreparedStatement stmt = null;
    // Fix: the ResultSet was previously never handed to cleanupResources.
    ResultSet rs = null;
    List<Device> devices = new ArrayList<>();
    try {
        conn = this.getConnection();
        // Fix: the join predicate was "DEVICE_ID = e.DEVICE_ID"; since only
        // the enrolment side exposes DEVICE_ID, that resolved to
        // e.DEVICE_ID = e.DEVICE_ID (always true) and produced a cartesian
        // product. Join on d1.ID as getDevicesByType does.
        String sql = "SELECT d1.ID AS DEVICE_ID, d1.DESCRIPTION, d1.NAME AS DEVICE_NAME, d1.DEVICE_TYPE, " +
                "d1.DEVICE_IDENTIFICATION, e.OWNER, e.OWNERSHIP, e.STATUS, e.DATE_OF_LAST_UPDATE, " +
                "e.DATE_OF_ENROLMENT, e.ID AS ENROLMENT_ID FROM DM_ENROLMENT e, (SELECT d.ID, d.NAME, " +
                "d.DESCRIPTION, t.NAME AS DEVICE_TYPE, d.DEVICE_IDENTIFICATION FROM DM_DEVICE d, " +
                "DM_DEVICE_TYPE t WHERE d.DEVICE_TYPE_ID = t.ID AND d.NAME LIKE ? AND d.TENANT_ID = ?) d1 " +
                "WHERE d1.ID = e.DEVICE_ID AND TENANT_ID = ? LIMIT ?,?";
        stmt = conn.prepareStatement(sql);
        stmt.setString(1, request.getDeviceName() + "%");
        stmt.setInt(2, tenantId);
        stmt.setInt(3, tenantId);
        stmt.setInt(4, request.getStartIndex());
        stmt.setInt(5, request.getRowCount());
        rs = stmt.executeQuery();
        while (rs.next()) {
            Device device = DeviceManagementDAOUtil.loadDevice(rs);
            devices.add(device);
        }
    } catch (SQLException e) {
        throw new DeviceManagementDAOException("Error occurred while fetching the list of devices that matches " +
                "'" + request.getDeviceName() + "'", e);
    } finally {
        DeviceManagementDAOUtil.cleanupResources(stmt, rs);
    }
    return devices;
}
/**
 * Retrieves one page of devices enrolled under the given ownership type
 * (e.g. BYOD/COPE) within the tenant.
 *
 * @param request  pagination request carrying the ownership, start index and row count
 * @param tenantId id of the tenant whose devices are listed
 * @return the matching page of devices (empty when none match)
 * @throws DeviceManagementDAOException when the underlying query fails
 */
@Override
public List<Device> getDevicesByOwnership(PaginationRequest request, int tenantId)
        throws DeviceManagementDAOException {
    Connection conn;
    PreparedStatement stmt = null;
    // Fix: the ResultSet was previously never handed to cleanupResources.
    ResultSet rs = null;
    List<Device> devices = new ArrayList<>();
    try {
        conn = this.getConnection();
        // Fix: "WHERE DEVICE_ID = e.DEVICE_ID" resolved to the tautology
        // e.DEVICE_ID = e.DEVICE_ID (DM_DEVICE has no DEVICE_ID column),
        // producing a cartesian product. Join the device PK instead.
        String sql = "SELECT d.ID AS DEVICE_ID, d.DESCRIPTION, d.NAME AS DEVICE_NAME, t.NAME AS DEVICE_TYPE, " +
                "d.DEVICE_IDENTIFICATION, e.OWNER, e.OWNERSHIP, e.STATUS, e.DATE_OF_LAST_UPDATE, " +
                "e.DATE_OF_ENROLMENT, e.ID AS ENROLMENT_ID FROM (SELECT e.ID, e.DEVICE_ID, e.OWNER, e.OWNERSHIP, e.STATUS, " +
                "e.DATE_OF_ENROLMENT, e.DATE_OF_LAST_UPDATE, e.ID AS ENROLMENT_ID FROM DM_ENROLMENT e " +
                "WHERE TENANT_ID = ? AND OWNERSHIP = ?) e, DM_DEVICE d, DM_DEVICE_TYPE t " +
                "WHERE d.ID = e.DEVICE_ID AND d.DEVICE_TYPE_ID = t.ID AND d.TENANT_ID = ? LIMIT ?,?";
        stmt = conn.prepareStatement(sql);
        stmt.setInt(1, tenantId);
        stmt.setString(2, request.getOwnership());
        stmt.setInt(3, tenantId);
        stmt.setInt(4, request.getStartIndex());
        stmt.setInt(5, request.getRowCount());
        rs = stmt.executeQuery();
        while (rs.next()) {
            Device device = DeviceManagementDAOUtil.loadDevice(rs);
            devices.add(device);
        }
    } catch (SQLException e) {
        throw new DeviceManagementDAOException("Error occurred while fetching the list of devices that matches to ownership " +
                "'" + request.getOwnership() + "'", e);
    } finally {
        DeviceManagementDAOUtil.cleanupResources(stmt, rs);
    }
    return devices;
}
/**
 * Retrieves one page of devices whose enrolment status matches
 * {@code request.getStatus()} within the tenant.
 *
 * @param request  pagination request carrying the status, start index and row count
 * @param tenantId id of the tenant whose devices are listed
 * @return the matching page of devices (empty when none match)
 * @throws DeviceManagementDAOException when the underlying query fails
 */
@Override
public List<Device> getDevicesByStatus(PaginationRequest request, int tenantId)
        throws DeviceManagementDAOException {
    Connection conn;
    PreparedStatement stmt = null;
    // Fix: the ResultSet was previously never handed to cleanupResources.
    ResultSet rs = null;
    List<Device> devices = new ArrayList<>();
    try {
        conn = this.getConnection();
        // Fix: "WHERE DEVICE_ID = e.DEVICE_ID" resolved to the tautology
        // e.DEVICE_ID = e.DEVICE_ID (DM_DEVICE has no DEVICE_ID column),
        // producing a cartesian product. Join the device PK instead.
        String sql = "SELECT d.ID AS DEVICE_ID, d.DESCRIPTION, d.NAME AS DEVICE_NAME, t.NAME AS DEVICE_TYPE, " +
                "d.DEVICE_IDENTIFICATION, e.OWNER, e.OWNERSHIP, e.STATUS, e.DATE_OF_LAST_UPDATE, " +
                "e.DATE_OF_ENROLMENT, e.ID AS ENROLMENT_ID FROM (SELECT e.ID, e.DEVICE_ID, e.OWNER, e.OWNERSHIP, e.STATUS, " +
                "e.DATE_OF_ENROLMENT, e.DATE_OF_LAST_UPDATE, e.ID AS ENROLMENT_ID FROM DM_ENROLMENT e " +
                "WHERE TENANT_ID = ? AND STATUS = ?) e, DM_DEVICE d, DM_DEVICE_TYPE t " +
                "WHERE d.ID = e.DEVICE_ID AND d.DEVICE_TYPE_ID = t.ID AND d.TENANT_ID = ? LIMIT ?,?";
        stmt = conn.prepareStatement(sql);
        stmt.setInt(1, tenantId);
        stmt.setString(2, request.getStatus());
        stmt.setInt(3, tenantId);
        stmt.setInt(4, request.getStartIndex());
        stmt.setInt(5, request.getRowCount());
        rs = stmt.executeQuery();
        while (rs.next()) {
            Device device = DeviceManagementDAOUtil.loadDevice(rs);
            devices.add(device);
        }
    } catch (SQLException e) {
        throw new DeviceManagementDAOException("Error occurred while fetching the list of devices that matches to status " +
                "'" + request.getStatus() + "'", e);
    } finally {
        DeviceManagementDAOUtil.cleanupResources(stmt, rs);
    }
    return devices;
}
/**
 * Get the list of devices that matches with the given device name and (or) device type.
 * Both filters are optional: an empty or null value disables that filter.
 *
 * @param deviceName name of the device; prefix-matched ({@code LIKE name%}) when non-empty
 * @param type       device type name; exact-matched when non-empty
 * @param tenantId   Id of the current tenant
 * @param offset     start index of the result page
 * @param limit      maximum number of rows to return
 * @return device list
 * @throws DeviceManagementDAOException when the underlying query fails
 */
@Override
public List<Device> getDevicesByNameAndType(String deviceName, String type, int tenantId, int offset, int limit)
        throws DeviceManagementDAOException {
    // Build the optional WHERE fragments in the same order the parameters
    // are bound below.
    String filteringString = "";
    if (deviceName != null && !deviceName.isEmpty()) {
        filteringString = filteringString + " AND d.NAME LIKE ?";
    }
    if (type != null && !type.isEmpty()) {
        filteringString = filteringString + " AND t.NAME = ?";
    }
    Connection conn;
    PreparedStatement stmt = null;
    List<Device> devices = new ArrayList<>();
    ResultSet rs = null;
    try {
        conn = this.getConnection();
        // NOTE(review): unlike the sibling queries, the enrolment rows are
        // not filtered by tenant here — confirm whether an e.TENANT_ID
        // predicate is intended.
        String sql = "SELECT d1.ID AS DEVICE_ID, d1.DESCRIPTION, d1.NAME AS DEVICE_NAME, d1.DEVICE_TYPE, " +
                "d1.DEVICE_IDENTIFICATION, e.OWNER, e.OWNERSHIP, e.STATUS, e.DATE_OF_LAST_UPDATE, " +
                "e.DATE_OF_ENROLMENT, e.ID AS ENROLMENT_ID FROM DM_ENROLMENT e, (SELECT d.ID, d.NAME, " +
                "d.DESCRIPTION, d.DEVICE_IDENTIFICATION, t.NAME AS DEVICE_TYPE FROM DM_DEVICE d, " +
                "DM_DEVICE_TYPE t WHERE d.DEVICE_TYPE_ID = t.ID AND d.TENANT_ID = ?" + filteringString +
                ") d1 WHERE d1.ID = e.DEVICE_ID LIMIT ?, ?";
        stmt = conn.prepareStatement(sql);
        // Bind parameters with a single running index, consistent with the
        // paramIdx idiom used by the other filtered queries in this DAO.
        int paramIdx = 1;
        stmt.setInt(paramIdx++, tenantId);
        if (deviceName != null && !deviceName.isEmpty()) {
            stmt.setString(paramIdx++, deviceName + "%");
        }
        if (type != null && !type.isEmpty()) {
            stmt.setString(paramIdx++, type);
        }
        stmt.setInt(paramIdx++, offset);
        stmt.setInt(paramIdx, limit);
        rs = stmt.executeQuery();
        while (rs.next()) {
            Device device = DeviceManagementDAOUtil.loadDevice(rs);
            devices.add(device);
        }
    } catch (SQLException e) {
        throw new DeviceManagementDAOException("Error occurred while fetching the list of devices corresponding" +
                "to the mentioned filtering criteria", e);
    } finally {
        DeviceManagementDAOUtil.cleanupResources(stmt, rs);
    }
    return devices;
}
/** Returns the thread-bound connection managed by DeviceManagementDAOFactory. */
private Connection getConnection() throws SQLException {
    return DeviceManagementDAOFactory.getConnection();
}
}
| |
/*
* @(#)file GenericHttpRequestHandler.java
* @(#)author Sun Microsystems, Inc.
* @(#)version 1.40
* @(#)date 07/10/01
*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2007 Sun Microsystems, Inc. All Rights Reserved.
*
* The contents of this file are subject to the terms of either the GNU General
* Public License Version 2 only ("GPL") or the Common Development and
* Distribution License("CDDL")(collectively, the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy of the
* License at http://opendmk.dev.java.net/legal_notices/licenses.txt or in the
* LEGAL_NOTICES folder that accompanied this code. See the License for the
* specific language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file found at
* http://opendmk.dev.java.net/legal_notices/licenses.txt
* or in the LEGAL_NOTICES folder that accompanied this code.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code.
*
* If applicable, add the following below the License Header, with the fields
* enclosed by brackets [] replaced by your own identifying information:
*
* "Portions Copyrighted [year] [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding
*
* "[Contributor] elects to include this software in this distribution
* under the [CDDL or GPL Version 2] license."
*
* If you don't indicate a single choice of license, a recipient has the option
* to distribute your version of this file under either the CDDL or the GPL
* Version 2, or to extend the choice of license to its licensees as provided
* above. However, if you add GPL Version 2 code and therefore, elected the
* GPL Version 2 license, then the option applies only if the new code is made
* subject to such option by the copyright holder.
*
*
*/
package com.sun.jdmk.comm;
// java import
//
import java.io.*;
import java.net.*;
import java.util.Set;
import java.util.Date;
import java.util.StringTokenizer;
import java.util.Hashtable;
// jmx import
//
import javax.management.QueryExp;
import javax.management.MBeanInfo;
import javax.management.ObjectName;
import javax.management.MBeanServer;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.ObjectInstance;
import javax.management.MBeanException;
import javax.management.OperationsException;
import javax.management.ReflectionException;
import javax.management.IntrospectionException;
import javax.management.InstanceNotFoundException;
import javax.management.AttributeNotFoundException;
import javax.management.MBeanRegistrationException;
import javax.management.NotCompliantMBeanException;
import javax.management.MalformedObjectNameException;
import javax.management.InstanceAlreadyExistsException;
import javax.management.InvalidAttributeValueException;
import javax.management.JMException;
// jdmk import
//
import com.sun.jdmk.ThreadContext;
import com.sun.jdmk.OperationContext;
class GenericHttpRequestHandler extends GenericHttpClientHandler {
// CONSTRUCTOR
//------------
/**
 * Builds a request handler bound to the given connector server and socket.
 * The MBeanServer is validated while evaluating the super() arguments, so a
 * null server fails fast BEFORE any superclass initialization runs (the
 * original checked only after super() had already completed).
 */
public GenericHttpRequestHandler(GenericHttpConnectorServer server,
                                 int id, GenericHttpSocket s,
                                 MBeanServer mbs, ObjectName name) {
    super(server, id, s, requireMBeanServer(mbs), name);
}

/** Rejects a null MBeanServer before the handler is wired up. */
private static MBeanServer requireMBeanServer(MBeanServer mbs) {
    if (mbs == null)
        throw new CommunicationException("No MBeanServer provided");
    return mbs;
}
// PROTECTED METHODS
// -----------------
/**
 * Authenticates the request and returns null if it fails. If the
 * authentication is disabled, this method returns an AuthInfo
 * object with login and password both null. If authentication is
 * enabled and passes, this method returns the AuthInfo that was
 * used to authenticate successfully.
 *
 * Only the CRAM-MD5 scheme is recognised; any other Authorization
 * header (or none) fails authentication when it is enabled.
 */
protected AuthInfo authenticateRequest(HttpRequest request) throws IOException {
    GenericHttpConnectorServer server =
        (GenericHttpConnectorServer) adaptorServer;
    // Authentication disabled: accept with an anonymous AuthInfo.
    if (!server.isAuthenticationOn())
        return makeNullAuthInfo();
    AuthInfo authInfo = null;
    // NOTE(review): AUTHORIZATION_HEADER is accessed through the instance;
    // presumably a constant on HttpRequest — confirm.
    final String authorization =
        request.getHeader(request.AUTHORIZATION_HEADER);
    if (authorization != null
        && authorization.startsWith("CRAM-MD5 ")) {
        String encoding = authorization.substring("CRAM-MD5 ".length());
        byte decoding[] = new BASE64Decoder().decodeBuffer(encoding);
        // NOTE(review): decodes with the platform default charset — verify
        // this matches what the client used to encode the response.
        String response = new String(decoding);
        // The server compares the response against the challenge it issued.
        authInfo = server.checkChallengeResponse(response);
    }
    if (logger.finestOn()) {
        if (authInfo != null) {
            logger.finest("authenticateRequest", "Authentication succeeded");
        } else {
            logger.finest("authenticateRequest", "Authentication failed");
        }
    }
    return authInfo;
}
/**
 * Process an incoming post request and return the response.
 *
 * The request body is a Java-serialized stream laid out as:
 * [optional OperationContext] [remote operation name (String)]
 * [operation parameters, per the methodList table]. The result of the
 * dispatched operation is serialized back into an OK response; any
 * exception raised while decoding or executing is serialized into an
 * exception response instead.
 */
protected HttpResponse processPostRequest(HttpRequest request)
    throws IOException {
    if (logger.finerOn()) {
        logger.finer("processPostRequest",
              "Process a POST request = " + request.getURIPath());
    }
    //
    // Check that content is not null
    //
    if (request.getContentLength() == 0) {
        //
        // Send a Bad Request
        //
        final CommunicationException ce =
            new CommunicationException("Content is null");
        return makeExceptionResponse(ce);
    }
    //
    // Get remote operation to be performed
    //
    String remoteOp = null;
    Object result;
    String resultType;
    ByteArrayInputStream bIn;
    ObjectInputStream objIn;
    ThreadContext oldThreadContext = null;
    try {
        bIn = new ByteArrayInputStream(request.getContentBytes());
        objIn = new ObjectInputStream(bIn);
        /* An old (Java DMK 4.1) client, or a new (Java DMK 4.2) client that has not
           set an OperationContext, sends the remote operation string as
           the first item. A new Java DMK 4.2 client sends the
           OperationContext first, then the same thing as a client without
           an OperationContext. This is slightly more awkward than if we
           sent the OperationContext at the end, but has the advantage
           that we already have the context in place for the cases where
           we need to call mbs.deserialize. */
        Object contextOrRemoteOp = objIn.readObject();
        if (contextOrRemoteOp instanceof OperationContext) {
            oldThreadContext =
                ThreadContext.push("OperationContext", contextOrRemoteOp);
            contextOrRemoteOp = objIn.readObject();
        }
        remoteOp = (String) contextOrRemoteOp;
        // NOTE(review): trace tag "doRun" predates this method's name;
        // kept as-is since it is a runtime string.
        if (logger.finerOn())
            logger.finer("doRun", "Remote operation: " + remoteOp);
        final ParsedMethod pm = (ParsedMethod) methodTable.get(remoteOp);
        if (pm == null) {
            if (logger.finerOn())
                logger.finer("doRun", "Unknown remote operation: " + remoteOp);
            return makeErrorResponse(HttpDef.HTTP_ERROR_BAD_REQUEST_ID,
                                     HttpDef.HTTP_ERROR_BAD_REQUEST);
        }
        resultType = pm.resultType;
        if (pm.magicParam > 0) {
            // The first magicParam parameters identify an MBean (and
            // possibly a class loader); re-open the stream through
            // mbs.deserialize so the remaining parameter classes resolve
            // with the appropriate loader.
            final Object[] args = readObjects(objIn, pm.magicParam);
            switch (pm.methodNumber) {
            case CREATE_MBEAN_PARAMS:
                objIn = mbs.deserialize((String) args[0],
                                        request.getContentBytes());
                break;
            case CREATE_MBEAN_LOADER_PARAMS:
                objIn = mbs.deserialize((String) args[0],
                                        (ObjectName) args[2],
                                        request.getContentBytes());
                break;
            case INVOKE:
            case SET_ATTRIBUTE:
            case SET_ATTRIBUTES:
                objIn = mbs.deserialize((ObjectName) args[0],
                                        request.getContentBytes());
                break;
            default:
                throw new Error("method marked as magic but not in " +
                                "switch: " + remoteOp);
            }
            // The re-opened stream starts from the beginning: skip the
            // context (if any) and the operation name again, and sanity
            // check that the operation name did not change.
            contextOrRemoteOp = objIn.readObject();
            if (contextOrRemoteOp instanceof OperationContext)
                contextOrRemoteOp = objIn.readObject();
            if (!((String) contextOrRemoteOp).equals(remoteOp))
                throw new Error("remoteOp changed after deserialize");
        }
        final Object[] args = readObjects(objIn, pm.nParams);
        result = doOperation(pm.methodNumber, args);
    } catch (Exception e) {
        if (logger.finerOn())
            logger.finer("doRun", remoteOp + ":" + e.getMessage());
        return makeExceptionResponse(e);
    } finally {
        if (oldThreadContext != null)
            ThreadContext.restore(oldThreadContext);
    }
    //
    // Serialize the result and send OK
    //
    final ByteArrayOutputStream bOut = serialize(resultType, result);
    return makeOkResponse(bOut.toByteArray());
}
/**
 * Dispatches a decoded remote operation to the corresponding local
 * MBeanServer (or connector infrastructure) method.
 *
 * @param methodNumber one of the constants derived from methodList
 * @param args the already-deserialized parameters, in protocol order
 * @return the operation result (null for void operations)
 * @throws Exception whatever the underlying operation throws; an unknown
 *         method number is an internal protocol error (Error)
 */
Object doOperation(int methodNumber, Object[] args) throws Exception {
    switch (methodNumber) {
    case CREATE_MBEAN:
        return createMBean((String) args[0], (ObjectName) args[1]);
    case CREATE_MBEAN_PARAMS:
        return createMBean((String) args[0], (ObjectName) args[1],
                           (Object[]) args[2], (String[]) args[3]);
    case CREATE_MBEAN_LOADER:
        return createMBean((String) args[0], (ObjectName) args[1],
                           (ObjectName) args[2]);
    case CREATE_MBEAN_LOADER_PARAMS:
        return createMBean((String) args[0], (ObjectName) args[1],
                           (ObjectName) args[2], (Object[]) args[3],
                           (String[]) args[4]);
    case GET_ATTRIBUTE:
        return getAttribute((ObjectName) args[0], (String) args[1]);
    case GET_ATTRIBUTES:
        return getAttributes((ObjectName) args[0], (String[]) args[1]);
    case GET_DEFAULT_DOMAIN:
        return getDefaultDomain();
    case IS_INSTANCE_OF:
        // Boolean.valueOf reuses the cached TRUE/FALSE instances instead
        // of allocating a new Boolean per call.
        return Boolean.valueOf(isInstanceOf((ObjectName) args[0],
                                            (String) args[1]));
    case GET_OBJECT_INSTANCE:
        return getObjectInstance((ObjectName) args[0]);
    case GET_MBEAN_COUNT:
        return getMBeanCount();
    case GET_MBEAN_SERVER_ID:
        return getMBeanServerId();
    case GET_MBEAN_INFO:
        return getMBeanInfo((ObjectName) args[0]);
    case INVOKE:
        return invoke((ObjectName) args[0], (String) args[1],
                      (Object[]) args[2], (String[]) args[3]);
    case IS_REGISTERED:
        return Boolean.valueOf(isRegistered((ObjectName) args[0]));
    case QUERY_NAMES:
        return queryNames((ObjectName) args[0], (QueryExp) args[1]);
    case QUERY_MBEANS:
        return queryMBeans((ObjectName) args[0], (QueryExp) args[1]);
    case SET_ATTRIBUTE:
        setAttribute((ObjectName) args[0], (Attribute) args[1]);
        return null;
    case SET_ATTRIBUTES:
        return setAttributes((ObjectName) args[0],
                             (AttributeList) args[1]);
    case UNREGISTER_MBEAN:
        unregisterMBean((ObjectName) args[0]);
        return null;
    case REMOTE_REQUEST:
        final int opType = ((Integer) args[0]).intValue();
        return remoteRequest(opType, (Object[]) args[1]);
    case PING_HEART_BEAT_SERVER:
        return pingHeartBeatServer((String) args[0],
                                   ((Integer) args[1]).intValue(),
                                   ((Integer) args[2]).intValue(),
                                   (Long) args[3]);
    case SUPPORTS:
        return Boolean.valueOf(supports((String) args[0]));
    default:
        throw new Error("internal error: bad method number " +
                        methodNumber);
    }
}
/**
 * Reads {@code nObjects} consecutive objects from the stream and returns
 * them in arrival order.
 */
private Object[] readObjects(ObjectInputStream s, int nObjects)
    throws IOException, ClassNotFoundException {
    final Object[] result = new Object[nObjects];
    int read = 0;
    while (read < nObjects) {
        result[read] = s.readObject();
        read++;
    }
    return result;
}
// HTTP SPECIFIC METHODS
//----------------------
/**
 * Generate a challenge if authentication is required.
 *
 * Returns a "CRAM-MD5 "-prefixed, base64-encoded challenge, or null when
 * authentication is disabled on the connector server.
 */
protected String getChallenge() {
    GenericHttpConnectorServer server = (GenericHttpConnectorServer) adaptorServer;
    if (server.isAuthenticationOn()) {
        //
        // Generate CRAM-MD5 challenge and store expected response
        //
        String challenge = server.generateChallengeResponse();
        // The maximum number of bytes encoded at a time by the BASE64Encoder is 57.
        // This results in encoded lines being no more than 76 characters long.
        final int maxBytesPerLine = 57;
        // Accumulate in a StringBuffer: the original String '+=' in a loop
        // was quadratic in the challenge length.
        StringBuffer encodedChallenge = new StringBuffer();
        String chunk = null;
        int quotient = challenge.length() / maxBytesPerLine;
        int modulus = challenge.length() % maxBytesPerLine;
        for (int i = 0; i < quotient; i++) {
            chunk = challenge.substring((i*maxBytesPerLine),(i+1)*maxBytesPerLine);
            encodedChallenge.append(new BASE64Encoder().encode(chunk.getBytes()));
        }
        if (modulus > 0) {
            chunk = challenge.substring(quotient*maxBytesPerLine);
            encodedChallenge.append(new BASE64Encoder().encode(chunk.getBytes()));
        }
        return "CRAM-MD5 " + encodedChallenge;
    } else {
        return null;
    }
}
// MAPPING OF CLIENT REQUESTS TO MBEANSERVER METHODS
//--------------------------------------------------
/**
 * ObjectInstance
 * createMBean(String className, ObjectName name)
 * Creates and registers an instance of an MBean in the remote object server.
 */
private ObjectInstance createMBean(String className, ObjectName name)
    throws InstanceAlreadyExistsException, MBeanException, MBeanRegistrationException,
           NotCompliantMBeanException, ReflectionException {
    if (logger.finerOn())
        logger.finer("createMBean", "createMBean");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.createMBean(className, name);
}
/**
 * ObjectInstance
 * createMBean(String className, ObjectName name, Object[] params, String[] signature)
 * Creates and registers an instance of an MBean in the remote object server,
 * invoking the constructor selected by {@code signature}.
 */
private ObjectInstance createMBean(String className, ObjectName name, Object[] params, String[] signature)
    throws InstanceAlreadyExistsException, MBeanException, MBeanRegistrationException,
           NotCompliantMBeanException, ReflectionException {
    if (logger.finerOn())
        logger.finer("createMBean", "createMBean");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.createMBean(className, name, params, signature);
}
/**
 * ObjectInstance
 * createMBean(String className, ObjectName name, ObjectName loaderName)
 * Creates and registers an instance of an MBean in the remote object server,
 * loading its class through the named class loader MBean.
 */
private ObjectInstance createMBean(String className, ObjectName name, ObjectName loaderName)
    throws InstanceAlreadyExistsException, MBeanException, MBeanRegistrationException,
           NotCompliantMBeanException, ReflectionException, InstanceNotFoundException {
    if (logger.finerOn())
        logger.finer("createMBean", "createMBean");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.createMBean(className, name, loaderName);
}
/**
 * ObjectInstance
 * createMBean(String className, ObjectName name, ObjectName loaderName, Object[] params, String[] signature)
 * Creates and registers an instance of an MBean in the remote object server,
 * loading its class through the named class loader MBean and invoking the
 * constructor selected by {@code signature}.
 */
private ObjectInstance createMBean(String className, ObjectName name, ObjectName loaderName, Object[] params, String[] signature)
    throws InstanceAlreadyExistsException, MBeanException, MBeanRegistrationException,
           NotCompliantMBeanException, ReflectionException, InstanceNotFoundException {
    if (logger.finerOn())
        logger.finer("createMBean", "createMBean");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.createMBean(className, name, loaderName, params, signature);
}
/**
 * Object
 * getAttribute(ObjectName name, String attribute)
 * Gets the value of a specific attribute of a named MBean.
 */
private Object getAttribute(ObjectName name, String attribute)
    throws AttributeNotFoundException, InstanceNotFoundException, MBeanException, ReflectionException {
    if (logger.finerOn())
        logger.finer("getAttribute", "getAttribute");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.getAttribute(name, attribute);
}
/**
 * AttributeList
 * getAttributes(ObjectName name, String[] attributes)
 * Allows to retrieve the values of several attributes of an MBean.
 */
private AttributeList getAttributes(ObjectName name, String[] attributes)
    throws InstanceNotFoundException, ReflectionException {
    if (logger.finerOn())
        logger.finer("getAttributes", "getAttributes");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.getAttributes(name, attributes);
}
/**
 * String
 * getDefaultDomain()
 * Returns the default domain used for the MBean naming.
 */
private String getDefaultDomain() {
    if (logger.finerOn())
        logger.finer("getDefaultDomain", "getDefaultDomain");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.getDefaultDomain();
}
/**
 * boolean
 * isInstanceOf()
 * Returns true if the MBean is of an instance of class name
 */
private boolean isInstanceOf(ObjectName name, String className) throws InstanceNotFoundException {
    if (logger.finerOn())
        logger.finer("isInstanceOf", "isInstanceOf");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.isInstanceOf(name, className);
}
/**
 * ObjectInstance
 * getObjectInstance(ObjectName name)
 * Returns the ObjectInstance (name plus class name) of the named MBean.
 */
private ObjectInstance getObjectInstance(ObjectName name) throws InstanceNotFoundException {
    if (logger.finerOn())
        logger.finer("getObjectInstance", "getObjectInstance");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.getObjectInstance(name);
}
/**
 * Integer
 * getMBeanCount()
 * Returns the number of MBeans controlled by the MBeanServer.
 */
private Integer getMBeanCount() {
    if (logger.finerOn())
        logger.finer("getMBeanCount", "getMBeanCount");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.getMBeanCount();
}
/**
 * MBeanInfo
 * getMBeanInfo(ObjectName name)
 * This method supplies the exposed attributes and actions of the MBean.
 */
private MBeanInfo getMBeanInfo(ObjectName name)
    throws InstanceNotFoundException, IntrospectionException, ReflectionException {
    if (logger.finerOn())
        logger.finer("getMBeanInfo", "getMBeanInfo");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.getMBeanInfo(name);
}
/**
 * String
 * getMBeanServerId()
 * Return a string which represents the agent identification.
 * Best-effort: returns null when the delegate MBean cannot be queried.
 */
private String getMBeanServerId() {
    if (logger.finerOn())
        logger.finer("getMBeanServerId", "getMBeanServerId");
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    // Get value from MBeanServerDelegate MBean
    try {
        return (String) mbs.getAttribute(new ObjectName("JMImplementation:type=MBeanServerDelegate") , "MBeanServerId");
    } catch (Exception e) {
        // Keep the best-effort null return, but no longer swallow the
        // failure silently: record it at the finest trace level.
        if (logger.finestOn())
            logger.finest("getMBeanServerId",
                          "Could not read MBeanServerId: " + e);
        return null;
    }
}
/**
 * Object
 * invoke(ObjectName name, String methodName, Object[] params, String[] signature)
 * Invokes a method of an MBean.
 */
private Object invoke(ObjectName name, String methodName, Object[] params, String[] signature)
    throws InstanceNotFoundException, MBeanException, ReflectionException {
    if (logger.finerOn())
        logger.finer("invoke", "invoke");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.invoke(name, methodName, params, signature);
}
/**
 * boolean
 * isRegistered(ObjectName name)
 * Checks if the given MBean is registered with the MBeanServer.
 */
private boolean isRegistered(ObjectName name) {
    if (logger.finerOn())
        logger.finer("isRegistered", "isRegistered");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.isRegistered(name);
}
/**
 * Set
 * queryNames(ObjectName name, QueryExp query)
 * Gets the names of MBeans controlled by the MBeanServer.
 */
private Set queryNames(ObjectName name, QueryExp query) {
    if (logger.finerOn())
        logger.finer("queryNames", "queryNames");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.queryNames(name, query);
}
/**
 * Set
 * queryMBeans(ObjectName name, QueryExp query)
 * Gets the MBeans controlled by the MBeanServer.
 */
private Set queryMBeans(ObjectName name, QueryExp query) {
    if (logger.finerOn())
        logger.finer("queryMBeans", "queryMBeans");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.queryMBeans(name, query);
}
/**
 * void
 * setAttribute(ObjectName name, Attribute attribute)
 * Sets the value of a specific attribute of a named MBean.
 */
private void setAttribute(ObjectName name, Attribute attribute)
    throws AttributeNotFoundException, InstanceNotFoundException,
           InvalidAttributeValueException, MBeanException, ReflectionException {
    if (logger.finerOn())
        logger.finer("setAttribute", "setAttribute");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    mbs.setAttribute(name, attribute);
}
/**
 * AttributeList
 * setAttributes(ObjectName name, AttributeList attributes)
 * Allows to modify the values of several attributes of an MBean.
 */
private AttributeList setAttributes(ObjectName name, AttributeList attributes)
    throws InstanceNotFoundException, ReflectionException {
    if (logger.finerOn())
        logger.finer("setAttributes", "setAttributes");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    return mbs.setAttributes(name, attributes);
}
/**
 * void
 * unregisterMBean(ObjectName name)
 * Deletes an instance of an MBean in the remote MBean server.
 */
private void unregisterMBean(ObjectName name) throws InstanceNotFoundException, MBeanRegistrationException {
    if (logger.finerOn())
        logger.finer("unregisterMBean", "unregisterMBean");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    mbs.unregisterMBean(name);
}
/**
 * Object[]
 * remoteRequest(int opType, Object[] params)
 * Transfers a notification request from the client to the agent.
 */
private Object[] remoteRequest(int opType, Object[] params) throws Exception {
    if (logger.finerOn())
        logger.finer("remoteRequest", "remoteRequest");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    // Invoke remoteRequest method in server notification dispatcher.
    //
    GenericHttpConnectorServer server = (GenericHttpConnectorServer) adaptorServer;
    return server.serverNotificationDispatcher.remoteRequest(opType,params);
}
/**
 * String
 * pingHeartBeatServer(String sessionId, int period, int nretries, Long notifSessionId)
 * Transfers a ping request from the client to the agent.
 */
private String pingHeartBeatServer(String sessionId, int period, int nretries, Long notifSessionId) {
    if (logger.finerOn())
        logger.finer("pingHeartBeatServer", "pingHeartBeatServer");
    // Remote operations are refused once the connector server is stopped.
    if (!adaptorServer.isActive()) {
        throw new CommunicationException("Connector Server is OFFLINE");
    }
    // Invoke pingHeartBeatServer method in heartbeat server handler.
    //
    GenericHttpConnectorServer server = (GenericHttpConnectorServer) adaptorServer;
    return server.heartbeatServerHandler.pingHeartBeatServer(sessionId, period, nretries, notifSessionId);
}
/** Optional protocol features implemented by this handler. */
private static final String[] supportedFeatures = {
    "OperationContext",
};

/**
 * Returns true when the named optional protocol feature is one of the
 * entries in supportedFeatures.
 */
private boolean supports(String feature) {
    if (logger.finerOn())
        logger.finer("supports", feature);
    int idx = 0;
    while (idx < supportedFeatures.length) {
        if (supportedFeatures[idx].equals(feature))
            return true;
        idx++;
    }
    return false;
}
// TRACE METHODS
//--------------
/**
 * Returns the string used in debug traces.
 */
protected String makeDebugTag() {
    final String endpoint =
        adaptorServer.getProtocol() + ":" + adaptorServer.getPort();
    return "GenericHttpRequestHandler[" + endpoint + "][" + requestId + "]";
}
// PRIVATE VARIABLES
//------------------
/* This table drives the conversion of received requests into method calls.
Each entry has a corresponding constant in the list immediately after
the table, and if you change one you must change the other. The
constants are for use in a switch statement.
Each entry is a string with three or four separated words.
The first word is the request code that will be received from the
remote client.
The second word is the number of Object parameters to be read from
the remote client for that request.
The third word is the return type string for that request. This is
included for compatibility with previous versions of the protocol,
but it is not actually used for anything, except to distinguish
exception returns from normal returns whose value happens to be an
object of type exception.
The fourth word, if present, indicates that that many parameters should
be read from the remote client in a first pass. This is used when
a non-default class loader specified by the these parameters must be
used when reading the remaining parameters.
IF YOU CHANGE THIS LIST YOU MUST CHANGE THE CONSTANTS IMMEDIATELY
FOLLOWING IT. */
private static final String[] methodList = {
    // Format: request-code  nParams  result-type  [magicParam]
    // (see the explanatory comment immediately above this table).
    "createMBean 2 ObjectInstance",
    "createMBeanParams 4 ObjectInstance 2",
    "createMBeanLoader 3 ObjectInstance",
    "createMBeanLoaderParams 5 ObjectInstance 3",
    "getAttribute 2 Object",
    "getAttributes 2 AttributeList",
    "getDefaultDomain 0 String",
    "isInstanceOf 2 Boolean",
    "getObjectInstance 1 ObjectInstance",
    "getMBeanCount 0 Integer",
    "getMBeanServerId 0 String",
    "getMBeanInfo 1 MBeanInfo",
    "invoke 4 Object 1",
    "isRegistered 1 Boolean",
    "queryNames 2 Set",
    "queryMBeans 2 Set",
    "setAttribute 2 Object 1",
    "setAttributes 2 AttributeList 1",
    "unregisterMBean 1 Object",
    "remoteRequest 2 Object[]",
    "pingHeartBeatServer 4 String",
    "supports 1 Boolean",
};
private static final int
CREATE_MBEAN = 0,
CREATE_MBEAN_PARAMS = 1,
CREATE_MBEAN_LOADER = 2,
CREATE_MBEAN_LOADER_PARAMS = 3,
GET_ATTRIBUTE = 4,
GET_ATTRIBUTES = 5,
GET_DEFAULT_DOMAIN = 6,
IS_INSTANCE_OF = 7,
GET_OBJECT_INSTANCE = 8,
GET_MBEAN_COUNT = 9,
GET_MBEAN_SERVER_ID = 10,
GET_MBEAN_INFO = 11,
INVOKE = 12,
IS_REGISTERED = 13,
QUERY_NAMES = 14,
QUERY_MBEANS = 15,
SET_ATTRIBUTE = 16,
SET_ATTRIBUTES = 17,
UNREGISTER_MBEAN = 18,
REMOTE_REQUEST = 19,
PING_HEART_BEAT_SERVER = 20,
SUPPORTS = 21;
/** Parsed form of one {@code methodList} entry. */
private static final class ParsedMethod {
    int methodNumber;   // index of the entry in methodList (the switch constant)
    int nParams;        // number of Object parameters to read from the remote client
    String resultType;  // return-type string (kept for protocol compatibility; otherwise unused)
    int magicParam;     // parameter count to read in a first pass, or -1 when absent
}
// Maps request-code name -> ParsedMethod. Raw Hashtable — presumably this
// code predates generics; left as-is to match the file's conventions.
private static final Hashtable methodTable = new Hashtable();
// Parse methodList once at class-load time. A malformed entry is a
// programming error (the table is hand-maintained), hence the hard Error.
static {
    for (int i = 0; i < methodList.length; i++) {
        final String entry = methodList[i];
        final StringTokenizer tok = new StringTokenizer(entry);
        final ParsedMethod pm = new ParsedMethod();
        final String methodName = tok.nextToken();
        pm.methodNumber = i;
        pm.nParams = Integer.parseInt(tok.nextToken());
        pm.resultType = tok.nextToken();
        if (tok.hasMoreTokens())
            pm.magicParam = Integer.parseInt(tok.nextToken());
        else
            pm.magicParam = -1;  // no first-pass parameter count given
        if (tok.hasMoreTokens())
            throw new Error("invalid methodList entry: " + entry);
        methodTable.put(methodName, pm);
    }
}
}
| |
package org.radargun.service;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import org.radargun.logging.Log;
import org.radargun.logging.LogFactory;
import org.radargun.traits.Killable;
import org.radargun.traits.Lifecycle;
/**
* Java runtime does not provide API that would allow us to manage
* full process tree, that's why we delegate the start/stop/kill
* handling to OS-specific scripts.
* So far, only Unix scripts are implemented.
*
* @author Radim Vansa <rvansa@redhat.com>
*/
public class ProcessLifecycle implements Lifecycle, Killable {
   protected final Log log = LogFactory.getLog(getClass());
   protected final ProcessService service;
   protected ProcessOutputReader outputReader, errorReader;
   private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<>();
   private String prefix;    // common prefix of all lifecycle scripts (from the service)
   private String extension; // common suffix of all lifecycle scripts (from the service)

   public ProcessLifecycle(ProcessService service) {
      this.service = service;
      prefix = service.getCommandPrefix();
      extension = service.getCommandSuffix();
   }

   /**
    * Kills the process (non-graceful stop) and blocks until it has terminated.
    * Stop listeners are fired before and after with {@code graceful == false}.
    */
   @Override
   public void kill() {
      fireBeforeStop(false);
      try {
         Runnable waiting = killAsyncInternal();
         if (waiting == null) return;
         waiting.run();
      } finally {
         fireAfterStop(false);
      }
   }

   /**
    * Kills the process without waiting for it to die. The after-stop listeners
    * are invoked from a background daemon thread once the process has actually
    * terminated (or immediately if the kill could not be initiated).
    */
   @Override
   public void killAsync() {
      fireBeforeStop(false);
      final Runnable waiting = killAsyncInternal();
      if (waiting == null) {
         fireAfterStop(false);
         return;
      }
      Thread listenerInvoker = new Thread(new Runnable() {
         @Override
         public void run() {
            try {
               waiting.run();
            } finally {
               fireAfterStop(false);
            }
         }
      }, "StopListenerInvoker");
      listenerInvoker.setDaemon(true);
      listenerInvoker.start();
   }

   /**
    * Launches the OS-specific kill script and returns a {@link Runnable} that
    * blocks until the process is gone, or {@code null} when the process is not
    * running or the script could not be started.
    *
    * <p>Stop listeners are fired by the callers ({@link #kill()} and
    * {@link #killAsync()}), not here. BUGFIX: a duplicate
    * {@code fireBeforeStop(false)} call was removed from this method — both
    * public callers already fire the event, so listeners used to be notified
    * twice before every kill.</p>
    */
   protected Runnable killAsyncInternal() {
      if (!isRunning()) {
         log.warn("Cannot kill, process is not running");
         return null;
      }
      try {
         final Process process = new ProcessBuilder().inheritIO()
               .command(Arrays.asList(prefix + "kill" + extension, service.getCommandTag())).start();
         return new Runnable() {
            @Override
            public void run() {
               for (; ; ) {
                  try {
                     process.waitFor();
                  } catch (InterruptedException e) {
                     log.trace("Interrupted waiting for kill", e);
                  }
                  // The script may exit before the whole process tree is gone; poll.
                  if (!isRunning()) return;
               }
            }
         };
      } catch (IOException e) {
         log.error("Cannot kill service", e);
         return null;
      }
   }

   /**
    * Starts the process via the OS-specific start script. Start listeners are
    * fired before and after; a failure to launch is logged, not rethrown.
    */
   @Override
   public void start() {
      if (isRunning()) {
         log.warn("Process is already running");
         return;
      }
      fireBeforeStart();
      try {
         startInternal();
      } finally {
         fireAfterStart();
      }
   }

   /**
    * Builds and launches the start command ({@code <prefix>start<extension>
    * <tag> <service command...>}) with the service-supplied environment.
    * Streams without a registered reader/writer are inherited from this JVM.
    */
   protected void startInternal() {
      List<String> command = new ArrayList<String>();
      command.add(prefix + "start" + extension);
      command.add(service.getCommandTag());
      command.addAll(service.getCommand());
      Map<String, String> env = service.getEnvironment();
      log.info("Environment:\n" + env);
      log.info("Starting with: " + command);
      ProcessBuilder pb = new ProcessBuilder().command(command);
      for (Map.Entry<String, String> envVar : env.entrySet()) {
         pb.environment().put(envVar.getKey(), envVar.getValue());
      }
      StreamWriter inputWriter = getInputWriter();
      StreamReader outputReader = getOutputReader();
      StreamReader errorReader = getErrorReader();
      if (inputWriter == null) {
         pb.redirectInput(ProcessBuilder.Redirect.INHERIT);
      }
      if (outputReader == null) {
         pb.redirectOutput(ProcessBuilder.Redirect.INHERIT);
      }
      if (errorReader == null) {
         pb.redirectError(ProcessBuilder.Redirect.INHERIT);
      }
      try {
         Process process = pb.start();
         // Note the direction swap: we write to the process's stdin
         // (its OutputStream) and read from its stdout/stderr.
         if (inputWriter != null) inputWriter.setStream(process.getOutputStream());
         if (outputReader != null) outputReader.setStream(process.getInputStream());
         if (errorReader != null) errorReader.setStream(process.getErrorStream());
      } catch (IOException e) {
         log.error("Failed to start", e);
      }
   }

   /**
    * Gracefully stops the process and blocks until it is gone. Stop listeners
    * are fired before and after with {@code graceful == true}.
    */
   @Override
   public void stop() {
      if (!isRunning()) {
         log.warn("Process is not running, cannot stop");
         return;
      }
      fireBeforeStop(true);
      try {
         stopInternal();
      } finally {
         fireAfterStop(true);
      }
   }

   /**
    * Repeatedly invokes the stop script until the process terminates;
    * once {@code service.stopTimeout} elapses (if non-negative), escalates
    * to the kill script instead.
    */
   protected void stopInternal() {
      try {
         long startTime = System.currentTimeMillis();
         for (; ; ) {
            String command = service.stopTimeout < 0 || System.currentTimeMillis() < startTime + service.stopTimeout ? "stop" : "kill";
            Process process = new ProcessBuilder().inheritIO().command(Arrays.asList(prefix + command + extension, service.getCommandTag())).start();
            try {
               process.waitFor();
            } catch (InterruptedException e) {
               log.trace("Interrupted waiting for stop", e);
            }
            if (!isRunning()) return;
         }
      } catch (IOException e) {
         log.error("Cannot stop service", e);
      }
   }

   /**
    * Asks the OS-specific "running" script whether the process is alive;
    * exit code 0 means running. On error or interruption we lean towards
    * reporting "running" so callers keep trying to stop it.
    */
   @Override
   public boolean isRunning() {
      Process process = null;
      try {
         process = new ProcessBuilder().inheritIO().command(Arrays.asList(prefix + "running" + extension, service.getCommandTag())).start();
         int exitValue = process.waitFor();
         return exitValue == 0;
      } catch (IOException e) {
         log.error("Cannot determine if running", e);
         return false;
      } catch (InterruptedException e) {
         log.error("Script interrupted", e);
         if (process != null) {
            try {
               return process.exitValue() == 0;
            } catch (IllegalThreadStateException itse) {
               // The check script itself has not finished; assume running.
               return true;
            }
         }
         return true;
      }
   }

   /** Lazily creates the reader that forwards stdout lines to the service. */
   protected synchronized StreamReader getOutputReader() {
      if (outputReader == null) {
         outputReader = new ProcessOutputReader(new LineConsumer() {
            @Override
            public void consume(String line) {
               service.reportOutput(line);
            }
         });
      }
      return outputReader;
   }

   /** Lazily creates the reader that forwards stderr lines to the service. */
   protected synchronized StreamReader getErrorReader() {
      if (errorReader == null) {
         errorReader = new ProcessOutputReader(new LineConsumer() {
            @Override
            public void consume(String line) {
               service.reportError(line);
            }
         });
      }
      return errorReader;
   }

   /** Subclasses may override to feed the process's stdin; default: inherit. */
   protected StreamWriter getInputWriter() {
      return null;
   }

   /**
    * Provides a hook for service to read output
    */
   interface StreamReader {
      void setStream(InputStream stream);
   }

   /**
    * Provides a hook for passing input to the process
    */
   interface StreamWriter {
      void setStream(OutputStream stream);
   }

   /** Callback receiving one line of process output at a time. */
   interface LineConsumer {
      void consume(String line);
   }

   /**
    * Background thread pumping one process stream line-by-line into a
    * {@link LineConsumer}. The thread starts when the stream is attached.
    * NOTE(review): as a Thread it can be started only once — presumably
    * each service start creates fresh readers; confirm before reusing.
    */
   protected class ProcessOutputReader extends Thread implements StreamReader {
      private BufferedReader reader;
      private LineConsumer consumer;

      public ProcessOutputReader(LineConsumer consumer) {
         this.consumer = consumer;
      }

      @Override
      public void setStream(InputStream stream) {
         this.reader = new BufferedReader(new InputStreamReader(stream));
         this.start();
      }

      @Override
      public void run() {
         String line;
         try {
            while ((line = reader.readLine()) != null) {
               consumer.consume(line);
            }
         } catch (IOException e) {
            log.error("Failed to read server output", e);
         } finally {
            try {
               reader.close();
            } catch (IOException e) {
               log.error("Failed to close", e);
            }
         }
      }
   }

   public void addListener(Listener listener) {
      listeners.add(listener);
   }

   public void removeListener(Listener listener) {
      listeners.remove(listener);
   }

   // lambdas, wish you were here...
   private interface ListenerRunner {
      void run(Listener listener);
   }

   /** Invokes {@code runner} for every listener, isolating their failures. */
   private void fireListeners(ListenerRunner runner) {
      for (Listener listener : listeners) {
         try {
            runner.run(listener);
         } catch (Exception e) {
            log.error("Listener has thrown an exception", e);
         }
      }
   }

   protected void fireBeforeStart() {
      fireListeners(new ListenerRunner() {
         @Override
         public void run(Listener listener) {
            listener.beforeStart();
         }
      });
   }

   protected void fireAfterStart() {
      fireListeners(new ListenerRunner() {
         @Override
         public void run(Listener listener) {
            listener.afterStart();
         }
      });
   }

   protected void fireBeforeStop(final boolean graceful) {
      fireListeners(new ListenerRunner() {
         @Override
         public void run(Listener listener) {
            listener.beforeStop(graceful);
         }
      });
   }

   protected void fireAfterStop(final boolean graceful) {
      fireListeners(new ListenerRunner() {
         @Override
         public void run(Listener listener) {
            listener.afterStop(graceful);
         }
      });
   }

   /** Lifecycle event callbacks; {@code graceful} distinguishes stop from kill. */
   public interface Listener {
      void beforeStart();
      void afterStart();
      void beforeStop(boolean graceful);
      void afterStop(boolean graceful);
   }

   /** No-op {@link Listener} base class; override only the events you need. */
   public static class ListenerAdapter implements Listener {
      public void beforeStart() {}
      public void afterStart() {}
      public void beforeStop(boolean graceful) {}
      public void afterStop(boolean graceful) {}
   }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Structure describing push descriptor limits that can be supported by an implementation.
*
* <h5>Description</h5>
*
* <p>If the {@link VkPhysicalDevicePushDescriptorPropertiesKHR} structure is included in the {@code pNext} chain of the {@link VkPhysicalDeviceProperties2} structure passed to {@link VK11#vkGetPhysicalDeviceProperties2 GetPhysicalDeviceProperties2}, it is filled in with each corresponding implementation-dependent property.</p>
*
* <h5>Valid Usage (Implicit)</h5>
*
* <ul>
* <li>{@code sType} <b>must</b> be {@link KHRPushDescriptor#VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR}</li>
* </ul>
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VkPhysicalDevicePushDescriptorPropertiesKHR {
* VkStructureType {@link #sType};
* void * {@link #pNext};
* uint32_t {@link #maxPushDescriptors};
* }</code></pre>
*/
// NOTE(review): per the file header this class is MACHINE GENERATED — any
// behavioral change belongs in the LWJGL generator, not here.
public class VkPhysicalDevicePushDescriptorPropertiesKHR extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        STYPE,
        PNEXT,
        MAXPUSHDESCRIPTORS;

    // Compute the native layout at class-load time:
    // VkStructureType (4 bytes), void* pNext (pointer-sized), uint32_t (4 bytes).
    static {
        Layout layout = __struct(
            __member(4),
            __member(POINTER_SIZE),
            __member(4)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        STYPE = layout.offsetof(0);
        PNEXT = layout.offsetof(1);
        MAXPUSHDESCRIPTORS = layout.offsetof(2);
    }

    /**
     * Creates a {@code VkPhysicalDevicePushDescriptorPropertiesKHR} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkPhysicalDevicePushDescriptorPropertiesKHR(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** the type of this structure. */
    @NativeType("VkStructureType")
    public int sType() { return nsType(address()); }
    /** {@code NULL} or a pointer to a structure extending this structure. */
    @NativeType("void *")
    public long pNext() { return npNext(address()); }
    /** the maximum number of descriptors that <b>can</b> be used in a descriptor set created with {@link KHRPushDescriptor#VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR} set. */
    @NativeType("uint32_t")
    public int maxPushDescriptors() { return nmaxPushDescriptors(address()); }

    /** Sets the specified value to the {@link #sType} field. */
    public VkPhysicalDevicePushDescriptorPropertiesKHR sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }
    /** Sets the {@link KHRPushDescriptor#VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR} value to the {@link #sType} field. */
    public VkPhysicalDevicePushDescriptorPropertiesKHR sType$Default() { return sType(KHRPushDescriptor.VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR); }
    /** Sets the specified value to the {@link #pNext} field. */
    public VkPhysicalDevicePushDescriptorPropertiesKHR pNext(@NativeType("void *") long value) { npNext(address(), value); return this; }

    /** Initializes this struct with the specified values. */
    public VkPhysicalDevicePushDescriptorPropertiesKHR set(
        int sType,
        long pNext
    ) {
        sType(sType);
        pNext(pNext);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkPhysicalDevicePushDescriptorPropertiesKHR set(VkPhysicalDevicePushDescriptorPropertiesKHR src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code VkPhysicalDevicePushDescriptorPropertiesKHR} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR malloc() {
        return wrap(VkPhysicalDevicePushDescriptorPropertiesKHR.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VkPhysicalDevicePushDescriptorPropertiesKHR} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR calloc() {
        return wrap(VkPhysicalDevicePushDescriptorPropertiesKHR.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VkPhysicalDevicePushDescriptorPropertiesKHR} instance allocated with {@link BufferUtils}. */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkPhysicalDevicePushDescriptorPropertiesKHR.class, memAddress(container), container);
    }

    /** Returns a new {@code VkPhysicalDevicePushDescriptorPropertiesKHR} instance for the specified memory address. */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR create(long address) {
        return wrap(VkPhysicalDevicePushDescriptorPropertiesKHR.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkPhysicalDevicePushDescriptorPropertiesKHR createSafe(long address) {
        return address == NULL ? null : wrap(VkPhysicalDevicePushDescriptorPropertiesKHR.class, address);
    }

    /**
     * Returns a new {@link VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    // -----------------------------------

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDevicePushDescriptorPropertiesKHR mallocStack() { return malloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDevicePushDescriptorPropertiesKHR callocStack() { return calloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDevicePushDescriptorPropertiesKHR mallocStack(MemoryStack stack) { return malloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDevicePushDescriptorPropertiesKHR callocStack(MemoryStack stack) { return calloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }

    /**
     * Returns a new {@code VkPhysicalDevicePushDescriptorPropertiesKHR} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR malloc(MemoryStack stack) {
        return wrap(VkPhysicalDevicePushDescriptorPropertiesKHR.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VkPhysicalDevicePushDescriptorPropertiesKHR} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR calloc(MemoryStack stack) {
        return wrap(VkPhysicalDevicePushDescriptorPropertiesKHR.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #sType}. */
    public static int nsType(long struct) { return UNSAFE.getInt(null, struct + VkPhysicalDevicePushDescriptorPropertiesKHR.STYPE); }
    /** Unsafe version of {@link #pNext}. */
    public static long npNext(long struct) { return memGetAddress(struct + VkPhysicalDevicePushDescriptorPropertiesKHR.PNEXT); }
    /** Unsafe version of {@link #maxPushDescriptors}. */
    public static int nmaxPushDescriptors(long struct) { return UNSAFE.getInt(null, struct + VkPhysicalDevicePushDescriptorPropertiesKHR.MAXPUSHDESCRIPTORS); }

    /** Unsafe version of {@link #sType(int) sType}. */
    public static void nsType(long struct, int value) { UNSAFE.putInt(null, struct + VkPhysicalDevicePushDescriptorPropertiesKHR.STYPE, value); }
    /** Unsafe version of {@link #pNext(long) pNext}. */
    public static void npNext(long struct, long value) { memPutAddress(struct + VkPhysicalDevicePushDescriptorPropertiesKHR.PNEXT, value); }

    // -----------------------------------

    /** An array of {@link VkPhysicalDevicePushDescriptorPropertiesKHR} structs. */
    public static class Buffer extends StructBuffer<VkPhysicalDevicePushDescriptorPropertiesKHR, Buffer> implements NativeResource {

        // Shared factory element; created at address -1L, used only for wrapping.
        private static final VkPhysicalDevicePushDescriptorPropertiesKHR ELEMENT_FACTORY = VkPhysicalDevicePushDescriptorPropertiesKHR.create(-1L);

        /**
         * Creates a new {@code VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkPhysicalDevicePushDescriptorPropertiesKHR#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VkPhysicalDevicePushDescriptorPropertiesKHR getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return the value of the {@link VkPhysicalDevicePushDescriptorPropertiesKHR#sType} field. */
        @NativeType("VkStructureType")
        public int sType() { return VkPhysicalDevicePushDescriptorPropertiesKHR.nsType(address()); }
        /** @return the value of the {@link VkPhysicalDevicePushDescriptorPropertiesKHR#pNext} field. */
        @NativeType("void *")
        public long pNext() { return VkPhysicalDevicePushDescriptorPropertiesKHR.npNext(address()); }
        /** @return the value of the {@link VkPhysicalDevicePushDescriptorPropertiesKHR#maxPushDescriptors} field. */
        @NativeType("uint32_t")
        public int maxPushDescriptors() { return VkPhysicalDevicePushDescriptorPropertiesKHR.nmaxPushDescriptors(address()); }

        /** Sets the specified value to the {@link VkPhysicalDevicePushDescriptorPropertiesKHR#sType} field. */
        public VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer sType(@NativeType("VkStructureType") int value) { VkPhysicalDevicePushDescriptorPropertiesKHR.nsType(address(), value); return this; }
        /** Sets the {@link KHRPushDescriptor#VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR} value to the {@link VkPhysicalDevicePushDescriptorPropertiesKHR#sType} field. */
        public VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer sType$Default() { return sType(KHRPushDescriptor.VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR); }
        /** Sets the specified value to the {@link VkPhysicalDevicePushDescriptorPropertiesKHR#pNext} field. */
        public VkPhysicalDevicePushDescriptorPropertiesKHR.Buffer pNext(@NativeType("void *") long value) { VkPhysicalDevicePushDescriptorPropertiesKHR.npNext(address(), value); return this; }

    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIESOR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.jpa.container.parser.impl;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import javax.persistence.SharedCacheMode;
import javax.persistence.ValidationMode;
import javax.persistence.spi.ClassTransformer;
import javax.persistence.spi.PersistenceUnitInfo;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;
import org.apache.aries.jpa.container.weaving.impl.TransformerRegistry;
import org.apache.aries.jpa.container.weaving.impl.TransformerRegistrySingleton;
import org.osgi.framework.Bundle;
import org.osgi.framework.wiring.BundleWiring;
/**
 * A {@link PersistenceUnitInfo} backed by an OSGi bundle: the bundle supplies
 * the class loader, the persistence-unit root URL and, unless unlisted classes
 * are excluded, the JPA-annotated entity classes discovered in it.
 */
public class PersistenceUnit implements PersistenceUnitInfo {

    private Bundle bundle;
    private ClassLoader classLoader;
    private Set<String> classNames;
    private boolean excludeUnlisted;
    private DataSource jtaDataSource;
    private String jtaDataSourceName;
    private DataSource nonJtaDataSource;
    private String nonJtaDataSourceName;
    private String persistenceProviderClassName;
    private String persistenceUnitName;
    private String persistenceXMLSchemaVersion;
    private Properties props;
    private SharedCacheMode sharedCacheMode = SharedCacheMode.UNSPECIFIED;
    private PersistenceUnitTransactionType transactionType;
    private ValidationMode validationMode = ValidationMode.NONE;

    /**
     * Creates a persistence unit for the given bundle.
     *
     * @param bundle              the defining OSGi bundle (also provides the class loader)
     * @param persistenceUnitName unit name from persistence.xml
     * @param transactionType     JTA or RESOURCE_LOCAL
     */
    public PersistenceUnit(Bundle bundle, String persistenceUnitName,
                           PersistenceUnitTransactionType transactionType) {
        this.bundle = bundle;
        this.persistenceUnitName = persistenceUnitName;
        this.transactionType = transactionType;
        this.props = new Properties();
        this.classLoader = bundle.adapt(BundleWiring.class).getClassLoader();
        this.classNames = new HashSet<>();
    }

    /** Registers a managed class by name. */
    public void addClassName(String className) {
        classNames.add(className);
    }

    /** Adds a single configuration property. */
    public void addProperty(String name, String value) {
        props.setProperty(name, value);
    }

    @Override
    public void addTransformer(ClassTransformer transformer) {
        // Delegate weaving to the shared registry, keyed by this bundle.
        TransformerRegistrySingleton.get().addTransformer(bundle, transformer);
    }

    @Override
    public boolean excludeUnlistedClasses() {
        return excludeUnlisted;
    }

    /** @return the bundle that defines this persistence unit */
    public Bundle getBundle() {
        return bundle;
    }

    @Override
    public ClassLoader getClassLoader() {
        return classLoader;
    }

    @Override
    public List<URL> getJarFileUrls() {
        return Collections.emptyList();
    }

    @Override
    public DataSource getJtaDataSource() {
        return jtaDataSource;
    }

    /** @return the JNDI/OSGi name of the JTA data source, if any */
    public String getJtaDataSourceName() {
        return this.jtaDataSourceName;
    }

    @Override
    public List<String> getManagedClassNames() {
        // Defensive snapshot so callers cannot mutate our class set.
        return new ArrayList<>(classNames);
    }

    @Override
    public List<String> getMappingFileNames() {
        return Collections.emptyList();
    }

    /** Convenience alias for {@link #getPersistenceUnitName()}. */
    public String getName() {
        return this.persistenceUnitName;
    }

    @Override
    public ClassLoader getNewTempClassLoader() {
        // Fresh throwaway loader so providers can inspect classes without
        // polluting the bundle's real class loader.
        return new TempBundleDelegatingClassLoader(bundle, classLoader);
    }

    @Override
    public DataSource getNonJtaDataSource() {
        return nonJtaDataSource;
    }

    /** @return the JNDI/OSGi name of the non-JTA data source, if any */
    public String getNonJtaDataSourceName() {
        return this.nonJtaDataSourceName;
    }

    @Override
    public String getPersistenceProviderClassName() {
        return persistenceProviderClassName;
    }

    @Override
    public String getPersistenceUnitName() {
        return persistenceUnitName;
    }

    @Override
    public URL getPersistenceUnitRootUrl() {
        return bundle.getResource("/");
    }

    @Override
    public String getPersistenceXMLSchemaVersion() {
        return persistenceXMLSchemaVersion;
    }

    @Override
    public Properties getProperties() {
        return props;
    }

    @Override
    public SharedCacheMode getSharedCacheMode() {
        return sharedCacheMode;
    }

    @Override
    public PersistenceUnitTransactionType getTransactionType() {
        return this.transactionType;
    }

    @Override
    public ValidationMode getValidationMode() {
        return validationMode;
    }

    /** @return whether unlisted classes are excluded from this unit */
    public boolean isExcludeUnlisted() {
        return this.excludeUnlisted;
    }

    public void setExcludeUnlisted(boolean excludeUnlisted) {
        this.excludeUnlisted = excludeUnlisted;
    }

    public void setJtaDataSource(DataSource jtaDataSource) {
        this.jtaDataSource = jtaDataSource;
    }

    public void setJtaDataSourceName(String jtaDataSourceName) {
        this.jtaDataSourceName = jtaDataSourceName;
    }

    public void setNonJtaDataSource(DataSource nonJtaDataSource) {
        this.nonJtaDataSource = nonJtaDataSource;
    }

    public void setNonJtaDataSourceName(String nonJtaDataSourceName) {
        this.nonJtaDataSourceName = nonJtaDataSourceName;
    }

    public void setProviderClassName(String providerClassName) {
        this.persistenceProviderClassName = providerClassName;
    }

    public void setSharedCacheMode(SharedCacheMode sharedCacheMode) {
        this.sharedCacheMode = sharedCacheMode;
    }

    public void setValidationMode(ValidationMode validationMode) {
        this.validationMode = validationMode;
    }

    /**
     * Scans the bundle for JPA-annotated classes and registers them,
     * unless unlisted classes are excluded for this unit.
     */
    public void addAnnotated() {
        if (excludeUnlistedClasses()) {
            return;
        }
        for (String name : JPAAnnotationScanner.findJPAAnnotatedClasses(bundle)) {
            addClassName(name);
        }
    }

    public void setTransactionType(PersistenceUnitTransactionType transactionType) {
        this.transactionType = transactionType;
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.tasks.actions.context;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.tasks.LocalTask;
import com.intellij.tasks.TaskManager;
import com.intellij.tasks.actions.BaseTaskAction;
import com.intellij.tasks.actions.SwitchTaskAction;
import com.intellij.tasks.context.ContextInfo;
import com.intellij.tasks.context.LoadContextUndoableAction;
import com.intellij.tasks.context.WorkingContextManager;
import com.intellij.tasks.impl.TaskUtil;
import com.intellij.ui.popup.list.ListPopupImpl;
import com.intellij.util.Function;
import com.intellij.util.NullableFunction;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.DateFormatUtil;
import consulo.ui.image.Image;
import icons.TasksIcons;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.util.*;
/**
* @author Dmitry Avdeev
*/
public class LoadContextAction extends BaseTaskAction
{
	/** Maximum number of entries (saved contexts + inactive tasks) shown in the popup. */
	private static final int MAX_ROW_COUNT = 10;
	@Override
	public void actionPerformed(AnActionEvent e)
	{
		final Project project = getProject(e);
		// NOTE(review): assumes getProject(e) is non-null when the action fires —
		// presumably BaseTaskAction disables it otherwise; confirm.
		assert project != null;
		DefaultActionGroup group = new DefaultActionGroup();
		final WorkingContextManager manager = WorkingContextManager.getInstance(project);
		List<ContextInfo> history = manager.getContextHistory();
		// Adapt each saved context to the common ContextHolder shape so saved
		// contexts and task contexts can be sorted and rendered uniformly below.
		List<ContextHolder> infos = new ArrayList<ContextHolder>(ContainerUtil.map2List(history, new Function<ContextInfo, ContextHolder>()
		{
			public ContextHolder fun(final ContextInfo info)
			{
				return new ContextHolder()
				{
					@Override
					void load(final boolean clear)
					{
						// Loading is wrapped in an undoable command.
						LoadContextUndoableAction undoableAction = LoadContextUndoableAction.createAction(manager, clear, info.name);
						UndoableCommand.execute(project, undoableAction, "Load context " + info.comment, "Context");
					}
					@Override
					void remove()
					{
						manager.removeContext(info.name);
					}
					@Override
					Date getDate()
					{
						return new Date(info.date);
					}
					@Override
					String getComment()
					{
						return info.comment;
					}
					@Override
					Image getIcon()
					{
						return TasksIcons.SavedContext;
					}
				};
			}
		}));
		// Also offer the contexts of inactive local tasks; the active task is
		// excluded (fun() returns null for it and mapNotNull drops the nulls).
		final TaskManager taskManager = TaskManager.getManager(project);
		List<LocalTask> tasks = taskManager.getLocalTasks();
		infos.addAll(ContainerUtil.mapNotNull(tasks, new NullableFunction<LocalTask, ContextHolder>()
		{
			public ContextHolder fun(final LocalTask task)
			{
				if(task.isActive())
				{
					return null;
				}
				return new ContextHolder()
				{
					@Override
					void load(boolean clear)
					{
						LoadContextUndoableAction undoableAction = LoadContextUndoableAction.createAction(manager, clear, task);
						UndoableCommand.execute(project, undoableAction, "Load context " + TaskUtil.getTrimmedSummary(task), "Context");
					}
					@Override
					void remove()
					{
						SwitchTaskAction.removeTask(project, task, taskManager);
					}
					@Override
					Date getDate()
					{
						return task.getUpdated();
					}
					@Override
					String getComment()
					{
						return TaskUtil.getTrimmedSummary(task);
					}
					@Override
					Image getIcon()
					{
						return task.getIcon();
					}
				};
			}
		}));
		// Newest entries first.
		Collections.sort(infos, new Comparator<ContextHolder>()
		{
			public int compare(ContextHolder o1, ContextHolder o2)
			{
				return o2.getDate().compareTo(o1.getDate());
			}
		});
		// Mutable flag shared between the SHIFT key handlers registered below and
		// the per-item load actions built in createItem(): while SHIFT is held the
		// selected context is merged instead of replacing the current one.
		final Ref<Boolean> shiftPressed = Ref.create(false);
		boolean today = true;
		Calendar now = Calendar.getInstance();
		for(int i = 0, historySize = Math.min(MAX_ROW_COUNT, infos.size()); i < historySize; i++)
		{
			final ContextHolder info = infos.get(i);
			Calendar calendar = Calendar.getInstance();
			calendar.setTime(info.getDate());
			// Insert a single separator between today's entries and older ones.
			if(today && (calendar.get(Calendar.YEAR) != now.get(Calendar.YEAR) || calendar.get(Calendar.DAY_OF_YEAR) != now.get(Calendar.DAY_OF_YEAR)))
			{
				group.addSeparator();
				today = false;
			}
			group.add(createItem(info, shiftPressed));
		}
		final ListPopupImpl popup = (ListPopupImpl) JBPopupFactory.getInstance().createActionGroupPopup("Load Context", group, e.getDataContext(), JBPopupFactory.ActionSelectionAid.SPEEDSEARCH, false, null, MAX_ROW_COUNT);
		popup.setAdText("Press SHIFT to merge with current context");
		// Pressing SHIFT flips the shared flag and retitles the popup...
		popup.registerAction("shiftPressed", KeyStroke.getKeyStroke("shift pressed SHIFT"), new AbstractAction()
		{
			public void actionPerformed(ActionEvent e)
			{
				shiftPressed.set(true);
				popup.setCaption("Merge with Current Context");
			}
		});
		// ...and releasing it restores plain load mode.
		popup.registerAction("shiftReleased", KeyStroke.getKeyStroke("released SHIFT"), new AbstractAction()
		{
			public void actionPerformed(ActionEvent e)
			{
				shiftPressed.set(false);
				popup.setCaption("Load Context");
			}
		});
		// Shift+Enter commits the highlighted item while SHIFT is still down.
		popup.registerAction("invoke", KeyStroke.getKeyStroke("shift ENTER"), new AbstractAction()
		{
			public void actionPerformed(ActionEvent e)
			{
				popup.handleSelect(true);
			}
		});
		popup.showCenteredInCurrentWindow(project);
	}
	/**
	 * Uniform view over a loadable context source — either a saved working
	 * context or the context attached to an inactive local task.
	 */
	abstract static class ContextHolder
	{
		/** Loads this context; {@code clear} chooses replace-vs-merge with the current context. */
		abstract void load(boolean clear);
		/** Removes this context from its backing store. */
		abstract void remove();
		/** Timestamp used for newest-first ordering and the "today" separator. */
		abstract Date getDate();
		/** Human-readable label; may be empty. */
		abstract String getComment();
		/** Icon shown next to the popup entry. */
		abstract Image getIcon();
	}
	/**
	 * Builds the popup entry for one holder: performing the group itself loads
	 * the context (merge while SHIFT is held), while expanding it offers the
	 * explicit "Load" and "Remove" children.
	 */
	private static ActionGroup createItem(final ContextHolder holder, final Ref<Boolean> shiftPressed)
	{
		String text = DateFormatUtil.formatPrettyDateTime(holder.getDate());
		String comment = holder.getComment();
		if(!StringUtil.isEmpty(comment))
		{
			text = comment + " (" + text + ")";
		}
		final AnAction loadAction = new AnAction("Load")
		{
			@Override
			public void actionPerformed(AnActionEvent e)
			{
				// SHIFT held -> merge (clear == false); otherwise replace (clear == true).
				holder.load(!shiftPressed.get());
			}
		};
		ActionGroup contextGroup = new ActionGroup(text, text, holder.getIcon())
		{
			@Override
			public void actionPerformed(AnActionEvent e)
			{
				loadAction.actionPerformed(e);
			}
			@Nonnull
			@Override
			public AnAction[] getChildren(@Nullable AnActionEvent e)
			{
				return new AnAction[]{
						loadAction,
						new AnAction("Remove")
						{
							@Override
							public void actionPerformed(AnActionEvent e)
							{
								holder.remove();
							}
						}
				};
			}
			@Override
			public boolean canBePerformed(DataContext context)
			{
				// Lets the group act as a default action when invoked directly.
				return true;
			}
		};
		contextGroup.setPopup(true);
		return contextGroup;
	}
}
| |
/**
*/
package CIM15.IEC61970.Informative.InfAssets;
import CIM15.IEC61968.Assets.AssetInfo;
import org.eclipse.emf.ecore.EClass;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Switch Info</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* <ul>
* <li>{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getDielectricStrength <em>Dielectric Strength</em>}</li>
* <li>{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isGang <em>Gang</em>}</li>
* <li>{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getMakingCapacity <em>Making Capacity</em>}</li>
* <li>{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getWithstandCurrent <em>Withstand Current</em>}</li>
* <li>{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isLoadBreak <em>Load Break</em>}</li>
* <li>{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getMinimumCurrent <em>Minimum Current</em>}</li>
* <li>{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getInterruptingRating <em>Interrupting Rating</em>}</li>
* <li>{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isRemote <em>Remote</em>}</li>
* <li>{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getPoleCount <em>Pole Count</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class SwitchInfo extends AssetInfo {
// --- Generated attribute storage ---------------------------------------------
// Each attribute below is backed by three members: an EDEFAULT constant (the
// generated default), a cached value field, and an "ESet" flag recording
// whether the attribute was explicitly set — the unsettable-attribute
// bookkeeping used by the unset*/isSet* accessors and eIsSet() further down.
/**
 * The default value of the '{@link #getDielectricStrength() <em>Dielectric Strength</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getDielectricStrength()
 * @generated
 * @ordered
 */
protected static final float DIELECTRIC_STRENGTH_EDEFAULT = 0.0F;
/**
 * The cached value of the '{@link #getDielectricStrength() <em>Dielectric Strength</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getDielectricStrength()
 * @generated
 * @ordered
 */
protected float dielectricStrength = DIELECTRIC_STRENGTH_EDEFAULT;
/**
 * This is true if the Dielectric Strength attribute has been set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean dielectricStrengthESet;
/**
 * The default value of the '{@link #isGang() <em>Gang</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isGang()
 * @generated
 * @ordered
 */
protected static final boolean GANG_EDEFAULT = false;
/**
 * The cached value of the '{@link #isGang() <em>Gang</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isGang()
 * @generated
 * @ordered
 */
protected boolean gang = GANG_EDEFAULT;
/**
 * This is true if the Gang attribute has been set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean gangESet;
/**
 * The default value of the '{@link #getMakingCapacity() <em>Making Capacity</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getMakingCapacity()
 * @generated
 * @ordered
 */
protected static final float MAKING_CAPACITY_EDEFAULT = 0.0F;
/**
 * The cached value of the '{@link #getMakingCapacity() <em>Making Capacity</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getMakingCapacity()
 * @generated
 * @ordered
 */
protected float makingCapacity = MAKING_CAPACITY_EDEFAULT;
/**
 * This is true if the Making Capacity attribute has been set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean makingCapacityESet;
/**
 * The default value of the '{@link #getWithstandCurrent() <em>Withstand Current</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getWithstandCurrent()
 * @generated
 * @ordered
 */
protected static final float WITHSTAND_CURRENT_EDEFAULT = 0.0F;
/**
 * The cached value of the '{@link #getWithstandCurrent() <em>Withstand Current</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getWithstandCurrent()
 * @generated
 * @ordered
 */
protected float withstandCurrent = WITHSTAND_CURRENT_EDEFAULT;
/**
 * This is true if the Withstand Current attribute has been set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean withstandCurrentESet;
/**
 * The default value of the '{@link #isLoadBreak() <em>Load Break</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isLoadBreak()
 * @generated
 * @ordered
 */
protected static final boolean LOAD_BREAK_EDEFAULT = false;
/**
 * The cached value of the '{@link #isLoadBreak() <em>Load Break</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isLoadBreak()
 * @generated
 * @ordered
 */
protected boolean loadBreak = LOAD_BREAK_EDEFAULT;
/**
 * This is true if the Load Break attribute has been set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean loadBreakESet;
/**
 * The default value of the '{@link #getMinimumCurrent() <em>Minimum Current</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getMinimumCurrent()
 * @generated
 * @ordered
 */
protected static final float MINIMUM_CURRENT_EDEFAULT = 0.0F;
/**
 * The cached value of the '{@link #getMinimumCurrent() <em>Minimum Current</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getMinimumCurrent()
 * @generated
 * @ordered
 */
protected float minimumCurrent = MINIMUM_CURRENT_EDEFAULT;
/**
 * This is true if the Minimum Current attribute has been set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean minimumCurrentESet;
/**
 * The default value of the '{@link #getInterruptingRating() <em>Interrupting Rating</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getInterruptingRating()
 * @generated
 * @ordered
 */
protected static final float INTERRUPTING_RATING_EDEFAULT = 0.0F;
/**
 * The cached value of the '{@link #getInterruptingRating() <em>Interrupting Rating</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getInterruptingRating()
 * @generated
 * @ordered
 */
protected float interruptingRating = INTERRUPTING_RATING_EDEFAULT;
/**
 * This is true if the Interrupting Rating attribute has been set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean interruptingRatingESet;
/**
 * The default value of the '{@link #isRemote() <em>Remote</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isRemote()
 * @generated
 * @ordered
 */
protected static final boolean REMOTE_EDEFAULT = false;
/**
 * The cached value of the '{@link #isRemote() <em>Remote</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isRemote()
 * @generated
 * @ordered
 */
protected boolean remote = REMOTE_EDEFAULT;
/**
 * This is true if the Remote attribute has been set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean remoteESet;
/**
 * The default value of the '{@link #getPoleCount() <em>Pole Count</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getPoleCount()
 * @generated
 * @ordered
 */
protected static final int POLE_COUNT_EDEFAULT = 0;
/**
 * The cached value of the '{@link #getPoleCount() <em>Pole Count</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getPoleCount()
 * @generated
 * @ordered
 */
protected int poleCount = POLE_COUNT_EDEFAULT;
/**
 * This is true if the Pole Count attribute has been set.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean poleCountESet;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected SwitchInfo() {
	// Protected per the EMF pattern: instances are created through generated
	// factory code rather than direct construction.
	super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
	// Reports this object's metaclass from the InfAssets package metadata.
	return InfAssetsPackage.eINSTANCE.getSwitchInfo();
}
/**
* Returns the value of the '<em><b>Dielectric Strength</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Dielectric Strength</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Dielectric Strength</em>' attribute.
* @see #isSetDielectricStrength()
* @see #unsetDielectricStrength()
* @see #setDielectricStrength(float)
* @generated
*/
public float getDielectricStrength() {
	// Cached value; remains DIELECTRIC_STRENGTH_EDEFAULT until explicitly set.
	return dielectricStrength;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getDielectricStrength <em>Dielectric Strength</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Dielectric Strength</em>' attribute.
* @see #isSetDielectricStrength()
* @see #unsetDielectricStrength()
* @see #getDielectricStrength()
* @generated
*/
public void setDielectricStrength(float newDielectricStrength) {
	dielectricStrength = newDielectricStrength;
	// Record that the attribute is now explicitly set (unsettable bookkeeping).
	dielectricStrengthESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getDielectricStrength <em>Dielectric Strength</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetDielectricStrength()
* @see #getDielectricStrength()
* @see #setDielectricStrength(float)
* @generated
*/
public void unsetDielectricStrength() {
	// Restore the generated default and clear the explicitly-set flag.
	dielectricStrength = DIELECTRIC_STRENGTH_EDEFAULT;
	dielectricStrengthESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getDielectricStrength <em>Dielectric Strength</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Dielectric Strength</em>' attribute is set.
* @see #unsetDielectricStrength()
* @see #getDielectricStrength()
* @see #setDielectricStrength(float)
* @generated
*/
public boolean isSetDielectricStrength() {
	// True between setDielectricStrength() and unsetDielectricStrength().
	return dielectricStrengthESet;
}
/**
* Returns the value of the '<em><b>Gang</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Gang</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Gang</em>' attribute.
* @see #isSetGang()
* @see #unsetGang()
* @see #setGang(boolean)
* @generated
*/
public boolean isGang() {
	// Cached value; remains GANG_EDEFAULT until explicitly set.
	return gang;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isGang <em>Gang</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Gang</em>' attribute.
* @see #isSetGang()
* @see #unsetGang()
* @see #isGang()
* @generated
*/
public void setGang(boolean newGang) {
	gang = newGang;
	// Record that the attribute is now explicitly set.
	gangESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isGang <em>Gang</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetGang()
* @see #isGang()
* @see #setGang(boolean)
* @generated
*/
public void unsetGang() {
	// Restore the generated default and clear the explicitly-set flag.
	gang = GANG_EDEFAULT;
	gangESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isGang <em>Gang</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Gang</em>' attribute is set.
* @see #unsetGang()
* @see #isGang()
* @see #setGang(boolean)
* @generated
*/
public boolean isSetGang() {
	// True between setGang() and unsetGang().
	return gangESet;
}
/**
* Returns the value of the '<em><b>Making Capacity</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Making Capacity</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Making Capacity</em>' attribute.
* @see #isSetMakingCapacity()
* @see #unsetMakingCapacity()
* @see #setMakingCapacity(float)
* @generated
*/
public float getMakingCapacity() {
	// Cached value; remains MAKING_CAPACITY_EDEFAULT until explicitly set.
	return makingCapacity;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getMakingCapacity <em>Making Capacity</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Making Capacity</em>' attribute.
* @see #isSetMakingCapacity()
* @see #unsetMakingCapacity()
* @see #getMakingCapacity()
* @generated
*/
public void setMakingCapacity(float newMakingCapacity) {
	makingCapacity = newMakingCapacity;
	// Record that the attribute is now explicitly set.
	makingCapacityESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getMakingCapacity <em>Making Capacity</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetMakingCapacity()
* @see #getMakingCapacity()
* @see #setMakingCapacity(float)
* @generated
*/
public void unsetMakingCapacity() {
	// Restore the generated default and clear the explicitly-set flag.
	makingCapacity = MAKING_CAPACITY_EDEFAULT;
	makingCapacityESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getMakingCapacity <em>Making Capacity</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Making Capacity</em>' attribute is set.
* @see #unsetMakingCapacity()
* @see #getMakingCapacity()
* @see #setMakingCapacity(float)
* @generated
*/
public boolean isSetMakingCapacity() {
	// True between setMakingCapacity() and unsetMakingCapacity().
	return makingCapacityESet;
}
/**
* Returns the value of the '<em><b>Withstand Current</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Withstand Current</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Withstand Current</em>' attribute.
* @see #isSetWithstandCurrent()
* @see #unsetWithstandCurrent()
* @see #setWithstandCurrent(float)
* @generated
*/
public float getWithstandCurrent() {
	// Cached value; remains WITHSTAND_CURRENT_EDEFAULT until explicitly set.
	return withstandCurrent;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getWithstandCurrent <em>Withstand Current</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Withstand Current</em>' attribute.
* @see #isSetWithstandCurrent()
* @see #unsetWithstandCurrent()
* @see #getWithstandCurrent()
* @generated
*/
public void setWithstandCurrent(float newWithstandCurrent) {
	withstandCurrent = newWithstandCurrent;
	// Record that the attribute is now explicitly set.
	withstandCurrentESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getWithstandCurrent <em>Withstand Current</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetWithstandCurrent()
* @see #getWithstandCurrent()
* @see #setWithstandCurrent(float)
* @generated
*/
public void unsetWithstandCurrent() {
	// Restore the generated default and clear the explicitly-set flag.
	withstandCurrent = WITHSTAND_CURRENT_EDEFAULT;
	withstandCurrentESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getWithstandCurrent <em>Withstand Current</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Withstand Current</em>' attribute is set.
* @see #unsetWithstandCurrent()
* @see #getWithstandCurrent()
* @see #setWithstandCurrent(float)
* @generated
*/
public boolean isSetWithstandCurrent() {
	// True between setWithstandCurrent() and unsetWithstandCurrent().
	return withstandCurrentESet;
}
/**
* Returns the value of the '<em><b>Load Break</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Load Break</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Load Break</em>' attribute.
* @see #isSetLoadBreak()
* @see #unsetLoadBreak()
* @see #setLoadBreak(boolean)
* @generated
*/
public boolean isLoadBreak() {
	// Cached value; remains LOAD_BREAK_EDEFAULT until explicitly set.
	return loadBreak;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isLoadBreak <em>Load Break</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Load Break</em>' attribute.
* @see #isSetLoadBreak()
* @see #unsetLoadBreak()
* @see #isLoadBreak()
* @generated
*/
public void setLoadBreak(boolean newLoadBreak) {
	loadBreak = newLoadBreak;
	// Record that the attribute is now explicitly set.
	loadBreakESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isLoadBreak <em>Load Break</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetLoadBreak()
* @see #isLoadBreak()
* @see #setLoadBreak(boolean)
* @generated
*/
public void unsetLoadBreak() {
	// Restore the generated default and clear the explicitly-set flag.
	loadBreak = LOAD_BREAK_EDEFAULT;
	loadBreakESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isLoadBreak <em>Load Break</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Load Break</em>' attribute is set.
* @see #unsetLoadBreak()
* @see #isLoadBreak()
* @see #setLoadBreak(boolean)
* @generated
*/
public boolean isSetLoadBreak() {
	// True between setLoadBreak() and unsetLoadBreak().
	return loadBreakESet;
}
/**
* Returns the value of the '<em><b>Minimum Current</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Minimum Current</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Minimum Current</em>' attribute.
* @see #isSetMinimumCurrent()
* @see #unsetMinimumCurrent()
* @see #setMinimumCurrent(float)
* @generated
*/
public float getMinimumCurrent() {
	// Cached value; remains MINIMUM_CURRENT_EDEFAULT until explicitly set.
	return minimumCurrent;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getMinimumCurrent <em>Minimum Current</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Minimum Current</em>' attribute.
* @see #isSetMinimumCurrent()
* @see #unsetMinimumCurrent()
* @see #getMinimumCurrent()
* @generated
*/
public void setMinimumCurrent(float newMinimumCurrent) {
	minimumCurrent = newMinimumCurrent;
	// Record that the attribute is now explicitly set.
	minimumCurrentESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getMinimumCurrent <em>Minimum Current</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetMinimumCurrent()
* @see #getMinimumCurrent()
* @see #setMinimumCurrent(float)
* @generated
*/
public void unsetMinimumCurrent() {
	// Restore the generated default and clear the explicitly-set flag.
	minimumCurrent = MINIMUM_CURRENT_EDEFAULT;
	minimumCurrentESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getMinimumCurrent <em>Minimum Current</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Minimum Current</em>' attribute is set.
* @see #unsetMinimumCurrent()
* @see #getMinimumCurrent()
* @see #setMinimumCurrent(float)
* @generated
*/
public boolean isSetMinimumCurrent() {
	// True between setMinimumCurrent() and unsetMinimumCurrent().
	return minimumCurrentESet;
}
/**
* Returns the value of the '<em><b>Interrupting Rating</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Interrupting Rating</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Interrupting Rating</em>' attribute.
* @see #isSetInterruptingRating()
* @see #unsetInterruptingRating()
* @see #setInterruptingRating(float)
* @generated
*/
public float getInterruptingRating() {
	// Cached value; remains INTERRUPTING_RATING_EDEFAULT until explicitly set.
	return interruptingRating;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getInterruptingRating <em>Interrupting Rating</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Interrupting Rating</em>' attribute.
* @see #isSetInterruptingRating()
* @see #unsetInterruptingRating()
* @see #getInterruptingRating()
* @generated
*/
public void setInterruptingRating(float newInterruptingRating) {
	interruptingRating = newInterruptingRating;
	// Record that the attribute is now explicitly set.
	interruptingRatingESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getInterruptingRating <em>Interrupting Rating</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetInterruptingRating()
* @see #getInterruptingRating()
* @see #setInterruptingRating(float)
* @generated
*/
public void unsetInterruptingRating() {
	// Restore the generated default and clear the explicitly-set flag.
	interruptingRating = INTERRUPTING_RATING_EDEFAULT;
	interruptingRatingESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getInterruptingRating <em>Interrupting Rating</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Interrupting Rating</em>' attribute is set.
* @see #unsetInterruptingRating()
* @see #getInterruptingRating()
* @see #setInterruptingRating(float)
* @generated
*/
public boolean isSetInterruptingRating() {
	// True between setInterruptingRating() and unsetInterruptingRating().
	return interruptingRatingESet;
}
/**
* Returns the value of the '<em><b>Remote</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Remote</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Remote</em>' attribute.
* @see #isSetRemote()
* @see #unsetRemote()
* @see #setRemote(boolean)
* @generated
*/
public boolean isRemote() {
	// Cached value; remains REMOTE_EDEFAULT until explicitly set.
	return remote;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isRemote <em>Remote</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Remote</em>' attribute.
* @see #isSetRemote()
* @see #unsetRemote()
* @see #isRemote()
* @generated
*/
public void setRemote(boolean newRemote) {
	remote = newRemote;
	// Record that the attribute is now explicitly set.
	remoteESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isRemote <em>Remote</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetRemote()
* @see #isRemote()
* @see #setRemote(boolean)
* @generated
*/
public void unsetRemote() {
	// Restore the generated default and clear the explicitly-set flag.
	remote = REMOTE_EDEFAULT;
	remoteESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#isRemote <em>Remote</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Remote</em>' attribute is set.
* @see #unsetRemote()
* @see #isRemote()
* @see #setRemote(boolean)
* @generated
*/
public boolean isSetRemote() {
	// True between setRemote() and unsetRemote().
	return remoteESet;
}
/**
* Returns the value of the '<em><b>Pole Count</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Pole Count</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Pole Count</em>' attribute.
* @see #isSetPoleCount()
* @see #unsetPoleCount()
* @see #setPoleCount(int)
* @generated
*/
public int getPoleCount() {
	// Cached value; remains POLE_COUNT_EDEFAULT until explicitly set.
	return poleCount;
}
/**
* Sets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getPoleCount <em>Pole Count</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Pole Count</em>' attribute.
* @see #isSetPoleCount()
* @see #unsetPoleCount()
* @see #getPoleCount()
* @generated
*/
public void setPoleCount(int newPoleCount) {
	poleCount = newPoleCount;
	// Record that the attribute is now explicitly set.
	poleCountESet = true;
}
/**
* Unsets the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getPoleCount <em>Pole Count</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetPoleCount()
* @see #getPoleCount()
* @see #setPoleCount(int)
* @generated
*/
public void unsetPoleCount() {
	// Restore the generated default and clear the explicitly-set flag.
	poleCount = POLE_COUNT_EDEFAULT;
	poleCountESet = false;
}
/**
* Returns whether the value of the '{@link CIM15.IEC61970.Informative.InfAssets.SwitchInfo#getPoleCount <em>Pole Count</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Pole Count</em>' attribute is set.
* @see #unsetPoleCount()
* @see #getPoleCount()
* @see #setPoleCount(int)
* @generated
*/
public boolean isSetPoleCount() {
	// True between setPoleCount() and unsetPoleCount().
	return poleCountESet;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
	// Reflective read used by the EMF framework: primitive results are
	// autoboxed; 'resolve'/'coreType' are not consulted for these simple
	// attributes and unknown feature IDs are delegated to the superclass.
	switch (featureID) {
		case InfAssetsPackage.SWITCH_INFO__DIELECTRIC_STRENGTH:
			return getDielectricStrength();
		case InfAssetsPackage.SWITCH_INFO__GANG:
			return isGang();
		case InfAssetsPackage.SWITCH_INFO__MAKING_CAPACITY:
			return getMakingCapacity();
		case InfAssetsPackage.SWITCH_INFO__WITHSTAND_CURRENT:
			return getWithstandCurrent();
		case InfAssetsPackage.SWITCH_INFO__LOAD_BREAK:
			return isLoadBreak();
		case InfAssetsPackage.SWITCH_INFO__MINIMUM_CURRENT:
			return getMinimumCurrent();
		case InfAssetsPackage.SWITCH_INFO__INTERRUPTING_RATING:
			return getInterruptingRating();
		case InfAssetsPackage.SWITCH_INFO__REMOTE:
			return isRemote();
		case InfAssetsPackage.SWITCH_INFO__POLE_COUNT:
			return getPoleCount();
	}
	return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eSet(int featureID, Object newValue) {
	// Reflective write: unboxes the supplied value and delegates to the typed
	// setter (which also flips the corresponding ESet flag); unknown feature
	// IDs fall through to the superclass.
	switch (featureID) {
		case InfAssetsPackage.SWITCH_INFO__DIELECTRIC_STRENGTH:
			setDielectricStrength((Float)newValue);
			return;
		case InfAssetsPackage.SWITCH_INFO__GANG:
			setGang((Boolean)newValue);
			return;
		case InfAssetsPackage.SWITCH_INFO__MAKING_CAPACITY:
			setMakingCapacity((Float)newValue);
			return;
		case InfAssetsPackage.SWITCH_INFO__WITHSTAND_CURRENT:
			setWithstandCurrent((Float)newValue);
			return;
		case InfAssetsPackage.SWITCH_INFO__LOAD_BREAK:
			setLoadBreak((Boolean)newValue);
			return;
		case InfAssetsPackage.SWITCH_INFO__MINIMUM_CURRENT:
			setMinimumCurrent((Float)newValue);
			return;
		case InfAssetsPackage.SWITCH_INFO__INTERRUPTING_RATING:
			setInterruptingRating((Float)newValue);
			return;
		case InfAssetsPackage.SWITCH_INFO__REMOTE:
			setRemote((Boolean)newValue);
			return;
		case InfAssetsPackage.SWITCH_INFO__POLE_COUNT:
			setPoleCount((Integer)newValue);
			return;
	}
	super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
	// Reflective unset: delegates to the typed unset* methods, which restore
	// the EDEFAULT value and clear the ESet flag; unknown feature IDs fall
	// through to the superclass.
	switch (featureID) {
		case InfAssetsPackage.SWITCH_INFO__DIELECTRIC_STRENGTH:
			unsetDielectricStrength();
			return;
		case InfAssetsPackage.SWITCH_INFO__GANG:
			unsetGang();
			return;
		case InfAssetsPackage.SWITCH_INFO__MAKING_CAPACITY:
			unsetMakingCapacity();
			return;
		case InfAssetsPackage.SWITCH_INFO__WITHSTAND_CURRENT:
			unsetWithstandCurrent();
			return;
		case InfAssetsPackage.SWITCH_INFO__LOAD_BREAK:
			unsetLoadBreak();
			return;
		case InfAssetsPackage.SWITCH_INFO__MINIMUM_CURRENT:
			unsetMinimumCurrent();
			return;
		case InfAssetsPackage.SWITCH_INFO__INTERRUPTING_RATING:
			unsetInterruptingRating();
			return;
		case InfAssetsPackage.SWITCH_INFO__REMOTE:
			unsetRemote();
			return;
		case InfAssetsPackage.SWITCH_INFO__POLE_COUNT:
			unsetPoleCount();
			return;
	}
	super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
	// Reflective "has this attribute been explicitly set" check, delegating to
	// the typed isSet* methods (i.e. the ESet flags); unknown feature IDs are
	// handled by the superclass.
	switch (featureID) {
		case InfAssetsPackage.SWITCH_INFO__DIELECTRIC_STRENGTH:
			return isSetDielectricStrength();
		case InfAssetsPackage.SWITCH_INFO__GANG:
			return isSetGang();
		case InfAssetsPackage.SWITCH_INFO__MAKING_CAPACITY:
			return isSetMakingCapacity();
		case InfAssetsPackage.SWITCH_INFO__WITHSTAND_CURRENT:
			return isSetWithstandCurrent();
		case InfAssetsPackage.SWITCH_INFO__LOAD_BREAK:
			return isSetLoadBreak();
		case InfAssetsPackage.SWITCH_INFO__MINIMUM_CURRENT:
			return isSetMinimumCurrent();
		case InfAssetsPackage.SWITCH_INFO__INTERRUPTING_RATING:
			return isSetInterruptingRating();
		case InfAssetsPackage.SWITCH_INFO__REMOTE:
			return isSetRemote();
		case InfAssetsPackage.SWITCH_INFO__POLE_COUNT:
			return isSetPoleCount();
	}
	return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* Renders each attribute value, printing the literal {@code <unset>} for
* attributes whose "is set" flag is false; proxies defer entirely to the
* superclass representation.
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (dielectricStrength: ");
if (dielectricStrengthESet) result.append(dielectricStrength); else result.append("<unset>");
result.append(", gang: ");
if (gangESet) result.append(gang); else result.append("<unset>");
result.append(", makingCapacity: ");
if (makingCapacityESet) result.append(makingCapacity); else result.append("<unset>");
result.append(", withstandCurrent: ");
if (withstandCurrentESet) result.append(withstandCurrent); else result.append("<unset>");
result.append(", loadBreak: ");
if (loadBreakESet) result.append(loadBreak); else result.append("<unset>");
result.append(", minimumCurrent: ");
if (minimumCurrentESet) result.append(minimumCurrent); else result.append("<unset>");
result.append(", interruptingRating: ");
if (interruptingRatingESet) result.append(interruptingRating); else result.append("<unset>");
result.append(", remote: ");
if (remoteESet) result.append(remote); else result.append("<unset>");
result.append(", poleCount: ");
if (poleCountESet) result.append(poleCount); else result.append("<unset>");
result.append(')');
return result.toString();
}
} // SwitchInfo
| |
/*
* This file is part of the Jikes RVM project (http://jikesrvm.org).
*
* This file is licensed to You under the Common Public License (CPL);
* You may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.opensource.org/licenses/cpl1.0.php
*
* See the COPYRIGHT.txt file distributed with this work for information
* regarding copyright ownership.
*/
package org.jikesrvm.compilers.opt.regalloc.ppc;
import org.jikesrvm.ArchitectureSpecific;
import org.jikesrvm.VM;
import org.jikesrvm.classloader.TypeReference;
import org.jikesrvm.compilers.opt.DefUse;
import org.jikesrvm.compilers.opt.ir.Call;
import org.jikesrvm.compilers.opt.ir.Load;
import org.jikesrvm.compilers.opt.ir.MIR_Call;
import org.jikesrvm.compilers.opt.ir.MIR_Load;
import org.jikesrvm.compilers.opt.ir.MIR_Move;
import org.jikesrvm.compilers.opt.ir.MIR_Return;
import org.jikesrvm.compilers.opt.ir.MIR_Store;
import org.jikesrvm.compilers.opt.ir.Move;
import org.jikesrvm.compilers.opt.ir.IR;
import org.jikesrvm.compilers.opt.ir.IRTools;
import org.jikesrvm.compilers.opt.ir.Instruction;
import org.jikesrvm.compilers.opt.ir.OperandEnumeration;
import static org.jikesrvm.compilers.opt.ir.Operators.DOUBLE_MOVE;
import static org.jikesrvm.compilers.opt.ir.Operators.FLOAT_MOVE;
import static org.jikesrvm.compilers.opt.ir.Operators.INT_MOVE;
import static org.jikesrvm.compilers.opt.ir.Operators.INT_STORE;
import static org.jikesrvm.compilers.opt.ir.Operators.IR_PROLOGUE;
import static org.jikesrvm.compilers.opt.ir.Operators.LONG_MOVE;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_FMR;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_LAddr;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_LFD;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_LFS;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_LInt;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_MOVE;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_STAddr;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_STFD;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_STFS;
import static org.jikesrvm.compilers.opt.ir.Operators.PPC_STW;
import static org.jikesrvm.compilers.opt.ir.Operators.REF_LOAD;
import static org.jikesrvm.compilers.opt.ir.Operators.REF_STORE;
import static org.jikesrvm.compilers.opt.ir.Operators.SYSCALL;
import org.jikesrvm.compilers.opt.ir.Register;
import org.jikesrvm.compilers.opt.ir.Prologue;
import org.jikesrvm.compilers.opt.ir.Store;
import org.jikesrvm.compilers.opt.ir.operand.DoubleConstantOperand;
import org.jikesrvm.compilers.opt.ir.operand.FloatConstantOperand;
import org.jikesrvm.compilers.opt.ir.operand.LongConstantOperand;
import org.jikesrvm.compilers.opt.ir.operand.Operand;
import org.jikesrvm.compilers.opt.ir.operand.RegisterOperand;
import org.jikesrvm.compilers.opt.ir.ppc.PhysicalRegisterSet;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.BYTES_IN_ADDRESS;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.BYTES_IN_DOUBLE;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.BYTES_IN_FLOAT;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.BYTES_IN_INT;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.FIRST_DOUBLE_PARAM;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.FIRST_DOUBLE_RETURN;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.FIRST_INT_PARAM;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.FIRST_INT_RETURN;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.LOG_BYTES_IN_ADDRESS;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.NUMBER_DOUBLE_PARAM;
import static org.jikesrvm.compilers.opt.regalloc.ppc.PhysicalRegisterConstants.NUMBER_INT_PARAM;
import static org.jikesrvm.ppc.StackframeLayoutConstants.STACKFRAME_HEADER_SIZE;
import static org.jikesrvm.ppc.StackframeLayoutConstants.STACKFRAME_METHOD_ID_OFFSET;
import org.vmmagic.unboxed.Offset;
/**
* This class contains PowerPC Calling conventions.
* The two public methods are:
* <ul>
* <li> expandCallingConventions(IR) which is called by
* the register allocator immediately before allocation to make
* manifest the use of registers by the calling convention.
* <li> expandSysCall(Instruction, IR) which is called to
* expand a SYSCALL HIR instruction into the appropriate
* sequence of LIR instructions.
* </ul>
*/
public abstract class CallingConvention extends IRTools {
/**
* Expand calls, returns, and add initialization code for arguments/parms
* over the whole IR, then make the prologue's parameter moves explicit.
*
* @param ir the governing IR
*/
public static void expandCallingConventions(IR ir) {
for (Instruction inst = ir.firstInstructionInCodeOrder(); inst != null; inst =
inst.nextInstructionInCodeOrder()) {
if (inst.isCall()) {
callExpand(inst, ir);
} else if (inst.isReturn()) {
returnExpand(inst, ir);
}
}
prologueExpand(ir);
}
/**
* Expand calling conventions only for instructions that were added by
* instrumentation during register allocation.  Visits {@code from}
* (inclusive) up to {@code to} (exclusive); the prologue is not touched.
*
* @param ir the governing IR
* @param from the first instruction to process
* @param to the sentinel instruction at which to stop (not processed)
*/
public static void expandCallingConventionsForInstrumentation(IR ir, Instruction from, Instruction to) {
for (Instruction inst = from; inst != to; inst = inst.nextInstructionInCodeOrder()) {
if (inst.isCall()) {
callExpand(inst, ir);
} else if (inst.isReturn()) {
returnExpand(inst, ir);
}
}
}
/**
* Calling convention to implement calls to
* native (C) routines using the AIX linkage conventions.
*
* @param s the SYSCALL call instruction to expand
* @param ir the governing IR
*/
public static void expandSysCall(Instruction s, IR ir) {
RegisterOperand ip = (RegisterOperand) Call.getClearAddress(s);
int numberParams = Call.getNumberOfParams(s);
/* Long, float, and double constants are loaded from the JTOC.
* We must ensure that they are loaded _before_ we change
* the JTOC to be the C TOC so inject explicit moves to do so.
*/
for (int i = 0; i < numberParams; i++) {
Operand arg = Call.getParam(s, i);
if (arg instanceof LongConstantOperand) {
LongConstantOperand op = (LongConstantOperand) Call.getClearParam(s, i);
RegisterOperand rop = ir.regpool.makeTempLong();
s.insertBefore(Move.create(LONG_MOVE, rop, op));
Call.setParam(s, i, rop.copy());
} else if (arg instanceof DoubleConstantOperand) {
DoubleConstantOperand op = (DoubleConstantOperand) Call.getClearParam(s, i);
RegisterOperand rop = ir.regpool.makeTempDouble();
s.insertBefore(Move.create(DOUBLE_MOVE, rop, op));
Call.setParam(s, i, rop.copy());
} else if (arg instanceof FloatConstantOperand) {
FloatConstantOperand op = (FloatConstantOperand) Call.getClearParam(s, i);
RegisterOperand rop = ir.regpool.makeTempFloat();
s.insertBefore(Move.create(FLOAT_MOVE, rop, op));
Call.setParam(s, i, rop.copy());
}
}
/* compute the parameter space */
int parameterWords;
if (VM.BuildFor32Addr) {
// In a 32-bit build, longs and doubles occupy two words each.
parameterWords = 0;
for (int i = 0; i < numberParams; i++) {
parameterWords++;
Operand op = Call.getParam(s, i);
if (op instanceof RegisterOperand) {
RegisterOperand reg = (RegisterOperand) op;
if (reg.getType().isLongType() || reg.getType().isDoubleType()) {
parameterWords++;
}
}
}
} else {
parameterWords = numberParams;
}
// see PowerPC Compiler Writer's Guide, pp. 162
ir.stackManager.allocateParameterSpace((6 + parameterWords) * BYTES_IN_ADDRESS);
// IMPORTANT WARNING: as the callee C routine may destroy the cmid field
// (it is the saved CR field of the callee in C convention)
// we are restoring the methodID after a sysCall.
// Save the JTOC into the caller's frame before the call ...
Instruction s2 =
Store.create(REF_STORE,
ir.regpool.makeJTOCOp(ir, s),
ir.regpool.makeFPOp(),
AC(Offset.fromIntSignExtend(5 * BYTES_IN_ADDRESS)),
null); // TODO: valid location?
s.insertBefore(s2);
if (VM.BuildForPowerOpenABI) {
// PowerOpen function pointers are descriptors: load the callee's TOC
// (slot 1 of the descriptor) and its real entry address (slot 0)
// before the call.
s2 =
Load.create(REF_LOAD, ir.regpool.makeJTOCOp(ir, s), ip, AC(Offset.fromIntZeroExtend(BYTES_IN_ADDRESS)), null);
s.insertBefore(s2);
RegisterOperand iptmp = ir.regpool.makeTempAddress();
s2 = Load.create(REF_LOAD, iptmp, ip, AC(Offset.zero()), null);
s.insertBefore(s2);
ip = iptmp;
}
Call.mutate0(s, SYSCALL, Call.getClearResult(s), ip, null);
// ... and restore the JTOC after the call returns.
s2 =
Load.create(REF_LOAD,
ir.regpool.makeJTOCOp(ir, s),
ir.regpool.makeFPOp(),
AC(Offset.fromIntSignExtend(5 * BYTES_IN_ADDRESS)),
null); // TODO: valid location?
s.insertAfter(s2);
// Rewrite the method id into the stack frame header (see warning above).
RegisterOperand temp = ir.regpool.makeTempInt();
s2 = Move.create(INT_MOVE, temp, IC(ir.compiledMethod.getId()));
Instruction s3 =
Store.create(INT_STORE,
temp.copy(),
ir.regpool.makeFPOp(),
AC(Offset.fromIntSignExtend(STACKFRAME_METHOD_ID_OFFSET)),
null); // TODO: valid location?
s.insertAfter(s3);
s.insertAfter(s2);
}
/////////////////////
// Implementation
/////////////////////
/**
* Expand the prologue instruction to make calling convention explicit:
* copy each incoming parameter from its physical argument register (or,
* once the argument registers are exhausted, from its caller stack slot)
* into the symbolic register named by the IR_PROLOGUE defs.
*
* @param ir the governing IR
*/
private static void prologueExpand(IR ir) {
// set up register lists for dead code elimination.
boolean useDU = ir.options.getOptLevel() >= 1;
if (useDU) {
DefUse.computeDU(ir);
}
Instruction prologueInstr = ir.firstInstructionInCodeOrder().nextInstructionInCodeOrder();
if (VM.VerifyAssertions) VM._assert(prologueInstr.operator == IR_PROLOGUE);
Instruction start = prologueInstr.nextInstructionInCodeOrder();
int int_index = 0;
int double_index = 0;
// Negative word index of the first spilled argument slot, relative to FP.
int spilledArgumentCounter =
(-256 - ArchitectureSpecific.ArchConstants.STACKFRAME_HEADER_SIZE) >> LOG_BYTES_IN_ADDRESS;
PhysicalRegisterSet phys = ir.regpool.getPhysicalRegisterSet();
Register FP = phys.getFP();
for (OperandEnumeration symParams = prologueInstr.getDefs(); symParams.hasMoreElements();) {
RegisterOperand symParamOp = (RegisterOperand) symParams.next();
Register symParam = symParamOp.getRegister();
TypeReference t = symParamOp.getType();
if (t.isFloatType()) {
// if optimizing, skip dead parameters
// SJF: This optimization currently breaks the paranoid sanity test.
// Why? TODO: figure this out and remove the 'true' case below
if (true || !useDU || symParam.useList != null) {
if (double_index < NUMBER_DOUBLE_PARAM) {
Register param = phys.get(FIRST_DOUBLE_PARAM + (double_index));
start.insertBefore(MIR_Move.create(PPC_FMR, F(symParam), F(param)));
} else { // spilled parameter
start.insertBefore(MIR_Load.create(PPC_LFS,
F(symParam),
A(FP),
IC((spilledArgumentCounter << LOG_BYTES_IN_ADDRESS) - BYTES_IN_ADDRESS +
BYTES_IN_FLOAT)));
spilledArgumentCounter--;
}
}
double_index++;
} else if (t.isDoubleType()) {
// if optimizing, skip dead parameters
// SJF: This optimization currently breaks the paranoid sanity test.
// Why? TODO: figure this out and remove the 'true' case below
if (true || !useDU || symParam.useList != null) {
if (double_index < NUMBER_DOUBLE_PARAM) {
Register param = phys.get(FIRST_DOUBLE_PARAM + (double_index));
start.insertBefore(MIR_Move.create(PPC_FMR, D(symParam), D(param)));
} else { // spilled parameter
start.insertBefore(MIR_Load.create(PPC_LFD,
D(symParam),
A(FP),
IC(spilledArgumentCounter << LOG_BYTES_IN_ADDRESS)));
spilledArgumentCounter -= BYTES_IN_DOUBLE / BYTES_IN_ADDRESS;
}
}
double_index++;
} else { // t is object, 1/2 of a long, int, short, char, byte, or boolean
// if optimizing, skip dead parameters
// SJF: This optimization currently breaks the paranoid sanity test.
// Why? TODO: figure this out and remove the 'true' case below
if (true || !useDU || symParam.useList != null) {
if (int_index < NUMBER_INT_PARAM) {
Register param = phys.get(FIRST_INT_PARAM + (int_index));
start.insertBefore(MIR_Move.create(PPC_MOVE, new RegisterOperand(symParam, t), A(param)));
} else { // spilled parameter
if (VM
.BuildFor64Addr &&
(t.isIntType() ||
t.isShortType() ||
t.isByteType() ||
t.isCharType() ||
t.isBooleanType())) {
start.insertBefore(MIR_Load.create(PPC_LInt,
new RegisterOperand(symParam, t),
A(FP),
IC((spilledArgumentCounter << LOG_BYTES_IN_ADDRESS) -
BYTES_IN_ADDRESS + BYTES_IN_INT)));
} else {
// same size as addr (ie, either we're in 32 bit mode or we're in 64 bit mode and it's a reference or long)
start.insertBefore(MIR_Load.create(PPC_LAddr,
new RegisterOperand(symParam, t),
A(FP),
IC(spilledArgumentCounter << LOG_BYTES_IN_ADDRESS)));
}
spilledArgumentCounter--;
}
}
int_index++;
}
}
// Now that we've made the calling convention explicit in the prologue,
// set IR_PROLOGUE to have no defs.
prologueInstr.replace(Prologue.create(IR_PROLOGUE, 0));
}
/**
* Expand the call as appropriate: move each parameter into its physical
* argument register (spilling into the outgoing parameter area once the
* argument registers run out) and move results out of the physical
* return registers.
*
* @param s the call instruction
* @param ir the ir
*/
private static void callExpand(Instruction s, IR ir) {
int NumberParams = MIR_Call.getNumberOfParams(s);
int int_index = 0; // points to the first integer volatile
int double_index = 0; // points to the first f.p. volatile
int callSpillLoc = STACKFRAME_HEADER_SIZE;
PhysicalRegisterSet phys = ir.regpool.getPhysicalRegisterSet();
Instruction prev = s.prevInstructionInCodeOrder();
Register FP = phys.getFP();
boolean isSysCall = ir.stackManager.isSysCall(s);
boolean firstLongHalf = false;
// (1) Expand parameters
for (int opNum = 0; opNum < NumberParams; opNum++) {
Operand param = MIR_Call.getClearParam(s, opNum);
RegisterOperand Reg = (RegisterOperand) param;
// as part of getting into MIR, we make sure all params are in registers.
Register reg = Reg.getRegister();
if (Reg.getType().isFloatType()) {
if (double_index < NUMBER_DOUBLE_PARAM) { // register copy
Register real = phys.get(FIRST_DOUBLE_PARAM + (double_index++));
s.insertBefore(MIR_Move.create(PPC_FMR, F(real), Reg));
Reg = F(real);
// Record that the call now has a use of the real reg
// This is to ensure liveness is correct
MIR_Call.setParam(s, opNum, Reg);
} else { // spill to memory
Instruction p = prev.nextInstructionInCodeOrder();
callSpillLoc += BYTES_IN_ADDRESS;
p.insertBefore(MIR_Store.create(PPC_STFS, F(reg), A(FP), IC(callSpillLoc - BYTES_IN_FLOAT)));
// We don't have uses of the heap at MIR, so null it out
MIR_Call.setParam(s, opNum, null);
}
} else if (Reg.getType().isDoubleType()) {
if (double_index < NUMBER_DOUBLE_PARAM) { // register copy
Register real = phys.get(FIRST_DOUBLE_PARAM + (double_index++));
s.insertBefore(MIR_Move.create(PPC_FMR, D(real), Reg));
Reg = D(real);
// Record that the call now has a use of the real reg
// This is to ensure liveness is correct
MIR_Call.setParam(s, opNum, Reg);
} else { // spill to memory
Instruction p = prev.nextInstructionInCodeOrder();
p.insertBefore(MIR_Store.create(PPC_STFD, D(reg), A(FP), IC(callSpillLoc)));
callSpillLoc += BYTES_IN_DOUBLE;
// We don't have uses of the heap at MIR, so null it out
MIR_Call.setParam(s, opNum, null);
}
} else { // IntType (or half of long) or reference
if (VM.BuildForSVR4ABI) {
/* NOTE: following adjustment is not stated in SVR4 ABI, but
* was implemented in GCC: skip a gpr if necessary so the two
* halves of a long syscall argument land in an aligned pair.
*/
if (isSysCall && Reg.getType().isLongType()) {
if (firstLongHalf) {
firstLongHalf = false;
} else {
int true_index = FIRST_INT_PARAM + int_index;
int_index += (true_index + 1) & 0x01; // if gpr is even, gpr += 1
firstLongHalf = true;
}
}
}
if (int_index < NUMBER_INT_PARAM) { // register copy
Register real = phys.get(FIRST_INT_PARAM + (int_index++));
RegisterOperand Real = new RegisterOperand(real, Reg.getType());
s.insertBefore(MIR_Move.create(PPC_MOVE, Real, Reg));
Reg = new RegisterOperand(real, Reg.getType());
// Record that the call now has a use of the real reg
// This is to ensure liveness is correct
MIR_Call.setParam(s, opNum, Reg);
} else { // spill to memory
Instruction p = prev.nextInstructionInCodeOrder();
callSpillLoc += BYTES_IN_ADDRESS;
if (VM
.BuildFor64Addr &&
(Reg.getType().isIntType() ||
Reg.getType().isShortType() ||
Reg.getType().isByteType() ||
Reg.getType().isCharType() ||
Reg.getType().isBooleanType())) {
p.insertBefore(MIR_Store.create(PPC_STW,
new RegisterOperand(reg, Reg.getType()),
A(FP),
IC(callSpillLoc - BYTES_IN_INT)));
} else {
// same size as addr (ie, either we're in 32 bit mode or we're in 64 bit mode and it's a reference or long)
p.insertBefore(MIR_Store.create(PPC_STAddr,
new RegisterOperand(reg, Reg.getType()),
A(FP),
IC(callSpillLoc - BYTES_IN_ADDRESS)));
}
// We don't have uses of the heap at MIR, so null it out
MIR_Call.setParam(s, opNum, null);
}
}
}
// If we needed to pass arguments on the stack,
// then make sure we have a big enough stack
if (callSpillLoc != STACKFRAME_HEADER_SIZE) {
ir.stackManager.allocateParameterSpace(callSpillLoc);
}
// (2) expand result
Instruction lastCallSeqInstr = s;
if (MIR_Call.hasResult2(s)) {
// A second result register is only used on 32-bit builds
// (e.g. the second half of a long result) -- hence the assertion.
if (VM.VerifyAssertions) VM._assert(VM.BuildFor32Addr);
RegisterOperand result2 = MIR_Call.getClearResult2(s);
RegisterOperand physical = new RegisterOperand(phys.get(FIRST_INT_RETURN + 1), result2.getType());
Instruction tmp = MIR_Move.create(PPC_MOVE, result2, physical);
lastCallSeqInstr.insertAfter(tmp);
lastCallSeqInstr = tmp;
MIR_Call.setResult2(s, null);
}
if (MIR_Call.hasResult(s)) {
RegisterOperand result1 = MIR_Call.getClearResult(s);
if (result1.getType().isFloatType() || result1.getType().isDoubleType()) {
RegisterOperand physical = new RegisterOperand(phys.get(FIRST_DOUBLE_RETURN), result1.getType());
Instruction tmp = MIR_Move.create(PPC_FMR, result1, physical);
lastCallSeqInstr.insertAfter(tmp);
lastCallSeqInstr = tmp;
MIR_Call.setResult(s, null);
} else {
RegisterOperand physical = new RegisterOperand(phys.get(FIRST_INT_RETURN), result1.getType());
Instruction tmp = MIR_Move.create(PPC_MOVE, result1, physical);
lastCallSeqInstr.insertAfter(tmp);
lastCallSeqInstr = tmp;
MIR_Call.setResult(s, null);
}
}
}
/**
* Expand return statements: copy the symbolic return value(s) into the
* physical return register(s) required by the calling convention.
* @param s the return instruction
* @param ir the ir
*/
private static void returnExpand(Instruction s, IR ir) {
PhysicalRegisterSet phys = ir.regpool.getPhysicalRegisterSet();
if (MIR_Return.hasVal(s)) {
RegisterOperand symb1 = MIR_Return.getClearVal(s);
RegisterOperand phys1;
if (symb1.getType().isFloatType() || symb1.getType().isDoubleType()) {
phys1 = D(phys.get(FIRST_DOUBLE_RETURN));
s.insertBefore(MIR_Move.create(PPC_FMR, phys1, symb1));
} else {
phys1 = new RegisterOperand(phys.get(FIRST_INT_RETURN), symb1.getType());
s.insertBefore(MIR_Move.create(PPC_MOVE, phys1, symb1));
}
MIR_Return.setVal(s, phys1.copyD2U());
}
if (MIR_Return.hasVal2(s)) {
// Second return value only exists on 32-bit builds (see assertion).
if (VM.VerifyAssertions) VM._assert(VM.BuildFor32Addr);
RegisterOperand symb2 = MIR_Return.getClearVal2(s);
RegisterOperand phys2 = I(phys.get(FIRST_INT_RETURN + 1));
s.insertBefore(MIR_Move.create(PPC_MOVE, phys2, symb2));
MIR_Return.setVal2(s, phys2.copyD2U());
}
}
/**
* Save and restore all nonvolatile registers around a syscall.
* On PPC, our register conventions are compatible with the
* native ABI, so there is nothing to do.
*
* @param call the sys call
* @param ir the governing IR
*/
public static void saveNonvolatilesAroundSysCall(Instruction call, IR ir) {
}
}
| |
package com.bitmonlab.osiris.api.core.map.assemblers;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import com.bitmonlab.osiris.api.core.map.assemblers.LineStringAssemblerImpl;
import com.bitmonlab.osiris.api.core.map.transferobject.LineStringDTO;
import com.bitmonlab.osiris.api.core.map.transferobject.PointDTO;
import com.bitmonlab.osiris.commons.map.model.geojson.LineString;
import com.bitmonlab.osiris.commons.map.model.geojson.Point;
import com.bitmonlab.osiris.core.assembler.Assembler;
import com.bitmonlab.osiris.core.assembler.AssemblyException;
@RunWith(MockitoJUnitRunner.class)
public class LineStringAssemblerTest {

    /** DTO consumed by the DTO -> domain direction (mocked). */
    @Mock
    private LineStringDTO lineStringDTO;

    /** Unit under test; Mockito injects the mocked collaborators. */
    @InjectMocks
    private LineStringAssemblerImpl lineStringAssemblerImpl;

    @Mock
    private LineString lineString1;

    @Mock
    private LineString lineString2;

    @Mock
    private PointDTO pointDTO1;

    @Mock
    private PointDTO pointDTO2;

    @Mock
    private Point point1;

    @Mock
    private Point point2;

    /** Nested assembler the unit under test delegates point conversion to. */
    @Mock
    private Assembler<PointDTO, Point> pointAssembler;

    @Mock
    private List<Double> coordinates1;

    @Mock
    private List<Double> coordinates2;

    @Mock
    private PointDTO centroidDTO;

    @Mock
    private List<Double> centroid;

    @Mock
    private List<Double> centroid1;

    @Mock
    private List<Double> centroid2;

    /**
     * DTO -> domain: every contained PointDTO must be converted and the
     * centroid built from the DTO's longitude/latitude pair.
     */
    @Test
    public void transformLineStringDTOtoLineString() throws AssemblyException {
        Collection<PointDTO> collectionPointDTO = new ArrayList<PointDTO>();
        collectionPointDTO.add(pointDTO1);
        collectionPointDTO.add(pointDTO2);
        Double longitude = 1D;
        Double latitude = 2D;

        // Fixture
        Mockito.when(lineStringDTO.getCollectionPointDTO()).thenReturn(collectionPointDTO);
        Mockito.when(pointAssembler.createDomainObject(pointDTO1)).thenReturn(point1);
        Mockito.when(point1.getCoordinates()).thenReturn(coordinates1);
        Mockito.when(pointAssembler.createDomainObject(pointDTO2)).thenReturn(point2);
        Mockito.when(point2.getCoordinates()).thenReturn(coordinates2);
        Mockito.when(lineStringDTO.getCentroidDTO()).thenReturn(centroidDTO);
        Mockito.when(centroidDTO.getLongitude()).thenReturn(longitude);
        Mockito.when(centroidDTO.getLatitude()).thenReturn(latitude);

        // Experimentation
        LineString lineStringResponse = lineStringAssemblerImpl.createDomainObject(lineStringDTO);

        // Expectations -- BUGFIX: JUnit's assertEquals takes (message, EXPECTED,
        // ACTUAL); the arguments were reversed throughout this class, which
        // produced misleading failure messages.
        List<List<Double>> coordinatesLineStringResponse = lineStringResponse.getCoordinates();
        assertEquals("LineString response must have two collections of coordinates", 2, coordinatesLineStringResponse.size());
        Iterator<List<Double>> iteratorLineString = coordinatesLineStringResponse.iterator();
        Collection<Double> coordinates1Response = iteratorLineString.next();
        assertEquals("Coordinates1 must have two coordinates", coordinates1, coordinates1Response);
        Collection<Double> coordinates2Response = iteratorLineString.next();
        assertEquals("Coordinates2 must have two coordinates", coordinates2, coordinates2Response);
        List<Double> centroidResponse = lineStringResponse.getCentroid();
        assertEquals("Longitudes must be equals", longitude, centroidResponse.get(0));
        assertEquals("Latitudes must be equals", latitude, centroidResponse.get(1));
    }

    /**
     * Domain -> DTO: each coordinate list becomes a PointDTO and the
     * centroid list becomes the centroid DTO.
     */
    @Test
    public void transformLineStringToLineStringDTO() throws AssemblyException {
        List<List<Double>> collectionCoordinates = new ArrayList<List<Double>>();
        Double longitude = 1D;
        Double latitude = 2D;
        collectionCoordinates.add(coordinates1);
        collectionCoordinates.add(coordinates2);
        // Real Point instances (shadowing the mock fields) so the stubbing
        // below matches, by equals, the points the assembler constructs.
        Point point1 = new Point();
        point1.setCoordinates(coordinates1);
        Point point2 = new Point();
        point2.setCoordinates(coordinates2);

        // Fixture
        Mockito.when(lineString1.getCoordinates()).thenReturn(collectionCoordinates);
        Mockito.when(pointAssembler.createDataTransferObject(point1)).thenReturn(pointDTO1);
        Mockito.when(pointAssembler.createDataTransferObject(point2)).thenReturn(pointDTO2);
        Mockito.when(lineString1.getCentroid()).thenReturn(centroid);
        Mockito.when(centroid.get(0)).thenReturn(longitude);
        Mockito.when(centroid.get(1)).thenReturn(latitude);

        // Experimentation
        LineStringDTO lineStringDTOResponse = lineStringAssemblerImpl.createDataTransferObject(lineString1);

        // Expectations
        Collection<PointDTO> collectionPointDTO = lineStringDTOResponse.getCollectionPointDTO();
        assertEquals("LineStringDTO response must have two collections of coordinates", 2, collectionPointDTO.size());
        Iterator<PointDTO> iterator = collectionPointDTO.iterator();
        PointDTO pointDTO1Response = iterator.next();
        assertEquals("PointDTO1 must be the same", pointDTO1, pointDTO1Response);
        PointDTO pointDTO2Response = iterator.next();
        assertEquals("PointDTO2 must be the same", pointDTO2, pointDTO2Response);
        PointDTO centroidResponse = lineStringDTOResponse.getCentroidDTO();
        assertEquals("LongitudesDTO must be equals", longitude, centroidResponse.getLongitude());
        assertEquals("LatitudesDTO must be equals", latitude, centroidResponse.getLatitude());
    }

    /**
     * Domain collection -> DTO collection: each LineString is converted
     * independently and order is preserved.
     */
    @Test
    public void transformCollectionLineStringsToCollectionLineStringsDTO() throws AssemblyException {
        Collection<LineString> lineStrings = new ArrayList<LineString>();
        lineStrings.add(lineString1);
        lineStrings.add(lineString2);
        List<List<Double>> collectionCoordinates1 = new ArrayList<List<Double>>();
        collectionCoordinates1.add(coordinates1);
        List<List<Double>> collectionCoordinates2 = new ArrayList<List<Double>>();
        collectionCoordinates2.add(coordinates2);
        Point point1 = new Point();
        point1.setCoordinates(coordinates1);
        Point point2 = new Point();
        point2.setCoordinates(coordinates2);
        Double longitude1 = 1D;
        Double latitude1 = 2D;
        Double longitude2 = 1D;
        Double latitude2 = 2D;

        // Fixture
        Mockito.when(lineString1.getCoordinates()).thenReturn(collectionCoordinates1);
        Mockito.when(pointAssembler.createDataTransferObject(point1)).thenReturn(pointDTO1);
        Mockito.when(lineString1.getCentroid()).thenReturn(centroid1);
        Mockito.when(centroid1.get(0)).thenReturn(longitude1);
        Mockito.when(centroid1.get(1)).thenReturn(latitude1);
        Mockito.when(lineString2.getCoordinates()).thenReturn(collectionCoordinates2);
        Mockito.when(pointAssembler.createDataTransferObject(point2)).thenReturn(pointDTO2);
        // BUGFIX: lineString2 previously reused centroid1 and re-stubbed its
        // get(...) calls, silently overriding lineString1's stubs (harmless
        // only because longitude1 == longitude2 and latitude1 == latitude2);
        // use the dedicated -- and previously unused -- centroid2 mock.
        Mockito.when(lineString2.getCentroid()).thenReturn(centroid2);
        Mockito.when(centroid2.get(0)).thenReturn(longitude2);
        Mockito.when(centroid2.get(1)).thenReturn(latitude2);

        // Experimentation
        Collection<LineStringDTO> response = lineStringAssemblerImpl.createDataTransferObjects(lineStrings);

        // Expectations
        assertEquals("CollectionLineStringDTO response must have two lineStringDTO", 2, response.size());
        Iterator<LineStringDTO> iteratorCollectionLineStringDTO = response.iterator();
        LineStringDTO lineStringDTOResponse1 = iteratorCollectionLineStringDTO.next();
        Collection<PointDTO> collectionPointDTO1Response = lineStringDTOResponse1.getCollectionPointDTO();
        assertEquals("CollectionPointDTO1Response must have a pointDTO", 1, collectionPointDTO1Response.size());
        Iterator<PointDTO> iteratorLineStringDTO1Response = collectionPointDTO1Response.iterator();
        PointDTO pointDTO1Response = iteratorLineStringDTO1Response.next();
        assertEquals("PointDTO1 must be the same", pointDTO1, pointDTO1Response);
        PointDTO centroidResponse1 = lineStringDTOResponse1.getCentroidDTO();
        assertEquals("LongitudesDTO must be equals", longitude1, centroidResponse1.getLongitude());
        assertEquals("LatitudesDTO must be equals", latitude1, centroidResponse1.getLatitude());
        LineStringDTO lineStringDTOResponse2 = iteratorCollectionLineStringDTO.next();
        Collection<PointDTO> collectionPointDTO2Response = lineStringDTOResponse2.getCollectionPointDTO();
        assertEquals("CollectionPointDTO2Response must have a pointDTO", 1, collectionPointDTO2Response.size());
        Iterator<PointDTO> iteratorLineStringDTO2Response = collectionPointDTO2Response.iterator();
        PointDTO pointDTO2Response = iteratorLineStringDTO2Response.next();
        assertEquals("PointDTO2 must be the same", pointDTO2, pointDTO2Response);
        PointDTO centroidResponse2 = lineStringDTOResponse2.getCentroidDTO();
        assertEquals("LongitudesDTO must be equals", longitude2, centroidResponse2.getLongitude());
        assertEquals("LatitudesDTO must be equals", latitude2, centroidResponse2.getLatitude());
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.standalone.calendar;
import static org.junit.Assert.assertEquals;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import org.apache.commons.lang3.time.DateUtils;
import org.flowable.common.engine.impl.calendar.DurationHelper;
import org.flowable.common.engine.impl.runtime.Clock;
import org.flowable.common.engine.impl.util.DefaultClockImpl;
import org.junit.jupiter.api.Test;
public class DurationHelperTest {
@Test
public void shouldNotExceedNumber() throws Exception {
    // "R2/PT10S": a 10-second duration that may repeat at most twice.
    Clock clock = new DefaultClockImpl();
    clock.setCurrentTime(new Date(0));
    DurationHelper helper = new DurationHelper("R2/PT10S", clock);
    // One period consumed: the next due date is the end of period two.
    clock.setCurrentTime(new Date(15000));
    assertEquals(20000, helper.getDateAfter().getTime());
    // Both repetitions exhausted: the due date no longer advances.
    clock.setCurrentTime(new Date(30000));
    assertEquals(30000, helper.getDateAfter().getTime());
}
@Test
public void shouldNotExceedNumberPeriods() throws Exception {
    // Same repetition cap, expressed as a start/end interval instead of a period.
    Clock clock = new DefaultClockImpl();
    clock.setCurrentTime(parse("19700101-00:00:00"));
    DurationHelper helper = new DurationHelper("R2/1970-01-01T00:00:00/1970-01-01T00:00:10", clock);
    clock.setCurrentTime(parse("19700101-00:00:15"));
    assertEquals(parse("19700101-00:00:20"), helper.getDateAfter());
    // Past both repetitions the current time is returned unchanged.
    clock.setCurrentTime(parse("19700101-00:00:30"));
    assertEquals(parse("19700101-00:00:30"), helper.getDateAfter());
}
@Test
public void shouldNotExceedNumberNegative() throws Exception {
    // Period anchored to an end date: "R2/PT10S/<end>".
    Clock clock = new DefaultClockImpl();
    clock.setCurrentTime(parse("19700101-00:00:00"));
    DurationHelper helper = new DurationHelper("R2/PT10S/1970-01-01T00:00:50", clock);
    clock.setCurrentTime(parse("19700101-00:00:20"));
    assertEquals(parse("19700101-00:00:30"), helper.getDateAfter());
    // No further repetitions remain, so "now" is handed back as-is.
    clock.setCurrentTime(parse("19700101-00:00:35"));
    assertEquals(parse("19700101-00:00:35"), helper.getDateAfter());
}
@Test
public void daylightSavingFall() throws Exception {
    TimeZone utc = TimeZone.getTimeZone("UTC");
    TimeZone eastern = TimeZone.getTimeZone("US/Eastern");
    Clock clock = new DefaultClockImpl();
    clock.setCurrentCalendar(parseCalendar("20131103-04:45:00", utc));
    DurationHelper helper = new DurationHelper("R2/2013-11-03T00:45:00-04:00/PT1H", clock);
    // Each hourly repetition advances exactly one UTC hour across the
    // US/Eastern fall-back transition.
    assertEquals(parseCalendar("20131103-05:45:00", utc), helper.getCalendarAfter(clock.getCurrentCalendar(eastern)));
    clock.setCurrentCalendar(parseCalendar("20131103-05:45:00", utc));
    assertEquals(parseCalendar("20131103-06:45:00", utc), helper.getCalendarAfter(clock.getCurrentCalendar(eastern)));
}
@Test
public void daylightSavingFallFirstHour() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone utc = TimeZone.getTimeZone("UTC");
    clock.setCurrentCalendar(parseCalendar("20131103-05:45:00", utc));
    Calendar easternTime = clock.getCurrentCalendar(TimeZone.getTimeZone("US/Eastern"));
    // Start time pinned to the first (EDT, -04:00) occurrence of 01:45 on the fall-back day.
    DurationHelper helper = new DurationHelper("R2/2013-11-03T01:45:00-04:00/PT1H", clock);
    assertEquals(parseCalendar("20131103-06:45:00", utc), helper.getCalendarAfter(easternTime));
}
@Test
public void daylightSavingFallSecondHour() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone utc = TimeZone.getTimeZone("UTC");
    clock.setCurrentCalendar(parseCalendar("20131103-06:45:00", utc));
    Calendar easternTime = clock.getCurrentCalendar(TimeZone.getTimeZone("US/Eastern"));
    // Start time pinned to the second (EST, -05:00) occurrence of 01:45 on the fall-back day.
    DurationHelper helper = new DurationHelper("R2/2013-11-03T01:45:00-05:00/PT1H", clock);
    assertEquals(parseCalendar("20131103-07:45:00", utc), helper.getCalendarAfter(easternTime));
}
@Test
public void daylightSavingFallObservedFirstHour() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone eastern = TimeZone.getTimeZone("US/Eastern");
    clock.setCurrentCalendar(parseCalendar("20131103-00:45:00", eastern));
    DurationHelper helper = new DurationHelper("R2/2013-11-03T00:45:00-04:00/PT1H", clock);
    // Expected instant is the first (pre-rollback, -04:00) occurrence of 01:45 local time.
    Calendar expected = parseCalendarWithOffset("20131103-01:45:00-04:00", eastern);
    // compareTo checks the instant only, so the calendars' zones need not match.
    assertEquals(0, expected.compareTo(helper.getCalendarAfter()));
}
@Test
public void daylightSavingFallObservedSecondHour() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone eastern = TimeZone.getTimeZone("US/Eastern");
    clock.setCurrentCalendar(parseCalendar("20131103-00:45:00", eastern));
    DurationHelper helper = new DurationHelper("R2/2013-11-03T00:45:00-04:00/PT2H", clock);
    // Two hours after 00:45 EDT lands on the second (post-rollback, -05:00) 01:45.
    Calendar expected = parseCalendarWithOffset("20131103-01:45:00-05:00", eastern);
    // compareTo checks the instant only, so the calendars' zones need not match.
    assertEquals(0, expected.compareTo(helper.getCalendarAfter()));
}
@Test
public void daylightSavingSpring() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone utc = TimeZone.getTimeZone("UTC");
    // 2014-03-09 is the US spring-forward transition day; start is given in EST (-05:00).
    clock.setCurrentCalendar(parseCalendar("20140309-05:45:00", utc));
    DurationHelper helper = new DurationHelper("R2/2014-03-09T00:45:00-05:00/PT1H", clock);
    assertEquals(parseCalendar("20140309-06:45:00", utc), helper.getCalendarAfter(clock.getCurrentCalendar(TimeZone.getTimeZone("US/Eastern"))));
}
@Test
public void daylightSavingSpringObserved() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone eastern = TimeZone.getTimeZone("US/Eastern");
    clock.setCurrentCalendar(parseCalendar("20140309-01:45:00", eastern));
    DurationHelper helper = new DurationHelper("R2/2014-03-09T01:45:00/PT1H", clock);
    // 02:45 local does not exist on the spring-forward day, so one hour later is 03:45.
    Calendar expected = parseCalendar("20140309-03:45:00", eastern);
    assertEquals(expected, helper.getCalendarAfter());
}
@Test
public void daylightSaving25HourDay() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone eastern = TimeZone.getTimeZone("US/Eastern");
    clock.setCurrentCalendar(parseCalendar("20131103-00:00:00", eastern));
    DurationHelper helper = new DurationHelper("R2/2013-11-03T00:00:00/P1D", clock);
    // P1D advances one calendar day even though this fall-back day lasts 25 hours.
    assertEquals(parseCalendar("20131104-00:00:00", eastern), helper.getCalendarAfter(clock.getCurrentCalendar()));
}
@Test
public void daylightSaving23HourDay() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone eastern = TimeZone.getTimeZone("US/Eastern");
    clock.setCurrentCalendar(parseCalendar("20140309-00:00:00", eastern));
    DurationHelper helper = new DurationHelper("R2/2014-03-09T00:00:00/P1D", clock);
    // P1D advances one calendar day even though this spring-forward day lasts 23 hours.
    assertEquals(parseCalendar("20140310-00:00:00", eastern), helper.getCalendarAfter(clock.getCurrentCalendar()));
}
@Test
public void daylightSaving25HourDayEurope() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone amsterdam = TimeZone.getTimeZone("Europe/Amsterdam");
    // European fall-back date (last Sunday of October) differs from the US one.
    clock.setCurrentCalendar(parseCalendar("20131027-00:00:00", amsterdam));
    DurationHelper helper = new DurationHelper("R2/2013-10-27T00:00:00/P1D", clock);
    assertEquals(parseCalendar("20131028-00:00:00", amsterdam), helper.getCalendarAfter(clock.getCurrentCalendar()));
}
@Test
public void daylightSaving23HourDayEurope() throws Exception {
    Clock clock = new DefaultClockImpl();
    TimeZone amsterdam = TimeZone.getTimeZone("Europe/Amsterdam");
    // European spring-forward date (last Sunday of March) differs from the US one.
    clock.setCurrentCalendar(parseCalendar("20140330-00:00:00", amsterdam));
    DurationHelper helper = new DurationHelper("R2/2014-03-30T00:00:00/P1D", clock);
    assertEquals(parseCalendar("20140331-00:00:00", amsterdam), helper.getCalendarAfter(clock.getCurrentCalendar()));
}
/**
 * Parses a test timestamp ("yyyyMMdd-HH:mm:ss") into a Date using the JVM
 * default time zone.
 */
private Date parse(String str) throws Exception {
    return new SimpleDateFormat("yyyyMMdd-HH:mm:ss").parse(str);
}
/**
 * Parses a timestamp that carries an explicit UTC offset
 * ("yyyyMMdd-HH:mm:ssZZ") into a Calendar in the requested zone.
 *
 * Fix: the {@code timeZone} parameter was previously ignored —
 * {@code Calendar.getInstance()} used the JVM default zone. The parsed
 * instant is unchanged (the string's offset determines it), so callers
 * that compare via {@code compareTo} behave the same.
 *
 * @param str timestamp with offset, e.g. "20131103-01:45:00-04:00"
 * @param timeZone zone the returned calendar should be expressed in
 */
private Calendar parseCalendarWithOffset(String str, TimeZone timeZone) throws Exception {
    Calendar cal = Calendar.getInstance(timeZone);
    cal.setTime(DateUtils.parseDate(str, "yyyyMMdd-HH:mm:ssZZ"));
    return cal;
}
/** Parses a test timestamp using the default "yyyyMMdd-HH:mm:ss" format in the given zone. */
private Calendar parseCalendar(String str, TimeZone timeZone) throws Exception {
return parseCalendar(str, timeZone, "yyyyMMdd-HH:mm:ss");
}
/**
 * Parses a timestamp in the given format and returns it as a Calendar in the
 * given time zone. The formatter is pinned to the same zone so the string is
 * interpreted as local time in that zone.
 */
private Calendar parseCalendar(String str, TimeZone timeZone, String format) throws Exception {
    SimpleDateFormat parser = new SimpleDateFormat(format);
    parser.setTimeZone(timeZone);
    Calendar result = new GregorianCalendar(timeZone);
    result.setTime(parser.parse(str));
    return result;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
/**
 * The quickfix component allows sending Financial Interchange (FIX) messages
 * to the QuickFix engine.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface QuickfixjEndpointBuilderFactory {

    /**
     * Builder for endpoint consumers for the QuickFix component.
     */
    public interface QuickfixjEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        /** Switches to the advanced-options view of this consumer builder. */
        default AdvancedQuickfixjEndpointConsumerBuilder advanced() {
            return (AdvancedQuickfixjEndpointConsumerBuilder) this;
        }
        /**
         * This option allows creating the QuickFIX/J engine on demand. The
         * value true means the engine is started when the first message is
         * sent or there is a consumer configured in the route definition.
         * When the value false is used, the engine is started at endpoint
         * creation. When this parameter is missing, the value of the
         * component's property lazyCreateEngines is used.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: common
         */
        default QuickfixjEndpointConsumerBuilder lazyCreateEngine(
                boolean lazyCreateEngine) {
            doSetProperty("lazyCreateEngine", lazyCreateEngine);
            return this;
        }
        /**
         * This option allows creating the QuickFIX/J engine on demand. The
         * value true means the engine is started when the first message is
         * sent or there is a consumer configured in the route definition.
         * When the value false is used, the engine is started at endpoint
         * creation. When this parameter is missing, the value of the
         * component's property lazyCreateEngines is used.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: common
         */
        default QuickfixjEndpointConsumerBuilder lazyCreateEngine(
                String lazyCreateEngine) {
            doSetProperty("lazyCreateEngine", lazyCreateEngine);
            return this;
        }
        /**
         * The optional sessionID identifies a specific FIX session. The format
         * of the sessionID is:
         * (BeginString):(SenderCompID)/(SenderSubID)/(SenderLocationID)-(TargetCompID)/(TargetSubID)/(TargetLocationID).
         *
         * The option is a: <code>quickfix.SessionID</code> type.
         *
         * Group: common
         */
        default QuickfixjEndpointConsumerBuilder sessionID(Object sessionID) {
            doSetProperty("sessionID", sessionID);
            return this;
        }
        /**
         * The optional sessionID identifies a specific FIX session. The format
         * of the sessionID is:
         * (BeginString):(SenderCompID)/(SenderSubID)/(SenderLocationID)-(TargetCompID)/(TargetSubID)/(TargetLocationID).
         *
         * The option will be converted to a <code>quickfix.SessionID</code>
         * type.
         *
         * Group: common
         */
        default QuickfixjEndpointConsumerBuilder sessionID(String sessionID) {
            doSetProperty("sessionID", sessionID);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which means any exceptions that occur while the consumer is trying
         * to pick up incoming messages, or the like, will now be processed as
         * a message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions; these will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: consumer
         */
        default QuickfixjEndpointConsumerBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which means any exceptions that occur while the consumer is trying
         * to pick up incoming messages, or the like, will now be processed as
         * a message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions; these will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: consumer
         */
        default QuickfixjEndpointConsumerBuilder bridgeErrorHandler(
                String bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint consumers for the QuickFix component.
     */
    public interface AdvancedQuickfixjEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        /** Switches back to the basic-options view of this consumer builder. */
        default QuickfixjEndpointConsumerBuilder basic() {
            return (QuickfixjEndpointConsumerBuilder) this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice that if
         * the option bridgeErrorHandler is enabled then this option is not in
         * use. By default the consumer will deal with exceptions; these will
         * be logged at WARN or ERROR level and ignored.
         *
         * The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
         * type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedQuickfixjEndpointConsumerBuilder exceptionHandler(
                ExceptionHandler exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice that if
         * the option bridgeErrorHandler is enabled then this option is not in
         * use. By default the consumer will deal with exceptions; these will
         * be logged at WARN or ERROR level and ignored.
         *
         * The option will be converted to a
         * <code>org.apache.camel.spi.ExceptionHandler</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedQuickfixjEndpointConsumerBuilder exceptionHandler(
                String exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option is a: <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedQuickfixjEndpointConsumerBuilder exchangePattern(
                ExchangePattern exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option will be converted to a
         * <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedQuickfixjEndpointConsumerBuilder exchangePattern(
                String exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x)
         * or the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointConsumerBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x)
         * or the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointConsumerBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or
         * Camel is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointConsumerBuilder synchronous(
                boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or
         * Camel is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointConsumerBuilder synchronous(
                String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }

    /**
     * Builder for endpoint producers for the QuickFix component.
     */
    public interface QuickfixjEndpointProducerBuilder
            extends
                EndpointProducerBuilder {
        /** Switches to the advanced-options view of this producer builder. */
        default AdvancedQuickfixjEndpointProducerBuilder advanced() {
            return (AdvancedQuickfixjEndpointProducerBuilder) this;
        }
        /**
         * This option allows creating the QuickFIX/J engine on demand. The
         * value true means the engine is started when the first message is
         * sent or there is a consumer configured in the route definition.
         * When the value false is used, the engine is started at endpoint
         * creation. When this parameter is missing, the value of the
         * component's property lazyCreateEngines is used.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: common
         */
        default QuickfixjEndpointProducerBuilder lazyCreateEngine(
                boolean lazyCreateEngine) {
            doSetProperty("lazyCreateEngine", lazyCreateEngine);
            return this;
        }
        /**
         * This option allows creating the QuickFIX/J engine on demand. The
         * value true means the engine is started when the first message is
         * sent or there is a consumer configured in the route definition.
         * When the value false is used, the engine is started at endpoint
         * creation. When this parameter is missing, the value of the
         * component's property lazyCreateEngines is used.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: common
         */
        default QuickfixjEndpointProducerBuilder lazyCreateEngine(
                String lazyCreateEngine) {
            doSetProperty("lazyCreateEngine", lazyCreateEngine);
            return this;
        }
        /**
         * The optional sessionID identifies a specific FIX session. The format
         * of the sessionID is:
         * (BeginString):(SenderCompID)/(SenderSubID)/(SenderLocationID)-(TargetCompID)/(TargetSubID)/(TargetLocationID).
         *
         * The option is a: <code>quickfix.SessionID</code> type.
         *
         * Group: common
         */
        default QuickfixjEndpointProducerBuilder sessionID(Object sessionID) {
            doSetProperty("sessionID", sessionID);
            return this;
        }
        /**
         * The optional sessionID identifies a specific FIX session. The format
         * of the sessionID is:
         * (BeginString):(SenderCompID)/(SenderSubID)/(SenderLocationID)-(TargetCompID)/(TargetSubID)/(TargetLocationID).
         *
         * The option will be converted to a <code>quickfix.SessionID</code>
         * type.
         *
         * Group: common
         */
        default QuickfixjEndpointProducerBuilder sessionID(String sessionID) {
            doSetProperty("sessionID", sessionID);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes
         * to start up in situations where a producer may otherwise fail
         * during starting and cause the route to fail being started. By
         * deferring this startup to be lazy then the startup failure can be
         * handled during routing messages via Camel's routing error handlers.
         * Beware that when the first message is processed then creating and
         * starting the producer may take a little time and prolong the total
         * processing time.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default QuickfixjEndpointProducerBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes
         * to start up in situations where a producer may otherwise fail
         * during starting and cause the route to fail being started. By
         * deferring this startup to be lazy then the startup failure can be
         * handled during routing messages via Camel's routing error handlers.
         * Beware that when the first message is processed then creating and
         * starting the producer may take a little time and prolong the total
         * processing time.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default QuickfixjEndpointProducerBuilder lazyStartProducer(
                String lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint producers for the QuickFix component.
     */
    public interface AdvancedQuickfixjEndpointProducerBuilder
            extends
                EndpointProducerBuilder {
        /** Switches back to the basic-options view of this producer builder. */
        default QuickfixjEndpointProducerBuilder basic() {
            return (QuickfixjEndpointProducerBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x)
         * or the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointProducerBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x)
         * or the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointProducerBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or
         * Camel is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointProducerBuilder synchronous(
                boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or
         * Camel is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointProducerBuilder synchronous(
                String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }

    /**
     * Builder for endpoint for the QuickFix component.
     */
    public interface QuickfixjEndpointBuilder
            extends
                QuickfixjEndpointConsumerBuilder,
                QuickfixjEndpointProducerBuilder {
        /** Switches to the advanced-options view of this endpoint builder. */
        default AdvancedQuickfixjEndpointBuilder advanced() {
            return (AdvancedQuickfixjEndpointBuilder) this;
        }
        /**
         * This option allows creating the QuickFIX/J engine on demand. The
         * value true means the engine is started when the first message is
         * sent or there is a consumer configured in the route definition.
         * When the value false is used, the engine is started at endpoint
         * creation. When this parameter is missing, the value of the
         * component's property lazyCreateEngines is used.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: common
         */
        default QuickfixjEndpointBuilder lazyCreateEngine(
                boolean lazyCreateEngine) {
            doSetProperty("lazyCreateEngine", lazyCreateEngine);
            return this;
        }
        /**
         * This option allows creating the QuickFIX/J engine on demand. The
         * value true means the engine is started when the first message is
         * sent or there is a consumer configured in the route definition.
         * When the value false is used, the engine is started at endpoint
         * creation. When this parameter is missing, the value of the
         * component's property lazyCreateEngines is used.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: common
         */
        default QuickfixjEndpointBuilder lazyCreateEngine(
                String lazyCreateEngine) {
            doSetProperty("lazyCreateEngine", lazyCreateEngine);
            return this;
        }
        /**
         * The optional sessionID identifies a specific FIX session. The format
         * of the sessionID is:
         * (BeginString):(SenderCompID)/(SenderSubID)/(SenderLocationID)-(TargetCompID)/(TargetSubID)/(TargetLocationID).
         *
         * The option is a: <code>quickfix.SessionID</code> type.
         *
         * Group: common
         */
        default QuickfixjEndpointBuilder sessionID(Object sessionID) {
            doSetProperty("sessionID", sessionID);
            return this;
        }
        /**
         * The optional sessionID identifies a specific FIX session. The format
         * of the sessionID is:
         * (BeginString):(SenderCompID)/(SenderSubID)/(SenderLocationID)-(TargetCompID)/(TargetSubID)/(TargetLocationID).
         *
         * The option will be converted to a <code>quickfix.SessionID</code>
         * type.
         *
         * Group: common
         */
        default QuickfixjEndpointBuilder sessionID(String sessionID) {
            doSetProperty("sessionID", sessionID);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint for the QuickFix component.
     */
    public interface AdvancedQuickfixjEndpointBuilder
            extends
                AdvancedQuickfixjEndpointConsumerBuilder,
                AdvancedQuickfixjEndpointProducerBuilder {
        /** Switches back to the basic-options view of this endpoint builder. */
        default QuickfixjEndpointBuilder basic() {
            return (QuickfixjEndpointBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x)
         * or the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x)
         * or the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or
         * Camel is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointBuilder synchronous(boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or
         * Camel is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedQuickfixjEndpointBuilder synchronous(String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }

    public interface QuickfixjBuilders {
        /**
         * QuickFix (camel-quickfix)
         * The quickfix component allows sending Financial Interchange (FIX)
         * messages to the QuickFix engine.
         *
         * Category: messaging
         * Since: 2.1
         * Maven coordinates: org.apache.camel:camel-quickfix
         *
         * Syntax: <code>quickfix:configurationName</code>
         *
         * Path parameter: configurationName (required)
         * The configFile is the name of the QuickFIX/J configuration to use
         * for the FIX engine (located as a resource found in your classpath).
         */
        default QuickfixjEndpointBuilder quickfix(String path) {
            return QuickfixjEndpointBuilderFactory.quickfix(path);
        }
    }

    /**
     * QuickFix (camel-quickfix)
     * The quickfix component allows sending Financial Interchange (FIX)
     * messages to the QuickFix engine.
     *
     * Category: messaging
     * Since: 2.1
     * Maven coordinates: org.apache.camel:camel-quickfix
     *
     * Syntax: <code>quickfix:configurationName</code>
     *
     * Path parameter: configurationName (required)
     * The configFile is the name of the QuickFIX/J configuration to use for
     * the FIX engine (located as a resource found in your classpath).
     */
    static QuickfixjEndpointBuilder quickfix(String path) {
        // Local class so the returned builder implements both the basic and
        // advanced builder views over the same underlying endpoint.
        class QuickfixjEndpointBuilderImpl extends AbstractEndpointBuilder implements QuickfixjEndpointBuilder, AdvancedQuickfixjEndpointBuilder {
            public QuickfixjEndpointBuilderImpl(String path) {
                super("quickfix", path);
            }
        }
        return new QuickfixjEndpointBuilderImpl(path);
    }
}
| |
/**
*
* Copyright 2005-2006 The Apache Software Foundation or its licensors, as applicable.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xbean.osgi;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
/**
* @author Dain Sundstrom
* @version $Id$
* @since 2.0
*/
public class MavenBundleManager {
    /** OSGi context used to install and enumerate bundles. */
    private final BundleContext bundleContext;
    /** Root directory of the local Maven repository. */
    private final File localRepository;

    /**
     * Creates a manager that installs Maven artifacts as OSGi bundles backed
     * by jars in the local Maven repository.
     *
     * @param bundleContext the OSGi bundle context used to install bundles
     * @param localRepository the root directory of the local Maven repository
     */
    public MavenBundleManager(BundleContext bundleContext, File localRepository) {
        this.bundleContext = bundleContext;
        this.localRepository = localRepository;
    }

    /**
     * Returns the artifact as a Project; an artifact that is not already a
     * Project is wrapped in a dependency-free Project view.
     *
     * @param artifact the artifact to convert
     * @return the artifact itself if it is a Project, otherwise a Project
     *         with the same coordinates and no dependencies
     */
    public Project loadProject(Artifact artifact) {
        if (artifact instanceof Project) {
            return (Project) artifact;
        }
        return new Project(artifact.getGroupId(),
                artifact.getArtifactId(),
                artifact.getVersion(),
                artifact.getType(),
                Collections.EMPTY_SET);
    }

    /**
     * Creates a dependency-free jar Project for the given Maven coordinates.
     */
    public Project loadProject(String groupId, String artifactId, String version) {
        return new Project(groupId, artifactId, version, "jar", Collections.EMPTY_SET);
    }

    /**
     * Installs the bundle for the given Maven coordinates.
     *
     * @see #installBundle(Artifact)
     */
    public Bundle installBundle(String groupId, String artifactId, String version) throws Exception {
        return installBundle(loadProject(groupId, artifactId, version));
    }

    /**
     * Installs an OSGi bundle wrapping the given Maven artifact, then
     * recursively installs bundles for all of its declared dependencies. If a
     * bundle with the same symbolic name and version is already installed, it
     * is returned unchanged.
     *
     * @param artifact the Maven artifact to install
     * @return the newly installed (or previously installed) bundle
     * @throws Exception if the manifest cannot be built or installation fails
     */
    public Bundle installBundle(Artifact artifact) throws Exception {
        String symbolicName = artifact.getGroupId() + "." + artifact.getArtifactId();
        String bundleVersion = coerceToOsgiVersion(artifact.getVersion());

        // check if we already loaded this bundle
        Bundle[] bundles = bundleContext.getBundles();
        for (int i = 0; i < bundles.length; i++) {
            Bundle bundle = bundles[i];
            if (symbolicName.equals(bundle.getSymbolicName()) &&
                    bundleVersion.equals(bundle.getHeaders().get(Constants.BUNDLE_VERSION))) {
                return bundle;
            }
        }

        // load the project object model for this artifact
        Project project = loadProject(artifact);

        // build an OSGi manifest for the project
        Manifest manifest = createOsgiManifest(project);

        // create an in-memory jar containing only the manifest; the real
        // classes are reached through the "external:" Bundle-ClassPath entry
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        JarOutputStream jarOut = new JarOutputStream(out, manifest);
        jarOut.close();
        out.close();
        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());

        // install the in memory jar
        Bundle bundle = bundleContext.installBundle(symbolicName, in);

        // install bundles for all of the dependencies
        for (Iterator iterator = project.getDependencies().iterator(); iterator.hasNext();) {
            Artifact dependency = (Artifact) iterator.next();
            installBundle(dependency);
        }

        return bundle;
    }

    /**
     * Builds an OSGi manifest that exposes the artifact's jar (located in the
     * local repository) on the bundle class path, exports every package found
     * in the jar, and requires a bundle for each declared dependency.
     *
     * @param project the project to build a manifest for
     * @return a manifest suitable for wrapping the artifact as a bundle
     * @throws IOException if the artifact's jar cannot be read
     */
    public Manifest createOsgiManifest(Project project) throws IOException {
        String groupId = project.getGroupId();
        String artifactId = project.getArtifactId();
        String version = project.getVersion();
        String jar = groupId.replace('.', '/') + "/" + artifactId + "/" + version + "/" + project.getJar();

        StringBuffer requireBundle = new StringBuffer();
        for (Iterator iterator = project.getDependencies().iterator(); iterator.hasNext();) {
            Artifact dependency = (Artifact) iterator.next();
            if (requireBundle.length() > 0) requireBundle.append(',');
            requireBundle.append(dependency.getGroupId()).append('.').append(dependency.getArtifactId());
            requireBundle.append(";visibility:=reexport;bundle-version:=").append(coerceToOsgiVersion(dependency.getVersion()));
        }

        String jarPath = new File(localRepository, jar).getAbsolutePath();
        StringBuffer exports = createExportList(jarPath);

        Manifest manifest = new Manifest();
        Attributes attributes = manifest.getMainAttributes();
        attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0");
        attributes.putValue(Constants.BUNDLE_MANIFESTVERSION, "2");
        attributes.putValue(Constants.BUNDLE_VENDOR, groupId);
        attributes.putValue(Constants.BUNDLE_NAME, artifactId);
        attributes.putValue(Constants.BUNDLE_VERSION, coerceToOsgiVersion(version));
        attributes.putValue(Constants.BUNDLE_SYMBOLICNAME, groupId + "." + artifactId);
        attributes.putValue("Eclipse-AutoStart", "true");
        attributes.putValue(Constants.BUNDLE_CLASSPATH, ".,external:" + jarPath);
        attributes.putValue(Constants.EXPORT_PACKAGE, exports.toString());
        // requireBundle is never null here; only emit the header when the
        // project actually has dependencies
        if (requireBundle.length() > 0) {
            attributes.putValue(Constants.REQUIRE_BUNDLE, requireBundle.toString());
        }

        return manifest;
    }

    /**
     * Coerces an arbitrary Maven version into a valid OSGi version of the
     * form major.minor.micro.qualifier. Leading numeric dot/dash-separated
     * parts fill the three numeric segments; everything from the first
     * non-numeric part onward (or after the third numeric part) joins the
     * qualifier, with invalid characters replaced by '_'.
     *
     * Fix: the previous guard (partsFound &lt; 4) let a fourth numeric part
     * index past the 3-element array, so versions such as "1.2.3.4" threw
     * ArrayIndexOutOfBoundsException; such parts now become the qualifier.
     */
    private static String coerceToOsgiVersion(String version) {
        int partsFound = 0;
        String[] versionParts = new String[] { "0", "0", "0"};
        StringBuffer qualifier = new StringBuffer();
        for (StringTokenizer stringTokenizer = new StringTokenizer(version, ".-"); stringTokenizer.hasMoreTokens();) {
            String part = stringTokenizer.nextToken();
            if (partsFound < 3) {
                try {
                    Integer.parseInt(part);
                    versionParts[partsFound++] = part;
                } catch (NumberFormatException e) {
                    // first non-numeric part starts the qualifier and stops
                    // numeric-segment collection
                    partsFound = 3;
                    qualifier.append(coerceToOsgiQualifier(part));
                }
            } else {
                if (qualifier.length() > 0) qualifier.append("_");
                qualifier.append(coerceToOsgiQualifier(part));
            }
        }

        StringBuffer osgiVersion = new StringBuffer();
        osgiVersion.append(versionParts[0]).append(".").append(versionParts[1]).append(".").append(versionParts[2]);
        if (qualifier.length() > 0) {
            osgiVersion.append(".").append(qualifier);
        }
        return osgiVersion.toString();
    }

    /**
     * Replaces every character that is not a letter, digit, '_' or '-' with
     * '_' so the string is a legal OSGi version qualifier.
     */
    private static String coerceToOsgiQualifier(String qualifier) {
        char[] chars = qualifier.toCharArray();
        for (int i = 0; i < chars.length; i++) {
            char c = chars[i];
            if (!Character.isLetterOrDigit(c) && c != '_' && c != '-') {
                chars[i] = '_';
            }
        }
        return new String(chars);
    }

    /**
     * Scans the jar at the given path and returns a ';'-separated list of the
     * package names it contains, suitable for an Export-Package header. The
     * default package and META-INF are never exported.
     *
     * @param jarPath absolute path of the jar to scan
     * @throws IOException if the jar cannot be read
     */
    private static StringBuffer createExportList(String jarPath) throws IOException {
        Set packages = new HashSet();
        FileInputStream in = null;
        try {
            in = new FileInputStream(jarPath);
            JarInputStream jarIn = new JarInputStream(in);
            for (JarEntry jarEntry = jarIn.getNextJarEntry(); jarEntry != null; jarEntry = jarIn.getNextJarEntry()) {
                String packageName = jarEntry.getName();
                if (!jarEntry.isDirectory()) {
                    int index = packageName.lastIndexOf("/");
                    // we can't export the default package
                    if (index > 0) {
                        packageName = packageName.substring(0, index);
                        if (!packageName.equals("META-INF")) {
                            packages.add(packageName.replace('/', '.'));
                        }
                    }
                }
            }
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // best-effort close; the package set has already been built
                }
            }
        }

        StringBuffer exports = new StringBuffer();
        for (Iterator iterator = packages.iterator(); iterator.hasNext();) {
            String packageName = (String) iterator.next();
            if (exports.length() > 0) exports.append(";");
            exports.append(packageName);
        }
        return exports;
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.util.ui;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.ui.scale.ScaleType;
import com.intellij.ui.scale.Scaler;
import org.jetbrains.annotations.NotNull;
import javax.swing.plaf.UIResource;
import java.awt.*;
import static java.lang.Math.ceil;
/**
* A dimension which updates its scaled size when the user scale factor changes (see {@link ScaleType}).
* <p></p>
* Say, a dimension is created as 100x100 and the user scale factor is 2.0. Its scaled size will be 200x200.
* Then the user scale factor changes to, say, 3.0. The new scaled size will become 300x300.
*
* @author Konstantin Bulenkov
* @author tav
*/
public class JBDimension extends Dimension {
  /** The scaled size kept in double precision; the int size is derived via ceil. */
  Size2D size2D;

  private final MyScaler scaler = new MyScaler();

  /** A double width/height pair with value-based equality. */
  private static final class Size2D {
    double width;
    double height;

    Size2D(double width, double height) {
      this.width = width;
      this.height = height;
    }

    int intWidth() {
      return (int)ceil(width);
    }

    int intHeight() {
      return (int)ceil(height);
    }

    Size2D copy() {
      return new Size2D(width, height);
    }

    void set(double width, double height) {
      this.width = width;
      this.height = height;
    }

    @Override
    public boolean equals(Object obj) {
      // Value-based equality. Without this override, JBDimension.equals fell
      // back to Object identity on size2D, so two independently created
      // dimensions of the same size were never equal.
      if (obj == this) return true;
      if (!(obj instanceof Size2D)) return false;
      Size2D that = (Size2D)obj;
      return Double.compare(width, that.width) == 0 &&
             Double.compare(height, that.height) == 0;
    }

    @Override
    public int hashCode() {
      return 31 * Double.hashCode(width) + Double.hashCode(height);
    }
  }

  /**
   * A new dimension with the provided unscaled size.
   * <p></p>
   * The real dimension size will be scaled according to the current user scale factor.
   *
   * @param width unscaled width
   * @param height unscaled height
   */
  public JBDimension(int width, int height) {
    this(width, height, false);
  }

  public static @NotNull JBDimension size(Dimension size) {
    if (size instanceof JBDimension) {
      JBDimension newSize = ((JBDimension)size).newSize();
      return size instanceof UIResource ? newSize.asUIResource() : newSize;
    }
    return new JBDimension(size.width, size.height);
  }

  /**
   * A new dimension with the provided unscaled or pre-scaled size.
   * <p></p>
   * When {@code preScaled} is true, the passed size will be treated as the current scaled
   * dimension size (and the unscaled dimension size will be calculated according to the
   * current user scale factor). Passing {@code preScaled} as false is equal to calling
   * {@link #JBDimension(int, int)}.
   *
   * @param width unscaled or pre-scaled width
   * @param height unscaled or pre-scaled height
   * @param preScaled whether the passed size is pre-scaled or unscaled
   */
  public JBDimension(int width, int height, boolean preScaled) {
    this(width, (double)height, preScaled);
  }

  private JBDimension(double width, double height, boolean preScaled) {
    size2D = new Size2D(preScaled ? width : scale(width), preScaled ? height : scale(height));
    set(size2D);
  }

  private static double scale(double size) {
    // Never scale below -1; presumably -1 is kept intact as an "unspecified
    // size" marker — TODO confirm against callers.
    return Math.max(-1, JBUIScale.scale((float)size));
  }

  public static @NotNull JBDimension create(Dimension from, boolean preScaled) {
    if (from instanceof JBDimension) {
      return ((JBDimension)from);
    }
    return new JBDimension(from.width, from.height, preScaled);
  }

  public static @NotNull JBDimension create(Dimension from) {
    return create(from, false);
  }

  public @NotNull JBDimensionUIResource asUIResource() {
    return new JBDimensionUIResource(this);
  }

  public static final class JBDimensionUIResource extends JBDimension implements UIResource {
    public JBDimensionUIResource(JBDimension size) {
      super(0, 0);
      set(size.width, size.height);
      size2D = size.size2D.copy();
    }
  }

  /** @return a new dimension with the given unscaled width and this dimension's height */
  public @NotNull JBDimension withWidth(int width) {
    JBDimension size = new JBDimension(0, 0);
    size.size2D.set(scale(width), size2D.height);
    size.set(size.size2D.intWidth(), height);
    return size;
  }

  /** @return a new dimension with this dimension's width and the given unscaled height */
  public @NotNull JBDimension withHeight(int height) {
    JBDimension size = new JBDimension(0, 0);
    size.size2D.set(size2D.width, scale(height));
    size.set(width, size.size2D.intHeight());
    return size;
  }

  protected void set(int width, int height) {
    this.width = width;
    this.height = height;
  }

  protected void set(Size2D size2d) {
    set(size2d.intWidth(), size2d.intHeight());
  }

  /**
   * Updates the size according to current {@link ScaleType#USR_SCALE} if necessary.
   * @return whether the size has been updated
   */
  public boolean update() {
    if (!scaler.needUpdate()) return false;

    size2D.set(scaler.scaleVal(size2D.width), scaler.scaleVal(size2D.height));
    set(size2D);

    scaler.update();
    return true;
  }

  /**
   * @return this JBDimension with updated size
   */
  public @NotNull JBDimension size() {
    update();
    return this;
  }

  /**
   * @return new JBDimension with updated size
   */
  public @NotNull JBDimension newSize() {
    update();
    return new JBDimension(size2D.width, size2D.height, true);
  }

  /**
   * @return updated width
   */
  public int width() {
    update();
    return width;
  }

  /**
   * @return updated height
   */
  public int height() {
    update();
    return height;
  }

  /**
   * @return updated double width
   */
  public double width2d() {
    update();
    return size2D.width;
  }

  /**
   * @return updated double height
   */
  public double height2d() {
    update();
    return size2D.height;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) return true;
    if (!(obj instanceof JBDimension)) return false;
    JBDimension that = (JBDimension)obj;
    return size2D.equals(that.size2D);
  }

  @Override
  public int hashCode() {
    // Overridden together with equals (hashCode was previously missing, which
    // broke the equals/hashCode contract). Consistent: equal size2D values
    // produce equal hashes.
    return size2D.hashCode();
  }
}
/**
 * Tracks the user scale factor at which a {@code JBDimension} was last
 * computed so the dimension can detect when the factor changes.
 * NOTE(review): relies on {@code initialScale} inherited from {@code Scaler};
 * semantics assumed from usage here — confirm against the Scaler base class.
 */
final class MyScaler extends Scaler {
    @Override
    protected double currentScale() {
        // The current user scale factor (scale of 1.0f).
        return JBUIScale.scale(1f);
    }

    /** @return whether the user scale factor changed since the last {@link #update()} */
    boolean needUpdate() {
        return initialScale != JBUIScale.scale(1f);
    }

    /** Records the current scale factor as the new baseline. */
    public void update() {
        setPreScaled(true); // updates initialScale
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.util.ArrayList;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.JobStatusChangeEvent.EventType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import junit.framework.TestCase;
/**
* Test whether the JobInProgressListeners are informed as expected.
*/
public class TestJobInProgressListener extends TestCase {
  private static final Log LOG =
    LogFactory.getLog(TestJobInProgressListener.class);
  private final Path testDir = new Path("test-jip-listener-update");

  /** Builds a "waiting" job configuration that blocks on the given signal files. */
  private JobConf configureJob(JobConf conf, int m, int r,
                               Path inDir, Path outputDir,
                               String mapSignalFile, String redSignalFile)
  throws IOException {
    TestJobTrackerRestart.configureWaitingJobConf(conf, inDir, outputDir,
        m, r, "job-listener-test",
        mapSignalFile, redSignalFile);
    return conf;
  }

  /**
   * This test case tests if external updates to JIP do not result into
   * undesirable effects
   * Test is as follows
   *   - submit 2 jobs of normal priority. job1 is a waiting job which waits and
   *     blocks the cluster
   *   - change one parameter of job2 such that the job bumps up in the queue
   *   - check if the queue looks ok
   */
  public void testJobQueueChanges() throws IOException {
    LOG.info("Testing job queue changes");
    JobConf conf = new JobConf();
    MiniDFSCluster dfs = new MiniDFSCluster(conf, 1, true, null, null);
    dfs.waitActive();
    FileSystem fileSys = dfs.getFileSystem();

    dfs.startDataNodes(conf, 1, true, null, null, null, null);
    dfs.waitActive();

    String namenode = (dfs.getFileSystem()).getUri().getHost() + ":"
                      + (dfs.getFileSystem()).getUri().getPort();
    MiniMRCluster mr = new MiniMRCluster(1, namenode, 1);
    JobClient jobClient = new JobClient(mr.createJobConf());

    // clean up
    fileSys.delete(testDir, true);

    if (!fileSys.mkdirs(testDir)) {
      throw new IOException("Mkdirs failed to create " + testDir.toString());
    }

    // Write the input file
    Path inDir = new Path(testDir, "input");
    Path shareDir = new Path(testDir, "share");
    String mapSignalFile = TestJobTrackerRestart.getMapSignalFile(shareDir);
    String redSignalFile = TestJobTrackerRestart.getReduceSignalFile(shareDir);
    TestRackAwareTaskPlacement.writeFile(dfs.getNameNode(), conf,
                                         new Path(inDir + "/file"),
                                         (short)1);

    JobQueueJobInProgressListener myListener =
      new JobQueueJobInProgressListener();

    // add the listener
    mr.getJobTrackerRunner().getJobTracker()
      .addJobInProgressListener(myListener);

    // big blocking job
    Path outputDir = new Path(testDir, "output");
    Path newOutputDir = outputDir.suffix("0");
    JobConf job1 = configureJob(mr.createJobConf(), 10, 0, inDir, newOutputDir,
                                mapSignalFile, redSignalFile);

    // short blocked job
    newOutputDir = outputDir.suffix("1");
    JobConf job2 = configureJob(mr.createJobConf(), 1, 0, inDir, newOutputDir,
                                mapSignalFile, redSignalFile);

    RunningJob rJob1 = jobClient.submitJob(job1);
    LOG.info("Running job " + rJob1.getID().toString());

    RunningJob rJob2 = jobClient.submitJob(job2);
    LOG.info("Running job " + rJob2.getID().toString());

    // I. Check job-priority change
    LOG.info("Testing job priority changes");

    // bump up job2's priority
    LOG.info("Increasing job2's priority to HIGH");
    rJob2.setJobPriority("HIGH");

    // check if the queue is sane
    assertTrue("Priority change garbles the queue",
               myListener.getJobQueue().size() == 2);

    JobInProgress[] queue =
      myListener.getJobQueue().toArray(new JobInProgress[0]);

    // check if the bump has happened
    assertTrue("Priority change failed to bump up job2 in the queue",
               queue[0].getJobID().equals(rJob2.getID()));
    assertTrue("Priority change failed to bump down job1 in the queue",
               queue[1].getJobID().equals(rJob1.getID()));

    assertEquals("Priority change has garbled the queue",
                 2, queue.length);

    // II. Check start-time change
    LOG.info("Testing job start-time changes");

    // reset the priority which will make the order as
    //  - job1
    //  - job2
    // this will help in bumping job2 on start-time change
    LOG.info("Increasing job2's priority to NORMAL");
    rJob2.setJobPriority("NORMAL");

    // create the change event
    JobInProgress jip2 = mr.getJobTrackerRunner().getJobTracker()
                           .getJob(rJob2.getID());
    JobInProgress jip1 = mr.getJobTrackerRunner().getJobTracker()
                           .getJob(rJob1.getID());

    JobStatus prevStatus = (JobStatus)jip2.getStatus().clone();

    // change job2's start-time and the status
    jip2.startTime = jip1.startTime - 1;
    jip2.status.setStartTime(jip2.startTime);

    JobStatus newStatus = (JobStatus)jip2.getStatus().clone();

    // inform the listener
    LOG.info("Updating the listener about job2's start-time change");
    JobStatusChangeEvent event =
      new JobStatusChangeEvent(jip2, EventType.START_TIME_CHANGED,
                               prevStatus, newStatus);
    myListener.jobUpdated(event);

    // check if the queue is sane
    assertTrue("Start time change garbles the queue",
               myListener.getJobQueue().size() == 2);

    queue = myListener.getJobQueue().toArray(new JobInProgress[0]);

    // check if the bump has happened
    assertTrue("Start time change failed to bump up job2 in the queue",
               queue[0].getJobID().equals(rJob2.getID()));
    assertTrue("Start time change failed to bump down job1 in the queue",
               queue[1].getJobID().equals(rJob1.getID()));

    assertEquals("Start time change has garbled the queue",
                 2, queue.length);

    // signal the maps to complete
    TestJobTrackerRestart.signalTasks(dfs, fileSys, true,
                                      mapSignalFile, redSignalFile);

    // check if job completion leaves the queue sane
    while (rJob2.getJobState() != JobStatus.SUCCEEDED) {
      TestJobTrackerRestart.waitFor(10);
    }

    while (rJob1.getJobState() != JobStatus.SUCCEEDED) {
      TestJobTrackerRestart.waitFor(10);
    }

    assertTrue("Job completion garbles the queue",
               myListener.getJobQueue().size() == 0);
  }

  // A listener that inits the tasks one at a time and also listens to the
  // events
  public static class MyListener extends JobInProgressListener {
    private List<JobInProgress> wjobs = new ArrayList<JobInProgress>();
    private List<JobInProgress> jobs = new ArrayList<JobInProgress>();

    /** @return whether the job is tracked in either the waiting or the running list */
    public boolean contains (JobID id) {
      return contains(id, true) || contains(id, false);
    }

    /** @param waiting when true checks the waiting list, otherwise the running list */
    public boolean contains (JobID id, boolean waiting) {
      List<JobInProgress> queue = waiting ? wjobs : jobs;
      for (JobInProgress job : queue) {
        if (job.getJobID().equals(id)) {
          return true;
        }
      }
      return false;
    }

    public void jobAdded(JobInProgress job) {
      LOG.info("Job " + job.getJobID().toString() + " added");
      wjobs.add(job);
    }

    public void jobRemoved(JobInProgress job) {
      LOG.info("Job " + job.getJobID().toString() + " removed");
    }

    public void jobUpdated(JobChangeEvent event) {
      LOG.info("Job " + event.getJobInProgress().getJobID().toString() + " updated");
      // remove the job if the event is for a completed job
      if (event instanceof JobStatusChangeEvent) {
        JobStatusChangeEvent statusEvent = (JobStatusChangeEvent)event;
        if (statusEvent.getEventType() == EventType.RUN_STATE_CHANGED) {
          // check if the state changes from
          // RUNNING->COMPLETE(SUCCESS/KILLED/FAILED)
          JobInProgress jip = event.getJobInProgress();
          String jobId = jip.getJobID().toString();
          if (statusEvent.getJobInProgress().isComplete()) {
            LOG.info("Job " + jobId + " deleted from the running queue");
            jobs.remove(jip);
          } else {
            // PREP->RUNNING
            LOG.info("Job " + jobId + " deleted from the waiting queue");
            wjobs.remove(jip);
            jobs.add(jip);
          }
        }
      }
    }
  }

  /** Checks that a failed job is removed from the listener's queues. */
  public void testJobFailure() throws Exception {
    // Fixed copy-paste defect: this previously logged "Testing job-success".
    LOG.info("Testing job-failure");

    MyListener myListener = new MyListener();

    MiniMRCluster mr = new MiniMRCluster(1, "file:///", 1);

    JobConf job = mr.createJobConf();

    mr.getJobTrackerRunner().getJobTracker()
      .addJobInProgressListener(myListener);

    // submit and fail the job
    JobID id = TestJobKillAndFail.runJobFail(job);

    // check if the job failure was notified
    assertFalse("Missing event notification on failing a running job",
                myListener.contains(id));
  }

  /** Checks that a killed job is removed from the listener's queues. */
  public void testJobKill() throws Exception {
    LOG.info("Testing job-kill");

    MyListener myListener = new MyListener();

    MiniMRCluster mr = new MiniMRCluster(1, "file:///", 1);

    JobConf job = mr.createJobConf();

    mr.getJobTrackerRunner().getJobTracker()
      .addJobInProgressListener(myListener);

    // submit and kill the job
    JobID id = TestJobKillAndFail.runJobKill(job);

    // check if the job failure was notified
    assertFalse("Missing event notification on killing a running job",
                myListener.contains(id));
  }

  /** Checks that a job moves waiting -> running -> removed as it succeeds. */
  public void testJobSuccess() throws Exception {
    LOG.info("Testing job-success");
    MyListener myListener = new MyListener();

    MiniMRCluster mr = new MiniMRCluster(1, "file:///", 1);

    JobConf job = mr.createJobConf();

    mr.getJobTrackerRunner().getJobTracker()
      .addJobInProgressListener(myListener);

    // submit the job
    RunningJob rJob = TestJobKillAndFail.runJob(job);

    // wait for the job to be running
    while (rJob.getJobState() != JobStatus.RUNNING) {
      TestJobTrackerRestart.waitFor(10);
    }
    LOG.info("Job " + rJob.getID().toString() + " started running");

    // check if the listener was updated about this change
    assertFalse("Missing event notification for a running job",
                myListener.contains(rJob.getID(), true));

    while (rJob.getJobState() != JobStatus.SUCCEEDED) {
      TestJobTrackerRestart.waitFor(10);
    }

    // check if the job success was notified
    assertFalse("Missing event notification for a successful job",
                myListener.contains(rJob.getID(), false));
  }
}
| |
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import db.Database;
/**
 * Servlet that handles the posting of info from the user-management
 * area.
*/
public class SubmitData extends HttpServlet{

    /**
     * Legacy field kept for compatibility with the original class layout.
     * Servlet instances are shared across request threads, so storing
     * per-request state here is a race; the handlers below therefore receive
     * the response as a parameter and never read this field.
     */
    HttpServletResponse response;

    /**
     * Handles inline-edit posts from the user-management area.
     * Expected parameters: {@code name} (the field being edited), {@code pk}
     * (the row key, or "doctorId.patientId" for family_doctor fields) and
     * {@code value} (the new value). Responds 200 on success and 400 with a
     * short message on validation or database failure.
     */
    public void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException{
        this.response = response; // kept for compatibility; not used by the handlers

        // Only admins may use this page. The original code reported the failure
        // through a not-yet-assigned response field (NPE) and then kept
        // processing the request anyway; reject and stop here instead.
        if (!"a".equals(request.getSession().getAttribute("class"))){
            sendFailure(response, "Only admins can use this page");
            return;
        }

        response.setContentType("text/html");

        String value = request.getParameter("value");
        String pk = request.getParameter("pk");
        String name = request.getParameter("name");

        System.out.println("DEBUG received values: " + pk + " " + name + " " + value);

        if (name == null){
            sendFailure(response, "value not understood");
        } else if (name.equals("username")){
            handleUsername(response, pk, value);
        } else if (name.equals("password")) {
            handlePassword(response, pk, value);
        } else if (name.equals("date")){
            handleDateRegistered(response, pk, value);
        } else if (name.equals("class")) {
            handleClass(response, pk, value);
        } else if (name.equals("firstname")){
            handleFirstName(response, pk, value);
        } else if (name.equals("lastname")){
            handleLastName(response, pk, value);
        } else if (name.equals("address")){
            handleAddress(response, pk, value);
        } else if (name.equals("email")){
            handleEmail(response, pk, value);
        } else if (name.equals("phone")) {
            handlePhone(response, pk, value);
        } else if (name.equals("doctorid")){
            String[] pks = pk.split("\\.");
            handleDocID(response, pks[0], pks[1], value);
        } else if (name.equals("patientid")){
            String[] pks = pk.split("\\.");
            handlePatientID(response, pks[0], pks[1], value);
        } else {
            sendFailure(response, "value not understood");
        }
    }

    // --- per-field handlers: validate the value, then run a parameterized UPDATE ---

    private void handleUsername(HttpServletResponse response, String pk, String value){
        if (noSpaces(value)){
            runUpdate(response, "UPDATE users SET user_name = ? WHERE user_name = ?", value, pk);
        } else {
            sendFailure(response, "No spaces allowed!");
        }
    }

    private void handlePassword(HttpServletResponse response, String pk, String value){
        if (noSpaces(value)){
            runUpdate(response, "UPDATE users SET password = ? WHERE user_name = ?", value, pk);
        } else {
            sendFailure(response, "No spaces allowed!");
        }
    }

    private void handleDateRegistered(HttpServletResponse response, String pk, String value){
        // to_date() keeps the original YYYY-MM-DD parsing; both values are
        // bound as parameters instead of concatenated into the SQL.
        runUpdate(response,
                  "UPDATE users SET date_registered = to_date(?, 'YYYY-MM-DD') WHERE user_name = ?",
                  value, pk);
    }

    private void handleClass(HttpServletResponse response, String pk, String value){
        if (length(value, 1)){
            runUpdate(response, "UPDATE users SET class = ? WHERE user_name = ?", value, pk);
        } else {
            sendFailure(response, "Must be 1 character");
        }
    }

    private void handleFirstName(HttpServletResponse response, String pk, String value){
        updateName(response, "UPDATE persons SET first_name = ? WHERE person_id = ?", pk, value);
    }

    private void handleLastName(HttpServletResponse response, String pk, String value){
        updateName(response, "UPDATE persons SET last_name = ? WHERE person_id = ?", pk, value);
    }

    private void handleAddress(HttpServletResponse response, String pk, String value){
        if (length(value, 128)){
            runUpdate(response, "UPDATE persons SET address = ? WHERE person_id = ?", value, pk);
        } else {
            sendFailure(response, "Must be less than 128 chars");
        }
    }

    private void handleEmail(HttpServletResponse response, String pk, String value){
        if (value.contains("@") && value.contains(".")){
            if (length(value, 128)){
                runUpdate(response, "UPDATE persons SET email = ? WHERE person_id = ?", value, pk);
            } else {
                sendFailure(response, "Must be less than 128 chars");
            }
        } else {
            sendFailure(response, "Not a valid email");
        }
    }

    private void handlePhone(HttpServletResponse response, String pk, String value){
        if (numbersOnly(value)){
            runUpdate(response, "UPDATE persons SET phone = ? WHERE person_id = ?", value, pk);
        } else {
            sendFailure(response, "Can only contain digits");
        }
    }

    private void handleDocID(HttpServletResponse response, String doc, String pat, String value){
        if (numbersOnly(value)){
            runUpdate(response,
                      "UPDATE family_doctor SET doctor_id = ? WHERE doctor_id = ? AND patient_id = ?",
                      value, doc, pat);
        } else {
            sendFailure(response, "only numbers allowed");
        }
    }

    private void handlePatientID(HttpServletResponse response, String doc, String pat, String value){
        if (numbersOnly(value)){
            runUpdate(response,
                      "UPDATE family_doctor SET patient_id = ? WHERE doctor_id = ? AND patient_id = ?",
                      value, doc, pat);
        } else {
            sendFailure(response, "Only numbers allowed");
        }
    }

    /** Shared validation for first/last name updates. */
    private void updateName(HttpServletResponse response, String query, String pk, String value){
        if (noSpaces(value)){
            if (length(value, 24)){
                runUpdate(response, query, value, pk);
            } else {
                sendFailure(response, "Must be less than 24 chars");
            }
        } else {
            sendFailure(response, "No spaces Allowed");
        }
    }

    /**
     * Rejects values containing an inner space. (String.split(" ") drops
     * trailing empty tokens, so a single trailing space still passes —
     * behavior preserved from the original implementation.)
     */
    private boolean noSpaces(String input){
        return input.split(" ").length <= 1;
    }

    /** @return true when the value is one or more digits */
    private boolean numbersOnly(String input){
        return input.matches("^[\\d]+$");
    }

    /** @return true when the value is at most {@code len} characters long */
    private boolean length(String input, int len){
        return input.length() <= len;
    }

    /**
     * Executes a parameterized UPDATE and reports the outcome to the client.
     * PreparedStatement closes the SQL-injection holes of the original
     * string-concatenated queries, and executeUpdate() replaces the incorrect
     * executeQuery() that was used for UPDATE statements.
     */
    private void runUpdate(HttpServletResponse response, String sql, String... params){
        Database db = new Database();
        System.out.println("DEBUG, query string: " + sql);
        try{
            Connection conn = db.getConnection();
            PreparedStatement stmt = conn.prepareStatement(sql);
            try {
                for (int i = 0; i < params.length; i++){
                    stmt.setString(i + 1, params[i]);
                }
                stmt.executeUpdate();
                sendOK(response);
            } finally {
                stmt.close(); // the original leaked the Statement/ResultSet
            }
        } catch (Exception e){
            System.out.println("Error updating data: " + e.getMessage());
            sendFailure(response, "That input is not correct");
        } finally {
            db.close();
        }
    }

    private void sendOK(HttpServletResponse response){
        // setStatus, not sendError: sendError(SC_OK) would commit the
        // container's error page instead of a plain 200 response.
        response.setStatus(HttpServletResponse.SC_OK);
    }

    private void sendFailure(HttpServletResponse response, String msg){
        try{
            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
            response.getWriter().print(msg);
        } catch (IOException e){
            System.out.println("Error sending repsonse: " + e.getMessage());
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.spi.block;
import org.openjdk.jol.info.ClassLayout;
import javax.annotation.Nullable;
import java.util.Optional;
import java.util.function.BiConsumer;
import static io.airlift.slice.SizeOf.sizeOf;
import static io.prestosql.spi.block.BlockUtil.checkArrayRange;
import static io.prestosql.spi.block.BlockUtil.checkValidRegion;
import static io.prestosql.spi.block.BlockUtil.compactArray;
import static io.prestosql.spi.block.BlockUtil.countUsedPositions;
/**
 * A read-only {@link Block} of 16-bit values backed by a {@code short[]} plus
 * an optional {@code boolean[]} null mask. An instance may be a zero-copy view
 * over arrays shared with other blocks, starting at {@code arrayOffset}.
 */
public class ShortArrayBlock
        implements Block
{
    // Shallow heap size of one ShortArrayBlock instance, for retained-size accounting.
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(ShortArrayBlock.class).instanceSize();

    // Index of this block's first position within values/valueIsNull.
    private final int arrayOffset;
    private final int positionCount;
    // Null mask; a null array means "no nulls in this block".
    @Nullable
    private final boolean[] valueIsNull;
    private final short[] values;

    // Logical size: one short plus one null-mask byte per position.
    private final long sizeInBytes;
    // Physical size: instance header plus the (possibly shared) backing arrays.
    private final long retainedSizeInBytes;

    public ShortArrayBlock(int positionCount, Optional<boolean[]> valueIsNull, short[] values)
    {
        this(0, positionCount, valueIsNull.orElse(null), values);
    }

    // Package-private constructor used to create offset views without copying.
    ShortArrayBlock(int arrayOffset, int positionCount, boolean[] valueIsNull, short[] values)
    {
        if (arrayOffset < 0) {
            throw new IllegalArgumentException("arrayOffset is negative");
        }
        this.arrayOffset = arrayOffset;
        if (positionCount < 0) {
            throw new IllegalArgumentException("positionCount is negative");
        }
        this.positionCount = positionCount;

        if (values.length - arrayOffset < positionCount) {
            throw new IllegalArgumentException("values length is less than positionCount");
        }
        this.values = values;

        if (valueIsNull != null && valueIsNull.length - arrayOffset < positionCount) {
            throw new IllegalArgumentException("isNull length is less than positionCount");
        }
        this.valueIsNull = valueIsNull;

        sizeInBytes = (Short.BYTES + Byte.BYTES) * (long) positionCount;
        retainedSizeInBytes = INSTANCE_SIZE + sizeOf(valueIsNull) + sizeOf(values);
    }

    @Override
    public long getSizeInBytes()
    {
        return sizeInBytes;
    }

    @Override
    public long getRegionSizeInBytes(int position, int length)
    {
        // Fixed-width entries: region size is independent of the position.
        return (Short.BYTES + Byte.BYTES) * (long) length;
    }

    @Override
    public long getPositionsSizeInBytes(boolean[] positions)
    {
        return (Short.BYTES + Byte.BYTES) * (long) countUsedPositions(positions);
    }

    @Override
    public long getRetainedSizeInBytes()
    {
        return retainedSizeInBytes;
    }

    @Override
    public long getEstimatedDataSizeForStats(int position)
    {
        // Nulls contribute no data bytes to statistics.
        return isNull(position) ? 0 : Short.BYTES;
    }

    @Override
    public void retainedBytesForEachPart(BiConsumer<Object, Long> consumer)
    {
        // Report each retained component separately so shared arrays can be
        // de-duplicated by the caller.
        consumer.accept(values, sizeOf(values));
        if (valueIsNull != null) {
            consumer.accept(valueIsNull, sizeOf(valueIsNull));
        }
        consumer.accept(this, (long) INSTANCE_SIZE);
    }

    @Override
    public int getPositionCount()
    {
        return positionCount;
    }

    @Override
    public short getShort(int position, int offset)
    {
        checkReadablePosition(position);
        // Entries are single shorts, so the only valid intra-entry offset is 0.
        if (offset != 0) {
            throw new IllegalArgumentException("offset must be zero");
        }
        return values[position + arrayOffset];
    }

    @Override
    public boolean mayHaveNull()
    {
        return valueIsNull != null;
    }

    @Override
    public boolean isNull(int position)
    {
        checkReadablePosition(position);
        return valueIsNull != null && valueIsNull[position + arrayOffset];
    }

    @Override
    public void writePositionTo(int position, BlockBuilder blockBuilder)
    {
        checkReadablePosition(position);
        blockBuilder.writeShort(values[position + arrayOffset]);
        blockBuilder.closeEntry();
    }

    @Override
    public Block getSingleValueBlock(int position)
    {
        checkReadablePosition(position);
        return new ShortArrayBlock(
                0,
                1,
                isNull(position) ? new boolean[] {true} : null,
                new short[] {values[position + arrayOffset]});
    }

    @Override
    public Block copyPositions(int[] positions, int offset, int length)
    {
        checkArrayRange(positions, offset, length);

        boolean[] newValueIsNull = null;
        if (valueIsNull != null) {
            newValueIsNull = new boolean[length];
        }
        short[] newValues = new short[length];
        for (int i = 0; i < length; i++) {
            int position = positions[offset + i];
            checkReadablePosition(position);
            if (valueIsNull != null) {
                newValueIsNull[i] = valueIsNull[position + arrayOffset];
            }
            newValues[i] = values[position + arrayOffset];
        }
        return new ShortArrayBlock(0, length, newValueIsNull, newValues);
    }

    @Override
    public Block getRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);

        // Zero-copy view over the same backing arrays.
        return new ShortArrayBlock(positionOffset + arrayOffset, length, valueIsNull, values);
    }

    @Override
    public Block copyRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);

        positionOffset += arrayOffset;
        boolean[] newValueIsNull = valueIsNull == null ? null : compactArray(valueIsNull, positionOffset, length);
        short[] newValues = compactArray(values, positionOffset, length);

        // compactArray returns the original array when it is already compact,
        // in which case this block can be returned unchanged.
        if (newValueIsNull == valueIsNull && newValues == values) {
            return this;
        }
        return new ShortArrayBlock(0, length, newValueIsNull, newValues);
    }

    @Override
    public String getEncodingName()
    {
        return ShortArrayBlockEncoding.NAME;
    }

    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder("ShortArrayBlock{");
        sb.append("positionCount=").append(getPositionCount());
        sb.append('}');
        return sb.toString();
    }

    private void checkReadablePosition(int position)
    {
        if (position < 0 || position >= getPositionCount()) {
            throw new IllegalArgumentException("position is not valid");
        }
    }
}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.config;
import com.hazelcast.internal.config.ConfigSections;
import com.hazelcast.internal.config.MemberDomConfigProcessor;
import com.hazelcast.internal.config.XmlConfigLocator;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.internal.nio.IOUtil;
import com.hazelcast.internal.util.ExceptionUtil;
import com.hazelcast.spi.annotation.PrivateApi;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Properties;
import static com.hazelcast.instance.BuildInfoProvider.HAZELCAST_INTERNAL_OVERRIDE_VERSION;
import static com.hazelcast.internal.util.Preconditions.checkNotNull;
import static com.hazelcast.internal.util.Preconditions.checkTrue;
import static com.hazelcast.internal.util.StringUtil.LINE_SEPARATOR;
/**
 * An XML {@link ConfigBuilder} implementation.
*/
public class XmlConfigBuilder extends AbstractXmlConfigBuilder implements ConfigBuilder {
private static final ILogger LOGGER = Logger.getLogger(XmlConfigBuilder.class);
private final InputStream in;
private File configurationFile;
private URL configurationUrl;
/**
* Constructs a XmlConfigBuilder that reads from the provided XML file.
*
* @param xmlFileName the name of the XML file that the XmlConfigBuilder reads from
* @throws FileNotFoundException if the file can't be found
*/
    public XmlConfigBuilder(String xmlFileName) throws FileNotFoundException {
        // Open the stream via the InputStream constructor, then remember the
        // file so build() can record it on the resulting Config.
        this(new FileInputStream(xmlFileName));
        this.configurationFile = new File(xmlFileName);
    }
/**
* Constructs a XmlConfigBuilder that reads from the given InputStream.
*
* @param inputStream the InputStream containing the XML configuration
* @throws IllegalArgumentException if inputStream is {@code null}
*/
    public XmlConfigBuilder(InputStream inputStream) {
        // checkTrue throws IllegalArgumentException on a null stream.
        checkTrue(inputStream != null, "inputStream can't be null");
        this.in = inputStream;
    }
/**
* Constructs a XMLConfigBuilder that reads from the given URL.
*
* @param url the given url that the XMLConfigBuilder reads from
* @throws IOException if URL is invalid
*/
    public XmlConfigBuilder(URL url) throws IOException {
        checkNotNull(url, "URL is null!");
        // Open the stream eagerly and remember the URL so build() can record
        // it on the resulting Config.
        this.in = url.openStream();
        this.configurationUrl = url;
    }
/**
* Constructs a XmlConfigBuilder that tries to find a usable XML configuration file.
*/
    public XmlConfigBuilder() {
        // Delegate with a null locator: the locator constructor then creates a
        // default XmlConfigLocator and searches every supported location.
        this((XmlConfigLocator) null);
    }
/**
 * Constructs a {@link XmlConfigBuilder} that loads the configuration
 * with the provided {@link XmlConfigLocator}.
 * <p>
 * If the provided {@link XmlConfigLocator} is {@code null}, a new
 * instance is created and the config is located in every possible
 * places. For these places, please see {@link XmlConfigLocator}.
 * <p>
 * If the provided {@link XmlConfigLocator} is not {@code null}, it
 * is expected that it already located the configuration XML to load
 * from. No further attempt to locate the configuration XML is made
 * if the configuration XML is not located already.
 *
 * @param locator the configured locator to use
 */
@PrivateApi
public XmlConfigBuilder(XmlConfigLocator locator) {
    if (locator == null) {
        // no locator given: search all default locations
        locator = new XmlConfigLocator();
        locator.locateEverywhere();
    }
    // adopt whatever source the locator found (stream plus optional file/URL origin)
    this.in = locator.getIn();
    this.configurationFile = locator.getConfigurationFile();
    this.configurationUrl = locator.getConfigurationUrl();
}
/**
 * Sets the used properties. Can be null if no properties should be used.
 * <p>
 * Properties are used to resolve ${variable} occurrences in the XML file.
 *
 * @param properties the new properties
 * @return this XmlConfigBuilder, for call chaining
 */
public XmlConfigBuilder setProperties(Properties properties) {
    super.setPropertiesInternal(properties);
    return this;
}
// This builder always produces a member (server) configuration.
@Override
protected ConfigType getConfigType() {
    return ConfigType.SERVER;
}
// Builds a fresh Config populated from the XML source of this builder.
@Override
public Config build() {
    return build(new Config());
}
// Populates the given Config from the XML source; records the origin
// (file/URL) on the config before parsing so errors can reference it.
Config build(Config config) {
    config.setConfigurationFile(configurationFile);
    config.setConfigurationUrl(configurationUrl);
    try {
        parseAndBuildConfig(config);
    } catch (Exception e) {
        // rethrow as unchecked while preserving the original cause
        throw ExceptionUtil.rethrow(e);
    }
    return config;
}
// Parses the XML stream, validates its root element and (optionally) schema,
// then delegates to MemberDomConfigProcessor to fill in the Config.
private void parseAndBuildConfig(Config config) throws Exception {
    Document doc = parse(in);
    Element root = doc.getDocumentElement();
    checkRootElement(root);
    try {
        // probe for DOM Level 3 support: getTextContent() throws on older DOMs
        root.getTextContent();
    } catch (Throwable e) {
        domLevel3 = false;
    }
    process(root);
    if (shouldValidateTheSchema()) {
        schemaValidation(root.getOwnerDocument());
    }
    new MemberDomConfigProcessor(domLevel3, config).buildConfig(root);
}
// Verifies that the document's root element is the expected <hazelcast> tag;
// throws InvalidConfigurationException otherwise.
private void checkRootElement(Element root) {
    String actualName = root.getNodeName();
    if (ConfigSections.HAZELCAST.isEqual(actualName)) {
        return;
    }
    String expectedName = ConfigSections.HAZELCAST.getName();
    throw new InvalidConfigurationException("Invalid root element in xml configuration!"
            + " Expected: <" + expectedName + ">, Actual: <" + actualName + ">.");
}
// Returns true unless the Hazelcast version has been overridden via system
// property, in which case no matching schema may exist to validate against.
private boolean shouldValidateTheSchema() {
    // in case of overridden Hazelcast version there may be no schema with that version
    // (this feature is used only in Simulator testing)
    return System.getProperty(HAZELCAST_INTERNAL_OVERRIDE_VERSION) == null;
}
/**
 * Parses the given InputStream into a DOM Document, always closing the
 * stream afterwards.
 * <p>
 * DOCTYPE declarations are disallowed to prevent XXE attacks. On failure the
 * configuration source (file, URL, or plain stream) is logged and an
 * {@code InvalidConfigurationException} wrapping the cause is thrown.
 *
 * @param is the stream containing the XML configuration
 * @return the parsed Document
 * @throws Exception if the parser cannot be created or parsing fails
 */
@Override
protected Document parse(InputStream is) throws Exception {
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    dbf.setNamespaceAware(true);
    // disallow DOCTYPE declarations to prevent XML External Entity (XXE) attacks
    dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
    DocumentBuilder builder = dbf.newDocumentBuilder();
    Document doc;
    try {
        doc = builder.parse(is);
    } catch (Exception e) {
        // describe the configuration source once instead of triplicating the
        // message construction across three branches
        String source = configurationFile != null ? configurationFile.toString()
                : configurationUrl != null ? configurationUrl.toString()
                : "the inputstream";
        String msg = "Failed to parse " + source
                + LINE_SEPARATOR + "Exception: " + e.getMessage()
                + LINE_SEPARATOR + "Hazelcast startup interrupted.";
        LOGGER.severe(msg);
        throw new InvalidConfigurationException(e.getMessage(), e);
    } finally {
        IOUtil.closeResource(is);
    }
    return doc;
}
}
| |
/*
* Copyright 2013 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit.manager;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gitblit.Constants;
import com.gitblit.IStoredSettings;
import com.gitblit.Keys;
import com.gitblit.models.ServerSettings;
import com.gitblit.models.ServerStatus;
import com.gitblit.models.SettingModel;
import com.gitblit.utils.StringUtils;
import com.gitblit.utils.XssFilter;
/**
 * Central runtime manager for a Gitblit instance: exposes the stored settings,
 * server status, base-folder resolution, locale/timezone preferences and the
 * XSS filter to the rest of the application.
 */
public class RuntimeManager implements IRuntimeManager {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    // the persisted configuration backing this runtime
    private final IStoredSettings settings;

    // filter used to sanitize user-supplied content
    private final XssFilter xssFilter;

    // mutable server status (boot date, heap usage); refreshed in getStatus()
    private final ServerStatus serverStatus;

    // model of all known settings, synchronized with current values on demand
    private final ServerSettings settingsModel;

    // root folder used to resolve relative file/folder settings
    private File baseFolder;

    // cached preferred timezone; resolved lazily in getTimezone()
    private TimeZone timezone;

    public RuntimeManager(IStoredSettings settings, XssFilter xssFilter) {
        this(settings, xssFilter, null);
    }

    public RuntimeManager(IStoredSettings settings, XssFilter xssFilter, File baseFolder) {
        this.settings = settings;
        this.settingsModel = new ServerSettings();
        this.serverStatus = new ServerStatus();
        this.xssFilter = xssFilter;
        // a null baseFolder falls back to the current working directory
        this.baseFolder = baseFolder == null ? new File("") : baseFolder;
    }

    /**
     * Logs the runtime environment (base folder, settings, timezones, locales)
     * and returns this manager. No additional services are started here.
     */
    @Override
    public RuntimeManager start() {
        logger.info("Basefolder : " + baseFolder.getAbsolutePath());
        logger.info("Settings : " + settings.toString());
        logTimezone("JVM timezone: ", TimeZone.getDefault());
        logTimezone("App timezone: ", getTimezone());
        logger.info("JVM locale : " + Locale.getDefault());
        logger.info("App locale : " + (getLocale() == null ? "<client>" : getLocale()));
        return this;
    }

    // Nothing to tear down; present to satisfy the manager lifecycle contract.
    @Override
    public RuntimeManager stop() {
        return this;
    }

    @Override
    public File getBaseFolder() {
        return baseFolder;
    }

    @Override
    public void setBaseFolder(File folder) {
        this.baseFolder = folder;
    }

    /**
     * Returns the boot date of the Gitblit server.
     *
     * @return the boot date of Gitblit
     */
    @Override
    public Date getBootDate() {
        return serverStatus.bootDate;
    }

    /**
     * Returns the settings model, refreshing every setting's current value
     * from the stored settings and creating models for unreferenced keys.
     */
    @Override
    public ServerSettings getSettingsModel() {
        // ensure that the current values are updated in the setting models
        for (String key : settings.getAllKeys(null)) {
            SettingModel setting = settingsModel.get(key);
            if (setting == null) {
                // unreferenced setting, create a setting model
                setting = new SettingModel();
                setting.name = key;
                settingsModel.add(setting);
            }
            setting.currentValue = settings.getString(key, "");
        }
        // settingsModel.pushScripts = getAllScripts();
        return settingsModel;
    }

    /**
     * Determine if this Gitblit instance is actively serving git repositories
     * or if it is merely a repository viewer.
     *
     * @return true if Gitblit is serving repositories
     */
    @Override
    public boolean isServingRepositories() {
        return isServingHTTP()
                || isServingGIT()
                || isServingSSH();
    }

    /**
     * Determine if this Gitblit instance is actively serving git repositories
     * over the HTTP protocol.
     *
     * @return true if Gitblit is serving repositories over the HTTP protocol
     */
    @Override
    public boolean isServingHTTP() {
        return settings.getBoolean(Keys.git.enableGitServlet, true);
    }

    /**
     * Determine if this Gitblit instance is actively serving git repositories
     * over the Git Daemon protocol.
     *
     * @return true if Gitblit is serving repositories over the Git Daemon protocol
     */
    @Override
    public boolean isServingGIT() {
        return settings.getInteger(Keys.git.daemonPort, 0) > 0;
    }

    /**
     * Determine if this Gitblit instance is actively serving git repositories
     * over the SSH protocol.
     *
     * @return true if Gitblit is serving repositories over the SSH protocol
     */
    @Override
    public boolean isServingSSH() {
        return settings.getInteger(Keys.git.sshPort, 0) > 0;
    }

    /**
     * Returns the preferred timezone for the Gitblit instance, defaulting to
     * the JVM default when no timezone id is configured.
     * NOTE(review): the lazy cache is not synchronized — presumably only
     * accessed from contexts where a duplicate initialization is harmless.
     *
     * @return a timezone
     */
    @Override
    public TimeZone getTimezone() {
        if (timezone == null) {
            String tzid = settings.getString(Keys.web.timezone, null);
            if (StringUtils.isEmpty(tzid)) {
                timezone = TimeZone.getDefault();
                return timezone;
            }
            timezone = TimeZone.getTimeZone(tzid);
        }
        return timezone;
    }

    // Logs a timezone with its current abbreviation and UTC offset ("z Z").
    private void logTimezone(String type, TimeZone zone) {
        SimpleDateFormat df = new SimpleDateFormat("z Z");
        df.setTimeZone(zone);
        String offset = df.format(new Date());
        logger.info("{}{} ({})", new Object [] { type, zone.getID(), offset });
    }

    /**
     * Returns the forced default locale parsed from "lang" or "lang_COUNTRY",
     * or null when the client's locale should be used.
     */
    @Override
    public Locale getLocale() {
        String lc = settings.getString(Keys.web.forceDefaultLocale, null);
        if (!StringUtils.isEmpty(lc)) {
            int underscore = lc.indexOf('_');
            if (underscore > 0) {
                String lang = lc.substring(0, underscore);
                String cc = lc.substring(underscore + 1);
                return new Locale(lang, cc);
            } else {
                return new Locale(lc);
            }
        }
        return null;
    }

    /**
     * Is Gitblit running in debug mode?
     *
     * @return true if Gitblit is running in debug mode
     */
    @Override
    public boolean isDebugMode() {
        return settings.getBoolean(Keys.web.debugMode, false);
    }

    /**
     * Returns the file object for the specified configuration key.
     *
     * @return the file
     */
    @Override
    public File getFileOrFolder(String key, String defaultFileOrFolder) {
        String fileOrFolder = settings.getString(key, defaultFileOrFolder);
        return getFileOrFolder(fileOrFolder);
    }

    /**
     * Returns the file object which may have its base-path determined by
     * environment variables for running on a cloud hosting service. All Gitblit
     * file or folder retrievals are (at least initially) funneled through this
     * method so it is the correct point to globally override/alter filesystem
     * access based on environment or some other indicator.
     *
     * @return the file
     */
    @Override
    public File getFileOrFolder(String fileOrFolder) {
        return com.gitblit.utils.FileUtils.resolveParameter(Constants.baseFolder$,
                baseFolder, fileOrFolder);
    }

    /**
     * Returns the runtime settings.
     *
     * @return runtime settings
     */
    @Override
    public IStoredSettings getSettings() {
        return settings;
    }

    /**
     * Updates the runtime settings.
     *
     * @param updatedSettings the key/value pairs to persist
     * @return true if the update succeeded
     */
    @Override
    public boolean updateSettings(Map<String, String> updatedSettings) {
        return settings.saveSettings(updatedSettings);
    }

    // Refreshes heap statistics before returning the status object.
    @Override
    public ServerStatus getStatus() {
        // update heap memory status
        serverStatus.heapAllocated = Runtime.getRuntime().totalMemory();
        serverStatus.heapFree = Runtime.getRuntime().freeMemory();
        return serverStatus;
    }

    /**
     * Returns the XSS filter.
     *
     * @return the XSS filter
     */
    @Override
    public XssFilter getXssFilter() {
        return xssFilter;
    }
}
| |
/*
* Copyright 2015-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.server.controller;
import java.net.URI;
import java.util.Arrays;
import java.util.Date;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase;
import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase.Replace;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.dataflow.core.Launcher;
import org.springframework.cloud.dataflow.core.TaskDefinition;
import org.springframework.cloud.dataflow.registry.service.AppRegistryService;
import org.springframework.cloud.dataflow.server.TaskValidationController;
import org.springframework.cloud.dataflow.server.configuration.TestDependencies;
import org.springframework.cloud.dataflow.server.job.LauncherRepository;
import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository;
import org.springframework.cloud.dataflow.server.service.TaskDeleteService;
import org.springframework.cloud.dataflow.server.service.TaskExecutionService;
import org.springframework.cloud.dataflow.server.service.TaskSaveService;
import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest;
import org.springframework.cloud.deployer.spi.task.TaskLauncher;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.cloud.task.repository.TaskExplorer;
import org.springframework.http.MediaType;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
 * MockMvc tests for the task-definition and task-execution REST endpoints.
 *
 * @author Michael Minella
 * @author Mark Fisher
 * @author Glenn Renfro
 * @author Gunnar Hillert
 * @author Ilayaperumal Gopinathan
 * @author Christian Tzolov
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = TestDependencies.class)
@DirtiesContext(classMode = DirtiesContext.ClassMode.BEFORE_EACH_TEST_METHOD)
@AutoConfigureTestDatabase(replace = Replace.ANY)
public class TaskControllerTests {

    @Autowired
    TaskExecutionService taskExecutionService;

    @Autowired
    private TaskDefinitionRepository repository;

    @Autowired
    private AppRegistryService registry;

    // built per test from the web application context in setupMockMVC()
    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext wac;

    // mocked deployer (see TestDependencies); launch() is stubbed per test
    @Autowired
    private TaskLauncher taskLauncher;

    @Autowired
    private Launcher launcher;

    @Autowired
    private LauncherRepository launcherRepository;

    @Autowired
    private TaskExplorer taskExplorer;

    @Autowired
    private TaskValidationController taskValidationController;

    @Autowired
    private TaskSaveService taskSaveService;

    @Autowired
    private TaskDeleteService taskDeleteService;

    /**
     * Wires a MockMvc instance and stubs the launcher/explorer mocks so that
     * "myTask" looks RUNNING and "myTask2" looks COMPLETE.
     */
    @Before
    public void setupMockMVC() {
        this.mockMvc = MockMvcBuilders.webAppContextSetup(wac)
                .defaultRequest(get("/").accept(MediaType.APPLICATION_JSON)).build();
        launcherRepository.save(new Launcher("default", "local", taskLauncher));
        when(taskLauncher.launch(any(AppDeploymentRequest.class))).thenReturn("testID");
        // running execution: has a start time but no end time / exit code
        final TaskExecution taskExecutionRunning = new TaskExecution();
        taskExecutionRunning.setTaskName("myTask");
        taskExecutionRunning.setStartTime(new Date());
        when(taskExplorer.getLatestTaskExecutionForTaskName("myTask")).thenReturn(taskExecutionRunning);
        // completed execution: end time set and exit code 0
        final TaskExecution taskExecutionComplete = new TaskExecution();
        taskExecutionComplete.setTaskName("myTask2");
        taskExecutionComplete.setStartTime(new Date());
        taskExecutionComplete.setEndTime(new Date());
        taskExecutionComplete.setExitCode(0);
        when(taskExplorer.getLatestTaskExecutionForTaskName("myTask2")).thenReturn(taskExecutionComplete);
        when(taskExplorer.getLatestTaskExecutionsByTaskNames(any()))
                .thenReturn(Arrays.asList(taskExecutionRunning, taskExecutionComplete));
    }

    // Constructor argument validation: the repository is mandatory.
    @Test(expected = IllegalArgumentException.class)
    public void testTaskDefinitionControllerConstructorMissingRepository() {
        new TaskDefinitionController(mock(TaskExplorer.class), null, taskSaveService, taskDeleteService);
    }

    // Constructor argument validation: the task explorer is mandatory.
    @Test(expected = IllegalArgumentException.class)
    public void testTaskDefinitionControllerConstructorMissingTaskExplorer() {
        new TaskDefinitionController(null, mock(TaskDefinitionRepository.class), taskSaveService, taskDeleteService);
    }

    // A null launch id from the deployer surfaces as a 500 error.
    @Test
    public void testTaskLaunchWithNullIDReturned() throws Exception {
        when(taskLauncher.launch(any(AppDeploymentRequest.class))).thenReturn(null);
        repository.save(new TaskDefinition("myTask", "foo"));
        this.registry.save("foo", ApplicationType.task,
                "1.0.0", new URI("maven://org.springframework.cloud:foo:1"), null);
        mockMvc.perform(post("/tasks/executions").param("name", "myTask").accept(MediaType.APPLICATION_JSON))
                .andExpect(status().isInternalServerError());
    }

    // Saving a definition whose app is not registered is rejected with 404.
    @Test
    public void testSaveErrorNotInRegistry() throws Exception {
        assertEquals(0, repository.count());
        mockMvc.perform(post("/tasks/definitions/").param("name", "myTask").param("definition", "task")
                .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isNotFound());
        assertEquals(0, repository.count());
    }

    // Happy-path save: the definition is persisted with the implicit task name property.
    @Test
    public void testSave() throws Exception {
        assertEquals(0, repository.count());
        this.registry.save("task", ApplicationType.task, "1.0.0", new URI("http://fake.example.com/"), null);
        mockMvc.perform(post("/tasks/definitions/").param("name", "myTask").param("definition", "task")
                .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isOk());
        assertEquals(1, repository.count());
        TaskDefinition myTask = repository.findById("myTask").get();
        assertEquals(1, myTask.getProperties().size());
        assertEquals("myTask", myTask.getProperties().get("spring.cloud.task.name"));
        assertEquals("task", myTask.getDslText());
        assertEquals("myTask", myTask.getName());
    }

    // Saving a duplicate definition name yields 409 Conflict.
    @Test
    public void testSaveDuplicate() throws Exception {
        this.registry.save("task", ApplicationType.task, "1.0.0", new URI("http://fake.example.com/"), null);
        repository.save(new TaskDefinition("myTask", "task"));
        mockMvc.perform(post("/tasks/definitions/").param("name", "myTask").param("definition", "task")
                .accept(MediaType.APPLICATION_JSON)).andExpect(status().isConflict());
        assertEquals(1, repository.count());
    }

    // DSL options (--foo=bar) are captured as definition properties.
    @Test
    public void testSaveWithParameters() throws Exception {
        this.registry.save("task", ApplicationType.task, "1.0.0", new URI("http://fake.example.com/"), null);
        mockMvc.perform(post("/tasks/definitions/").param("name", "myTask")
                .param("definition", "task --foo=bar --bar=baz").accept(MediaType.APPLICATION_JSON)).andDo(print())
                .andExpect(status().isOk());
        assertEquals(1, repository.count());
        TaskDefinition myTask = repository.findById("myTask").get();
        assertEquals("bar", myTask.getProperties().get("foo"));
        assertEquals("baz", myTask.getProperties().get("bar"));
        assertEquals("task --foo=bar --bar=baz", myTask.getDslText());
        assertEquals("task", myTask.getRegisteredAppName());
        assertEquals("myTask", myTask.getName());
    }

    // A composite task (t1 && t2) expands into the parent plus one child per step.
    @Test
    public void testSaveCompositeTaskWithParameters() throws Exception {
        registry.save("task", ApplicationType.task, "1.0.0", new URI("http://fake.example.com/"), null);
        mockMvc.perform(post("/tasks/definitions/").param("name", "myTask")
                .param("definition", "t1: task --foo='bar rab' && t2: task --foo='one two'")
                .accept(MediaType.APPLICATION_JSON)).andDo(print())
                .andExpect(status().isOk());
        assertEquals(3, repository.count());
        TaskDefinition myTask1 = repository.findById("myTask-t1").get();
        assertEquals("bar rab", myTask1.getProperties().get("foo"));
        assertEquals("task --foo='bar rab'", myTask1.getDslText());
        assertEquals("task", myTask1.getRegisteredAppName());
        assertEquals("myTask-t1", myTask1.getName());
        TaskDefinition myTask2 = repository.findById("myTask-t2").get();
        assertEquals("one two", myTask2.getProperties().get("foo"));
        assertEquals("task --foo='one two'", myTask2.getDslText());
        assertEquals("task", myTask2.getRegisteredAppName());
        assertEquals("myTask-t2", myTask2.getName());
    }

    // The "search" query parameter filters definitions by name substring.
    @Test
    public void testFindTaskNameContainsSubstring() throws Exception {
        repository.save(new TaskDefinition("foo", "task"));
        repository.save(new TaskDefinition("foz", "task"));
        repository.save(new TaskDefinition("ooz", "task"));
        mockMvc.perform(get("/tasks/definitions").param("search", "f")
                .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isOk())
                .andExpect(jsonPath("$.content.*", hasSize(2)))
                .andExpect(jsonPath("$.content[0].name", is("foo")))
                .andExpect(jsonPath("$.content[1].name", is("foz")));
        mockMvc.perform(get("/tasks/definitions").param("search", "oz")
                .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isOk())
                .andExpect(jsonPath("$.content.*", hasSize(2)))
                .andExpect(jsonPath("$.content[0].name", is("foz")))
                .andExpect(jsonPath("$.content[1].name", is("ooz")));
        mockMvc.perform(get("/tasks/definitions").param("search", "o")
                .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isOk())
                .andExpect(jsonPath("$.content.*", hasSize(3)))
                .andExpect(jsonPath("$.content[0].name", is("foo")))
                .andExpect(jsonPath("$.content[1].name", is("foz")))
                .andExpect(jsonPath("$.content[2].name", is("ooz")));
    }

    // DELETE on an existing definition removes it.
    @Test
    public void testDestroyTask() throws Exception {
        repository.save(new TaskDefinition("myTask", "task"));
        mockMvc.perform(delete("/tasks/definitions/myTask").accept(MediaType.APPLICATION_JSON)).andDo(print())
                .andExpect(status().isOk());
        assertEquals(0, repository.count());
    }

    // DELETE on an unknown definition yields 404.
    @Test
    public void testDestroyTaskNotFound() throws Exception {
        mockMvc.perform(delete("/tasks/definitions/myTask").accept(MediaType.APPLICATION_JSON)).andDo(print())
                .andExpect(status().isNotFound());
        assertEquals(0, repository.count());
    }

    // DELETE on the collection removes every definition.
    @Test
    public void testDestroyAllTask() throws Exception {
        repository.save(new TaskDefinition("myTask1", "task"));
        repository.save(new TaskDefinition("myTask2", "task && task2"));
        repository.save(new TaskDefinition("myTask3", "task"));
        assertEquals(3, repository.count());
        mockMvc.perform(get("/tasks/definitions/").accept(MediaType.APPLICATION_JSON)).andExpect(status().isOk())
                .andExpect(jsonPath("$.content", hasSize(3)));
        mockMvc.perform(delete("/tasks/definitions").accept(MediaType.APPLICATION_JSON)).andDo(print())
                .andExpect(status().isOk());
        assertEquals(0, repository.count());
    }

    // Launching a definition whose app is unregistered returns a 5xx with a message.
    @Test
    public void testMissingApplication() throws Exception {
        repository.save(new TaskDefinition("myTask", "no-such-task-app"));
        mockMvc.perform(post("/tasks/executions").param("name", "myTask").accept(MediaType.APPLICATION_JSON))
                .andDo(print()).andExpect(status().is5xxServerError())
                .andExpect(content().json("[{message: \"Unknown task app: no-such-task-app\"}]"));
    }

    // Launching an undefined task returns 404 with a message.
    @Test
    public void testTaskNotDefined() throws Exception {
        mockMvc.perform(post("/tasks/executions")
                .param("name", "myFoo").accept(MediaType.APPLICATION_JSON))
                .andDo(print()).andExpect(status().isNotFound())
                .andExpect(content().json("[{message: \"Could not find task definition named myFoo\"}]"));
    }

    // Happy-path launch; destroying all definitions also destroys launched tasks.
    @Test
    public void testLaunch() throws Exception {
        repository.save(new TaskDefinition("myTask", "foo"));
        this.registry.save("foo", ApplicationType.task,
                "1.0.0", new URI("file:src/test/resources/apps/foo-task"), null);
        mockMvc.perform(post("/tasks/executions").param("name", "myTask").accept(MediaType.APPLICATION_JSON))
                .andDo(print()).andExpect(status().isCreated());
        ArgumentCaptor<AppDeploymentRequest> argumentCaptor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
        verify(this.taskLauncher, atLeast(1)).launch(argumentCaptor.capture());
        AppDeploymentRequest request = argumentCaptor.getValue();
        assertEquals("myTask", request.getDefinition().getProperties().get("spring.cloud.task.name"));
        mockMvc.perform(delete("/tasks/definitions").accept(MediaType.APPLICATION_JSON)).andDo(print())
                .andExpect(status().isOk());
        // Destroy should be called only if there was a launch task
        Mockito.verify(taskLauncher).destroy("myTask");
    }

    // App properties embedded in the DSL are propagated to the deployment request.
    @Test
    public void testLaunchWithAppProperties() throws Exception {
        repository.save(new TaskDefinition("myTask2", "foo2 --common.prop2=wizz"));
        this.registry.save("foo2", ApplicationType.task,
                "1.0.0", new URI("file:src/test/resources/apps/foo-task"), null);
        mockMvc.perform(post("/tasks/executions").param("name", "myTask2")
                .accept(MediaType.APPLICATION_JSON))
                .andDo(print()).andExpect(status().isCreated());
        ArgumentCaptor<AppDeploymentRequest> argumentCaptor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
        verify(this.taskLauncher, atLeast(1)).launch(argumentCaptor.capture());
        AppDeploymentRequest request = argumentCaptor.getValue();
        assertThat(request.getDefinition().getProperties(), hasEntry("common.prop2", "wizz"));
        assertEquals("myTask2", request.getDefinition().getProperties().get("spring.cloud.task.name"));
    }

    // Command-line arguments survive form-encoded submission, including quoting and commas.
    @Test
    public void testLaunchWithArguments() throws Exception {
        repository.save(new TaskDefinition("myTask3", "foo3"));
        this.registry.save("foo3", ApplicationType.task,
                "1.0.0", new URI("file:src/test/resources/apps/foo-task"), null);
        mockMvc.perform(post("/tasks/executions")
                // .param("name", "myTask3")
                .contentType(MediaType.APPLICATION_FORM_URLENCODED)
                .content(EntityUtils.toString(new UrlEncodedFormEntity(Arrays.asList(
                        new BasicNameValuePair("name", "myTask3"),
                        new BasicNameValuePair("arguments",
                                "--foobar=jee --foobar2=jee2,foo=bar --foobar3='jee3 jee3'")))))
                .accept(MediaType.APPLICATION_JSON))
                .andDo(print())
                .andExpect(status().isCreated());
        ArgumentCaptor<AppDeploymentRequest> argumentCaptor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
        verify(this.taskLauncher, atLeast(1)).launch(argumentCaptor.capture());
        AppDeploymentRequest request = argumentCaptor.getValue();
        assertThat(request.getCommandlineArguments().size(), is(3 + 1)); // +1 for spring.cloud.task.executionid
        assertThat(request.getCommandlineArguments().get(0), is("--foobar=jee"));
        assertThat(request.getCommandlineArguments().get(1), is("--foobar2=jee2,foo=bar"));
        assertThat(request.getCommandlineArguments().get(2), is("--foobar3='jee3 jee3'"));
        assertEquals("myTask3", request.getDefinition().getProperties().get("spring.cloud.task.name"));
    }

    // Single-definition GET includes the derived status (see setupMockMVC stubs).
    @Test
    public void testDisplaySingleTask() throws Exception {
        TaskDefinition taskDefinition = new TaskDefinition("myTask", "timestamp");
        repository.save(taskDefinition);
        TaskDefinition taskDefinition2 = new TaskDefinition("myTask2", "timestamp");
        repository.save(taskDefinition2);
        TaskDefinition taskDefinition3 = new TaskDefinition("myTask3", "timestamp");
        repository.save(taskDefinition3);
        assertEquals(3, repository.count());
        mockMvc.perform(get("/tasks/definitions/myTask").accept(MediaType.APPLICATION_JSON)).andExpect(status().isOk())
                .andExpect(content().json("{name: \"myTask\"}"))
                .andExpect(content().json("{status: \"RUNNING\"}"))
                .andExpect(content().json("{dslText: \"timestamp\"}"));
        mockMvc.perform(get("/tasks/definitions/myTask2").accept(MediaType.APPLICATION_JSON)).andExpect(status().isOk())
                .andExpect(content().json("{name: \"myTask2\"}"))
                .andExpect(content().json("{status: \"COMPLETE\"}"))
                .andExpect(content().json("{dslText: \"timestamp\"}"));
        mockMvc.perform(get("/tasks/definitions/myTask3").accept(MediaType.APPLICATION_JSON)).andExpect(status().isOk())
                .andExpect(content().json("{name: \"myTask3\"}"))
                .andExpect(content().json("{status: \"UNKNOWN\"}"))
                .andExpect(content().json("{dslText: \"timestamp\"}"));
    }

    // GET on an unknown definition yields 404.
    @Test
    public void testDisplaySingleTaskNotFound() throws Exception {
        mockMvc.perform(get("/tasks/definitions/myTask").accept(MediaType.APPLICATION_JSON))
                .andExpect(status().isNotFound());
    }

    // Collection GET lists every definition with name, dsl and status.
    @Test
    public void testGetAllTasks() throws Exception {
        TaskDefinition taskDefinition = new TaskDefinition("myTask", "timestamp");
        repository.save(taskDefinition);
        TaskDefinition taskDefinition2 = new TaskDefinition("myTask2", "timestamp");
        repository.save(taskDefinition2);
        TaskDefinition taskDefinition3 = new TaskDefinition("myTask3", "timestamp");
        repository.save(taskDefinition3);
        assertEquals(3, repository.count());
        mockMvc.perform(get("/tasks/definitions/").accept(MediaType.APPLICATION_JSON)).andExpect(status().isOk())
                .andExpect(jsonPath("$.content", hasSize(3)))
                .andExpect(jsonPath("$.content[*].name", containsInAnyOrder("myTask", "myTask2", "myTask3")))
                .andExpect(jsonPath("$.content[*].dslText", containsInAnyOrder("timestamp", "timestamp", "timestamp")))
                .andExpect(jsonPath("$.content[*].status", containsInAnyOrder("RUNNING", "COMPLETE", "UNKNOWN")));
    }

    // The validation endpoint reports the app status for a registered definition.
    @Test
    public void testValidate() throws Exception {
        repository.save(new TaskDefinition("myTask", "foo"));
        this.registry.save("foo", ApplicationType.task,
                "1.0.0", new URI("file:src/test/resources/apps/foo-task"), null);
        mockMvc.perform(get("/tasks/validation/myTask")).andExpect(status().isOk())
                .andDo(print()).andExpect(content().json(
                "{\"appName\":\"myTask\",\"appStatuses\":{\"task:myTask\":\"valid\"},\"dsl\":\"foo\",\"links\":[]}"));
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package generic.lsh.vector;
import java.io.IOException;
import java.io.Writer;
import ghidra.xml.XmlElement;
import ghidra.xml.XmlPullParser;
public class WeightFactory {
private double idfweight[] = new double[512]; // Weights associated with (normalized) idf counts
private double tfweight[] = new double[64]; // Weights associated with tf (term frequency) counts
private double weightnorm; // Scale to which idf weights are normalized = -log2( probability of 1000th most common hash)
private double probflip0; // Hash flipping probability in causal model, param0
private double probflip1; // Hash flipping probability in causal model, param1
private double probdiff0; // Hash addition/removal probability, param0
private double probdiff1; // Hash addition/removal probability, param1
private double scale; // Final scaling to all weights
private double addend; // Final correction to score
// Pre-scaled copies (parameter * scale) of the four probability parameters,
// kept in sync by updateNorms() and exposed through the get*Norm* accessors.
private double probflip0_norm;
private double probflip1_norm;
private double probdiff0_norm;
private double probdiff1_norm;
/**
 * Recomputes the cached (parameter * scale) values; must be invoked whenever
 * scale or any of the probability parameters changes.
 */
private void updateNorms() {
    probflip0_norm = probflip0 * scale;
    probflip1_norm = probflip1 * scale;
    probdiff0_norm = probdiff0 * scale;
    probdiff1_norm = probdiff1 * scale;
}
/**
 * @return number of weights in the IDF portion of the table
 */
public final int getIDFSize() {
    return idfweight.length;
}
/**
 * @return number of weights in the TF portion of the table
 */
public final int getTFSize() {
    return tfweight.length;
}
/**
 * @return number of floating-point entries needed to serialize the factory
 *         (all IDF + all TF weights plus the 7 scalar parameters)
 */
public final int getSize() {
    return idfweight.length + tfweight.length + 7;
}
/**
 * @param val is the (normalized) idf count used as an index into the table
 * @return the IDF weight at the given position
 */
public final double getIDFWeight(short val) {
    return idfweight[val];
}
/**
 * @param val is the term count (-1)
 * @return the TF weight for the given count
 */
public final double getTFWeight(short val) {
    return tfweight[val];
}
/**
 * Given an IDF position and a TF count, build the feature coefficient
 * (the product of the corresponding table entries).
 * @param i is the IDF position
 * @param t is the TF count
 * @return the feature coefficient
 */
public final double getCoeff(short i, short t) {
    return idfweight[i] * tfweight[t];
}
/**
 * @return the weight normalization factor
 */
public final double getWeightNorm() {
    return weightnorm;
}
/**
 * @return the first feature flip penalty parameter (pre-multiplied by scale)
 */
public final double getFlipNorm0() {
    return probflip0_norm;
}
/**
 * @return the first feature drop penalty parameter (pre-multiplied by scale)
 */
public final double getDiffNorm0() {
    return probdiff0_norm;
}
/**
 * @return the second feature flip penalty parameter (pre-multiplied by scale)
 */
public final double getFlipNorm1() {
    return probflip1_norm;
}
/**
 * @return the second feature drop penalty parameter (pre-multiplied by scale)
 */
public final double getDiffNorm1() {
    return probdiff1_norm;
}
/**
* @return the final score scaling factor
*/
public final double getScale() {
return scale;
}
/**
* @return the final score addend
*/
public final double getAddend() {
return addend;
}
/**
 * Fill the TF table with the classic sublinear weighting
 * sqrt(1 + log2(count + 1)), where the entry index is the term count.
 */
public void setLogarithmicTFWeights() {
    for (int count = 0; count < tfweight.length; ++count) {
        // log(x)/log(2) == log2(x); kept as a division so results match exactly
        tfweight[count] = Math.sqrt(1.0 + Math.log(count + 1) / Math.log(2.0));
    }
}
/**
 * Serialize this object as XML to a Writer.
 *
 * IDF weights are emitted divided by sqrt(scale) (the scale is stored in
 * the element attributes and reapplied by {@code restoreXml}); the
 * weightnorm is emitted multiplied by scale, mirroring the inverse
 * operations performed on restore.
 *
 * @param fwrite is the Writer
 * @throws IOException if the underlying Writer fails
 */
public void saveXml(Writer fwrite) throws IOException {
    fwrite.append("<weightfactory scale=\"");
    fwrite.append(Double.toString(scale));
    fwrite.append("\" addend=\"");
    fwrite.append(Double.toString(addend));
    fwrite.append("\">\n");
    double scale_sqrt = Math.sqrt(scale);
    for (double element : idfweight) {
        fwrite.append(" <idf>");
        fwrite.append(Double.toString(element / scale_sqrt));
        fwrite.append("</idf>\n");
    }
    for (double element : tfweight) {
        fwrite.append(" <tf>");
        fwrite.append(Double.toString(element));
        fwrite.append("</tf>\n");
    }
    fwrite.append(" <weightnorm>").append(Double.toString(weightnorm * scale)).append("</weightnorm>\n");
    fwrite.append(" <probflip0>").append(Double.toString(probflip0)).append("</probflip0>\n");
    fwrite.append(" <probflip1>").append(Double.toString(probflip1)).append("</probflip1>\n");
    fwrite.append(" <probdiff0>").append(Double.toString(probdiff0)).append("</probdiff0>\n");
    fwrite.append(" <probdiff1>").append(Double.toString(probdiff1)).append("</probdiff1>\n");
    // BUG FIX: the closing tag was previously written as "<weightfactory>",
    // which produced malformed XML that restoreXml could not round-trip.
    fwrite.append("</weightfactory>\n");
}
/**
 * Condense weight table down to array of doubles.
 *
 * Layout: [idf weights / sqrt(scale)] [tf weights]
 *         [weightnorm*scale] [probflip0] [probflip1]
 *         [probdiff0] [probdiff1] [scale] [addend]
 *
 * @return array of doubles
 */
public double[] toArray() {
    final int total = getSize();
    final double[] out = new double[total];
    final double idfDivisor = Math.sqrt(scale);
    int pos = 0;
    for (double w : idfweight) {
        out[pos++] = w / idfDivisor;
    }
    for (double w : tfweight) {
        out[pos++] = w;
    }
    out[total - 7] = weightnorm * scale;
    out[total - 6] = probflip0;
    out[total - 5] = probflip1;
    out[total - 4] = probdiff0;
    out[total - 3] = probdiff1;
    out[total - 2] = scale;
    out[total - 1] = addend;
    return out;
}
/**
 * Initialize the WeightTable from an array of doubles.
 *
 * Expects the exact layout produced by {@code toArray}:
 * [idf weights / sqrt(scale)] [tf weights] [weightnorm*scale]
 * [probflip0] [probflip1] [probdiff0] [probdiff1] [scale] [addend].
 * Scale must be read first so the scale-dependent entries can be
 * un-scaled as they are loaded.
 *
 * @param weightArray serialized table, length must equal getSize()
 * @throws NumberFormatException if the array has the wrong length
 */
public void set(double[] weightArray) {
    int numrows = weightArray.length;
    if (numrows != getSize()) {
        throw new NumberFormatException("Not enough values in double array");
    }
    // Read scale/addend first; scale is needed to decode the other entries.
    scale = weightArray[numrows - 2];
    addend = weightArray[numrows - 1];
    weightnorm = weightArray[numrows - 7] / scale; // stored pre-multiplied by scale
    probflip0 = weightArray[numrows - 6];
    probflip1 = weightArray[numrows - 5];
    probdiff0 = weightArray[numrows - 4];
    probdiff1 = weightArray[numrows - 3];
    double sqrtScale = Math.sqrt(scale);
    // IDF weights are stored divided by sqrt(scale); reapply it here.
    for (int i = 0; i < idfweight.length; ++i) {
        idfweight[i] = weightArray[i] * sqrtScale;
    }
    // TF weights are stored verbatim, offset past the IDF section.
    for (int i = 0; i < tfweight.length; ++i) {
        tfweight[i] = weightArray[i + idfweight.length];
    }
    updateNorms(); // refresh the cached scale-adjusted parameters
}
/**
 * Build (deserialize) this object from an XML stream.
 *
 * Inverse of {@code saveXml}: IDF weights are re-multiplied by
 * sqrt(scale) and weightnorm is divided by scale as they are read.
 * Assumes the stream contains exactly getIDFSize() idf elements and
 * getTFSize() tf elements, in document order.
 *
 * @param parser is the XML parser
 */
public void restoreXml(XmlPullParser parser) {
    XmlElement el = parser.start("weightfactory");
    scale = Double.parseDouble(el.getAttribute("scale"));
    addend = Double.parseDouble(el.getAttribute("addend"));
    double scale_sqrt = Math.sqrt(scale);
    // IDF weights were serialized divided by sqrt(scale); undo that here.
    for(int i=0;i<idfweight.length;++i) {
        parser.start("idf");
        // NOTE(review): assumes parser.end() returns the just-closed element
        // and getText() its character content — confirm against XmlPullParser API
        double val = Double.parseDouble(parser.end().getText());
        idfweight[i] = val * scale_sqrt;
    }
    // TF weights are stored verbatim.
    for(int i=0;i<tfweight.length;++i) {
        parser.start("tf");
        double val = Double.parseDouble(parser.end().getText());
        tfweight[i] = val;
    }
    parser.start("weightnorm");
    weightnorm = Double.parseDouble(parser.end().getText());
    weightnorm /= scale; // serialized as weightnorm*scale
    parser.start("probflip0");
    probflip0 = Double.parseDouble(parser.end().getText());
    parser.start("probflip1");
    probflip1 = Double.parseDouble(parser.end().getText());
    parser.start("probdiff0");
    probdiff0 = Double.parseDouble(parser.end().getText());
    parser.start("probdiff1");
    probdiff1 = Double.parseDouble(parser.end().getText());
    parser.end(el); // consume the closing </weightfactory> tag
    updateNorms(); // refresh cached scale-adjusted parameters
}
}
| |
package org.sagebionetworks.object.snapshot.worker.utils;
import static org.junit.Assert.*;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;
import java.io.IOException;
import java.util.Arrays;
import java.util.Date;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.sagebionetworks.asynchronous.workers.sqs.MessageUtils;
import org.sagebionetworks.audit.dao.ObjectRecordDAO;
import org.sagebionetworks.audit.utils.ObjectRecordBuilderUtils;
import org.sagebionetworks.common.util.progress.ProgressCallback;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.audit.FileHandleSnapshot;
import org.sagebionetworks.repo.model.audit.ObjectRecord;
import org.sagebionetworks.repo.model.dao.FileHandleDao;
import org.sagebionetworks.repo.model.file.ExternalFileHandle;
import org.sagebionetworks.repo.model.file.ExternalObjectStoreFileHandle;
import org.sagebionetworks.repo.model.file.FileHandle;
import org.sagebionetworks.repo.model.file.PreviewFileHandle;
import org.sagebionetworks.repo.model.file.ProxyFileHandle;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.repo.model.message.ChangeMessage;
import org.sagebionetworks.repo.model.message.ChangeType;
import org.springframework.test.util.ReflectionTestUtils;
import com.amazonaws.services.sqs.model.Message;
/**
 * Unit tests for FileHandleSnapshotRecordWriter: verifies that change
 * messages are translated into FileHandleSnapshot object records, and
 * that buildFileHandleSnapshot copies the correct fields for each
 * concrete FileHandle subtype.
 */
public class FileHandleSnapshotRecordWriterTest {
	@Mock
	private FileHandleDao mockFileHandleDao;
	@Mock
	private ObjectRecordDAO mockObjectRecordDao;
	@Mock
	private ProgressCallback mockCallback;
	private FileHandleSnapshotRecordWriter writer;
	private String id = "123";
	@Before
	public void setup() {
		MockitoAnnotations.initMocks(this);
		writer = new FileHandleSnapshotRecordWriter();
		// Inject mock DAOs into the writer's private fields.
		ReflectionTestUtils.setField(writer, "fileHandleDao", mockFileHandleDao);
		ReflectionTestUtils.setField(writer, "objectRecordDAO", mockObjectRecordDao);
	}
	// DELETE messages must be ignored — nothing is written to the record DAO.
	@Test
	public void deleteFileMessageTest() throws IOException {
		Message message = MessageUtils.buildMessage(ChangeType.DELETE, id, ObjectType.FILE, "etag", System.currentTimeMillis());
		ChangeMessage changeMessage = MessageUtils.extractMessageBody(message);
		writer.buildAndWriteRecords(mockCallback, Arrays.asList(changeMessage));
		verify(mockObjectRecordDao, never()).saveBatch(anyList(), anyString());
	}
	// A non-FILE object type is rejected with IllegalArgumentException.
	@Test (expected=IllegalArgumentException.class)
	public void invalidChangeMessageTest() throws IOException {
		Message message = MessageUtils.buildMessage(ChangeType.UPDATE, id, ObjectType.PRINCIPAL, "etag", System.currentTimeMillis());
		ChangeMessage changeMessage = MessageUtils.extractMessageBody(message);
		writer.buildAndWriteRecords(mockCallback, Arrays.asList(changeMessage));
	}
	// An UPDATE for a FILE looks up the handle once per message and writes
	// one record per message in a single batch.
	@Test
	public void validChangeMessageTest() throws IOException {
		FileHandle fileHandle = new S3FileHandle();
		fileHandle.setEtag("etag");
		when(mockFileHandleDao.get(id)).thenReturn(fileHandle);
		Message message = MessageUtils.buildMessage(ChangeType.UPDATE, id, ObjectType.FILE, "etag", System.currentTimeMillis());
		ChangeMessage changeMessage = MessageUtils.extractMessageBody(message);
		FileHandleSnapshot record = FileHandleSnapshotRecordWriter.buildFileHandleSnapshot(fileHandle);
		ObjectRecord expected = ObjectRecordBuilderUtils.buildObjectRecord(record, changeMessage.getTimestamp().getTime());
		writer.buildAndWriteRecords(mockCallback, Arrays.asList(changeMessage, changeMessage));
		verify(mockFileHandleDao, times(2)).get(id);
		verify(mockObjectRecordDao).saveBatch(eq(Arrays.asList(expected, expected)), eq(expected.getJsonClassName()));
	}
	// S3 handles map key -> key and bucketName -> bucket.
	@Test
	public void testBuildFileHandleSnapshotWithS3FileHandle() {
		S3FileHandle s3FH = new S3FileHandle();
		s3FH.setBucketName("bucket");
		s3FH.setConcreteType(S3FileHandle.class.getName());
		s3FH.setContentMd5("md5");
		s3FH.setContentSize(1L);
		s3FH.setCreatedBy("998");
		s3FH.setCreatedOn(new Date());
		s3FH.setFileName("fileName");
		s3FH.setId("555");
		s3FH.setKey("key");
		s3FH.setStorageLocationId(900L);
		FileHandleSnapshot snapshot = FileHandleSnapshotRecordWriter.buildFileHandleSnapshot(s3FH);
		assertEquals(s3FH.getBucketName(), snapshot.getBucket());
		assertEquals(s3FH.getConcreteType(), snapshot.getConcreteType());
		assertEquals(s3FH.getContentMd5(), snapshot.getContentMd5());
		assertEquals(s3FH.getContentSize(), snapshot.getContentSize());
		assertEquals(s3FH.getCreatedBy(), snapshot.getCreatedBy());
		assertEquals(s3FH.getCreatedOn(), snapshot.getCreatedOn());
		assertEquals(s3FH.getFileName(), snapshot.getFileName());
		assertEquals(s3FH.getId(), snapshot.getId());
		assertEquals(s3FH.getKey(), snapshot.getKey());
		assertEquals(s3FH.getStorageLocationId(), snapshot.getStorageLocationId());
	}
	// Preview handles are mapped the same way as S3 handles.
	@Test
	public void testBuildFileHandleSnapshotWithPreviewFileHandle() {
		PreviewFileHandle previewFH = new PreviewFileHandle();
		previewFH.setBucketName("bucket");
		previewFH.setConcreteType(S3FileHandle.class.getName());
		previewFH.setContentMd5("md5");
		previewFH.setContentSize(1L);
		previewFH.setCreatedBy("998");
		previewFH.setCreatedOn(new Date());
		previewFH.setFileName("fileName");
		previewFH.setId("555");
		previewFH.setKey("key");
		previewFH.setStorageLocationId(900L);
		FileHandleSnapshot snapshot = FileHandleSnapshotRecordWriter.buildFileHandleSnapshot(previewFH);
		assertEquals(previewFH.getBucketName(), snapshot.getBucket());
		assertEquals(previewFH.getConcreteType(), snapshot.getConcreteType());
		assertEquals(previewFH.getContentMd5(), snapshot.getContentMd5());
		assertEquals(previewFH.getContentSize(), snapshot.getContentSize());
		assertEquals(previewFH.getCreatedBy(), snapshot.getCreatedBy());
		assertEquals(previewFH.getCreatedOn(), snapshot.getCreatedOn());
		assertEquals(previewFH.getFileName(), snapshot.getFileName());
		assertEquals(previewFH.getId(), snapshot.getId());
		assertEquals(previewFH.getKey(), snapshot.getKey());
		assertEquals(previewFH.getStorageLocationId(), snapshot.getStorageLocationId());
	}
	// External handles have no bucket; the external URL lands in the key field.
	@Test
	public void testBuildFileHandleSnapshotWithExternalFileHandle() {
		ExternalFileHandle externalFH = new ExternalFileHandle();
		externalFH.setConcreteType(S3FileHandle.class.getName());
		externalFH.setContentMd5("md5");
		externalFH.setContentSize(1L);
		externalFH.setCreatedBy("998");
		externalFH.setCreatedOn(new Date());
		externalFH.setFileName("fileName");
		externalFH.setId("555");
		externalFH.setExternalURL("externalURL");
		externalFH.setStorageLocationId(900L);
		FileHandleSnapshot snapshot = FileHandleSnapshotRecordWriter.buildFileHandleSnapshot(externalFH);
		assertNull(snapshot.getBucket());
		assertEquals(externalFH.getConcreteType(), snapshot.getConcreteType());
		assertEquals(externalFH.getContentMd5(), snapshot.getContentMd5());
		assertEquals(externalFH.getContentSize(), snapshot.getContentSize());
		assertEquals(externalFH.getCreatedBy(), snapshot.getCreatedBy());
		assertEquals(externalFH.getCreatedOn(), snapshot.getCreatedOn());
		assertEquals(externalFH.getFileName(), snapshot.getFileName());
		assertEquals(externalFH.getId(), snapshot.getId());
		assertEquals(externalFH.getExternalURL(), snapshot.getKey());
		assertEquals(externalFH.getStorageLocationId(), snapshot.getStorageLocationId());
	}
	// Proxy handles have no bucket; the file path lands in the key field.
	@Test
	public void testBuildFileHandleSnapshotWithProxyFileHandle() {
		ProxyFileHandle proxyFH = new ProxyFileHandle();
		proxyFH.setConcreteType(ProxyFileHandle.class.getName());
		proxyFH.setContentMd5("md5");
		proxyFH.setContentSize(1L);
		proxyFH.setCreatedBy("998");
		proxyFH.setCreatedOn(new Date());
		proxyFH.setFileName("fileName");
		proxyFH.setId("555");
		proxyFH.setFilePath("filePath");
		proxyFH.setStorageLocationId(900L);
		FileHandleSnapshot snapshot = FileHandleSnapshotRecordWriter.buildFileHandleSnapshot(proxyFH);
		assertNull(snapshot.getBucket());
		assertEquals(proxyFH.getConcreteType(), snapshot.getConcreteType());
		assertEquals(proxyFH.getContentMd5(), snapshot.getContentMd5());
		assertEquals(proxyFH.getContentSize(), snapshot.getContentSize());
		assertEquals(proxyFH.getCreatedBy(), snapshot.getCreatedBy());
		assertEquals(proxyFH.getCreatedOn(), snapshot.getCreatedOn());
		assertEquals(proxyFH.getFileName(), snapshot.getFileName());
		assertEquals(proxyFH.getId(), snapshot.getId());
		assertEquals(proxyFH.getFilePath(), snapshot.getKey());
		assertEquals(proxyFH.getStorageLocationId(), snapshot.getStorageLocationId());
	}
	// External object store handles have no bucket; fileKey lands in the key field.
	@Test
	public void testBuildFileHandleSnapshotWithExternalObjectStoreFileHandle() {
		ExternalObjectStoreFileHandle externalObjectStoreFileHandle = new ExternalObjectStoreFileHandle();
		externalObjectStoreFileHandle.setConcreteType(ExternalObjectStoreFileHandle.class.getName());
		externalObjectStoreFileHandle.setContentMd5("md5");
		externalObjectStoreFileHandle.setContentSize(1L);
		externalObjectStoreFileHandle.setCreatedBy("998");
		externalObjectStoreFileHandle.setCreatedOn(new Date());
		externalObjectStoreFileHandle.setFileName("fileName");
		externalObjectStoreFileHandle.setId("555");
		externalObjectStoreFileHandle.setFileKey("key");
		externalObjectStoreFileHandle.setStorageLocationId(900L);
		FileHandleSnapshot snapshot = FileHandleSnapshotRecordWriter.buildFileHandleSnapshot(externalObjectStoreFileHandle);
		assertNull(snapshot.getBucket());
		assertEquals(externalObjectStoreFileHandle.getConcreteType(), snapshot.getConcreteType());
		assertEquals(externalObjectStoreFileHandle.getContentMd5(), snapshot.getContentMd5());
		assertEquals(externalObjectStoreFileHandle.getContentSize(), snapshot.getContentSize());
		assertEquals(externalObjectStoreFileHandle.getCreatedBy(), snapshot.getCreatedBy());
		assertEquals(externalObjectStoreFileHandle.getCreatedOn(), snapshot.getCreatedOn());
		assertEquals(externalObjectStoreFileHandle.getFileName(), snapshot.getFileName());
		assertEquals(externalObjectStoreFileHandle.getId(), snapshot.getId());
		assertEquals(externalObjectStoreFileHandle.getFileKey(), snapshot.getKey());
		assertEquals(externalObjectStoreFileHandle.getStorageLocationId(), snapshot.getStorageLocationId());
	}
}
| |
package mods.cartlivery.common.container;
import mods.cartlivery.CartConfig;
import mods.cartlivery.common.item.ItemSticker;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Items;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.InventoryCraftResult;
import net.minecraft.inventory.InventoryCrafting;
import net.minecraft.inventory.Slot;
import net.minecraft.inventory.SlotCrafting;
import net.minecraft.item.ItemStack;
/**
 * Container for the sticker "cutter" workbench: the player places paper in
 * an input slot and, given a selected pattern, can pick up a crafted
 * sticker from the output slot, damaging the held tool on each craft.
 *
 * Slot index layout (see constructor): 0-26 main player inventory,
 * 27-35 hotbar, 36 paper input, 37 sticker output.
 */
public class ContainerCutter extends Container {
	public String pattern = ""; // currently selected sticker pattern; empty = no output
	public EntityPlayer player;
	public int toolIndex; // hotbar index of the tool the player opened the cutter with
	public IInventory inventoryInput;
	public IInventory inventoryOutput;
	public ContainerCutter(EntityPlayer player) {
		this.player = player;
		toolIndex = player.inventory.currentItem;
		this.inventoryInput = new InventoryCrafting(this, 1, 1);
		this.inventoryOutput = new InventoryCraftResult();
		// Main player inventory: 3 rows x 9 columns (container indices 0-26).
		for (int i = 0; i < 3; ++i)
			for (int j = 0; j < 9; ++j)
				addSlotToContainer(new Slot(player.inventory, j + (i + 1) * 9, 8 + j * 18, 84 + i * 18));
		// Hotbar (container indices 27-35); the slot holding the tool is
		// locked so the player cannot remove it while the cutter is open.
		for (int i = 0; i < 9; ++i)
			if (i != toolIndex) {
				addSlotToContainer(new Slot(player.inventory, i, 8 + i * 18, 142));
			} else {
				addSlotToContainer(new Slot(player.inventory, i, 8 + i * 18, 142) {
					@Override
					public boolean canTakeStack(EntityPlayer par1EntityPlayer) {
						return false;
					}
				});
			}
		// Input slot (index 36): only accepts paper.
		addSlotToContainer(new Slot(inventoryInput, 0, 106, 36) {
			@Override
			public boolean isItemValid(ItemStack stack) {
				return stack.getItem() == Items.paper;
			}
		});
		// Output slot (index 37): taking a sticker damages the tool and
		// optionally plays the cutting sound.
		addSlotToContainer(new SlotCrafting(player, inventoryInput, inventoryOutput, 0, 144, 36) {
			@Override
			public void onPickupFromSlot(EntityPlayer player, ItemStack stack) {
				super.onPickupFromSlot(player, stack);
				damageTool();
				if(CartConfig.PLAY_SOUNDS)
					player.playSound("CartLivery:sticker_cut", 1.0F, 1.0F);
			}
		});
	}
	@Override
	public void onContainerClosed(EntityPlayer player) {
		super.onContainerClosed(player);
		// Return any leftover paper to the player instead of destroying it.
		ItemStack drop = inventoryInput.getStackInSlotOnClosing(0);
		if (drop != null) player.dropPlayerItemWithRandomChoice(drop, false);
	}
	@Override
	public void onCraftMatrixChanged(IInventory inv) {
		// Output exists only when there is paper in the input AND a pattern
		// has been selected.
		if (inventoryInput.getStackInSlot(0) == null || pattern.isEmpty()) {
			inventoryOutput.setInventorySlotContents(0, null);
		} else {
			inventoryOutput.setInventorySlotContents(0, ItemSticker.create(pattern));
		}
	}
	// Applies one point of damage to the held tool; destroys it and closes
	// the screen once its durability is exhausted.
	protected void damageTool() {
		ItemStack tool = player.inventory.getStackInSlot(toolIndex);
		tool.setItemDamage(tool.getItemDamage() + 1);
		if (tool.getItemDamage() > tool.getMaxDamage()) {
			player.inventory.setInventorySlotContents(toolIndex, null);
			player.closeScreen();
		}
	}
	@Override
	public boolean canInteractWith(EntityPlayer player) {
		return true; // no distance/ownership restriction for this container
	}
	@Override
	public ItemStack transferStackInSlot(EntityPlayer player, int slotNumber) {
		ItemStack itemStack = null;
		Slot slot = (Slot) this.inventorySlots.get(slotNumber);
		if (slot != null && slot.getHasStack()) {
			ItemStack itemStack1 = slot.getStack();
			itemStack = itemStack1.copy();
			// if we are in the Cutter
			if (slotNumber == 36 || slotNumber == 37) {
				// shift-click from cutter slots -> player inventory (0-35)
				if (!this.mergeItemStack(itemStack1, 0, 35, false)) {
					return null;
				}
				// otherwise just put it in one of the machine slots
			} else if (!this.mergeItemStack(itemStack1, 36, 37, false)) {
				return null;
			}
			if (itemStack1.stackSize == 0) {
				slot.putStack(null);
			} else {
				slot.onSlotChanged();
			}
			if (itemStack1.stackSize == itemStack.stackSize) {
				return null; // nothing moved
			}
			slot.onPickupFromSlot(player, itemStack1);
		}
		return itemStack;
	}
	/**
	 * Added validation of slot input
	 *
	 * Variant of the vanilla merge that honors Slot.isItemValid, so
	 * shift-clicking cannot place illegal items into the cutter slots.
	 *
	 * @author CrazyPants
	 */
	@Override
	protected boolean mergeItemStack(ItemStack par1ItemStack, int fromIndex, int toIndex, boolean reversOrder) {
		boolean result = false;
		int checkIndex = fromIndex;
		if (reversOrder) {
			checkIndex = toIndex - 1;
		}
		Slot slot;
		ItemStack itemstack1;
		// Pass 1: top up existing stacks of the same item.
		if (par1ItemStack.isStackable()) {
			while (par1ItemStack.stackSize > 0 && (!reversOrder && checkIndex < toIndex || reversOrder && checkIndex >= fromIndex)) {
				slot = (Slot) this.inventorySlots.get(checkIndex);
				itemstack1 = slot.getStack();
				if (itemstack1 != null && itemstack1.getItem() == par1ItemStack.getItem() && (!par1ItemStack.getHasSubtypes() || par1ItemStack.getItemDamage() == itemstack1.getItemDamage())
						&& ItemStack.areItemStackTagsEqual(par1ItemStack, itemstack1) && slot.isItemValid(par1ItemStack)) {
					int mergedSize = itemstack1.stackSize + par1ItemStack.stackSize;
					int maxStackSize = Math.min(par1ItemStack.getMaxStackSize(), slot.getSlotStackLimit());
					if (mergedSize <= maxStackSize) {
						par1ItemStack.stackSize = 0;
						itemstack1.stackSize = mergedSize;
						slot.onSlotChanged();
						result = true;
					} else if (itemstack1.stackSize < maxStackSize) {
						par1ItemStack.stackSize -= maxStackSize - itemstack1.stackSize;
						itemstack1.stackSize = maxStackSize;
						slot.onSlotChanged();
						result = true;
					}
				}
				if (reversOrder) {
					--checkIndex;
				} else {
					++checkIndex;
				}
			}
		}
		// Pass 2: place the remainder into the first empty valid slot.
		if (par1ItemStack.stackSize > 0) {
			if (reversOrder) {
				checkIndex = toIndex - 1;
			} else {
				checkIndex = fromIndex;
			}
			while (!reversOrder && checkIndex < toIndex || reversOrder && checkIndex >= fromIndex) {
				slot = (Slot) this.inventorySlots.get(checkIndex);
				itemstack1 = slot.getStack();
				if (itemstack1 == null && slot.isItemValid(par1ItemStack)) {
					ItemStack in = par1ItemStack.copy();
					in.stackSize = Math.min(in.stackSize, slot.getSlotStackLimit());
					slot.putStack(in);
					slot.onSlotChanged();
					if (in.stackSize >= par1ItemStack.stackSize) {
						par1ItemStack.stackSize = 0;
					} else {
						par1ItemStack.stackSize -= in.stackSize;
					}
					result = true;
					break;
				}
				if (reversOrder) {
					--checkIndex;
				} else {
					++checkIndex;
				}
			}
		}
		return result;
	}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.action;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.ingest.IngestStats;
import org.elasticsearch.xpack.core.action.AbstractGetResourcesRequest;
import org.elasticsearch.xpack.core.action.AbstractGetResourcesResponse;
import org.elasticsearch.xpack.core.action.util.QueryPage;
import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceStats;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
 * Action for retrieving usage statistics (ingest pipeline stats and
 * inference stats) for trained ML models.
 */
public class GetTrainedModelsStatsAction extends ActionType<GetTrainedModelsStatsAction.Response> {
    public static final GetTrainedModelsStatsAction INSTANCE = new GetTrainedModelsStatsAction();
    public static final String NAME = "cluster:monitor/xpack/ml/inference/stats/get";
    public static final ParseField MODEL_ID = new ParseField("model_id");
    public static final ParseField PIPELINE_COUNT = new ParseField("pipeline_count");
    public static final ParseField INFERENCE_STATS = new ParseField("inference_stats");
    private GetTrainedModelsStatsAction() {
        super(NAME, GetTrainedModelsStatsAction.Response::new);
    }
    /** Request resolving model IDs by expression; allows no matches by default. */
    public static class Request extends AbstractGetResourcesRequest {
        public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");
        public Request() {
            setAllowNoResources(true);
        }
        public Request(String id) {
            setResourceId(id);
            setAllowNoResources(true);
        }
        public Request(StreamInput in) throws IOException {
            super(in);
        }
        @Override
        public String getResourceIdField() {
            return TrainedModelConfig.MODEL_ID.getPreferredName();
        }
    }
    public static class Response extends AbstractGetResourcesResponse<Response.TrainedModelStats> {
        /**
         * Per-model stats entry. Wire order: modelId, ingestStats,
         * pipelineCount, optional inferenceStats — writeTo and the
         * StreamInput constructor must stay in sync.
         */
        public static class TrainedModelStats implements ToXContentObject, Writeable {
            private final String modelId;
            private final IngestStats ingestStats;
            private final InferenceStats inferenceStats;
            private final int pipelineCount;
            // Placeholder used when a model has no ingest stats, so
            // ingestStats is never null on the wire.
            private static final IngestStats EMPTY_INGEST_STATS = new IngestStats(new IngestStats.Stats(0, 0, 0, 0),
                Collections.emptyList(),
                Collections.emptyMap());
            /**
             * @param modelId       the model these stats belong to (required)
             * @param ingestStats   ingest stats, may be null (replaced by an empty placeholder)
             * @param pipelineCount number of pipelines referencing the model; must be >= 0
             * @param inferenceStats inference stats, may be null (serialized as optional)
             */
            public TrainedModelStats(String modelId, IngestStats ingestStats, int pipelineCount, InferenceStats inferenceStats) {
                this.modelId = Objects.requireNonNull(modelId);
                this.ingestStats = ingestStats == null ? EMPTY_INGEST_STATS : ingestStats;
                if (pipelineCount < 0) {
                    throw new ElasticsearchException("[{}] must be a greater than or equal to 0", PIPELINE_COUNT.getPreferredName());
                }
                this.pipelineCount = pipelineCount;
                this.inferenceStats = inferenceStats;
            }
            public TrainedModelStats(StreamInput in) throws IOException {
                modelId = in.readString();
                ingestStats = new IngestStats(in);
                pipelineCount = in.readVInt();
                this.inferenceStats = in.readOptionalWriteable(InferenceStats::new);
            }
            public String getModelId() {
                return modelId;
            }
            public IngestStats getIngestStats() {
                return ingestStats;
            }
            public int getPipelineCount() {
                return pipelineCount;
            }
            @Override
            public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
                builder.startObject();
                builder.field(MODEL_ID.getPreferredName(), modelId);
                builder.field(PIPELINE_COUNT.getPreferredName(), pipelineCount);
                if (pipelineCount > 0) {
                    // Ingest stats is a fragment
                    ingestStats.toXContent(builder, params);
                }
                if (this.inferenceStats != null) {
                    builder.field(INFERENCE_STATS.getPreferredName(), this.inferenceStats);
                }
                builder.endObject();
                return builder;
            }
            @Override
            public void writeTo(StreamOutput out) throws IOException {
                // Keep in sync with TrainedModelStats(StreamInput).
                out.writeString(modelId);
                ingestStats.writeTo(out);
                out.writeVInt(pipelineCount);
                out.writeOptionalWriteable(this.inferenceStats);
            }
            @Override
            public int hashCode() {
                return Objects.hash(modelId, ingestStats, pipelineCount, inferenceStats);
            }
            @Override
            public boolean equals(Object obj) {
                if (obj == null) {
                    return false;
                }
                if (getClass() != obj.getClass()) {
                    return false;
                }
                TrainedModelStats other = (TrainedModelStats) obj;
                return Objects.equals(this.modelId, other.modelId)
                    && Objects.equals(this.ingestStats, other.ingestStats)
                    && Objects.equals(this.pipelineCount, other.pipelineCount)
                    && Objects.equals(this.inferenceStats, other.inferenceStats);
            }
        }
        public static final ParseField RESULTS_FIELD = new ParseField("trained_model_stats");
        public Response(StreamInput in) throws IOException {
            super(in);
        }
        public Response(QueryPage<Response.TrainedModelStats> trainedModels) {
            super(trainedModels);
        }
        @Override
        protected Reader<Response.TrainedModelStats> getReader() {
            return Response.TrainedModelStats::new;
        }
        /**
         * Builder assembling a Response from per-model stat maps; the
         * result list is sorted by model ID.
         */
        public static class Builder {
            private long totalModelCount;
            private Set<String> expandedIds;
            private Map<String, IngestStats> ingestStatsMap;
            private Map<String, InferenceStats> inferenceStatsMap;
            public Builder setTotalModelCount(long totalModelCount) {
                this.totalModelCount = totalModelCount;
                return this;
            }
            public Builder setExpandedIds(Set<String> expandedIds) {
                this.expandedIds = expandedIds;
                return this;
            }
            public Set<String> getExpandedIds() {
                return this.expandedIds;
            }
            public Builder setIngestStatsByModelId(Map<String, IngestStats> ingestStatsByModelId) {
                this.ingestStatsMap = ingestStatsByModelId;
                return this;
            }
            public Builder setInferenceStatsByModelId(Map<String, InferenceStats> infereceStatsByModelId) {
                this.inferenceStatsMap = infereceStatsByModelId;
                return this;
            }
            public Response build() {
                List<TrainedModelStats> trainedModelStats = new ArrayList<>(expandedIds.size());
                expandedIds.forEach(id -> {
                    // Missing map entries are tolerated: null ingest stats
                    // yields a zero pipeline count and the empty placeholder.
                    IngestStats ingestStats = ingestStatsMap.get(id);
                    InferenceStats inferenceStats = inferenceStatsMap.get(id);
                    trainedModelStats.add(new TrainedModelStats(
                        id,
                        ingestStats,
                        ingestStats == null ?
                            0 :
                            ingestStats.getPipelineStats().size(),
                        inferenceStats));
                });
                trainedModelStats.sort(Comparator.comparing(TrainedModelStats::getModelId));
                return new Response(new QueryPage<>(trainedModelStats, totalModelCount, RESULTS_FIELD));
            }
        }
    }
}
| |
package me.vshl.apps.pictachio.fragments;
import android.Manifest;
import android.app.Fragment;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import com.koushikdutta.ion.Ion;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import me.vshl.apps.pictachio.R;
import me.vshl.apps.pictachio.activities.DetailActivity;
/**
* A simple {@link Fragment} subclass.
*/
/**
 * A simple {@link Fragment} subclass.
 *
 * Displays the device camera roll (images under DCIM/Camera) in a
 * two-column grid; tapping a tile opens the image in DetailActivity.
 * Requires READ_EXTERNAL_STORAGE and requests it on first launch.
 */
public class CameraRollFragment extends Fragment {
    private static final int REQUEST_READ_EXTERNAL_STORAGE = 1;
    private final ContentAdapter adapter = new ContentAdapter();
    private ArrayList<String> imageList = new ArrayList<>(); // absolute file paths backing the grid
    private View mLayout; // anchor view for Snackbars
    public CameraRollFragment() {
        // Required empty public constructor
    }
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mLayout = getActivity().findViewById(R.id.drawer_layout);
        if (ActivityCompat.checkSelfPermission(getActivity(),
                Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            requestReadExternalStoragePermission();
        }
        updateAdapter();
    }
    @Override
    public void onResume() {
        super.onResume();
        // Re-scan in case new photos were taken while the fragment was paused.
        updateAdapter();
    }
    @Override
    public void onDestroy() {
        // NOTE(review): nulling the list while the adapter still references
        // it could NPE if the adapter is queried afterwards — verify.
        imageList = null;
        super.onDestroy();
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        RecyclerView recyclerView = (RecyclerView) inflater.inflate(
                R.layout.recycler_view, container, false);
        recyclerView.setAdapter(adapter);
        recyclerView.setHasFixedSize(true);
        // Set padding for Tiles
        int tilePadding = getResources().getDimensionPixelSize(R.dimen.tile_padding);
        recyclerView.setPadding(tilePadding, tilePadding, tilePadding, tilePadding);
        recyclerView.setLayoutManager(new GridLayoutManager(getActivity(), 2));
        return recyclerView;
    }
    /**
     * Method to update the RecyclerView adapter
     * (kicks off the background media scan after a 1s delay).
     */
    public void updateAdapter() {
        Handler mHandler = new Handler();
        mHandler.postDelayed(loadImages, 1000);
    }
    // Asks for READ_EXTERNAL_STORAGE, showing a rationale Snackbar first
    // when the system recommends it.
    private void requestReadExternalStoragePermission() {
        if (ActivityCompat.shouldShowRequestPermissionRationale(getActivity(),
                Manifest.permission.READ_EXTERNAL_STORAGE)) {
            Snackbar.make(mLayout, R.string.permission_read_external_storage_rationale,
                    Snackbar.LENGTH_INDEFINITE)
                    .setAction(R.string.ok, new View.OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            ActivityCompat.requestPermissions(getActivity(),
                                    new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},
                                    REQUEST_READ_EXTERNAL_STORAGE);
                        }
                    })
                    .show();
        } else {
            ActivityCompat.requestPermissions(getActivity(),
                    new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},
                    REQUEST_READ_EXTERNAL_STORAGE);
        }
    }
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        if (requestCode == REQUEST_READ_EXTERNAL_STORAGE) {
            if (grantResults.length == 1 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                Snackbar.make(mLayout, R.string.permission_available_read_external_storage,
                        Snackbar.LENGTH_SHORT).show();
            } else {
                Snackbar.make(mLayout, R.string.permission_not_granted, Snackbar.LENGTH_SHORT)
                        .show();
            }
        } else {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        }
    }
    /**
     * Adapter to display recycler view
     * (one image tile per entry in imageList, loaded via Ion).
     */
    public class ContentAdapter extends RecyclerView.Adapter<ContentAdapter.ViewHolder> {
        public ContentAdapter() {
            // no-op
        }
        @Override
        public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
            Context context = parent.getContext();
            View itemView = LayoutInflater
                    .from(context)
                    .inflate(R.layout.fragment_recycler_grid, parent, false);
            return new ViewHolder(context, itemView);
        }
        @Override
        public void onBindViewHolder(ViewHolder holder, int position) {
            // Ion handles async decode + caching for the tile image.
            Ion.with(holder.getIv())
                    .centerCrop()
                    .placeholder(R.drawable.placeholder)
                    .error(R.drawable.error)
                    .load(imageList.get(position));
        }
        @Override
        public int getItemCount() {
            return imageList.size();
        }
        public class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
            private final ImageView iv;
            private final Context context;
            public ViewHolder(Context context, View itemView) {
                super(itemView);
                this.iv = (ImageView) itemView.findViewById(R.id.imageTile);
                this.context = context;
                itemView.setOnClickListener(this);
            }
            public ImageView getIv() {
                return iv;
            }
            @Override
            public void onClick(View v) {
                // Open the tapped image full-screen in DetailActivity.
                int position = getLayoutPosition();
                File imageFile = new File(imageList.get(position));
                Uri uri = Uri.fromFile(imageFile);
                Intent intent = new Intent(context, DetailActivity.class);
                intent.putExtra("image", uri.toString());
                context.startActivity(intent);
            }
        }
    }
    /**
     * Method to create a background task for the image loading
     * (queries MediaStore for images under DCIM/Camera, newest first).
     */
    private Runnable loadImages = new Runnable() {
        @SuppressWarnings("unchecked")
        @Override
        public void run() {
            new AsyncTask<ArrayList<String>, Void, ArrayList<String>>() {
                @Override
                protected ArrayList<String> doInBackground(ArrayList<String>... params) {
                    Cursor cursor;
                    ArrayList<String> imagePaths = new ArrayList<>();
                    Uri queryUri = MediaStore.Files.getContentUri("external");
                    cursor = getActivity().getContentResolver().query(queryUri,
                            null,
                            MediaStore.Images.Media.DATA + " like ? ",
                            new String[]{"%DCIM/Camera%"},
                            null);
                    while ((cursor != null && cursor.moveToNext())) {
                        int mediaType = cursor.getInt(cursor.getColumnIndex(
                                MediaStore.Files.FileColumns.MEDIA_TYPE));
                        // NOTE(review): the VIDEO branch of this filter is
                        // redundant — only IMAGE rows are collected below.
                        if (mediaType != MediaStore.Files.FileColumns.MEDIA_TYPE_IMAGE
                                && mediaType != MediaStore.Files.FileColumns.MEDIA_TYPE_VIDEO)
                            continue;
                        if (mediaType == MediaStore.Files.FileColumns.MEDIA_TYPE_IMAGE) {
                            String path = cursor.getString(
                                    cursor.getColumnIndexOrThrow(MediaStore.Files.FileColumns.DATA));
                            imagePaths.add(path);
                        }
                    }
                    if (cursor != null)
                        cursor.close();
                    // Reverse lexicographic order puts the newest files first.
                    Collections.sort(imagePaths, Collections.<String>reverseOrder());
                    return imagePaths;
                }
                @Override
                protected void onPostExecute(ArrayList<String> strings) {
                    super.onPostExecute(strings);
                    if (strings != null) {
                        imageList.clear();
                        imageList.addAll(strings);
                        adapter.notifyDataSetChanged();
                    }
                }
            }.execute();
        }
    };
}
| |
package app.andrey_voroshkov.chorus_laptimer;
import android.hardware.usb.UsbDeviceConnection;
import android.hardware.usb.UsbManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import com.hoho.android.usbserial.driver.UsbSerialDriver;
import com.hoho.android.usbserial.driver.UsbSerialPort;
import com.hoho.android.usbserial.driver.UsbSerialProber;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import static android.content.ContentValues.TAG;
/**
* Created by Andrey_Voroshkov on 10/15/2017.
*/
/**
 * USB serial implementation of {@link Connection}.
 *
 * <p>Opens the first available USB serial device at 115200 8N1 and shuttles
 * data between the device and a {@link ConnectionListener}. All listener
 * callbacks are posted through {@code mActivityHandler}, which is bound to
 * the looper of the thread that constructed this service, so the listener
 * never runs on the internal I/O threads.
 *
 * Created by Andrey_Voroshkov on 10/15/2017.
 */
public class USBService implements Connection{
    final static String KEY_MSG_TYPE = "msg";
    final static String KEY_MSG_DATA = "data";
    final static int MSG_ON_CONNECT = 7771;
    final static int MSG_ON_DISCONNECT = 7772;
    final static int MSG_ON_RECEIVE = 7773;
    final static int MSG_ON_CONNECTION_FAIL = 7774;
    final static int SEND_TIMEOUT = 100;
    final static int READ_TIMEOUT = 100;
    // Pause between polls while no port is open, so the listener thread
    // does not busy-spin at 100% CPU (see ListenerThread.run).
    final static int IDLE_POLL_DELAY_MS = 50;
    ConnectionListener mConnectionListener = null;
    Handler mActivityHandler = null;
    UsbManager mUsbManager = null;
    ListenerThread mListenerThread = null;
    SenderThread mSenderThread = null;
    volatile UsbSerialPort mPort = null;
    USBService(UsbManager usbManager) {
        // The Handler binds to the constructing thread's looper; all
        // listener callbacks are delivered there.
        mActivityHandler = new HandlerExtension();
        mUsbManager = usbManager;
    }
    /** Dispatches queued connection events to the registered listener. */
    private class HandlerExtension extends Handler {
        @Override
        public void handleMessage(Message message){
            if (mConnectionListener == null) return;
            Bundle msgBundle = message.getData();
            int msgType = msgBundle.getInt(KEY_MSG_TYPE);
            String data = msgBundle.getString(KEY_MSG_DATA);
            switch(msgType) {
                case MSG_ON_CONNECT:
                    mConnectionListener.onConnected(data);
                    break;
                case MSG_ON_CONNECTION_FAIL:
                    mConnectionListener.onConnectionFailed(data);
                    break;
                case MSG_ON_DISCONNECT:
                    mConnectionListener.onDisconnected();
                    break;
                case MSG_ON_RECEIVE:
                    mConnectionListener.onDataReceived(data);
                    break;
            }
        }
    }
    /**
     * Packs an event type and payload into a Message for mActivityHandler.
     * Uses Message.obtain() to reuse instances from the global message pool
     * instead of allocating a fresh Message per event.
     */
    private Message composeMessage(int type, String data) {
        Bundle msgBundle = new Bundle();
        msgBundle.putInt(KEY_MSG_TYPE, type);
        msgBundle.putString(KEY_MSG_DATA, data);
        Message msg = Message.obtain();
        msg.setData(msgBundle);
        return msg;
    }
    public void setConnectionListener(ConnectionListener listener) {
        mConnectionListener = listener;
    }
    /**
     * Opens the first available USB serial device and starts the sender and
     * listener threads. Reports success or failure asynchronously through
     * the connection listener. A no-op if a port is already open.
     */
    @Override
    public void connect() {
        if (mPort != null) return;
        if (mUsbManager == null) {
            mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECTION_FAIL, "UsbManager not found"));
            return;
        }
        List<UsbSerialDriver> availableDrivers = UsbSerialProber.getDefaultProber().findAllDrivers(mUsbManager);
        if (availableDrivers.isEmpty()) {
            mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECTION_FAIL, "No available USB Device Drivers found"));
            return;
        }
        // Open a connection to the first available driver.
        UsbSerialDriver driver = availableDrivers.get(0);
        UsbDeviceConnection connection = mUsbManager.openDevice(driver.getDevice());
        if (connection == null) {
            // You probably need to call UsbManager.requestPermission(driver.getDevice(), ..)
            mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECTION_FAIL, "Cannot open USB data port"));
            return;
        }
        // Most devices have just one port (port 0).
        mPort = driver.getPorts().get(0);
        try {
            mPort.open(connection);
            mPort.setParameters(115200, 8, UsbSerialPort.STOPBITS_1, UsbSerialPort.PARITY_NONE);
        } catch (IOException e) {
            Log.e(TAG, "Error setting up device: " + e.getMessage(), e);
            try {
                mPort.close();
            } catch (IOException e2) {
                // Ignore.
            }
            mPort = null;
            mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECTION_FAIL, e.toString()));
            return;
        }
        // Latch guarantees the sender thread's Handler exists before anyone
        // calls send(). Count is 0 when the sender already runs (reconnect).
        CountDownLatch senderInitializedSignal = new CountDownLatch(mSenderThread == null ? 1 : 0);
        if (mListenerThread == null) {
            mListenerThread = new ListenerThread();
            mListenerThread.start();
        }
        if (mSenderThread == null) {
            mSenderThread = new SenderThread(senderInitializedSignal);
            mSenderThread.start();
        }
        try {
            senderInitializedSignal.await();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        String deviceName;
        try {
            deviceName = mPort.getDriver().getClass().getSimpleName();
        }
        catch(Exception e) {
            deviceName = "";
        }
        mActivityHandler.sendMessage(composeMessage(MSG_ON_CONNECT, deviceName));
    }
    /**
     * Stops the listener thread, closes the port, and notifies the listener.
     * Safe to call from the I/O threads themselves (they invoke it on errors).
     */
    @Override
    public void disconnect() {
        // don't stop the sender thread as it uses a Looper and cannot be easily stopped (to the best of my current knowledge)
        // but stop the listener thread
        ListenerThread listener = mListenerThread;
        if (listener != null) {
            if (listener == Thread.currentThread()) {
                // disconnect() was invoked from the listener thread itself
                // (read error path). Joining here would make the thread wait
                // on its own termination and hang forever; it is about to
                // exit its run loop anyway, so just drop the reference.
                mListenerThread = null;
            } else {
                listener.interrupt();
                try {
                    listener.join();
                    mListenerThread = null;
                }
                catch(InterruptedException e) {
                    // preserve interrupt status for our caller
                    Thread.currentThread().interrupt();
                }
            }
        }
        if (mPort != null) {
            try {
                mPort.close();
            }
            catch(Exception e) {
                // TODO: handle exception here ?
            }
            finally {
                mPort = null;
            }
        }
        mActivityHandler.sendMessage(composeMessage(MSG_ON_DISCONNECT, "disconnect"));
    }
    /** Queues data for asynchronous transmission; no-op when not connected. */
    public void send(String data) {
        if (mPort == null) return;
        if (mSenderThread == null) return;
        mSenderThread.send(data);
    }
    /**
     * Dedicated writer thread. Runs a Looper so writes can be posted to it
     * as Runnables and executed serially off the caller's thread.
     */
    private class SenderThread extends Thread {
        CountDownLatch mInitializedSignal;
        SenderThread(CountDownLatch initializedSignal) {
            super();
            mInitializedSignal = initializedSignal;
        }
        private Handler mSendHandler;
        public void run () {
            // prepare handler to process send commands via messages to SenderThread
            Looper.prepare();
            mSendHandler = new Handler();
            // signal connect() that mSendHandler is now safe to use
            mInitializedSignal.countDown();
            Looper.loop();
        }
        public void send(final String data) {
            if (mPort == null) return;
            // TODO: check that there are no situations when we try sending to closed port
            mSendHandler.post(new Runnable() {
                @Override
                public void run() {
                    try {
                        mPort.write(data.getBytes(), SEND_TIMEOUT);
                    } catch (Exception e) {
                        disconnect();
                        //TODO: implement some handling here!
                    }
                }
            });
        }
    }
    /**
     * Dedicated reader thread. Polls the port and re-assembles newline-
     * terminated messages, forwarding each complete line to the listener.
     */
    private class ListenerThread extends Thread {
        byte[] mReceiveArray = new byte[4098];
        // Tail of the previous read that did not end in '\n'; it is prepended
        // to the first chunk of the next read.
        String mLastIncompleteChunk = "";
        /** Splits raw input on '\n' and forwards each complete line. */
        private void parseAndCallback(String str) {
            if (mConnectionListener == null || str.length() == 0) return;
            char lastChar = str.charAt(str.length()-1);
            boolean isLastChunkIncomplete = lastChar != '\n';
            String[] chunks = TextUtils.split(str, "\n");
            int lastChunkIndex = chunks.length - 1;
            if (!mLastIncompleteChunk.isEmpty()) {
                chunks[0] = mLastIncompleteChunk + chunks[0];
            }
            if (isLastChunkIncomplete) {
                // hold back the trailing partial line until more data arrives
                mLastIncompleteChunk = chunks[lastChunkIndex];
                chunks[lastChunkIndex] = "";
            } else {
                mLastIncompleteChunk = "";
            }
            for (String chunk : chunks) {
                if (chunk.isEmpty()) continue;
                mActivityHandler.sendMessage(composeMessage(MSG_ON_RECEIVE, chunk));
            }
        }
        public void run() {
            while (!isInterrupted()) {
                if (mPort == null) {
                    // No open port: sleep briefly instead of busy-spinning.
                    // An interrupt (from disconnect()) exits the loop.
                    try {
                        Thread.sleep(IDLE_POLL_DELAY_MS);
                    } catch (InterruptedException e) {
                        break;
                    }
                    continue;
                }
                try {
                    int len = mPort.read(mReceiveArray, READ_TIMEOUT);
                    if (len > 0) {
                        Charset charset = Charset.forName("ASCII");
                        String result = new String(mReceiveArray, 0, len, charset);
                        parseAndCallback(result);
                    }
                } catch (Exception e) {
                    disconnect();
                    //TODO: implement some handling here!
                    break;
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.xnet.provider.jsse;
import org.apache.harmony.xnet.provider.jsse.AlertException;
import org.apache.harmony.xnet.provider.jsse.SSLSessionImpl;
import org.apache.harmony.xnet.provider.jsse.SSLEngineDataStream;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ReadOnlyBufferException;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLEngineResult;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLSession;
/**
* Implementation of SSLEngine.
* @see javax.net.ssl.SSLEngine class documentation for more information.
*/
public class SSLEngineImpl extends SSLEngine {
    // indicates if peer mode was set
    private boolean peer_mode_was_set = false;
    // indicates if handshake has been started
    private boolean handshake_started = false;
    // indicates if inbound operations finished
    private boolean isInboundDone = false;
    // indicates if outbound operations finished
    private boolean isOutboundDone = false;
    // indicates if close_notify alert had been sent to another peer
    private boolean close_notify_was_sent = false;
    // indicates if close_notify alert had been received from another peer
    private boolean close_notify_was_received = false;
    // indicates if engine was closed (it means that
    // all the works on it are done, except (probably) some finalizing work)
    private boolean engine_was_closed = false;
    // indicates if engine was shut down (it means that
    // all cleaning work had been done and the engine is not operable)
    private boolean engine_was_shutteddown = false;
    // record protocol to be used
    protected SSLRecordProtocol recordProtocol;
    // input stream for record protocol
    private SSLBufferedInput recProtIS;
    // handshake protocol to be used
    private HandshakeProtocol handshakeProtocol;
    // alert protocol to be used
    private AlertProtocol alertProtocol;
    // place where application data will be stored
    private SSLEngineAppData appData;
    // outgoing application data stream
    private SSLEngineDataStream dataStream = new SSLEngineDataStream();
    // active session object
    private SSLSessionImpl session;
    // peer configuration parameters
    protected SSLParameters sslParameters;
    // in case of emergency situations when data could not be
    // placed in destination buffers it will be stored in these
    // fields until the next wrap() call can deliver it
    private byte[] remaining_wrapped_data = null;
    private byte[] remaining_hsh_data = null;
    // logger
    private Logger.Stream logger = Logger.getStream("engine");
    /**
     * Ctor
     * @param sslParameters: SSLParameters
     */
    protected SSLEngineImpl(SSLParameters sslParameters) {
        super();
        this.sslParameters = sslParameters;
    }
    /**
     * Ctor
     * @param host: String
     * @param port: int
     * @param sslParameters: SSLParameters
     */
    protected SSLEngineImpl(String host, int port, SSLParameters sslParameters) {
        super(host, port);
        this.sslParameters = sslParameters;
    }
    /**
     * Starts the handshake. On the first call this also lazily builds the
     * protocol machinery (handshake, alert, and record protocols); later
     * calls just (re)start the handshake protocol.
     * @throws SSLException
     * @see javax.net.ssl.SSLEngine#beginHandshake() method documentation
     * for more information
     */
    @Override
    public void beginHandshake() throws SSLException {
        if (engine_was_closed) {
            throw new SSLException("Engine has already been closed.");
        }
        if (!peer_mode_was_set) {
            throw new IllegalStateException("Client/Server mode was not set");
        }
        if (!handshake_started) {
            handshake_started = true;
            if (getUseClientMode()) {
                handshakeProtocol = new ClientHandshakeImpl(this);
            } else {
                handshakeProtocol = new ServerHandshakeImpl(this);
            }
            appData = new SSLEngineAppData();
            alertProtocol = new AlertProtocol();
            recProtIS = new SSLBufferedInput();
            recordProtocol = new SSLRecordProtocol(handshakeProtocol,
                    alertProtocol, recProtIS, appData);
        }
        handshakeProtocol.start();
    }
    /**
     * Closes inbound operations of this engine
     * @throws SSLException
     * @see javax.net.ssl.SSLEngine#closeInbound() method documentation
     * for more information
     */
    @Override
    public void closeInbound() throws SSLException {
        if (logger != null) {
            logger.println("closeInbound() "+isInboundDone);
        }
        if (isInboundDone) {
            return;
        }
        isInboundDone = true;
        engine_was_closed = true;
        if (handshake_started) {
            if (!close_notify_was_received) {
                // truncation attack suspected: the peer closed without
                // sending close_notify, so invalidate the session
                if (session != null) {
                    session.invalidate();
                }
                alertProtocol.alert(AlertProtocol.FATAL,
                        AlertProtocol.INTERNAL_ERROR);
                throw new SSLException("Inbound is closed before close_notify "
                        + "alert has been received.");
            }
        } else {
            // engine is closing before initial handshake has been made
            shutdown();
        }
    }
    /**
     * Closes outbound operations of this engine
     * @see javax.net.ssl.SSLEngine#closeOutbound() method documentation
     * for more information
     */
    @Override
    public void closeOutbound() {
        if (logger != null) {
            logger.println("closeOutbound() "+isOutboundDone);
        }
        if (isOutboundDone) {
            return;
        }
        isOutboundDone = true;
        if (handshake_started) {
            // initial handshake had been started: queue a close_notify
            // alert to be delivered by the next wrap() call
            alertProtocol.alert(AlertProtocol.WARNING,
                    AlertProtocol.CLOSE_NOTIFY);
            close_notify_was_sent = true;
        } else {
            // engine is closing before initial handshake has been made
            shutdown();
        }
        engine_was_closed = true;
    }
    /**
     * Returns handshake's delegated tasks to be run
     * @return the delegated task to be executed.
     * @see javax.net.ssl.SSLEngine#getDelegatedTask() method documentation
     * for more information
     */
    @Override
    public Runnable getDelegatedTask() {
        return handshakeProtocol.getTask();
    }
    /**
     * Returns names of supported cipher suites.
     * @return array of strings containing the names of supported cipher suites
     * @see javax.net.ssl.SSLEngine#getSupportedCipherSuites() method
     * documentation for more information
     */
    @Override
    public String[] getSupportedCipherSuites() {
        return CipherSuite.getSupportedCipherSuiteNames();
    }
    // --------------- SSLParameters based methods ---------------------
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#getEnabledCipherSuites() method
     * documentation for more information
     */
    @Override
    public String[] getEnabledCipherSuites() {
        return sslParameters.getEnabledCipherSuites();
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#setEnabledCipherSuites(String) method
     * documentation for more information
     */
    @Override
    public void setEnabledCipherSuites(String[] suites) {
        sslParameters.setEnabledCipherSuites(suites);
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#getSupportedProtocols() method
     * documentation for more information
     */
    @Override
    public String[] getSupportedProtocols() {
        return ProtocolVersion.supportedProtocols.clone();
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#getEnabledProtocols() method
     * documentation for more information
     */
    @Override
    public String[] getEnabledProtocols() {
        return sslParameters.getEnabledProtocols();
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#setEnabledProtocols(String) method
     * documentation for more information
     */
    @Override
    public void setEnabledProtocols(String[] protocols) {
        sslParameters.setEnabledProtocols(protocols);
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#setUseClientMode(boolean) method
     * documentation for more information
     */
    @Override
    public void setUseClientMode(boolean mode) {
        if (handshake_started) {
            throw new IllegalArgumentException(
            "Could not change the mode after the initial handshake has begun.");
        }
        sslParameters.setUseClientMode(mode);
        peer_mode_was_set = true;
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#getUseClientMode() method
     * documentation for more information
     */
    @Override
    public boolean getUseClientMode() {
        return sslParameters.getUseClientMode();
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#setNeedClientAuth(boolean) method
     * documentation for more information
     */
    @Override
    public void setNeedClientAuth(boolean need) {
        sslParameters.setNeedClientAuth(need);
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#getNeedClientAuth() method
     * documentation for more information
     */
    @Override
    public boolean getNeedClientAuth() {
        return sslParameters.getNeedClientAuth();
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#setWantClientAuth(boolean) method
     * documentation for more information
     */
    @Override
    public void setWantClientAuth(boolean want) {
        sslParameters.setWantClientAuth(want);
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#getWantClientAuth() method
     * documentation for more information
     */
    @Override
    public boolean getWantClientAuth() {
        return sslParameters.getWantClientAuth();
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#setEnableSessionCreation(boolean) method
     * documentation for more information
     */
    @Override
    public void setEnableSessionCreation(boolean flag) {
        sslParameters.setEnableSessionCreation(flag);
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#getEnableSessionCreation() method
     * documentation for more information
     */
    @Override
    public boolean getEnableSessionCreation() {
        return sslParameters.getEnableSessionCreation();
    }
    // -----------------------------------------------------------------
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#getHandshakeStatus() method
     * documentation for more information
     */
    @Override
    public SSLEngineResult.HandshakeStatus getHandshakeStatus() {
        if (!handshake_started || engine_was_shutteddown) {
            // initial handshake has not been started yet
            return SSLEngineResult.HandshakeStatus.NOT_HANDSHAKING;
        }
        if (alertProtocol.hasAlert()) {
            // need to send an alert
            return SSLEngineResult.HandshakeStatus.NEED_WRAP;
        }
        if (close_notify_was_sent && !close_notify_was_received) {
            // waiting for "close_notify" response
            return SSLEngineResult.HandshakeStatus.NEED_UNWRAP;
        }
        return handshakeProtocol.getStatus();
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#getSession() method
     * documentation for more information
     */
    @Override
    public SSLSession getSession() {
        if (session != null) {
            return session;
        }
        return SSLSessionImpl.NULL_SESSION;
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#isInboundDone() method
     * documentation for more information
     */
    @Override
    public boolean isInboundDone() {
        return isInboundDone || engine_was_closed;
    }
    /**
     * This method works according to the specification of implemented class.
     * @see javax.net.ssl.SSLEngine#isOutboundDone() method
     * documentation for more information
     */
    @Override
    public boolean isOutboundDone() {
        return isOutboundDone;
    }
    /**
     * Decodes one complete SSL/TLS record provided in the source buffer.
     * If decoded record contained application data, this data will
     * be placed in the destination buffers.
     * For more information about TLS record fragmentation see
     * TLS v 1 specification (http://www.ietf.org/rfc/rfc2246.txt) p 6.2.
     * @param src source buffer containing SSL/TLS record.
     * @param dsts destination buffers to place received application data.
     * @see javax.net.ssl.SSLEngine#unwrap(ByteBuffer,ByteBuffer[],int,int)
     * method documentation for more information
     */
    @Override
    public SSLEngineResult unwrap(ByteBuffer src, ByteBuffer[] dsts,
            int offset, int length) throws SSLException {
        if (engine_was_shutteddown) {
            return new SSLEngineResult(SSLEngineResult.Status.CLOSED,
                    SSLEngineResult.HandshakeStatus.NOT_HANDSHAKING, 0, 0);
        }
        if ((src == null) || (dsts == null)) {
            throw new IllegalStateException(
                    "Some of the input parameters are null");
        }
        if (!handshake_started) {
            beginHandshake();
        }
        SSLEngineResult.HandshakeStatus handshakeStatus = getHandshakeStatus();
        // If it is the initial handshake or connection closure stage,
        // check if this call was made in spite of handshake status
        if ((session == null || engine_was_closed) && (
                    handshakeStatus.equals(
                        SSLEngineResult.HandshakeStatus.NEED_WRAP) ||
                    handshakeStatus.equals(
                        SSLEngineResult.HandshakeStatus.NEED_TASK))) {
            return new SSLEngineResult(
                    getEngineStatus(), handshakeStatus, 0, 0);
        }
        if (src.remaining() < recordProtocol.getMinRecordSize()) {
            return new SSLEngineResult(
                    SSLEngineResult.Status.BUFFER_UNDERFLOW,
                    getHandshakeStatus(), 0, 0);
        }
        try {
            // mark so src can be reset if the record turns out incomplete
            src.mark();
            // check the destination buffers and count their capacity
            int capacity = 0;
            for (int i=offset; i<offset+length; i++) {
                if (dsts[i] == null) {
                    throw new IllegalStateException(
                            "Some of the input parameters are null");
                }
                if (dsts[i].isReadOnly()) {
                    throw new ReadOnlyBufferException();
                }
                capacity += dsts[i].remaining();
            }
            if (capacity < recordProtocol.getDataSize(src.remaining())) {
                return new SSLEngineResult(
                        SSLEngineResult.Status.BUFFER_OVERFLOW,
                        getHandshakeStatus(), 0, 0);
            }
            recProtIS.setSourceBuffer(src);
            // unwrap the record contained in source buffer, pass it
            // to appropriate client protocol (alert, handshake, or app)
            // and retrieve the type of unwrapped data
            int type = recordProtocol.unwrap();
            // process the data and return the result
            switch (type) {
                case ContentType.HANDSHAKE:
                case ContentType.CHANGE_CIPHER_SPEC:
                    if (handshakeProtocol.getStatus().equals(
                                SSLEngineResult.HandshakeStatus.FINISHED)) {
                        // the handshake just completed: capture the session
                        session = recordProtocol.getSession();
                    }
                    break;
                case ContentType.APPLICATION_DATA:
                    break;
                case ContentType.ALERT:
                    if (alertProtocol.isFatalAlert()) {
                        alertProtocol.setProcessed();
                        if (session != null) {
                            session.invalidate();
                        }
                        String description = "Fatal alert received "
                            + alertProtocol.getAlertDescription();
                        shutdown();
                        throw new SSLException(description);
                    } else {
                        if (logger != null) {
                            logger.println("Warning allert has been received: "
                                + alertProtocol.getAlertDescription());
                        }
                        switch(alertProtocol.getDescriptionCode()) {
                            case AlertProtocol.CLOSE_NOTIFY:
                                alertProtocol.setProcessed();
                                close_notify_was_received = true;
                                if (!close_notify_was_sent) {
                                    // peer initiated closure: respond with our
                                    // own close_notify, then close inbound
                                    closeOutbound();
                                    closeInbound();
                                } else {
                                    // our close_notify was already sent, so
                                    // the closure exchange is complete
                                    closeInbound();
                                    shutdown();
                                }
                                break;
                            case AlertProtocol.NO_RENEGOTIATION:
                                alertProtocol.setProcessed();
                                if (session == null) {
                                    // message received during the initial
                                    // handshake
                                    throw new AlertException(
                                            AlertProtocol.HANDSHAKE_FAILURE,
                                            new SSLHandshakeException(
                                                "Received no_renegotiation "
                                                + "during the initial handshake"));
                                } else {
                                    // just stop the handshake
                                    handshakeProtocol.stop();
                                }
                                break;
                            default:
                                alertProtocol.setProcessed();
                        }
                    }
                    break;
            }
            return new SSLEngineResult(getEngineStatus(), getHandshakeStatus(),
                    recProtIS.consumed(),
                    // place the app. data (if any) into the dest. buffers
                    // and get the number of produced bytes:
                    appData.placeTo(dsts, offset, length));
        } catch (BufferUnderflowException e) {
            // there was not enough data in the source buffer to make a complete packet
            src.reset();
            return new SSLEngineResult(SSLEngineResult.Status.BUFFER_UNDERFLOW,
                    getHandshakeStatus(), 0, 0);
        } catch (AlertException e) {
            // fatal alert occurred
            alertProtocol.alert(AlertProtocol.FATAL, e.getDescriptionCode());
            engine_was_closed = true;
            src.reset();
            if (session != null) {
                session.invalidate();
            }
            // shutdown work will be made after the alert will be sent
            // to another peer (by wrap method)
            throw e.getReason();
        } catch (SSLException e) {
            throw e;
        } catch (IOException e) {
            alertProtocol.alert(AlertProtocol.FATAL,
                    AlertProtocol.INTERNAL_ERROR);
            engine_was_closed = true;
            // shutdown work will be made after the alert will be sent
            // to another peer (by wrap method)
            throw new SSLException(e.getMessage());
        }
    }
    /**
     * Encodes the application data into SSL/TLS record. If handshake status
     * of the engine differs from NOT_HANDSHAKING the operation can work
     * without consuming of the source data.
     * For more information about TLS record fragmentation see
     * TLS v 1 specification (http://www.ietf.org/rfc/rfc2246.txt) p 6.2.
     * @param srcs the source buffers with application data to be encoded
     * into SSL/TLS record.
     * @param offset the offset in the destination buffers array pointing to
     * the first buffer with the source data.
     * @param len specifies the maximum number of buffers to be procesed.
     * @param dst the destination buffer where encoded data will be placed.
     * @see javax.net.ssl.SSLEngine#wrap(ByteBuffer[],int,int,ByteBuffer) method
     * documentation for more information
     */
    @Override
    public SSLEngineResult wrap(ByteBuffer[] srcs, int offset,
            int len, ByteBuffer dst) throws SSLException {
        if (engine_was_shutteddown) {
            return new SSLEngineResult(SSLEngineResult.Status.CLOSED,
                    SSLEngineResult.HandshakeStatus.NOT_HANDSHAKING, 0, 0);
        }
        if ((srcs == null) || (dst == null)) {
            throw new IllegalStateException(
                    "Some of the input parameters are null");
        }
        if (dst.isReadOnly()) {
            throw new ReadOnlyBufferException();
        }
        if (!handshake_started) {
            beginHandshake();
        }
        SSLEngineResult.HandshakeStatus handshakeStatus = getHandshakeStatus();
        // If it is an initial handshake or connection closure stage,
        // check if this call was made in spite of handshake status
        if ((session == null || engine_was_closed) && (
                    handshakeStatus.equals(
                        SSLEngineResult.HandshakeStatus.NEED_UNWRAP) ||
                    handshakeStatus.equals(
                        SSLEngineResult.HandshakeStatus.NEED_TASK))) {
            return new SSLEngineResult(
                    getEngineStatus(), handshakeStatus, 0, 0);
        }
        int capacity = dst.remaining();
        int produced = 0;
        if (alertProtocol.hasAlert()) {
            // we have an alert to be sent; alerts take priority over
            // application and handshake data
            if (capacity < recordProtocol.getRecordSize(2)) {
                return new SSLEngineResult(
                        SSLEngineResult.Status.BUFFER_OVERFLOW,
                        handshakeStatus, 0, 0);
            }
            byte[] alert_data = alertProtocol.wrap();
            // place the alert record into destination
            dst.put(alert_data);
            if (alertProtocol.isFatalAlert()) {
                alertProtocol.setProcessed();
                if (session != null) {
                    session.invalidate();
                }
                // fatal alert has been sent, so shut down the engine
                shutdown();
                return new SSLEngineResult(
                        SSLEngineResult.Status.CLOSED,
                        SSLEngineResult.HandshakeStatus.NOT_HANDSHAKING,
                        0, alert_data.length);
            } else {
                alertProtocol.setProcessed();
                // check if the works on this engine have been done
                if (close_notify_was_sent && close_notify_was_received) {
                    shutdown();
                    return new SSLEngineResult(SSLEngineResult.Status.CLOSED,
                            SSLEngineResult.HandshakeStatus.NOT_HANDSHAKING,
                            0, alert_data.length);
                }
                return new SSLEngineResult(
                        getEngineStatus(),
                        getHandshakeStatus(),
                        0, alert_data.length);
            }
        }
        if (capacity < recordProtocol.getMinRecordSize()) {
            if (logger != null) {
                logger.println("Capacity of the destination("
                        +capacity+") < MIN_PACKET_SIZE("
                        +recordProtocol.getMinRecordSize()+")");
            }
            return new SSLEngineResult(SSLEngineResult.Status.BUFFER_OVERFLOW,
                    handshakeStatus, 0, 0);
        }
        try {
            if (!handshakeStatus.equals(
                        SSLEngineResult.HandshakeStatus.NEED_WRAP)) {
                // so we wrap application data
                dataStream.setSourceBuffers(srcs, offset, len);
                if ((capacity < SSLRecordProtocol.MAX_SSL_PACKET_SIZE) &&
                        (capacity < recordProtocol.getRecordSize(
                                dataStream.available()))) {
                    if (logger != null) {
                        logger.println("The destination buffer("
                                +capacity+") can not take the resulting packet("
                                + recordProtocol.getRecordSize(
                                    dataStream.available())+")");
                    }
                    return new SSLEngineResult(
                            SSLEngineResult.Status.BUFFER_OVERFLOW,
                            handshakeStatus, 0, 0);
                }
                if (remaining_wrapped_data == null) {
                    remaining_wrapped_data =
                        recordProtocol.wrap(ContentType.APPLICATION_DATA,
                                dataStream);
                }
                if (capacity < remaining_wrapped_data.length) {
                    // It should never happen because we checked the destination
                    // buffer size, but there is a possibility
                    // (if dest buffer was filled outside)
                    // so we just remember the data into remaining_wrapped_data
                    // and will enclose it during the next call
                    return new SSLEngineResult(
                            SSLEngineResult.Status.BUFFER_OVERFLOW,
                            handshakeStatus, dataStream.consumed(), 0);
                } else {
                    dst.put(remaining_wrapped_data);
                    produced = remaining_wrapped_data.length;
                    remaining_wrapped_data = null;
                    return new SSLEngineResult(getEngineStatus(),
                            handshakeStatus, dataStream.consumed(), produced);
                }
            } else {
                if (remaining_hsh_data == null) {
                    remaining_hsh_data = handshakeProtocol.wrap();
                }
                if (capacity < remaining_hsh_data.length) {
                    // It should never happen because we checked the destination
                    // buffer size, but there is a possibility
                    // (if dest buffer was filled outside)
                    // so we just remember the data into remaining_hsh_data
                    // and will enclose it during the next call
                    return new SSLEngineResult(
                            SSLEngineResult.Status.BUFFER_OVERFLOW,
                            handshakeStatus, 0, 0);
                } else {
                    dst.put(remaining_hsh_data);
                    produced = remaining_hsh_data.length;
                    remaining_hsh_data = null;
                    handshakeStatus = handshakeProtocol.getStatus();
                    if (handshakeStatus.equals(
                                SSLEngineResult.HandshakeStatus.FINISHED)) {
                        // the handshake just completed: capture the session
                        session = recordProtocol.getSession();
                    }
                }
                return new SSLEngineResult(
                        getEngineStatus(), getHandshakeStatus(), 0, produced);
            }
        } catch (AlertException e) {
            // fatal alert occurred
            alertProtocol.alert(AlertProtocol.FATAL, e.getDescriptionCode());
            engine_was_closed = true;
            if (session != null) {
                session.invalidate();
            }
            // shutdown work will be made after the alert will be sent
            // to another peer (by wrap method)
            throw e.getReason();
        }
    }
    // Shuts down the engine and performs all cleanup work.
    private void shutdown() {
        engine_was_closed = true;
        engine_was_shutteddown = true;
        isOutboundDone = true;
        isInboundDone = true;
        if (handshake_started) {
            alertProtocol.shutdown();
            alertProtocol = null;
            handshakeProtocol.shutdown();
            handshakeProtocol = null;
            recordProtocol.shutdown();
            recordProtocol = null;
        }
    }
    // Maps the closed flag to the engine-level result status.
    private SSLEngineResult.Status getEngineStatus() {
        return (engine_was_closed)
            ? SSLEngineResult.Status.CLOSED
            : SSLEngineResult.Status.OK;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.xcontent;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParseException;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Constants;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Instant;
import org.joda.time.ReadableInstant;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
public abstract class BaseXContentTestCase extends ESTestCase {
protected abstract XContentType xcontentType();
/**
 * Creates a fresh {@link XContentBuilder} for the content type under test.
 *
 * @throws IOException if the underlying generator cannot be created
 */
private XContentBuilder builder() throws IOException {
    final XContent xContent = xcontentType().xContent();
    return XContentBuilder.builder(xContent);
}
// A builder must report the concrete XContentType supplied by the subclass.
public void testContentType() throws IOException {
assertThat(builder().contentType(), equalTo(xcontentType()));
}
// Object start/end must be balanced: serialising an unclosed object fails,
// ending a never-started object fails, and named objects require a field context.
public void testStartEndObject() throws IOException {
// Unclosed startObject must be rejected however the builder is rendered.
expectUnclosedException(() -> BytesReference.bytes(builder().startObject()));
expectUnclosedException(() -> builder().startObject().close());
expectUnclosedException(() -> Strings.toString(builder().startObject()));
// endObject without a matching startObject must be rejected.
expectObjectException(() -> BytesReference.bytes(builder().endObject()));
expectObjectException(() -> builder().endObject().close());
expectObjectException(() -> Strings.toString(builder().endObject()));
// A named object at the root (no enclosing object) is a value-context error.
expectValueException(() -> builder().startObject("foo").endObject());
// Field names must not be null.
expectNonNullFieldException(() -> builder().startObject().startObject(null));
// Well-formed empty and nested objects.
assertResult("{}", () -> builder().startObject().endObject());
assertResult("{'foo':{}}", () -> builder().startObject().startObject("foo").endObject().endObject());
assertResult("{'foo':{'bar':{}}}", () -> builder()
.startObject()
.startObject("foo")
.startObject("bar")
.endObject()
.endObject()
.endObject());
}
// Array start/end must be balanced, mirroring the object rules above;
// anonymous arrays additionally require an enclosing field name.
public void testStartEndArray() throws IOException {
// Unclosed startArray must be rejected however the builder is rendered.
expectUnclosedException(() -> BytesReference.bytes(builder().startArray()));
expectUnclosedException(() -> builder().startArray().close());
expectUnclosedException(() -> Strings.toString(builder().startArray()));
// endArray without a matching startArray must be rejected.
expectArrayException(() -> BytesReference.bytes(builder().endArray()));
expectArrayException(() -> builder().endArray().close());
expectArrayException(() -> Strings.toString(builder().endArray()));
// A named array at the root (no enclosing object) is a value-context error.
expectValueException(() -> builder().startArray("foo").endObject());
// An anonymous array directly inside an object lacks a field name.
expectFieldException(() -> builder().startObject().startArray().endArray().endObject());
// Field names must not be null.
expectNonNullFieldException(() -> builder().startObject().startArray(null).endArray().endObject());
// Well-formed empty and populated arrays.
assertResult("{'foo':[]}", () -> builder().startObject().startArray("foo").endArray().endObject());
assertResult("{'foo':[1,2,3]}", () -> builder()
.startObject()
.startArray("foo")
.value(1)
.value(2)
.value(3)
.endArray()
.endObject());
}
// field(name) opens a field context: it is invalid at the root, rejects null
// names, and leaves the object unclosed until a value and endObject follow.
public void testField() throws IOException {
expectValueException(() -> BytesReference.bytes(builder().field("foo")));
expectNonNullFieldException(() -> BytesReference.bytes(builder().field(null)));
expectUnclosedException(() -> BytesReference.bytes(builder().startObject().field("foo")));
assertResult("{'foo':'bar'}", () -> builder().startObject().field("foo").value("bar").endObject());
}
// nullField(name) writes an explicit JSON null and follows the same context
// rules as field(name).
public void testNullField() throws IOException {
expectValueException(() -> BytesReference.bytes(builder().nullField("foo")));
expectNonNullFieldException(() -> BytesReference.bytes(builder().nullField(null)));
expectUnclosedException(() -> BytesReference.bytes(builder().startObject().nullField("foo")));
assertResult("{'foo':null}", () -> builder().startObject().nullField("foo").endObject());
}
// nullValue() is the value-position counterpart of nullField(name).
public void testNullValue() throws IOException {
assertResult("{'foo':null}", () -> builder().startObject().field("foo").nullValue().endObject());
}
// Boolean fields: boxed null renders as JSON null, primitives and arrays
// render literally, and a null boolean[] renders as null (not an empty array).
public void testBooleans() throws IOException {
assertResult("{'boolean':null}", () -> builder().startObject().field("boolean", (Boolean) null).endObject());
assertResult("{'boolean':true}", () -> builder().startObject().field("boolean", Boolean.TRUE).endObject());
assertResult("{'boolean':false}", () -> builder().startObject().field("boolean", Boolean.FALSE).endObject());
assertResult("{'boolean':[true,false,true]}", () -> builder().startObject().array("boolean", true, false, true).endObject());
assertResult("{'boolean':[false,true]}", () -> builder().startObject().array("boolean", new boolean[]{false, true}).endObject());
assertResult("{'boolean':null}", () -> builder().startObject().array("boolean", (boolean[]) null).endObject());
assertResult("{'boolean':[]}", () -> builder().startObject().array("boolean", new boolean[]{}).endObject());
assertResult("{'boolean':null}", () -> builder().startObject().field("boolean").value((Boolean) null).endObject());
assertResult("{'boolean':true}", () -> builder().startObject().field("boolean").value(Boolean.TRUE).endObject());
assertResult("{'boolean':false}", () -> builder().startObject().field("boolean").value(Boolean.FALSE).endObject());
}
// Byte fields render as plain JSON numbers; a boxed null renders as null.
public void testBytes() throws IOException {
assertResult("{'byte':null}", () -> builder().startObject().field("byte", (Byte) null).endObject());
assertResult("{'byte':0}", () -> builder().startObject().field("byte", (byte) 0).endObject());
assertResult("{'byte':1}", () -> builder().startObject().field("byte", (byte) 1).endObject());
assertResult("{'byte':null}", () -> builder().startObject().field("byte").value((Byte) null).endObject());
assertResult("{'byte':0}", () -> builder().startObject().field("byte").value((byte) 0).endObject());
assertResult("{'byte':1}", () -> builder().startObject().field("byte").value((byte) 1).endObject());
}
// Double fields: nulls, scalars, varargs/primitive arrays, and the extreme
// values MAX_VALUE / MIN_VALUE, which must round-trip in scientific notation.
public void testDoubles() throws IOException {
assertResult("{'double':null}", () -> builder().startObject().field("double", (Double) null).endObject());
assertResult("{'double':42.5}", () -> builder().startObject().field("double", Double.valueOf(42.5)).endObject());
assertResult("{'double':1.2}", () -> builder().startObject().field("double", 1.2).endObject());
// Mixed varargs: the int literal 45 stays integral in the output.
assertResult("{'double':[42.0,43.0,45]}", () -> builder().startObject().array("double", 42.0, 43.0, 45).endObject());
assertResult("{'double':null}", () -> builder().startObject().array("double", (double[]) null).endObject());
assertResult("{'double':[]}", () -> builder().startObject().array("double", new double[]{}).endObject());
assertResult("{'double':null}", () -> builder().startObject().field("double").value((Double) null).endObject());
assertResult("{'double':0.001}", () -> builder().startObject().field("double").value(0.001).endObject());
assertResult("{'double':[1.7976931348623157E308,4.9E-324]}", () -> builder()
.startObject()
.array("double", new double[]{Double.MAX_VALUE, Double.MIN_VALUE})
.endObject());
}
// Float fields: nulls, scalars, arrays, and extreme values; note that float
// precision shows in the output (e.g. 45.66666667f renders as 45.666668).
public void testFloats() throws IOException {
assertResult("{'float':null}", () -> builder().startObject().field("float", (Float) null).endObject());
assertResult("{'float':42.5}", () -> builder().startObject().field("float", Float.valueOf(42.5f)).endObject());
assertResult("{'float':1.2}", () -> builder().startObject().field("float", 1.2f).endObject());
assertResult("{'float':null}", () -> builder().startObject().array("float", (float[]) null).endObject());
assertResult("{'float':[]}", () -> builder().startObject().array("float", new float[]{}).endObject());
assertResult("{'float':null}", () -> builder().startObject().field("float").value((Float) null).endObject());
assertResult("{'float':9.9E-7}", () -> builder().startObject().field("float").value(0.00000099f).endObject());
assertResult("{'float':[42.0,43.0,45.666668]}", () -> builder()
.startObject()
.array("float", 42.0f, 43.0f, 45.66666667f)
.endObject());
assertResult("{'float':[3.4028235E38,1.4E-45]}", () -> builder()
.startObject()
.array("float", new float[]{Float.MAX_VALUE, Float.MIN_VALUE})
.endObject());
}
// Integer fields: nulls, scalars, varargs/primitive arrays, and the full
// int range (MAX_VALUE / MIN_VALUE render without loss).
public void testIntegers() throws IOException {
assertResult("{'integer':null}", () -> builder().startObject().field("integer", (Integer) null).endObject());
assertResult("{'integer':42}", () -> builder().startObject().field("integer", Integer.valueOf(42)).endObject());
assertResult("{'integer':3}", () -> builder().startObject().field("integer", 3).endObject());
assertResult("{'integer':[1,3,5,7,11]}", () -> builder().startObject().array("integer", 1, 3, 5, 7, 11).endObject());
assertResult("{'integer':null}", () -> builder().startObject().array("integer", (int[]) null).endObject());
assertResult("{'integer':[]}", () -> builder().startObject().array("integer", new int[]{}).endObject());
assertResult("{'integer':null}", () -> builder().startObject().field("integer").value((Integer) null).endObject());
assertResult("{'integer':42}", () -> builder().startObject().field("integer").value(42).endObject());
assertResult("{'integer':[2147483647,-2147483648]}", () -> builder()
.startObject()
.array("integer", new int[]{Integer.MAX_VALUE, Integer.MIN_VALUE})
.endObject());
}
// Long fields: nulls, scalars up to Long.MAX_VALUE, and arrays.
public void testLongs() throws IOException {
assertResult("{'long':null}", () -> builder().startObject().field("long", (Long) null).endObject());
assertResult("{'long':42}", () -> builder().startObject().field("long", Long.valueOf(42L)).endObject());
assertResult("{'long':9223372036854775807}", () -> builder().startObject().field("long", 9_223_372_036_854_775_807L).endObject());
assertResult("{'long':[1,3,5,7,11]}", () -> builder().startObject().array("long", 1L, 3L, 5L, 7L, 11L).endObject());
assertResult("{'long':null}", () -> builder().startObject().array("long", (long[]) null).endObject());
assertResult("{'long':[]}", () -> builder().startObject().array("long", new long[]{}).endObject());
assertResult("{'long':null}", () -> builder().startObject().field("long").value((Long) null).endObject());
assertResult("{'long':42}", () -> builder().startObject().field("long").value(42).endObject());
// int extremes widened into a long[] must render as the same integers.
assertResult("{'long':[2147483647,-2147483648]}", () -> builder()
.startObject()
.array("long", new long[]{Integer.MAX_VALUE, Integer.MIN_VALUE})
.endObject());
}
// Short fields: nulls, scalars, varargs/primitive arrays, and the full
// short range (MAX_VALUE / MIN_VALUE).
public void testShorts() throws IOException {
assertResult("{'short':null}", () -> builder().startObject().field("short", (Short) null).endObject());
assertResult("{'short':5000}", () -> builder().startObject().field("short", Short.valueOf((short) 5000)).endObject());
assertResult("{'short':null}", () -> builder().startObject().array("short", (short[]) null).endObject());
assertResult("{'short':[]}", () -> builder().startObject().array("short", new short[]{}).endObject());
assertResult("{'short':null}", () -> builder().startObject().field("short").value((Short) null).endObject());
assertResult("{'short':42}", () -> builder().startObject().field("short").value((short) 42).endObject());
assertResult("{'short':[1,3,5,7,11]}", () -> builder()
.startObject()
.array("short", (short) 1, (short) 3, (short) 5, (short) 7, (short) 11)
.endObject());
assertResult("{'short':[32767,-32768]}", () -> builder()
.startObject()
.array("short", new short[]{Short.MAX_VALUE, Short.MIN_VALUE})
.endObject());
}
// String fields: null, empty string, arrays, and a null String[] which
// renders as null rather than an empty array.
public void testStrings() throws IOException {
assertResult("{'string':null}", () -> builder().startObject().field("string", (String) null).endObject());
assertResult("{'string':'value'}", () -> builder().startObject().field("string", "value").endObject());
assertResult("{'string':''}", () -> builder().startObject().field("string", "").endObject());
assertResult("{'string':null}", () -> builder().startObject().array("string", (String[]) null).endObject());
assertResult("{'string':[]}", () -> builder().startObject().array("string", Strings.EMPTY_ARRAY).endObject());
assertResult("{'string':null}", () -> builder().startObject().field("string").value((String) null).endObject());
assertResult("{'string':'42'}", () -> builder().startObject().field("string").value("42").endObject());
assertResult("{'string':['a','b','c','d']}", () -> builder()
.startObject()
.array("string", "a", "b", "c", "d")
.endObject());
}
// A byte[] written via field(name, bytes) must round-trip through the parser
// unchanged; a null byte[] renders as JSON null.
public void testBinaryField() throws Exception {
assertResult("{'binary':null}", () -> builder().startObject().field("binary", (byte[]) null).endObject());
final byte[] randomBytes = randomBytes();
BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary", randomBytes).endObject());
// Walk the produced tokens and verify the binary value survives a parse.
XContentParser parser = createParser(xcontentType().xContent(), bytes);
assertSame(parser.nextToken(), Token.START_OBJECT);
assertSame(parser.nextToken(), Token.FIELD_NAME);
assertEquals(parser.currentName(), "binary");
assertTrue(parser.nextToken().isValue());
assertArrayEquals(randomBytes, parser.binaryValue());
assertSame(parser.nextToken(), Token.END_OBJECT);
assertNull(parser.nextToken());
}
// Same round-trip as testBinaryField, but the bytes are written through the
// value-position API: field(name).value(bytes).
public void testBinaryValue() throws Exception {
assertResult("{'binary':null}", () -> builder().startObject().field("binary").value((byte[]) null).endObject());
final byte[] randomBytes = randomBytes();
BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary").value(randomBytes).endObject());
XContentParser parser = createParser(xcontentType().xContent(), bytes);
assertSame(parser.nextToken(), Token.START_OBJECT);
assertSame(parser.nextToken(), Token.FIELD_NAME);
assertEquals(parser.currentName(), "binary");
assertTrue(parser.nextToken().isValue());
assertArrayEquals(randomBytes, parser.binaryValue());
assertSame(parser.nextToken(), Token.END_OBJECT);
assertNull(parser.nextToken());
}
// Writing a sub-range (offset/length) of a byte[] must round-trip exactly
// that slice; both the field(...) and value(...) overloads are exercised.
public void testBinaryValueWithOffsetLength() throws Exception {
assertResult("{'binary':null}", () -> builder().startObject().field("binary").value(null, 0, 0).endObject());
final byte[] randomBytes = randomBytes();
// Pick a random valid slice; length is at least 1 so a value is always written.
final int offset = randomIntBetween(0, randomBytes.length - 1);
final int length = randomIntBetween(1, Math.max(1, randomBytes.length - offset - 1));
XContentBuilder builder = builder().startObject();
if (randomBoolean()) {
builder.field("bin", randomBytes, offset, length);
} else {
builder.field("bin").value(randomBytes, offset, length);
}
builder.endObject();
XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder));
assertSame(parser.nextToken(), Token.START_OBJECT);
assertSame(parser.nextToken(), Token.FIELD_NAME);
assertEquals(parser.currentName(), "bin");
assertTrue(parser.nextToken().isValue());
// Only the requested slice must come back from the parser.
assertArrayEquals(Arrays.copyOfRange(randomBytes, offset, offset + length), parser.binaryValue());
assertSame(parser.nextToken(), Token.END_OBJECT);
assertNull(parser.nextToken());
}
// utf8Value writes raw UTF-8 bytes as a string value; the parsed characters
// must match the UTF-8 decoding of the original bytes.
public void testBinaryUTF8() throws Exception {
assertResult("{'utf8':null}", () -> builder().startObject().nullField("utf8").endObject());
final BytesRef randomBytesRef = new BytesRef(randomBytes());
XContentBuilder builder = builder().startObject();
builder.field("utf8").utf8Value(randomBytesRef.bytes, randomBytesRef.offset, randomBytesRef.length);
builder.endObject();
XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder));
assertSame(parser.nextToken(), Token.START_OBJECT);
assertSame(parser.nextToken(), Token.FIELD_NAME);
assertEquals(parser.currentName(), "utf8");
assertTrue(parser.nextToken().isValue());
assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(randomBytesRef.utf8ToString()));
assertSame(parser.nextToken(), Token.END_OBJECT);
assertNull(parser.nextToken());
}
// Text fields: null, empty, and plain strings render as JSON strings, and a
// Text backed by raw bytes round-trips through the parser as UTF-8.
public void testText() throws Exception {
assertResult("{'text':null}", () -> builder().startObject().field("text", (Text) null).endObject());
assertResult("{'text':''}", () -> builder().startObject().field("text", new Text("")).endObject());
assertResult("{'text':'foo bar'}", () -> builder().startObject().field("text", new Text("foo bar")).endObject());
final BytesReference random = new BytesArray(randomBytes());
XContentBuilder builder = builder().startObject().field("text", new Text(random)).endObject();
XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder));
assertSame(parser.nextToken(), Token.START_OBJECT);
assertSame(parser.nextToken(), Token.FIELD_NAME);
assertEquals(parser.currentName(), "text");
assertTrue(parser.nextToken().isValue());
assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(random.utf8ToString()));
assertSame(parser.nextToken(), Token.END_OBJECT);
assertNull(parser.nextToken());
}
// Joda ReadableInstant / Instant values written via timeField/timeValue must
// render with the default ISO-8601 millisecond printer (UTC); pre-formatted
// strings pass through verbatim.
public void testReadableInstant() throws Exception {
assertResult("{'instant':null}", () -> builder().startObject().timeField("instant", (ReadableInstant) null).endObject());
assertResult("{'instant':null}", () -> builder().startObject().field("instant").timeValue((ReadableInstant) null).endObject());
final DateTime t1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC);
String expected = "{'t1':'2016-01-01T00:00:00.000Z'}";
assertResult(expected, () -> builder().startObject().timeField("t1", t1).endObject());
assertResult(expected, () -> builder().startObject().field("t1").timeValue(t1).endObject());
final DateTime t2 = new DateTime(2016, 12, 25, 7, 59, 42, 213, DateTimeZone.UTC);
expected = "{'t2':'2016-12-25T07:59:42.213Z'}";
assertResult(expected, () -> builder().startObject().timeField("t2", t2).endObject());
assertResult(expected, () -> builder().startObject().field("t2").timeValue(t2).endObject());
// Strings pre-formatted with an arbitrary formatter are written unchanged.
final DateTimeFormatter formatter = randomFrom(ISODateTimeFormat.basicDate(), ISODateTimeFormat.dateTimeNoMillis());
final DateTime t3 = DateTime.now();
expected = "{'t3':'" + formatter.print(t3) + "'}";
assertResult(expected, () -> builder().startObject().timeField("t3", formatter.print(t3)).endObject());
assertResult(expected, () -> builder().startObject().field("t3").value(formatter.print(t3)).endObject());
final DateTime t4 = new DateTime(randomDateTimeZone());
expected = "{'t4':'" + formatter.print(t4) + "'}";
assertResult(expected, () -> builder().startObject().timeField("t4", formatter.print(t4)).endObject());
assertResult(expected, () -> builder().startObject().field("t4").value(formatter.print(t4)).endObject());
long date = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00
final DateTime t5 = new DateTime(date, randomDateTimeZone());
// Instants in arbitrary zones are printed with the default printer...
expected = "{'t5':'" + XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(t5) + "'}";
assertResult(expected, () -> builder().startObject().timeField("t5", t5).endObject());
assertResult(expected, () -> builder().startObject().field("t5").timeValue(t5).endObject());
// ...while pre-formatted strings again pass through verbatim.
expected = "{'t5':'" + formatter.print(t5) + "'}";
assertResult(expected, () -> builder().startObject().timeField("t5", formatter.print(t5)).endObject());
assertResult(expected, () -> builder().startObject().field("t5").value(formatter.print(t5)).endObject());
Instant i1 = new Instant(1451606400000L); // 2016-01-01T00:00:00.000Z
expected = "{'i1':'2016-01-01T00:00:00.000Z'}";
assertResult(expected, () -> builder().startObject().timeField("i1", i1).endObject());
assertResult(expected, () -> builder().startObject().field("i1").timeValue(i1).endObject());
Instant i2 = new Instant(1482652782213L); // 2016-12-25T07:59:42.213Z
expected = "{'i2':'" + formatter.print(i2) + "'}";
assertResult(expected, () -> builder().startObject().timeField("i2", formatter.print(i2)).endObject());
assertResult(expected, () -> builder().startObject().field("i2").value(formatter.print(i2)).endObject());
}
// java.util.Date values written via timeField/timeValue render with the
// default ISO-8601 millisecond printer; null Dates render as JSON null.
public void testDate() throws Exception {
assertResult("{'date':null}", () -> builder().startObject().timeField("date", (Date) null).endObject());
assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((Date) null).endObject());
final Date d1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate();
assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timeField("d1", d1).endObject());
assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1").timeValue(d1).endObject());
final Date d2 = new DateTime(2016, 12, 25, 7, 59, 42, 213, DateTimeZone.UTC).toDate();
assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().timeField("d2", d2).endObject());
assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().field("d2").timeValue(d2).endObject());
// Pre-formatted date strings are written unchanged.
final DateTimeFormatter formatter = randomFrom(ISODateTimeFormat.basicDate(), ISODateTimeFormat.dateTimeNoMillis());
final Date d3 = DateTime.now().toDate();
String expected = "{'d3':'" + formatter.print(d3.getTime()) + "'}";
assertResult(expected, () -> builder().startObject().field("d3").value(formatter.print(d3.getTime())).endObject());
}
public void testDateField() throws Exception {
final Date d = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate();
assertResult("{'date_in_millis':1451606400000}", () -> builder()
.startObject()
.timeField("date_in_millis", "date", d.getTime())
.endObject());
assertResult("{'date':'2016-01-01T00:00:00.000Z','date_in_millis':1451606400000}", () -> builder()
.humanReadable(true)
.startObject
().timeField("date_in_millis", "date", d.getTime())
.endObject());
}
public void testCalendar() throws Exception {
Calendar calendar = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toCalendar(Locale.ROOT);
assertResult("{'calendar':'2016-01-01T00:00:00.000Z'}", () -> builder()
.startObject()
.field("calendar")
.timeValue(calendar)
.endObject());
}
public void testGeoPoint() throws Exception {
assertResult("{'geo':null}", () -> builder().startObject().field("geo", (GeoPoint) null).endObject());
assertResult("{'geo':{'lat':52.4267578125,'lon':13.271484375}}", () -> builder()
.startObject()
. field("geo", GeoPoint.fromGeohash("u336q"))
.endObject());
assertResult("{'geo':{'lat':52.5201416015625,'lon':13.4033203125}}", () -> builder()
.startObject()
.field("geo")
.value(GeoPoint.fromGeohash("u33dc1"))
.endObject());
}
public void testLatLon() throws Exception {
final String expected = "{'latlon':{'lat':13.271484375,'lon':52.4267578125}}";
assertResult(expected, () -> builder().startObject().latlon("latlon", 13.271484375, 52.4267578125).endObject());
assertResult(expected, () -> builder().startObject().field("latlon").latlon(13.271484375, 52.4267578125).endObject());
}
public void testPath() throws Exception {
assertResult("{'path':null}", () -> builder().startObject().field("path", (Path) null).endObject());
final Path path = PathUtils.get("first", "second", "third");
final String expected = Constants.WINDOWS ? "{'path':'first\\\\second\\\\third'}" : "{'path':'first/second/third'}";
assertResult(expected, () -> builder().startObject().field("path", path).endObject());
}
public void testObjects() throws Exception {
Map<String, Object[]> objects = new HashMap<>();
objects.put("{'objects':[false,true,false]}", new Object[]{false, true, false});
objects.put("{'objects':[1,1,2,3,5,8,13]}", new Object[]{(byte) 1, (byte) 1, (byte) 2, (byte) 3, (byte) 5, (byte) 8, (byte) 13});
objects.put("{'objects':[1.0,1.0,2.0,3.0,5.0,8.0,13.0]}", new Object[]{1.0d, 1.0d, 2.0d, 3.0d, 5.0d, 8.0d, 13.0d});
objects.put("{'objects':[1.0,1.0,2.0,3.0,5.0,8.0,13.0]}", new Object[]{1.0f, 1.0f, 2.0f, 3.0f, 5.0f, 8.0f, 13.0f});
objects.put("{'objects':[{'lat':45.759429931640625,'lon':4.8394775390625}]}", new Object[]{GeoPoint.fromGeohash("u05kq4k")});
objects.put("{'objects':[1,1,2,3,5,8,13]}", new Object[]{1, 1, 2, 3, 5, 8, 13});
objects.put("{'objects':[1,1,2,3,5,8,13]}", new Object[]{1L, 1L, 2L, 3L, 5L, 8L, 13L});
objects.put("{'objects':[1,1,2,3,5,8]}", new Object[]{(short) 1, (short) 1, (short) 2, (short) 3, (short) 5, (short) 8});
objects.put("{'objects':['a','b','c']}", new Object[]{"a", "b", "c"});
objects.put("{'objects':['a','b','c']}", new Object[]{new Text("a"), new Text(new BytesArray("b")), new Text("c")});
objects.put("{'objects':null}", null);
objects.put("{'objects':[null,null,null]}", new Object[]{null, null, null});
objects.put("{'objects':['OPEN','CLOSE']}", IndexMetaData.State.values());
objects.put("{'objects':[{'f1':'v1'},{'f2':'v2'}]}", new Object[]{singletonMap("f1", "v1"), singletonMap("f2", "v2")});
objects.put("{'objects':[[1,2,3],[4,5]]}", new Object[]{Arrays.asList(1, 2, 3), Arrays.asList(4, 5)});
final String paths = Constants.WINDOWS ? "{'objects':['a\\\\b\\\\c','d\\\\e']}" : "{'objects':['a/b/c','d/e']}";
objects.put(paths, new Object[]{PathUtils.get("a", "b", "c"), PathUtils.get("d", "e")});
final DateTimeFormatter formatter = XContentElasticsearchExtension.DEFAULT_DATE_PRINTER;
final Date d1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate();
final Date d2 = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).toDate();
objects.put("{'objects':['" + formatter.print(d1.getTime()) + "','" + formatter.print(d2.getTime()) + "']}", new Object[]{d1, d2});
final DateTime dt1 = DateTime.now();
final DateTime dt2 = new DateTime(2016, 12, 25, 7, 59, 42, 213, DateTimeZone.UTC);
objects.put("{'objects':['" + formatter.print(dt1) + "','2016-12-25T07:59:42.213Z']}", new Object[]{dt1, dt2});
final Calendar c1 = new DateTime(2012, 7, 7, 10, 23, DateTimeZone.UTC).toCalendar(Locale.ROOT);
final Calendar c2 = new DateTime(2014, 11, 16, 19, 36, DateTimeZone.UTC).toCalendar(Locale.ROOT);
objects.put("{'objects':['2012-07-07T10:23:00.000Z','2014-11-16T19:36:00.000Z']}", new Object[]{c1, c2});
final ToXContent x1 = (builder, params) -> builder.startObject().field("f1", "v1").field("f2", 2).array("f3", 3, 4, 5).endObject();
final ToXContent x2 = (builder, params) -> builder.startObject().field("f1", "v1").field("f2", x1).endObject();
objects.put("{'objects':[{'f1':'v1','f2':2,'f3':[3,4,5]},{'f1':'v1','f2':{'f1':'v1','f2':2,'f3':[3,4,5]}}]}", new Object[]{x1, x2});
for (Map.Entry<String, Object[]> o : objects.entrySet()) {
final String expected = o.getKey();
assertResult(expected, () -> builder().startObject().field("objects", o.getValue()).endObject());
assertResult(expected, () -> builder().startObject().field("objects").value(o.getValue()).endObject());
assertResult(expected, () -> builder().startObject().array("objects", o.getValue()).endObject());
}
}
public void testObject() throws Exception {
Map<String, Object> object = new HashMap<>();
object.put("{'object':false}", Boolean.FALSE);
object.put("{'object':13}", (byte) 13);
object.put("{'object':5.0}", 5.0d);
object.put("{'object':8.0}", 8.0f);
object.put("{'object':{'lat':45.759429931640625,'lon':4.8394775390625}}", GeoPoint.fromGeohash("u05kq4k"));
object.put("{'object':3}", 3);
object.put("{'object':2}", 2L);
object.put("{'object':1}", (short) 1);
object.put("{'object':'string'}", "string");
object.put("{'object':'a'}", new Text("a"));
object.put("{'object':'b'}", new Text(new BytesArray("b")));
object.put("{'object':null}", null);
object.put("{'object':'OPEN'}", IndexMetaData.State.OPEN);
object.put("{'object':'NM'}", DistanceUnit.NAUTICALMILES);
object.put("{'object':{'f1':'v1'}}", singletonMap("f1", "v1"));
object.put("{'object':{'f1':{'f2':'v2'}}}", singletonMap("f1", singletonMap("f2", "v2")));
object.put("{'object':[1,2,3]}", Arrays.asList(1, 2, 3));
final String path = Constants.WINDOWS ? "{'object':'a\\\\b\\\\c'}" : "{'object':'a/b/c'}";
object.put(path, PathUtils.get("a", "b", "c"));
final DateTimeFormatter formatter = XContentElasticsearchExtension.DEFAULT_DATE_PRINTER;
final Date d1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate();
object.put("{'object':'" + formatter.print(d1.getTime()) + "'}", d1);
final DateTime d2 = DateTime.now();
object.put("{'object':'" + formatter.print(d2) + "'}", d2);
final Calendar c1 = new DateTime(2010, 1, 1, 0, 0, DateTimeZone.UTC).toCalendar(Locale.ROOT);
object.put("{'object':'2010-01-01T00:00:00.000Z'}", c1);
final ToXContent x1 = (builder, params) -> builder.startObject().field("f1", "v1").field("f2", 2).array("f3", 3, 4, 5).endObject();
final ToXContent x2 = (builder, params) -> builder.startObject().field("f1", "v1").field("f2", x1).endObject();
object.put("{'object':{'f1':'v1','f2':{'f1':'v1','f2':2,'f3':[3,4,5]}}}", x2);
for (Map.Entry<String, Object> o : object.entrySet()) {
final String expected = o.getKey();
assertResult(expected, () -> builder().humanReadable(true).startObject().field("object", o.getValue()).endObject());
assertResult(expected, () -> builder().humanReadable(true).startObject().field("object").value(o.getValue()).endObject());
}
assertResult("{'objects':[null,null,null]}", () -> builder().startObject().array("objects", null, null, null).endObject());
}
public void testToXContent() throws Exception {
assertResult("{'xcontent':null}", () -> builder().startObject().field("xcontent", (ToXContent) null).endObject());
assertResult("{'xcontent':null}", () -> builder().startObject().field("xcontent").value((ToXContent) null).endObject());
ToXContent xcontent0 = (builder, params) -> {
builder.startObject();
builder.field("field", "value");
builder.array("array", "1", "2", "3");
builder.startObject("foo");
builder.field("bar", "baz");
builder.endObject();
builder.endObject();
return builder;
};
assertResult("{'field':'value','array':['1','2','3'],'foo':{'bar':'baz'}}", () -> builder().value(xcontent0));
assertResult("{'xcontent':{'field':'value','array':['1','2','3'],'foo':{'bar':'baz'}}}", () -> builder()
.startObject()
.field("xcontent", xcontent0)
.endObject());
ToXContent xcontent1 = (builder, params) -> {
builder.startObject();
builder.field("field", "value");
builder.startObject("foo");
builder.field("bar", "baz");
builder.endObject();
builder.endObject();
return builder;
};
ToXContent xcontent2 = (builder, params) -> {
builder.startObject();
builder.field("root", xcontent0);
builder.array("childs", xcontent0, xcontent1);
builder.endObject();
return builder;
};
assertResult("{'root':{" +
"'field':'value'," +
"'array':['1','2','3']," +
"'foo':{'bar':'baz'}" +
"}," +
"'childs':[" +
"{'field':'value','array':['1','2','3'],'foo':{'bar':'baz'}}," +
"{'field':'value','foo':{'bar':'baz'}}" +
"]}", () -> builder().value(xcontent2));
}
public void testMap() throws Exception {
Map<String, Map<String, ?>> maps = new HashMap<>();
maps.put("{'map':null}", (Map) null);
maps.put("{'map':{}}", Collections.emptyMap());
maps.put("{'map':{'key':'value'}}", singletonMap("key", "value"));
Map<String, Object> innerMap = new HashMap<>();
innerMap.put("string", "value");
innerMap.put("int", 42);
innerMap.put("long", 42L);
innerMap.put("long[]", new long[]{1L, 3L});
innerMap.put("path", PathUtils.get("path", "to", "file"));
innerMap.put("object", singletonMap("key", "value"));
final String path = Constants.WINDOWS ? "path\\\\to\\\\file" : "path/to/file";
maps.put("{'map':{'path':'" + path + "','string':'value','long[]':[1,3],'int':42,'long':42,'object':{'key':'value'}}}", innerMap);
for (Map.Entry<String, Map<String, ?>> m : maps.entrySet()) {
final String expected = m.getKey();
assertResult(expected, () -> builder().startObject().field("map", m.getValue()).endObject());
assertResult(expected, () -> builder().startObject().field("map").value(m.getValue()).endObject());
assertResult(expected, () -> builder().startObject().field("map").map(m.getValue()).endObject());
}
}
public void testIterable() throws Exception {
Map<String, Iterable<?>> iterables = new HashMap<>();
iterables.put("{'iter':null}", (Iterable) null);
iterables.put("{'iter':[]}", Collections.emptyList());
iterables.put("{'iter':['a','b']}", Arrays.asList("a", "b"));
final String path = Constants.WINDOWS ? "{'iter':'path\\\\to\\\\file'}" : "{'iter':'path/to/file'}";
iterables.put(path, PathUtils.get("path", "to", "file"));
final String paths = Constants.WINDOWS ? "{'iter':['a\\\\b\\\\c','c\\\\d']}" : "{'iter':['a/b/c','c/d']}";
iterables.put(paths, Arrays.asList(PathUtils.get("a", "b", "c"), PathUtils.get("c", "d")));
for (Map.Entry<String, Iterable<?>> i : iterables.entrySet()) {
final String expected = i.getKey();
assertResult(expected, () -> builder().startObject().field("iter", i.getValue()).endObject());
assertResult(expected, () -> builder().startObject().field("iter").value(i.getValue()).endObject());
}
}
/**
 * Objects with no dedicated serialization path (a DistanceUnit value, an enum,
 * a Class) must still produce a sensible scalar representation, through both
 * the field-and-value form and the explicit {@code value()} form.
 */
public void testUnknownObject() throws Exception {
    Map<String, Object> cases = new HashMap<>();
    cases.put("{'obj':50.63}", DistanceUnit.METERS.fromMeters(50.63));
    cases.put("{'obj':'MINUTES'}", TimeUnit.MINUTES);
    cases.put("{'obj':'class org.elasticsearch.common.xcontent.BaseXContentTestCase'}", BaseXContentTestCase.class);
    for (Map.Entry<String, ?> entry : cases.entrySet()) {
        final String expected = entry.getKey();
        assertResult(expected, () -> builder().startObject().field("obj", entry.getValue()).endObject());
        assertResult(expected, () -> builder().startObject().field("obj").value(entry.getValue()).endObject());
    }
}
/**
 * Smoke test: an empty object written by a generator of this content type must
 * be recognized as that same content type by {@link XContentFactory#xContentType}.
 */
public void testBasics() throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (XContentGenerator generator = xcontentType().xContent().createGenerator(out)) {
        generator.writeStartObject();
        generator.writeEndObject();
    }
    byte[] written = out.toByteArray();
    assertEquals(xcontentType(), XContentFactory.xContentType(written));
}
/**
 * Closing a generator while an object is still open must fail with an
 * {@link IOException} naming the unclosed structure.
 */
public void testMissingEndObject() throws IOException {
    IOException e = expectThrows(IOException.class, () -> {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        try (XContentGenerator generator = xcontentType().xContent().createGenerator(os)) {
            generator.writeStartObject();
            generator.writeFieldName("foo");
            generator.writeNumber(2L);
            // no writeEndObject() -> close() must throw
        }
    });
    // assertEquals takes (expected, actual): the literal goes first so a
    // failure report labels the two values correctly.
    assertEquals("Unclosed object or array found", e.getMessage());
}
/**
 * Closing a generator while an array is still open must fail with an
 * {@link IOException} naming the unclosed structure.
 */
public void testMissingEndArray() throws IOException {
    IOException e = expectThrows(IOException.class, () -> {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        try (XContentGenerator generator = xcontentType().xContent().createGenerator(os)) {
            generator.writeStartArray();
            generator.writeNumber(2L);
            // no writeEndArray() -> close() must throw
        }
    });
    // assertEquals takes (expected, actual): the literal goes first so a
    // failure report labels the two values correctly.
    assertEquals("Unclosed object or array found", e.getMessage());
}
/**
 * Runs the raw-field round trip for every content type, both with a plain
 * InputStream and with a BytesArray-backed stream.
 */
public void testRawField() throws Exception {
    for (XContentType type : XContentType.values()) {
        doTestRawField(type.xContent(), false);
        doTestRawField(type.xContent(), true);
    }
}
/**
 * Round-trips a raw field: writes {@code {"foo":null}} in the {@code source}
 * format, embeds those bytes verbatim under field "bar" via
 * {@code writeRawField}, then parses the result and asserts the exact token
 * sequence of {@code {"bar":{"foo":null}}}.
 *
 * @param source the XContent implementation used to produce the raw payload
 * @param useStream if true, feed the raw bytes as a plain ByteArrayInputStream;
 *        otherwise as a BytesArray stream — both overloads must behave the same
 */
void doTestRawField(XContent source, boolean useStream) throws Exception {
    // build the inner payload {"foo":null} in the source format
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    try (XContentGenerator generator = source.createGenerator(os)) {
        generator.writeStartObject();
        generator.writeFieldName("foo");
        generator.writeNull();
        generator.writeEndObject();
    }
    final byte[] rawData = os.toByteArray();
    // embed the payload verbatim under "bar" in this test's content type
    os = new ByteArrayOutputStream();
    try (XContentGenerator generator = xcontentType().xContent().createGenerator(os)) {
        generator.writeStartObject();
        if (useStream) {
            generator.writeRawField("bar", new ByteArrayInputStream(rawData));
        } else {
            generator.writeRawField("bar", new BytesArray(rawData).streamInput());
        }
        generator.writeEndObject();
    }
    // parse back and verify the exact token stream of {"bar":{"foo":null}}
    XContentParser parser = xcontentType().xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray());
    assertEquals(Token.START_OBJECT, parser.nextToken());
    assertEquals(Token.FIELD_NAME, parser.nextToken());
    assertEquals("bar", parser.currentName());
    assertEquals(Token.START_OBJECT, parser.nextToken());
    assertEquals(Token.FIELD_NAME, parser.nextToken());
    assertEquals("foo", parser.currentName());
    assertEquals(Token.VALUE_NULL, parser.nextToken());
    assertEquals(Token.END_OBJECT, parser.nextToken());
    assertEquals(Token.END_OBJECT, parser.nextToken());
    assertNull(parser.nextToken());
}
/** Runs the raw-value round trip against every registered content type. */
public void testRawValue() throws Exception {
    for (XContentType type : XContentType.values()) {
        doTestRawValue(type.xContent());
    }
}
/**
 * Round-trips a raw value in two positions: first as the top-level value of
 * the output, then nested under a field name. In both cases the payload
 * {@code {"foo":null}} (written in the {@code source} format) must survive
 * byte-for-byte, verified by asserting the exact parsed token sequence.
 *
 * @param source the XContent implementation used to produce the raw payload
 */
void doTestRawValue(XContent source) throws Exception {
    // build the payload {"foo":null} in the source format
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    try (XContentGenerator generator = source.createGenerator(os)) {
        generator.writeStartObject();
        generator.writeFieldName("foo");
        generator.writeNull();
        generator.writeEndObject();
    }
    final byte[] rawData = os.toByteArray();
    // case 1: raw value as the entire document
    os = new ByteArrayOutputStream();
    try (XContentGenerator generator = xcontentType().xContent().createGenerator(os)) {
        generator.writeRawValue(new BytesArray(rawData).streamInput(), source.type());
    }
    XContentParser parser = xcontentType().xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray());
    assertEquals(Token.START_OBJECT, parser.nextToken());
    assertEquals(Token.FIELD_NAME, parser.nextToken());
    assertEquals("foo", parser.currentName());
    assertEquals(Token.VALUE_NULL, parser.nextToken());
    assertEquals(Token.END_OBJECT, parser.nextToken());
    assertNull(parser.nextToken());
    // case 2: raw value nested under field "test"
    os = new ByteArrayOutputStream();
    try (XContentGenerator generator = xcontentType().xContent().createGenerator(os)) {
        generator.writeStartObject();
        generator.writeFieldName("test");
        generator.writeRawValue(new BytesArray(rawData).streamInput(), source.type());
        generator.writeEndObject();
    }
    parser = xcontentType().xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray());
    assertEquals(Token.START_OBJECT, parser.nextToken());
    assertEquals(Token.FIELD_NAME, parser.nextToken());
    assertEquals("test", parser.currentName());
    assertEquals(Token.START_OBJECT, parser.nextToken());
    assertEquals(Token.FIELD_NAME, parser.nextToken());
    assertEquals("foo", parser.currentName());
    assertEquals(Token.VALUE_NULL, parser.nextToken());
    assertEquals(Token.END_OBJECT, parser.nextToken());
    assertEquals(Token.END_OBJECT, parser.nextToken());
    assertNull(parser.nextToken());
}
/**
 * Verifies that a document containing a value outside long range can still be
 * read back via {@code parser.map()} with the BigInteger preserved.
 *
 * @param generator a Jackson generator writing in this test's content type
 * @param os the stream backing {@code generator}, read back after flushing
 */
protected void doTestBigInteger(JsonGenerator generator, ByteArrayOutputStream os) throws Exception {
    // Big integers cannot be handled explicitly, but if some values happen to be big ints,
    // we can still call parser.map() and get the bigint value so that eg. source filtering
    // keeps working
    BigInteger bigInteger = BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE); // just past long range
    generator.writeStartObject();
    generator.writeFieldName("foo");
    generator.writeString("bar");
    generator.writeFieldName("bigint");
    generator.writeNumber(bigInteger);
    generator.writeEndObject();
    generator.flush();
    byte[] serialized = os.toByteArray();
    XContentParser parser = xcontentType().xContent()
        .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, serialized);
    Map<String, Object> map = parser.map();
    assertEquals("bar", map.get("foo"));
    // the oversized value must come back as a BigInteger, not truncated
    assertEquals(bigInteger, map.get("bigint"));
}
/** A null field name must be rejected with a descriptive IllegalArgumentException. */
public void testEnsureNameNotNull() {
    IllegalArgumentException ex =
        expectThrows(IllegalArgumentException.class, () -> XContentBuilder.ensureNameNotNull(null));
    assertThat(ex.getMessage(), containsString("Field name cannot be null"));
}

/** ensureNotNull must throw with the supplied message for null and accept non-null. */
public void testEnsureNotNull() {
    IllegalArgumentException ex =
        expectThrows(IllegalArgumentException.class, () -> XContentBuilder.ensureNotNull(null, "message"));
    assertThat(ex.getMessage(), containsString("message"));
    XContentBuilder.ensureNotNull("foo", "No exception must be thrown");
}
/**
 * The self-reference check must accept empty and null inputs but reject a map
 * that contains itself as a value.
 */
public void testEnsureNoSelfReferences() throws IOException {
    // benign inputs pass straight through
    CollectionUtils.ensureNoSelfReferences(emptyMap());
    CollectionUtils.ensureNoSelfReferences(null);
    // a map holding itself must be rejected during serialization
    Map<String, Object> selfReferencing = new HashMap<>();
    selfReferencing.put("field", selfReferencing);
    IllegalArgumentException ex =
        expectThrows(IllegalArgumentException.class, () -> builder().map(selfReferencing));
    assertThat(ex.getMessage(), containsString("Iterable object is self-referencing itself"));
}
/**
 * Test that the same map written multiple times do not trigger the self-reference check in
 * {@link CollectionUtils#ensureNoSelfReferences(Object)}
 */
public void testRepeatedMapsAndNoSelfReferences() throws Exception {
    Map<String, Object> sharedB = singletonMap("b", "B");
    Map<String, Object> sharedC = singletonMap("c", "C");
    Map<String, Object> sharedD = singletonMap("d", "D");
    // the same sub-map appears several times, but never in a cycle
    Map<String, Object> root = new HashMap<>();
    root.put("a", 0);
    root.put("b1", sharedB);
    root.put("b2", sharedB);
    root.put("c", Arrays.asList(sharedC, sharedC));
    root.put("d1", sharedD);
    root.put("d2", singletonMap("d3", sharedD));
    final String expected =
        "{'map':{'b2':{'b':'B'},'a':0,'c':[{'c':'C'},{'c':'C'}],'d1':{'d':'D'},'d2':{'d3':{'d':'D'}},'b1':{'b':'B'}}}";
    assertResult(expected, () -> builder().startObject().field("map", root).endObject());
    assertResult(expected, () -> builder().startObject().field("map").value(root).endObject());
    assertResult(expected, () -> builder().startObject().field("map").map(root).endObject());
}
/** A two-map cycle (A -> B -> A) must be detected during serialization. */
public void testSelfReferencingMapsOneLevel() throws IOException {
    Map<String, Object> outer = new HashMap<>();
    Map<String, Object> inner = new HashMap<>();
    outer.put("foo", 0);
    outer.put("map1", inner); // outer -> inner
    inner.put("bar", 1);
    inner.put("map0", outer); // inner -> outer closes the loop
    IllegalArgumentException ex =
        expectThrows(IllegalArgumentException.class, () -> builder().map(outer));
    assertThat(ex.getMessage(), containsString("Iterable object is self-referencing itself"));
}
/** A three-map cycle (A -> B -> C -> A) must be detected during serialization. */
public void testSelfReferencingMapsTwoLevels() throws IOException {
    Map<String, Object> first = new HashMap<>();
    Map<String, Object> second = new HashMap<>();
    Map<String, Object> third = new HashMap<>();
    first.put("foo", 0);
    first.put("map1", second);  // first -> second
    second.put("bar", 1);
    second.put("map2", third);  // second -> third
    third.put("baz", 2);
    third.put("map0", first);   // third -> first closes the loop
    IllegalArgumentException ex =
        expectThrows(IllegalArgumentException.class, () -> builder().map(first));
    assertThat(ex.getMessage(), containsString("Iterable object is self-referencing itself"));
}
/** An Object[] containing itself must be rejected via both field() and array(). */
public void testSelfReferencingObjectsArray() throws IOException {
    Object[] cycle = {0, 1, null};
    cycle[2] = cycle; // the array references itself
    IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> builder()
        .startObject()
        .field("field", cycle)
        .endObject());
    assertThat(ex.getMessage(), containsString("Iterable object is self-referencing itself"));
    ex = expectThrows(IllegalArgumentException.class, () -> builder()
        .startObject()
        .array("field", cycle)
        .endObject());
    assertThat(ex.getMessage(), containsString("Iterable object is self-referencing itself"));
}
/** A List containing itself must be rejected when written as an Iterable. */
public void testSelfReferencingIterable() throws IOException {
    List<Object> cycle = new ArrayList<>(Arrays.asList("foo", "bar"));
    cycle.add(cycle); // the list references itself
    IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> builder()
        .startObject()
        .field("field", (Iterable) cycle)
        .endObject());
    assertThat(ex.getMessage(), containsString("Iterable object is self-referencing itself"));
}
/** A cycle through one Iterable level (list -> map -> list) must be detected. */
public void testSelfReferencingIterableOneLevel() throws IOException {
    Map<String, Object> node = new HashMap<>();
    node.put("foo", 0);
    node.put("bar", 1);
    Iterable<Object> cycle = Arrays.asList("one", "two", node); // list -> map
    node.put("baz", cycle);                                     // map -> list closes the loop
    IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> builder()
        .startObject()
        .field("field", (Iterable) cycle)
        .endObject());
    assertThat(ex.getMessage(), containsString("Iterable object is self-referencing itself"));
}
/** A cycle through two levels (map -> list -> map -> map) must be detected. */
public void testSelfReferencingIterableTwoLevels() throws IOException {
    Map<String, Object> root = new HashMap<>();
    Map<String, Object> leafOne = new HashMap<>();
    Map<String, Object> leafTwo = new HashMap<>();
    List<Object> middle = new ArrayList<>();
    root.put("foo", 0);
    root.put("it1", (Iterable<?>) middle); // root -> middle
    middle.add(leafOne);
    middle.add(leafTwo);                   // middle -> both leaves
    leafTwo.put("baz", 2);
    leafTwo.put("map0", root);             // leafTwo -> root closes the loop
    IllegalArgumentException ex =
        expectThrows(IllegalArgumentException.class, () -> builder().map(root));
    assertThat(ex.getMessage(), containsString("Iterable object is self-referencing itself"));
}
/** With strict duplicate detection enabled, parsing a duplicated key must fail. */
public void testChecksForDuplicates() throws Exception {
    assumeTrue("Test only makes sense if XContent parser has strict duplicate checks enabled",
        XContent.isStrictDuplicateDetectionEnabled());
    XContentBuilder duplicated = builder()
        .startObject()
        .field("key", 1)
        .field("key", 2)
        .endObject();
    JsonParseException ex = expectThrows(JsonParseException.class, () -> createParser(duplicated).map());
    assertThat(ex.getMessage(), startsWith("Duplicate field 'key'"));
}
/**
 * Exercises {@code XContentParser.namedObject}: lookups by primary and
 * deprecated names, a parser-backed entry that reads the current token,
 * an unknown-name failure, an unknown-category failure, and the failure
 * mode of a parser built with an empty registry.
 */
public void testNamedObject() throws IOException {
    Object test1 = new Object();
    Object test2 = new Object();
    // three entries: two fixed objects and one that reads text from the parser
    NamedXContentRegistry registry = new NamedXContentRegistry(Arrays.asList(
        new NamedXContentRegistry.Entry(Object.class, new ParseField("test1"), p -> test1),
        new NamedXContentRegistry.Entry(Object.class, new ParseField("test2", "deprecated"), p -> test2),
        new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text())));
    XContentBuilder b = XContentBuilder.builder(xcontentType().xContent());
    b.value("test");
    XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE,
        BytesReference.bytes(b).streamInput());
    assertEquals(test1, p.namedObject(Object.class, "test1", null));
    assertEquals(test2, p.namedObject(Object.class, "test2", null));
    // the deprecated alias resolves to the same entry but logs a warning
    assertEquals(test2, p.namedObject(Object.class, "deprecated", null));
    assertWarnings("Deprecated field [deprecated] used, expected [test2] instead");
    {
        // advance to the "test" string token so the "str" entry can read it
        p.nextToken();
        assertEquals("test", p.namedObject(Object.class, "str", null));
        NamedObjectNotFoundException e = expectThrows(NamedObjectNotFoundException.class,
            () -> p.namedObject(Object.class, "unknown", null));
        assertThat(e.getMessage(), endsWith("unable to parse Object with name [unknown]: parser not found"));
    }
    {
        // String.class was never registered as a category
        Exception e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(String.class, "doesn't matter", null));
        assertEquals("unknown named object category [java.lang.String]", e.getMessage());
    }
    {
        // a parser built with the empty registry rejects namedObject outright
        XContentParser emptyRegistryParser = xcontentType().xContent()
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new byte[] {});
        Exception e = expectThrows(NamedObjectNotFoundException.class,
            () -> emptyRegistryParser.namedObject(String.class, "doesn't matter", null));
        assertEquals("named objects are not supported for this parser", e.getMessage());
    }
}
/** Asserts the runnable fails because the XContentBuilder was closed with an open structure. */
private static void expectUnclosedException(ThrowingRunnable runnable) {
    IllegalStateException ex = expectThrows(IllegalStateException.class, runnable);
    assertThat(ex.getMessage(), containsString("Failed to close the XContentBuilder"));
    assertThat(ex.getCause(), allOf(notNullValue(), instanceOf(IOException.class)));
    assertThat(ex.getCause().getMessage(), containsString("Unclosed object or array found"));
}

/** Asserts the runnable fails because the generator expected a value next. */
private static void expectValueException(ThrowingRunnable runnable) {
    JsonGenerationException ex = expectThrows(JsonGenerationException.class, runnable);
    assertThat(ex.getMessage(), containsString("expecting a value"));
}

/** Asserts the runnable fails because the generator expected a field name next. */
private static void expectFieldException(ThrowingRunnable runnable) {
    JsonGenerationException ex = expectThrows(JsonGenerationException.class, runnable);
    assertThat(ex.getMessage(), containsString("expecting field name"));
}

/** Asserts the runnable fails because a null field name was supplied. */
private static void expectNonNullFieldException(ThrowingRunnable runnable) {
    IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, runnable);
    assertThat(ex.getMessage(), containsString("Field name cannot be null"));
}

/** Asserts the runnable fails because a null DateTimeFormatter was supplied. */
private static void expectNonNullFormatterException(ThrowingRunnable runnable) {
    IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, runnable);
    assertThat(ex.getMessage(), containsString("DateTimeFormatter cannot be null"));
}

/** Asserts the runnable fails because the current generator context is not an object. */
private static void expectObjectException(ThrowingRunnable runnable) {
    JsonGenerationException ex = expectThrows(JsonGenerationException.class, runnable);
    assertThat(ex.getMessage(), containsString("Current context not Object"));
}

/** Asserts the runnable fails because the current generator context is not an array. */
private static void expectArrayException(ThrowingRunnable runnable) {
    JsonGenerationException ex = expectThrows(JsonGenerationException.class, runnable);
    assertThat(ex.getMessage(), containsString("Current context not Array"));
}
/**
 * Matcher that turns the single-quoted pseudo-JSON used by these tests into
 * real JSON (replacing {@code '} with {@code "}) and requires an exact match.
 */
public static Matcher<String> equalToJson(String json) {
    return Matchers.equalTo(json.replace("'", "\""));
}

/**
 * Builds the XContentBuilder, converts its bytes to JSON and checks the
 * result against {@code expected} (in pseudo-JSON form).
 */
private static void assertResult(String expected, Builder builder) throws IOException {
    assertThat(XContentHelper.convertToJson(BytesReference.bytes(builder.build()), randomBoolean()), equalToJson(expected));
}

/** Random unicode content encoded as UTF-8, between 10 and 1000 characters long. */
private static byte[] randomBytes() throws Exception {
    // Use the Charset constant rather than the "UTF-8" name: no charset-name
    // lookup at runtime and no UnsupportedEncodingException code path.
    return randomUnicodeOfLength(scaledRandomIntBetween(10, 1000)).getBytes(java.nio.charset.StandardCharsets.UTF_8);
}
/** A deferred build step producing a finished XContentBuilder; consumed by assertResult. */
@FunctionalInterface
private interface Builder {
    XContentBuilder build() throws IOException;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openspaces.core.internal.commons.math.fraction;
import java.io.Serializable;
import java.math.BigInteger;
import org.openspaces.core.internal.commons.math.FieldElement;
import org.openspaces.core.internal.commons.math.MathRuntimeException;
import org.openspaces.core.internal.commons.math.util.MathUtils;
/**
* Representation of a rational number.
*
* implements Serializable since 2.0
*
* @since 1.1
* @version $Revision: 922715 $ $Date: 2010-03-13 20:38:14 -0500 (Sat, 13 Mar 2010) $
*/
public class Fraction
extends Number
implements FieldElement<Fraction>, Comparable<Fraction>, Serializable {
/** A fraction representing "2 / 1". */
public static final Fraction TWO = new Fraction(2, 1);
/** A fraction representing "1". */
public static final Fraction ONE = new Fraction(1, 1);
/** A fraction representing "0". */
public static final Fraction ZERO = new Fraction(0, 1);
/** A fraction representing "4/5". */
public static final Fraction FOUR_FIFTHS = new Fraction(4, 5);
/** A fraction representing "1/5". */
public static final Fraction ONE_FIFTH = new Fraction(1, 5);
/** A fraction representing "1/2". */
public static final Fraction ONE_HALF = new Fraction(1, 2);
/** A fraction representing "1/4". */
public static final Fraction ONE_QUARTER = new Fraction(1, 4);
/** A fraction representing "1/3". */
public static final Fraction ONE_THIRD = new Fraction(1, 3);
/** A fraction representing "3/5". */
public static final Fraction THREE_FIFTHS = new Fraction(3, 5);
/** A fraction representing "3/4". */
public static final Fraction THREE_QUARTERS = new Fraction(3, 4);
/** A fraction representing "2/5". */
public static final Fraction TWO_FIFTHS = new Fraction(2, 5);
/** A fraction representing "2/4" (stored reduced to 1/2 by the constructor). */
public static final Fraction TWO_QUARTERS = new Fraction(2, 4);
/** A fraction representing "2/3". */
public static final Fraction TWO_THIRDS = new Fraction(2, 3);
/** A fraction representing "-1 / 1". */
public static final Fraction MINUS_ONE = new Fraction(-1, 1);
/** Message for zero denominator. */
private static final String ZERO_DENOMINATOR_MESSAGE =
    "zero denominator in fraction {0}/{1}";
/** Message for overflow. */
private static final String OVERFLOW_MESSAGE =
    "overflow in fraction {0}/{1}, cannot negate";
/** Message for null fraction. */
private static final String NULL_FRACTION =
    "null fraction";
/** Serializable version identifier */
private static final long serialVersionUID = 3698073679419233275L;
/** The denominator. Always positive after construction; the sign lives on the numerator. */
private final int denominator;
/** The numerator. Carries the sign of the fraction. */
private final int numerator;
/**
 * Create a fraction given the double value.
 * Delegates to the private continued-fraction constructor with a default
 * tolerance of 1.0e-5 and at most 100 convergents.
 * @param value the double value to convert to a fraction.
 * @throws FractionConversionException if the continued fraction failed to
 *         converge.
 */
public Fraction(double value) throws FractionConversionException {
    this(value, 1.0e-5, 100);
}
/**
 * Create a fraction given the double value and maximum error allowed.
 * <p>
 * References:
 * <ul>
 * <li><a href="http://mathworld.wolfram.com/ContinuedFraction.html">
 * Continued Fraction</a> equations (11) and (22)-(26)</li>
 * </ul>
 * </p>
 * @param value the double value to convert to a fraction.
 * @param epsilon maximum error allowed. The resulting fraction is within
 *        <code>epsilon</code> of <code>value</code>, in absolute terms.
 * @param maxIterations maximum number of convergents
 * @throws FractionConversionException if the continued fraction failed to
 *         converge.
 */
public Fraction(double value, double epsilon, int maxIterations)
    throws FractionConversionException
{
    // Integer.MAX_VALUE disables the denominator limit; epsilon governs convergence.
    this(value, epsilon, Integer.MAX_VALUE, maxIterations);
}
/**
 * Create a fraction given the double value and maximum denominator.
 * <p>
 * References:
 * <ul>
 * <li><a href="http://mathworld.wolfram.com/ContinuedFraction.html">
 * Continued Fraction</a> equations (11) and (22)-(26)</li>
 * </ul>
 * </p>
 * @param value the double value to convert to a fraction.
 * @param maxDenominator The maximum allowed value for denominator
 * @throws FractionConversionException if the continued fraction failed to
 *         converge
 */
public Fraction(double value, int maxDenominator)
    throws FractionConversionException
{
    // epsilon 0 disables the tolerance test; maxDenominator governs convergence.
    this(value, 0, maxDenominator, 100);
}
/**
 * Create a fraction given the double value and either the maximum error
 * allowed or the maximum number of denominator digits.
 * <p>
 *
 * NOTE: This constructor is called with EITHER
 *   - a valid epsilon value and the maxDenominator set to Integer.MAX_VALUE
 *     (that way the maxDenominator has no effect).
 * OR
 *   - a valid maxDenominator value and the epsilon value set to zero
 *     (that way epsilon only has effect if there is an exact match before
 *     the maxDenominator value is reached).
 * </p><p>
 *
 * It has been done this way so that the same code can be (re)used for both
 * scenarios. However this could be confusing to users if it were part of
 * the public API and this constructor should therefore remain PRIVATE.
 * </p>
 *
 * See JIRA issue ticket MATH-181 for more details:
 *
 *     https://issues.apache.org/jira/browse/MATH-181
 *
 * @param value the double value to convert to a fraction.
 * @param epsilon maximum error allowed. The resulting fraction is within
 *        <code>epsilon</code> of <code>value</code>, in absolute terms.
 * @param maxDenominator maximum denominator value allowed.
 * @param maxIterations maximum number of convergents
 * @throws FractionConversionException if the continued fraction failed to
 *         converge.
 */
private Fraction(double value, double epsilon, int maxDenominator, int maxIterations)
    throws FractionConversionException
{
    long overflow = Integer.MAX_VALUE;
    double r0 = value;
    long a0 = (long)Math.floor(r0);
    // the integer part alone already exceeds int range: unrepresentable
    if (a0 > overflow) {
        throw new FractionConversionException(value, a0, 1l);
    }
    // check for (almost) integer arguments, which should not go
    // to iterations.
    if (Math.abs(a0 - value) < epsilon) {
        this.numerator = (int) a0;
        this.denominator = 1;
        return;
    }
    // (p0,q0) and (p1,q1) are the two previous convergents of the continued
    // fraction expansion, (p2,q2) the one currently being computed.
    long p0 = 1;
    long q0 = 0;
    long p1 = a0;
    long q1 = 1;
    long p2 = 0;
    long q2 = 1;
    int n = 0;
    boolean stop = false;
    do {
        ++n;
        // next partial quotient of the continued fraction
        double r1 = 1.0 / (r0 - a0);
        long a1 = (long)Math.floor(r1);
        p2 = (a1 * p1) + p0;
        q2 = (a1 * q1) + q0;
        if ((p2 > overflow) || (q2 > overflow)) {
            throw new FractionConversionException(value, p2, q2);
        }
        double convergent = (double)p2 / (double)q2;
        // keep iterating while within budget, outside tolerance, and under the
        // denominator limit; otherwise the current convergent is the answer
        if (n < maxIterations && Math.abs(convergent - value) > epsilon && q2 < maxDenominator) {
            p0 = p1;
            p1 = p2;
            q0 = q1;
            q1 = q2;
            a0 = a1;
            r0 = r1;
        } else {
            stop = true;
        }
    } while (!stop);
    if (n >= maxIterations) {
        throw new FractionConversionException(value, maxIterations);
    }
    // use the newest convergent unless its denominator broke the limit,
    // in which case fall back to the previous one
    if (q2 < maxDenominator) {
        this.numerator = (int) p2;
        this.denominator = (int) q2;
    } else {
        this.numerator = (int) p1;
        this.denominator = (int) q1;
    }
}
/**
 * Create a fraction from an int.
 * The fraction is num / 1.
 * @param num the numerator.
 */
public Fraction(int num) {
    this(num, 1);
}
/**
 * Create a fraction given the numerator and denominator. The fraction is
 * reduced to lowest terms, and any sign is moved onto the numerator.
 * @param num the numerator.
 * @param den the denominator.
 * @throws ArithmeticException if the denominator is <code>zero</code>, or
 *         if normalizing the sign would overflow (either argument equal to
 *         <code>Integer.MIN_VALUE</code> with a negative denominator)
 */
public Fraction(int num, int den) {
    if (den == 0) {
        throw MathRuntimeException.createArithmeticException(
            ZERO_DENOMINATOR_MESSAGE, num, den);
    }
    if (den < 0) {
        // -Integer.MIN_VALUE is not representable, so refuse rather than wrap
        if (num == Integer.MIN_VALUE || den == Integer.MIN_VALUE) {
            throw MathRuntimeException.createArithmeticException(
                OVERFLOW_MESSAGE, num, den);
        }
        num = -num;
        den = -den;
    }
    // reduce numerator and denominator by greatest common denominator.
    final int d = MathUtils.gcd(num, den);
    if (d > 1) {
        num /= d;
        den /= d;
    }
    // move sign to numerator.
    // NOTE(review): den is already positive at this point, so this branch
    // looks unreachable; kept as-is from the upstream Commons Math source.
    if (den < 0) {
        num = -num;
        den = -den;
    }
    this.numerator = num;
    this.denominator = den;
}
/**
 * Returns the absolute value of this fraction.
 * @return the absolute value.
 */
public Fraction abs() {
    // the sign always lives on the numerator, so that is all we need to check
    return (numerator >= 0) ? this : negate();
}
/**
 * Compares this object to another based on size.
 * @param object the object to compare to
 * @return -1 if this is less than <tt>object</tt>, +1 if this is greater
 *         than <tt>object</tt>, 0 if they are equal.
 */
public int compareTo(Fraction object) {
    // cross-multiply in long arithmetic so the products cannot overflow
    final long lhs = ((long) numerator) * object.denominator;
    final long rhs = ((long) denominator) * object.numerator;
    if (lhs < rhs) {
        return -1;
    }
    return (lhs > rhs) ? 1 : 0;
}
/**
 * Gets the fraction as a <tt>double</tt>. This calculates the fraction as
 * the numerator divided by denominator.
 * @return the fraction as a <tt>double</tt>
 */
@Override
public double doubleValue() {
    // widen the numerator first; the denominator is then promoted automatically
    final double num = numerator;
    return num / denominator;
}
/**
 * Test for the equality of two fractions. If the lowest term
 * numerator and denominators are the same for both fractions, the two
 * fractions are considered to be equal.
 * @param other fraction to test for equality to this fraction
 * @return true if two fractions are equal, false if object is
 *         <tt>null</tt>, not an instance of {@link Fraction}, or not equal
 *         to this fraction instance.
 */
@Override
public boolean equals(Object other) {
    if (this == other) {
        return true;
    }
    if (!(other instanceof Fraction)) {
        return false;
    }
    // fractions are kept in lowest terms, so component-wise comparison suffices
    final Fraction rhs = (Fraction) other;
    return numerator == rhs.numerator && denominator == rhs.denominator;
}
/**
 * Gets the fraction as a <tt>float</tt>, i.e. the numerator divided by the
 * denominator.
 * @return the fraction as a <tt>float</tt>
 */
@Override
public float floatValue() {
    return (float) doubleValue();
}

/**
 * Access the denominator.
 * @return the denominator.
 */
public int getDenominator() {
    return denominator;
}

/**
 * Access the numerator.
 * @return the numerator.
 */
public int getNumerator() {
    return numerator;
}

/**
 * Gets a hashCode for the fraction.
 * @return a hash code value for this object
 */
@Override
public int hashCode() {
    // standard 17/37 accumulation over both components
    int result = 17;
    result = 37 * result + numerator;
    result = 37 * result + denominator;
    return result;
}

/**
 * Gets the fraction as an <tt>int</tt>: the whole number part of the
 * fraction (the double value truncated toward zero).
 * @return the whole number fraction part
 */
@Override
public int intValue() {
    return (int) doubleValue();
}

/**
 * Gets the fraction as a <tt>long</tt>: the whole number part of the
 * fraction (the double value truncated toward zero).
 * @return the whole number fraction part
 */
@Override
public long longValue() {
    return (long) doubleValue();
}
/**
 * Return the additive inverse of this fraction.
 * @return the negation of this fraction.
 */
public Fraction negate() {
    // -Integer.MIN_VALUE cannot be represented as an int, so refuse to wrap
    if (numerator == Integer.MIN_VALUE) {
        throw MathRuntimeException.createArithmeticException(
            OVERFLOW_MESSAGE, numerator, denominator);
    }
    return new Fraction(-numerator, denominator);
}

/**
 * Return the multiplicative inverse of this fraction.
 * @return the reciprocal fraction
 */
public Fraction reciprocal() {
    // swap the components; the constructor moves any sign back to the numerator
    return new Fraction(denominator, numerator);
}
/**
 * <p>Adds the value of this fraction to another, returning the result in reduced form.
 * The algorithm follows Knuth, 4.5.1.</p>
 *
 * @param fraction the fraction to add, must not be <code>null</code>
 * @return a <code>Fraction</code> instance with the resulting values
 * @throws IllegalArgumentException if the fraction is <code>null</code>
 * @throws ArithmeticException if the resulting numerator or denominator exceeds
 *         <code>Integer.MAX_VALUE</code>
 */
public Fraction add(Fraction fraction) {
    return addSub(fraction, true /* add */);
}
/**
 * Add an integer to the fraction.
 * @param i the <tt>integer</tt> to add.
 * @return this + i
 * @throws ArithmeticException if the resulting numerator cannot be
 *         represented in an <code>int</code>.
 */
public Fraction add(final int i) {
    // Delegate to the overflow-checked fraction addition: the previous direct
    // form "numerator + i * denominator" could silently wrap around int range.
    return add(new Fraction(i, 1));
}
/**
 * <p>Subtracts the value of another fraction from the value of this one,
 * returning the result in reduced form.</p>
 *
 * @param fraction the fraction to subtract, must not be <code>null</code>
 * @return a <code>Fraction</code> instance with the resulting values
 * @throws IllegalArgumentException if the fraction is <code>null</code>
 * @throws ArithmeticException if the resulting numerator or denominator
 *         cannot be represented in an <code>int</code>.
 */
public Fraction subtract(Fraction fraction) {
    return addSub(fraction, false /* subtract */);
}
/**
 * Subtract an integer from the fraction.
 * @param i the <tt>integer</tt> to subtract.
 * @return this - i
 * @throws ArithmeticException if the resulting numerator cannot be
 *         represented in an <code>int</code>.
 */
public Fraction subtract(final int i) {
    // Delegate to the overflow-checked fraction subtraction: the previous
    // direct form "numerator - i * denominator" could silently wrap around
    // int range.
    return subtract(new Fraction(i, 1));
}
/**
 * Implement add and subtract using algorithm described in Knuth 4.5.1.
 *
 * @param fraction the fraction to subtract, must not be <code>null</code>
 * @param isAdd true to add, false to subtract
 * @return a <code>Fraction</code> instance with the resulting values
 * @throws IllegalArgumentException if the fraction is <code>null</code>
 * @throws ArithmeticException if the resulting numerator or denominator
 *         cannot be represented in an <code>int</code>.
 */
private Fraction addSub(Fraction fraction, boolean isAdd) {
    if (fraction == null) {
        throw MathRuntimeException.createIllegalArgumentException(NULL_FRACTION);
    }
    // zero is identity for addition.
    if (numerator == 0) {
        return isAdd ? fraction : fraction.negate();
    }
    if (fraction.numerator == 0) {
        return this;
    }
    // if denominators are randomly distributed, d1 will be 1 about 61%
    // of the time.
    int d1 = MathUtils.gcd(denominator, fraction.denominator);
    if (d1==1) {
        // result is ( (u*v' +/- u'v) / u'v') -- each product checked for overflow
        int uvp = MathUtils.mulAndCheck(numerator, fraction.denominator);
        int upv = MathUtils.mulAndCheck(fraction.numerator, denominator);
        return new Fraction
            (isAdd ? MathUtils.addAndCheck(uvp, upv) :
             MathUtils.subAndCheck(uvp, upv),
             MathUtils.mulAndCheck(denominator, fraction.denominator));
    }
    // the quantity 't' requires 65 bits of precision; see knuth 4.5.1
    // exercise 7.  we're going to use a BigInteger.
    // t = u(v'/d1) +/- v(u'/d1)
    BigInteger uvp = BigInteger.valueOf(numerator)
        .multiply(BigInteger.valueOf(fraction.denominator/d1));
    BigInteger upv = BigInteger.valueOf(fraction.numerator)
        .multiply(BigInteger.valueOf(denominator/d1));
    BigInteger t = isAdd ? uvp.add(upv) : uvp.subtract(upv);
    // but d2 doesn't need extra precision because
    // d2 = gcd(t,d1) = gcd(t mod d1, d1)
    int tmodd1 = t.mod(BigInteger.valueOf(d1)).intValue();
    int d2 = (tmodd1==0)?d1:MathUtils.gcd(tmodd1, d1);
    // result is (t/d2) / (u'/d1)(v'/d2)
    BigInteger w = t.divide(BigInteger.valueOf(d2));
    // the reduced numerator must still fit in a (signed) int
    if (w.bitLength() > 31) {
        throw MathRuntimeException.createArithmeticException("overflow, numerator too large after multiply: {0}",
                                                             w);
    }
    return new Fraction (w.intValue(),
            MathUtils.mulAndCheck(denominator/d1,
                    fraction.denominator/d2));
}
/**
 * <p>Multiplies the value of this fraction by another, returning the
 * result in reduced form.</p>
 *
 * @param fraction the fraction to multiply by, must not be <code>null</code>
 * @return a <code>Fraction</code> instance with the resulting values
 * @throws IllegalArgumentException if the fraction is <code>null</code>
 * @throws ArithmeticException if the resulting numerator or denominator exceeds
 *         <code>Integer.MAX_VALUE</code>
 */
public Fraction multiply(Fraction fraction) {
    if (fraction == null) {
        throw MathRuntimeException.createIllegalArgumentException(NULL_FRACTION);
    }
    if (numerator == 0 || fraction.numerator == 0) {
        return ZERO;
    }
    // knuth 4.5.1
    // make sure we don't overflow unless the result *must* overflow.
    // cross-cancel common factors before multiplying (checked for overflow)
    int d1 = MathUtils.gcd(numerator, fraction.denominator);
    int d2 = MathUtils.gcd(fraction.numerator, denominator);
    return getReducedFraction
    (MathUtils.mulAndCheck(numerator/d1, fraction.numerator/d2),
            MathUtils.mulAndCheck(denominator/d2, fraction.denominator/d1));
}
/**
 * Multiply the fraction by an integer.
 *
 * <p>The product is computed with an explicit overflow check, consistent
 * with the other arithmetic operations in this class; previously
 * <code>numerator * i</code> could wrap silently and return a wrong value.</p>
 *
 * @param i the <tt>integer</tt> to multiply by.
 * @return this * i
 * @throws ArithmeticException if the resulting numerator exceeds
 *  <code>Integer.MAX_VALUE</code>
 */
public Fraction multiply(final int i) {
    return new Fraction(MathUtils.mulAndCheck(numerator, i), denominator);
}
/**
 * <p>Divide the value of this fraction by another.</p>
 *
 * @param fraction the fraction to divide by, must not be <code>null</code>
 * @return a <code>Fraction</code> instance with the resulting values
 * @throws IllegalArgumentException if the fraction is <code>null</code>
 * @throws ArithmeticException if the fraction to divide by is zero
 * @throws ArithmeticException if the resulting numerator or denominator exceeds
 *  <code>Integer.MAX_VALUE</code>
 */
public Fraction divide(Fraction fraction) {
    if (fraction == null) {
        throw MathRuntimeException.createIllegalArgumentException(NULL_FRACTION);
    }
    if (fraction.numerator != 0) {
        // a/b divided by c/d is a/b multiplied by d/c.
        return multiply(fraction.reciprocal());
    }
    throw MathRuntimeException.createArithmeticException(
        "the fraction to divide by must not be zero: {0}/{1}",
        fraction.numerator, fraction.denominator);
}
/**
 * Divide the fraction by an integer.
 *
 * <p>The new denominator is computed with an explicit overflow check,
 * consistent with the other arithmetic operations in this class;
 * previously <code>denominator * i</code> could wrap silently.
 * (Also fixes the javadoc, which incorrectly documented the return
 * value as {@code this * i}.)</p>
 *
 * @param i the <tt>integer</tt> to divide by.
 * @return this / i
 * @throws ArithmeticException if {@code i} is zero (zero denominator) or if
 *  the resulting denominator exceeds <code>Integer.MAX_VALUE</code>
 */
public Fraction divide(final int i) {
    return new Fraction(numerator, MathUtils.mulAndCheck(denominator, i));
}
/**
 * <p>Creates a <code>Fraction</code> instance with the 2 parts
 * of a fraction Y/Z.</p>
 *
 * <p>Any negative signs are resolved to be on the numerator.</p>
 *
 * @param numerator the numerator, for example the three in 'three sevenths'
 * @param denominator the denominator, for example the seven in 'three sevenths'
 * @return a new fraction instance, with the numerator and denominator reduced
 * @throws ArithmeticException if the denominator is <code>zero</code>
 */
public static Fraction getReducedFraction(int numerator, int denominator) {
    if (denominator == 0) {
        throw MathRuntimeException.createArithmeticException(
            ZERO_DENOMINATOR_MESSAGE, numerator, denominator);
    }
    if (numerator==0) {
        return ZERO; // normalize zero.
    }
    // allow 2^k/-2^31 as a valid fraction (where k>0)
    // Integer.MIN_VALUE cannot be negated, so halve both parts first when
    // the numerator is even; this keeps such fractions representable.
    if (denominator==Integer.MIN_VALUE && (numerator&1)==0) {
        numerator/=2; denominator/=2;
    }
    // Move any remaining negative sign onto the numerator.
    if (denominator < 0) {
        // -Integer.MIN_VALUE overflows, so either part being MIN_VALUE here
        // makes the sign flip impossible; report overflow instead.
        if (numerator==Integer.MIN_VALUE ||
            denominator==Integer.MIN_VALUE) {
            throw MathRuntimeException.createArithmeticException(
                OVERFLOW_MESSAGE, numerator, denominator);
        }
        numerator = -numerator;
        denominator = -denominator;
    }
    // simplify fraction.
    int gcd = MathUtils.gcd(numerator, denominator);
    numerator /= gcd;
    denominator /= gcd;
    return new Fraction(numerator, denominator);
}
/**
 * <p>
 * Returns the <code>String</code> representing this fraction, ie
 * "num / dem" or just "num" if the denominator is one.
 * </p>
 *
 * @return a string representation of the fraction.
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Whole numbers (denominator 1) print without the "/ den" suffix.
    if (denominator == 1) {
        return Integer.toString(numerator);
    }
    if (numerator == 0) {
        return "0";
    }
    return numerator + " / " + denominator;
}
/**
 * {@inheritDoc}
 *
 * @return the singleton {@link FractionField} to which all fractions belong
 */
public FractionField getField() {
    return FractionField.getInstance();
}
}
| |
package ca.wimsc.client.common.map.layers;
import java.util.Date;
import java.util.Set;
import ca.wimsc.client.common.model.Model;
import ca.wimsc.client.common.model.MostRecentTweets;
import ca.wimsc.client.common.model.ShowTweetsMode;
import ca.wimsc.client.common.model.Tweet;
import ca.wimsc.client.common.util.Common;
import ca.wimsc.client.common.util.DateUtil;
import ca.wimsc.client.common.util.IPropertyChangeListener;
import ca.wimsc.client.common.util.StringUtil;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.RepeatingCommand;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Widget;
/**
 * Displays the most recent tweet for a particular route in a single line.
 *
 * <p>Listens for route-selection changes on the shared {@link Model}, fetches
 * the latest tweet for the selected routes over RPC, and renders it into a
 * footer strip. A repeating task refreshes the display until
 * {@link #closeNow()} is called.</p>
 */
public class RecentTweetsFooterLayer extends FlowPanel implements IMapFooterLayer {

    protected static final String HASHTAG_TTCU = "#ttcu";

    private Image myCategoryIcon;              // twitter icon, hidden when there is nothing to show
    private MostRecentTweets myCurrentResults; // payload of the last successful RPC call
    private Set<String> myCurrentRouteTag;     // route tags the current results belong to
    private boolean myDestroyed;               // set by closeNow(); cancels the repeating task
    private MySelectedRoutePropertyChangeListener mySelectedRoutePropertyChangeListener;
    private HTML myTextLabel;                  // lazily created and reused across redraws
    private FlowPanel myTweetsPanel;
    private RepeatingCommand myScheduledUpdateTask;

    /**
     * Constructor
     */
    public RecentTweetsFooterLayer() {
        myCategoryIcon = new Image("images/twitter_icon.png");
        myCategoryIcon.addStyleName("twitterCategoryIcon");
        myCategoryIcon.setVisible(false);
        add(myCategoryIcon);

        myTweetsPanel = new FlowPanel();
        add(myTweetsPanel);

        mySelectedRoutePropertyChangeListener = new MySelectedRoutePropertyChangeListener();
        Model.INSTANCE.addPropertyChangeListener(Model.SELECTED_ROUTE_PROPERTY, mySelectedRoutePropertyChangeListener);

        // Repeats until closeNow() sets myDestroyed; returning false from
        // execute() cancels the schedule.
        myScheduledUpdateTask = new RepeatingCommand() {
            @Override
            public boolean execute() {
                if (!myDestroyed) {
                    updateShowingTweet();
                }
                return !myDestroyed;
            }
        };
        // NOTE(review): 60 * 60 * 1000 ms = one hour between refreshes —
        // confirm this cadence is intended.
        Scheduler.get().scheduleFixedDelay(myScheduledUpdateTask, 60 * 60 * 1000);

        updateCurrentRouteTag();
    }

    /**
     * Clean up: detach the model listener and stop the scheduled refresh task.
     */
    @Override
    public void closeNow() {
        Model.INSTANCE.removePropertyChangeListener(Model.SELECTED_ROUTE_PROPERTY, mySelectedRoutePropertyChangeListener);
        myDestroyed = true;
    }

    /**
     * Create an inline label displaying the category of the current tweet (e.g. #ttcu).
     * Returns {@code null} (no label) in this base implementation; subclasses may override.
     */
    protected Widget createCategoryLabel() {
        // Anchor categoryLabel = new Anchor(HASHTAG_TTCU, "http://ttcupdates.com/", "_blank");
        // categoryLabel.addStyleName("twitterCategoryLabel");
        // return categoryLabel;
        return null;
    }

    /** @return the payload of the last successful RPC call, or {@code null} */
    protected MostRecentTweets getCurrentResults() {
        return myCurrentResults;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getFooterHeight() {
        return 20;
    }

    /**
     * For normal mode, hard code (that rhymes!)
     */
    protected ShowTweetsMode getShowTweetsMode() {
        return ShowTweetsMode.SHOW_ONE;
    }

    /** @return the panel the tweet widgets are rendered into */
    protected FlowPanel getTweetsPanel() {
        return myTweetsPanel;
    }

    /** Hook invoked after a tweet has been rendered. */
    protected void onDoneAddingTweet() {
        // nothing, may be overridden
    }

    /**
     * Rebuild the footer contents from {@link #myCurrentResults}. Shows the
     * icon and tweet line when a tweet is available, hides them otherwise.
     */
    protected void redrawContents() {
        myTweetsPanel.clear();
        ShowTweetsMode showTweetsMode = getShowTweetsMode();
        if (myCurrentResults != null && myCurrentResults.getMostRecent() != null) {
            myCategoryIcon.setVisible(true);
            addStyleName("twitterPanel");
            try {
                Tweet firstTweet = myCurrentResults.getMostRecent();
                Date createdAt = firstTweet.getCreatedAtDate();
                if (createdAt == null) {
                    return;
                }
                Widget categoryLabel = createCategoryLabel();
                if (categoryLabel != null) {
                    myTweetsPanel.add(categoryLabel);
                }
                Label timeElapsedLabel = new Label(DateUtil.formatTimeElapsed(createdAt) + " ago ");
                timeElapsedLabel.addStyleName("twitterTimeElapsedLabel");
                myTweetsPanel.add(timeElapsedLabel);
                if (!showTweetsMode.equals(ShowTweetsMode.SHORT)) {
                    Label nameLabel = new Label("@" + firstTweet.getFromUser() + " ");
                    nameLabel.addStyleName("twitterNameLabel");
                    myTweetsPanel.add(nameLabel);
                    String tweetText = firstTweet.getText().replace(HASHTAG_TTCU, "");
                    String anchorTweetText = StringUtil.addAnchorTagsAroundLinks(tweetText);
                    if (myTextLabel == null) {
                        myTextLabel = new HTML(anchorTweetText);
                        myTextLabel.addStyleName("twitterTextLabel");
                    } else {
                        // BUGFIX: the cached label previously kept the first
                        // tweet's HTML forever (only the tooltip was updated);
                        // refresh the markup so newer tweets are displayed.
                        myTextLabel.setHTML(anchorTweetText);
                    }
                    myTextLabel.setTitle(tweetText);
                    myTweetsPanel.add(myTextLabel);
                }
                onDoneAddingTweet();
            } catch (Exception e) {
                GWT.log("Problem adding Tweet panel: ", e);
            }
        } else {
            myCategoryIcon.setVisible(false);
            removeStyleName("twitterPanel");
        }
    }

    /** Re-read the selected route tags from the model and refresh if they changed. */
    private void updateCurrentRouteTag() {
        Set<String> newValue = Model.INSTANCE.getSelectedRouteTags();
        if (newValue == null) {
            return;
        }
        if (myCurrentRouteTag == null || !myCurrentRouteTag.equals(newValue)) {
            // (removed a redundant cast — newValue is already Set<String>)
            myCurrentRouteTag = newValue;
            updateShowingTweet();
        }
    }

    /**
     * Update the showing tweet on this panel
     */
    public void updateShowingTweet() {
        if (myCurrentRouteTag != null && myCurrentRouteTag.size() > 0) {
            Common.SC_SVC_TWIT.getMostRecentTweetForRoutes(myCurrentRouteTag, new AsyncCallback<MostRecentTweets>() {
                @Override
                public void onSuccess(MostRecentTweets theResult) {
                    myCurrentResults = theResult;
                    redrawContents();
                }

                @Override
                public void onFailure(Throwable theCaught) {
                    Common.report(Common.CLIENT_LOGGING_HANDLER, theCaught);
                }
            });
        }
    }

    private final class MySelectedRoutePropertyChangeListener implements IPropertyChangeListener {
        @Override
        public void propertyChanged(String thePropertyName, Object theOldValue, Object theNewValue) {
            // BUGFIX: was "thePropertyName != Model.SELECTED_ROUTE_PROPERTY",
            // a reference comparison on Strings that only works when the
            // argument happens to be the interned constant. Compare by value.
            if (!Model.SELECTED_ROUTE_PROPERTY.equals(thePropertyName)) {
                return;
            }
            updateCurrentRouteTag();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getBottomIndex() {
        return 0;
    }
}
| |
/*
* Xero Accounting API
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* Contact: api@xero.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.xero.models.accounting;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
import com.xero.api.StringUtil;
import io.swagger.annotations.ApiModelProperty;
import java.util.Objects;
/**
 * Address — a postal address record used by the Xero Accounting API.
 *
 * <p>This class is auto-generated by OpenAPI Generator (see the file header);
 * do not hand-edit the structure. Jackson maps each field to/from the
 * PascalCase JSON property named in its {@code @JsonProperty} annotation.
 * All setters/fluent mutators accept {@code null} (fields are simply unset).</p>
 */
public class Address {
  // NOTE(review): this field is never used inside this class; it appears in
  // every generated Xero model. Kept as-is because the file is generated —
  // confirm against the generator template before removing.
  StringUtil util = new StringUtil();

  /** define the type of address */
  public enum AddressTypeEnum {
    /** POBOX */
    POBOX("POBOX"),

    /** STREET */
    STREET("STREET");

    private String value;

    AddressTypeEnum(String value) {
      this.value = value;
    }

    /**
     * getValue
     *
     * @return String value
     */
    @JsonValue
    public String getValue() {
      return value;
    }

    /**
     * toString
     *
     * @return String value
     */
    @Override
    public String toString() {
      return String.valueOf(value);
    }

    /**
     * fromValue
     *
     * @param value String
     */
    @JsonCreator
    public static AddressTypeEnum fromValue(String value) {
      for (AddressTypeEnum b : AddressTypeEnum.values()) {
        if (b.value.equals(value)) {
          return b;
        }
      }
      // Unknown wire values are rejected rather than mapped to null.
      throw new IllegalArgumentException("Unexpected value '" + value + "'");
    }
  }

  @JsonProperty("AddressType")
  private AddressTypeEnum addressType;

  @JsonProperty("AddressLine1")
  private String addressLine1;

  @JsonProperty("AddressLine2")
  private String addressLine2;

  @JsonProperty("AddressLine3")
  private String addressLine3;

  @JsonProperty("AddressLine4")
  private String addressLine4;

  @JsonProperty("City")
  private String city;

  @JsonProperty("Region")
  private String region;

  @JsonProperty("PostalCode")
  private String postalCode;

  @JsonProperty("Country")
  private String country;

  @JsonProperty("AttentionTo")
  private String attentionTo;

  /**
   * define the type of address
   *
   * @param addressType AddressTypeEnum
   * @return Address
   */
  public Address addressType(AddressTypeEnum addressType) {
    this.addressType = addressType;
    return this;
  }

  /**
   * define the type of address
   *
   * @return addressType
   */
  @ApiModelProperty(value = "define the type of address")
  /**
   * define the type of address
   *
   * @return addressType AddressTypeEnum
   */
  public AddressTypeEnum getAddressType() {
    return addressType;
  }

  /**
   * define the type of address
   *
   * @param addressType AddressTypeEnum
   */
  public void setAddressType(AddressTypeEnum addressType) {
    this.addressType = addressType;
  }

  /**
   * max length = 500
   *
   * @param addressLine1 String
   * @return Address
   */
  public Address addressLine1(String addressLine1) {
    this.addressLine1 = addressLine1;
    return this;
  }

  /**
   * max length = 500
   *
   * @return addressLine1
   */
  @ApiModelProperty(value = "max length = 500")
  /**
   * max length = 500
   *
   * @return addressLine1 String
   */
  public String getAddressLine1() {
    return addressLine1;
  }

  /**
   * max length = 500
   *
   * @param addressLine1 String
   */
  public void setAddressLine1(String addressLine1) {
    this.addressLine1 = addressLine1;
  }

  /**
   * max length = 500
   *
   * @param addressLine2 String
   * @return Address
   */
  public Address addressLine2(String addressLine2) {
    this.addressLine2 = addressLine2;
    return this;
  }

  /**
   * max length = 500
   *
   * @return addressLine2
   */
  @ApiModelProperty(value = "max length = 500")
  /**
   * max length = 500
   *
   * @return addressLine2 String
   */
  public String getAddressLine2() {
    return addressLine2;
  }

  /**
   * max length = 500
   *
   * @param addressLine2 String
   */
  public void setAddressLine2(String addressLine2) {
    this.addressLine2 = addressLine2;
  }

  /**
   * max length = 500
   *
   * @param addressLine3 String
   * @return Address
   */
  public Address addressLine3(String addressLine3) {
    this.addressLine3 = addressLine3;
    return this;
  }

  /**
   * max length = 500
   *
   * @return addressLine3
   */
  @ApiModelProperty(value = "max length = 500")
  /**
   * max length = 500
   *
   * @return addressLine3 String
   */
  public String getAddressLine3() {
    return addressLine3;
  }

  /**
   * max length = 500
   *
   * @param addressLine3 String
   */
  public void setAddressLine3(String addressLine3) {
    this.addressLine3 = addressLine3;
  }

  /**
   * max length = 500
   *
   * @param addressLine4 String
   * @return Address
   */
  public Address addressLine4(String addressLine4) {
    this.addressLine4 = addressLine4;
    return this;
  }

  /**
   * max length = 500
   *
   * @return addressLine4
   */
  @ApiModelProperty(value = "max length = 500")
  /**
   * max length = 500
   *
   * @return addressLine4 String
   */
  public String getAddressLine4() {
    return addressLine4;
  }

  /**
   * max length = 500
   *
   * @param addressLine4 String
   */
  public void setAddressLine4(String addressLine4) {
    this.addressLine4 = addressLine4;
  }

  /**
   * max length = 255
   *
   * @param city String
   * @return Address
   */
  public Address city(String city) {
    this.city = city;
    return this;
  }

  /**
   * max length = 255
   *
   * @return city
   */
  @ApiModelProperty(value = "max length = 255")
  /**
   * max length = 255
   *
   * @return city String
   */
  public String getCity() {
    return city;
  }

  /**
   * max length = 255
   *
   * @param city String
   */
  public void setCity(String city) {
    this.city = city;
  }

  /**
   * max length = 255
   *
   * @param region String
   * @return Address
   */
  public Address region(String region) {
    this.region = region;
    return this;
  }

  /**
   * max length = 255
   *
   * @return region
   */
  @ApiModelProperty(value = "max length = 255")
  /**
   * max length = 255
   *
   * @return region String
   */
  public String getRegion() {
    return region;
  }

  /**
   * max length = 255
   *
   * @param region String
   */
  public void setRegion(String region) {
    this.region = region;
  }

  /**
   * max length = 50
   *
   * @param postalCode String
   * @return Address
   */
  public Address postalCode(String postalCode) {
    this.postalCode = postalCode;
    return this;
  }

  /**
   * max length = 50
   *
   * @return postalCode
   */
  @ApiModelProperty(value = "max length = 50")
  /**
   * max length = 50
   *
   * @return postalCode String
   */
  public String getPostalCode() {
    return postalCode;
  }

  /**
   * max length = 50
   *
   * @param postalCode String
   */
  public void setPostalCode(String postalCode) {
    this.postalCode = postalCode;
  }

  /**
   * max length = 50, [A-Z], [a-z] only
   *
   * @param country String
   * @return Address
   */
  public Address country(String country) {
    this.country = country;
    return this;
  }

  /**
   * max length = 50, [A-Z], [a-z] only
   *
   * @return country
   */
  @ApiModelProperty(value = "max length = 50, [A-Z], [a-z] only")
  /**
   * max length = 50, [A-Z], [a-z] only
   *
   * @return country String
   */
  public String getCountry() {
    return country;
  }

  /**
   * max length = 50, [A-Z], [a-z] only
   *
   * @param country String
   */
  public void setCountry(String country) {
    this.country = country;
  }

  /**
   * max length = 255
   *
   * @param attentionTo String
   * @return Address
   */
  public Address attentionTo(String attentionTo) {
    this.attentionTo = attentionTo;
    return this;
  }

  /**
   * max length = 255
   *
   * @return attentionTo
   */
  @ApiModelProperty(value = "max length = 255")
  /**
   * max length = 255
   *
   * @return attentionTo String
   */
  public String getAttentionTo() {
    return attentionTo;
  }

  /**
   * max length = 255
   *
   * @param attentionTo String
   */
  public void setAttentionTo(String attentionTo) {
    this.attentionTo = attentionTo;
  }

  /** Field-by-field equality over all ten address properties. */
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    Address address = (Address) o;
    return Objects.equals(this.addressType, address.addressType)
        && Objects.equals(this.addressLine1, address.addressLine1)
        && Objects.equals(this.addressLine2, address.addressLine2)
        && Objects.equals(this.addressLine3, address.addressLine3)
        && Objects.equals(this.addressLine4, address.addressLine4)
        && Objects.equals(this.city, address.city)
        && Objects.equals(this.region, address.region)
        && Objects.equals(this.postalCode, address.postalCode)
        && Objects.equals(this.country, address.country)
        && Objects.equals(this.attentionTo, address.attentionTo);
  }

  /** Hash over the same ten properties used by {@link #equals}. */
  @Override
  public int hashCode() {
    return Objects.hash(
        addressType,
        addressLine1,
        addressLine2,
        addressLine3,
        addressLine4,
        city,
        region,
        postalCode,
        country,
        attentionTo);
  }

  /** Multi-line debug representation; not intended for machine parsing. */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class Address {\n");
    sb.append(" addressType: ").append(toIndentedString(addressType)).append("\n");
    sb.append(" addressLine1: ").append(toIndentedString(addressLine1)).append("\n");
    sb.append(" addressLine2: ").append(toIndentedString(addressLine2)).append("\n");
    sb.append(" addressLine3: ").append(toIndentedString(addressLine3)).append("\n");
    sb.append(" addressLine4: ").append(toIndentedString(addressLine4)).append("\n");
    sb.append(" city: ").append(toIndentedString(city)).append("\n");
    sb.append(" region: ").append(toIndentedString(region)).append("\n");
    sb.append(" postalCode: ").append(toIndentedString(postalCode)).append("\n");
    sb.append(" country: ").append(toIndentedString(country)).append("\n");
    sb.append(" attentionTo: ").append(toIndentedString(attentionTo)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n ");
  }
}
| |
/*
* Copyright 2018 University of Michigan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.verdictdb.connection;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.verdictdb.commons.StringSplitter;
import org.verdictdb.commons.VerdictDBLogger;
import org.verdictdb.exception.VerdictDBDbmsException;
import org.verdictdb.sqlsyntax.SparkSyntax;
import org.verdictdb.sqlsyntax.SqlSyntax;
import java.util.ArrayList;
import java.util.List;
/**
 * A {@link DbmsConnection} backed by a live {@link SparkSession}.
 *
 * <p>All metadata queries (schemas, tables, columns, partitions) are issued
 * as SQL built by the configured {@link SqlSyntax} (Spark dialect by default)
 * and executed through {@link #execute(String)}.</p>
 */
public class SparkConnection extends DbmsConnection {

  /** The underlying Spark session all SQL is issued through. */
  SparkSession sc;

  /** Dialect helper used to build metadata queries. */
  SqlSyntax syntax;

  /** Schema assumed when a query does not name one explicitly. */
  String currentSchema;

  // (fixed a stray double semicolon that used to follow this initializer)
  private VerdictDBLogger log = VerdictDBLogger.getLogger(this.getClass());

  /**
   * Constructs a connection from an untyped session object.
   *
   * @param sc must be a {@link SparkSession}; a ClassCastException is raised otherwise
   */
  public SparkConnection(Object sc) {
    this.sc = (SparkSession) sc;
    this.syntax = new SparkSyntax();
  }

  public SparkConnection(SparkSession sc) {
    this.sc = sc;
    this.syntax = new SparkSyntax();
  }

  public SparkConnection(SparkSession sc, SqlSyntax syntax) {
    this.sc = sc;
    this.syntax = syntax;
  }

  /** @return the names of all schemas (databases) visible to this session */
  @Override
  public List<String> getSchemas() throws VerdictDBDbmsException {
    List<String> schemas = new ArrayList<>();
    DbmsQueryResult queryResult = execute(syntax.getSchemaCommand());
    while (queryResult.next()) {
      schemas.add((String) queryResult.getValue(syntax.getSchemaNameColumnIndex()));
    }
    return schemas;
  }

  /**
   * Lists the tables of a schema; returns an empty list when the schema does
   * not exist (Spark reports this as a "not found" error).
   */
  @Override
  public List<String> getTables(String schema) throws VerdictDBDbmsException {
    List<String> tables = new ArrayList<>();
    try {
      DbmsQueryResult queryResult = execute(syntax.getTableCommand(schema));
      while (queryResult.next()) {
        tables.add((String) queryResult.getValue(syntax.getTableNameColumnIndex()));
      }
    } catch (Exception e) {
      // A missing schema is treated as "no tables". Guard against exceptions
      // whose message is null — previously this line could throw an NPE.
      if (e.getMessage() != null && e.getMessage().contains("not found")) {
        // do nothing
      } else {
        throw e;
      }
    }
    return tables;
  }

  /**
   * Lists (column name, lower-cased type) pairs for a table. Rows after the
   * "# Partition Information" marker are partition metadata and are skipped.
   */
  @Override
  public List<Pair<String, String>> getColumns(String schema, String table)
      throws VerdictDBDbmsException {
    List<Pair<String, String>> columns = new ArrayList<>();
    DbmsQueryResult queryResult = execute(syntax.getColumnsCommand(schema, table));
    while (queryResult.next()) {
      String name = queryResult.getString(syntax.getColumnNameColumnIndex());
      String type = queryResult.getString(syntax.getColumnTypeColumnIndex());
      type = type.toLowerCase();
      // when there exists partitions in a table, this extra information will be returned.
      // we should ignore this.
      if (name.equalsIgnoreCase("# Partition Information")) {
        break;
      }
      columns.add(new ImmutablePair<>(name, type));
    }
    return columns;
  }

  // ignores catalog
  @Override
  public List<Pair<String, String>> getColumns(String catalog, String schema, String table)
      throws VerdictDBDbmsException {
    // Spark has no catalog concept here; delegate to the two-argument overload
    // instead of maintaining a duplicated copy of the same loop.
    return getColumns(schema, table);
  }

  /**
   * Returns the partition column names of a table by scanning the DESCRIBE
   * output for the "# Partition Information" section.
   */
  @Override
  public List<String> getPartitionColumns(String schema, String table)
      throws VerdictDBDbmsException {
    List<String> partition = new ArrayList<>();
    DbmsQueryResult queryResult = execute(syntax.getPartitionCommand(schema, table));
    boolean hasPartitionInfoStarted = false;
    while (queryResult.next()) {
      String name = queryResult.getString(0);
      if (hasPartitionInfoStarted && (name.equalsIgnoreCase("# col_name") == false)) {
        partition.add(name);
      } else if (name.equalsIgnoreCase("# Partition Information")) {
        hasPartitionInfoStarted = true;
      }
    }
    return partition;
  }

  @Override
  public String getDefaultSchema() {
    return currentSchema;
  }

  @Override
  public void setDefaultSchema(String schema) {
    currentSchema = schema;
  }

  /** Spark exposes no primary-key metadata; always returns {@code null}. */
  @Override
  public List<String> getPrimaryKey(String schema, String table) throws VerdictDBDbmsException {
    return null;
  }

  /**
   * Executes one or more semicolon-separated statements (semicolons inside
   * quoted literals are respected) and returns the result of the last one.
   */
  @Override
  public DbmsQueryResult execute(String sql) throws VerdictDBDbmsException {
    String quoteChars = "'\"";
    List<String> sqls = StringSplitter.splitOnSemicolon(sql, quoteChars);
    DbmsQueryResult finalResult = null;
    for (String s : sqls) {
      finalResult = executeSingle(s);
    }
    return finalResult;
  }

  /**
   * Executes exactly one statement and wraps the Spark result.
   *
   * @throws VerdictDBDbmsException if Spark rejects the query
   */
  public DbmsQueryResult executeSingle(String sql) throws VerdictDBDbmsException {
    // Strip only *trailing* semicolons. The previous blanket
    // sql.replace(";", "") also deleted semicolons that appeared inside
    // quoted string literals, silently corrupting such queries.
    sql = sql.trim();
    while (sql.endsWith(";")) {
      sql = sql.substring(0, sql.length() - 1).trim();
    }
    log.trace("Issues the following query to Spark: " + sql);
    try {
      SparkQueryResult srs = null;
      Dataset<Row> result = sc.sql(sql);
      if (result != null) {
        srs = new SparkQueryResult(result);
      }
      return srs;
    } catch (Exception e) {
      // NOTE(review): the original exception is flattened into the message;
      // pass it as the cause instead if VerdictDBDbmsException offers a
      // (String, Throwable) constructor — confirm against the class.
      String msg = "Issued the following query: " + sql + "\n" + e.getMessage();
      throw new VerdictDBDbmsException(msg);
    }
  }

  @Override
  public SqlSyntax getSyntax() {
    return syntax;
  }

  /** Spark statements cannot be cancelled through this API; no-op. */
  @Override
  public void abort() {}

  @Override
  public void close() {
    try {
      this.sc.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  public SparkSession getSparkSession() {
    return sc;
  }

  /** Creates a sibling connection that shares the session and default schema. */
  @Override
  public DbmsConnection copy() {
    SparkConnection newConn = new SparkConnection(sc, syntax);
    newConn.setDefaultSchema(currentSchema);
    return newConn;
  }
}
| |
package mcjty.rftools.blocks.dimlets;
import mcjty.container.InventoryHelper;
import mcjty.entity.GenericEnergyHandlerTileEntity;
import mcjty.rftools.blocks.BlockTools;
import mcjty.rftools.blocks.teleporter.TeleporterSetup;
import mcjty.rftools.dimension.DimensionInformation;
import mcjty.rftools.dimension.RfToolsDimensionManager;
import mcjty.rftools.dimension.world.WorldGenerationTools;
import mcjty.rftools.items.dimlets.*;
import mcjty.rftools.items.dimlets.types.IDimletType;
import mcjty.rftools.network.Argument;
import mcjty.rftools.network.PacketHandler;
import mcjty.rftools.network.PacketRequestIntegerFromServer;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.ISidedInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.world.World;
import net.minecraftforge.common.util.Constants;
import net.minecraftforge.common.util.ForgeDirection;
import java.util.Map;
public class DimensionEditorTileEntity extends GenericEnergyHandlerTileEntity implements ISidedInventory {
public static final String CMD_GETEDITING = "getEditing";
public static final String CLIENTCMD_GETEDITING = "getEditing";
private static int editPercentage = 0;
private int ticksLeft = -1;
private int ticksCost = -1;
private int rfPerTick = -1;
private InventoryHelper inventoryHelper = new InventoryHelper(this, DimensionEditorContainer.factory, 2);
public DimensionEditorTileEntity() {
super(DimletConfiguration.EDITOR_MAXENERGY, DimletConfiguration.EDITOR_RECEIVEPERTICK);
}
@Override
protected void checkStateServer() {
ItemStack injectableItemStack = validateInjectableItemStack();
if (injectableItemStack == null) {
return;
}
ItemStack dimensionItemStack = validateDimensionItemStack();
if (dimensionItemStack == null) {
return;
}
if (ticksLeft == -1) {
// We were not injecting. Start now.
if (isMatterReceiver(injectableItemStack)) {
ticksCost = DimletCosts.baseDimensionTickCost + 1000;
ticksLeft = ticksCost;
rfPerTick = DimletCosts.baseDimensionCreationCost + 200;
} else {
DimletKey key = KnownDimletConfiguration.getDimletKey(injectableItemStack, worldObj);
DimletEntry dimletEntry = KnownDimletConfiguration.getEntry(key);
ticksCost = DimletCosts.baseDimensionTickCost + dimletEntry.getTickCost();
ticksLeft = ticksCost;
rfPerTick = DimletCosts.baseDimensionCreationCost + dimletEntry.getRfCreateCost();
}
} else {
int rf = getEnergyStored(ForgeDirection.DOWN);
int rfpt = rfPerTick;
rfpt = (int) (rfpt * (2.0f - getInfusedFactor()) / 2.0f);
if (rf >= rfpt) {
// Enough energy.
extractEnergy(ForgeDirection.DOWN, rfpt, false);
ticksLeft--;
if (ticksLeft <= 0) {
RfToolsDimensionManager dimensionManager = RfToolsDimensionManager.getDimensionManager(worldObj);
ItemStack dimensionTab = validateDimensionItemStack();
NBTTagCompound tagCompound = dimensionTab.getTagCompound();
int id = tagCompound.getInteger("id");
injectableItemStack = validateInjectableItemStack();
if (isMatterReceiver(injectableItemStack)) {
World dimWorld = dimensionManager.getWorldForDimension(id);
int y = findGoodReceiverLocation(dimWorld);
if (y == -1) {
y = dimWorld.getHeight() / 2;
}
dimWorld.setBlock(8, y, 8, TeleporterSetup.matterReceiverBlock, 0, 2);
TeleporterSetup.matterReceiverBlock.onBlockPlaced(dimWorld, 8, y, 8, 0, 0, 0, 0, 0);
TeleporterSetup.matterReceiverBlock.onBlockPlacedBy(dimWorld, 8, y, 8, null, injectableItemStack);
dimWorld.setBlockToAir(8, y+1, 8);
dimWorld.setBlockToAir(8, y+2, 8);
} else {
DimletKey key = KnownDimletConfiguration.getDimletKey(injectableItemStack, worldObj);
DimensionInformation information = dimensionManager.getDimensionInformation(id);
information.injectDimlet(key);
dimensionManager.save(worldObj);
}
inventoryHelper.decrStackSize(DimensionEditorContainer.SLOT_INJECTINPUT, 1);
stopInjecting();
}
}
}
markDirty();
setState();
}
private int findGoodReceiverLocation(World dimWorld) {
int y = WorldGenerationTools.findSuitableEmptySpot(dimWorld, 8, 8);
y++;
return y;
}
private ItemStack validateInjectableItemStack() {
ItemStack itemStack = inventoryHelper.getStacks()[DimensionEditorContainer.SLOT_INJECTINPUT];
if (itemStack == null || itemStack.stackSize == 0) {
stopInjecting();
return null;
}
if (isMatterReceiver(itemStack)) {
return itemStack;
}
DimletKey key = KnownDimletConfiguration.getDimletKey(itemStack, worldObj);
DimletType type = key.getType();
IDimletType itype = type.dimletType;
if (itype.isInjectable()) {
return itemStack;
} else {
return null;
}
}
private boolean isMatterReceiver(ItemStack itemStack) {
Block block = BlockTools.getBlock(itemStack);
if (block == TeleporterSetup.matterReceiverBlock) {
// We can inject matter receivers too.
return true;
}
return false;
}
private ItemStack validateDimensionItemStack() {
ItemStack itemStack = inventoryHelper.getStacks()[DimensionEditorContainer.SLOT_DIMENSIONTARGET];
if (itemStack == null || itemStack.stackSize == 0) {
stopInjecting();
return null;
}
NBTTagCompound tagCompound = itemStack.getTagCompound();
int id = tagCompound.getInteger("id");
if (id == 0) {
// Not a valid dimension.
stopInjecting();
return null;
}
return itemStack;
}
private void stopInjecting() {
setState();
ticksLeft = -1;
ticksCost = -1;
rfPerTick = -1;
markDirty();
}
private void setState() {
int state = 0;
if (ticksLeft == 0) {
state = 0;
} else if (ticksLeft == -1) {
state = 1;
} else if (((ticksLeft >> 2) & 1) == 0) {
state = 2;
} else {
state = 3;
}
int metadata = worldObj.getBlockMetadata(xCoord, yCoord, zCoord);
int newmeta = BlockTools.setState(metadata, state);
if (newmeta != metadata) {
worldObj.setBlockMetadataWithNotify(xCoord, yCoord, zCoord, newmeta, 2);
}
}
@Override
public int[] getAccessibleSlotsFromSide(int side) {
return DimletResearcherContainer.factory.getAccessibleSlots();
}
@Override
public boolean canInsertItem(int index, ItemStack item, int side) {
return DimletResearcherContainer.factory.isInputSlot(index);
}
@Override
public boolean canExtractItem(int index, ItemStack item, int side) {
return DimletResearcherContainer.factory.isOutputSlot(index);
}
@Override
public int getSizeInventory() {
return inventoryHelper.getStacks().length;
}
@Override
public ItemStack getStackInSlot(int index) {
return inventoryHelper.getStacks()[index];
}
@Override
public ItemStack decrStackSize(int index, int amount) {
return inventoryHelper.decrStackSize(index, amount);
}
@Override
public ItemStack getStackInSlotOnClosing(int index) {
return null;
}
@Override
public void setInventorySlotContents(int index, ItemStack stack) {
inventoryHelper.setInventorySlotContents(getInventoryStackLimit(), index, stack);
}
@Override
public String getInventoryName() {
return "Editor Inventory";
}
@Override
public boolean hasCustomInventoryName() {
return false;
}
@Override
public int getInventoryStackLimit() {
return 16;
}
@Override
public boolean isUseableByPlayer(EntityPlayer player) {
return true;
}
@Override
public void openInventory() {
}
@Override
public void closeInventory() {
}
@Override
public boolean isItemValidForSlot(int index, ItemStack stack) {
return true;
}
// Request the building percentage from the server. This has to be called on the client side.
public void requestBuildingPercentage() {
PacketHandler.INSTANCE.sendToServer(new PacketRequestIntegerFromServer(xCoord, yCoord, zCoord,
CMD_GETEDITING,
CLIENTCMD_GETEDITING));
}
@Override
public Integer executeWithResultInteger(String command, Map<String, Argument> args) {
Integer rc = super.executeWithResultInteger(command, args);
if (rc != null) {
return rc;
}
if (CMD_GETEDITING.equals(command)) {
if (ticksLeft == -1) {
return 0;
} else {
return (ticksCost - ticksLeft) * 100 / ticksCost;
}
}
return null;
}
@Override
public boolean execute(String command, Integer result) {
boolean rc = super.execute(command, result);
if (rc) {
return true;
}
if (CLIENTCMD_GETEDITING.equals(command)) {
editPercentage = result;
return true;
}
return false;
}
public static int getEditPercentage() {
return editPercentage;
}
@Override
// Non-restorable (world) state lives entirely in the superclass.
public void readFromNBT(NBTTagCompound tagCompound) {
    super.readFromNBT(tagCompound);
}
@Override
// Restores item buffer and edit-progress counters; must mirror the keys
// written by writeRestorableToNBT below.
public void readRestorableFromNBT(NBTTagCompound tagCompound) {
    super.readRestorableFromNBT(tagCompound);
    readBufferFromNBT(tagCompound);
    ticksLeft = tagCompound.getInteger("ticksLeft");
    ticksCost = tagCompound.getInteger("ticksCost");
    rfPerTick = tagCompound.getInteger("rfPerTick");
}
// Loads the item buffer from the "Items" tag list written by writeBufferToNBT.
// Each list entry corresponds positionally to one inventory slot; an empty
// compound deserializes to null (empty slot).
private void readBufferFromNBT(NBTTagCompound tagCompound) {
    NBTTagList bufferTagList = tagCompound.getTagList("Items", Constants.NBT.TAG_COMPOUND);
    ItemStack[] stacks = inventoryHelper.getStacks();
    // Clamp to the current inventory size: saved data with more entries than
    // the present inventory (e.g. after an inventory layout change) would
    // otherwise throw ArrayIndexOutOfBoundsException.
    int count = Math.min(bufferTagList.tagCount(), stacks.length);
    for (int i = 0 ; i < count ; i++) {
        NBTTagCompound nbtTagCompound = bufferTagList.getCompoundTagAt(i);
        stacks[i] = ItemStack.loadItemStackFromNBT(nbtTagCompound);
    }
}
@Override
// Non-restorable (world) state lives entirely in the superclass.
public void writeToNBT(NBTTagCompound tagCompound) {
    super.writeToNBT(tagCompound);
}
@Override
// Persists item buffer and edit-progress counters; must mirror the keys
// read by readRestorableFromNBT above.
public void writeRestorableToNBT(NBTTagCompound tagCompound) {
    super.writeRestorableToNBT(tagCompound);
    writeBufferToNBT(tagCompound);
    tagCompound.setInteger("ticksLeft", ticksLeft);
    tagCompound.setInteger("ticksCost", ticksCost);
    tagCompound.setInteger("rfPerTick", rfPerTick);
}
// Serializes the item buffer into an "Items" tag list, one entry per slot.
// Empty slots are written as empty compounds so slot indices stay aligned
// when the list is read back.
private void writeBufferToNBT(NBTTagCompound tagCompound) {
    NBTTagList itemList = new NBTTagList();
    for (ItemStack stack : inventoryHelper.getStacks()) {
        NBTTagCompound itemTag = new NBTTagCompound();
        if (stack != null) {
            stack.writeToNBT(itemTag);
        }
        itemList.appendTag(itemTag);
    }
    tagCompound.setTag("Items", itemList);
}
}
| |
/*
* Generated from AppInsightsTypes.bond (https://github.com/Microsoft/bond)
*/
package com.microsoft.applicationinsights.contracts;
import java.io.IOException;
import java.io.Writer;
import java.util.Map;
import java.util.List;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.ArrayList;
import com.microsoft.telemetry.ITelemetry;
import com.microsoft.telemetry.ITelemetryData;
import com.microsoft.telemetry.IContext;
import com.microsoft.telemetry.IJsonSerializable;
import com.microsoft.telemetry.Base;
import com.microsoft.telemetry.Data;
import com.microsoft.telemetry.Domain;
import com.microsoft.telemetry.Extension;
import com.microsoft.telemetry.JsonHelper;
/**
 * Data contract class ExceptionData.
 *
 * NOTE(review): this file is generated from AppInsightsTypes.bond; the
 * C#-style method names (SetupAttributes, InitializeFields) come from the
 * generator and are kept so regeneration stays a clean diff.
 */
public class ExceptionData extends ITelemetry
{
    /**
     * Backing field for property Ver (schema version; defaults to 2).
     */
    private int ver = 2;
    /**
     * Backing field for property HandledAt.
     */
    private String handledAt;
    /**
     * Backing field for property Exceptions.
     */
    private List<ExceptionDetails> exceptions;
    /**
     * Backing field for property SeverityLevel.
     */
    private int severityLevel;
    /**
     * Backing field for property ProblemId.
     */
    private String problemId;
    /**
     * Backing field for property CrashThreadId.
     */
    private int crashThreadId;
    /**
     * Backing field for property Properties.
     */
    private Map<String, String> properties;
    /**
     * Backing field for property Measurements.
     */
    private Map<String, Double> measurements;
    /**
     * Initializes a new instance of the ExceptionData class.
     */
    public ExceptionData()
    {
        // NOTE(review): both calls are overridable methods invoked from the
        // constructor — acceptable here only because subclasses are produced
        // by the same generator and rely on this hook order.
        this.InitializeFields();
        this.SetupAttributes();
    }
    /**
     * Envelope Name for this telemetry.
     */
    public String getEnvelopeName() {
        return "Microsoft.ApplicationInsights.Exception";
    }
    /**
     * Base Type for this telemetry.
     */
    public String getBaseType() {
        return "Microsoft.ApplicationInsights.ExceptionData";
    }
    /**
     * Gets the Ver property.
     */
    public int getVer() {
        return this.ver;
    }
    /**
     * Sets the Ver property.
     */
    public void setVer(int value) {
        this.ver = value;
    }
    /**
     * Gets the HandledAt property.
     */
    public String getHandledAt() {
        return this.handledAt;
    }
    /**
     * Sets the HandledAt property.
     */
    public void setHandledAt(String value) {
        this.handledAt = value;
    }
    /**
     * Gets the Exceptions property (lazily initialized; never returns null).
     */
    public List<ExceptionDetails> getExceptions() {
        if (this.exceptions == null) {
            this.exceptions = new ArrayList<ExceptionDetails>();
        }
        return this.exceptions;
    }
    /**
     * Sets the Exceptions property.
     */
    public void setExceptions(List<ExceptionDetails> value) {
        this.exceptions = value;
    }
    /**
     * Gets the SeverityLevel property.
     */
    public int getSeverityLevel() {
        return this.severityLevel;
    }
    /**
     * Sets the SeverityLevel property.
     */
    public void setSeverityLevel(int value) {
        this.severityLevel = value;
    }
    /**
     * Gets the ProblemId property.
     */
    public String getProblemId() {
        return this.problemId;
    }
    /**
     * Sets the ProblemId property.
     */
    public void setProblemId(String value) {
        this.problemId = value;
    }
    /**
     * Gets the CrashThreadId property.
     */
    public int getCrashThreadId() {
        return this.crashThreadId;
    }
    /**
     * Sets the CrashThreadId property.
     */
    public void setCrashThreadId(int value) {
        this.crashThreadId = value;
    }
    /**
     * Gets the Properties property (lazily initialized; never returns null).
     * LinkedHashMap preserves insertion order in the serialized JSON.
     */
    public Map<String, String> getProperties() {
        if (this.properties == null) {
            this.properties = new LinkedHashMap<String, String>();
        }
        return this.properties;
    }
    /**
     * Sets the Properties property.
     */
    public void setProperties(Map<String, String> value) {
        this.properties = value;
    }
    /**
     * Gets the Measurements property (lazily initialized; never returns null).
     */
    public Map<String, Double> getMeasurements() {
        if (this.measurements == null) {
            this.measurements = new LinkedHashMap<String, Double>();
        }
        return this.measurements;
    }
    /**
     * Sets the Measurements property.
     */
    public void setMeasurements(Map<String, Double> value) {
        this.measurements = value;
    }
    /**
     * Serializes the beginning of this object to the passed in writer.
     * The returned string is the separator ("," once at least one field has
     * been written) that the caller uses before appending further fields.
     * @param writer The writer to serialize this object to.
     */
    protected String serializeContent(Writer writer) throws IOException
    {
        String prefix = super.serializeContent(writer);
        // Required fields are always emitted; optional fields below are
        // skipped while they hold their default value (0 / null).
        writer.write(prefix + "\"ver\":");
        writer.write(JsonHelper.convert(this.ver));
        prefix = ",";
        writer.write(prefix + "\"handledAt\":");
        writer.write(JsonHelper.convert(this.handledAt));
        prefix = ",";
        writer.write(prefix + "\"exceptions\":");
        JsonHelper.writeList(writer, this.exceptions);
        prefix = ",";
        if (!(this.severityLevel == 0))
        {
            writer.write(prefix + "\"severityLevel\":");
            writer.write(JsonHelper.convert(this.severityLevel));
            prefix = ",";
        }
        if (!(this.problemId == null))
        {
            writer.write(prefix + "\"problemId\":");
            writer.write(JsonHelper.convert(this.problemId));
            prefix = ",";
        }
        if (!(this.crashThreadId == 0))
        {
            writer.write(prefix + "\"crashThreadId\":");
            writer.write(JsonHelper.convert(this.crashThreadId));
            prefix = ",";
        }
        if (!(this.properties == null))
        {
            writer.write(prefix + "\"properties\":");
            JsonHelper.writeDictionary(writer, this.properties);
            prefix = ",";
        }
        if (!(this.measurements == null))
        {
            writer.write(prefix + "\"measurements\":");
            JsonHelper.writeDictionary(writer, this.measurements);
            prefix = ",";
        }
        return prefix;
    }
    /**
     * Sets up the events attributes. Intentionally empty for this contract;
     * kept as a generator hook.
     */
    public void SetupAttributes()
    {
    }
    /**
     * Optionally initializes fields for the current context.
     */
    protected void InitializeFields() {
        QualifiedName = "com.microsoft.applicationinsights.contracts.ExceptionData";
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bah.lucene.blockcache_v2;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.junit.Before;
import org.junit.Test;
import com.bah.lucene.blockcache_v2.Cache;
import com.bah.lucene.blockcache_v2.CacheDirectory;
import com.bah.lucene.blockcache_v2.CacheIndexInput;
import com.bah.lucene.blockcache_v2.CacheKey;
import com.bah.lucene.blockcache_v2.CacheValue;
import com.bah.lucene.blockcache_v2.cachevalue.UnsafeCacheValue;
import com.bah.lucene.buffer.BufferStore;
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap;
import com.googlecode.concurrentlinkedhashmap.EvictionListener;
import com.googlecode.concurrentlinkedhashmap.Weigher;
/**
 * Randomized tests comparing a CacheIndexInput against a plain IndexInput
 * over the same data: every read (bytes/short/int/long) at random positions
 * must yield identical results.
 */
public class CacheIndexInputTest {
    // Seed is printed on every run so a failing sequence can be replayed by
    // hard-coding it below.
    private long seed;
    private final int sampleSize = 10000;
    private final int maxBufSize = 10000;
    private final int maxOffset = 1000;
    @Before
    public void setup() {
        BufferStore.init(128, 128);
        seed = new Random().nextLong();
        System.out.println("Using seed [" + seed + "]");
        // seed = -265282183286396219l;
    }
    // Smoke test: a short byte sequence read back through the cache wrapper.
    @Test
    public void test1() throws IOException {
        RAMDirectory directory = new RAMDirectory();
        String name = "test";
        IndexOutput output = directory.createOutput(name, IOContext.DEFAULT);
        byte[] bs = "hello world".getBytes();
        output.writeBytes(bs, bs.length);
        output.close();
        IndexInput input = directory.openInput(name, IOContext.DEFAULT);
        Cache cache = getCache();
        CacheIndexInput cacheInput = new CacheIndexInput(null, name, input, cache);
        byte[] buf = new byte[bs.length];
        cacheInput.readBytes(buf, 0, buf.length);
        cacheInput.close();
        assertArrayEquals(bs, buf);
        directory.close();
    }
    // Randomized test over ~10MB of data (the +13 makes the length a
    // non-multiple of the block size, exercising the final partial block).
    @Test
    public void test2() throws IOException {
        Cache cache = getCache();
        RAMDirectory directory = new RAMDirectory();
        Random random = new Random(seed);
        String name = "test2";
        long size = (10 * 1024 * 1024) + 13;
        IndexOutput output = directory.createOutput(name, IOContext.DEFAULT);
        writeRandomData(size, random, output);
        output.close();
        IndexInput input = directory.openInput(name, IOContext.DEFAULT);
        IndexInput testInput = new CacheIndexInput(null, name, input.clone(), cache);
        readRandomData(input, testInput, random, sampleSize, maxBufSize, maxOffset);
        readRandomDataShort(input, testInput, random, sampleSize);
        readRandomDataInt(input, testInput, random, sampleSize);
        readRandomDataLong(input, testInput, random, sampleSize);
        testInput.close();
        input.close();
        directory.close();
    }
    // Reads random (position, offset, length) slices from both inputs and
    // asserts byte-for-byte equality.
    public static void readRandomData(IndexInput baseInput, IndexInput testInput, Random random, int sampleSize,
        int maxBufSize, int maxOffset) throws IOException {
        assertEquals(baseInput.length(), testInput.length());
        int fileLength = (int) baseInput.length();
        for (int i = 0; i < sampleSize; i++) {
            int position = random.nextInt(fileLength - maxBufSize);
            int bufSize = random.nextInt(maxBufSize - maxOffset) + 1;
            byte[] buf1 = new byte[bufSize];
            byte[] buf2 = new byte[bufSize];
            // offset < bufSize is guaranteed by the Math.min bound, so the
            // nextInt(bufSize - offset) below always has a positive argument.
            int offset = random.nextInt(Math.min(maxOffset, bufSize));
            int len = Math.min(random.nextInt(bufSize - offset), fileLength - position);
            baseInput.seek(position);
            baseInput.readBytes(buf1, offset, len);
            testInput.seek(position);
            testInput.readBytes(buf2, offset, len);
            assertArrayEquals("Read [" + i + "] The position is [" + position + "] and bufSize [" + bufSize + "]", buf1, buf2);
        }
    }
    // Compares readInt() at random positions (position bounded so a full
    // 4-byte read always fits).
    public static void readRandomDataInt(IndexInput baseInput, IndexInput testInput, Random random, int sampleSize)
        throws IOException {
        assertEquals(baseInput.length(), testInput.length());
        int fileLength = (int) baseInput.length();
        for (int i = 0; i < sampleSize; i++) {
            int position = random.nextInt(fileLength - 4);
            baseInput.seek(position);
            int i1 = baseInput.readInt();
            testInput.seek(position);
            int i2 = testInput.readInt();
            assertEquals("Read [" + i + "] The position is [" + position + "]", i1, i2);
        }
    }
    // Compares readShort() at random positions.
    public static void readRandomDataShort(IndexInput baseInput, IndexInput testInput, Random random, int sampleSize)
        throws IOException {
        assertEquals(baseInput.length(), testInput.length());
        int fileLength = (int) baseInput.length();
        for (int i = 0; i < sampleSize; i++) {
            int position = random.nextInt(fileLength - 2);
            baseInput.seek(position);
            short i1 = baseInput.readShort();
            testInput.seek(position);
            short i2 = testInput.readShort();
            assertEquals("Read [" + i + "] The position is [" + position + "]", i1, i2);
        }
    }
    // Compares readLong() at random positions.
    public static void readRandomDataLong(IndexInput baseInput, IndexInput testInput, Random random, int sampleSize)
        throws IOException {
        assertEquals(baseInput.length(), testInput.length());
        int fileLength = (int) baseInput.length();
        for (int i = 0; i < sampleSize; i++) {
            int position = random.nextInt(fileLength - 8);
            baseInput.seek(position);
            long i1 = baseInput.readLong();
            testInput.seek(position);
            long i2 = testInput.readLong();
            assertEquals("Read [" + i + "] The position is [" + position + "]", i1, i2);
        }
    }
    // Writes `size` bytes of random data to every output in 1KB chunks; the
    // final chunk is truncated so exactly `size` bytes are written.
    public static void writeRandomData(long size, Random random, IndexOutput... outputs) throws IOException {
        byte[] buf = new byte[1024];
        for (long l = 0; l < size; l += buf.length) {
            random.nextBytes(buf);
            int length = (int) Math.min(buf.length, size - l);
            for (IndexOutput output : outputs) {
                output.writeBytes(buf, length);
            }
        }
    }
    // Builds a minimal in-memory Cache implementation backed by a
    // ConcurrentLinkedHashMap with a 1MB weighted capacity.
    public static Cache getCache() {
        // Evicted values must be unreferenced; a non-zero refCount on
        // eviction indicates a ref-counting bug, so the test fails hard.
        EvictionListener<CacheKey, CacheValue> listener = new EvictionListener<CacheKey, CacheValue>() {
            @Override
            public void onEviction(CacheKey key, CacheValue value) {
                if (value.refCount() == 0) {
                    value.release();
                } else {
                    // doing something else...
                    fail();
                }
            }
        };
        Weigher<CacheValue> weigher = new Weigher<CacheValue>() {
            @Override
            public int weightOf(CacheValue value) {
                return value.length();
            }
        };
        long maximumWeightedCapacity = 1 * 1024 * 1024;
        final ConcurrentLinkedHashMap<CacheKey, CacheValue> cache = new ConcurrentLinkedHashMap.Builder<CacheKey, CacheValue>()
            .weigher(weigher).maximumWeightedCapacity(maximumWeightedCapacity).listener(listener).build();
        Cache cacheFactory = new Cache() {
            @Override
            public CacheValue newInstance(CacheDirectory directory, String fileName, int cacheBlockSize) {
                return new UnsafeCacheValue(cacheBlockSize);
            }
            @Override
            public long getFileId(CacheDirectory directory, String fileName) {
                // hashCode() is sufficient as a file id within a single test.
                return fileName.hashCode();
            }
            @Override
            public int getFileBufferSize(CacheDirectory directory, String fileName) {
                return 1024;
            }
            @Override
            public int getCacheBlockSize(CacheDirectory directory, String fileName) {
                return 8192;
            }
            @Override
            public boolean cacheFileForReading(CacheDirectory directory, String name, IOContext context) {
                return true;
            }
            @Override
            public boolean cacheFileForWriting(CacheDirectory directory, String name, IOContext context) {
                return true;
            }
            @Override
            public CacheValue get(CacheKey key) {
                return cache.get(key);
            }
            @Override
            public void put(CacheKey key, CacheValue value) {
                cache.put(key, value);
            }
            @Override
            public void removeFile(CacheDirectory directory, String fileName) throws IOException {
            }
            @Override
            public void releaseDirectory(String directoryName) {
            }
            @Override
            public CacheValue getQuietly(CacheKey key) {
                // Quiet get: does not update the map's eviction ordering.
                return cache.getQuietly(key);
            }
            @Override
            public boolean shouldBeQuiet(CacheDirectory directory, String fileName) {
                return false;
            }
            @Override
            public void close() throws IOException {
            }
        };
        return cacheFactory;
    }
}
| |
/*
* Copyright 2014 Avanza Bank AB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.avanza.astrix.beans.service;
import static org.junit.Assert.assertEquals;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.avanza.astrix.beans.factory.MissingBeanProviderException;
import com.avanza.astrix.context.AstrixContext;
import com.avanza.astrix.context.TestAstrixConfigurer;
import com.avanza.astrix.provider.core.AstrixApiProvider;
import com.avanza.astrix.provider.core.AstrixConfigDiscovery;
import com.avanza.astrix.provider.core.Service;
/**
 * Tests async service invocation in Astrix: a CompletableFuture-returning
 * service proxy must start the invocation synchronously with the method call
 * but complete asynchronously, and the bean factory must reject async
 * interfaces whose methods are not valid reactive counterparts of the sync
 * API.
 */
public class AsyncServiceTest {
    private PingAsync ping;
    private AstrixContext context;
    // Server stub whose responses are fed in by the test via setResponse().
    private BlockingPing server = new BlockingPing();
    @Before
    public void setup() {
        SingleServiceComponent singleService = new SingleServiceComponent(PingAsync.class, server);
        TestAstrixConfigurer astrixConfigurer = new TestAstrixConfigurer();
        astrixConfigurer.enableFaultTolerance(true);
        astrixConfigurer.registerApiProvider(PingApi.class);
        astrixConfigurer.registerPlugin(ServiceComponent.class, singleService);
        astrixConfigurer.set("pingUri", singleService.getName() + ":");
        context = astrixConfigurer.configure();
        ping = context.getBean(PingAsync.class);
    }
    @After
    public void after() {
        context.destroy();
    }
    // The future resolves once the server produces a response.
    @Test
    public void asyncServiceInvocationShouldRunAsynchronouslyWithMethodCalll() throws Exception {
        Future<String> response = ping.ping("foo");
        server.setResponse("bar");
        assertEquals("bar", response.get());
    }
    // The request must reach the server without the test ever consuming the
    // future, proving the invocation starts with the method call itself.
    @Test
    public void asyncServiceInvocationShouldStartSynchronouslyWithMethodCalll() throws Exception {
        PingAsync ping = context.getBean(PingAsync.class);
        @SuppressWarnings("unused")
        Future<String> response = ping.ping("foo");
        assertEquals("Service invocation should be started synchronously with method call. Last server invocation: ",
            "foo", server.pingRequests.poll(1, TimeUnit.SECONDS));
    }
    // Plain Future is not a reactive type, so BrokenPingAsync must be rejected.
    @Test(expected = MissingBeanProviderException.class)
    public void validatesAllThatAllMethodsInReactiveTypeAreReactive() throws Exception {
        AstrixContext context = new TestAstrixConfigurer().registerApiProvider(BrokenPingApi.class)
            .configure();
        context.getBean(BrokenPingAsync.class);
    }
    // The async method name has no matching sync method, so it must be rejected.
    @Test(expected = MissingBeanProviderException.class)
    public void validatesAllThatAllMethodsInReactiveTypeCorrespondToSyncVersion() throws Exception {
        AstrixContext context = new TestAstrixConfigurer().registerApiProvider(InconsistentPingApi.class)
            .configure();
        context.getBean(InconsistentPingAsync.class);
    }
    /**
     * Ping server stub: records each request and completes responses on a
     * background thread, timing out after one second if no response was set.
     */
    public static final class BlockingPing implements PingAsync {
        private final BlockingQueue<String> pingResponses = new LinkedBlockingQueue<>();
        private final BlockingQueue<String> pingRequests = new LinkedBlockingQueue<>();
        @Override
        public CompletableFuture<String> ping(String msg) {
            pingRequests.add(msg);
            CompletableFuture<String> result = new CompletableFuture<String>();
            new Thread(() -> {
                try {
                    String response = pingResponses.poll(1, TimeUnit.SECONDS);
                    if (response != null) {
                        result.complete(response);
                    } else {
                        result.completeExceptionally(new IllegalStateException("TIMEOUT"));
                    }
                } catch (InterruptedException e) {
                    result.completeExceptionally(new IllegalStateException("TIMEOUT"));
                }
            }).start();
            return result;
        }
        public void setResponse(String response) {
            this.pingResponses.add(response);
        }
    }
    public interface Ping {
        String ping(String msg);
    }
    public interface PingAsync {
        CompletableFuture<String> ping(String msg);
    }
    public interface BrokenPing {
        String invalidPing(String msg);
        String validPing(String msg);
    }
    public interface BrokenPingAsync {
        Future<String> invalidPing(String msg); // Future is not a reactive type
        CompletableFuture<String> validPing(String msg);
    }
    public interface InconsistentPing {
        String ping(String msg);
    }
    public interface InconsistentPingAsync {
        // Intentionally named so it does NOT correspond to InconsistentPing.ping.
        CompletableFuture<String> inconsistendPingMethod(String msg);
    }
    @AstrixApiProvider
    public static interface PingApi {
        @AstrixConfigDiscovery("pingUri")
        @Service
        Ping ping();
    }
    @AstrixApiProvider
    public static interface BrokenPingApi {
        @AstrixConfigDiscovery("pingUri")
        @Service
        BrokenPing ping();
    }
    @AstrixApiProvider
    public static interface InconsistentPingApi {
        @AstrixConfigDiscovery("pingUri")
        @Service
        InconsistentPing ping();
    }
    /**
     * Minimal ServiceComponent that binds exactly one api to a fixed instance.
     */
    private static class SingleServiceComponent implements ServiceComponent {
        private Class<?> api;
        private Object instance;
        public SingleServiceComponent(Class<?> api, Object instance) {
            this.api = api;
            this.instance = instance;
        }
        @SuppressWarnings("unchecked")
        @Override
        public <T> BoundServiceBeanInstance<T> bind(ServiceDefinition<T> serviceDefinition, ServiceProperties serviceProperties) {
            return new SimpleBoundServiceBeanInstance<T>((T) instance);
        }
        @Override
        public ServiceProperties parseServiceProviderUri(String serviceProviderUri) {
            return new ServiceProperties();
        }
        @Override
        public <T> ServiceProperties createServiceProperties(ServiceDefinition<T> exportedServiceDefinition) {
            return new ServiceProperties();
        }
        @Override
        public String getName() {
            return "single-service";
        }
        @Override
        public boolean canBindType(Class<?> type) {
            return type.equals(api);
        }
        @Override
        public <T> void exportService(Class<T> providedApi, T provider, ServiceDefinition<T> serviceDefinition) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean requiresProviderInstance() {
            return false;
        }
    }
}
| |
package org.apache.lucene.index;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
public class TestDirectoryReaderReopen extends LuceneTestCase {
// Runs the default reopen test suite twice: once against a single-segment
// index (createIndex(..., false)) and once against a multi-segment index.
public void testReopen() throws Exception {
    final Directory dir1 = newDirectory();
    createIndex(random(), dir1, false);
    performDefaultTests(new TestReopen() {
        @Override
        protected void modifyIndex(int i) throws IOException {
            TestDirectoryReaderReopen.modifyIndex(i, dir1);
        }
        @Override
        protected DirectoryReader openReader() throws IOException {
            return DirectoryReader.open(dir1);
        }
    });
    dir1.close();
    final Directory dir2 = newDirectory();
    createIndex(random(), dir2, true);
    performDefaultTests(new TestReopen() {
        @Override
        protected void modifyIndex(int i) throws IOException {
            TestDirectoryReaderReopen.modifyIndex(i, dir2);
        }
        @Override
        protected DirectoryReader openReader() throws IOException {
            return DirectoryReader.open(dir2);
        }
    });
    dir2.close();
}
// LUCENE-1228: IndexWriter.commit() does not update the index version
// populate an index in iterations.
// at the end of every iteration, commit the index and reopen/recreate the reader.
// in each iteration verify the work of previous iteration.
// try this once with reopen once recreate, on both RAMDir and FSDir.
public void testCommitReopen () throws IOException {
    Directory dir = newDirectory();
    // withReopen=true: refresh the existing reader via openIfChanged.
    doTestReopenWithCommit(random(), dir, true);
    dir.close();
}
public void testCommitRecreate () throws IOException {
    Directory dir = newDirectory();
    // withReopen=false: close and freshly open a reader each iteration.
    doTestReopenWithCommit(random(), dir, false);
    dir.close();
}
// Adds documents in 4 iterations of M docs each. After every iteration the
// writer commits and the reader is refreshed (withReopen=true) or recreated
// (withReopen=false); the next iteration then verifies the previous
// iteration's documents are visible through that reader.
private void doTestReopenWithCommit (Random random, Directory dir, boolean withReopen) throws IOException {
    IndexWriter iwriter = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(
            OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(newLogMergePolicy()));
    iwriter.commit();
    DirectoryReader reader = DirectoryReader.open(dir);
    try {
        int M = 3;
        FieldType customType = new FieldType(TextField.TYPE_STORED);
        customType.setTokenized(false);
        FieldType customType2 = new FieldType(TextField.TYPE_STORED);
        customType2.setTokenized(false);
        customType2.setOmitNorms(true);
        // stored-only field type (not indexed).
        FieldType customType3 = new FieldType();
        customType3.setStored(true);
        for (int i=0; i<4; i++) {
            for (int j=0; j<M; j++) {
                Document doc = new Document();
                doc.add(newField("id", i+"_"+j, customType));
                doc.add(newField("id2", i+"_"+j, customType2));
                doc.add(newField("id3", i+"_"+j, customType3));
                iwriter.addDocument(doc);
                if (i>0) {
                    // Verify doc j of the previous iteration (docID = j + k*M)
                    // is visible through the current reader.
                    int k = i-1;
                    int n = j + k*M;
                    StoredDocument prevItereationDoc = reader.document(n);
                    assertNotNull(prevItereationDoc);
                    String id = prevItereationDoc.get("id");
                    assertEquals(k+"_"+j, id);
                }
            }
            iwriter.commit();
            if (withReopen) {
                // reopen; openIfChanged returns null when nothing changed,
                // in which case the old reader stays in use.
                DirectoryReader r2 = DirectoryReader.openIfChanged(reader);
                if (r2 != null) {
                    reader.close();
                    reader = r2;
                }
            } else {
                // recreate
                reader.close();
                reader = DirectoryReader.open(dir);
            }
        }
    } finally {
        iwriter.close();
        reader.close();
    }
}
// Shared reopen test protocol: checks that refreshing an unchanged index
// returns the same reader instance, then repeatedly modifies the index and
// verifies the refreshed reader equals a freshly opened one.
private void performDefaultTests(TestReopen test) throws Exception {
    DirectoryReader index1 = test.openReader();
    DirectoryReader index2 = test.openReader();
    TestDirectoryReader.assertIndexEquals(index1, index2);
    // verify that reopen() does not return a new reader instance
    // in case the index has no changes
    ReaderCouple couple = refreshReader(index2, false);
    assertTrue(couple.refreshedReader == index2);
    couple = refreshReader(index2, test, 0, true);
    index1.close();
    index1 = couple.newReader;
    DirectoryReader index2_refreshed = couple.refreshedReader;
    index2.close();
    // test if refreshed reader and newly opened reader return equal results
    TestDirectoryReader.assertIndexEquals(index1, index2_refreshed);
    index2_refreshed.close();
    assertReaderClosed(index2, true);
    assertReaderClosed(index2_refreshed, true);
    index2 = test.openReader();
    // Iterate through the remaining modifyIndex(i) variants.
    for (int i = 1; i < 4; i++) {
        index1.close();
        couple = refreshReader(index2, test, i, true);
        // refresh DirectoryReader
        index2.close();
        index2 = couple.refreshedReader;
        index1 = couple.newReader;
        TestDirectoryReader.assertIndexEquals(index1, index2);
    }
    index1.close();
    index2.close();
    assertReaderClosed(index1, true);
    assertReaderClosed(index2, true);
}
// Stress test: n threads concurrently refresh readers and search / compare
// them while the index is being modified. Even-indexed refresher threads go
// through the synchronized refreshReader(); odd ones reopen unsynchronized.
public void testThreadSafety() throws Exception {
    final Directory dir = newDirectory();
    // NOTE: this also controls the number of threads!
    final int n = _TestUtil.nextInt(random(), 20, 40);
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random())));
    for (int i = 0; i < n; i++) {
        writer.addDocument(createDocument(i, 3));
    }
    writer.forceMerge(1);
    writer.close();
    final TestReopen test = new TestReopen() {
        @Override
        protected void modifyIndex(int i) throws IOException {
            IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
                TEST_VERSION_CURRENT, new MockAnalyzer(random())));
            modifier.addDocument(createDocument(n + i, 6));
            modifier.close();
        }
        @Override
        protected DirectoryReader openReader() throws IOException {
            return DirectoryReader.open(dir);
        }
    };
    final List<ReaderCouple> readers = Collections.synchronizedList(new ArrayList<ReaderCouple>());
    DirectoryReader firstReader = DirectoryReader.open(dir);
    DirectoryReader reader = firstReader;
    ReaderThread[] threads = new ReaderThread[n];
    final Set<DirectoryReader> readersToClose = Collections.synchronizedSet(new HashSet<DirectoryReader>());
    for (int i = 0; i < n; i++) {
        if (i % 2 == 0) {
            DirectoryReader refreshed = DirectoryReader.openIfChanged(reader);
            if (refreshed != null) {
                readersToClose.add(reader);
                reader = refreshed;
            }
        }
        final DirectoryReader r = reader;
        final int index = i;
        ReaderThreadTask task;
        // Roughly half the threads refresh readers; the rest verify couples.
        if (i < 4 || (i >=10 && i < 14) || i > 18) {
            task = new ReaderThreadTask() {
                @Override
                public void run() throws Exception {
                    Random rnd = LuceneTestCase.random();
                    while (!stopped) {
                        if (index % 2 == 0) {
                            // refresh reader synchronized
                            ReaderCouple c = (refreshReader(r, test, index, true));
                            readersToClose.add(c.newReader);
                            readersToClose.add(c.refreshedReader);
                            readers.add(c);
                            // prevent too many readers
                            break;
                        } else {
                            // not synchronized
                            DirectoryReader refreshed = DirectoryReader.openIfChanged(r);
                            if (refreshed == null) {
                                refreshed = r;
                            }
                            IndexSearcher searcher = newSearcher(refreshed);
                            ScoreDoc[] hits = searcher.search(
                                new TermQuery(new Term("field1", "a" + rnd.nextInt(refreshed.maxDoc()))),
                                null, 1000).scoreDocs;
                            if (hits.length > 0) {
                                searcher.doc(hits[0].doc);
                            }
                            if (refreshed != r) {
                                refreshed.close();
                            }
                        }
                        synchronized(this) {
                            wait(_TestUtil.nextInt(random(), 1, 100));
                        }
                    }
                }
            };
        } else {
            task = new ReaderThreadTask() {
                @Override
                public void run() throws Exception {
                    Random rnd = LuceneTestCase.random();
                    while (!stopped) {
                        int numReaders = readers.size();
                        if (numReaders > 0) {
                            // Cross-check a randomly chosen refreshed/new pair.
                            ReaderCouple c = readers.get(rnd.nextInt(numReaders));
                            TestDirectoryReader.assertIndexEquals(c.newReader, c.refreshedReader);
                        }
                        synchronized(this) {
                            wait(_TestUtil.nextInt(random(), 1, 100));
                        }
                    }
                }
            };
        }
        threads[i] = new ReaderThread(task);
        threads[i].start();
    }
    // Let the threads run for about a second, then stop and join them all,
    // failing the test if any thread recorded an error.
    synchronized(this) {
        wait(1000);
    }
    for (int i = 0; i < n; i++) {
        if (threads[i] != null) {
            threads[i].stopThread();
        }
    }
    for (int i = 0; i < n; i++) {
        if (threads[i] != null) {
            threads[i].join();
            if (threads[i].error != null) {
                String msg = "Error occurred in thread " + threads[i].getName() + ":\n" + threads[i].error.getMessage();
                fail(msg);
            }
        }
    }
    for (final DirectoryReader readerToClose : readersToClose) {
        readerToClose.close();
    }
    firstReader.close();
    reader.close();
    for (final DirectoryReader readerToClose : readersToClose) {
        assertReaderClosed(readerToClose, true);
    }
    assertReaderClosed(reader, true);
    assertReaderClosed(firstReader, true);
    dir.close();
}
/**
 * Pairs a freshly opened reader with the reader obtained by refreshing an
 * existing one over the same index state, so tests can compare the two.
 */
private static class ReaderCouple {
    DirectoryReader newReader;
    DirectoryReader refreshedReader;
    ReaderCouple(DirectoryReader freshlyOpened, DirectoryReader refreshed) {
        this.newReader = freshlyOpened;
        this.refreshedReader = refreshed;
    }
}
/** A cancellable unit of work executed by a ReaderThread. */
abstract static class ReaderThreadTask {
    // volatile so the stop request is visible to the executing thread.
    protected volatile boolean stopped;
    /** Asks run() to exit its loop at the next check of {@code stopped}. */
    public void stop() {
        stopped = true;
    }
    public abstract void run() throws Exception;
}
/**
 * Thread wrapper that runs a ReaderThreadTask and records (instead of
 * rethrowing) any Throwable so the main test thread can report it.
 */
private static class ReaderThread extends Thread {
    ReaderThreadTask task;
    Throwable error;
    ReaderThread(ReaderThreadTask task) {
        this.task = task;
    }
    /** Cooperative stop: flags the task; the thread exits on its own. */
    public void stopThread() {
        task.stop();
    }
    @Override
    public void run() {
        try {
            task.run();
        } catch (Throwable t) {
            t.printStackTrace(System.out);
            error = t;
        }
    }
}
// Serializes refreshReader(...) calls so index modification + reopen happen
// atomically with respect to the concurrently running reader threads.
private Object createReaderMutex = new Object();
// Convenience overload: refresh without modifying the index first
// (no TestReopen, modify index -1).
private ReaderCouple refreshReader(DirectoryReader reader, boolean hasChanges) throws IOException {
    return refreshReader(reader, null, -1, hasChanges);
}
// Optionally modifies the index (test.modifyIndex(modify)) and opens a new
// reader, then refreshes the given reader via openIfChanged. Asserts that a
// new instance was (or was not) created according to hasChanges. The whole
// operation is serialized on createReaderMutex.
ReaderCouple refreshReader(DirectoryReader reader, TestReopen test, int modify, boolean hasChanges) throws IOException {
    synchronized (createReaderMutex) {
        DirectoryReader r = null;
        if (test != null) {
            test.modifyIndex(modify);
            r = test.openReader();
        }
        DirectoryReader refreshed = null;
        try {
            refreshed = DirectoryReader.openIfChanged(reader);
            if (refreshed == null) {
                refreshed = reader;
            }
        } finally {
            // refreshed stays null only if openIfChanged threw; close the
            // reader we opened above so it doesn't leak.
            if (refreshed == null && r != null) {
                // Hit exception -- close opened reader
                r.close();
            }
        }
        if (hasChanges) {
            if (refreshed == reader) {
                fail("No new DirectoryReader instance created during refresh.");
            }
        } else {
            if (refreshed != reader) {
                fail("New DirectoryReader instance created during refresh even though index had no changes.");
            }
        }
        return new ReaderCouple(r, refreshed);
    }
}
/**
 * Populates {@code dir} with 100 documents. When {@code multiSegment} is true,
 * commits every 10th document so the index ends up with multiple segments;
 * otherwise force-merges down to a single segment. Verifies the resulting
 * segment layout before returning.
 */
public static void createIndex(Random random, Directory dir, boolean multiSegment) throws IOException {
  IndexWriter.unlock(dir);
  IndexWriter w = new IndexWriter(dir, LuceneTestCase.newIndexWriterConfig(random,
      TEST_VERSION_CURRENT, new MockAnalyzer(random))
      .setMergePolicy(new LogDocMergePolicy()));
  for (int i = 0; i < 100; i++) {
    w.addDocument(createDocument(i, 4));
    if (multiSegment && (i % 10) == 0) {
      w.commit();
    }
  }
  if (!multiSegment) {
    w.forceMerge(1);
  }
  w.close();
  // Sanity-check that the segment structure matches the requested layout.
  DirectoryReader r = DirectoryReader.open(dir);
  if (multiSegment) {
    assertTrue(r.leaves().size() > 1);
  } else {
    assertTrue(r.leaves().size() == 1);
  }
  r.close();
}
/**
 * Builds a test document: "field1", "fielda" and "fieldb" all hold the text
 * "a&lt;n&gt;", while "field2".."field&lt;numFields&gt;" hold "a&lt;n&gt; b&lt;n&gt;".
 */
public static Document createDocument(int n, int numFields) {
  final String shortText = "a" + n;
  final String longText = shortText + " b" + n;

  // Stored, untokenized, norms omitted.
  FieldType untokenizedStored = new FieldType(TextField.TYPE_STORED);
  untokenizedStored.setTokenized(false);
  untokenizedStored.setOmitNorms(true);

  // Stored only (not indexed).
  FieldType storedOnly = new FieldType();
  storedOnly.setStored(true);

  Document doc = new Document();
  doc.add(new TextField("field1", shortText, Field.Store.YES));
  doc.add(new Field("fielda", shortText, untokenizedStored));
  doc.add(new Field("fieldb", shortText, storedOnly));
  for (int i = 1; i < numFields; i++) {
    doc.add(new TextField("field" + (i + 1), longText, Field.Store.YES));
  }
  return doc;
}
/**
 * Applies one of four index mutations selected by {@code i}:
 * 0 = delete two documents, 1 = force-merge to one segment,
 * 2 = add documents with a merge in between, 3 = add a single document.
 */
static void modifyIndex(int i, Directory dir) throws IOException {
  switch (i) {
    case 0: {
      if (VERBOSE) {
        System.out.println("TEST: modify index");
      }
      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
      w.deleteDocuments(new Term("field2", "a11"));
      w.deleteDocuments(new Term("field2", "b30"));
      w.close();
      break;
    }
    case 1: {
      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
      w.forceMerge(1);
      w.close();
      break;
    }
    case 2: {
      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
      w.addDocument(createDocument(101, 4));
      w.forceMerge(1);
      w.addDocument(createDocument(102, 4));
      w.addDocument(createDocument(103, 4));
      w.close();
      break;
    }
    case 3: {
      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
      w.addDocument(createDocument(101, 4));
      w.close();
      break;
    }
  }
}
/**
 * Asserts the reader's refCount is zero; when requested, recurses into the
 * sub-readers of a composite reader as well.
 */
static void assertReaderClosed(IndexReader reader, boolean checkSubReaders) {
  assertEquals(0, reader.getRefCount());
  if (!checkSubReaders || !(reader instanceof CompositeReader)) {
    return;
  }
  // We cannot use the reader context here: the reader is already closed and
  // calling getTopReaderContext() would throw AlreadyClosed.
  for (final IndexReader sub : ((CompositeReader) reader).getSequentialSubReaders()) {
    assertReaderClosed(sub, checkSubReaders);
  }
}
/** Fixture contract for reopen tests: how to open a reader and how to mutate the index. */
abstract static class TestReopen {
  /** Opens a fresh reader over the index under test. */
  protected abstract DirectoryReader openReader() throws IOException;
  /** Applies modification number {@code i} to the index under test. */
  protected abstract void modifyIndex(int i) throws IOException;
}
/** Deletion policy that never deletes a commit, so every commit point stays listable. */
static class KeepAllCommits extends IndexDeletionPolicy {
  @Override
  public void onInit(List<? extends IndexCommit> commits) {
    // Intentionally empty: keep all commits.
  }
  @Override
  public void onCommit(List<? extends IndexCommit> commits) {
    // Intentionally empty: keep all commits.
  }
}
/**
 * Indexes 4 docs (one commit each), then deletes them (one commit each), keeping
 * every commit via {@link KeepAllCommits}. Then walks all commits, reopening the
 * reader on each and checking the doc count matches the commit's user data.
 */
public void testReopenOnCommit() throws Throwable {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(
      dir,
      newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
      setIndexDeletionPolicy(new KeepAllCommits()).
      setMaxBufferedDocs(-1).
      setMergePolicy(newLogMergePolicy(10))
  );
  // Commits 0..3: each adds one doc and tags the commit with its index.
  for(int i=0;i<4;i++) {
    Document doc = new Document();
    doc.add(newStringField("id", ""+i, Field.Store.NO));
    writer.addDocument(doc);
    Map<String,String> data = new HashMap<String,String>();
    data.put("index", i+"");
    writer.setCommitData(data);
    writer.commit();
  }
  // Commits 4..7: each deletes one doc.
  for(int i=0;i<4;i++) {
    writer.deleteDocuments(new Term("id", ""+i));
    Map<String,String> data = new HashMap<String,String>();
    data.put("index", (4+i)+"");
    writer.setCommitData(data);
    writer.commit();
  }
  writer.close();
  DirectoryReader r = DirectoryReader.open(dir);
  // The final commit has deleted every doc.
  assertEquals(0, r.numDocs());
  Collection<IndexCommit> commits = DirectoryReader.listCommits(dir);
  for (final IndexCommit commit : commits) {
    // Reopening against an explicit commit must always produce a new reader.
    DirectoryReader r2 = DirectoryReader.openIfChanged(r, commit);
    assertNotNull(r2);
    assertTrue(r2 != r);
    final Map<String,String> s = commit.getUserData();
    final int v;
    if (s.size() == 0) {
      // First commit created by IW
      v = -1;
    } else {
      v = Integer.parseInt(s.get("index"));
    }
    // Commits 0..3 each added one doc (count = v+1); commits 4..7 each removed one (count = 7-v).
    if (v < 4) {
      assertEquals(1+v, r2.numDocs());
    } else {
      assertEquals(7-v, r2.numDocs());
    }
    r.close();
    r = r2;
  }
  r.close();
  dir.close();
}
/**
 * Verifies that a near-real-time reader can be reopened against an older commit
 * point, dropping the uncommitted document it was seeing.
 */
public void testOpenIfChangedNRTToCommit() throws Exception {
  Directory dir = newDirectory();
  // Can't use RIW because it randomly commits:
  IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
  Document doc = new Document();
  doc.add(newStringField("field", "value", Field.Store.NO));
  w.addDocument(doc);
  w.commit();
  List<IndexCommit> commits = DirectoryReader.listCommits(dir);
  assertEquals(1, commits.size());
  w.addDocument(doc);
  // The NRT reader sees the second, uncommitted document.
  DirectoryReader r = DirectoryReader.open(w, true);
  assertEquals(2, r.numDocs());
  // Reopening on the first commit rolls back to one document.
  IndexReader r2 = DirectoryReader.openIfChanged(r, commits.get(0));
  assertNotNull(r2);
  r.close();
  assertEquals(1, r2.numDocs());
  w.close();
  r2.close();
  dir.close();
}
}
| |
package fr.ourten.teabeans.binding;
import fr.ourten.teabeans.listener.ValueChangeListener;
import fr.ourten.teabeans.listener.ValueInvalidationListener;
import fr.ourten.teabeans.property.Property;
import fr.ourten.teabeans.value.Observable;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit tests for {@link Binding}: lazy evaluation, chaining onto properties,
 * unbinding, invalidation state, and listener notification.
 */
public class BindingTest
{
    // Counter incremented by computeValue() implementations and listeners below,
    // used to observe how many times bindings are (re)evaluated or notified.
    private int count;

    @BeforeEach
    public void setup()
    {
        count = 0;
    }

    /** A binding combines its dependencies and is evaluated lazily: once per invalidation. */
    @Test
    public void testBaseBinding()
    {
        Property<String> p1 = new Property<>("none");
        Property<String> p2 = new Property<>("nothing");
        Binding<String> binding = new Binding<String>()
        {
            {
                // Register dependencies so changes to p1/p2 invalidate this binding.
                super.bind(p1);
                super.bind(p2);
            }

            @Override
            public String computeValue()
            {
                count++;
                return p1.getValue() + p2.getValue();
            }
        };
        // Only here to check lazy evaluation
        assertThat(binding.getValue()).isNotNull();
        assertThat(binding.getValue()).isEqualTo("nonenothing");
        p1.setValue("lala");
        assertThat(binding.getValue()).isEqualTo("lalanothing");
        p2.setValue("toto");
        assertThat(binding.getValue()).isEqualTo("lalatoto");
        // Three computations in total: initial + one per property change;
        // the repeated getValue() call above reused the cached value.
        assertThat(count).isEqualTo(3);
    }

    /** A property bound to a binding reflects the binding's computed value. */
    @Test
    public void testChainedBinding()
    {
        Property<String> p1 = new Property<>("none");
        Property<String> p2 = new Property<>("nothing");
        Property<String> p3 = new Property<>("");
        p3.bindProperty(new Binding<String>()
        {
            {
                super.bind(p1);
                super.bind(p2);
            }

            @Override
            public String computeValue()
            {
                count++;
                return p1.getValue() + p2.getValue();
            }
        });
        assertThat(p3.getValue()).isEqualTo("nonenothing");
        // Exactly one evaluation was needed to produce p3's value.
        assertThat(count).isEqualTo(1);
    }

    /** unbind() removes the given observables from the binding's dependency list. */
    @Test
    public void testBindingUnbinding()
    {
        Property<String> p1 = new Property<>("none");
        Property<String> p2 = new Property<>("nothing");
        Binding<String> binding = new Binding<String>()
        {
            {
                super.bind(p1);
                super.bind(p2);
            }

            @Override
            public String computeValue()
            {
                count++;
                return p1.getValue() + p2.getValue();
            }
        };
        assertThat(binding.getDependencies().toArray()).containsExactly(new Observable[]{p1, p2});
        binding.unbind(p1, p2);
        assertThat(binding.getDependencies().toArray()).containsExactly(new Observable[0]);
    }

    /** A property stops tracking its source after unbind(). */
    @Test
    public void testUnidirectionnalUnbinding()
    {
        Property<String> p1 = new Property<>("none");
        Property<String> p2 = new Property<>("nothing");
        p1.bindProperty(p2);
        assertThat(p1.getValue()).isEqualTo(p2.getValue());
        p2.setValue("lalala");
        assertThat(p1.getValue()).isEqualTo(p2.getValue());
        p1.unbind();
        // After unbinding, changes to p2 no longer propagate to p1.
        p2.setValue("another value");
        assertThat(p1.getValue()).isNotEqualTo(p2.getValue());
    }

    /** isValid() is false until evaluated, true after, and false again after a dependency change. */
    @Test
    public void testInvalidationBinding()
    {
        Property<String> p1 = new Property<>("none");
        Property<String> p2 = new Property<>("nothing");
        Binding<String> binding = new Binding<String>()
        {
            {
                super.bind(p1);
                super.bind(p2);
            }

            @Override
            public String computeValue()
            {
                count++;
                return p1.getValue() + p2.getValue();
            }
        };
        assertThat(binding.isValid()).isFalse();
        assertThat(binding.getValue()).isEqualTo("nonenothing");
        assertThat(binding.isValid()).isTrue();
        p1.setValue("another");
        assertThat(binding.isValid()).isFalse();
    }

    /** Change and invalidation listeners fire independently and stop firing once removed. */
    @Test
    public void testBindingListener()
    {
        Property<String> p1 = new Property<>("none");
        Property<String> p2 = new Property<>("nothing");
        Binding<String> binding = new Binding<String>()
        {
            {
                super.bind(p1);
                super.bind(p2);
            }

            @Override
            public String computeValue()
            {
                return p1.getValue() + p2.getValue();
            }
        };
        ValueChangeListener<String> listener = (observable, oldValue, newValue) -> count++;
        ValueInvalidationListener listener2 = observable -> count++;
        binding.addChangeListener(listener);
        binding.addListener(listener2);
        binding.getValue();
        // First evaluation: only the change listener fired.
        assertThat(count).isEqualTo(1);
        p1.setValue("none");
        // Dependency invalidation: only the invalidation listener fired.
        assertThat(count).isEqualTo(2);
        binding.removeChangeListener(listener);
        p1.setValue("test");
        assertThat(count).isEqualTo(3);
        binding.removeListener(listener2);
        // With both listeners removed, further changes do not touch the counter.
        p1.setValue("testagain");
        assertThat(count).isEqualTo(3);
    }

    /** Null computed values are handled: change events fire only on actual value transitions. */
    @Test
    public void testNullBinding()
    {
        Property<String> p1 = new Property<>("nonnull");
        Binding<String> binding = new Binding<String>()
        {
            {
                super.bind(p1);
            }

            @Override
            public String computeValue()
            {
                return p1.getValue();
            }
        };
        binding.addChangeListener((observable, oldValue, newValue) -> count++);
        assertThat(binding.getValue()).isNotNull();
        assertThat(count).isEqualTo(1);
        p1.setValue(null);
        assertThat(binding.getValue()).isNull();
        assertThat(count).isEqualTo(2);
        // Re-invalidating while the value stays null must not fire a change event.
        binding.invalidate();
        assertThat(binding.getValue()).isNull();
        assertThat(count).isEqualTo(2);
        p1.setValue(null);
        assertThat(count).isEqualTo(2);
        p1.setValue("nonnull");
        assertThat(binding.getValue()).isNotNull();
        assertThat(count).isEqualTo(3);
        // Same once non-null again: invalidation without a value change is silent.
        binding.invalidate();
        assertThat(binding.getValue()).isNotNull();
        assertThat(count).isEqualTo(3);
    }
}
| |
/* $This file is distributed under the terms of the license in LICENSE$ */
package edu.cornell.mannlib.vitro.webapp.controller.grefine;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.vocabulary.OWL;
import edu.cornell.mannlib.vitro.webapp.beans.DataProperty;
import edu.cornell.mannlib.vitro.webapp.beans.VClass;
import edu.cornell.mannlib.vitro.webapp.controller.VitroHttpServlet;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.DataPropertyDao;
import edu.cornell.mannlib.vitro.webapp.dao.VClassDao;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
/**
 * This servlet is for servicing Google Refine's
 * "Add columns from VIVO" requests: given a class URI ("type" parameter), it
 * returns a JSONP document listing the data properties of that class and of
 * its subclasses (down to {@link #MAXDEPTH} levels).
 *
 * @author Eliza Chan (elc2013@med.cornell.edu)
 */
@WebServlet(name = "Google Refine Property List Service", urlPatterns = {"/get_properties_of_type"} )
public class GrefinePropertyListServlet extends VitroHttpServlet {

    /** Maximum class-hierarchy depth explored when collecting subclass properties. */
    private static final int MAXDEPTH = 7;
    public static final int MAX_QUERY_LENGTH = 500;
    private static final long serialVersionUID = 1L;
    private static final Log log = LogFactory.getLog(GrefinePropertyListServlet.class.getName());

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        // POST is handled identically to GET.
        super.doPost(req, resp);
    }

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        super.doGet(req, resp);
        resp.setContentType("application/json");
        VitroRequest vreq = new VitroRequest(req);
        try {
            // Google Refine sends an optional JSONP callback name.
            String callbackStr = (vreq.getParameter("callback") == null) ? ""
                    : vreq.getParameter("callback");
            ServletOutputStream out = resp.getOutputStream();

            VClassDao vcDao = vreq.getUnfilteredWebappDaoFactory().getVClassDao();
            DataPropertyDao dao = vreq.getUnfilteredWebappDaoFactory().getDataPropertyDao();
            String topUri = vreq.getParameter("type");
            VClass topClass = vcDao.getVClassByURI(topUri);

            // Tracks property URIs already emitted so subclass passes do not repeat them.
            HashSet<String> propURIs = new HashSet<String>();
            HashMap<VClass, List<DataProperty>> classPropertiesMap =
                    populateClassPropertiesMap(vcDao, dao, topUri, propURIs);

            // Construct the JSON response.
            ObjectNode completeJson = JsonNodeFactory.instance.objectNode();
            ArrayNode propertiesJsonArr = JsonNodeFactory.instance.arrayNode();
            if (classPropertiesMap.size() > 0) {
                for (VClass vc : classPropertiesMap.keySet()) { // add results to schema
                    List<DataProperty> vcProps = classPropertiesMap.get(vc);
                    for (DataProperty prop : vcProps) {
                        String nameStr = prop.getPublicName() == null ? prop.getName() : prop.getPublicName();
                        // Top level: the schema describes the owning class.
                        ObjectNode propertiesItemJson = JsonNodeFactory.instance.objectNode();
                        ObjectNode rootSchemaJson = JsonNodeFactory.instance.objectNode();
                        rootSchemaJson.put("id", vc.getURI());
                        rootSchemaJson.put("name", vc.getName());
                        rootSchemaJson.set("alias", JsonNodeFactory.instance.arrayNode());
                        propertiesItemJson.set("schema", rootSchemaJson);
                        // Second level: the property itself.
                        propertiesItemJson.put("id", prop.getURI());
                        propertiesItemJson.put("name", nameStr);
                        propertiesItemJson.set("alias", JsonNodeFactory.instance.arrayNode());
                        ObjectNode expectsJson = JsonNodeFactory.instance.objectNode();
                        expectsJson.put("id", prop.getURI());
                        expectsJson.put("name", nameStr);
                        expectsJson.set("alias", JsonNodeFactory.instance.arrayNode());
                        propertiesItemJson.set("expects", expectsJson);
                        propertiesJsonArr.add(propertiesItemJson);
                    }
                }
            }

            // Get data properties from subclasses of the requested type.
            List<VClass> lvl2Classes = new ArrayList<VClass>();
            List<VClass> roots = null;
            String requestType = vreq.getParameter("type");
            if (requestType != null) {
                roots = new LinkedList<VClass>();
                roots.add(vcDao.getVClassByURI(requestType));
            }
            if (roots != null) {
                String ontologyUri = null;
                Collections.sort(roots);
                Iterator<VClass> rootIt = roots.iterator();
                while (rootIt.hasNext()) {
                    VClass root = rootIt.next();
                    if (root != null) {
                        List<VClass> lvl2ChildClasses = new ArrayList<VClass>();
                        addChildren(vcDao, vreq.getUnfilteredWebappDaoFactory(), root, lvl2ChildClasses, 0, ontologyUri);
                        lvl2Classes.addAll(lvl2ChildClasses);
                    }
                }
            }
            for (VClass lvl2Class : lvl2Classes) {
                HashMap<VClass, List<DataProperty>> lvl2ClassPropertiesMap =
                        populateClassPropertiesMap(vcDao, dao, lvl2Class.getURI(), propURIs);
                if (lvl2ClassPropertiesMap.size() > 0) {
                    for (VClass vc : lvl2ClassPropertiesMap.keySet()) { // add results to schema
                        List<DataProperty> vcProps = lvl2ClassPropertiesMap.get(vc);
                        for (DataProperty prop : vcProps) {
                            String nameStr = prop.getPublicName() == null ? prop.getName() : prop.getPublicName();
                            // Top level: subclass properties are reported under the requested top class.
                            ObjectNode propertiesItemJson = JsonNodeFactory.instance.objectNode();
                            ObjectNode rootSchemaJson = JsonNodeFactory.instance.objectNode();
                            rootSchemaJson.put("id", topClass.getURI());
                            rootSchemaJson.put("name", topClass.getName());
                            rootSchemaJson.set("alias", JsonNodeFactory.instance.arrayNode());
                            propertiesItemJson.set("schema", rootSchemaJson);
                            // Second level: the subclass, then the property under "id2"/"name2".
                            propertiesItemJson.put("id", vc.getURI());
                            propertiesItemJson.put("name", vc.getName());
                            propertiesItemJson.set("alias", JsonNodeFactory.instance.arrayNode());
                            propertiesItemJson.put("id2", prop.getURI());
                            propertiesItemJson.put("name2", nameStr);
                            propertiesItemJson.set("alias2", JsonNodeFactory.instance.arrayNode());
                            ObjectNode expectsJson = JsonNodeFactory.instance.objectNode();
                            expectsJson.put("id", prop.getURI());
                            expectsJson.put("name", nameStr);
                            expectsJson.set("alias", JsonNodeFactory.instance.arrayNode());
                            propertiesItemJson.set("expects", expectsJson);
                            propertiesJsonArr.add(propertiesItemJson);
                        }
                    }
                }
            }
            completeJson.set("properties", propertiesJsonArr);
            out.print(callbackStr + "(" + completeJson.toString() + ")");
        } catch (Exception ex) {
            log.warn(ex, ex);
        }
    }

    /**
     * Collects the data properties of the class at {@code uri}, keyed by their
     * domain class (properties without a domain are attributed to the class at
     * {@code uri}). Properties whose URI is already in {@code propURIs} are
     * skipped, and newly seen URIs are added to the set.
     */
    private HashMap<VClass, List<DataProperty>> populateClassPropertiesMap(
            VClassDao vcDao,
            DataPropertyDao dao,
            String uri,
            HashSet<String> propURIs) {
        HashMap<VClass, List<DataProperty>> classPropertiesMap = new HashMap<VClass, List<DataProperty>>();
        List<DataProperty> props = new ArrayList<DataProperty>();
        VClass topVc = vcDao.getVClassByURI(uri);
        Collection<DataProperty> dataProps = dao.getDataPropertiesForVClass(uri);
        for (DataProperty dp : dataProps) {
            if (!(propURIs.contains(dp.getURI()))) {
                propURIs.add(dp.getURI());
                DataProperty prop = dao.getDataPropertyByURI(dp.getURI());
                if (prop != null) {
                    props.add(prop);
                }
            }
        }
        if (props.size() > 0) {
            Collections.sort(props);
            for (DataProperty prop : props) {
                String nameStr = prop.getPublicName() == null ? prop.getName() : prop.getPublicName();
                if (nameStr != null) {
                    if (prop.getDomainClassURI() != null) {
                        VClass vc = vcDao.getVClassByURI(prop.getDomainClassURI());
                        List<DataProperty> existingList = classPropertiesMap.get(vc);
                        if (existingList != null) {
                            existingList.add(prop);
                        } else {
                            List<DataProperty> newList = new ArrayList<DataProperty>();
                            newList.add(prop);
                            classPropertiesMap.put(vc, newList);
                        }
                    } else { // some properties have no domain, belong to top vc by default
                        List<DataProperty> existingList = classPropertiesMap.get(topVc);
                        if (existingList != null) {
                            existingList.add(prop);
                        } else {
                            List<DataProperty> newList = new ArrayList<DataProperty>();
                            newList.add(prop);
                            classPropertiesMap.put(topVc, newList);
                        }
                    }
                }
            }
        }
        return classPropertiesMap;
    }

    /**
     * Recursively collects {@code parent} and its subclasses into {@code list},
     * descending at most {@link #MAXDEPTH} levels.
     */
    private void addChildren(VClassDao vcDao, WebappDaoFactory wadf, VClass parent, List<VClass> list, int position, String ontologyUri) {
        List<VClass> rowElts = addVClassDataToResultsList(wadf, parent, position, ontologyUri);
        // If addVClassDataToResultsList filtered out the result, don't shift the children over.
        int childShift = (rowElts.size() > 0) ? 1 : 0;
        list.addAll(rowElts);
        List<String> childURIstrs = vcDao.getSubClassURIs(parent.getURI());
        if ((childURIstrs.size() > 0) && position < MAXDEPTH) {
            List<VClass> childClasses = new ArrayList<VClass>();
            for (String childURIstr : childURIstrs) {
                try {
                    VClass child = vcDao.getVClassByURI(childURIstr);
                    if (child != null && !child.getURI().equals(OWL.Nothing.getURI())) {
                        childClasses.add(child);
                    }
                } catch (Exception e) {
                    // Best effort: an unresolvable subclass URI is skipped, not fatal.
                    log.debug("Unable to retrieve subclass " + childURIstr, e);
                }
            }
            Collections.sort(childClasses);
            for (VClass child : childClasses) {
                addChildren(vcDao, wadf, child, list, position + childShift, ontologyUri);
            }
        }
    }

    /**
     * Returns the class itself when it matches the ontology filter (or when no
     * filter is set); otherwise an empty list. The {@code wadf} and
     * {@code position} parameters are retained for signature compatibility with
     * the original tree-walking code.
     */
    private List<VClass> addVClassDataToResultsList(WebappDaoFactory wadf, VClass vcw, int position, String ontologyUri) {
        List<VClass> results = new ArrayList<VClass>();
        if (ontologyUri == null || ((vcw.getNamespace() != null) && (vcw.getNamespace().equals(ontologyUri)))) {
            results.add(vcw);
        }
        return results;
    }
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.spi.impl.operationservice.impl;
import com.hazelcast.core.HazelcastInstanceNotActiveException;
import com.hazelcast.core.IndeterminateOperationState;
import com.hazelcast.core.IndeterminateOperationStateException;
import com.hazelcast.core.OperationTimeoutException;
import com.hazelcast.internal.nio.Packet;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.spi.impl.AbstractInvocationFuture;
import com.hazelcast.spi.impl.InternalCompletableFuture;
import com.hazelcast.spi.impl.operationservice.WrappableException;
import com.hazelcast.spi.impl.operationservice.impl.responses.NormalResponse;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static com.hazelcast.internal.util.Clock.currentTimeMillis;
import static com.hazelcast.internal.util.ExceptionUtil.cloneExceptionWithFixedAsyncStackTrace;
import static com.hazelcast.internal.util.StringUtil.timeToString;
import static com.hazelcast.spi.impl.operationservice.impl.InvocationConstant.CALL_TIMEOUT;
import static com.hazelcast.spi.impl.operationservice.impl.InvocationConstant.HEARTBEAT_TIMEOUT;
import static com.hazelcast.spi.impl.operationservice.impl.InvocationConstant.INTERRUPTED;
/**
 * The InvocationFuture is the {@link InternalCompletableFuture} that waits on the completion
 * of an {@link Invocation}. The Invocation executes an operation.
 * <p>
 * In the past the InvocationFuture.get logic was also responsible for detecting the heartbeat for blocking operations
 * using the CONTINUE_WAIT and detecting if an operation is still running using the IsStillRunning functionality. This
 * has been removed from the future and moved into the {@link InvocationMonitor}.
 *
 * @param <E> the result type produced by the invocation
 */
public final class InvocationFuture<E> extends AbstractInvocationFuture<E> {

    final Invocation invocation;
    // Set by onInterruptDetected() when a waiting thread observes an interrupt.
    volatile boolean interrupted;
    // When true, Data responses are deserialized before being handed to the caller.
    private final boolean deserialize;

    InvocationFuture(Invocation invocation, boolean deserialize) {
        super(invocation.context.logger);
        this.invocation = invocation;
        this.deserialize = deserialize;
    }

    @Override
    protected void onInterruptDetected() {
        interrupted = true;
    }

    /** True when the future completed with an exception or one of the sentinel failure states. */
    @Override
    public boolean isCompletedExceptionally() {
        return (state instanceof ExceptionalResult
                || state == CALL_TIMEOUT
                || state == HEARTBEAT_TIMEOUT
                || state == INTERRUPTED);
    }

    @Override
    protected String invocationToString() {
        return invocation.toString();
    }

    @Override
    protected TimeoutException newTimeoutException(long timeout, TimeUnit unit) {
        return new TimeoutException(String.format("%s failed to complete within %d %s. %s",
                invocation.op.getClass().getSimpleName(), timeout, unit, invocation));
    }

    /**
     * Maps a RejectedExecutionException to HazelcastInstanceNotActiveException when
     * the rejection was caused by the node shutting down; otherwise returns it as-is.
     */
    @Override
    protected Exception wrapToInstanceNotActiveException(RejectedExecutionException e) {
        if (!invocation.context.nodeEngine.isRunning()) {
            return new HazelcastInstanceNotActiveException(e.getMessage());
        }
        return e;
    }

    @Override
    protected E resolveAndThrowIfException(Object unresolved) throws ExecutionException, InterruptedException {
        Object value = resolve(unresolved);
        return returnOrThrowWithGetConventions(value);
    }

    // public for tests
    /**
     * Returns {@code response} as the result, or rethrows it following the
     * {@code Future.get} conventions: CancellationException, ExecutionException
     * and InterruptedException are thrown directly, anything else is wrapped in
     * an ExecutionException. RuntimeExceptions/Errors get the async stack trace
     * of the calling thread appended before being (re)thrown.
     */
    public static <T> T returnOrThrowWithGetConventions(Object response) throws ExecutionException, InterruptedException {
        if (!(response instanceof ExceptionalResult)) {
            return (T) response;
        }
        response = ((ExceptionalResult) response).getCause();
        if (response instanceof WrappableException) {
            response = ((WrappableException) response).wrap();
        } else if (response instanceof RuntimeException || response instanceof Error) {
            response = cloneExceptionWithFixedAsyncStackTrace((Throwable) response);
        }
        if (response instanceof CancellationException) {
            throw (CancellationException) response;
        } else if (response instanceof ExecutionException) {
            throw (ExecutionException) response;
        } else if (response instanceof InterruptedException) {
            throw (InterruptedException) response;
        } else {
            throw new ExecutionException((Throwable) response);
        }
    }

    /**
     * Resolves the raw completion state into a value or ExceptionalResult:
     * sentinel states become exceptions, Packet responses are deserialized and
     * unwrapped, and indeterminate operation states are surfaced as
     * IndeterminateOperationStateException when the invocation requires it.
     */
    @SuppressWarnings({"checkstyle:npathcomplexity", "checkstyle:cyclomaticcomplexity"})
    @Override
    protected Object resolve(Object unresolved) {
        if (unresolved == null) {
            return null;
        } else if (unresolved == INTERRUPTED) {
            return new ExceptionalResult(
                    new InterruptedException(invocation.op.getClass().getSimpleName() + " was interrupted. " + invocation));
        } else if (unresolved == CALL_TIMEOUT) {
            return new ExceptionalResult(newOperationTimeoutException(false));
        } else if (unresolved == HEARTBEAT_TIMEOUT) {
            return new ExceptionalResult(newOperationTimeoutException(true));
        } else if (unresolved.getClass() == Packet.class) {
            // Still in wire form: deserialize the NormalResponse and unwrap its value.
            NormalResponse response = invocation.context.serializationService.toObject(unresolved);
            unresolved = response.getValue();
        }
        Object value = unresolved;
        if (deserialize && value instanceof Data) {
            value = invocation.context.serializationService.toObject(value);
            if (value == null) {
                return null;
            }
        }
        Throwable cause = (value instanceof ExceptionalResult)
                ? ((ExceptionalResult) value).getCause()
                : null;
        if (invocation.shouldFailOnIndeterminateOperationState()
                && (value instanceof IndeterminateOperationState
                || cause instanceof IndeterminateOperationState)) {
            value = wrapThrowable(new IndeterminateOperationStateException("indeterminate operation state",
                    cause == null ? (Throwable) value : cause));
        }
        return value;
    }

    /**
     * Builds the OperationTimeoutException message for either a heartbeat
     * timeout (operation started but stopped reporting progress) or a
     * call timeout (operation was never started within callTimeoutMillis).
     */
    private OperationTimeoutException newOperationTimeoutException(boolean heartbeatTimeout) {
        StringBuilder sb = new StringBuilder();
        if (heartbeatTimeout) {
            sb.append(invocation.op.getClass().getSimpleName())
                    .append(" invocation failed to complete due to operation-heartbeat-timeout. ");
            sb.append("Current time: ").append(timeToString(currentTimeMillis())).append(". ");
            sb.append("Start time: ").append(timeToString(invocation.firstInvocationTimeMillis)).append(". ");
            sb.append("Total elapsed time: ")
                    .append(currentTimeMillis() - invocation.firstInvocationTimeMillis).append(" ms. ");
            long lastHeartbeatMillis = invocation.lastHeartbeatMillis;
            sb.append("Last operation heartbeat: ");
            appendHeartbeat(sb, lastHeartbeatMillis);
            long lastHeartbeatFromMemberMillis = invocation.context.invocationMonitor
                    .getLastMemberHeartbeatMillis(invocation.getTargetAddress());
            sb.append("Last operation heartbeat from member: ");
            appendHeartbeat(sb, lastHeartbeatFromMemberMillis);
        } else {
            sb.append(invocation.op.getClass().getSimpleName())
                    .append(" got rejected before execution due to not starting within the operation-call-timeout of: ")
                    .append(invocation.callTimeoutMillis).append(" ms. ");
            sb.append("Current time: ").append(timeToString(currentTimeMillis())).append(". ");
            sb.append("Start time: ").append(timeToString(invocation.firstInvocationTimeMillis)).append(". ");
            sb.append("Total elapsed time: ")
                    .append(currentTimeMillis() - invocation.firstInvocationTimeMillis).append(" ms. ");
        }
        sb.append(invocation);
        String msg = sb.toString();
        return new OperationTimeoutException(msg);
    }

    /** Appends a formatted heartbeat timestamp, or "never." when none was recorded. */
    private static void appendHeartbeat(StringBuilder sb, long lastHeartbeatMillis) {
        if (lastHeartbeatMillis == 0) {
            sb.append("never. ");
        } else {
            sb.append(timeToString(lastHeartbeatMillis)).append(". ");
        }
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.data.cosmos.internal.directconnectivity.rntbd;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.PropertyWriter;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.CorruptedFrameException;
import static com.azure.data.cosmos.internal.directconnectivity.rntbd.RntbdConstants.RntbdHeader;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.lenientFormat;
@JsonPropertyOrder({ "id", "name", "type", "present", "required", "value" })
final class RntbdToken {
// region Fields
// Wire size of a token header: 2-byte little-endian id + 1-byte type id.
private static final int HEADER_LENGTH = Short.BYTES + Byte.BYTES;

static {
    // Registers this class's Jackson property filter (see PropertyFilter below).
    RntbdObjectMapper.registerPropertyFilter(RntbdToken.class, RntbdToken.PropertyFilter.class);
}

// Immutable header describing this token's id, name, type and required flag.
private final RntbdHeader header;
// Cached encoded length; Integer.MIN_VALUE is the "not yet computed" sentinel.
private int length;
// Either the decoded value, or a retained ByteBuf holding the still-undecoded bytes.
private Object value;
// endregion
// region Constructors
/** Creates a token for the given non-null header, with no value and the length unset. */
private RntbdToken(final RntbdHeader header) {
    checkNotNull(header, "header");
    this.header = header;
    this.value = null;
    this.length = Integer.MIN_VALUE;  // sentinel: length not yet computed
}
// endregion
// region Accessors
@JsonProperty
public short getId() {
return this.header.id();
}
@JsonProperty
public String getName() {
return this.header.name();
}
@JsonProperty
public RntbdTokenType getTokenType() {
return this.header.type();
}
@JsonProperty
public Object getValue() {
final RntbdTokenType.Codec codec = this.header.type().codec();
if (this.value == null) {
return codec.defaultValue();
}
if (this.value instanceof ByteBuf) {
final ByteBuf buffer = (ByteBuf)this.value;
this.value = codec.defaultValue();
try {
this.value = codec.read(buffer);
} catch (final CorruptedFrameException error) {
String message = lenientFormat("failed to read %s value: %s", this.getName(), error.getMessage());
throw new CorruptedFrameException(message);
} finally {
buffer.release();
}
} else {
this.value = codec.convert(this.value);
}
return this.value;
}
/** Returns the (lazily decoded) value cast to {@code cls}. */
public <T> T getValue(final Class<T> cls) {
    return cls.cast(this.getValue());
}
@JsonProperty
public void setValue(final Object value) {
this.ensureValid(value);
this.releaseBuffer();
this.value = value;
this.length = Integer.MIN_VALUE;
}
@JsonIgnore
public final Class<?> getValueType() {
return this.header.type().codec().valueType();
}
@JsonProperty
public boolean isPresent() {
return this.value != null;
}
@JsonProperty
public boolean isRequired() {
return this.header.isRequired();
}
// endregion
// region Methods
public int computeLength() {
if (!this.isPresent()) {
return 0;
}
if (this.value instanceof ByteBuf) {
final ByteBuf buffer = (ByteBuf)this.value;
checkState(buffer.readerIndex() == 0);
return HEADER_LENGTH + buffer.readableBytes();
}
if (this.length == Integer.MIN_VALUE) {
this.length = HEADER_LENGTH + this.header.type().codec().computeLength(this.value);
}
return this.length;
}
public static RntbdToken create(final RntbdHeader header) {
return new RntbdToken(header);
}
public void decode(final ByteBuf in) {
checkNotNull(in, "expected non-null in");
if (this.value instanceof ByteBuf) {
((ByteBuf)this.value).release();
}
this.value = this.header.type().codec().readSlice(in).retain(); // No data transfer until first call to RntbdToken.getValue
}
public void encode(final ByteBuf out) {
checkNotNull(out, "out");
if (!this.isPresent()) {
if (this.isRequired()) {
final String message = lenientFormat("Missing value for required header: %s", this);
throw new IllegalStateException(message);
}
return;
}
out.writeShortLE(this.getId());
out.writeByte(this.getTokenType().id());
if (this.value instanceof ByteBuf) {
out.writeBytes((ByteBuf)this.value);
} else {
this.ensureValid(this.value);
this.header.type().codec().write(this.value, out);
}
}
public boolean releaseBuffer() {
return this.value instanceof ByteBuf && ((ByteBuf)this.value).release();
}
@Override
public String toString() {
return RntbdObjectMapper.toString(this);
}
// endregion
// region Privates
private void ensureValid(final Object value) {
checkArgument(value != null, "value: null");
checkArgument(this.header.type().codec().isValid(value), "value: %s = %s", value.getClass().getName(), value);
}
// endregion
// region Types
static class PropertyFilter extends SimpleBeanPropertyFilter {
@Override
public void serializeAsField(final Object object, final JsonGenerator generator, final SerializerProvider provider, final PropertyWriter writer) throws Exception {
if (generator.canOmitFields()) {
final Object value = writer.getMember().getValue(object);
if (value instanceof RntbdToken && !((RntbdToken)value).isPresent()) {
return;
}
}
writer.serializeAsField(object, generator, provider);
}
}
// endregion
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.map.impl;
import com.hazelcast.cluster.ClusterState;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.PartitioningStrategyConfig;
import com.hazelcast.internal.eviction.ExpirationManager;
import com.hazelcast.internal.monitor.impl.LocalMapStatsImpl;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.internal.util.collection.PartitionIdSet;
import com.hazelcast.internal.util.comparators.ValueComparator;
import com.hazelcast.map.impl.event.MapEventPublisher;
import com.hazelcast.map.impl.eviction.MapClearExpiredRecordsTask;
import com.hazelcast.map.impl.journal.MapEventJournal;
import com.hazelcast.map.impl.mapstore.writebehind.NodeWideUsedCapacityCounter;
import com.hazelcast.map.impl.nearcache.MapNearCacheManager;
import com.hazelcast.map.impl.operation.MapOperationProvider;
import com.hazelcast.map.impl.query.QueryEngine;
import com.hazelcast.map.impl.query.QueryRunner;
import com.hazelcast.map.impl.query.ResultProcessorRegistry;
import com.hazelcast.map.impl.querycache.QueryCacheContext;
import com.hazelcast.map.impl.recordstore.RecordStore;
import com.hazelcast.partition.PartitioningStrategy;
import com.hazelcast.query.impl.IndexCopyBehavior;
import com.hazelcast.query.impl.IndexProvider;
import com.hazelcast.query.impl.getters.Extractors;
import com.hazelcast.query.impl.predicates.QueryOptimizer;
import com.hazelcast.spi.impl.NodeEngine;
import com.hazelcast.spi.impl.eventservice.EventFilter;
import com.hazelcast.spi.impl.operationservice.Operation;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Semaphore;
import java.util.function.Predicate;
/**
 * Context which is needed by a map service.
 * <p>
 * Shared instances, configurations of all
 * maps can be reached over this context.
 * <p>
 * Also this context provides some support methods which are used
 * in map operations and {@link RecordStore} implementations. For
 * example all {@link PartitionContainer} and {@link MapContainer}
 * instances can also be reached by using this interface.
 * <p>
 * It is also responsible for providing methods which are used by
 * lower layers of Hazelcast and exposed on {@link MapService}.
 *
 * @see MapManagedService
 */
public interface MapServiceContext extends MapServiceContextInterceptorSupport,
MapServiceContextEventListenerSupport {
/** Deserializes the given data back to its object form. */
Object toObject(Object data);
/** Serializes {@code object} using the supplied partitioning strategy. */
Data toData(Object object, PartitioningStrategy partitionStrategy);
/** Serializes {@code object} to Hazelcast's internal {@link Data} representation. */
Data toData(Object object);
// NOTE(review): presumably serializes including schema metadata — confirm against the implementation.
Data toDataWithSchema(Object object);
/** Returns the container holding configuration and runtime state for the named map. */
MapContainer getMapContainer(String mapName);
/** Returns the container for the named map only if it already exists (no creation). */
MapContainer getExistingMapContainer(String mapName);
/** Returns all known map containers, keyed by map name. */
Map<String, MapContainer> getMapContainers();
/** Returns the per-partition container for the given partition ID. */
PartitionContainer getPartitionContainer(int partitionId);
/** Initializes all partition containers (called during service setup). */
void initPartitionsContainers();
/**
 * Removes all record stores inside the supplied partition ID matching with
 * the supplied predicate.
 *
 * @param predicate only matching record-stores with this predicate will be removed
 * @param partitionId partition ID
 * @param onShutdown {@code true} if this method is called during map service shutdown,
 * otherwise set {@code false}
 * @param onRecordStoreDestroy {@code true} if this method is called during record store destruction,
 * otherwise set {@code false}
 * @see MapManagedService#reset()
 * @see MapManagedService#shutdown(boolean)
 */
void removeRecordStoresFromPartitionMatchingWith(Predicate<RecordStore> predicate, int partitionId,
boolean onShutdown, boolean onRecordStoreDestroy);
/**
 * Removes write-behind-queue-reservation-counters inside
 * supplied partition from matching record-stores.
 *
 * @param predicate only matching record-stores
 * with this predicate will be removed
 * @param partitionId partition ID
 */
void removeWbqCountersFromMatchingPartitionsWith(Predicate<RecordStore> predicate, int partitionId);
/** Returns the owning {@link MapService} instance. */
MapService getService();
/** Destroys all configured map stores. */
void destroyMapStores();
/** Flushes all maps (e.g. pending write-behind state). */
void flushMaps();
/** Destroys the named map and its associated resources. */
void destroyMap(String mapName);
/** Resets this context's state. See {@link MapManagedService#reset()}. */
void reset();
/**
 * Releases internal resources solely managed by Hazelcast.
 * This method is called when MapService is shutting down.
 */
void shutdown();
/** Creates a new record store for the given container/partition, using {@code keyLoader} for initial loading. */
RecordStore createRecordStore(MapContainer mapContainer, int partitionId, MapKeyLoader keyLoader);
/** Returns the record store for the map in the given partition, creating it if needed. */
RecordStore getRecordStore(int partitionId, String mapName);
/** Variant of {@link #getRecordStore(int, String)} that can skip map-loader loading on creation. */
RecordStore getRecordStore(int partitionId, String mapName, boolean skipLoadingOnCreate);
/** Returns the record store only if it already exists, otherwise {@code null} is expected. */
RecordStore getExistingRecordStore(int partitionId, String mapName);
/**
 * Returns cached collection of owned partitions,
 * When it is null, reloads and caches it again.
 */
PartitionIdSet getOrInitCachedMemberPartitions();
/** Invalidates the cached owned-partition set so the next read recomputes it. */
void nullifyOwnedPartitions();
/** Returns the manager that drives entry expiration. */
ExpirationManager getExpirationManager();
/** Wires the owning {@link MapService} into this context. */
void setService(MapService mapService);
// --- Accessors for shared service-level collaborators ---
NodeEngine getNodeEngine();
MapEventPublisher getMapEventPublisher();
MapEventJournal getEventJournal();
QueryEngine getQueryEngine(String name);
QueryRunner getMapQueryRunner(String name);
QueryOptimizer getQueryOptimizer();
LocalMapStatsProvider getLocalMapStatsProvider();
MapClearExpiredRecordsTask getClearExpiredRecordsTask();
/** Returns the operation factory for the named map. */
MapOperationProvider getMapOperationProvider(String mapName);
IndexProvider getIndexProvider(MapConfig mapConfig);
Extractors getExtractors(String mapName);
/** Records latency/throughput stats for a completed operation into {@code localMapStats}. */
void incrementOperationStats(long startTime, LocalMapStatsImpl localMapStats, String mapName, Operation operation);
/** Removes the given container; returns {@code true} when it was actually removed. */
boolean removeMapContainer(MapContainer mapContainer);
/** Resolves (and presumably caches) the partitioning strategy for the named map. */
PartitioningStrategy getPartitioningStrategy(String mapName, PartitioningStrategyConfig config);
/** Drops the cached partitioning strategy for the named map. */
void removePartitioningStrategyFromCache(String mapName);
/** Returns all partition containers, indexed by partition ID. */
PartitionContainer[] getPartitionContainers();
/** Callback invoked when the cluster state changes. */
void onClusterStateChange(ClusterState newState);
ResultProcessorRegistry getResultProcessorRegistry();
MapNearCacheManager getMapNearCacheManager();
QueryCacheContext getQueryCacheContext();
// --- Listener registration; returns the registration UUID used for later removal ---
UUID addListenerAdapter(ListenerAdapter listenerAdaptor, EventFilter eventFilter, String mapName);
CompletableFuture<UUID> addListenerAdapterAsync(ListenerAdapter listenerAdaptor, EventFilter eventFilter, String mapName);
UUID addLocalListenerAdapter(ListenerAdapter listenerAdaptor, String mapName);
IndexCopyBehavior getIndexCopyBehavior();
/** Returns {@code true} when global (as opposed to partitioned) indexes are enabled. */
boolean globalIndexEnabled();
ValueComparator getValueComparatorOf(InMemoryFormat inMemoryFormat);
NodeWideUsedCapacityCounter getNodeWideUsedCapacityCounter();
ExecutorStats getOffloadedEntryProcessorExecutorStats();
/** Semaphore bounding the number of keys loaded concurrently across the node. */
Semaphore getNodeWideLoadedKeyLimiter();
/**
 * @return {@code true} when Merkle tree maintenance should be enabled for given {@code mapConfig},
 * otherwise {@code false}.
 */
default boolean shouldEnableMerkleTree(MapConfig mapConfig, boolean log) {
return false;
}
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.appformer.maven.integration.embedder;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map.Entry;
import java.util.Properties;
import org.apache.maven.DefaultMaven;
import org.apache.maven.Maven;
import org.apache.maven.artifact.InvalidRepositoryException;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.execution.DefaultMavenExecutionRequest;
import org.apache.maven.execution.DefaultMavenExecutionResult;
import org.apache.maven.execution.MavenExecutionRequest;
import org.apache.maven.execution.MavenExecutionRequestPopulationException;
import org.apache.maven.execution.MavenExecutionRequestPopulator;
import org.apache.maven.execution.MavenExecutionResult;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Profile;
import org.apache.maven.model.building.ModelSource;
import org.apache.maven.plugin.LegacySupport;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.ProjectBuilder;
import org.apache.maven.project.ProjectBuildingException;
import org.apache.maven.project.ProjectBuildingRequest;
import org.apache.maven.project.ProjectBuildingResult;
import org.apache.maven.repository.RepositorySystem;
import org.apache.maven.settings.Settings;
import org.apache.maven.settings.building.DefaultSettingsBuildingRequest;
import org.apache.maven.settings.building.FileSettingsSource;
import org.apache.maven.settings.building.SettingsBuilder;
import org.apache.maven.settings.building.SettingsBuildingException;
import org.apache.maven.settings.building.SettingsBuildingRequest;
import org.apache.maven.settings.building.SettingsSource;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.util.Os;
import org.eclipse.aether.RepositorySystemSession;
import org.appformer.maven.integration.MavenRepositoryConfiguration;
import org.slf4j.LoggerFactory;
import static org.appformer.maven.integration.IoUtils.copyInTempFile;
public class MavenEmbedder {
private static final org.slf4j.Logger log = LoggerFactory.getLogger( MavenEmbedder.class );
public static final File DEFAULT_GLOBAL_SETTINGS_FILE =
new File( System.getProperty( "maven.home", System.getProperty( "user.dir", "" ) ), "conf/settings.xml" );
private final MavenRequest mavenRequest;
private final ComponentProvider componentProvider;
private MavenExecutionRequest mavenExecutionRequest;
private MavenSession mavenSession;
public MavenEmbedder( MavenRequest mavenRequest ) throws MavenEmbedderException {
this( Thread.currentThread().getContextClassLoader(), null, mavenRequest );
}
public MavenEmbedder( ClassLoader mavenClassLoader,
ClassLoader parent,
MavenRequest mavenRequest ) throws MavenEmbedderException {
this( mavenRequest, MavenEmbedderUtils.buildComponentProvider( mavenClassLoader, parent, mavenRequest ) );
}
private MavenEmbedder( MavenRequest mavenRequest,
ComponentProvider componentProvider ) throws MavenEmbedderException {
this.mavenRequest = mavenRequest;
this.componentProvider = componentProvider;
try {
this.mavenExecutionRequest = this.buildMavenExecutionRequest( mavenRequest );
RepositorySystemSession rss = ( (DefaultMaven) componentProvider.lookup( Maven.class ) ).newRepositorySession( mavenExecutionRequest );
mavenSession = new MavenSession( componentProvider.getPlexusContainer(), rss, mavenExecutionRequest, new DefaultMavenExecutionResult() );
componentProvider.lookup( LegacySupport.class ).setSession( mavenSession );
} catch ( MavenEmbedderException e ) {
log.error( "Unable to build MavenEmbedder", e );
throw e;
} catch ( ComponentLookupException e ) {
log.error( "Unable to build MavenEmbedder", e );
throw new MavenEmbedderException( e.getMessage(), e );
}
}
protected MavenExecutionRequest buildMavenExecutionRequest( MavenRequest mavenRequest )
throws MavenEmbedderException, ComponentLookupException {
MavenExecutionRequest mavenExecutionRequest = new DefaultMavenExecutionRequest();
if ( mavenRequest.getGlobalSettingsFile() != null ) {
mavenExecutionRequest.setGlobalSettingsFile( new File( mavenRequest.getGlobalSettingsFile() ) );
}
SettingsSource userSettings = mavenRequest.getUserSettingsSource();
if ( userSettings != null ) {
if ( userSettings instanceof FileSettingsSource ) {
mavenExecutionRequest.setUserSettingsFile( ( (FileSettingsSource) userSettings ).getSettingsFile() );
} else {
try {
mavenExecutionRequest.setUserSettingsFile( copyInTempFile( userSettings.getInputStream(), "xml" ) );
} catch ( IOException ioe ) {
log.warn( "Unable to use maven settings defined in " + userSettings, ioe );
}
}
}
try {
componentProvider.lookup( MavenExecutionRequestPopulator.class ).populateFromSettings( mavenExecutionRequest, getSettings() );
componentProvider.lookup( MavenExecutionRequestPopulator.class ).populateDefaults( mavenExecutionRequest );
} catch ( MavenExecutionRequestPopulationException e ) {
throw new MavenEmbedderException( e.getMessage(), e );
}
ArtifactRepository localRepository = getLocalRepository();
mavenExecutionRequest.setLocalRepository( localRepository );
mavenExecutionRequest.setLocalRepositoryPath( localRepository.getBasedir() );
mavenExecutionRequest.setOffline( mavenRequest.isOffline() );
mavenExecutionRequest.setUpdateSnapshots( mavenRequest.isUpdateSnapshots() );
// TODO check null and create a console one ?
mavenExecutionRequest.setTransferListener( mavenRequest.getTransferListener() );
mavenExecutionRequest.setCacheNotFound( mavenRequest.isCacheNotFound() );
mavenExecutionRequest.setCacheTransferError( true );
mavenExecutionRequest.setUserProperties( mavenRequest.getUserProperties() );
mavenExecutionRequest.getSystemProperties().putAll( System.getProperties() );
if ( mavenRequest.getSystemProperties() != null ) {
mavenExecutionRequest.getSystemProperties().putAll( mavenRequest.getSystemProperties() );
}
mavenExecutionRequest.getSystemProperties().putAll( getEnvVars() );
if ( mavenRequest.getProfiles() != null && !mavenRequest.getProfiles().isEmpty() ) {
for ( String id : mavenRequest.getProfiles() ) {
Profile p = new Profile();
p.setId( id );
p.setSource( "cli" );
mavenExecutionRequest.addProfile( p );
mavenExecutionRequest.addActiveProfile( id );
}
}
MavenRepositoryConfiguration mavenRepoConf = getMavenRepositoryConfiguration();
//DROOLS-899: Copy repositories defined in settings to execution request
for ( ArtifactRepository artifactRepository : mavenRepoConf.getArtifactRepositoriesForRequest() ) {
mavenExecutionRequest.addRemoteRepository( artifactRepository );
}
mavenExecutionRequest.setProxies( mavenRepoConf.getProxies() );
mavenExecutionRequest.setLoggingLevel( mavenRequest.getLoggingLevel() );
componentProvider.lookup( Logger.class ).setThreshold( mavenRequest.getLoggingLevel() );
mavenExecutionRequest.setExecutionListener( mavenRequest.getExecutionListener() )
.setInteractiveMode( mavenRequest.isInteractive() )
.setGlobalChecksumPolicy( mavenRequest.getGlobalChecksumPolicy() )
.setGoals( mavenRequest.getGoals() );
if ( mavenRequest.getPom() != null ) {
mavenExecutionRequest.setPom( new File( mavenRequest.getPom() ) );
}
if ( mavenRequest.getWorkspaceReader() != null ) {
mavenExecutionRequest.setWorkspaceReader( mavenRequest.getWorkspaceReader() );
}
return mavenExecutionRequest;
}
protected MavenRepositoryConfiguration getMavenRepositoryConfiguration() {
return MavenSettings.getMavenRepositoryConfiguration();
}
private Properties getEnvVars() {
Properties envVars = new Properties();
boolean caseSensitive = !Os.isFamily( Os.FAMILY_WINDOWS );
for ( Entry<String, String> entry : System.getenv().entrySet() ) {
String key = "env." + ( caseSensitive ? entry.getKey() : entry.getKey().toUpperCase( Locale.ENGLISH ) );
envVars.setProperty( key, entry.getValue() );
}
return envVars;
}
public Settings getSettings() throws MavenEmbedderException, ComponentLookupException {
SettingsBuildingRequest settingsBuildingRequest = new DefaultSettingsBuildingRequest();
if ( this.mavenRequest.getGlobalSettingsFile() != null ) {
settingsBuildingRequest.setGlobalSettingsFile( new File( this.mavenRequest.getGlobalSettingsFile() ) );
} else {
settingsBuildingRequest.setGlobalSettingsFile( DEFAULT_GLOBAL_SETTINGS_FILE );
}
if ( this.mavenRequest.getUserSettingsSource() != null ) {
settingsBuildingRequest.setUserSettingsSource( this.mavenRequest.getUserSettingsSource() );
} else {
SettingsSource userSettingsSource = MavenSettings.getUserSettingsSource();
if ( userSettingsSource != null ) {
settingsBuildingRequest.setUserSettingsSource( userSettingsSource );
}
}
settingsBuildingRequest.setUserProperties( this.mavenRequest.getUserProperties() );
settingsBuildingRequest.getSystemProperties().putAll( System.getProperties() );
settingsBuildingRequest.getSystemProperties().putAll( this.mavenRequest.getSystemProperties() );
settingsBuildingRequest.getSystemProperties().putAll( getEnvVars() );
try {
return componentProvider.lookup( SettingsBuilder.class ).build( settingsBuildingRequest ).getEffectiveSettings();
} catch ( SettingsBuildingException e ) {
throw new MavenEmbedderException( e.getMessage(), e );
}
}
public ArtifactRepository getLocalRepository() throws ComponentLookupException {
try {
String localRepositoryPath = getLocalRepositoryPath();
if ( localRepositoryPath != null ) {
return componentProvider.lookup( RepositorySystem.class ).createLocalRepository( new File( localRepositoryPath ) );
}
return componentProvider.lookup( RepositorySystem.class ).createLocalRepository( RepositorySystem.defaultUserLocalRepository );
} catch ( InvalidRepositoryException e ) {
// never happened
throw new IllegalStateException( e );
}
}
public String getLocalRepositoryPath() {
String path = null;
try {
Settings settings = getSettings();
path = settings.getLocalRepository();
} catch ( MavenEmbedderException e ) {
// ignore
} catch ( ComponentLookupException e ) {
// ignore
}
if ( this.mavenRequest.getLocalRepositoryPath() != null ) {
path = this.mavenRequest.getLocalRepositoryPath();
}
if ( path == null ) {
path = RepositorySystem.defaultUserLocalRepository.getAbsolutePath();
}
return path;
}
// ----------------------------------------------------------------------
// Project
// ----------------------------------------------------------------------
public MavenProject readProject( final InputStream mavenProjectStream ) throws ProjectBuildingException, MavenEmbedderException {
ModelSource modelSource = new ModelSource() {
@Override
public InputStream getInputStream() throws IOException {
return mavenProjectStream;
}
@Override
public String getLocation() {
return "";
}
};
ClassLoader originalCl = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader( componentProvider.getSystemClassLoader() );
ProjectBuilder projectBuilder = componentProvider.lookup( ProjectBuilder.class );
// BZ-1007894: Check if added dependencies are resolvable.
ProjectBuildingResult result = projectBuilder.build( modelSource, getProjectBuildingRequest() );
if ( result != null && result.getDependencyResolutionResult() != null && !result.getDependencyResolutionResult().getCollectionErrors().isEmpty() ) {
// A dependency resolution error has been produced. It can contains some error. Throw the first one to the client, so the user will fix every one sequentially.
Exception depedencyResolutionException = result.getDependencyResolutionResult().getCollectionErrors().get( 0 );
if ( depedencyResolutionException != null ) {
throw new MavenEmbedderException( depedencyResolutionException.getMessage(), depedencyResolutionException );
}
}
return result.getProject();
} catch ( ComponentLookupException e ) {
throw new MavenEmbedderException( e.getMessage(), e );
} finally {
Thread.currentThread().setContextClassLoader( originalCl );
try {
mavenProjectStream.close();
} catch ( IOException e ) {
}
}
}
public MavenProject readProject( File mavenProject ) throws ProjectBuildingException, MavenEmbedderException {
List<MavenProject> projects = readProjects( mavenProject, false );
return projects == null || projects.isEmpty() ? null : projects.get( 0 );
}
public List<MavenProject> readProjects( File mavenProject,
boolean recursive ) throws ProjectBuildingException, MavenEmbedderException {
ClassLoader originalCl = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader( componentProvider.getSystemClassLoader() );
List<ProjectBuildingResult> results = buildProjects( mavenProject, recursive );
List<MavenProject> projects = new ArrayList<MavenProject>( results.size() );
for ( ProjectBuildingResult result : results ) {
projects.add( result.getProject() );
}
return projects;
} finally {
Thread.currentThread().setContextClassLoader( originalCl );
}
}
public List<ProjectBuildingResult> buildProjects( File mavenProject,
boolean recursive ) throws ProjectBuildingException, MavenEmbedderException {
ClassLoader originalCl = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader( componentProvider.getSystemClassLoader() );
ProjectBuilder projectBuilder = componentProvider.lookup( ProjectBuilder.class );
return projectBuilder.build( Collections.singletonList( mavenProject ), recursive, getProjectBuildingRequest() );
} catch ( ComponentLookupException e ) {
throw new MavenEmbedderException( e.getMessage(), e );
} finally {
Thread.currentThread().setContextClassLoader( originalCl );
}
}
private ProjectBuildingRequest getProjectBuildingRequest() throws ComponentLookupException {
ProjectBuildingRequest projectBuildingRequest = this.mavenExecutionRequest.getProjectBuildingRequest();
projectBuildingRequest.setValidationLevel( this.mavenRequest.getValidationLevel() );
RepositorySystemSession repositorySystemSession = componentProvider.getRepositorySystemSession( mavenExecutionRequest );
projectBuildingRequest.setRepositorySession( repositorySystemSession );
projectBuildingRequest.setProcessPlugins( this.mavenRequest.isProcessPlugins() );
projectBuildingRequest.setResolveDependencies( this.mavenRequest.isResolveDependencies() );
return projectBuildingRequest;
}
public MavenSession getMavenSession() {
return mavenSession;
}
public MavenExecutionRequest getMavenExecutionRequest() {
return mavenExecutionRequest;
}
public void dispose() {
PlexusContainer plexusContainer = componentProvider.getPlexusContainer();
if ( plexusContainer != null ) {
plexusContainer.dispose();
}
}
public MavenExecutionResult execute( final MavenRequest mavenRequest )
throws MavenEmbedderException {
final ClassLoader originalCl = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader( componentProvider.getSystemClassLoader() );
final Maven maven = componentProvider.lookup( Maven.class );
return maven.execute( buildMavenExecutionRequest( mavenRequest ) );
} catch ( final MavenEmbedderException e ) {
log.error( "An MavenEmbedderException occurred during maven execution.", e );
throw e;
} catch ( final Throwable e ) {
log.error( "An exception occurred during maven execution.", e );
throw new MavenEmbedderException( e.getMessage(), e );
} finally {
Thread.currentThread().setContextClassLoader( originalCl );
}
}
}
| |
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package io.reactivex.netty.protocol.tcp.server;
import io.netty.buffer.ByteBufAllocator;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.ServerChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.util.concurrent.EventExecutorGroup;
import io.reactivex.netty.protocol.tcp.server.events.TcpServerEventListener;
import io.reactivex.netty.protocol.tcp.server.events.TcpServerEventPublisher;
import io.reactivex.netty.server.ServerState;
import io.reactivex.netty.ssl.SslCodec;
import rx.Subscription;
import rx.functions.Action1;
import rx.functions.Func0;
import rx.functions.Func1;
import javax.net.ssl.SSLEngine;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import java.util.logging.Logger;
public class TcpServerImpl<R, W> extends TcpServer<R, W> {
// JUL logger for server lifecycle messages.
private static final Logger logger = Logger.getLogger(TcpServerImpl.class.getName());
// Lifecycle states: Created -> Starting -> Started -> Shutdown.
protected enum ServerStatus {Created, Starting, Started, Shutdown}
// Configuration state; every mutator below returns a new server wrapping a derived state.
private final ServerState<R, W> state;
// Result of the bind; set by start(), null until then.
private ChannelFuture bindFuture;
// Current lifecycle status; transitions use compareAndSet so only one thread wins each transition.
protected final AtomicReference<ServerStatus> serverStateRef;
// Creates a server that will bind to the given address.
public TcpServerImpl(SocketAddress socketAddress) {
state = TcpServerState.create(socketAddress);
serverStateRef = new AtomicReference<>(ServerStatus.Created);
}
// Creates a server with explicit parent/child event loop groups and a server channel class.
public TcpServerImpl(SocketAddress socketAddress, EventLoopGroup parent, EventLoopGroup child,
Class<? extends ServerChannel> channelClass) {
state = TcpServerState.create(socketAddress, parent, child, channelClass);
serverStateRef = new AtomicReference<>(ServerStatus.Created);
}
// Used by the copy-on-write mutators to wrap an already-derived state.
private TcpServerImpl(ServerState<R, W> state) {
this.state = state;
serverStateRef = new AtomicReference<>(ServerStatus.Created);
}
// Returns a copy of this server with the given server-channel option applied; this instance is unchanged.
@Override
public <T> TcpServer<R, W> channelOption(ChannelOption<T> option, T value) {
return copy(state.channelOption(option, value));
}
// Returns a copy of this server with the given option applied to accepted client channels.
@Override
public <T> TcpServer<R, W> clientChannelOption(ChannelOption<T> option, T value) {
return copy(state.clientChannelOption(option, value));
}
// Returns a copy with the handler produced by handlerFactory added first; may change the read/write types.
@Override
public <RR, WW> TcpServer<RR, WW> addChannelHandlerFirst(String name, Func0<ChannelHandler> handlerFactory) {
return copy(state.<RR, WW>addChannelHandlerFirst(name, handlerFactory));
}
// Variant of addChannelHandlerFirst that runs the handler on the supplied executor group.
@Override
public <RR, WW> TcpServer<RR, WW> addChannelHandlerFirst(EventExecutorGroup group, String name,
Func0<ChannelHandler> handlerFactory) {
return copy(state.<RR, WW>addChannelHandlerFirst(group, name, handlerFactory));
}
// Returns a copy with the handler produced by handlerFactory added last; may change the read/write types.
@Override
public <RR, WW> TcpServer<RR, WW> addChannelHandlerLast(String name, Func0<ChannelHandler> handlerFactory) {
return copy(state.<RR, WW>addChannelHandlerLast(name, handlerFactory));
}
// Variant of addChannelHandlerLast that runs the handler on the supplied executor group.
@Override
public <RR, WW> TcpServer<RR, WW> addChannelHandlerLast(EventExecutorGroup group, String name,
Func0<ChannelHandler> handlerFactory) {
return copy(state.<RR, WW>addChannelHandlerLast(group, name, handlerFactory));
}
// Returns a copy with the handler inserted before the handler named baseName.
@Override
public <RR, WW> TcpServer<RR, WW> addChannelHandlerBefore(String baseName, String name, Func0<ChannelHandler> handlerFactory) {
return copy(state.<RR, WW>addChannelHandlerBefore(baseName, name, handlerFactory));
}
// Variant of addChannelHandlerBefore that runs the handler on the supplied executor group.
@Override
public <RR, WW> TcpServer<RR, WW> addChannelHandlerBefore(EventExecutorGroup group, String baseName, String name,
Func0<ChannelHandler> handlerFactory) {
return copy(state.<RR, WW>addChannelHandlerBefore(group, baseName, name, handlerFactory));
}
// Returns a copy with the handler inserted after the handler named baseName.
@Override
public <RR, WW> TcpServer<RR, WW> addChannelHandlerAfter(String baseName, String name, Func0<ChannelHandler> handlerFactory) {
return copy(state.<RR, WW>addChannelHandlerAfter(baseName, name, handlerFactory));
}
// Variant of addChannelHandlerAfter that runs the handler on the supplied executor group.
@Override
public <RR, WW> TcpServer<RR, WW> addChannelHandlerAfter(EventExecutorGroup group, String baseName, String name,
Func0<ChannelHandler> handlerFactory) {
return copy(state.<RR, WW>addChannelHandlerAfter(group, baseName, name, handlerFactory));
}
// Returns a copy whose pipelines are additionally configured by the given action.
@Override
public <RR, WW> TcpServer<RR, WW> pipelineConfigurator(Action1<ChannelPipeline> pipelineConfigurator) {
return copy(state.<RR, WW>pipelineConfigurator(pipelineConfigurator));
}
// Returns a TLS-enabled copy using an SSLEngine built per-connection by the given factory.
@Override
public TcpServer<R, W> secure(Func1<ByteBufAllocator, SSLEngine> sslEngineFactory) {
return copy(((TcpServerState<R, W>)state).secure(sslEngineFactory));
}
// Returns a TLS-enabled copy using the supplied SSLEngine.
@Override
public TcpServer<R, W> secure(SSLEngine sslEngine) {
return copy(((TcpServerState<R, W>)state).secure(sslEngine));
}
// Returns a TLS-enabled copy using the supplied SslCodec.
@Override
public TcpServer<R, W> secure(SslCodec sslCodec) {
return copy(((TcpServerState<R, W>)state).secure(sslCodec));
}
// Returns a TLS-enabled copy with certificate validation disabled (testing only — not safe for production).
@Override
public TcpServer<R, W> unsafeSecure() {
return copy(((TcpServerState<R, W>)state).unsafeSecure());
}
// Returns a copy with wire-level logging enabled at the given level.
// NOTE(review): the <W, R> type-argument order differs from the <RR, WW> used by the other mutators — confirm intent.
@Override
@Deprecated
public TcpServer<R, W> enableWireLogging(LogLevel wireLoggingLevel) {
return copy(state.<W, R>enableWireLogging(wireLoggingLevel));
}
// Returns a copy with wire-level logging enabled, using the given logger name and level.
@Override
public TcpServer<R, W> enableWireLogging(String name, LogLevel wireLoggingLevel) {
return copy(state.<W, R>enableWireLogging(name, wireLoggingLevel));
}
@Override
public int getServerPort() {
    final SocketAddress boundAddress = getServerAddress();
    // A port number only exists for INET addresses; anything else reports 0.
    if (!(boundAddress instanceof InetSocketAddress)) {
        return 0;
    }
    return ((InetSocketAddress) boundAddress).getPort();
}
@Override
public SocketAddress getServerAddress() {
    // Once the bind has completed, report the actual bound address; before that, the configured one.
    if (null != bindFuture && bindFuture.isDone()) {
        return bindFuture.channel().localAddress();
    }
    return state.getServerAddress();
}
// Binds the server and installs the connection bridge; only the first caller may start the server.
@Override
public TcpServer<R, W> start(final ConnectionHandler<R, W> connectionHandler) {
// Only one thread can move Created -> Starting; all other callers fail fast.
if (!serverStateRef.compareAndSet(ServerStatus.Created, ServerStatus.Starting)) {
throw new IllegalStateException("Server already started");
}
try {
// Pipeline configurator that wires the user's connectionHandler into every accepted channel.
Action1<ChannelPipeline> handlerFactory = new Action1<ChannelPipeline>() {
@Override
public void call(ChannelPipeline pipeline) {
TcpServerState<R, W> tcpState = (TcpServerState<R, W>) state;
TcpServerConnectionToChannelBridge.addToPipeline(pipeline, connectionHandler,
tcpState.getEventPublisher(), tcpState.isSecure());
}
};
final TcpServerState<R, W> newState = (TcpServerState<R, W>) state.pipelineConfigurator(handlerFactory);
// Blocking bind; sync() waits until the bind attempt completes.
bindFuture = newState.getBootstrap().bind(newState.getServerAddress()).sync();
if (!bindFuture.isSuccess()) {
throw new RuntimeException(bindFuture.cause());
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
// NOTE(review): if bind fails above, the status is left at Starting and the server cannot be restarted — confirm whether intended.
serverStateRef.set(ServerStatus.Started); // It will come here only if this was the thread that transitioned to Starting
logger.info("Rx server started at port: " + getServerPort());
return this;
}
/**
 * Closes the server channel and blocks until the close completes.
 *
 * <p>If the calling thread is interrupted while waiting, the interrupt status is
 * re-asserted so callers further up the stack can observe it (previously the
 * interruption was silently swallowed after logging).
 *
 * @throws IllegalStateException if the server is not in the Started state.
 */
@Override
public void shutdown() {
    if (!serverStateRef.compareAndSet(ServerStatus.Started, ServerStatus.Shutdown)) {
        throw new IllegalStateException("The server is already shutdown.");
    } else {
        try {
            bindFuture.channel().close().sync();
        } catch (InterruptedException e) {
            // Preserve the interrupt for callers instead of swallowing it.
            Thread.currentThread().interrupt();
            logger.log(Level.SEVERE, "Interrupted while waiting for the server socket to close.", e);
        }
    }
}
/**
 * Blocks the calling thread until this server is shut down. Returns immediately when
 * the server is already shut down.
 *
 * <p>If interrupted while waiting, the interrupt status is re-asserted. The original
 * code called {@code Thread.interrupted()}, which <em>clears</em> the flag and hides
 * the interruption from callers; {@code Thread.currentThread().interrupt()} is the
 * standard way to preserve it.
 *
 * @throws IllegalStateException if the server has not been started yet.
 */
@Override
public void awaitShutdown() {
    ServerStatus status = serverStateRef.get();
    switch (status) {
    case Created:
    case Starting:
        throw new IllegalStateException("Server not started yet.");
    case Started:
        try {
            bindFuture.channel().closeFuture().await();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // Preserve the interrupted status
            logger.log(Level.SEVERE, "Interrupted while waiting for the server socket to close.", e);
        }
        break;
    case Shutdown:
        // Nothing to do as it is already shutdown.
        break;
    }
}
/**
 * Blocks the calling thread until this server is shut down or the given duration
 * elapses, whichever comes first. Returns immediately when already shut down.
 *
 * <p>If interrupted while waiting, the interrupt status is re-asserted (the original
 * {@code Thread.interrupted()} call cleared it, hiding the interruption from callers).
 *
 * <p>NOTE(review): the boolean result of {@code await(duration, timeUnit)} is
 * discarded, so callers cannot distinguish a timeout from a completed shutdown —
 * the {@code void} interface prevents surfacing it here.
 *
 * @param duration maximum time to wait.
 * @param timeUnit unit of {@code duration}.
 * @throws IllegalStateException if the server has not been started yet.
 */
@Override
public void awaitShutdown(long duration, TimeUnit timeUnit) {
    ServerStatus status = serverStateRef.get();
    switch (status) {
    case Created:
    case Starting:
        throw new IllegalStateException("Server not started yet.");
    case Started:
        try {
            bindFuture.channel().closeFuture().await(duration, timeUnit);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // Preserve the interrupted status
            logger.log(Level.SEVERE, "Interrupted while waiting for the server socket to close.", e);
        }
        break;
    case Shutdown:
        // Nothing to do as it is already shutdown.
        break;
    }
}
/** Returns the publisher through which this server emits its TCP server events. */
@Override
public TcpServerEventPublisher getEventPublisher() {
    TcpServerState<R, W> tcpState = (TcpServerState<R, W>) state;
    return tcpState.getEventPublisher();
}
/**
 * Registers {@code listener} for this server's events; the returned subscription
 * unregisters it.
 */
@Override
public Subscription subscribe(TcpServerEventListener listener) {
    TcpServerState<R, W> tcpState = (TcpServerState<R, W>) state;
    return tcpState.getEventPublisher().subscribe(listener);
}
/** Wraps the supplied immutable state in a fresh server instance (copy-on-write step). */
private static <RR, WW> TcpServer<RR, WW> copy(ServerState<RR, WW> newState) {
    return new TcpServerImpl<RR, WW>(newState);
}
}
| |
package com.google.javascript.gents;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.collect.Sets;
import com.google.common.io.ByteSource;
import com.google.common.io.Files;
import com.google.javascript.jscomp.CodeConsumer;
import com.google.javascript.jscomp.CodeGenerator;
import com.google.javascript.jscomp.CodePrinter;
import com.google.javascript.jscomp.CodePrinter.Builder.CodeGeneratorFactory;
import com.google.javascript.jscomp.CodePrinter.Format;
import com.google.javascript.jscomp.Compiler;
import com.google.javascript.jscomp.CompilerOptions;
import com.google.javascript.jscomp.CompilerPass;
import com.google.javascript.jscomp.ErrorFormat;
import com.google.javascript.jscomp.SourceFile;
import com.google.javascript.rhino.Node;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.kohsuke.args4j.CmdLineException;
/**
* A tool that transpiles {@code .js} ES6 and ES5 Closure annotated JavaScript to {@code .ts}
* TypeScript.
*/
public class TypeScriptGenerator {
  /**
   * Command line clang-format string to format stdin. The filename 'a.ts' is only used to inform
   * clang-format of the file type (TS).
   */
  private static final String[] CLANG_FORMAT = {
    "clang-format", "-assume-filename=a.ts", "-style=Google"
  };
  static {
    // In some environments (Mac OS X programs started from Finder, like your IDE) PATH does
    // not contain "clang-format". This property allows explicitly configuring its location.
    String cfLocation = System.getProperty("gents.clangFormat");
    if (cfLocation != null) {
      CLANG_FORMAT[0] = cfLocation;
    }
  }
  /**
   * CLI entry point. Exit codes: 1 for bad command-line flags, 2 when conversion reported
   * errors, 3 for an uncaught exception, 0 on success.
   */
  public static void main(String[] args) {
    Options options = null;
    try {
      options = new Options(args);
    } catch (CmdLineException e) {
      System.err.println(e.getMessage());
      System.err.println("Usage: gents [options...] arguments...");
      e.getParser().printUsage(System.err);
      System.err.println();
      System.exit(1);
    }
    try {
      TypeScriptGenerator generator = new TypeScriptGenerator(options);
      generator.generateTypeScript();
      if (generator.hasErrors()) {
        // Already reported through the print stream.
        System.exit(2);
      }
    } catch (Exception e) {
      e.printStackTrace(System.err);
      System.err.println("Uncaught exception in gents, exiting.");
      System.exit(3);
    }
    System.exit(0);
  }
  // Parsed command-line options driving the conversion.
  private final Options opts;
  // Closure Compiler instance shared by all passes; single-threaded (see constructor).
  private final Compiler compiler;
  private final GentsErrorManager errorManager;
  final PathUtil pathUtil;
  final NameUtil nameUtil;
  /** Builds a generator around a single-threaded Closure Compiler wired to stderr reporting. */
  TypeScriptGenerator(Options opts) {
    this.opts = opts;
    this.compiler = new Compiler();
    compiler.disableThreads();
    this.errorManager = new GentsErrorManager(System.err,
        ErrorFormat.MULTILINE.toFormatter(compiler, true), opts.debug);
    compiler.setErrorManager(errorManager);
    this.pathUtil = new PathUtil(opts.root);
    this.nameUtil = new NameUtil(compiler);
  }
  /** Returns true once any error has been reported through the error manager. */
  boolean hasErrors() {
    return errorManager.getErrorCount() > 0;
  }
  /**
   * Runs the conversion for all configured files and writes results either to stdout
   * (when the output option is "-") or to per-file .ts files under the output directory.
   */
  void generateTypeScript() {
    List<SourceFile> srcFiles = getFiles(opts.srcFiles);
    List<SourceFile> externFiles = getFiles(opts.externs);
    Set<String> filesToConvert = Sets.newLinkedHashSet(opts.filesToConvert);
    Map<String, String> result = generateTypeScript(filesToConvert, srcFiles, externFiles);
    for (String filename : filesToConvert) {
      String relativePath = pathUtil.getRelativePath(".", filename);
      String basename = pathUtil.getFileNameWithoutExtension(relativePath);
      String tsCode = result.get(basename);
      if ("-".equals(opts.output)) {
        System.out.println("========================================");
        System.out.println("File: " + relativePath);
        System.out.println("========================================");
        System.out.println(tsCode);
      } else {
        String tsFilename = pathUtil.removeExtension(relativePath) + ".ts";
        File output = new File(new File(opts.output), tsFilename);
        if (!output.getParentFile().exists() &&
            !output.getParentFile().mkdirs()) {
          throw new IllegalArgumentException("Unable to make directories " + output.getParent());
        }
        try {
          Files.write(tsCode, output, UTF_8);
        } catch (IOException e) {
          throw new IllegalArgumentException("Unable to write to file " + output.getName(), e);
        }
      }
    }
  }
  /**
   * Returns a map from the basename to the TypeScript code generated for the file.
   *
   * <p>The compiler passes below are strictly ordered: module metadata is collected first,
   * non-converted files are stripped, comments are linked to nodes, then module, class,
   * typing and style passes run in sequence — each consumes output of the previous one.
   */
  Map<String, String> generateTypeScript(Set<String> filesToConvert,
      List<SourceFile> srcFiles, List<SourceFile> externs) throws AssertionError {
    Map<String, String> sourceFileMap = new LinkedHashMap<>();
    final CompilerOptions compilerOpts = opts.getCompilerOptions();
    // Compile javascript code
    compiler.compile(externs, srcFiles, compilerOpts);
    Node externRoot = compiler.getRoot().getFirstChild();
    Node srcRoot = compiler.getRoot().getLastChild();
    CollectModuleMetadata modulePrePass = new CollectModuleMetadata(compiler, nameUtil,
        filesToConvert);
    modulePrePass.process(externRoot, srcRoot);
    // Strips all file nodes that we are not compiling.
    stripNonCompiledNodes(srcRoot, filesToConvert);
    CommentLinkingPass commentsPass = new CommentLinkingPass(compiler);
    commentsPass.process(externRoot, srcRoot);
    final NodeComments comments = commentsPass.getComments();
    ModuleConversionPass modulePass = new ModuleConversionPass(compiler, pathUtil, nameUtil,
        modulePrePass.getFileMap(), modulePrePass.getNamespaceMap(), comments);
    modulePass.process(externRoot, srcRoot);
    CompilerPass classPass = new ClassConversionPass(compiler, comments);
    classPass.process(externRoot, srcRoot);
    CompilerPass typingPass = new TypeAnnotationPass(compiler, pathUtil, nameUtil,
        modulePrePass.getSymbolMap(), modulePass.getTypeRewrite(), comments);
    typingPass.process(externRoot, srcRoot);
    CompilerPass stylePass = new StyleFixPass(compiler, comments);
    stylePass.process(externRoot, srcRoot);
    // We only use the source root as the extern root is ignored for codegen
    for (Node file : srcRoot.children()) {
      String basename = pathUtil.getFileNameWithoutExtension(file.getSourceFileName());
      CodeGeneratorFactory factory = new CodeGeneratorFactory() {
        @Override
        public CodeGenerator getCodeGenerator(Format outputFormat, CodeConsumer cc) {
          return new GentsCodeGenerator(cc, compilerOpts, comments);
        }
      };
      // NOTE(review): this calls opts.getCompilerOptions() a second time instead of reusing
      // compilerOpts (which the GentsCodeGenerator above receives) — presumably equivalent,
      // but confirm Options#getCompilerOptions returns a consistent configuration.
      String tsCode = new CodePrinter.Builder(file)
          .setCompilerOptions(opts.getCompilerOptions())
          .setTypeRegistry(compiler.getTypeRegistry())
          .setCodeGeneratorFactory(factory)
          .setPrettyPrint(true)
          .setLineBreak(true)
          .setOutputTypes(true)
          .build();
      sourceFileMap.put(basename, tryClangFormat(tsCode));
    }
    errorManager.doGenerateReport();
    return sourceFileMap;
  }
  /**
   * Attempts to format the generated TypeScript using clang-format.
   * On failure to format (ie. clang-format does not exist), return the inputted string.
   */
  String tryClangFormat(String code) {
    Process process = null;
    try {
      process = Runtime.getRuntime().exec(CLANG_FORMAT);
      final OutputStream stdin = process.getOutputStream();
      // stdout must be final for the nested object byteSource to return it.
      final InputStream stdout = process.getInputStream();
      // Write TypeScript code to stdin of the process
      // NOTE(review): stderr of the process is never drained; if clang-format writes a lot
      // to stderr this could block the child — confirm typical clang-format behavior.
      BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(stdin));
      writer.write(code);
      writer.flush();
      writer.close();
      // Reads stdout of the process
      ByteSource byteSource = new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
          return stdout;
        }
      };
      return byteSource.asCharSource(UTF_8).read();
    } catch (IOException e) {
      System.err.println("clang-format has failed to execute: " + e.getMessage());
      return code;
    } finally {
      if (process != null) {
        // TODO(renez): Use .waitFor(n, TimeUnit.SECONDS) and .destroyForcibly() once we moved to
        // Java 8.
        process.destroy();
      }
    }
  }
  /**
   * Removes the root nodes for all the library files from the source node.
   */
  void stripNonCompiledNodes(Node n, Set<String> filesToCompile) {
    for (Node child : n.children()) {
      if (!filesToCompile.contains(child.getSourceFileName())) {
        child.detachFromParent();
      }
    }
  }
  /**
   * Returns a list of source files from a list of file names.
   */
  List<SourceFile> getFiles(Iterable<String> fileNames) {
    List<SourceFile> files = new ArrayList<>();
    for (String fileName : fileNames) {
      files.add(SourceFile.fromFile(fileName, UTF_8));
    }
    return files;
  }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/services/customer_client_link_service.proto
package com.google.ads.googleads.v8.services;
/**
* <pre>
* Request message for [CustomerClientLinkService.GetCustomerClientLink][google.ads.googleads.v8.services.CustomerClientLinkService.GetCustomerClientLink].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.services.GetCustomerClientLinkRequest}
*/
// NOTE(review): protoc-generated code (see file header: DO NOT EDIT). Comments below are
// orientation aids only; any real change belongs in the .proto source.
public final class GetCustomerClientLinkRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v8.services.GetCustomerClientLinkRequest)
    GetCustomerClientLinkRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use GetCustomerClientLinkRequest.newBuilder() to construct.
  private GetCustomerClientLinkRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private GetCustomerClientLinkRequest() {
    resourceName_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new GetCustomerClientLinkRequest();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: tag 10 = field 1 (resource_name, UTF-8 string);
  // unrecognized fields are preserved in unknownFields.
  private GetCustomerClientLinkRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();
            resourceName_ = s;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v8.services.CustomerClientLinkServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerClientLinkRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v8.services.CustomerClientLinkServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerClientLinkRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest.class, com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest.Builder.class);
  }
  public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted (and cached) on access.
  private volatile java.lang.Object resourceName_;
  /**
   * <pre>
   * Required. The resource name of the customer client link to fetch.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The resourceName.
   */
  @java.lang.Override
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resourceName_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The resource name of the customer client link to fetch.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for resourceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only non-default fields, then any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over resource_name and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest other = (com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest) obj;
    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points for every supported input representation.
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for [CustomerClientLinkService.GetCustomerClientLink][google.ads.googleads.v8.services.CustomerClientLinkService.GetCustomerClientLink].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v8.services.GetCustomerClientLinkRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.services.GetCustomerClientLinkRequest)
      com.google.ads.googleads.v8.services.GetCustomerClientLinkRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v8.services.CustomerClientLinkServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerClientLinkRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v8.services.CustomerClientLinkServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerClientLinkRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest.class, com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest.Builder.class);
    }
    // Construct using com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      resourceName_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v8.services.CustomerClientLinkServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerClientLinkRequest_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest getDefaultInstanceForType() {
      return com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest build() {
      com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest buildPartial() {
      com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest result = new com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest(this);
      result.resourceName_ = resourceName_;
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest) {
        return mergeFrom((com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: non-empty resource_name wins; unknown fields are merged too.
    public Builder mergeFrom(com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest other) {
      if (other == com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Required. The resource name of the customer client link to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the customer client link to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the customer client link to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  
      resourceName_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the customer client link to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      resourceName_ = getDefaultInstance().getResourceName();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the customer client link to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      resourceName_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.services.GetCustomerClientLinkRequest)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v8.services.GetCustomerClientLinkRequest)
  // Shared immutable default instance; all empty messages alias this.
  private static final com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest();
  }
  public static com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<GetCustomerClientLinkRequest>
      PARSER = new com.google.protobuf.AbstractParser<GetCustomerClientLinkRequest>() {
    @java.lang.Override
    public GetCustomerClientLinkRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new GetCustomerClientLinkRequest(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<GetCustomerClientLinkRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<GetCustomerClientLinkRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v8.services.GetCustomerClientLinkRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.eventgrid.v2018_05_01_preview.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.CloudException;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in TopicTypes.
*/
public class TopicTypesInner {
/** The Retrofit service to perform REST calls. */
private TopicTypesService service;
/** The service client containing this operation class. */
private EventGridManagementClientImpl client;
/**
 * Initializes an instance of TopicTypesInner, wiring the Retrofit-backed service
 * proxy to the owning management client.
 *
 * @param retrofit the Retrofit instance built from a Retrofit Builder.
 * @param client the instance of the service client containing this operation class.
 */
public TopicTypesInner(Retrofit retrofit, EventGridManagementClientImpl client) {
    this.client = client;
    this.service = retrofit.create(TopicTypesService.class);
}
/**
* The interface defining all the services for TopicTypes to be
* used by Retrofit to perform actually REST calls.
*/
interface TopicTypesService {
@Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.eventgrid.v2018_05_01_preview.TopicTypes list" })
@GET("providers/Microsoft.EventGrid/topicTypes")
Observable<Response<ResponseBody>> list(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
@Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.eventgrid.v2018_05_01_preview.TopicTypes get" })
@GET("providers/Microsoft.EventGrid/topicTypes/{topicTypeName}")
Observable<Response<ResponseBody>> get(@Path("topicTypeName") String topicTypeName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
@Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.eventgrid.v2018_05_01_preview.TopicTypes listEventTypes" })
@GET("providers/Microsoft.EventGrid/topicTypes/{topicTypeName}/eventTypes")
Observable<Response<ResponseBody>> listEventTypes(@Path("topicTypeName") String topicTypeName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
}
/**
* List topic types.
* List all registered topic types.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the List<TopicTypeInfoInner> object if successful.
*/
public List<TopicTypeInfoInner> list() {
return listWithServiceResponseAsync().toBlocking().single().body();
}
/**
* List topic types.
* List all registered topic types.
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<List<TopicTypeInfoInner>> listAsync(final ServiceCallback<List<TopicTypeInfoInner>> serviceCallback) {
return ServiceFuture.fromResponse(listWithServiceResponseAsync(), serviceCallback);
}
/**
* List topic types.
* List all registered topic types.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the List<TopicTypeInfoInner> object
*/
public Observable<List<TopicTypeInfoInner>> listAsync() {
return listWithServiceResponseAsync().map(new Func1<ServiceResponse<List<TopicTypeInfoInner>>, List<TopicTypeInfoInner>>() {
@Override
public List<TopicTypeInfoInner> call(ServiceResponse<List<TopicTypeInfoInner>> response) {
return response.body();
}
});
}
/**
* List topic types.
* List all registered topic types.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the List<TopicTypeInfoInner> object
*/
public Observable<ServiceResponse<List<TopicTypeInfoInner>>> listWithServiceResponseAsync() {
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.list(this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<TopicTypeInfoInner>>>>() {
@Override
public Observable<ServiceResponse<List<TopicTypeInfoInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<TopicTypeInfoInner>> result = listDelegate(response);
List<TopicTypeInfoInner> items = null;
if (result.body() != null) {
items = result.body().items();
}
ServiceResponse<List<TopicTypeInfoInner>> clientResponse = new ServiceResponse<List<TopicTypeInfoInner>>(items, result.response());
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<PageImpl<TopicTypeInfoInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<PageImpl<TopicTypeInfoInner>, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<PageImpl<TopicTypeInfoInner>>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
* Get a topic type.
* Get information about a topic type.
*
* @param topicTypeName Name of the topic type
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the TopicTypeInfoInner object if successful.
*/
public TopicTypeInfoInner get(String topicTypeName) {
return getWithServiceResponseAsync(topicTypeName).toBlocking().single().body();
}
/**
* Get a topic type.
* Get information about a topic type.
*
* @param topicTypeName Name of the topic type
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<TopicTypeInfoInner> getAsync(String topicTypeName, final ServiceCallback<TopicTypeInfoInner> serviceCallback) {
return ServiceFuture.fromResponse(getWithServiceResponseAsync(topicTypeName), serviceCallback);
}
/**
* Get a topic type.
* Get information about a topic type.
*
* @param topicTypeName Name of the topic type
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the TopicTypeInfoInner object
*/
public Observable<TopicTypeInfoInner> getAsync(String topicTypeName) {
return getWithServiceResponseAsync(topicTypeName).map(new Func1<ServiceResponse<TopicTypeInfoInner>, TopicTypeInfoInner>() {
@Override
public TopicTypeInfoInner call(ServiceResponse<TopicTypeInfoInner> response) {
return response.body();
}
});
}
/**
* Get a topic type.
* Get information about a topic type.
*
* @param topicTypeName Name of the topic type
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the TopicTypeInfoInner object
*/
public Observable<ServiceResponse<TopicTypeInfoInner>> getWithServiceResponseAsync(String topicTypeName) {
if (topicTypeName == null) {
throw new IllegalArgumentException("Parameter topicTypeName is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.get(topicTypeName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<TopicTypeInfoInner>>>() {
@Override
public Observable<ServiceResponse<TopicTypeInfoInner>> call(Response<ResponseBody> response) {
try {
ServiceResponse<TopicTypeInfoInner> clientResponse = getDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<TopicTypeInfoInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<TopicTypeInfoInner, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<TopicTypeInfoInner>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
* List event types.
* List event types for a topic type.
*
* @param topicTypeName Name of the topic type
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the List<EventTypeInner> object if successful.
*/
public List<EventTypeInner> listEventTypes(String topicTypeName) {
return listEventTypesWithServiceResponseAsync(topicTypeName).toBlocking().single().body();
}
/**
* List event types.
* List event types for a topic type.
*
* @param topicTypeName Name of the topic type
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<List<EventTypeInner>> listEventTypesAsync(String topicTypeName, final ServiceCallback<List<EventTypeInner>> serviceCallback) {
return ServiceFuture.fromResponse(listEventTypesWithServiceResponseAsync(topicTypeName), serviceCallback);
}
/**
* List event types.
* List event types for a topic type.
*
* @param topicTypeName Name of the topic type
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the List<EventTypeInner> object
*/
public Observable<List<EventTypeInner>> listEventTypesAsync(String topicTypeName) {
return listEventTypesWithServiceResponseAsync(topicTypeName).map(new Func1<ServiceResponse<List<EventTypeInner>>, List<EventTypeInner>>() {
@Override
public List<EventTypeInner> call(ServiceResponse<List<EventTypeInner>> response) {
return response.body();
}
});
}
/**
* List event types.
* List event types for a topic type.
*
* @param topicTypeName Name of the topic type
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the List<EventTypeInner> object
*/
public Observable<ServiceResponse<List<EventTypeInner>>> listEventTypesWithServiceResponseAsync(String topicTypeName) {
if (topicTypeName == null) {
throw new IllegalArgumentException("Parameter topicTypeName is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.listEventTypes(topicTypeName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<EventTypeInner>>>>() {
@Override
public Observable<ServiceResponse<List<EventTypeInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<EventTypeInner>> result = listEventTypesDelegate(response);
List<EventTypeInner> items = null;
if (result.body() != null) {
items = result.body().items();
}
ServiceResponse<List<EventTypeInner>> clientResponse = new ServiceResponse<List<EventTypeInner>>(items, result.response());
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<PageImpl<EventTypeInner>> listEventTypesDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<PageImpl<EventTypeInner>, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<PageImpl<EventTypeInner>>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.events.EventKind;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionDocumentationCategory;
import com.google.devtools.common.options.OptionEffectTag;
import com.google.devtools.common.options.OptionMetadataTag;
import com.google.devtools.common.options.OptionsBase;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.EnumSet;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * BlazeCommandEventHandler: an event handler established for the duration of a
 * single Blaze command. Formats events and writes them to the command's
 * stdout/stderr streams, honoring the display options below.
 */
public class BlazeCommandEventHandler implements EventHandler {
  private static final Logger logger = Logger.getLogger(BlazeCommandEventHandler.class.getName());

  public enum UseColor { YES, NO, AUTO }
  public enum UseCurses { YES, NO, AUTO }

  /** Converts the string value of {@code --color} into a {@link UseColor}. */
  public static class UseColorConverter extends EnumConverter<UseColor> {
    public UseColorConverter() {
      super(UseColor.class, "--color setting");
    }
  }

  /** Converts the string value of {@code --curses} into a {@link UseCurses}. */
  public static class UseCursesConverter extends EnumConverter<UseCurses> {
    public UseCursesConverter() {
      super(UseCurses.class, "--curses setting");
    }
  }

  /** Console-output options shared by all commands. */
  public static class Options extends OptionsBase {
    @Option(
      name = "show_progress",
      defaultValue = "true",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Display progress messages during a build."
    )
    public boolean showProgress;

    @Option(
      name = "show_task_finish",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Display progress messages when tasks complete, not just when they start."
    )
    public boolean showTaskFinish;

    @Option(
      name = "show_progress_rate_limit",
      defaultValue = "0.2", // A nice middle ground; snappy but not too spammy in logs.
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Minimum number of seconds between progress messages in the output."
    )
    public double showProgressRateLimit;

    @Option(
      name = "color",
      defaultValue = "auto",
      converter = UseColorConverter.class,
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Use terminal controls to colorize output."
    )
    public UseColor useColorEnum;

    @Option(
      name = "curses",
      defaultValue = "auto",
      converter = UseCursesConverter.class,
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Use terminal cursor controls to minimize scrolling output"
    )
    public UseCurses useCursesEnum;

    @Option(
      name = "terminal_columns",
      defaultValue = "80",
      metadataTags = {OptionMetadataTag.HIDDEN},
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "A system-generated parameter which specifies the terminal width in columns."
    )
    public int terminalColumns;

    @Option(
      name = "isatty",
      defaultValue = "false",
      metadataTags = {OptionMetadataTag.HIDDEN},
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "A system-generated parameter which is used to notify the "
              + "server whether this client is running in a terminal. "
              + "If this is set to false, then '--color=auto' will be treated as '--color=no'. "
              + "If this is set to true, then '--color=auto' will be treated as '--color=yes'."
    )
    public boolean isATty;

    // This lives here (as opposed to the more logical BuildRequest.Options)
    // because the client passes it to the server *always*. We don't want the
    // client to have to figure out when it should or shouldn't send it.
    @Option(
      name = "emacs",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "A system-generated parameter which is true iff EMACS=t or INSIDE_EMACS is set "
              + "in the environment of the client. This option controls certain display "
              + "features."
    )
    public boolean runningInEmacs;

    @Option(
      name = "show_timestamps",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Include timestamps in messages"
    )
    public boolean showTimestamp;

    @Option(
      name = "progress_in_terminal_title",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "Show the command progress in the terminal title. "
              + "Useful to see what bazel is doing when having multiple terminal tabs.")
    public boolean progressInTermTitle;

    @Option(
      name = "experimental_external_repositories",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Use external repositories for improved stability and speed when available."
    )
    public boolean externalRepositories;

    @Option(
      name = "force_experimental_external_repositories",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Forces --experimental_external_repositories."
    )
    public boolean forceExternalRepositories;

    @Option(
      name = "experimental_ui",
      defaultValue = "true",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "Switches to an alternative progress bar that more explicitly shows progress, such "
              + "as loaded packages and executed actions."
    )
    public boolean experimentalUi;

    @Option(
      name = "experimental_ui_debug_all_events",
      defaultValue = "false",
      metadataTags = {OptionMetadataTag.HIDDEN},
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Report all events known to the experimental new Bazel UI."
    )
    public boolean experimentalUiDebugAllEvents;

    @Option(
      name = "experimental_ui_actions_shown",
      defaultValue = "8",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "Number of concurrent actions shown in the alternative progress bar; each "
              + "action is shown on a separate line. The alternative progress bar always shows "
              + "at least one; all numbers less than 1 are mapped to 1. "
              + "This option has no effect unless --experimental_ui is set."
    )
    public int experimentalUiActionsShown;

    @Option(
      name = "experimental_ui_limit_console_output",
      defaultValue = "0",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "Number of bytes to which the experimental UI will limit its output (non-positive "
              + "values indicate unlimited). Once the limit is approaching, the experimental UI "
              + "will try hard to limit in a meaningful way, but will ultimately just drop all "
              + "output."
    )
    public int experimentalUiLimitConsoleOutput;

    @Option(
      name = "experimental_ui_deduplicate",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.TERMINAL_OUTPUT},
      help = "Make the experimental UI deduplicate messages to have a cleaner scroll-back log.")
    public boolean experimentalUiDeduplicate;

    /** Whether colored output was requested, resolving AUTO via the isatty flag. */
    public boolean useColor() {
      return useColorEnum == UseColor.YES || (useColorEnum == UseColor.AUTO && isATty);
    }

    /** Whether cursor-control output was requested, resolving AUTO via the isatty flag. */
    public boolean useCursorControl() {
      return useCursesEnum == UseCurses.YES || (useCursesEnum == UseCurses.AUTO && isATty);
    }
  }

  // DateTimeFormatter is immutable and thread-safe, so a shared static instance is fine.
  private static final DateTimeFormatter TIMESTAMP_FORMAT =
      DateTimeFormatter.ofPattern("(MM-dd HH:mm:ss.SSS) ");

  protected final OutErr outErr;
  private final PrintStream errPrintStream;
  /** The event kinds this handler reports; adjusted from the options in the constructor. */
  protected final Set<EventKind> eventMask =
      EnumSet.copyOf(EventKind.ERRORS_WARNINGS_AND_INFO_AND_OUTPUT);
  protected final boolean showTimestamp;

  public BlazeCommandEventHandler(OutErr outErr, Options eventOptions) {
    this.outErr = outErr;
    // Auto-flush so interleaved stdout/stderr output appears promptly.
    this.errPrintStream = new PrintStream(outErr.getErrorStream(), true);
    if (eventOptions.showProgress) {
      eventMask.add(EventKind.PROGRESS);
      eventMask.add(EventKind.START);
    } else {
      // Skip PASS events if --noshow_progress is requested.
      eventMask.remove(EventKind.PASS);
    }
    if (eventOptions.showTaskFinish) {
      eventMask.add(EventKind.FINISH);
    }
    eventMask.add(EventKind.SUBCOMMAND);
    this.showTimestamp = eventOptions.showTimestamp;
  }

  /** See EventHandler.handle. */
  @Override
  public void handle(Event event) {
    if (!eventMask.contains(event.getKind())) {
      return;
    }
    String prefix;
    switch (event.getKind()) {
      case STDOUT:
        putOutput(outErr.getOutputStream(), event);
        return;
      case STDERR:
        putOutput(outErr.getErrorStream(), event);
        return;
      case PASS:
      case FAIL:
      case TIMEOUT:
      case ERROR:
      case WARNING:
      case DEBUG:
      case DEPCHECKER:
        prefix = event.getKind() + ": ";
        break;
      case SUBCOMMAND:
        prefix = ">>>>>>>>> ";
        break;
      case INFO:
      case PROGRESS:
      case START:
      case FINISH:
        prefix = "____";
        break;
      default:
        // Every kind the mask can contain is handled above; anything else is a bug.
        throw new IllegalStateException("" + event.getKind());
    }
    StringBuilder buf = new StringBuilder();
    buf.append(prefix);
    if (showTimestamp) {
      buf.append(timestamp());
    }
    Location location = event.getLocation();
    if (location != null) {
      buf.append(location.print()).append(": ");
    }
    buf.append(event.getMessage());
    if (event.getKind() == EventKind.FINISH) {
      buf.append(" DONE");
    }
    // Add a trailing period for ERROR and WARNING messages, which are
    // typically English sentences composed from exception messages.
    if (event.getKind() == EventKind.WARNING ||
        event.getKind() == EventKind.ERROR) {
      buf.append('.');
    }
    // Event messages go to stderr; results (e.g. 'blaze query') go to stdout.
    errPrintStream.println(buf);
    // Attached process output is re-dispatched as STDERR/STDOUT events so it
    // goes through the same masking and stream routing as direct events.
    if (event.getStdErr() != null) {
      handle(Event.of(EventKind.STDERR, null, event.getStdErr()));
    }
    if (event.getStdOut() != null) {
      handle(Event.of(EventKind.STDOUT, null, event.getStdOut()));
    }
  }

  /** Writes the event's raw message bytes to {@code out}, logging (not throwing) on failure. */
  private void putOutput(OutputStream out, Event event) {
    try {
      out.write(event.getMessageBytes());
      out.flush();
    } catch (IOException e) {
      // This can happen in server mode if the blaze client has exited, or if output is redirected
      // to a file and the disk is full, etc. May be moot in the case of full disk, or useful in
      // the case of real bug in our handling of streams.
      logger.log(Level.WARNING, "Failed to write event", e);
    }
  }

  /**
   * @return a string representing the current time, eg "04-26 13:47:32.124".
   */
  protected String timestamp() {
    return TIMESTAMP_FORMAT.format(ZonedDateTime.now(ZoneId.systemDefault()));
  }
}
| |
/*
* Copyright 2016, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.google.api.gax.grpc;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import io.grpc.Channel;
import io.grpc.stub.StreamObserver;
import java.util.Iterator;
import javax.annotation.Nullable;
/**
 * An immutable object capable of issuing RPC calls to streaming API methods.
 *
 * <p>
 * Creating a StreamingCallable directly is considered advanced usage. Instances are normally
 * produced by a generated service API wrapper class and configured through the
 * StreamingCallSettings.Builder instances exposed on the wrapper class's settings class.
 */
public class StreamingCallable<RequestT, ResponseT> {
  private final DirectStreamingCallable<RequestT, ResponseT> callable;
  private final Channel channel;
  @Nullable private final StreamingCallSettings settings;

  /** Package-private */
  StreamingCallable(
      DirectStreamingCallable<RequestT, ResponseT> callable,
      Channel channel,
      StreamingCallSettings settings) {
    this.callable = callable;
    this.channel = channel;
    this.settings = settings;
  }

  /**
   * Bind the StreamingCallable with the given channel.
   *
   * @param boundChannel {@link io.grpc.Channel} to bind the callable with.
   */
  public StreamingCallable<RequestT, ResponseT> bind(Channel boundChannel) {
    // Immutable: rebinding produces a new instance rather than mutating this one.
    return new StreamingCallable<>(callable, boundChannel, settings);
  }

  /**
   * Create a callable object that represents a streaming API method. Public only for technical
   * reasons - for advanced usage
   *
   * @param streamingCallSettings {@link com.google.api.gax.grpc.StreamingCallSettings} to configure
   * the method-level settings with.
   * @param channel {@link Channel} to use to connect to the service.
   * @return {@link com.google.api.gax.grpc.StreamingCallable} callable object.
   */
  public static <RequestT, ResponseT> StreamingCallable<RequestT, ResponseT> create(
      StreamingCallSettings<RequestT, ResponseT> streamingCallSettings, Channel channel) {
    return streamingCallSettings.createStreamingCallable(channel);
  }

  /**
   * Conduct a bidirectional streaming call
   *
   * @param responseObserver {@link io.grpc.stub.StreamObserver} to observe the streaming responses
   * @return {@link StreamObserver} which is used for making streaming requests.
   */
  public StreamObserver<RequestT> bidiStreamingCall(StreamObserver<ResponseT> responseObserver) {
    Preconditions.checkNotNull(channel);
    CallContext defaultContext = CallContext.createDefault().withChannel(channel);
    return callable.bidiStreamingCall(responseObserver, defaultContext);
  }

  /**
   * Conduct a bidirectional streaming call with the given
   * {@link com.google.api.gax.grpc.CallContext}.
   *
   * @param responseObserver {@link io.grpc.stub.StreamObserver} to observe the streaming responses
   * @param context {@link com.google.api.gax.grpc.CallContext} to provide context information of
   * the gRPC call. The existing channel will be overridden by the channel contained in the context
   * (if any).
   * @return {@link StreamObserver} which is used for making streaming requests.
   */
  public StreamObserver<RequestT> bidiStreamingCall(
      StreamObserver<ResponseT> responseObserver, CallContext context) {
    CallContext resolved = resolveChannel(context);
    Preconditions.checkNotNull(resolved.getChannel());
    return callable.bidiStreamingCall(responseObserver, resolved);
  }

  /**
   * Conduct a server streaming call
   *
   * @param request request
   * @param responseObserver {@link io.grpc.stub.StreamObserver} to observe the streaming responses
   */
  public void serverStreamingCall(RequestT request, StreamObserver<ResponseT> responseObserver) {
    Preconditions.checkNotNull(channel);
    CallContext defaultContext = CallContext.createDefault().withChannel(channel);
    callable.serverStreamingCall(request, responseObserver, defaultContext);
  }

  /**
   * Conduct a server streaming call with the given {@link com.google.api.gax.grpc.CallContext}.
   *
   * @param request request
   * @param responseObserver {@link io.grpc.stub.StreamObserver} to observe the streaming responses
   * @param context {@link com.google.api.gax.grpc.CallContext} to provide context information of
   * the gRPC call. The existing channel will be overridden by the channel contained in the context
   * (if any).
   */
  public void serverStreamingCall(
      RequestT request, StreamObserver<ResponseT> responseObserver, CallContext context) {
    CallContext resolved = resolveChannel(context);
    Preconditions.checkNotNull(resolved.getChannel());
    callable.serverStreamingCall(request, responseObserver, resolved);
  }

  /**
   * Conduct a iteration server streaming call
   *
   * @param request request
   * @return {@link Iterator} which is used for iterating the responses.
   */
  public Iterator<ResponseT> serverStreamingCall(RequestT request) {
    Preconditions.checkNotNull(channel);
    CallContext defaultContext = CallContext.createDefault().withChannel(channel);
    return callable.blockingServerStreamingCall(request, defaultContext);
  }

  /**
   * Conduct a iteration server streaming call with the given
   * {@link com.google.api.gax.grpc.CallContext}
   *
   * @param request request
   * @param context {@link com.google.api.gax.grpc.CallContext} to provide context information of
   * the gRPC call. The existing channel will be overridden by the channel contained in the context
   * (if any).
   * @return {@link Iterator} which is used for iterating the responses.
   */
  public Iterator<ResponseT> serverStreamingCall(RequestT request, CallContext context) {
    return callable.blockingServerStreamingCall(request, resolveChannel(context));
  }

  /**
   * Conduct a client streaming call
   *
   * @param responseObserver {@link io.grpc.stub.StreamObserver} to receive the non-streaming
   * response.
   * @return {@link StreamObserver} which is used for making streaming requests.
   */
  public StreamObserver<RequestT> clientStreamingCall(StreamObserver<ResponseT> responseObserver) {
    Preconditions.checkNotNull(channel);
    CallContext defaultContext = CallContext.createDefault().withChannel(channel);
    return callable.clientStreamingCall(responseObserver, defaultContext);
  }

  /**
   * Conduct a client streaming call with the given {@link com.google.api.gax.grpc.CallContext}
   *
   * @param responseObserver {@link io.grpc.stub.StreamObserver} to receive the non-streaming
   * response.
   * @param context {@link com.google.api.gax.grpc.CallContext} to provide context information of
   * the gRPC call. The existing channel will be overridden by the channel contained in the context
   * (if any)
   * @return {@link StreamObserver} which is used for making streaming requests.
   */
  public StreamObserver<RequestT> clientStreamingCall(
      StreamObserver<ResponseT> responseObserver, CallContext context) {
    return callable.clientStreamingCall(responseObserver, resolveChannel(context));
  }

  /** Returns a context whose channel falls back to this callable's own channel when unset. */
  private CallContext resolveChannel(CallContext context) {
    return context.getChannel() == null ? context.withChannel(channel) : context;
  }

  @VisibleForTesting
  Channel getChannel() {
    return channel;
  }
}
| |
package com.github.citools;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.HttpClients;
import org.apache.maven.eventspy.AbstractEventSpy;
import org.apache.maven.eventspy.EventSpy;
import org.apache.maven.execution.BuildFailure;
import org.apache.maven.execution.BuildSuccess;
import org.apache.maven.execution.BuildSummary;
import org.apache.maven.execution.DefaultMavenExecutionResult;
import org.apache.maven.execution.ExecutionEvent;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.logging.Logger;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.stream.JsonReader;
/**
 * Maven {@link EventSpy} that reports build, project and mojo lifecycle events as GitHub
 * commit statuses on the current HEAD commit.
 *
 * <p>Configuration comes from user properties ({@code m2github.repo}, {@code m2github.token},
 * optionally {@code m2github.endpoint} and {@code m2github.configFile}) and from an optional
 * JSON config file mapping/ignoring event labels. When a required setting is missing the spy
 * disables itself rather than failing the build.
 *
 * @author Alain Helaili - helaili@github.com
 *
 */
@Component(role = EventSpy.class, hint = "github-event-spy")
public class MavenEventSpy extends AbstractEventSpy {

    @Requirement
    private Logger logger;

    private Context context;
    private HttpClient httpClient;
    // Count of every event class received, reported for diagnostics (see close()).
    private Map<String, Integer> eventClassNames = new HashMap<String, Integer>();
    // Labels of the execution events that were forwarded, reported on close().
    private Set<String> executionEventReceived = new HashSet<String>();
    private SpyConfig spyConfig;
    // When true, a required setting could not be resolved and all events are ignored.
    private boolean initError = false;
    private String githubToken;
    private String githubEndpoint;

    /** The states accepted by the GitHub commit status API. */
    public enum GitHubStatus {
        Pending("pending"), Success("success"), Failure("failure"), Error("error");

        private final String statusLabel;

        /**
         * @param statusLabel the lower-case state string expected by the GitHub status API
         */
        private GitHubStatus(final String statusLabel) {
            this.statusLabel = statusLabel;
        }

        /*
         * (non-Javadoc)
         *
         * @see java.lang.Enum#toString()
         */
        @Override
        public String toString() {
            return statusLabel;
        }
    }

    public Context getContext() {
        return context;
    }

    /**
     * Resolves configuration and the current commit SHA, and prepares the HTTP client.
     * Missing required properties set {@link #initError} so later events are ignored
     * instead of failing the build.
     *
     * @param context the Maven spy context; its user properties drive configuration
     * @throws Exception if the git subprocess cannot be started or interrupted
     */
    @Override
    public void init(Context context) throws Exception {
        super.init(context);
        this.context = context;
        String configFile = "m2github.json";
        String githubEndpointPrefix = "https://api.github.com/";
        String githubRepo;
        String sha;
        Properties userProperties = (Properties) context.getData().get("userProperties");
        if (userProperties.getProperty("m2github.configFile") != null) {
            configFile = userProperties.getProperty("m2github.configFile");
        }
        try {
            FileReader fileReader = new FileReader(configFile);
            logger.info(" ** m2github - Using config file " + configFile);
            JsonReader reader = new JsonReader(fileReader);
            try {
                Gson gson = new GsonBuilder().create();
                spyConfig = gson.fromJson(reader, SpyConfig.class);
            } finally {
                // Closing the JsonReader also closes the underlying FileReader;
                // the original code leaked both handles.
                reader.close();
            }
        } catch (Exception e1) {
            // The config file is optional; absence simply means no mapping/ignore rules.
            logger.info(" ** m2github - No config file found");
        }
        githubRepo = userProperties.getProperty("m2github.repo");
        if (githubRepo == null) {
            logger.error("m2github - Missing property m2github.repo");
            initError = true;
        }
        githubToken = userProperties.getProperty("m2github.token");
        if (githubToken == null) {
            logger.error("m2github - Missing property m2github.token");
            initError = true;
        }
        if (userProperties.getProperty("m2github.endpoint") != null) {
            githubEndpointPrefix = userProperties.getProperty("m2github.endpoint");
        }
        // The "/repos/..." path below supplies its own leading slash; strip a trailing
        // one here so we do not build URLs like "https://api.github.com//repos/...".
        if (githubEndpointPrefix.endsWith("/")) {
            githubEndpointPrefix = githubEndpointPrefix.substring(0, githubEndpointPrefix.length() - 1);
        }
        // Need the current SHA
        Process gitProcess = Runtime.getRuntime().exec("git rev-parse HEAD");
        gitProcess.waitFor();
        BufferedReader reader = new BufferedReader(new InputStreamReader(gitProcess.getInputStream()));
        StringBuffer shaStringBuffer = new StringBuffer();
        try {
            String line;
            while ((line = reader.readLine()) != null) {
                shaStringBuffer.append(line);
            }
        } finally {
            reader.close();
        }
        sha = shaStringBuffer.toString();
        // StringBuffer.toString() never returns null; empty output means git failed
        // (the original null check could never fire).
        if (sha.isEmpty()) {
            logger.error("m2github - Couldn't figure out SHA1");
            initError = true;
        }
        if (!initError) {
            try {
                githubEndpoint = githubEndpointPrefix + "/repos/" + githubRepo + "/statuses/" + sha;
                httpClient = HttpClients.createDefault();
                logger.info(" ** m2github - GitHub Event Spy succesfully initialized - Endpoint is " + githubEndpoint
                        + " ** ");
            } catch (Exception e) {
                // Never abort the whole build from a spy (the original called
                // System.exit(1) here); log and disable ourselves instead.
                logger.error(e.getMessage());
                e.printStackTrace();
                initError = true;
            }
        }
    }

    /** Logs a summary of the execution events that were processed during the build. */
    @Override
    public void close() throws Exception {
        super.close();
        /*
        // Displaying the event classes
        logger.info(" ** Event Classes received by m2github ** ");
        for (String className : eventClassNames.keySet()) {
        logger.info(" ** " + className + " : " + eventClassNames.get(className));
        }
        */
        logger.info(" ** Execution events received by m2github ** ");
        // Displaying execution events (class = org.apache.maven.execution.ExecutionEvent)
        for (String executionEvent : executionEventReceived) {
            logger.info(" ** " + executionEvent);
        }
    }

    /**
     * Counts every incoming event by class name, then dispatches build results and
     * execution events to the matching processor (unless initialization failed).
     *
     * @param event the spy event; may be of any type, nulls are ignored
     */
    @Override
    public void onEvent(Object event) throws Exception {
        // Capturing the various event types we receive so we can report on
        // those being ignored
        if (event != null) {
            String className = event.getClass().getName();
            Integer currentCount = eventClassNames.get(className);
            // Autoboxing replaces the deprecated new Integer(...) allocations.
            eventClassNames.put(className, currentCount == null ? 1 : currentCount + 1);
        }
        if (event != null && !initError) {
            if (event instanceof org.apache.maven.execution.DefaultMavenExecutionResult) {
                processExecutionResult((DefaultMavenExecutionResult) event);
            } else if (event instanceof org.apache.maven.execution.ExecutionEvent) {
                processExecutionEvent((ExecutionEvent) event);
            }
        }
    }

    /**
     * Sends a success/failure status for the overall build result of the current project.
     *
     * @param executionResult the Maven execution result carrying the build summary
     */
    protected void processExecutionResult(DefaultMavenExecutionResult executionResult) {
        BuildSummary bs = executionResult.getBuildSummary(executionResult.getProject());
        if (bs == null) {
            // No summary is available (e.g. the project was skipped); nothing to report.
            return;
        }
        String statusLabel = generateBuildSummaryName(bs);
        if (statusLabel == null) {
            // The project is listed as ignored in the spy configuration; the original
            // code sent a null label here, unlike processExecutionEvent which guards.
            return;
        }
        executionEventReceived.add(statusLabel);
        if (bs instanceof BuildSuccess) {
            sendStatus(statusLabel, "Duration : " + bs.getTime() + "ms", GitHubStatus.Success);
        } else if (bs instanceof BuildFailure) {
            sendStatus(statusLabel, "Duration : " + bs.getTime() + "ms", GitHubStatus.Failure);
        } else {
            logger.error("m2github - unknown status for " + statusLabel + " - " + bs.getClass().getName());
        }
    }

    /**
     * Maps mojo/project started/succeeded/failed events to pending/success/failure
     * statuses and sends them; other event types are ignored.
     *
     * @param executionEvent the lifecycle event to report
     */
    protected void processExecutionEvent(ExecutionEvent executionEvent) {
        String statusLabel = null;
        GitHubStatus statusType = null;
        switch (executionEvent.getType()) {
        case MojoStarted:
            statusLabel = generateMojoName(executionEvent);
            statusType = GitHubStatus.Pending;
            break;
        case MojoSucceeded:
            statusLabel = generateMojoName(executionEvent);
            statusType = GitHubStatus.Success;
            break;
        case MojoFailed:
            statusLabel = generateMojoName(executionEvent);
            statusType = GitHubStatus.Failure;
            break;
        case ProjectStarted:
            statusLabel = generateProjectName(executionEvent);
            statusType = GitHubStatus.Pending;
            break;
        case ProjectSucceeded:
            statusLabel = generateProjectName(executionEvent);
            statusType = GitHubStatus.Success;
            break;
        case ProjectFailed:
            statusLabel = generateProjectName(executionEvent);
            statusType = GitHubStatus.Failure;
            break;
        default:
            break;
        }
        // statusLabel is null both for unmapped event types and for ignored labels.
        if (statusLabel != null && statusType != null) {
            executionEventReceived.add(statusLabel);
            String message;
            if (statusType == GitHubStatus.Pending) {
                message = "Just Started";
            } else if (statusType == GitHubStatus.Failure) {
                message = "Ouch, Failure";
            } else if (statusType == GitHubStatus.Success) {
                message = "Oh, Sweet Success!";
            } else {
                message = "You should never really get this message. Please call me if you do.";
            }
            sendStatus(statusLabel, message, statusType);
        }
    }

    /**
     * Do the actual sending of the status to GitHub
     *
     * @param label
     *            The label of the status we want to send
     * @param message
     *            The message displayed on the status page
     * @param status
     *            The status (success | failure | pending)
     */
    protected void sendStatus(String label, String message, GitHubStatus status) {
        HttpPost httpPostRequest = new HttpPost(githubEndpoint);
        try {
            // NOTE(review): label/message are interpolated without JSON escaping;
            // labels containing quotes would break the payload. Current callers only
            // pass project/mojo names and fixed messages.
            String payload = String.format(
                    "{\"state\": \"%s\", \"target_url\": \"%s\", \"description\": \"%s\", \"context\": \"%s\"}",
                    status, "http://github.com", message, label);
            logger.debug(payload);
            StringEntity params = new StringEntity(payload);
            httpPostRequest.addHeader("content-type", "application/json");
            httpPostRequest.addHeader("Authorization", "token " + githubToken);
            RequestConfig requestConfig = RequestConfig.custom().setSocketTimeout(5000).setConnectTimeout(5000)
                    .setConnectionRequestTimeout(5000).build();
            httpPostRequest.setConfig(requestConfig);
            httpPostRequest.setEntity(params);
            HttpResponse response = httpClient.execute(httpPostRequest);
            if (response.getStatusLine().getStatusCode() >= 300) {
                logger.error(response.getStatusLine().toString());
            }
        } catch (Exception e) {
            // A failed status post must never fail the build; log and continue.
            logger.error(e.getMessage());
            e.printStackTrace();
        } finally {
            httpPostRequest.releaseConnection();
        }
    }

    /**
     * Generates a label for a Mojo related event
     *
     * @param executionEvent the mojo lifecycle event
     * @return the concatenation of the project, the mojo's group id, the mojo's
     *         artifact id and the goal; the configured mapping if one exists, or
     *         {@code null} when the label is configured as ignored
     */
    private String generateMojoName(ExecutionEvent executionEvent) {
        String mojoName = String.format("%s/%s/%s/%s", executionEvent.getProject().getName(), executionEvent.getMojoExecution()
                .getGroupId(), executionEvent.getMojoExecution().getArtifactId(), executionEvent.getMojoExecution()
                .getGoal());
        return applyConfig(mojoName);
    }

    /**
     * Generates a label for a project related event, applying the configured
     * mapping/ignore rules.
     *
     * @return the (possibly remapped) project name, or {@code null} when ignored
     */
    private String generateProjectName(ExecutionEvent executionEvent) {
        return applyConfig(executionEvent.getProject().getName());
    }

    /**
     * Generates a label for a build summary, applying the configured
     * mapping/ignore rules.
     *
     * @return the (possibly remapped) project name, or {@code null} when ignored
     */
    private String generateBuildSummaryName(BuildSummary bs) {
        return applyConfig(bs.getProject().getName());
    }

    /**
     * Shared mapping/ignore logic for all label generators (the three generate*
     * methods previously duplicated this block verbatim).
     *
     * @param name the raw label
     * @return {@code null} when ignored, the mapped label when a mapping exists,
     *         otherwise {@code name} unchanged
     */
    private String applyConfig(String name) {
        if (spyConfig != null) {
            if (spyConfig.isIgnored(name)) {
                return null;
            } else {
                String mapping = spyConfig.getMapping(name);
                return mapping == null ? name : mapping;
            }
        } else {
            return name;
        }
    }
}
| |
package cz.metacentrum.perun.dispatcher.jms;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.Session;

import org.hornetq.api.core.TransportConfiguration;
import org.hornetq.api.jms.HornetQJMSClient;
import org.hornetq.api.jms.JMSFactoryType;
import org.hornetq.core.remoting.impl.netty.NettyConnectorFactory;
import org.hornetq.core.remoting.impl.netty.TransportConstants;
import org.hornetq.jms.client.HornetQConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.task.TaskExecutor;

import cz.metacentrum.perun.dispatcher.exceptions.MessageFormatException;
import cz.metacentrum.perun.dispatcher.exceptions.PerunHornetQServerException;
import cz.metacentrum.perun.dispatcher.hornetq.PerunHornetQServer;
import cz.metacentrum.perun.dispatcher.processing.SmartMatcher;
import cz.metacentrum.perun.dispatcher.scheduling.PropagationMaintainer;
import cz.metacentrum.perun.dispatcher.scheduling.TaskScheduler;
import cz.metacentrum.perun.dispatcher.scheduling.TaskScheduler;
/**
*
* @author Michal Karm Babacek JavaDoc coming soon...
*
*/
@org.springframework.stereotype.Service(value = "systemQueueProcessor")
public class SystemQueueProcessor {
private final static Logger log = LoggerFactory
.getLogger(SystemQueueProcessor.class);
@Autowired
private Properties dispatcherPropertiesBean;
@Autowired
private DispatcherQueuePool dispatcherQueuePool;
@Autowired
private PerunHornetQServer perunHornetQServer;
@Autowired
private SmartMatcher smartMatcher;
@Autowired
private TaskExecutor taskExecutor;
private Session session = null;
@Autowired
private SystemQueueReceiver systemQueueReceiver;
@Autowired
private PropagationMaintainer propagationMaintainer;
private boolean processingMessages = false;
private boolean systemQueueInitiated = false;
private ConnectionFactory cf;
private Connection connection;
public void startProcessingSystemMessages() {
connection = null;
try {
// Step 2. Instantiate the TransportConfiguration object which
// contains the knowledge of what transport to use,
// The server port etc.
log.debug("Creating transport configuration...");
Map<String, Object> connectionParams = new HashMap<String, Object>();
if (log.isDebugEnabled()) {
log.debug("Gonna connect to the host["
+ dispatcherPropertiesBean.getProperty("dispatcher.ip.address")
+ "] on port["
+ dispatcherPropertiesBean.getProperty("dispatcher.port")
+ "]...");
}
connectionParams.put(TransportConstants.PORT_PROP_NAME, Integer
.parseInt(dispatcherPropertiesBean.getProperty("dispatcher.port")));
connectionParams.put(TransportConstants.HOST_PROP_NAME,
dispatcherPropertiesBean.getProperty("dispatcher.ip.address"));
TransportConfiguration transportConfiguration = new TransportConfiguration(
NettyConnectorFactory.class.getName(), connectionParams);
// Step 3 Directly instantiate the JMS ConnectionFactory object
// using that TransportConfiguration
log.debug("Creating connection factory...");
cf = (ConnectionFactory) HornetQJMSClient.createConnectionFactoryWithoutHA(JMSFactoryType.CF,
transportConfiguration);
((HornetQConnectionFactory)cf).setUseGlobalPools(false);
// Step 4.Create a JMS Connection
log.debug("Creating connection...");
connection = cf.createConnection();
// Step 5. Create a JMS Session
log.debug("Creating session...");
session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
// Step 10. Start the Connection
log.debug("Starting connection...");
connection.start();
if (processingMessages) {
systemQueueReceiver.stop();
}
systemQueueReceiver.setUp("systemQueue", session);
log.debug("Executor: taskExecutor.execute(systemQueueReceiver)...");
taskExecutor.execute(systemQueueReceiver);
log.debug("Initialization done.");
processingMessages = true;
} catch (JMSException e) {
// If unable to connect to the server...
log.error(
"Connection failed. \nThis is weird...are you sure that the Perun-Dispatcher is running on host["
+ dispatcherPropertiesBean
.getProperty("dispatcher.ip.address")
+ "] on port["
+ dispatcherPropertiesBean.getProperty("dispatcher.port")
+ "] ? \nSee: perun-dispatcher.properties. We gonna wait 5 sec and try again...",
e);
throw new RuntimeException(e);
} catch (Exception e) {
log.error(e.toString(), e);
}
}
public void stopProcessingSystemMessages() {
if (processingMessages && systemQueueReceiver != null) {
systemQueueReceiver.stop();
try {
connection.stop();
connection.close();
((HornetQConnectionFactory)cf).close();
} catch (JMSException e) {
log.error("Error closing JMS client connection: ", e.toString());
}
}
}
public void closeSystemQueue() {
throw new UnsupportedOperationException("Sorry...");
/*
* // TODO: Send "we gonna close the bar" message to all clients... if
* (systemQueueInitiated) { stopProcessingSystemMessages(); try {
* connection.close(); } catch (JMSException e) {
* log.error(e.toString(),e.getCause()); } }
*/
}
public boolean isProcessingMessages() {
return processingMessages;
}
public boolean isSystemQueueInitiated() {
return systemQueueInitiated;
}
protected void processDispatcherQueueAndMatchingRule(
String systemMessagetext) throws PerunHornetQServerException,
MessageFormatException {
if (perunHornetQServer.isServerRunning()
&& perunHornetQServer.getJMSServerManager() != null) {
if (log.isDebugEnabled()) {
log.debug("Processing system message:" + systemMessagetext);
}
// Expected messages:
// Register message
// register:x
// where x is an Integer that represents Engine's ID in the Perun
// DB.
// Good bye message
// goodbye:x
// where x is an Integer that represents Engine's ID in the Perun
// DB.
// Task status message
// task:x:y:status
// where x is an Integer that represents Engine's ID in the Perun
// y is an Integer that represents task ID
// status is string representation of task status
if(null == systemMessagetext) {
throw new MessageFormatException("Client (Perun-Engine) sent empty message");
}
String[] clientIDsplitter = systemMessagetext.split(":", 3);
if(clientIDsplitter.length < 2) {
throw new MessageFormatException(
"Client (Perun-Engine) sent a malformed message ["
+ systemMessagetext + "]");
}
int clientID = 0;
try {
clientID = Integer.parseInt(clientIDsplitter[1]);
} catch (NumberFormatException e) {
throw new MessageFormatException(
"Client (Perun-Engine) sent a malformed message ["
+ systemMessagetext + "]", e);
}
if (clientIDsplitter[0].equalsIgnoreCase("register")) {
// Do we have this queue already?
DispatcherQueue dispatcherQueue = dispatcherQueuePool.getDispatcherQueueByClient(clientID);
if (dispatcherQueue != null) {
// Yes, so we just reload matching rules...
smartMatcher.reloadRulesFromDBForEngine(clientID);
// ...and close all tasks that could have been running there
propagationMaintainer.closeTasksForEngine(clientID);
} else {
// No, we have to create the whole JMS queue and load
// matching
// rules...
createDispatcherQueueForClient(clientID);
}
} else if (clientIDsplitter[0].equalsIgnoreCase("goodbye")) {
// engine going down, should mark all tasks as failed
propagationMaintainer.closeTasksForEngine(clientID);
dispatcherQueuePool.removeDispatcherQueue(clientID);
} else if (clientIDsplitter[0].equalsIgnoreCase("task")) {
clientIDsplitter = systemMessagetext.split(":", 5);
log.debug("TESTSTR -> got task message {}", clientIDsplitter);
if(clientIDsplitter.length < 5) {
throw new MessageFormatException(
"Client (Perun-Engine) sent a malformed message ["
+ systemMessagetext + "]");
}
propagationMaintainer.onTaskStatusChange(Integer.parseInt(clientIDsplitter[2]),
clientIDsplitter[3], clientIDsplitter[4]);
} else if (clientIDsplitter[0].equalsIgnoreCase("taskresult")) {
log.debug("Taskresult received");
clientIDsplitter = systemMessagetext.split(":", 3);
// destination complete for task
if(clientIDsplitter.length < 3) {
throw new MessageFormatException(
"Client (Perun-Engine) sent a malformed message ["
+ systemMessagetext + "]");
}
propagationMaintainer.onTaskDestinationComplete(
clientID,
clientIDsplitter[2]
);
} else {
throw new MessageFormatException(
"Client (Perun-Engine) sent a malformed message ["
+ systemMessagetext + "]");
}
} else {
throw new PerunHornetQServerException(
"It looks like the HornetQ server is not running or JMSServerManager is fucked up...");
}
}
public void createDispatcherQueuesForClients(Set<Integer> clientIDs)
throws PerunHornetQServerException {
if (perunHornetQServer.isServerRunning()
&& perunHornetQServer.getJMSServerManager() != null) {
for (Integer clientID : clientIDs) {
createDispatcherQueueForClient(clientID);
}
} else {
throw new PerunHornetQServerException(
"It looks like the HornetQ server is not running or JMSServerManager is fucked up...");
}
}
private void createDispatcherQueueForClient(Integer clientID) {
// Create a new queue
String queueName = "queue" + clientID;
try {
perunHornetQServer.getJMSServerManager().createQueue(false,
queueName, null, false, new String[0]);
} catch (Exception e) {
log.error(e.toString(), e);
}
DispatcherQueue dispatcherQueue = new DispatcherQueue(clientID,
queueName, session);
// Rules
smartMatcher.reloadRulesFromDBForEngine(clientID);
// Add to the queue
dispatcherQueuePool.addDispatcherQueue(dispatcherQueue);
}
public void setDispatcherQueuePool(DispatcherQueuePool dispatcherQueuePool) {
this.dispatcherQueuePool = dispatcherQueuePool;
}
public void setDispatcherPropertiesBean(Properties propertiesBean) {
this.dispatcherPropertiesBean = propertiesBean;
}
public void setPerunHornetQServer(PerunHornetQServer perunHornetQServer) {
this.perunHornetQServer = perunHornetQServer;
}
public void setSmartMatcher(SmartMatcher smartMatcher) {
this.smartMatcher = smartMatcher;
}
public void setTaskExecutor(TaskExecutor taskExecutor) {
this.taskExecutor = taskExecutor;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.security.visibility;
import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
@Category({SecurityTests.class, MediumTests.class})
// Integration test for the default ScanLabelGenerator stack: a scan's requested
// authorizations are filtered down to the labels the calling user actually holds.
public class TestDefaultScanLabelGeneratorStack {
  public static final String CONFIDENTIAL = "confidential";
  private static final String SECRET = "secret";
  public static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final byte[] ROW_1 = Bytes.toBytes("row1");
  private final static byte[] CF = Bytes.toBytes("f");
  private final static byte[] Q1 = Bytes.toBytes("q1");
  private final static byte[] Q2 = Bytes.toBytes("q2");
  private final static byte[] Q3 = Bytes.toBytes("q3");
  private final static byte[] value1 = Bytes.toBytes("value1");
  private final static byte[] value2 = Bytes.toBytes("value2");
  private final static byte[] value3 = Bytes.toBytes("value3");
  public static Configuration conf;
  @Rule
  public final TestName TEST_NAME = new TestName();
  // "admin" is configured as superuser below; "test" only holds CONFIDENTIAL.
  public static User SUPERUSER;
  public static User TESTUSER;
  /**
   * Starts a one-node mini cluster with visibility labels enabled, creates the
   * SECRET and CONFIDENTIAL labels, and grants CONFIDENTIAL to the test user.
   */
  @BeforeClass
  public static void setupBeforeClass() throws Exception {
    // setup configuration
    conf = TEST_UTIL.getConfiguration();
    VisibilityTestUtil.enableVisiblityLabels(conf);
    // Not setting any SLG class. This means to use the default behavior.
    conf.set("hbase.superuser", "admin");
    TEST_UTIL.startMiniCluster(1);
    SUPERUSER = User.createUserForTesting(conf, "admin", new String[] { "supergroup" });
    TESTUSER = User.createUserForTesting(conf, "test", new String[] { });
    // Wait for the labels table to become available
    TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000);
    // Set up for the test
    SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(conf)) {
          VisibilityClient.addLabels(conn, new String[] { SECRET, CONFIDENTIAL });
          VisibilityClient.setAuths(conn, new String[] { CONFIDENTIAL }, TESTUSER.getShortName());
        } catch (Throwable t) {
          throw new IOException(t);
        }
        return null;
      }
    });
  }
  /**
   * Writes one row with three cells (SECRET, CONFIDENTIAL, unlabeled) and then
   * scans it as the superuser (sees all three) and as the test user (never sees
   * the SECRET cell, regardless of which authorizations the scan requests).
   */
  @Test
  public void testDefaultScanLabelGeneratorStack() throws Exception {
    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
    // Load the data: q1=SECRET, q2=CONFIDENTIAL, q3=no label.
    SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(conf);
            Table table = TEST_UTIL.createTable(tableName, CF)) {
          Put put = new Put(ROW_1);
          put.add(CF, Q1, HConstants.LATEST_TIMESTAMP, value1);
          put.setCellVisibility(new CellVisibility(SECRET));
          table.put(put);
          put = new Put(ROW_1);
          put.add(CF, Q2, HConstants.LATEST_TIMESTAMP, value2);
          put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
          table.put(put);
          put = new Put(ROW_1);
          put.add(CF, Q3, HConstants.LATEST_TIMESTAMP, value3);
          table.put(put);
          return null;
        }
      }
    });
    // Test that super user can see all the cells.
    SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(conf);
            Table table = connection.getTable(tableName)) {
          Scan s = new Scan();
          ResultScanner scanner = table.getScanner(s);
          Result[] next = scanner.next(1);
          // Test that super user can see all the cells.
          assertTrue(next.length == 1);
          CellScanner cellScanner = next[0].cellScanner();
          cellScanner.advance();
          Cell current = cellScanner.current();
          // Cells come back in qualifier order: q1, then q2, then q3.
          assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
              current.getRowLength(), ROW_1, 0, ROW_1.length));
          assertTrue(Bytes.equals(current.getQualifier(), Q1));
          assertTrue(Bytes.equals(current.getValue(), value1));
          cellScanner.advance();
          current = cellScanner.current();
          assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
              current.getRowLength(), ROW_1, 0, ROW_1.length));
          assertTrue(Bytes.equals(current.getQualifier(), Q2));
          assertTrue(Bytes.equals(current.getValue(), value2));
          cellScanner.advance();
          current = cellScanner.current();
          assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
              current.getRowLength(), ROW_1, 0, ROW_1.length));
          assertTrue(Bytes.equals(current.getQualifier(), Q3));
          assertTrue(Bytes.equals(current.getValue(), value3));
          return null;
        }
      }
    });
    TESTUSER.runAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(conf);
            Table table = connection.getTable(tableName)) {
          // Test scan with no auth attribute
          Scan s = new Scan();
          ResultScanner scanner = table.getScanner(s);
          Result[] next = scanner.next(1);
          assertTrue(next.length == 1);
          CellScanner cellScanner = next[0].cellScanner();
          cellScanner.advance();
          Cell current = cellScanner.current();
          // test user can see value2 (CONFIDENTIAL) and value3 (no label)
          assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
              current.getRowLength(), ROW_1, 0, ROW_1.length));
          assertTrue(Bytes.equals(current.getQualifier(), Q2));
          assertTrue(Bytes.equals(current.getValue(), value2));
          cellScanner.advance();
          current = cellScanner.current();
          // test user can see value2 (CONFIDENTIAL) and value3 (no label)
          assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
              current.getRowLength(), ROW_1, 0, ROW_1.length));
          assertTrue(Bytes.equals(current.getQualifier(), Q3));
          assertTrue(Bytes.equals(current.getValue(), value3));
          // Test scan with correct auth attribute for test user
          Scan s1 = new Scan();
          // test user is entitled to 'CONFIDENTIAL'.
          // If we set both labels in the scan, 'SECRET' will be dropped by the SLGs.
          s1.setAuthorizations(new Authorizations(new String[] { SECRET, CONFIDENTIAL }));
          ResultScanner scanner1 = table.getScanner(s1);
          Result[] next1 = scanner1.next(1);
          assertTrue(next1.length == 1);
          CellScanner cellScanner1 = next1[0].cellScanner();
          cellScanner1.advance();
          Cell current1 = cellScanner1.current();
          // test user can see value2 (CONFIDENTIAL) and value3 (no label)
          assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
              current1.getRowLength(), ROW_1, 0, ROW_1.length));
          assertTrue(Bytes.equals(current1.getQualifier(), Q2));
          assertTrue(Bytes.equals(current1.getValue(), value2));
          cellScanner1.advance();
          current1 = cellScanner1.current();
          // test user can see value2 (CONFIDENTIAL) and value3 (no label)
          assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
              current1.getRowLength(), ROW_1, 0, ROW_1.length));
          assertTrue(Bytes.equals(current1.getQualifier(), Q3));
          assertTrue(Bytes.equals(current1.getValue(), value3));
          // Test scan with incorrect auth attribute for test user
          Scan s2 = new Scan();
          // test user is entitled to 'CONFIDENTIAL'.
          // If we set 'SECRET', it will be dropped by the SLGs.
          s2.setAuthorizations(new Authorizations(new String[] { SECRET }));
          ResultScanner scanner2 = table.getScanner(s2);
          Result next2 = scanner2.next();
          CellScanner cellScanner2 = next2.cellScanner();
          cellScanner2.advance();
          Cell current2 = cellScanner2.current();
          // This scan will only see value3 (no label)
          assertTrue(Bytes.equals(current2.getRowArray(), current2.getRowOffset(),
              current2.getRowLength(), ROW_1, 0, ROW_1.length));
          assertTrue(Bytes.equals(current2.getQualifier(), Q3));
          assertTrue(Bytes.equals(current2.getValue(), value3));
          assertFalse(cellScanner2.advance());
          return null;
        }
      }
    });
  }
  /** Shuts down the mini cluster started in {@link #setupBeforeClass()}. */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.ssh;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import com.trilead.ssh2.Session;
/**
* Write commands to SSH *
*
* @author Samatar
* @since 03-Juin-2008
*
*/
public class SSH extends BaseStep implements StepInterface {
private static Class<?> PKG = SSHMeta.class; // for i18n purposes, needed by Translator2!!
private SSHMeta meta;
private SSHData data;
/**
 * Standard Kettle step constructor; all wiring is delegated to {@link BaseStep}.
 *
 * @param stepMeta the step definition from the transformation
 * @param stepDataInterface runtime data holder for this step copy
 * @param copyNr copy number of this step instance
 * @param transMeta metadata of the owning transformation
 * @param trans the running transformation
 */
public SSH( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) {
  super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
}
@Override
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
meta = (SSHMeta) smi;
data = (SSHData) sdi;
Object[] row;
if ( meta.isDynamicCommand() ) {
row = getRow();
if ( row == null ) {
setOutputDone();
return false;
}
if ( first ) {
first = false;
data.outputRowMeta = getInputRowMeta().clone();
data.nrInputFields = data.outputRowMeta.size();
meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
data.nrOutputFields = data.outputRowMeta.size();
// Check if commands field is provided
if ( meta.isDynamicCommand() ) {
if ( Utils.isEmpty( meta.getcommandfieldname() ) ) {
throw new KettleException( BaseMessages.getString( PKG, "SSH.Error.CommandFieldMissing" ) );
}
// cache the position of the source filename field
data.indexOfCommand = data.outputRowMeta.indexOfValue( meta.getcommandfieldname() );
if ( data.indexOfCommand < 0 ) {
// The field is unreachable !
throw new KettleException( BaseMessages.getString( PKG, "SSH.Exception.CouldnotFindField", meta
.getcommandfieldname() ) );
}
}
}
} else {
if ( !data.wroteOneRow ) {
row = new Object[] {}; // empty row
incrementLinesRead();
data.wroteOneRow = true;
if ( first ) {
first = false;
data.outputRowMeta = new RowMeta();
data.nrInputFields = 0;
meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
data.nrOutputFields = data.outputRowMeta.size();
data.commands = environmentSubstitute( meta.getCommand() );
}
} else {
setOutputDone(); // signal end to receiver(s)
return false;
}
}
RowMetaInterface imeta = getInputRowMeta();
if ( imeta == null ) {
imeta = new RowMeta();
this.setInputRowMeta( imeta );
}
// Reserve room
Object[] rowData = new Object[data.nrOutputFields];
for ( int i = 0; i < data.nrInputFields; i++ ) {
rowData[i] = row[i]; // no data is changed, clone is not needed here.
}
int index = data.nrInputFields;
Session session = null;
try {
if ( meta.isDynamicCommand() ) {
// get commands
data.commands = data.outputRowMeta.getString( row, data.indexOfCommand );
if ( Utils.isEmpty( data.commands ) ) {
throw new KettleException( BaseMessages.getString( PKG, "SSH.Error.MessageEmpty" ) );
}
}
// Open a session
session = data.conn.openSession();
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "SSH.Log.SessionOpened" ) );
}
// execute commands
if ( log.isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "SSH.Log.RunningCommand", data.commands ) );
}
session.execCommand( data.commands );
// Read Stdout, Sterr and exitStatus
SessionResult sessionresult = new SessionResult( session );
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "SSH.Log.CommandRunnedCommand", data.commands, sessionresult
.getStdOut(), sessionresult.getStdErr() ) );
}
// Add stdout to output
rowData[index++] = sessionresult.getStd();
if ( !Utils.isEmpty( data.stdTypeField ) ) {
// Add stdtype to output
rowData[index++] = sessionresult.isStdTypeErr();
}
if ( log.isRowLevel() ) {
logRowlevel( BaseMessages.getString( PKG, "SSH.Log.OutputLine", data.outputRowMeta.getString( rowData ) ) );
}
putRow( data.outputRowMeta, rowData );
if ( checkFeedback( getLinesRead() ) ) {
if ( log.isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "SSH.LineNumber", "" + getLinesRead() ) );
}
}
} catch ( Exception e ) {
boolean sendToErrorRow = false;
String errorMessage = null;
if ( getStepMeta().isDoingErrorHandling() ) {
sendToErrorRow = true;
errorMessage = e.toString();
} else {
logError( BaseMessages.getString( PKG, "SSH.ErrorInStepRunning" ) + e.getMessage() );
setErrors( 1 );
stopAll();
setOutputDone(); // signal end to receiver(s)
return false;
}
if ( sendToErrorRow ) {
// Simply add this row to the error row
putError( getInputRowMeta(), row, 1, errorMessage, null, "SSH001" );
}
} finally {
if ( session != null ) {
session.close();
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "SSH.Log.SessionClosed" ) );
}
}
}
return true;
}
@Override
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
meta = (SSHMeta) smi;
data = (SSHData) sdi;
if ( super.init( smi, sdi ) ) {
String servername = environmentSubstitute( meta.getServerName() );
int nrPort = Const.toInt( environmentSubstitute( meta.getPort() ), 22 );
String username = environmentSubstitute( meta.getuserName() );
String password = Utils.resolvePassword( variables, meta.getpassword() );
String keyFilename = environmentSubstitute( meta.getKeyFileName() );
String passphrase = environmentSubstitute( meta.getPassphrase() );
int timeOut = Const.toInt( environmentSubstitute( meta.getTimeOut() ), 0 );
String proxyhost = environmentSubstitute( meta.getProxyHost() );
int proxyport = Const.toInt( environmentSubstitute( meta.getProxyPort() ), 0 );
String proxyusername = environmentSubstitute( meta.getProxyUsername() );
String proxypassword = environmentSubstitute( meta.getProxyPassword() );
// Check target server
if ( Utils.isEmpty( servername ) ) {
logError( BaseMessages.getString( PKG, "SSH.MissingServerName" ) );
}
// Check if username field is provided
if ( Utils.isEmpty( meta.getuserName() ) ) {
logError( BaseMessages.getString( PKG, "SSH.Error.UserNamedMissing" ) );
return false;
}
// Get output fields
data.stdOutField = environmentSubstitute( meta.getStdOutFieldName() );
if ( Utils.isEmpty( data.stdOutField ) ) {
logError( BaseMessages.getString( PKG, "SSH.Error.StdOutFieldNameMissing" ) );
return false;
}
data.stdTypeField = environmentSubstitute( meta.getStdErrFieldName() );
try {
// Open connection
data.conn =
SSHData.OpenConnection(
servername, nrPort, username, password, meta.isusePrivateKey(), keyFilename, passphrase, timeOut,
this, proxyhost, proxyport, proxyusername, proxypassword );
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "SSH.Log.ConnectionOpened" ) );
}
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "SSH.Error.OpeningConnection", e.getMessage() ) );
return false;
}
return true;
}
return false;
}
@Override
public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
meta = (SSHMeta) smi;
data = (SSHData) sdi;
if ( data.conn != null ) {
data.conn.close();
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "SSH.Log.ConnectionClosed" ) );
}
}
super.dispose( smi, sdi );
}
}
| |
package net.drewke.tdme.engine.fileio.models;
import java.io.IOException;
import java.io.OutputStream;
import net.drewke.tdme.engine.model.Animation;
import net.drewke.tdme.engine.model.Face;
import net.drewke.tdme.engine.model.FacesEntity;
import net.drewke.tdme.engine.model.Group;
import net.drewke.tdme.engine.model.Joint;
import net.drewke.tdme.engine.model.JointWeight;
import net.drewke.tdme.engine.model.Material;
import net.drewke.tdme.engine.model.Model;
import net.drewke.tdme.engine.model.Skinning;
import net.drewke.tdme.engine.model.TextureCoordinate;
import net.drewke.tdme.math.Vector3;
import net.drewke.tdme.os.FileSystem;
import net.drewke.tdme.utils.HashMap;
/**
* TDME model writer
* @author Andreas Drewke
* @version $Id$
*/
public class TMWriter {
/**
* TDME model format writer
* @param model
* @param path name
* @param file name
* @throws IOException
* @throws ModelIOException
*/
public static void write(Model model, String pathName, String fileName) throws IOException {
OutputStream os = null;
try {
os = FileSystem.getInstance().getOutputStream(pathName, fileName);
// version major.minor = 1.0
writeString(os, "TDME Model");
writeByte(os, (byte)1);
writeByte(os, (byte)0);
writeByte(os, (byte)0);
// meta data
writeString(os, model.getName());
// up vector, rotation order, bounding box, ...
writeString(os, model.getUpVector().toString());
writeString(os, model.getRotationOrder().toString());
writeFloatArray(os, model.getBoundingBox().getMin().getArray());
writeFloatArray(os, model.getBoundingBox().getMax().getArray());
writeFloat(os, model.getFPS());
writeFloatArray(os, model.getImportTransformationsMatrix().getArray());
// materials
writeInt(os, model.getMaterials().size());
for (Material material: model.getMaterials().getValuesIterator()) {
writeMaterial(os, material);
}
// sub groups
writeSubGroups(os, model.getSubGroups());
} catch (IOException ioe) {
throw ioe;
} finally {
if (os != null) {
os.flush();
os.close();
}
}
}
/**
* Writes a boolean to output stream
* @param output stream
* @param boolean
* @throws IOException
*/
private static void writeBoolean(OutputStream os, boolean b) throws IOException {
os.write((byte)(b == true?1:0));
}
/**
* Writes a byte to output stream
* @param output stream
* @param byte
* @throws IOException
*/
private static void writeByte(OutputStream os, byte b) throws IOException {
os.write(b);
}
/**
* Writes a integer to output stream
* @param output stream
* @param int
* @throws IOException
*/
private static void writeInt(OutputStream os, int i) throws IOException {
os.write((i >> 24) & 0xff);
os.write((i >> 16) & 0xff);
os.write((i >> 8) & 0xff);
os.write((i >> 0) & 0xff);
}
/**
* Writes a float to output stream
* @param output stream
* @param float
* @throws IOException
*/
private static void writeFloat(OutputStream os, float f) throws IOException {
writeInt(os, Float.floatToIntBits(f));
}
/**
* Writes a string to output stream
* @param output stream
* @param string
* @throws IOException
*/
private static void writeString(OutputStream os, String s) throws IOException {
if (s == null) {
writeBoolean(os, false);
} else {
writeBoolean(os, true);
writeInt(os, s.length());
for (int i = 0; i < s.length(); i++) {
writeByte(os, (byte)s.charAt(i));
}
}
}
/**
* Writes a float array to output stream
* @param output stream
* @param float array
* @throws IOException
*/
private static void writeFloatArray(OutputStream os, float[] f) throws IOException {
writeInt(os, f.length);
for (int i = 0; i < f.length; i++) {
writeFloat(os, f[i]);
}
}
/**
* Write material
* @param output stream
* @param material
* @throws IOException
*/
private static void writeMaterial(OutputStream os, Material m) throws IOException {
writeString(os, m.getId());
writeFloatArray(os, m.getAmbientColor().getArray());
writeFloatArray(os, m.getDiffuseColor().getArray());
writeFloatArray(os, m.getSpecularColor().getArray());
writeFloatArray(os, m.getEmissionColor().getArray());
writeFloat(os, m.getShininess());
writeString(os, m.getDiffuseTexturePathName());
writeString(os, m.getDiffuseTextureFileName());
writeString(os, m.getSpecularTexturePathName());
writeString(os, m.getSpecularTextureFileName());
writeString(os, m.getNormalTexturePathName());
writeString(os, m.getNormalTextureFileName());
writeString(os, m.getDisplacementTexturePathName());
writeString(os, m.getDisplacementTextureFileName());
}
/**
* Write vertices to output stream
* @param output stream
* @param vertices
* @throws IOException
*/
private static void writeVertices(OutputStream os, Vector3[] v) throws IOException {
if (v == null) {
writeBoolean(os, false);
} else {
writeBoolean(os, true);
writeInt(os, v.length);
for (int i = 0; i < v.length; i++) {
writeFloatArray(os, v[i].getArray());
}
}
}
/**
* Write texture coordinates to output stream
* @param output stream
* @param texture coordinates
* @throws IOException
*/
private static void writeTextureCoordinates(OutputStream os, TextureCoordinate[] tc) throws IOException {
if (tc == null) {
writeBoolean(os, false);
} else {
writeBoolean(os, true);
writeInt(os, tc.length);
for (int i = 0; i < tc.length; i++) {
writeFloatArray(os, tc[i].getArray());
}
}
}
/**
* Write indices to output stream
* @param output stream
* @param indices
* @throws IOException
*/
private static void writeIndices(OutputStream os, int[] indices) throws IOException {
if (indices == null) {
writeBoolean(os, false);
} else {
writeBoolean(os, true);
writeInt(os, indices.length);
for (int i = 0; i < indices.length; i++) {
writeInt(os, indices[i]);
}
}
}
/**
* Write animation to output stream
* @param output stream
* @param animation
* @throws IOException
*/
private static void writeAnimation(OutputStream os, Animation a) throws IOException {
if (a == null) {
writeBoolean(os, false);
} else {
writeBoolean(os, true);
writeInt(os, a.getTransformationsMatrices().length);
for (int i = 0; i < a.getTransformationsMatrices().length; i++) {
writeFloatArray(os, a.getTransformationsMatrices()[i].getArray());
}
}
}
/**
* Write faces entities to output stream
* @param output stream
* @param faces entities
* @throws IOException
*/
private static void writeFacesEntities(OutputStream os, FacesEntity[] facesEntities) throws IOException {
writeInt(os, facesEntities.length);
for (int i = 0; i < facesEntities.length; i++) {
FacesEntity fe = facesEntities[i];
writeString(os, fe.getId());
if (fe.getMaterial() == null) {
writeBoolean(os, false);
} else {
writeBoolean(os, true);
writeString(os, fe.getMaterial().getId());
}
writeInt(os, fe.getFaces().length);
for (int j = 0; j < fe.getFaces().length; j++) {
Face f = fe.getFaces()[j];
writeIndices(os, f.getVertexIndices());
writeIndices(os, f.getNormalIndices());
writeIndices(os, f.getTextureCoordinateIndices());
writeIndices(os, f.getTangentIndices());
writeIndices(os, f.getBitangentIndices());
}
}
}
/**
* Write skinning joint
* @param output stream
* @param joint
* @throws IOException
*/
private static void writeSkinningJoint(OutputStream os, Joint joint) throws IOException {
writeString(os, joint.getGroupId());
writeFloatArray(os, joint.getBindMatrix().getArray());
}
/**
* Write skinning joint weight
* @param output stream
* @param joint
* @throws IOException
*/
private static void writeSkinningJointWeight(OutputStream os, JointWeight jointWeight) throws IOException {
writeInt(os, jointWeight.getJointIndex());
writeInt(os, jointWeight.getWeightIndex());
}
/**
* Write skinning to output stream
* @param output stream
* @param skinning
* @throws IOException
*/
private static void writeSkinning(OutputStream os, Skinning skinning) throws IOException {
if (skinning == null) {
writeBoolean(os, false);
} else {
writeBoolean(os, true);
writeFloatArray(os, skinning.getWeights());
writeInt(os, skinning.getJoints().length);
for (int i = 0; i < skinning.getJoints().length; i++) {
writeSkinningJoint(os, skinning.getJoints()[i]);
}
writeInt(os, skinning.getVerticesJointsWeights().length);
for (int i = 0; i < skinning.getVerticesJointsWeights().length; i++) {
writeInt(os, skinning.getVerticesJointsWeights()[i].length);
for (int j = 0; j < skinning.getVerticesJointsWeights()[i].length; j++) {
writeSkinningJointWeight(os, skinning.getVerticesJointsWeights()[i][j]);
}
}
}
}
/**
* Write sub groups
* @param output stream
* @param sub groups
* @throws IOException
*/
private static void writeSubGroups(OutputStream os, HashMap<String, Group> subGroups) throws IOException {
writeInt(os, subGroups.size());
for (Group subGroup: subGroups.getValuesIterator()) {
writeGroup(os, subGroup);
}
}
/**
* Write group to output stream
* @param output stream
* @param group
* @throws IOException
*/
private static void writeGroup(OutputStream os, Group g) throws IOException {
writeString(os, g.getId());
writeString(os, g.getName());
writeBoolean(os, g.isJoint());
writeFloatArray(os, g.getTransformationsMatrix().getArray());
writeVertices(os, g.getVertices());
writeVertices(os, g.getNormals());
writeTextureCoordinates(os, g.getTextureCoordinates());
writeVertices(os, g.getTangents());
writeVertices(os, g.getBitangents());
writeAnimation(os, g.getAnimation());
writeSkinning(os, g.getSkinning());
writeFacesEntities(os, g.getFacesEntities());
writeSubGroups(os, g.getSubGroups());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.server.rest.profile;
import static com.fasterxml.jackson.databind.SerializationFeature.INDENT_OUTPUT;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.proto.UserBitShared.MajorFragmentProfile;
import org.apache.drill.exec.proto.UserBitShared.MinorFragmentProfile;
import org.apache.drill.exec.proto.UserBitShared.OperatorProfile;
import org.apache.drill.exec.proto.UserBitShared.QueryProfile;
import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState;
import org.apache.drill.exec.proto.helper.QueryIdHelper;
import org.apache.drill.exec.server.options.OptionList;
import org.apache.drill.exec.server.options.OptionValue;
import org.apache.drill.exec.server.rest.WebServer;
import org.apache.drill.exec.server.rest.WebUtils;
import org.apache.drill.shaded.guava.com.google.common.base.CaseFormat;
import com.fasterxml.jackson.databind.ObjectMapper;
import javax.servlet.http.HttpServletRequest;
/**
* Wrapper class for a {@link #profile query profile}, so it to be presented through web UI.
*/
public class ProfileWrapper {
private static final String ESTIMATED_LABEL = " (Estimated)";
private static final String NOT_AVAILABLE_LABEL = "Not Available";
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ProfileWrapper.class);
private static final ObjectMapper mapper = new ObjectMapper().enable(INDENT_OUTPUT);
private final QueryProfile profile;
private final String id;
private final List<FragmentWrapper> fragmentProfiles;
private final List<OperatorWrapper> operatorProfiles;
private final Map<String, Long> majorFragmentTallyMap;
private final long majorFragmentTallyTotal;
private final OptionList options;
private final boolean onlyImpersonationEnabled;
private Map<String, String> physicalOperatorMap;
private final String noProgressWarningThreshold;
private final int defaultAutoLimit;
private final boolean showEstimatedRows;
private final String csrfToken;
public ProfileWrapper(final QueryProfile profile, DrillConfig drillConfig, HttpServletRequest request) {
this.profile = profile;
this.id = profile.hasQueryId() ? profile.getQueryId() : QueryIdHelper.getQueryId(profile.getId());
this.defaultAutoLimit = drillConfig.getInt(ExecConstants.HTTP_WEB_CLIENT_RESULTSET_AUTOLIMIT_ROWS);
//Generating Operator Name map (DRILL-6140)
String profileTextPlan = profile.hasPlan()? profile.getPlan(): "";
generateOpMap(profileTextPlan);
csrfToken = WebUtils.getCsrfTokenFromHttpRequest(request);
final List<FragmentWrapper> fragmentProfiles = new ArrayList<>();
final List<MajorFragmentProfile> majors = new ArrayList<>(profile.getFragmentProfileList());
Collections.sort(majors, Comparators.majorId);
for (final MajorFragmentProfile major : majors) {
fragmentProfiles.add(new FragmentWrapper(major, profile.getStart(), drillConfig));
}
this.fragmentProfiles = fragmentProfiles;
this.majorFragmentTallyMap = new HashMap<>(majors.size());
this.majorFragmentTallyTotal = tallyMajorFragmentCost(majors);
final List<OperatorWrapper> ows = new ArrayList<>();
// temporary map to store (major_id, operator_id) -> [((op_profile, minor_id),minorFragHostname)]
final Map<ImmutablePair<Integer, Integer>, List<ImmutablePair<ImmutablePair<OperatorProfile, Integer>, String>>> opmap = new HashMap<>();
Collections.sort(majors, Comparators.majorId);
for (final MajorFragmentProfile major : majors) {
final List<MinorFragmentProfile> minors = new ArrayList<>(major.getMinorFragmentProfileList());
Collections.sort(minors, Comparators.minorId);
for (final MinorFragmentProfile minor : minors) {
String fragmentHostName = minor.getEndpoint().getAddress();
final List<OperatorProfile> ops = new ArrayList<>(minor.getOperatorProfileList());
Collections.sort(ops, Comparators.operatorId);
for (final OperatorProfile op : ops) {
final ImmutablePair<Integer, Integer> ip = new ImmutablePair<>(
major.getMajorFragmentId(), op.getOperatorId());
if (!opmap.containsKey(ip)) {
final List<ImmutablePair<ImmutablePair<OperatorProfile, Integer>, String>> l = new ArrayList<>();
opmap.put(ip, l);
}
opmap.get(ip).add(new ImmutablePair<>(
new ImmutablePair<>(op, minor.getMinorFragmentId()),
fragmentHostName));
}
}
}
final List<ImmutablePair<Integer, Integer>> keys = new ArrayList<>(opmap.keySet());
Collections.sort(keys);
for (final ImmutablePair<Integer, Integer> ip : keys) {
ows.add(new OperatorWrapper(ip.getLeft(), opmap.get(ip), physicalOperatorMap, drillConfig));
}
this.operatorProfiles = ows;
OptionList options;
try {
options = mapper.readValue(profile.getOptionsJson(), OptionList.class);
} catch (Exception e) {
logger.error("Unable to deserialize query options", e);
options = new OptionList();
}
this.options = options;
this.onlyImpersonationEnabled = WebServer.isOnlyImpersonationEnabled(drillConfig);
this.noProgressWarningThreshold = String.valueOf(drillConfig.getInt(ExecConstants.PROFILE_WARNING_PROGRESS_THRESHOLD));
this.showEstimatedRows = drillConfig.getBoolean(ExecConstants.PROFILE_STATISTICS_ESTIMATED_ROWS_SHOW);
}
private long tallyMajorFragmentCost(List<MajorFragmentProfile> majorFragments) {
long globalProcessNanos = 0L;
for (MajorFragmentProfile majorFP : majorFragments) {
String majorFragmentId = new OperatorPathBuilder().setMajor(majorFP).build();
long processNanos = 0L;
for (MinorFragmentProfile minorFP : majorFP.getMinorFragmentProfileList()) {
for (OperatorProfile op : minorFP.getOperatorProfileList()) {
processNanos += op.getProcessNanos();
}
}
majorFragmentTallyMap.put(majorFragmentId, processNanos);
globalProcessNanos += processNanos;
}
return globalProcessNanos;
}
public boolean hasAutoLimit() {
return profile.hasAutoLimit();
}
public int getAutoLimit() {
return profile.getAutoLimit();
}
public int getDefaultAutoLimit() {
return defaultAutoLimit;
}
public boolean hasError() {
return profile.hasError() && profile.getError() != null;
}
public QueryProfile getProfile() {
return profile;
}
public String getProfileDuration() {
return (new SimpleDurationFormat(profile.getStart(), profile.getEnd())).verbose();
}
public String getQueryId() {
return id;
}
public String getQueryStateDisplayName() {
return ProfileUtil.getQueryStateDisplayName(profile.getState());
}
public String getPlanningDuration() {
//Check if Planning End is known
if (profile.getPlanEnd() > 0L) {
return (new SimpleDurationFormat(profile.getStart(), profile.getPlanEnd())).verbose();
}
//Check if any fragments have started
if (profile.getFragmentProfileCount() > 0) {
//Init Planning End Time
long estimatedPlanEnd = Long.MAX_VALUE;
//Using Screen MajorFragment as reference
MajorFragmentProfile majorFrag0 = profile.getFragmentProfile(0);
//Searching for earliest starting fragment
for (MinorFragmentProfile fragmentWrapper : majorFrag0.getMinorFragmentProfileList()) {
long minorFragmentStart = fragmentWrapper.getStartTime();
if (minorFragmentStart > 0 && minorFragmentStart < estimatedPlanEnd) {
estimatedPlanEnd = minorFragmentStart;
}
}
//Provide estimated plan time
return (new SimpleDurationFormat(profile.getStart(), estimatedPlanEnd)).verbose() + ESTIMATED_LABEL;
}
//Unable to estimate/calculate Specific Time spent in Planning
return NOT_AVAILABLE_LABEL;
}
public String getQueuedDuration() {
//Check if State is ENQUEUED
if (profile.getState() == QueryState.ENQUEUED) {
return (new SimpleDurationFormat(profile.getPlanEnd(), System.currentTimeMillis())).verbose();
}
//Check if Queue Wait End is known
if (profile.getQueueWaitEnd() > 0L) {
return (new SimpleDurationFormat(profile.getPlanEnd(), profile.getQueueWaitEnd())).verbose();
}
//Unable to estimate/calculate Specific Time spent in Queue
return NOT_AVAILABLE_LABEL;
}
public String getExecutionDuration() {
//Check if State is PREPARING, PLANNING, STARTING or ENQUEUED
if (profile.getState() == QueryState.PREPARING ||
profile.getState() == QueryState.PLANNING ||
profile.getState() == QueryState.STARTING ||
profile.getState() == QueryState.ENQUEUED) {
return NOT_AVAILABLE_LABEL;
}
long queryEndTime;
// Check if State is RUNNING, set end time to current time
if (profile.getState() == QueryState.RUNNING) {
queryEndTime = System.currentTimeMillis();
} else {
queryEndTime = profile.getEnd();
}
//Check if QueueEnd is known
if (profile.getQueueWaitEnd() > 0L) {
//Execution time [end(QueueWait) - endTime(Query)]
return (new SimpleDurationFormat(profile.getQueueWaitEnd(), queryEndTime)).verbose();
}
//Check if Plan End is known
if (profile.getPlanEnd() > 0L) {
//Execution time [end(Planning) - endTime(Query)]
return (new SimpleDurationFormat(profile.getPlanEnd(), queryEndTime)).verbose();
}
//Check if any fragments have started
if (profile.getFragmentProfileCount() > 0) {
//Providing Invalid Planning End Time (Will update later)
long estimatedPlanEnd = Long.MAX_VALUE;
//Using Screen MajorFragment as reference
MajorFragmentProfile majorFrag0 = profile.getFragmentProfile(0);
//Searching for earliest starting fragment
for (MinorFragmentProfile fragmentWrapper : majorFrag0.getMinorFragmentProfileList()) {
long minorFragmentStart = fragmentWrapper.getStartTime();
if (minorFragmentStart > 0 && minorFragmentStart < estimatedPlanEnd) {
estimatedPlanEnd = minorFragmentStart;
}
}
//Execution time [start(rootFragment) - endTime(Query)]
return (new SimpleDurationFormat(estimatedPlanEnd, queryEndTime)).verbose() + ESTIMATED_LABEL;
}
//Unable to estimate/calculate Specific Execution Time
return NOT_AVAILABLE_LABEL;
}
//Threshold to be used by WebServer in issuing warning
public String getNoProgressWarningThreshold() {
return this.noProgressWarningThreshold;
}
public List<FragmentWrapper> getFragmentProfiles() {
return fragmentProfiles;
}
public String getFragmentsOverview() {
TableBuilder tb;
if (profile.getState() == QueryState.STARTING
|| profile.getState() == QueryState.RUNNING) {
tb = new TableBuilder(FragmentWrapper.ACTIVE_FRAGMENT_OVERVIEW_COLUMNS, FragmentWrapper.ACTIVE_FRAGMENT_OVERVIEW_COLUMNS_TOOLTIP);
for (final FragmentWrapper fw : fragmentProfiles) {
fw.addSummary(tb);
}
} else {
tb = new TableBuilder(FragmentWrapper.COMPLETED_FRAGMENT_OVERVIEW_COLUMNS, FragmentWrapper.COMPLETED_FRAGMENT_OVERVIEW_COLUMNS_TOOLTIP);
for (final FragmentWrapper fw : fragmentProfiles) {
fw.addFinalSummary(tb);
}
}
return tb.build();
}
public List<OperatorWrapper> getOperatorProfiles() {
return operatorProfiles;
}
public String getOperatorsOverview() {
final TableBuilder tb = new TableBuilder(OperatorWrapper.OPERATORS_OVERVIEW_COLUMNS,
OperatorWrapper.OPERATORS_OVERVIEW_COLUMNS_TOOLTIP);
for (final OperatorWrapper ow : operatorProfiles) {
ow.addSummary(tb, this.majorFragmentTallyMap, this.majorFragmentTallyTotal);
}
return tb.build();
}
public Map<String, String> getOptions() {
return getOptions(o -> true);
}
public Map<String, String> getSessionOptions() {
return getOptions(o -> OptionValue.OptionScope.SESSION == o.getScope());
}
public Map<String, String> getQueryOptions() {
return getOptions(o -> OptionValue.OptionScope.QUERY == o.getScope());
}
/**
* Generates sorted map with properties used to display on Web UI,
* where key is property name and value is property string value.
* Options are filtered based on {@link OptionValue.OptionScope}.
* <p/>
* When property value is null, it would be replaced with 'null',
* this is achieved using {@link String#valueOf(Object)} method.
* Options will be stored in ascending key order, sorted according
* to the natural order for the option name represented by {@link String}.
*
* @param filter filter based on {@link OptionValue.OptionScope}
* @return map with properties names and string values
*/
private Map<String, String> getOptions(Predicate<OptionValue> filter) {
return options.stream()
.filter(filter)
.collect(Collectors.toMap(
OptionValue::getName,
o -> String.valueOf(o.getValue()),
(o, n) -> n,
TreeMap::new));
}
/**
* @return true if impersonation is enabled without authentication,
* is needed to indicated if user name should be included when re-running the query
*/
public boolean isOnlyImpersonationEnabled() {
return onlyImpersonationEnabled;
}
//Generates operator names inferred from physical plan
private void generateOpMap(String plan) {
this.physicalOperatorMap = new HashMap<>();
if (plan.isEmpty()) {
return;
}
//[e.g ] operatorLine = "01-03 Flatten(flattenField=[$1]) : rowType = RecordType(ANY rfsSpecCode, ..."
String[] operatorLine = plan.split("\\n");
for (String line : operatorLine) {
String[] lineToken = line.split("\\s+", 3);
if (lineToken.length < 2) {
continue; //Skip due to possible invalid entry
}
//[e.g ] operatorPath = "01-xx-03"
String operatorPath = lineToken[0].trim().replaceFirst("-", "-xx-"); //Required format for lookup
//[e.g ] extractedOperatorName = "FLATTEN"
String extractedOperatorName = CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, lineToken[1].split("\\(", 2)[0].trim());
physicalOperatorMap.put(operatorPath, extractedOperatorName);
}
}
public boolean showEstimatedRows() {
return showEstimatedRows;
}
public String getCsrfToken() {
return csrfToken;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state.internals;
import org.apache.kafka.common.utils.AbstractIterator;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.streams.errors.ProcessorStateException;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.TimestampedBytesStore;
import org.apache.kafka.streams.state.internals.metrics.RocksDBMetricsRecorder;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ColumnFamilyOptions;
import org.rocksdb.DBOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.rocksdb.RocksIterator;
import org.rocksdb.WriteBatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import static java.util.Arrays.asList;
import static org.apache.kafka.streams.state.TimestampedBytesStore.convertToTimestampedFormat;
/**
 * A persistent key-(value-timestamp) store based on RocksDB.
 *
 * <p>Records are kept in a dedicated "keyValueWithTimestamp" column family in
 * timestamped format. If the on-disk store still contains plain (un-timestamped)
 * records in the default column family — i.e., it was written by an older store
 * version — the store opens in "upgrade mode" and lazily migrates records from the
 * old column family into the timestamped one on read/write.
 */
public class RocksDBTimestampedStore extends RocksDBStore implements TimestampedBytesStore {
    private static final Logger log = LoggerFactory.getLogger(RocksDBTimestampedStore.class);

    RocksDBTimestampedStore(final String name,
                            final String metricsScope) {
        super(name, metricsScope);
    }

    RocksDBTimestampedStore(final String name,
                            final String parentDir,
                            final RocksDBMetricsRecorder metricsRecorder) {
        super(name, parentDir, metricsRecorder);
    }

    /**
     * Opens RocksDB with two column families: the default one (legacy plain values)
     * and "keyValueWithTimestamp" (current format). If the timestamped column family
     * does not exist yet (store written by an old version), RocksDB refuses to open;
     * in that case the store is re-opened with only the default column family and the
     * timestamped one is created on the fly.
     */
    @Override
    void openRocksDB(final DBOptions dbOptions,
                     final ColumnFamilyOptions columnFamilyOptions) {
        final List<ColumnFamilyDescriptor> columnFamilyDescriptors = asList(
            new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY, columnFamilyOptions),
            new ColumnFamilyDescriptor("keyValueWithTimestamp".getBytes(StandardCharsets.UTF_8), columnFamilyOptions));
        final List<ColumnFamilyHandle> columnFamilies = new ArrayList<>(columnFamilyDescriptors.size());
        try {
            db = RocksDB.open(dbOptions, dbDir.getAbsolutePath(), columnFamilyDescriptors, columnFamilies);
            setDbAccessor(columnFamilies.get(0), columnFamilies.get(1));
        } catch (final RocksDBException e) {
            // RocksDB signals a missing column family via this exact message text
            // (note the doubled colon); any other failure is fatal.
            if ("Column family not found: : keyValueWithTimestamp".equals(e.getMessage())) {
                try {
                    db = RocksDB.open(dbOptions, dbDir.getAbsolutePath(), columnFamilyDescriptors.subList(0, 1), columnFamilies);
                    columnFamilies.add(db.createColumnFamily(columnFamilyDescriptors.get(1)));
                } catch (final RocksDBException fatal) {
                    throw new ProcessorStateException("Error opening store " + name + " at location " + dbDir.toString(), fatal);
                }
                setDbAccessor(columnFamilies.get(0), columnFamilies.get(1));
            } else {
                throw new ProcessorStateException("Error opening store " + name + " at location " + dbDir.toString(), e);
            }
        }
    }

    /**
     * Chooses the accessor based on whether legacy data is present: if the default
     * (no-timestamp) column family contains at least one record, use the dual-CF
     * accessor (upgrade mode); otherwise use the single-CF accessor and close the
     * unused legacy column family handle.
     */
    private void setDbAccessor(final ColumnFamilyHandle noTimestampColumnFamily,
                               final ColumnFamilyHandle withTimestampColumnFamily) {
        final RocksIterator noTimestampsIter = db.newIterator(noTimestampColumnFamily);
        noTimestampsIter.seekToFirst();
        if (noTimestampsIter.isValid()) {
            log.info("Opening store {} in upgrade mode", name);
            dbAccessor = new DualColumnFamilyAccessor(noTimestampColumnFamily, withTimestampColumnFamily);
        } else {
            log.info("Opening store {} in regular mode", name);
            dbAccessor = new SingleColumnFamilyAccessor(withTimestampColumnFamily);
            noTimestampColumnFamily.close();
        }
        noTimestampsIter.close();
    }

    /**
     * Accessor used in upgrade mode: reads consult both column families, writes always
     * remove the key from the legacy (old) column family and land in the timestamped
     * (new) one, so data migrates incrementally as it is touched.
     */
    private class DualColumnFamilyAccessor implements RocksDBAccessor {
        private final ColumnFamilyHandle oldColumnFamily;
        private final ColumnFamilyHandle newColumnFamily;

        private DualColumnFamilyAccessor(final ColumnFamilyHandle oldColumnFamily,
                                         final ColumnFamilyHandle newColumnFamily) {
            this.oldColumnFamily = oldColumnFamily;
            this.newColumnFamily = newColumnFamily;
        }

        @Override
        public void put(final byte[] key,
                        final byte[] valueWithTimestamp) {
            if (valueWithTimestamp == null) {
                // Deletion: the key must disappear from both column families.
                try {
                    db.delete(oldColumnFamily, wOptions, key);
                } catch (final RocksDBException e) {
                    // String format is happening in wrapping stores. So formatted message is thrown from wrapping stores.
                    throw new ProcessorStateException("Error while removing key from store " + name, e);
                }
                try {
                    db.delete(newColumnFamily, wOptions, key);
                } catch (final RocksDBException e) {
                    // String format is happening in wrapping stores. So formatted message is thrown from wrapping stores.
                    throw new ProcessorStateException("Error while removing key from store " + name, e);
                }
            } else {
                // Upsert: drop any legacy copy, then write the timestamped value.
                try {
                    db.delete(oldColumnFamily, wOptions, key);
                } catch (final RocksDBException e) {
                    // String format is happening in wrapping stores. So formatted message is thrown from wrapping stores.
                    throw new ProcessorStateException("Error while removing key from store " + name, e);
                }
                try {
                    db.put(newColumnFamily, wOptions, key, valueWithTimestamp);
                } catch (final RocksDBException e) {
                    // String format is happening in wrapping stores. So formatted message is thrown from wrapping stores.
                    throw new ProcessorStateException("Error while putting key/value into store " + name, e);
                }
            }
        }

        @Override
        public void prepareBatch(final List<KeyValue<Bytes, byte[]>> entries,
                                 final WriteBatch batch) throws RocksDBException {
            for (final KeyValue<Bytes, byte[]> entry : entries) {
                Objects.requireNonNull(entry.key, "key cannot be null");
                addToBatch(entry.key.get(), entry.value, batch);
            }
        }

        /**
         * Reads the key, preferring the new column family. If only a legacy value
         * exists, it is converted to timestamped format (with an unknown timestamp)
         * and migrated into the new column family as a side effect.
         */
        @Override
        public byte[] get(final byte[] key) throws RocksDBException {
            final byte[] valueWithTimestamp = db.get(newColumnFamily, key);
            if (valueWithTimestamp != null) {
                return valueWithTimestamp;
            }
            final byte[] plainValue = db.get(oldColumnFamily, key);
            if (plainValue != null) {
                final byte[] valueWithUnknownTimestamp = convertToTimestampedFormat(plainValue);
                // this does only work, because the changelog topic contains correct data already
                // for other format changes, we cannot take this short cut and can only migrate data
                // from old to new store on put()
                put(key, valueWithUnknownTimestamp);
                return valueWithUnknownTimestamp;
            }
            return null;
        }

        /**
         * Like {@link #get(byte[])} but without the migration side effect: a legacy
         * value is converted for the caller but left in the old column family.
         */
        @Override
        public byte[] getOnly(final byte[] key) throws RocksDBException {
            final byte[] valueWithTimestamp = db.get(newColumnFamily, key);
            if (valueWithTimestamp != null) {
                return valueWithTimestamp;
            }
            final byte[] plainValue = db.get(oldColumnFamily, key);
            if (plainValue != null) {
                return convertToTimestampedFormat(plainValue);
            }
            return null;
        }

        @Override
        public KeyValueIterator<Bytes, byte[]> range(final Bytes from,
                                                     final Bytes to) {
            return new RocksDBDualCFRangeIterator(
                name,
                db.newIterator(newColumnFamily),
                db.newIterator(oldColumnFamily),
                from,
                to);
        }

        @Override
        public KeyValueIterator<Bytes, byte[]> all() {
            final RocksIterator innerIterWithTimestamp = db.newIterator(newColumnFamily);
            innerIterWithTimestamp.seekToFirst();
            final RocksIterator innerIterNoTimestamp = db.newIterator(oldColumnFamily);
            innerIterNoTimestamp.seekToFirst();
            return new RocksDBDualCFIterator(name, innerIterWithTimestamp, innerIterNoTimestamp);
        }

        @Override
        public long approximateNumEntries() throws RocksDBException {
            // Sum of RocksDB's per-CF estimates; approximate by definition.
            return db.getLongProperty(oldColumnFamily, "rocksdb.estimate-num-keys")
                + db.getLongProperty(newColumnFamily, "rocksdb.estimate-num-keys");
        }

        @Override
        public void flush() throws RocksDBException {
            db.flush(fOptions, oldColumnFamily);
            db.flush(fOptions, newColumnFamily);
        }

        @Override
        public void prepareBatchForRestore(final Collection<KeyValue<byte[], byte[]>> records,
                                           final WriteBatch batch) throws RocksDBException {
            for (final KeyValue<byte[], byte[]> record : records) {
                addToBatch(record.key, record.value, batch);
            }
        }

        @Override
        public void addToBatch(final byte[] key,
                               final byte[] value,
                               final WriteBatch batch) throws RocksDBException {
            // Mirrors put(): legacy copy is always removed; non-null values go to the new CF.
            if (value == null) {
                batch.delete(oldColumnFamily, key);
                batch.delete(newColumnFamily, key);
            } else {
                batch.delete(oldColumnFamily, key);
                batch.put(newColumnFamily, key, value);
            }
        }

        @Override
        public void close() {
            oldColumnFamily.close();
            newColumnFamily.close();
        }

        @Override
        @SuppressWarnings("deprecation")
        public void toggleDbForBulkLoading() {
            // Uses the deprecated compactRange(cf, changeLevel, targetLevel, targetPathId) overload.
            try {
                db.compactRange(oldColumnFamily, true, 1, 0);
            } catch (final RocksDBException e) {
                throw new ProcessorStateException("Error while range compacting during restoring store " + name, e);
            }
            try {
                db.compactRange(newColumnFamily, true, 1, 0);
            } catch (final RocksDBException e) {
                throw new ProcessorStateException("Error while range compacting during restoring store " + name, e);
            }
        }
    }

    /**
     * Merges two RocksDB iterators (timestamped and legacy column family) into a single
     * key-ordered iteration. Legacy values are converted to timestamped format on the fly.
     * If the same key exists in both column families, the legacy entry is emitted first
     * (the comparison below uses {@code <= 0}).
     */
    private class RocksDBDualCFIterator extends AbstractIterator<KeyValue<Bytes, byte[]>>
        implements KeyValueIterator<Bytes, byte[]> {

        // RocksDB's JNI interface does not expose getters/setters that allow the
        // comparator to be pluggable, and the default is lexicographic, so it's
        // safe to just force lexicographic comparator here for now.
        private final Comparator<byte[]> comparator = Bytes.BYTES_LEXICO_COMPARATOR;

        private final String storeName;
        private final RocksIterator iterWithTimestamp;
        private final RocksIterator iterNoTimestamp;

        private volatile boolean open = true;

        // One-element lookahead buffers for each underlying iterator's current key.
        private byte[] nextWithTimestamp;
        private byte[] nextNoTimestamp;
        private KeyValue<Bytes, byte[]> next;

        RocksDBDualCFIterator(final String storeName,
                              final RocksIterator iterWithTimestamp,
                              final RocksIterator iterNoTimestamp) {
            this.iterWithTimestamp = iterWithTimestamp;
            this.iterNoTimestamp = iterNoTimestamp;
            this.storeName = storeName;
        }

        @Override
        public synchronized boolean hasNext() {
            if (!open) {
                throw new InvalidStateStoreException(String.format("RocksDB iterator for store %s has closed", storeName));
            }
            return super.hasNext();
        }

        @Override
        public synchronized KeyValue<Bytes, byte[]> next() {
            return super.next();
        }

        @Override
        public KeyValue<Bytes, byte[]> makeNext() {
            // Refill the lookahead buffers from whichever iterators are still valid.
            if (nextNoTimestamp == null && iterNoTimestamp.isValid()) {
                nextNoTimestamp = iterNoTimestamp.key();
            }
            if (nextWithTimestamp == null && iterWithTimestamp.isValid()) {
                nextWithTimestamp = iterWithTimestamp.key();
            }
            if (nextNoTimestamp == null && !iterNoTimestamp.isValid()) {
                if (nextWithTimestamp == null && !iterWithTimestamp.isValid()) {
                    // Both sides exhausted.
                    return allDone();
                } else {
                    // Only timestamped entries remain.
                    next = KeyValue.pair(new Bytes(nextWithTimestamp), iterWithTimestamp.value());
                    nextWithTimestamp = null;
                    iterWithTimestamp.next();
                }
            } else {
                if (nextWithTimestamp == null) {
                    // Only legacy entries remain; convert to timestamped format.
                    next = KeyValue.pair(new Bytes(nextNoTimestamp), convertToTimestampedFormat(iterNoTimestamp.value()));
                    nextNoTimestamp = null;
                    iterNoTimestamp.next();
                } else {
                    // Both sides have a candidate: emit the smaller key (legacy wins ties).
                    if (comparator.compare(nextNoTimestamp, nextWithTimestamp) <= 0) {
                        next = KeyValue.pair(new Bytes(nextNoTimestamp), convertToTimestampedFormat(iterNoTimestamp.value()));
                        nextNoTimestamp = null;
                        iterNoTimestamp.next();
                    } else {
                        next = KeyValue.pair(new Bytes(nextWithTimestamp), iterWithTimestamp.value());
                        nextWithTimestamp = null;
                        iterWithTimestamp.next();
                    }
                }
            }
            return next;
        }

        @Override
        public synchronized void close() {
            openIterators.remove(this);
            iterNoTimestamp.close();
            iterWithTimestamp.close();
            open = false;
        }

        @Override
        public Bytes peekNextKey() {
            // hasNext() triggers makeNext(), which populates 'next' without consuming it.
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            return next.key;
        }
    }

    /**
     * Range variant of {@link RocksDBDualCFIterator}: both underlying iterators are
     * seeked to {@code from}, and iteration stops once a key beyond {@code to}
     * (inclusive upper bound) is produced.
     */
    private class RocksDBDualCFRangeIterator extends RocksDBDualCFIterator {
        // RocksDB's JNI interface does not expose getters/setters that allow the
        // comparator to be pluggable, and the default is lexicographic, so it's
        // safe to just force lexicographic comparator here for now.
        private final Comparator<byte[]> comparator = Bytes.BYTES_LEXICO_COMPARATOR;
        private final byte[] upperBoundKey;

        RocksDBDualCFRangeIterator(final String storeName,
                                   final RocksIterator iterWithTimestamp,
                                   final RocksIterator iterNoTimestamp,
                                   final Bytes from,
                                   final Bytes to) {
            super(storeName, iterWithTimestamp, iterNoTimestamp);
            iterWithTimestamp.seek(from.get());
            iterNoTimestamp.seek(from.get());
            upperBoundKey = to.get();
            if (upperBoundKey == null) {
                throw new NullPointerException("RocksDBDualCFRangeIterator: upperBoundKey is null for key " + to);
            }
        }

        @Override
        public KeyValue<Bytes, byte[]> makeNext() {
            final KeyValue<Bytes, byte[]> next = super.makeNext();
            if (next == null) {
                return allDone();
            } else {
                // Filter out entries past the (inclusive) upper bound.
                if (comparator.compare(next.key.get(), upperBoundKey) <= 0) {
                    return next;
                } else {
                    return allDone();
                }
            }
        }
    }
}
| |
package org.jgroups.tests;
import org.jgroups.*;
import org.jgroups.protocols.*;
import org.jgroups.protocols.pbcast.*;
import org.jgroups.stack.ProtocolStack;
import org.jgroups.util.Digest;
import org.jgroups.util.Tuple;
import org.jgroups.util.Util;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.*;
/**
 * Tests overlapping merges, e.g. A: {A,B}, B: {A,B} and C: {A,B,C}. Tests unicast as well as multicast seqno tables.<br/>
 * Related JIRA: https://jira.jboss.org/jira/browse/JGRP-940
 * @author Bela Ban
 */
@Test(groups=Global.STACK_DEPENDENT,singleThreaded=true)
public class OverlappingMergeTest extends ChannelTestBase {
    protected JChannel a, b, c, d;          // test members; d is only created by some tests
    protected MyReceiver ra, rb, rc, rd;    // per-member receivers that record multicasts
    protected boolean multicast_transport;  // whether the transport supports IP multicast

    /** Creates and connects channels A, B and C and waits until all share the same view. */
    @BeforeMethod
    protected void start() throws Exception {
        a=createChannel(true, 4).name("A");
        ra=new MyReceiver("A", a);
        a.setReceiver(ra);
        b=createChannel(a).name("B");
        rb=new MyReceiver("B", b);
        b.setReceiver(rb);
        c=createChannel(a).name("C");
        rc=new MyReceiver("C", c);
        c.setReceiver(rc);
        modifyConfigs(a,b,c);
        a.connect("OverlappingMergeTest");
        b.connect("OverlappingMergeTest");
        c.connect("OverlappingMergeTest");
        Util.waitUntilAllChannelsHaveSameView(30000, 1000, a, b, c);
        multicast_transport=isMulticastTransport(a);
    }

    /** Resets GMS log levels, closes all channels and clears the receivers. */
    @AfterMethod
    protected void stop() throws Exception {
        for(JChannel ch: new JChannel[]{a,b,c,d}) {
            if(ch != null)
                ch.getProtocolStack().findProtocol(GMS.class).setLevel("warn");
        }
        Util.close(d,c,b,a);
        ra.clear(); rb.clear(); rc.clear();
    }

    /** Sanity check: 3 members each sending 5 multicasts means everyone receives 15. */
    @SuppressWarnings("unchecked")
    public void testRegularMessageSending() throws Exception {
        sendMessages(5, a, b, c);
        checkReceivedMessages(make(ra, 15), make(rb,15), make(rc,15));
    }

    /**
     * Verifies that unicasts are received correctly by all participants after an overlapping merge. The following steps
     * are executed:
     * <ol>
     * <li/>Group is {A,B,C}, A is the coordinator
     * <li/>MERGE3 is removed from all members
     * <li/>VERIFY_SUSPECT is removed from all members
     * <li/>Everyone sends 5 unicast messages to everyone else
     * <li/>Everyone sends 5 multicasts
     * <li/>A SUSPECT(A) event is injected into B's stack (GMS). This causes a new view {B,C} to be multicast by B
     * <li/>B and C install {B,C}
     * <li/>B and C trash the connection table for A in UNICAST
     * <li/>A ignores the view, it still has view {A,B,C} and all connection tables intact in UNICAST
     * <li/>We now inject a MERGE(A,B) event into A. This should use A and B as coords to create a new MergeView {A,B,C}
     * <li/>The merge already fails because the unicast between A and B fails due to the reason given below !
     *      Once this is fixed, the next step below should work, too !
     * <li/>A sends a unicast to B and C. This should fail until JGRP-940 has been fixed !
     * <li/>Reason: B and C trashed A's conntables in UNICAST, but A didn't trash its conn tables for B and C, so
     *      we have non-matching seqnos !
     * </ol>
     */
    public void testOverlappingMergeWithBC() throws Exception {
        sendMessages(5, a, b, c);
        checkReceivedMessages(make(ra, 15), make(rb,15), make(rc,15));
        // Inject view {B,C} into B and C:
        View new_view=View.create(b.getAddress(), 10, b.getAddress(), c.getAddress());
        System.out.println("\n ==== Injecting view " + new_view + " into B and C ====");
        injectView(new_view, b, c);
        makeCoordinator(b);
        // A was not told about {B,C}, so both A and B now believe they are coordinator.
        assert Util.isCoordinator(a);
        assert Util.isCoordinator(b);
        assert !Util.isCoordinator(c);
        System.out.println("A's view: " + a.getView());
        System.out.println("B's view: " + b.getView());
        System.out.println("C's view: " + c.getView());
        assert a.getView().size() == 3 : "A's view is " + a.getView();
        assert b.getView().size() == 2 : "B's view is " + b.getView();
        assert c.getView().size() == 2 : "C's view is " + c.getView();
        System.out.println("\n==== Sending messages while the cluster is partitioned ====");
        sendMessages(5, a, b, c);
        if(multicast_transport) {
            // B and C drop A's multicasts, but A will receive B's and C's multicasts
            checkReceivedMessages(make(ra, 15), make(rb,10), make(rc,10));
        }
        else {
            // B and C drop A's multicasts, and won't send their multicasts to A (A only receives its own multicasts)
            checkReceivedMessages(make(ra, 5), make(rb,10), make(rc,10));
        }
        System.out.println("\n ==== Digests are:\n" + dumpDigests(a,b,c));
        // start merging
        Event merge_evt=createMergeEvent(a,b,c);
        JChannel merge_leader=determineMergeLeader(a, b);
        System.out.println("\n==== Injecting a merge event (leader=" + merge_leader.getAddress() + ") ====");
        injectMergeEvent(merge_evt, merge_leader);
        System.out.println("\n==== checking views after merge ====:");
        // Retry the merge for up to ~20s until all members converge on a 3-member view.
        for(int i=0; i < 20; i++) {
            if(a.getView().size() == 3 && b.getView().size() == 3 && c.getView().size() == 3) {
                System.out.println("views are correct: all views have a size of 3");
                break;
            }
            System.out.print(".");
            runStableProtocol(a,b,c);
            Util.sleep(1000);
            injectMergeEvent(createMergeEvent(a,b,c), merge_leader);
        }
        System.out.println("\n ==== Digests after the merge:\n" + dumpDigests(a,b,c));
        View va=a.getView(), vb=b.getView(), vc=c.getView();
        System.out.println("\nA's view: " + va);
        System.out.println("B's view: " + vb);
        System.out.println("C's view: " + vc);
        assert va.size() == 3 : "A's view is " + va;
        assert vb.size() == 3 : "B's view is " + vb;
        assert vc.size() == 3 : "C's view is " + vc;
        System.out.println("\n==== Sending messages after merge ====");
        sendMessages(5, a, b, c);
        checkReceivedMessages(make(ra, 15), make(rb,15), make(rc,15));
    }

    /**
     * Verifies that unicasts are received correctly by all participants after an overlapping merge. The following steps
     * are executed:
     * <ol>
     * <li/>Group is {A,B,C}, inject views:
     * <li/>A: A,C
     * <li/>B: A,B,C
     * <li/>C: A,B,C
     * <li/>Then initiate a merge.
     * </ol>
     */
    public void testOverlappingMergeWithABC() throws Exception {
        sendMessages(5, a, b, c);
        checkReceivedMessages(make(ra, 15), make(rb,15), make(rc,15));
        // Inject view {A,C} into A:
        View new_view=View.create(a.getAddress(), 4, a.getAddress(), c.getAddress());
        System.out.println("\n ==== Injecting view " + new_view + " into A ====");
        injectView(new_view, a);
        assertTrue(Util.isCoordinator(a));
        assertFalse(Util.isCoordinator(b));
        assertFalse(Util.isCoordinator(c));
        System.out.println("A's view: " + a.getView());
        System.out.println("B's view: " + b.getView());
        System.out.println("C's view: " + c.getView());
        assertEquals("A's view is " + a.getView(),2,a.getView().size());
        assertEquals("B's view is " + b.getView(), 3, b.getView().size());
        assertEquals("C's view is " + c.getView(), 3, c.getView().size());
        // start merging
        Event merge_evt=createMergeEvent(a,b,c);
        for(JChannel ch: new JChannel[]{a,b,c})
            ch.getProtocolStack().findProtocol(GMS.class).setLevel("trace");
        System.out.println("\n==== Injecting a merge event into A, B and C ====");
        injectMergeEvent(merge_evt,a,b,c);
        System.out.println("\n==== checking views after merge ====:");
        // Retry the merge for up to ~20s until all members converge on a 3-member view.
        for(int i=0; i < 20; i++) {
            if(a.getView().size() == 3 && b.getView().size() == 3 && c.getView().size() == 3) {
                System.out.println("views are correct: all views have a size of 3");
                break;
            }
            System.out.print(".");
            runStableProtocol(a,b,c);
            Util.sleep(1000);
            injectMergeEvent(createMergeEvent(a,b,c), a,b,c);
        }
        System.out.println("\n ==== Digests after the merge:\n" + dumpDigests(a,b,c));
        View va=a.getView(), vb=b.getView(), vc=c.getView();
        System.out.println("\nA's view: " + va);
        System.out.println("B's view: " + vb);
        System.out.println("C's view: " + vc);
        assertEquals("A's view is " + va,3,va.size());
        assertEquals("B's view is " + vb,3, vb.size());
        assertEquals("C's view is " + vc,3,vc.size());
        System.out.println("\n==== Sending messages after merge ====");
        sendMessages(5,a,b,c);
        checkReceivedMessages(make(ra, 15), make(rb,15), make(rc,15));
        for(JChannel ch: new JChannel[]{a,b,c})
            ch.getProtocolStack().findProtocol(GMS.class).setLevel("warn");
    }

    /**
     * <ol>
     * <li/>Group is A|4={A,B,C}, inject views:
     * <li/>A: A|5={A,B}
     * <li/>B: A|5={A,B}
     * <li/>C: A|4={A,B,C} // failed installing view A|5
     * <li/>Then initiate a merge.
     * </ol>
     */
    public void testOverlappingMergeWithABC2() throws Exception {
        // Inject view {A,B} into A and B:
        View new_view=View.create(a.getAddress(), 4, a.getAddress(), b.getAddress());
        System.out.println("\n ==== Injecting view " + new_view + " into A and B ====");
        injectView(new_view, a,b);
        assertTrue(Util.isCoordinator(a));
        assertFalse(Util.isCoordinator(b));
        assertFalse(Util.isCoordinator(c));
        System.out.println("A's view: " + a.getView());
        System.out.println("B's view: " + b.getView());
        System.out.println("C's view: " + c.getView());
        assertEquals("A's view is " + a.getView(), 2, a.getView().size());
        assertEquals("B's view is " + b.getView(), 2, b.getView().size());
        assertEquals("C's view is " + c.getView(), 3, c.getView().size());
        // start merging
        Event merge_evt=createMergeEvent(a,b,c);
        for(JChannel ch: new JChannel[]{a,b,c})
            ch.getProtocolStack().findProtocol(GMS.class).setLevel("trace");
        System.out.println("\n==== Injecting a merge event into A, B and C ====");
        injectMergeEvent(merge_evt,a,b,c);
        System.out.println("\n==== checking views after merge ====:");
        // Retry the merge for up to ~20s until all members converge on a 3-member view.
        for(int i=0; i < 20; i++) {
            if(a.getView().size() == 3 && b.getView().size() == 3 && c.getView().size() == 3) {
                System.out.println("views are correct: all views have a size of 3");
                break;
            }
            System.out.print(".");
            runStableProtocol(a,b,c);
            Util.sleep(1000);
            injectMergeEvent(createMergeEvent(a,b,c), a,b,c);
        }
        System.out.println("\n ==== Digests after the merge:\n" + dumpDigests(a,b,c));
        View va=a.getView(), vb=b.getView(), vc=c.getView();
        System.out.println("\nA's view: " + va);
        System.out.println("B's view: " + vb);
        System.out.println("C's view: " + vc);
        Util.waitUntilAllChannelsHaveSameView(100, 50, a,b,c);
    }

    /**
     * Test the following scenario (https://issues.jboss.org/browse/JGRP-1451):
     * - A: {A,C,B}
     * - B: {A,C,B}
     * - C: {A,C,B}
     * - D: {B,A,C,D}
     * - Merging should end up with a view where everybody has A, B, C and D in the same view
     */
    public void testMergeWithDifferentPartitions() throws Exception {
        d=createChannel(a);
        d.setName("D");
        rd=new MyReceiver("D", d);
        d.setReceiver(rd);
        modifyConfigs(d);
        d.connect("OverlappingMergeTest");
        // Inject view {A,C,B} into A, B and C:
        View new_view=View.create(a.getAddress(), 4, a.getAddress(), c.getAddress(), b.getAddress());
        System.out.println("\n ==== Injecting view " + new_view + " into A, B and C ====");
        injectView(new_view,false,a,b,c);
        assert Util.isCoordinator(a);
        assert !Util.isCoordinator(b);
        assert !Util.isCoordinator(c);
        View view_d=View.create(b.getAddress(), 4, b.getAddress(), a.getAddress(), c.getAddress(), d.getAddress());
        System.out.println("\n ==== Injecting view " + view_d + " into D ====\n");
        injectView(view_d, false, d);
        assert !Util.isCoordinator(d);
        for(JChannel ch: Arrays.asList(a,b,c,d))
            System.out.println(ch.getName() + ": " + ch.getView());
        // start merging
        Event merge_evt=createMergeEvent(a,b,c,d);
        System.out.println("\n==== Injecting a merge event into members ====");
        for(JChannel ch: new JChannel[]{a,b,c,d}) {
            GMS gms=ch.getProtocolStack().findProtocol(GMS.class);
            gms.setLevel("trace");
        }
        injectMergeEvent(merge_evt,a,b,c,d);
        // Retry the merge until all four members converge on a 4-member view.
        for(int i=0; i < 20; i++) {
            if(a.getView().size() == 4 && b.getView().size() == 4 && c.getView().size() == 4 && d.getView().size() == 4)
                break;
            Util.sleep(2000);
            injectMergeEvent(createMergeEvent(a,b,c,d), a,b,c,d);
        }
        for(JChannel ch: Arrays.asList(a,b,c,d))
            System.out.println(ch.getName() + ": " + ch.getView() + " (coord=" + isCoord(ch) + ")");
        for(JChannel ch: Arrays.asList(a,b,c,d)) {
            assert ch.getView().size() == 4 : ch.getName() + ": view is " + ch.getView();
            GMS gms=ch.getProtocolStack().findProtocol(GMS.class);
            gms.setLevel("warn");
        }
        System.out.println("\n");
    }

    /**
     * Tests a merge where all members have views whose ViewIds have the same creator, but different IDs, e.g.:
     * A: A|5 {A}
     * B: A|6 {A,B}
     * C: A|7 {A,B,C}
     */
    public void testSameCreatorDifferentIDs() throws Exception {
        // modifyConfigs() removed MERGE3 in start(); re-insert it here so the members
        // can discover and heal the divergent views on their own.
        for(JChannel ch: new JChannel[]{a,b,c}) {
            MERGE3 merge_prot=ch.getProtocolStack().findProtocol(MERGE3.class);
            if(merge_prot == null) {
                merge_prot=new MERGE3();
                merge_prot.setMinInterval(500).setMaxInterval(1000).setValue("check_interval", 3000);
                ch.getProtocolStack().insertProtocol(merge_prot, ProtocolStack.Position.ABOVE, Discovery.class);
                merge_prot.init();
                merge_prot.down(new Event(Event.SET_LOCAL_ADDRESS, ch.getAddress()));
            }
        }
        View view=View.create(a.getAddress(), 5, a.getAddress());
        injectView(view, a);
        view=View.create(a.getAddress(), 6, a.getAddress(), b.getAddress());
        injectView(view, b);
        view=View.create(a.getAddress(), 7, a.getAddress(), b.getAddress(), c.getAddress());
        injectView(view, c);
        System.out.println("\nA's view: " + a.getView());
        System.out.println("B's view: " + b.getView());
        System.out.println("C's view: " + c.getView());
        Util.waitUntilAllChannelsHaveSameView(50000, 1000, a, b, c);
        View va=a.getView(), vb=b.getView(), vc=c.getView();
        System.out.println("\nA's view: " + va);
        System.out.println("B's view: " + vb);
        System.out.println("C's view: " + vc);
        assertEquals("A's view is " + va, 3, va.size());
        assertEquals("B's view is " + vb, 3, vb.size());
        assertEquals("C's view is " + vc, 3, vc.size());
    }

    /** Forces the GMS of the given channel to become coordinator. */
    private static void makeCoordinator(JChannel ch) {
        GMS gms=ch.getProtocolStack().findProtocol(GMS.class);
        gms.becomeCoordinator();
    }

    /** Returns a printable dump of every channel's NAKACK2 digest, one line per member. */
    private static String dumpDigests(JChannel ... channels) {
        StringBuilder sb=new StringBuilder();
        for(JChannel ch: channels) {
            sb.append(ch.getAddress()).append(": ");
            NAKACK2 nakack=ch.getProtocolStack().findProtocol(NAKACK2.class);
            Digest digest=nakack.getDigest();
            sb.append(digest).append("\n");
        }
        return sb.toString();
    }

    /** Picks the merge leader: the coordinator whose address sorts first. */
    private static JChannel determineMergeLeader(JChannel ... coords) {
        Membership tmp=new Membership();
        for(JChannel ch: coords) {
            tmp.add(ch.getAddress());
        }
        tmp.sort();
        Address merge_leader=tmp.elementAt(0);
        for(JChannel ch: coords) {
            if(ch.getAddress().equals(merge_leader))
                return ch;
        }
        return null;
    }

    /** Installs the given view directly into each channel's GMS, optionally printing the result. */
    private static void injectView(View view, boolean print_receivers, JChannel ... channels) {
        for(JChannel ch: channels) {
            GMS gms=ch.getProtocolStack().findProtocol(GMS.class);
            gms.installView(view);
        }
        if(!print_receivers)
            return;
        for(JChannel ch: channels) {
            MyReceiver receiver=(MyReceiver)ch.getReceiver();
            System.out.println("[" + receiver.name + "] view=" + ch.getView());
        }
    }

    private static void injectView(View view, JChannel ... channels) {
        injectView(view, true, channels);
    }

    /** Builds a MERGE event whose payload maps each channel's address to its current view. */
    protected static Event createMergeEvent(JChannel ... channels) {
        Map<Address,View> views=new HashMap<>();
        for(JChannel ch: channels)
            views.put(ch.getAddress(), ch.getView());
        return new Event(Event.MERGE, views);
    }

    /** Passes the merge event up each channel's GMS, as if MERGE3 had detected the partition. */
    private static void injectMergeEvent(Event evt, JChannel ... channels) {
        for(JChannel ch: channels) {
            GMS gms=ch.getProtocolStack().findProtocol(GMS.class);
            gms.up(evt);
        }
    }

    /** Clears all receivers, then has every given channel multicast num_msgs messages. */
    private void sendMessages(int num_msgs, JChannel... channels) throws Exception {
        ra.clear(); rb.clear(); rc.clear();
        for(JChannel ch: channels) {
            for(int i=1; i <= num_msgs; i++)
                ch.send(null, String.valueOf(i));
        }
    }

    /** Triggers a STABLE round on each channel (no-op for channels without STABLE). */
    private static void runStableProtocol(JChannel... channels) {
        for(JChannel ch: channels) {
            STABLE stable=ch.getProtocolStack().findProtocol(STABLE.class);
            if(stable != null)
                stable.gc();
        }
    }

    protected boolean isMulticastTransport(JChannel ch) {
        return ch.getProtocolStack().getTransport().supportsMulticasting();
    }

    /**
     * Waits (up to ~30s, running STABLE in between) until every receiver has its expected
     * number of multicasts, then asserts the exact counts.
     */
    @SuppressWarnings("unchecked")
    protected void checkReceivedMessages(Tuple<MyReceiver,Integer> ... expected_messages) {
        for(int i=0; i < 30; i++) {
            boolean all_received=true;
            for(Tuple<MyReceiver,Integer> tuple: expected_messages) {
                MyReceiver receiver=tuple.getVal1();
                List<Message> mcasts=receiver.getMulticasts();
                int mcasts_received=mcasts.size();
                int expected_mcasts=tuple.getVal2();
                if(mcasts_received != expected_mcasts) {
                    all_received=false;
                    break;
                }
            }
            if(all_received)
                break;
            runStableProtocol(a,b,c);
            Util.sleep(1000);
        }
        for(Tuple<MyReceiver,Integer> tuple: expected_messages) {
            MyReceiver receiver=tuple.getVal1();
            List<Message> mcasts=receiver.getMulticasts();
            int mcasts_received=mcasts.size();
            System.out.println("receiver " + receiver + ": mcasts=" + mcasts_received);
        }
        for(Tuple<MyReceiver,Integer> tuple: expected_messages) {
            MyReceiver receiver=tuple.getVal1();
            List<Message> mcasts=receiver.getMulticasts();
            int mcasts_received=mcasts.size();
            int expected_mcasts=tuple.getVal2();
            assert mcasts_received == expected_mcasts : "(" + receiver.name + ") num_mcasts=" + print(mcasts) +
                " expected: " + expected_mcasts + ")";
        }
    }

    @SuppressWarnings("unchecked")
    protected Tuple<MyReceiver,Integer> make(MyReceiver r, int expected_msgs) {
        return new Tuple<>(r, expected_msgs);
    }

    /** Formats a message list as "sender: payload" pairs for assertion messages. */
    private static String print(List<Message> msgs) {
        StringBuilder sb=new StringBuilder();
        for(Message msg: msgs) {
            sb.append(msg.getSrc()).append(": ").append((Object)msg.getObject()).append(" ");
        }
        return sb.toString();
    }

    protected boolean isCoord(JChannel ch) {
        GMS gms=ch.getProtocolStack().findProtocol(GMS.class);
        return gms.getImpl() instanceof CoordGmsImpl;
    }

    /**
     * Strips failure detection, flow control, merging and state transfer from the stacks
     * so the test has full manual control over view installation and merging.
     */
    private static void modifyConfigs(JChannel ... channels) throws Exception {
        for(JChannel ch: channels) {
            ProtocolStack stack=ch.getProtocolStack();
            stack.removeProtocols("MERGE3","FD_SOCK","FD","FD_ALL","FC","MFC","UFC","VERIFY_SUSPECT", "STATE_TRANSFER");
            NAKACK2 nak=stack.findProtocol(NAKACK2.class);
            if(nak != null)
                nak.setLogDiscardMessages(false);
        }
    }

    /** Receiver that records all multicast messages (dest == null) it sees. */
    protected static class MyReceiver extends ReceiverAdapter {
        final String name;
        final JChannel ch;
        final List<Message> mcasts=new ArrayList<>(20);

        public MyReceiver(String name, JChannel ch) {
            this.name=name;
            this.ch=ch;
        }

        public void receive(Message msg) {
            Address dest=msg.getDest();
            if(dest == null) {
                // mcasts is read from the test thread while receive() runs on JGroups threads.
                synchronized(mcasts) {
                    mcasts.add(msg);
                }
            }
        }

        public List<Message> getMulticasts() { return mcasts; }
        public void clear() {mcasts.clear();}
        public Address getAddress() {return ch != null? ch.getAddress() : null;}

        public String toString() {
            return name;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.elasticsearch.common.breaker.CircuitBreakingException;
import org.elasticsearch.common.settings.Settings;
public class RegexLimitTests extends ScriptTestCase {
    // This regex has backtracking due to the reluctant quantifier ".*?": the engine may
    // revisit input characters, which is what the regex limit counts.
    private final String pattern = "/abc.*?def/";
    // Painless source literal for an input that matches 'pattern' with some backtracking.
    private final String charSequence = "'abcdodef'";
    // Painless source literal used by the split() tests; contains several abc...def regions.
    private final String splitCharSequence = "'0-abc-1-def-X-abc-2-def-Y-abc-3-def-Z-abc'";
    // Message fragment of the CircuitBreakingException thrown when the regex limit trips.
    private final String regexCircuitMessage = "[scripting] Regular expression considered too many characters";
public void testRegexInject_Matcher() {
String[] scripts = new String[]{pattern + ".matcher(" + charSequence + ").matches()",
"Matcher m = " + pattern + ".matcher(" + charSequence + "); m.matches()"};
for (String script : scripts) {
setRegexLimitFactor(2);
assertEquals(Boolean.TRUE, exec(script));
// Backtracking means the regular expression will fail with limit factor 1 (don't consider more than each char once)
setRegexLimitFactor(1);
CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script));
assertTrue(cbe.getMessage().contains(regexCircuitMessage));
}
}
public void testRegexInjectUnlimited_Matcher() {
String[] scripts = new String[]{pattern + ".matcher(" + charSequence + ").matches()",
"Matcher m = " + pattern + ".matcher(" + charSequence + "); m.matches()"};
for (String script : scripts) {
setRegexEnabled();
assertEquals(Boolean.TRUE, exec(script));
}
}
public void testRegexInject_Def_Matcher() {
String[] scripts = new String[]{"def p = " + pattern + "; p.matcher(" + charSequence + ").matches()",
"def p = " + pattern + "; def m = p.matcher(" + charSequence + "); m.matches()"};
for (String script : scripts) {
setRegexLimitFactor(2);
assertEquals(Boolean.TRUE, exec(script));
setRegexLimitFactor(1);
CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script));
assertTrue(cbe.getMessage().contains(regexCircuitMessage));
}
}
public void testMethodRegexInject_Ref_Matcher() {
String script =
"boolean isMatch(Function func) { func.apply(" + charSequence +").matches(); } " +
"Pattern pattern = " + pattern + ";" +
"isMatch(pattern::matcher)";
setRegexLimitFactor(2);
assertEquals(Boolean.TRUE, exec(script));
setRegexLimitFactor(1);
CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script));
assertTrue(cbe.getMessage().contains(regexCircuitMessage));
}
public void testRegexInject_DefMethodRef_Matcher() {
String script =
"boolean isMatch(Function func) { func.apply(" + charSequence +").matches(); } " +
"def pattern = " + pattern + ";" +
"isMatch(pattern::matcher)";
setRegexLimitFactor(2);
assertEquals(Boolean.TRUE, exec(script));
setRegexLimitFactor(1);
CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script));
assertTrue(cbe.getMessage().contains(regexCircuitMessage));
}
/**
 * {@code split} with an explicit limit must be guarded by the regex circuit
 * breaker, both on a direct pattern literal and on a typed Pattern variable.
 */
public void testRegexInject_SplitLimit() {
    String[] variants = {
        pattern + ".split(" + splitCharSequence + ", 2)",
        "Pattern p = " + pattern + "; p.split(" + splitCharSequence + ", 2)"
    };
    String[] expected = {"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"};
    for (String source : variants) {
        setRegexLimitFactor(2);
        assertArrayEquals(expected, (String[]) exec(source));
        setRegexLimitFactor(1);
        CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
        assertTrue(breaker.getMessage().contains(regexCircuitMessage));
    }
}

/** With regex unconditionally enabled, no factor-based limit applies. */
public void testRegexInjectUnlimited_SplitLimit() {
    String[] variants = {
        pattern + ".split(" + splitCharSequence + ", 2)",
        "Pattern p = " + pattern + "; p.split(" + splitCharSequence + ", 2)"
    };
    String[] expected = {"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"};
    for (String source : variants) {
        setRegexEnabled();
        assertArrayEquals(expected, (String[]) exec(source));
    }
}

/** Same guard when the Pattern is held in a {@code def} variable. */
public void testRegexInject_Def_SplitLimit() {
    String source = "def p = " + pattern + "; p.split(" + splitCharSequence + ", 2)";
    setRegexLimitFactor(2);
    assertArrayEquals(new String[]{"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"}, (String[]) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}

/** Same guard when split-with-limit is invoked through a method reference. */
public void testRegexInject_Ref_SplitLimit() {
    String source =
        "String[] splitLimit(BiFunction func) { func.apply(" + splitCharSequence + ", 2); } " +
        "Pattern pattern = " + pattern + ";" +
        "splitLimit(pattern::split)";
    setRegexLimitFactor(2);
    assertArrayEquals(new String[]{"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"}, (String[]) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}

/** Same guard for a dynamically resolved ({@code def}) method reference. */
public void testRegexInject_DefMethodRef_SplitLimit() {
    String source =
        "String[] splitLimit(BiFunction func) { func.apply(" + splitCharSequence + ", 2); } " +
        "def pattern = " + pattern + ";" +
        "splitLimit(pattern::split)";
    setRegexLimitFactor(2);
    assertArrayEquals(new String[]{"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"}, (String[]) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}
/**
 * Plain {@code split} must be guarded by the regex circuit breaker, both on
 * a direct pattern literal and on a typed Pattern variable.
 */
public void testRegexInject_Split() {
    String[] variants = {
        pattern + ".split(" + splitCharSequence + ")",
        "Pattern p = " + pattern + "; p.split(" + splitCharSequence + ")"
    };
    String[] expected = {"0-", "-X-", "-Y-", "-Z-abc"};
    for (String source : variants) {
        setRegexLimitFactor(2);
        assertArrayEquals(expected, (String[]) exec(source));
        setRegexLimitFactor(1);
        CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
        assertTrue(breaker.getMessage().contains(regexCircuitMessage));
    }
}

/** With regex unconditionally enabled, no factor-based limit applies. */
public void testRegexInjectUnlimited_Split() {
    String[] variants = {
        pattern + ".split(" + splitCharSequence + ")",
        "Pattern p = " + pattern + "; p.split(" + splitCharSequence + ")"
    };
    String[] expected = {"0-", "-X-", "-Y-", "-Z-abc"};
    for (String source : variants) {
        setRegexEnabled();
        assertArrayEquals(expected, (String[]) exec(source));
    }
}

/** Same guard when the Pattern is held in a {@code def} variable. */
public void testRegexInject_Def_Split() {
    String source = "def p = " + pattern + "; p.split(" + splitCharSequence + ")";
    setRegexLimitFactor(2);
    assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}

/** Same guard when split is invoked through a method reference. */
public void testRegexInject_Ref_Split() {
    String source =
        "String[] split(Function func) { func.apply(" + splitCharSequence + "); } " +
        "Pattern pattern = " + pattern + ";" +
        "split(pattern::split)";
    setRegexLimitFactor(2);
    assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}

/** Same guard for a dynamically resolved ({@code def}) method reference. */
public void testRegexInject_DefMethodRef_Split() {
    String source =
        "String[] split(Function func) { func.apply(" + splitCharSequence + "); } " +
        "def pattern = " + pattern + ";" +
        "split(pattern::split)";
    setRegexLimitFactor(2);
    assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}
/**
 * {@code splitAsStream} must be guarded by the regex circuit breaker, both
 * on a direct pattern literal and on a typed Pattern variable.
 */
public void testRegexInject_SplitAsStream() {
    String[] variants = {
        pattern + ".splitAsStream(" + splitCharSequence + ").toArray(String[]::new)",
        "Pattern p = " + pattern + "; p.splitAsStream(" + splitCharSequence + ").toArray(String[]::new)"
    };
    String[] expected = {"0-", "-X-", "-Y-", "-Z-abc"};
    for (String source : variants) {
        setRegexLimitFactor(2);
        assertArrayEquals(expected, (String[]) exec(source));
        setRegexLimitFactor(1);
        CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
        assertTrue(breaker.getMessage().contains(regexCircuitMessage));
    }
}

/** With regex unconditionally enabled, no factor-based limit applies. */
public void testRegexInjectUnlimited_SplitAsStream() {
    String[] variants = {
        pattern + ".splitAsStream(" + splitCharSequence + ").toArray(String[]::new)",
        "Pattern p = " + pattern + "; p.splitAsStream(" + splitCharSequence + ").toArray(String[]::new)"
    };
    String[] expected = {"0-", "-X-", "-Y-", "-Z-abc"};
    for (String source : variants) {
        setRegexEnabled();
        assertArrayEquals(expected, (String[]) exec(source));
    }
}

/** Same guard when the Pattern is held in a {@code def} variable. */
public void testRegexInject_Def_SplitAsStream() {
    String source = "def p = " + pattern + "; p.splitAsStream(" + splitCharSequence + ").toArray(String[]::new)";
    setRegexLimitFactor(2);
    assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}

/** Same guard when splitAsStream is invoked through a method reference. */
public void testRegexInject_Ref_SplitAsStream() {
    String source =
        "Stream splitStream(Function func) { func.apply(" + splitCharSequence + "); } " +
        "Pattern pattern = " + pattern + ";" +
        "splitStream(pattern::splitAsStream).toArray(String[]::new)";
    setRegexLimitFactor(2);
    assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}

/** Same guard for a dynamically resolved ({@code def}) method reference. */
public void testRegexInject_DefMethodRef_SplitAsStream() {
    String source =
        "Stream splitStream(Function func) { func.apply(" + splitCharSequence + "); } " +
        "def pattern = " + pattern + ";" +
        "splitStream(pattern::splitAsStream).toArray(String[]::new)";
    setRegexLimitFactor(2);
    assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}
/** The find operator (=~) is subject to the regex circuit breaker. */
public void testRegexInjectFindOperator() {
    String source = "if (" + charSequence + " =~ " + pattern + ") { return 100; } return 200";
    setRegexLimitFactor(2);
    assertEquals(Integer.valueOf(100), (Integer) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}

/** The match operator (==~) is subject to the regex circuit breaker. */
public void testRegexInjectMatchOperator() {
    String source = "if (" + charSequence + " ==~ " + pattern + ") { return 100; } return 200";
    setRegexLimitFactor(2);
    assertEquals(Integer.valueOf(100), (Integer) exec(source));
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
}

/**
 * The circuit-breaker message quotes only a truncated snippet (the first 61
 * characters plus an ellipsis) of the offending input.
 */
public void testSnippetRegex() {
    String longInput = "abcdef123456".repeat(100);
    String source = "if ('" + longInput + "' ==~ " + pattern + ") { return 100; } return 200";
    setRegexLimitFactor(1);
    CircuitBreakingException breaker = expectScriptThrows(CircuitBreakingException.class, () -> exec(source));
    assertTrue(breaker.getMessage().contains(regexCircuitMessage));
    assertTrue(breaker.getMessage().contains(longInput.subSequence(0, 61) + "..."));
}
/** Replaces the script engine with one whose regex limit factor is {@code factor}. */
private void setRegexLimitFactor(int factor) {
    Settings limited = Settings.builder().put(CompilerSettings.REGEX_LIMIT_FACTOR.getKey(), factor).build();
    scriptEngine = new PainlessScriptEngine(limited, scriptContexts());
}

/** Replaces the script engine with one that has regex support enabled outright. */
private void setRegexEnabled() {
    Settings enabled = Settings.builder().put(CompilerSettings.REGEX_ENABLED.getKey(), "true").build();
    scriptEngine = new PainlessScriptEngine(enabled, scriptContexts());
}
}
| |
package smartfood.mobile;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.event.*;
import javax.swing.text.*;
/**
* The TextPrompt class will display a prompt over top of a text component when
* the Document of the text field is empty. The Show property is used to
* determine the visibility of the prompt.
*
* The Font and foreground Color of the prompt will default to those properties
* of the parent text component. You are free to change the properties after
* class construction.
*/
public class TextPrompt extends JLabel
        implements FocusListener, DocumentListener {

    /** Policy controlling when the prompt may be displayed. */
    public enum Show {
        /** Show the prompt whenever the component is empty (default). */
        ALWAYS,
        /** Show the prompt only while the component has focus. */
        FOCUS_GAINED,
        /** Show the prompt only while the component lacks focus. */
        FOCUS_LOST;
    }

    private final JTextComponent textComponent; // component the prompt overlays
    private final Document document;            // its Document, watched for content changes
    private Show show;                          // focus-based visibility policy
    private boolean showPromptOnce;             // when true, never show again after first focus loss
    private int focusLostCount;                 // how many times focus has been lost

    /**
     * Creates a prompt that is shown whenever the component is empty
     * (Show.ALWAYS).
     *
     * @param text      the prompt text
     * @param component the text component the prompt overlays
     */
    public TextPrompt(String text, JTextComponent component) {
        this(text, component, Show.ALWAYS);
    }

    /**
     * Creates a prompt with an explicit visibility policy. The prompt adopts
     * the component's font, foreground color and insets, registers itself as
     * focus and document listener, and installs itself inside the component.
     *
     * @param text      the prompt text
     * @param component the text component the prompt overlays
     * @param show      when the prompt should be visible
     */
    public TextPrompt(String text, JTextComponent component, Show show) {
        this.textComponent = component;
        setShow(show);
        document = component.getDocument();

        setText(text);
        setFont(component.getFont());
        setForeground(component.getForeground());
        setBorder(new EmptyBorder(component.getInsets()));
        setHorizontalAlignment(JLabel.LEADING);

        component.addFocusListener(this);
        document.addDocumentListener(this);

        component.setLayout(new BorderLayout());
        component.add(this);
        refreshVisibility();
    }

    /**
     * Convenience method to change the alpha value of the current foreground
     * Color to the specified value.
     *
     * @param alpha value in the range of 0 - 1.0
     */
    public void changeAlpha(float alpha) {
        changeAlpha((int) (alpha * 255));
    }

    /**
     * Convenience method to change the alpha value of the current foreground
     * Color to the specified value. Out-of-range values are clamped to
     * [0, 255].
     *
     * @param alpha value in the range of 0 - 255
     */
    public void changeAlpha(int alpha) {
        int clamped = Math.max(0, Math.min(255, alpha));
        Color base = getForeground();
        super.setForeground(new Color(base.getRed(), base.getGreen(), base.getBlue(), clamped));
    }

    /**
     * Convenience method to change the style of the current Font. Style
     * values are found in the Font class; common values are Font.BOLD,
     * Font.ITALIC and Font.BOLD + Font.ITALIC.
     *
     * @param style value representing the new style of the Font
     */
    public void changeStyle(int style) {
        setFont(getFont().deriveFont(style));
    }

    /**
     * Get the Show property.
     *
     * @return the Show property
     */
    public Show getShow() {
        return show;
    }

    /**
     * Set the Show property to control when the prompt is shown:
     * Show.ALWAYS (default), Show.FOCUS_GAINED (only while focused) or
     * Show.FOCUS_LOST (only while unfocused).
     *
     * @param show a valid Show enum
     */
    public void setShow(Show show) {
        this.show = show;
    }

    /**
     * Get the showPromptOnce property.
     *
     * @return the showPromptOnce property
     */
    public boolean getShowPromptOnce() {
        return showPromptOnce;
    }

    /**
     * When enabled, the prompt is hidden permanently once the component has
     * lost focus for the first time.
     *
     * @param showPromptOnce true to show the prompt only once,
     *                       false to show it repeatedly
     */
    public void setShowPromptOnce(boolean showPromptOnce) {
        this.showPromptOnce = showPromptOnce;
    }

    /**
     * Recomputes whether the prompt should be visible, based on document
     * content, the showPromptOnce flag, focus state and the Show policy.
     */
    private void refreshVisibility() {
        // Any entered text hides the prompt unconditionally.
        if (document.getLength() > 0) {
            setVisible(false);
            return;
        }
        // Honor one-shot mode after the first focus loss.
        if (showPromptOnce && focusLostCount > 0) {
            setVisible(false);
            return;
        }
        boolean visible = textComponent.hasFocus()
                ? (show == Show.ALWAYS || show == Show.FOCUS_GAINED)
                : (show == Show.ALWAYS || show == Show.FOCUS_LOST);
        setVisible(visible);
    }

    // FocusListener ----------------------------------------------------------

    public void focusGained(FocusEvent e) {
        refreshVisibility();
    }

    public void focusLost(FocusEvent e) {
        focusLostCount++;
        refreshVisibility();
    }

    // DocumentListener -------------------------------------------------------

    public void insertUpdate(DocumentEvent e) {
        refreshVisibility();
    }

    public void removeUpdate(DocumentEvent e) {
        refreshVisibility();
    }

    public void changedUpdate(DocumentEvent e) {
        // Attribute-only changes cannot affect emptiness; nothing to do.
    }
}
| |
package com.sequenceiq.cloudbreak.core.flow2.stack.upscale;
import static com.sequenceiq.cloudbreak.core.flow2.stack.upscale.AbstractStackUpscaleAction.HOST_GROUP_WITH_HOSTNAMES;
import static com.sequenceiq.cloudbreak.core.flow2.stack.upscale.AbstractStackUpscaleAction.NETWORK_SCALE_DETAILS;
import static com.sequenceiq.cloudbreak.core.flow2.stack.upscale.AbstractStackUpscaleAction.REPAIR;
import static com.sequenceiq.cloudbreak.core.flow2.stack.upscale.AbstractStackUpscaleAction.TRIGGERED_VARIANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.ArgumentMatchers.isNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.statemachine.action.Action;
import org.springframework.test.util.ReflectionTestUtils;
import com.sequenceiq.cloudbreak.cloud.context.CloudContext;
import com.sequenceiq.cloudbreak.cloud.event.model.EventStatus;
import com.sequenceiq.cloudbreak.cloud.event.resource.UpscaleStackValidationRequest;
import com.sequenceiq.cloudbreak.cloud.event.resource.UpscaleStackValidationResult;
import com.sequenceiq.cloudbreak.cloud.model.CloudCredential;
import com.sequenceiq.cloudbreak.cloud.model.CloudResourceStatus;
import com.sequenceiq.cloudbreak.cloud.model.CloudStack;
import com.sequenceiq.cloudbreak.cloud.model.ResourceStatus;
import com.sequenceiq.cloudbreak.converter.spi.StackToCloudStackConverter;
import com.sequenceiq.cloudbreak.core.flow2.dto.NetworkScaleDetails;
import com.sequenceiq.cloudbreak.core.flow2.event.StackScaleTriggerEvent;
import com.sequenceiq.cloudbreak.core.flow2.stack.downscale.StackScalingFlowContext;
import com.sequenceiq.cloudbreak.domain.stack.Stack;
import com.sequenceiq.cloudbreak.reactor.api.event.stack.UpscaleStackResult;
import com.sequenceiq.cloudbreak.service.resource.ResourceService;
import com.sequenceiq.cloudbreak.service.stack.InstanceMetaDataService;
import com.sequenceiq.common.api.adjustment.AdjustmentTypeWithThreshold;
import com.sequenceiq.common.api.type.AdjustmentType;
import com.sequenceiq.flow.core.AbstractActionTestSupport;
import com.sequenceiq.flow.core.FlowParameters;
import com.sequenceiq.flow.core.FlowRegister;
import com.sequenceiq.flow.reactor.ErrorHandlerAwareReactorEventFactory;
import reactor.bus.Event;
import reactor.bus.EventBus;
/**
 * Unit tests for the prevalidate action of the stack upscale flow
 * ({@code StackUpscaleActions#prevalidate()}): verifies which downstream
 * event is sent on the event bus depending on whether scaling is needed and
 * allowed, and how the flow variables are prepared from the trigger event.
 */
@ExtendWith(MockitoExtension.class)
class StackUpscaleActionsTest {
// Instance group targeted by the upscale in every test.
private static final String INSTANCE_GROUP_NAME = "worker";
// Positive node-count adjustment: scaling is requested.
private static final Integer ADJUSTMENT = 3;
// Zero adjustment: no scaling requested / permitted.
private static final Integer ADJUSTMENT_ZERO = 0;
private static final String SELECTOR = "selector";
private static final Long STACK_ID = 123L;
private static final String VARIANT = "VARIANT";
@Mock
private InstanceMetaDataService instanceMetaDataService;
@Mock
private StackUpscaleService stackUpscaleService;
@Mock
private StackToCloudStackConverter cloudStackConverter;
@Mock
private ResourceService resourceService;
@InjectMocks
private StackUpscaleActions underTest;
// Flow context; rebuilt in setUp() (and overridden by some tests).
private StackScalingFlowContext context;
@Mock
private FlowParameters flowParameters;
@Mock
private Stack stack;
@Mock
private CloudContext cloudContext;
@Mock
private CloudCredential cloudCredential;
@Mock
private CloudStack cloudStack;
@Mock
private FlowRegister runningFlows;
@Mock
private EventBus eventBus;
@Mock
private ErrorHandlerAwareReactorEventFactory reactorEventFactory;
@Mock
private Event<Object> event;
@Captor
private ArgumentCaptor<Object> payloadArgumentCaptor;
@Mock
private CloudResourceStatus cloudResourceStatus;
@BeforeEach
void setUp() {
// Default context: EXACT adjustment of +3 on the "worker" group, no repair.
context = new StackScalingFlowContext(flowParameters, stack, cloudContext, cloudCredential, cloudStack, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT),
Map.of(), Map.of(), false, new AdjustmentTypeWithThreshold(AdjustmentType.EXACT, ADJUSTMENT.longValue()));
}
// Builds the prevalidate action under test with its framework-managed
// collaborators (FlowRegister, EventBus, event factory) injected by hand.
private AbstractStackUpscaleAction<StackScaleTriggerEvent> getPrevalidateAction() {
AbstractStackUpscaleAction<StackScaleTriggerEvent> action = (AbstractStackUpscaleAction<StackScaleTriggerEvent>) underTest.prevalidate();
initActionPrivateFields(action);
return action;
}
// These fields are normally injected by Spring; set them via reflection
// since the action is created outside an application context here.
private void initActionPrivateFields(Action<?, ?> action) {
ReflectionTestUtils.setField(action, null, runningFlows, FlowRegister.class);
ReflectionTestUtils.setField(action, null, eventBus, EventBus.class);
ReflectionTestUtils.setField(action, null, reactorEventFactory, ErrorHandlerAwareReactorEventFactory.class);
}
// Note: this implicitly tests getPrevalidateAction().createRequest() as well.
@Test
void prevalidateTestDoExecuteWhenScalingNeededAndAllowed() throws Exception {
when(cloudContext.getId()).thenReturn(STACK_ID);
AdjustmentTypeWithThreshold adjustmentTypeWithThreshold = new AdjustmentTypeWithThreshold(AdjustmentType.EXACT, ADJUSTMENT.longValue());
StackScaleTriggerEvent payload = new StackScaleTriggerEvent(SELECTOR, STACK_ID, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT), Map.of(), Map.of(),
adjustmentTypeWithThreshold, VARIANT);
// Scaling is requested (+3) and fully permitted (+3 instances to create).
when(stackUpscaleService.getInstanceCountToCreate(stack, INSTANCE_GROUP_NAME, ADJUSTMENT, false)).thenReturn(ADJUSTMENT);
Stack updatedStack = mock(Stack.class);
when(instanceMetaDataService.saveInstanceAndGetUpdatedStack(stack, Map.of(INSTANCE_GROUP_NAME, 3), Map.of(), false, false,
context.getStackNetworkScaleDetails())).thenReturn(updatedStack);
CloudStack convertedCloudStack = mock(CloudStack.class);
when(cloudStackConverter.convert(updatedStack)).thenReturn(convertedCloudStack);
when(reactorEventFactory.createEvent(anyMap(), isNotNull())).thenReturn(event);
new AbstractActionTestSupport<>(getPrevalidateAction()).doExecute(context, payload, Map.of());
verify(stackUpscaleService).addInstanceFireEventAndLog(stack, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT), adjustmentTypeWithThreshold);
verify(stackUpscaleService).startAddInstances(stack, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT));
// When scaling proceeds, a validation request must be sent on the bus,
// carrying the converted (post-save) cloud stack.
verify(reactorEventFactory).createEvent(anyMap(), payloadArgumentCaptor.capture());
verify(eventBus).notify("UPSCALESTACKVALIDATIONREQUEST", event);
Object responsePayload = payloadArgumentCaptor.getValue();
assertThat(responsePayload).isInstanceOf(UpscaleStackValidationRequest.class);
UpscaleStackValidationRequest<UpscaleStackValidationResult> upscaleStackValidationRequest =
(UpscaleStackValidationRequest<UpscaleStackValidationResult>) responsePayload;
assertThat(upscaleStackValidationRequest.getResourceId()).isEqualTo(STACK_ID);
assertThat(upscaleStackValidationRequest.getCloudContext()).isSameAs(cloudContext);
assertThat(upscaleStackValidationRequest.getCloudStack()).isSameAs(convertedCloudStack);
assertThat(upscaleStackValidationRequest.getCloudCredential()).isSameAs(cloudCredential);
}
@Test
void prevalidateTestDoExecuteWhenScalingNeededAndNotAllowed() throws Exception {
AdjustmentTypeWithThreshold adjustmentTypeWithThreshold = new AdjustmentTypeWithThreshold(AdjustmentType.EXACT, ADJUSTMENT.longValue());
StackScaleTriggerEvent payload = new StackScaleTriggerEvent(SELECTOR, STACK_ID, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT), Map.of(), Map.of(),
adjustmentTypeWithThreshold, VARIANT);
// Scaling is requested (+3) but no instances may actually be created.
when(stackUpscaleService.getInstanceCountToCreate(stack, INSTANCE_GROUP_NAME, ADJUSTMENT, false)).thenReturn(ADJUSTMENT_ZERO);
List<CloudResourceStatus> resourceStatuses = List.of(cloudResourceStatus);
when(resourceService.getAllAsCloudResourceStatus(STACK_ID)).thenReturn(resourceStatuses);
when(reactorEventFactory.createEvent(anyMap(), isNotNull())).thenReturn(event);
new AbstractActionTestSupport<>(getPrevalidateAction()).doExecute(context, payload, Map.of());
verify(stackUpscaleService).addInstanceFireEventAndLog(stack, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT), adjustmentTypeWithThreshold);
verifyEventForUpscaleStackResult(resourceStatuses);
}
// Asserts that the flow short-circuits with a successful UpscaleStackResult
// (status OK, CREATED, existing resources) instead of a validation request.
private void verifyEventForUpscaleStackResult(List<CloudResourceStatus> resourceStatuses) {
verify(reactorEventFactory).createEvent(anyMap(), payloadArgumentCaptor.capture());
verify(eventBus).notify("UPSCALESTACKRESULT", event);
Object responsePayload = payloadArgumentCaptor.getValue();
assertThat(responsePayload).isInstanceOf(UpscaleStackResult.class);
UpscaleStackResult upscaleStackResult = (UpscaleStackResult) responsePayload;
assertThat(upscaleStackResult.getResourceId()).isEqualTo(STACK_ID);
assertThat(upscaleStackResult.getResourceStatus()).isEqualTo(ResourceStatus.CREATED);
assertThat(upscaleStackResult.getResults()).isEqualTo(resourceStatuses);
assertThat(upscaleStackResult.getStatus()).isEqualTo(EventStatus.OK);
assertThat(upscaleStackResult.getStatusReason()).isNull();
assertThat(upscaleStackResult.getErrorDetails()).isNull();
}
@Test
void prevalidateTestDoExecuteWhenScalingNotNeeded() throws Exception {
AdjustmentTypeWithThreshold adjustmentTypeWithThreshold = new AdjustmentTypeWithThreshold(AdjustmentType.EXACT, ADJUSTMENT_ZERO.longValue());
// Override the default context with a zero-adjustment one.
context = new StackScalingFlowContext(flowParameters, stack, cloudContext, cloudCredential, cloudStack, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT_ZERO),
Map.of(), Map.of(), false, adjustmentTypeWithThreshold);
StackScaleTriggerEvent payload = new StackScaleTriggerEvent(SELECTOR, STACK_ID, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT_ZERO), Map.of(), Map.of(),
adjustmentTypeWithThreshold, VARIANT);
when(stackUpscaleService.getInstanceCountToCreate(stack, INSTANCE_GROUP_NAME, ADJUSTMENT_ZERO, false)).thenReturn(ADJUSTMENT_ZERO);
List<CloudResourceStatus> resourceStatuses = List.of(cloudResourceStatus);
when(resourceService.getAllAsCloudResourceStatus(STACK_ID)).thenReturn(resourceStatuses);
when(reactorEventFactory.createEvent(anyMap(), isNotNull())).thenReturn(event);
new AbstractActionTestSupport<>(getPrevalidateAction()).doExecute(context, payload, Map.of());
verify(stackUpscaleService).addInstanceFireEventAndLog(stack, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT_ZERO), adjustmentTypeWithThreshold);
verifyEventForUpscaleStackResult(resourceStatuses);
}
@Test
void prevalidateTestCreateContextWhenTriggeredVariantSet() {
NetworkScaleDetails networkScaleDetails = new NetworkScaleDetails();
StackScaleTriggerEvent payload = new StackScaleTriggerEvent(SELECTOR, STACK_ID, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT_ZERO), Map.of(),
Map.of(INSTANCE_GROUP_NAME, Set.of("hostname")), networkScaleDetails, null, VARIANT);
Map<Object, Object> variables = new HashMap<>();
new AbstractActionTestSupport<>(getPrevalidateAction()).prepareExecution(payload, variables);
// prepareExecution must copy every trigger attribute into the flow variables.
Assertions.assertEquals(Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT_ZERO), variables.get(AbstractStackUpscaleAction.HOST_GROUP_WITH_ADJUSTMENT));
Assertions.assertEquals(Map.of(INSTANCE_GROUP_NAME, Set.of("hostname")), variables.get(HOST_GROUP_WITH_HOSTNAMES));
Assertions.assertEquals(false, variables.get(REPAIR));
Assertions.assertEquals(VARIANT, variables.get(TRIGGERED_VARIANT));
Assertions.assertEquals(networkScaleDetails, variables.get(NETWORK_SCALE_DETAILS));
}
@Test
void prevalidateTestCreateContextWhenTriggeredVariantNotSet() {
NetworkScaleDetails networkScaleDetails = new NetworkScaleDetails();
StackScaleTriggerEvent payload = new StackScaleTriggerEvent(SELECTOR, STACK_ID, Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT_ZERO), Map.of(),
Map.of(INSTANCE_GROUP_NAME, Set.of("hostname")),
networkScaleDetails, null, null);
Map<Object, Object> variables = new HashMap<>();
new AbstractActionTestSupport<>(getPrevalidateAction()).prepareExecution(payload, variables);
Assertions.assertEquals(Map.of(INSTANCE_GROUP_NAME, ADJUSTMENT_ZERO), variables.get(AbstractStackUpscaleAction.HOST_GROUP_WITH_ADJUSTMENT));
Assertions.assertEquals(Map.of(INSTANCE_GROUP_NAME, Set.of("hostname")), variables.get(HOST_GROUP_WITH_HOSTNAMES));
Assertions.assertEquals(false, variables.get(REPAIR));
// A null variant in the trigger must not put a TRIGGERED_VARIANT variable.
Assertions.assertNull(variables.get(TRIGGERED_VARIANT));
Assertions.assertEquals(networkScaleDetails, variables.get(NETWORK_SCALE_DETAILS));
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.apigateway.model;
import java.io.Serializable;
import java.util.Objects;
/**
 * <p>
 * Represents the base path that callers of the API must provide as part of
 * the URL after the domain name.
 * </p>
 */
public class GetBasePathMappingResult implements Serializable, Cloneable {
/**
* <p>
* The base path name that callers of the API must provide as part of the
* URL after the domain name.
* </p>
*/
private String basePath;
/**
* <p>
* The name of the API.
* </p>
*/
private String restApiId;
/**
* <p>
* The name of the API's stage.
* </p>
*/
private String stage;
/**
* <p>
* The base path name that callers of the API must provide as part of the
* URL after the domain name.
* </p>
*
* @param basePath
* The base path name that callers of the API must provide as part of
* the URL after the domain name.
*/
public void setBasePath(String basePath) {
this.basePath = basePath;
}
/**
* <p>
* The base path name that callers of the API must provide as part of the
* URL after the domain name.
* </p>
*
* @return The base path name that callers of the API must provide as part
* of the URL after the domain name.
*/
public String getBasePath() {
return this.basePath;
}
/**
* <p>
* The base path name that callers of the API must provide as part of the
* URL after the domain name.
* </p>
*
* @param basePath
* The base path name that callers of the API must provide as part of
* the URL after the domain name.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public GetBasePathMappingResult withBasePath(String basePath) {
setBasePath(basePath);
return this;
}
/**
* <p>
* The name of the API.
* </p>
*
* @param restApiId
* The name of the API.
*/
public void setRestApiId(String restApiId) {
this.restApiId = restApiId;
}
/**
* <p>
* The name of the API.
* </p>
*
* @return The name of the API.
*/
public String getRestApiId() {
return this.restApiId;
}
/**
* <p>
* The name of the API.
* </p>
*
* @param restApiId
* The name of the API.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public GetBasePathMappingResult withRestApiId(String restApiId) {
setRestApiId(restApiId);
return this;
}
/**
* <p>
* The name of the API's stage.
* </p>
*
* @param stage
* The name of the API's stage.
*/
public void setStage(String stage) {
this.stage = stage;
}
/**
* <p>
* The name of the API's stage.
* </p>
*
* @return The name of the API's stage.
*/
public String getStage() {
return this.stage;
}
/**
* <p>
* The name of the API's stage.
* </p>
*
* @param stage
* The name of the API's stage.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public GetBasePathMappingResult withStage(String stage) {
setStage(stage);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getBasePath() != null)
sb.append("BasePath: " + getBasePath() + ",");
if (getRestApiId() != null)
sb.append("RestApiId: " + getRestApiId() + ",");
if (getStage() != null)
sb.append("Stage: " + getStage());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof GetBasePathMappingResult == false)
return false;
GetBasePathMappingResult other = (GetBasePathMappingResult) obj;
if (other.getBasePath() == null ^ this.getBasePath() == null)
return false;
if (other.getBasePath() != null
&& other.getBasePath().equals(this.getBasePath()) == false)
return false;
if (other.getRestApiId() == null ^ this.getRestApiId() == null)
return false;
if (other.getRestApiId() != null
&& other.getRestApiId().equals(this.getRestApiId()) == false)
return false;
if (other.getStage() == null ^ this.getStage() == null)
return false;
if (other.getStage() != null
&& other.getStage().equals(this.getStage()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getBasePath() == null) ? 0 : getBasePath().hashCode());
hashCode = prime * hashCode
+ ((getRestApiId() == null) ? 0 : getRestApiId().hashCode());
hashCode = prime * hashCode
+ ((getStage() == null) ? 0 : getStage().hashCode());
return hashCode;
}
@Override
public GetBasePathMappingResult clone() {
try {
return (GetBasePathMappingResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!", e);
}
}
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.entity.projectile;
import com.flowpowered.math.vector.Vector3d;
import com.google.common.collect.Maps;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.item.EntityArmorStand;
import net.minecraft.entity.item.EntityEnderPearl;
import net.minecraft.entity.item.EntityExpBottle;
import net.minecraft.entity.item.EntityFireworkRocket;
import net.minecraft.entity.passive.EntityLlama;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.projectile.EntityDragonFireball;
import net.minecraft.entity.projectile.EntityEgg;
import net.minecraft.entity.projectile.EntityFishHook;
import net.minecraft.entity.projectile.EntityLargeFireball;
import net.minecraft.entity.projectile.EntityLlamaSpit;
import net.minecraft.entity.projectile.EntityPotion;
import net.minecraft.entity.projectile.EntitySmallFireball;
import net.minecraft.entity.projectile.EntitySnowball;
import net.minecraft.entity.projectile.EntityThrowable;
import net.minecraft.entity.projectile.EntityTippedArrow;
import net.minecraft.entity.projectile.EntityWitherSkull;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntityDispenser;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.math.Vec3d;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.block.tileentity.carrier.Dispenser;
import org.spongepowered.api.data.key.Keys;
import org.spongepowered.api.entity.Entity;
import org.spongepowered.api.entity.EntityType;
import org.spongepowered.api.entity.projectile.Egg;
import org.spongepowered.api.entity.projectile.EnderPearl;
import org.spongepowered.api.entity.projectile.EyeOfEnder;
import org.spongepowered.api.entity.projectile.Firework;
import org.spongepowered.api.entity.projectile.FishHook;
import org.spongepowered.api.entity.projectile.LlamaSpit;
import org.spongepowered.api.entity.projectile.Projectile;
import org.spongepowered.api.entity.projectile.Snowball;
import org.spongepowered.api.entity.projectile.ThrownExpBottle;
import org.spongepowered.api.entity.projectile.ThrownPotion;
import org.spongepowered.api.entity.projectile.arrow.TippedArrow;
import org.spongepowered.api.entity.projectile.explosive.DragonFireball;
import org.spongepowered.api.entity.projectile.explosive.WitherSkull;
import org.spongepowered.api.entity.projectile.explosive.fireball.LargeFireball;
import org.spongepowered.api.entity.projectile.explosive.fireball.SmallFireball;
import org.spongepowered.api.entity.projectile.source.ProjectileSource;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.entity.projectile.LaunchProjectileEvent;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.extent.Extent;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.event.SpongeCommonEventFactory;
import org.spongepowered.common.registry.type.entity.EntityTypeRegistryModule;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
public class ProjectileLauncher {
private static final Map<Class<? extends Projectile>, ProjectileLogic<?>> projectileLogic = Maps.newHashMap();
private static final Map<Class<? extends ProjectileSource>, ProjectileSourceLogic<?>> projectileSourceLogic = Maps.newHashMap();
public static <T extends Projectile> Optional<T> launch(Class<T> projectileClass, ProjectileSource source, Vector3d vel) {
ProjectileLogic<T> logic = getLogic(projectileClass);
if (logic == null) {
return Optional.empty();
}
Optional<T> projectile = logic.launch(source);
projectile.ifPresent(t -> {
if (vel != null) {
t.offer(Keys.VELOCITY, vel);
}
t.setShooter(source);
});
if (projectile.isPresent()) {
if (vel != null) {
projectile.get().offer(Keys.VELOCITY, vel);
}
projectile.get().setShooter(source);
}
return projectile;
}
// From EntityThrowable constructor
private static void configureThrowable(EntityThrowable entity) {
entity.posX -= MathHelper.cos(entity.rotationYaw / 180.0F * (float) Math.PI) * 0.16F;
entity.posY -= 0.1D;
entity.posZ -= MathHelper.sin(entity.rotationYaw / 180.0F * (float) Math.PI) * 0.16F;
entity.setPosition(entity.posX, entity.posY, entity.posZ);
float f = 0.4F;
entity.motionX = -MathHelper.sin(entity.rotationYaw / 180.0F * (float) Math.PI)
* MathHelper.cos(entity.rotationPitch / 180.0F * (float) Math.PI) * f;
entity.motionZ = MathHelper.cos(entity.rotationYaw / 180.0F * (float) Math.PI)
* MathHelper.cos(entity.rotationPitch / 180.0F * (float) Math.PI) * f;
entity.motionY = -MathHelper.sin((entity.rotationPitch) / 180.0F * (float) Math.PI) * f;
}
public static <T extends Projectile> void registerProjectileLogic(Class<T> projectileClass, ProjectileLogic<T> logic) {
projectileLogic.put(projectileClass, logic);
}
public static <T extends ProjectileSource> void registerProjectileSourceLogic(Class<T> projectileSourceClass, ProjectileSourceLogic<T> logic) {
projectileSourceLogic.put(projectileSourceClass, logic);
}
@SuppressWarnings("unchecked")
static <T extends ProjectileSource> ProjectileSourceLogic<T> getSourceLogic(Class<T> sourceClass) {
return (ProjectileSourceLogic<T>) projectileSourceLogic.get(sourceClass);
}
@SuppressWarnings("unchecked")
private static <T extends Projectile> ProjectileLogic<T> getLogic(Class<T> sourceClass) {
// If a concrete class is handed to us, find the API interface
if (!sourceClass.isInterface() && net.minecraft.entity.Entity.class.isAssignableFrom(sourceClass)) {
for (Class<?> iface : sourceClass.getInterfaces()) {
if (Projectile.class.isAssignableFrom(iface)) {
sourceClass = (Class<T>) iface;
break;
}
}
}
return (ProjectileLogic<T>) projectileLogic.get(sourceClass);
}
@SuppressWarnings("unchecked")
static <P extends Projectile> Optional<P> defaultLaunch(ProjectileSource source, Class<P> projectileClass, Location<?> loc) {
Optional<EntityType> opType = EntityTypeRegistryModule.getInstance().getEntity(projectileClass);
if (!opType.isPresent()) {
return Optional.empty();
}
Entity projectile = loc.getExtent().createEntity(opType.get(), loc.getPosition());
if (projectile instanceof EntityThrowable) {
configureThrowable((EntityThrowable) projectile);
}
return doLaunch(loc.getExtent(), (P) projectile);
}
static <P extends Projectile> Optional<P> doLaunch(Extent extent, P projectile) {
LaunchProjectileEvent event = SpongeEventFactory.createLaunchProjectileEvent(Sponge.getCauseStackManager().getCurrentCause(), projectile);
SpongeImpl.getGame().getEventManager().post(event);
if (!event.isCancelled() && extent.spawnEntity(projectile)) {
return Optional.of(projectile);
}
return Optional.empty();
}
static {
registerProjectileSourceLogic(Dispenser.class, new DispenserSourceLogic());
registerProjectileLogic(TippedArrow.class, new SimpleItemLaunchLogic<TippedArrow>(TippedArrow.class, Items.ARROW) {
@Override
protected Optional<TippedArrow> createProjectile(EntityLivingBase source, Location<?> loc) {
TippedArrow arrow = (TippedArrow) new EntityTippedArrow(source.world, source);
((EntityTippedArrow) arrow).shoot(source, source.rotationPitch, source.rotationYaw, 0.0F, 3.0F, 0);
return doLaunch(loc.getExtent(), arrow);
}
});
registerProjectileLogic(Egg.class, new SimpleItemLaunchLogic<Egg>(Egg.class, Items.EGG) {
@Override
protected Optional<Egg> createProjectile(EntityLivingBase source, Location<?> loc) {
Egg egg = (Egg) new EntityEgg(source.world, source);
((EntityThrowable) egg).shoot(source, source.rotationPitch, source.rotationYaw, 0.0F, 1.5F, 0);
return doLaunch(loc.getExtent(), egg);
}
});
registerProjectileLogic(SmallFireball.class, new SimpleItemLaunchLogic<SmallFireball>(SmallFireball.class, Items.FIRE_CHARGE) {
@Override
protected Optional<SmallFireball> createProjectile(EntityLivingBase source, Location<?> loc) {
Vec3d lookVec = source.getLook(1);
SmallFireball fireball = (SmallFireball) new EntitySmallFireball(source.world, source,
lookVec.x * 4, lookVec.y * 4, lookVec.z * 4);
((EntitySmallFireball) fireball).posY += source.getEyeHeight();
return doLaunch(loc.getExtent(), fireball);
}
});
registerProjectileLogic(Firework.class, new SimpleItemLaunchLogic<Firework>(Firework.class, Items.FIREWORKS) {
@Override
protected Optional<Firework> createProjectile(EntityLivingBase source, Location<?> loc) {
Firework firework = (Firework) new EntityFireworkRocket(source.world, loc.getX(), loc.getY(), loc.getZ(), ItemStack.EMPTY);
return doLaunch(loc.getExtent(), firework);
}
});
registerProjectileLogic(Snowball.class, new SimpleItemLaunchLogic<Snowball>(Snowball.class, Items.SNOWBALL) {
@Override
protected Optional<Snowball> createProjectile(EntityLivingBase source, Location<?> loc) {
Snowball snowball = (Snowball) new EntitySnowball(source.world, source);
((EntityThrowable) snowball).shoot(source, source.rotationPitch, source.rotationYaw, 0.0F, 1.5F, 0);
return doLaunch(loc.getExtent(), snowball);
}
});
registerProjectileLogic(ThrownExpBottle.class, new SimpleItemLaunchLogic<ThrownExpBottle>(ThrownExpBottle.class, Items.EXPERIENCE_BOTTLE) {
@Override
protected Optional<ThrownExpBottle> createProjectile(EntityLivingBase source, Location<?> loc) {
ThrownExpBottle expBottle = (ThrownExpBottle) new EntityExpBottle(source.world, source);
((EntityThrowable) expBottle).shoot(source, source.rotationPitch, source.rotationYaw, -20.0F, 0.7F, 0);
return doLaunch(loc.getExtent(), expBottle);
}
});
registerProjectileLogic(EnderPearl.class, new SimpleDispenserLaunchLogic<EnderPearl>(EnderPearl.class) {
@Override
protected Optional<EnderPearl> createProjectile(EntityLivingBase source, Location<?> loc) {
EnderPearl pearl = (EnderPearl) new EntityEnderPearl(source.world, source);
((EntityThrowable) pearl).shoot(source, source.rotationPitch, source.rotationYaw, 0.0F, 1.5F, 0);
return doLaunch(loc.getExtent(), pearl);
}
});
registerProjectileLogic(LargeFireball.class, new SimpleDispenserLaunchLogic<LargeFireball>(LargeFireball.class) {
@Override
protected Optional<LargeFireball> createProjectile(EntityLivingBase source, Location<?> loc) {
Vec3d lookVec = source.getLook(1);
LargeFireball fireball = (LargeFireball) new EntityLargeFireball(source.world, source,
lookVec.x * 4, lookVec.y * 4, lookVec.z * 4);
((EntityLargeFireball) fireball).posY += source.getEyeHeight();
return doLaunch(loc.getExtent(), fireball);
}
@Override
public Optional<LargeFireball> createProjectile(ProjectileSource source, Class<LargeFireball> projectileClass, Location<?> loc) {
if (!(source instanceof TileEntityDispenser)) {
return super.createProjectile(source, projectileClass, loc);
}
TileEntityDispenser dispenser = (TileEntityDispenser) source;
EnumFacing enumfacing = DispenserSourceLogic.getFacing(dispenser);
Random random = dispenser.getWorld().rand;
double d3 = random.nextGaussian() * 0.05D + enumfacing.getFrontOffsetX();
double d4 = random.nextGaussian() * 0.05D + enumfacing.getFrontOffsetY();
double d5 = random.nextGaussian() * 0.05D + enumfacing.getFrontOffsetZ();
EntityLivingBase thrower = new EntityArmorStand(dispenser.getWorld(), loc.getX() + enumfacing.getFrontOffsetX(),
loc.getY() + enumfacing.getFrontOffsetY(), loc.getZ() + enumfacing.getFrontOffsetZ());
LargeFireball fireball = (LargeFireball) new EntityLargeFireball(dispenser.getWorld(), thrower, d3, d4, d5);
return doLaunch(loc.getExtent(), fireball);
}
});
registerProjectileLogic(WitherSkull.class, new SimpleDispenserLaunchLogic<WitherSkull>(WitherSkull.class) {
@Override
protected Optional<WitherSkull> createProjectile(EntityLivingBase source, Location<?> loc) {
Vec3d lookVec = source.getLook(1);
WitherSkull skull = (WitherSkull) new EntityWitherSkull(source.world, source,
lookVec.x * 4, lookVec.y * 4, lookVec.z * 4);
((EntityWitherSkull) skull).posY += source.getEyeHeight();
return doLaunch(loc.getExtent(), skull);
}
});
registerProjectileLogic(EyeOfEnder.class, new SimpleDispenserLaunchLogic<>(EyeOfEnder.class));
registerProjectileLogic(FishHook.class, new SimpleDispenserLaunchLogic<FishHook>(FishHook.class) {
@Override
protected Optional<FishHook> createProjectile(EntityLivingBase source, Location<?> loc) {
if (source instanceof EntityPlayer) {
FishHook hook = (FishHook) new EntityFishHook(source.world, (EntityPlayer) source);
return doLaunch(loc.getExtent(), hook);
}
return super.createProjectile(source, loc);
}
});
registerProjectileLogic(ThrownPotion.class, new SimpleDispenserLaunchLogic<ThrownPotion>(ThrownPotion.class) {
@Override
protected Optional<ThrownPotion> createProjectile(EntityLivingBase source, Location<?> loc) {
ThrownPotion potion = (ThrownPotion) new EntityPotion(source.world, source, new ItemStack(Items.SPLASH_POTION, 1));
((EntityThrowable) potion).shoot(source, source.rotationPitch, source.rotationYaw, -20.0F, 0.5F, 0);
return doLaunch(loc.getExtent(), potion);
}
});
registerProjectileLogic(LlamaSpit.class, new SimpleEntityLaunchLogic<LlamaSpit>(LlamaSpit.class) {
@Override
public Optional<LlamaSpit> launch(ProjectileSource source) {
if (!(source instanceof EntityLlama)) {
return Optional.empty();
}
return super.launch(source);
}
@Override
public Optional<LlamaSpit> createProjectile(ProjectileSource source, Class<LlamaSpit> projectileClass, Location<?> loc) {
EntityLlama llama = (EntityLlama) source;
LlamaSpit llamaSpit = (LlamaSpit) new EntityLlamaSpit(llama.world, (EntityLlama) source);
Vec3d lookVec = llama.getLook(1);
((EntityLlamaSpit) llamaSpit).shoot(lookVec.x, lookVec.y, lookVec.z, 1.5F, 0);
return doLaunch(loc.getExtent(), llamaSpit);
}
});
registerProjectileLogic(DragonFireball.class, new SimpleDispenserLaunchLogic<DragonFireball>(DragonFireball.class) {
@Override
protected Optional<DragonFireball> createProjectile(EntityLivingBase source, Location<?> loc) {
Vec3d lookVec = source.getLook(1);
DragonFireball fireball = (DragonFireball) new EntityDragonFireball(source.world, source,
lookVec.x * 4, lookVec.y * 4, lookVec.z * 4);
((EntityDragonFireball) fireball).posY += source.getEyeHeight();
return doLaunch(loc.getExtent(), fireball);
}
});
}
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2015 Groupon, Inc
* Copyright 2014-2015 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.subscription.api.transfer;
import java.util.List;
import java.util.UUID;
import org.joda.time.DateTime;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.killbill.billing.catalog.MockCatalog;
import org.killbill.billing.catalog.MockCatalogService;
import org.killbill.billing.catalog.api.BillingPeriod;
import org.killbill.billing.catalog.api.CatalogService;
import org.killbill.billing.catalog.api.PhaseType;
import org.killbill.billing.catalog.api.PlanPhaseSpecifier;
import org.killbill.billing.catalog.api.PriceListSet;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.billing.subscription.SubscriptionTestSuiteNoDB;
import org.killbill.billing.subscription.api.SubscriptionBaseApiService;
import org.killbill.billing.subscription.api.SubscriptionBaseTransitionType;
import org.killbill.billing.subscription.api.timeline.SubscriptionBaseTimeline.ExistingEvent;
import org.killbill.billing.subscription.api.timeline.SubscriptionBaseTimelineApi;
import org.killbill.billing.subscription.api.user.DefaultSubscriptionBase;
import org.killbill.billing.subscription.api.user.SubscriptionBuilder;
import org.killbill.billing.subscription.engine.dao.SubscriptionDao;
import org.killbill.billing.subscription.events.SubscriptionBaseEvent;
import org.killbill.billing.subscription.events.SubscriptionBaseEvent.EventType;
import org.killbill.billing.subscription.events.user.ApiEventTransfer;
import org.killbill.billing.subscription.events.user.ApiEventType;
import org.killbill.billing.util.cache.CacheControllerDispatcher;
import org.killbill.billing.util.callcontext.InternalCallContextFactory;
import org.killbill.billing.util.dao.NonEntityDao;
import com.google.common.collect.ImmutableList;
// Simple unit tests for DefaultSubscriptionBaseTransferApi, see TestTransfer for more advanced tests with dao
/**
 * Fast (no-DB) unit tests for {@code DefaultSubscriptionBaseTransferApi#toEvents},
 * which computes the subscription events to recreate on a transferred bundle.
 * Collaborators (DAO, timeline API, API service) are Mockito mocks; only the
 * catalog is a real mock implementation.
 */
public class TestDefaultSubscriptionTransferApi extends SubscriptionTestSuiteNoDB {
    // System under test; rebuilt with fresh mocks before every test method.
    private DefaultSubscriptionBaseTransferApi transferApi;
    @Override
    @BeforeMethod(groups = "fast")
    public void beforeMethod() throws Exception {
        super.beforeMethod();
        // All collaborators except the catalog are plain mocks — toEvents() only
        // needs the catalog and the clock to do real work.
        final NonEntityDao nonEntityDao = Mockito.mock(NonEntityDao.class);
        final SubscriptionDao dao = Mockito.mock(SubscriptionDao.class);
        final CatalogService catalogService = new MockCatalogService(new MockCatalog(), cacheControllerDispatcher);
        final SubscriptionBaseApiService apiService = Mockito.mock(SubscriptionBaseApiService.class);
        final SubscriptionBaseTimelineApi timelineApi = Mockito.mock(SubscriptionBaseTimelineApi.class);
        final InternalCallContextFactory internalCallContextFactory = new InternalCallContextFactory(clock, nonEntityDao, new CacheControllerDispatcher());
        transferApi = new DefaultSubscriptionBaseTransferApi(clock, dao, timelineApi, catalogService, apiService, internalCallContextFactory);
    }
    /**
     * A subscription cancelled before the transfer date produces no events:
     * there is nothing to carry over to the new bundle.
     */
    @Test(groups = "fast")
    public void testEventsForCancelledSubscriptionBeforeTransfer() throws Exception {
        final DateTime subscriptionStartTime = clock.getUTCNow();
        final DateTime subscriptionCancelTime = subscriptionStartTime.plusDays(1);
        final ImmutableList<ExistingEvent> existingEvents = ImmutableList.<ExistingEvent>of(createEvent(subscriptionStartTime, SubscriptionBaseTransitionType.CREATE),
                                                                                            createEvent(subscriptionCancelTime, SubscriptionBaseTransitionType.CANCEL));
        final SubscriptionBuilder subscriptionBuilder = new SubscriptionBuilder();
        final DefaultSubscriptionBase subscription = new DefaultSubscriptionBase(subscriptionBuilder);
        // Transfer happens well after the cancellation.
        final DateTime transferDate = subscriptionStartTime.plusDays(10);
        final List<SubscriptionBaseEvent> events = transferApi.toEvents(existingEvents, subscription, transferDate, internalCallContext);
        Assert.assertEquals(events.size(), 0);
    }
    /**
     * A subscription still active at the transfer date (cancelled afterwards)
     * yields a single TRANSFER event effective on the transfer date.
     */
    @Test(groups = "fast")
    public void testEventsForCancelledSubscriptionAfterTransfer() throws Exception {
        final DateTime subscriptionStartTime = clock.getUTCNow();
        final DateTime subscriptionCancelTime = subscriptionStartTime.plusDays(1);
        final ImmutableList<ExistingEvent> existingEvents = ImmutableList.<ExistingEvent>of(createEvent(subscriptionStartTime, SubscriptionBaseTransitionType.CREATE),
                                                                                            createEvent(subscriptionCancelTime, SubscriptionBaseTransitionType.CANCEL));
        final SubscriptionBuilder subscriptionBuilder = new SubscriptionBuilder();
        final DefaultSubscriptionBase subscription = new DefaultSubscriptionBase(subscriptionBuilder);
        // Transfer happens one hour in — before the day-1 cancellation.
        final DateTime transferDate = subscriptionStartTime.plusHours(1);
        final List<SubscriptionBaseEvent> events = transferApi.toEvents(existingEvents, subscription, transferDate, internalCallContext);
        Assert.assertEquals(events.size(), 1);
        Assert.assertEquals(events.get(0).getType(), EventType.API_USER);
        Assert.assertEquals(events.get(0).getEffectiveDate(), transferDate);
        Assert.assertEquals(((ApiEventTransfer) events.get(0)).getApiEventType(), ApiEventType.TRANSFER);
    }
    /**
     * Migrated bundle with both migration events in the past: a single TRANSFER
     * event effective on the transfer date.
     */
    @Test(groups = "fast")
    public void testEventsAfterTransferForMigratedBundle1() throws Exception {
        // MIGRATE_ENTITLEMENT then MIGRATE_BILLING (both in the past)
        final DateTime transferDate = clock.getUTCNow();
        final DateTime migrateSubscriptionEventEffectiveDate = transferDate.minusDays(10);
        final DateTime migrateBillingEventEffectiveDate = migrateSubscriptionEventEffectiveDate.plusDays(1);
        final List<SubscriptionBaseEvent> events = transferBundle(migrateSubscriptionEventEffectiveDate, migrateBillingEventEffectiveDate, transferDate);
        Assert.assertEquals(events.size(), 1);
        Assert.assertEquals(events.get(0).getType(), EventType.API_USER);
        Assert.assertEquals(events.get(0).getEffectiveDate(), transferDate);
        Assert.assertEquals(((ApiEventTransfer) events.get(0)).getApiEventType(), ApiEventType.TRANSFER);
    }
    /**
     * Same as bundle1 but the two migration events are simultaneous; the
     * expected outcome is unchanged.
     */
    @Test(groups = "fast")
    public void testEventsAfterTransferForMigratedBundle2() throws Exception {
        // MIGRATE_ENTITLEMENT and MIGRATE_BILLING at the same time (both in the past)
        final DateTime transferDate = clock.getUTCNow();
        final DateTime migrateSubscriptionEventEffectiveDate = transferDate.minusDays(10);
        final DateTime migrateBillingEventEffectiveDate = migrateSubscriptionEventEffectiveDate;
        final List<SubscriptionBaseEvent> events = transferBundle(migrateSubscriptionEventEffectiveDate, migrateBillingEventEffectiveDate, transferDate);
        Assert.assertEquals(events.size(), 1);
        Assert.assertEquals(events.get(0).getType(), EventType.API_USER);
        Assert.assertEquals(events.get(0).getEffectiveDate(), transferDate);
        Assert.assertEquals(((ApiEventTransfer) events.get(0)).getApiEventType(), ApiEventType.TRANSFER);
    }
    /**
     * Entitlement migrated in the past, billing migration still in the future:
     * the TRANSFER event is still effective on the transfer date.
     */
    @Test(groups = "fast")
    public void testEventsAfterTransferForMigratedBundle3() throws Exception {
        // MIGRATE_ENTITLEMENT then MIGRATE_BILLING (the latter in the future)
        final DateTime transferDate = clock.getUTCNow();
        final DateTime migrateSubscriptionEventEffectiveDate = transferDate.minusDays(10);
        final DateTime migrateBillingEventEffectiveDate = migrateSubscriptionEventEffectiveDate.plusDays(20);
        final List<SubscriptionBaseEvent> events = transferBundle(migrateSubscriptionEventEffectiveDate, migrateBillingEventEffectiveDate, transferDate);
        Assert.assertEquals(events.size(), 1);
        Assert.assertEquals(events.get(0).getType(), EventType.API_USER);
        Assert.assertEquals(events.get(0).getEffectiveDate(), transferDate);
        Assert.assertEquals(((ApiEventTransfer) events.get(0)).getApiEventType(), ApiEventType.TRANSFER);
    }
    /**
     * Both migration events in the future: the TRANSFER event is deferred to
     * the (future) entitlement migration date rather than the transfer date.
     */
    @Test(groups = "fast")
    public void testEventsAfterTransferForMigratedBundle4() throws Exception {
        // MIGRATE_ENTITLEMENT then MIGRATE_BILLING (both in the future)
        final DateTime transferDate = clock.getUTCNow();
        final DateTime migrateSubscriptionEventEffectiveDate = transferDate.plusDays(10);
        final DateTime migrateBillingEventEffectiveDate = migrateSubscriptionEventEffectiveDate.plusDays(20);
        final List<SubscriptionBaseEvent> events = transferBundle(migrateSubscriptionEventEffectiveDate, migrateBillingEventEffectiveDate, transferDate);
        Assert.assertEquals(events.size(), 1);
        Assert.assertEquals(events.get(0).getType(), EventType.API_USER);
        Assert.assertEquals(events.get(0).getEffectiveDate(), migrateSubscriptionEventEffectiveDate);
        Assert.assertEquals(((ApiEventTransfer) events.get(0)).getApiEventType(), ApiEventType.TRANSFER);
    }
    /**
     * Runs toEvents() over a synthetic migrated bundle (one entitlement + one
     * billing migration event) and returns the resulting transfer events.
     */
    private List<SubscriptionBaseEvent> transferBundle(final DateTime migrateSubscriptionEventEffectiveDate, final DateTime migrateBillingEventEffectiveDate,
                                                       final DateTime transferDate) throws SubscriptionBaseTransferApiException {
        final ImmutableList<ExistingEvent> existingEvents = createMigrateEvents(migrateSubscriptionEventEffectiveDate, migrateBillingEventEffectiveDate);
        final SubscriptionBuilder subscriptionBuilder = new SubscriptionBuilder();
        final DefaultSubscriptionBase subscription = new DefaultSubscriptionBase(subscriptionBuilder);
        return transferApi.toEvents(existingEvents, subscription, transferDate, internalCallContext);
    }
    /**
     * Builds a stub ExistingEvent of the given transition type on a fixed test
     * plan. CANCEL events carry no phase name or plan-phase specifier, matching
     * how a real cancellation event looks.
     */
    private ExistingEvent createEvent(final DateTime eventEffectiveDate, final SubscriptionBaseTransitionType subscriptionTransitionType) {
        return new ExistingEvent() {
            @Override
            public DateTime getEffectiveDate() {
                return eventEffectiveDate;
            }
            @Override
            public String getPlanName() {
                return "BicycleTrialEvergreen1USD";
            }
            @Override
            public String getPlanPhaseName() {
                // Cancellations have no target phase.
                return SubscriptionBaseTransitionType.CANCEL.equals(subscriptionTransitionType) ? null : "BicycleTrialEvergreen1USD-trial";
            }
            @Override
            public UUID getEventId() {
                return UUID.randomUUID();
            }
            @Override
            public PlanPhaseSpecifier getPlanPhaseSpecifier() {
                return SubscriptionBaseTransitionType.CANCEL.equals(subscriptionTransitionType) ? null :
                       new PlanPhaseSpecifier("BicycleTrialEvergreen1USD", ProductCategory.BASE, BillingPeriod.NO_BILLING_PERIOD,
                                              PriceListSet.DEFAULT_PRICELIST_NAME, PhaseType.FIXEDTERM);
            }
            @Override
            public DateTime getRequestedDate() {
                return getEffectiveDate();
            }
            @Override
            public SubscriptionBaseTransitionType getSubscriptionTransitionType() {
                return subscriptionTransitionType;
            }
        };
    }
    /**
     * Builds the MIGRATE_ENTITLEMENT + MIGRATE_BILLING event pair for a migrated
     * bundle; the billing event mirrors the entitlement event except for its
     * effective date and transition type.
     */
    private ImmutableList<ExistingEvent> createMigrateEvents(final DateTime migrateSubscriptionEventEffectiveDate, final DateTime migrateBillingEventEffectiveDate) {
        final ExistingEvent migrateEntitlementEvent = new ExistingEvent() {
            @Override
            public DateTime getEffectiveDate() {
                return migrateSubscriptionEventEffectiveDate;
            }
            @Override
            public String getPlanName() {
                return "BicycleTrialEvergreen1USD";
            }
            @Override
            public String getPlanPhaseName() {
                return "BicycleTrialEvergreen1USD-trial";
            }
            @Override
            public UUID getEventId() {
                return UUID.randomUUID();
            }
            @Override
            public PlanPhaseSpecifier getPlanPhaseSpecifier() {
                return new PlanPhaseSpecifier("BicycleTrialEvergreen1USD", ProductCategory.BASE, BillingPeriod.NO_BILLING_PERIOD,
                                              PriceListSet.DEFAULT_PRICELIST_NAME, PhaseType.FIXEDTERM);
            }
            @Override
            public DateTime getRequestedDate() {
                return getEffectiveDate();
            }
            @Override
            public SubscriptionBaseTransitionType getSubscriptionTransitionType() {
                return SubscriptionBaseTransitionType.MIGRATE_ENTITLEMENT;
            }
        };
        final ExistingEvent migrateBillingEvent = new ExistingEvent() {
            @Override
            public DateTime getEffectiveDate() {
                return migrateBillingEventEffectiveDate;
            }
            @Override
            public String getPlanName() {
                return migrateEntitlementEvent.getPlanName();
            }
            @Override
            public String getPlanPhaseName() {
                return migrateEntitlementEvent.getPlanPhaseName();
            }
            @Override
            public UUID getEventId() {
                return UUID.randomUUID();
            }
            @Override
            public PlanPhaseSpecifier getPlanPhaseSpecifier() {
                return migrateEntitlementEvent.getPlanPhaseSpecifier();
            }
            @Override
            public DateTime getRequestedDate() {
                return migrateEntitlementEvent.getRequestedDate();
            }
            @Override
            public SubscriptionBaseTransitionType getSubscriptionTransitionType() {
                return SubscriptionBaseTransitionType.MIGRATE_BILLING;
            }
        };
        return ImmutableList.<ExistingEvent>of(migrateEntitlementEvent, migrateBillingEvent);
    }
}
| |
package Reckner;
import java.util.ArrayList;
import java.util.Arrays;
public class RecknerCommodity
{
private int code;
private String commodityTitle;
private Double commodityPrice;
private Double dm;
private Double ash;
private Double crudeProtein;
private Double crudeFibre;
private Double oil;
private Double ndf;
private Double adf;
private Double effectiveNdf;
private Double omd;
private Double pdia;
private Double pdin;
private Double pdie;
private Double starch;
private Double sugar;
private Double ufl;
private Double ufv;
private Double lysdi;
private Double methdi;
private Double ca;
private Double p;
private Double mg;
private Double na;
private Double cu;
private Double zn;
private Double mn;
private Double co;
private Double se;
private Double i;
private Double vitaminA;
private Double vitaminD;
private Double vitaminE;
private Double pal;
private Double me;
public RecknerCommodity(int code, String commodityTitle, Double commodityPrice, Double dm, Double ash, Double crudeProtein, Double crudeFibre, Double oil,
Double ndf, Double adf, Double effectiveNdf, Double omd, Double pdia, Double pdin, Double pdie, Double starch, Double sugar, Double ufl,
Double ufv, Double lysdi, Double methdi, Double ca, Double p, Double mg, Double na, Double cu, Double zn, Double mn, Double co, Double se,
Double i, Double vitaminA, Double vitaminD, Double vitaminE, Double pal, Double me)
{
this.code = code;
this.commodityTitle = commodityTitle;
this.commodityPrice = commodityPrice;
this.dm = dm;
this.ash = ash;
this.crudeProtein = crudeProtein;
this.crudeFibre = crudeFibre;
this.oil = oil;
this.ndf = ndf;
this.adf = adf;
this.effectiveNdf = effectiveNdf;
this.omd = omd;
this.pdia = pdia;
this.pdin = pdin;
this.pdie = pdie;
this.starch = starch;
this.sugar = sugar;
this.ufl = ufl;
this.ufv = ufv;
this.lysdi = lysdi;
this.methdi = methdi;
this.ca = ca;
this.p = p;
this.mg = mg;
this.na = na;
this.cu = cu;
this.zn = zn;
this.mn = mn;
this.co = co;
this.se = se;
this.i = i;
this.vitaminA = vitaminA;
this.vitaminD = vitaminD;
this.vitaminE = vitaminE;
this.pal = pal;
this.me = me;
}
public int getCode()
{
return code;
}
public void setCode(int code)
{
this.code = code;
}
public String getCommodityTitle()
{
return commodityTitle;
}
public void setCommodityTitle(String commodityTitle)
{
this.commodityTitle = commodityTitle;
}
public Double getCommodityPrice()
{
return commodityPrice;
}
public void setCommodityPrice(Double commodityPrice)
{
this.commodityPrice = commodityPrice;
}
public Double getDm()
{
return dm;
}
public void setDm(Double dm)
{
this.dm = dm;
}
public Double getAsh()
{
return ash;
}
public void setAsh(Double ash)
{
this.ash = ash;
}
public Double getCrudeProtein()
{
return crudeProtein;
}
public void setCrudeProtein(Double crudeProtein)
{
this.crudeProtein = crudeProtein;
}
public Double getCrudeFibre()
{
return crudeFibre;
}
public void setCrudeFibre(Double crudeFibre)
{
this.crudeFibre = crudeFibre;
}
public Double getOil()
{
return oil;
}
public void setOil(Double oil)
{
this.oil = oil;
}
public Double getNdf()
{
return ndf;
}
public void setNdf(Double ndf)
{
this.ndf = ndf;
}
public Double getAdf()
{
return adf;
}
public void setAdf(Double adf)
{
this.adf = adf;
}
public Double getEffectiveNdf()
{
return effectiveNdf;
}
public void setEffectiveNdf(Double effectiveNdf)
{
this.effectiveNdf = effectiveNdf;
}
public Double getOmd()
{
return omd;
}
public void setOmd(Double omd)
{
this.omd = omd;
}
public Double getPdia()
{
return pdia;
}
public void setPdia(Double pdia)
{
this.pdia = pdia;
}
public Double getPdin()
{
return pdin;
}
public void setPdin(Double pdin)
{
this.pdin = pdin;
}
public Double getPdie()
{
return pdie;
}
public void setPdie(Double pdie)
{
this.pdie = pdie;
}
public Double getStarch()
{
return starch;
}
public void setStarch(Double starch)
{
this.starch = starch;
}
public Double getSugar()
{
return sugar;
}
public void setSugar(Double sugar)
{
this.sugar = sugar;
}
public Double getUfl()
{
return ufl;
}
public void setUfl(Double ufl)
{
this.ufl = ufl;
}
public Double getUfv()
{
return ufv;
}
public void setUfv(Double ufv)
{
this.ufv = ufv;
}
public Double getLysdi()
{
return lysdi;
}
public void setLysdi(Double lysdi)
{
this.lysdi = lysdi;
}
public Double getMethdi()
{
return methdi;
}
public void setMethdi(Double methdi)
{
this.methdi = methdi;
}
public Double getCa()
{
return ca;
}
public void setCa(Double ca)
{
this.ca = ca;
}
public Double getP()
{
return p;
}
public void setP(Double p)
{
this.p = p;
}
public Double getMg()
{
return mg;
}
public void setMg(Double mg)
{
this.mg = mg;
}
public Double getNa()
{
return na;
}
public void setNa(Double na)
{
this.na = na;
}
public Double getCu()
{
return cu;
}
public void setCu(Double cu)
{
this.cu = cu;
}
public Double getZn()
{
return zn;
}
public void setZn(Double zn)
{
this.zn = zn;
}
public Double getMn()
{
return mn;
}
public void setMn(Double mn)
{
this.mn = mn;
}
public Double getCo()
{
return co;
}
public void setCo(Double co)
{
this.co = co;
}
public Double getSe()
{
return se;
}
public void setSe(Double se)
{
this.se = se;
}
public Double getI()
{
return i;
}
public void setI(Double i)
{
this.i = i;
}
public Double getVitaminA()
{
return vitaminA;
}
public void setVitaminA(Double vitaminA)
{
this.vitaminA = vitaminA;
}
public Double getVitaminD()
{
return vitaminD;
}
public void setVitaminD(Double vitaminD)
{
this.vitaminD = vitaminD;
}
public Double getVitaminE()
{
return vitaminE;
}
public void setVitaminE(Double vitaminE)
{
this.vitaminE = vitaminE;
}
public Double getPal()
{
return pal;
}
public void setPal(Double pal)
{
this.pal = pal;
}
public Double getMe()
{
return me;
}
public void setMe(Double me)
{
this.me = me;
}
/**
 * Collects every field of this entity as a string, in declaration order
 * (code, title, price, then each nutrient value).
 *
 * <p>Numeric fields are rendered via {@link String#valueOf(Object)}, so a
 * {@code null} field becomes the literal string {@code "null"} -- exactly the
 * behavior of the {@code x + ""} concatenation this replaces.
 *
 * @return a new mutable list of the stringified field values
 */
public ArrayList<String> toList()
{
ArrayList<String> values = new ArrayList<>();
values.add(String.valueOf(code));
values.add(commodityTitle);
values.add(String.valueOf(commodityPrice));
values.add(String.valueOf(dm));
values.add(String.valueOf(ash));
values.add(String.valueOf(crudeProtein));
values.add(String.valueOf(crudeFibre));
values.add(String.valueOf(oil));
values.add(String.valueOf(ndf));
values.add(String.valueOf(adf));
values.add(String.valueOf(effectiveNdf));
values.add(String.valueOf(omd));
values.add(String.valueOf(pdia));
values.add(String.valueOf(pdin));
values.add(String.valueOf(pdie));
values.add(String.valueOf(starch));
values.add(String.valueOf(sugar));
values.add(String.valueOf(ufl));
values.add(String.valueOf(ufv));
values.add(String.valueOf(lysdi));
values.add(String.valueOf(methdi));
values.add(String.valueOf(ca));
values.add(String.valueOf(p));
values.add(String.valueOf(mg));
values.add(String.valueOf(na));
values.add(String.valueOf(cu));
values.add(String.valueOf(zn));
values.add(String.valueOf(mn));
values.add(String.valueOf(co));
values.add(String.valueOf(se));
values.add(String.valueOf(i));
values.add(String.valueOf(vitaminA));
values.add(String.valueOf(vitaminD));
values.add(String.valueOf(vitaminE));
values.add(String.valueOf(pal));
values.add(String.valueOf(me));
return values;
}
}
| |
/**
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.inject;
import static com.google.common.base.Preconditions.checkState;
import com.google.inject.binder.AnnotatedBindingBuilder;
import com.google.inject.binder.AnnotatedConstantBindingBuilder;
import com.google.inject.binder.AnnotatedElementBuilder;
import com.google.inject.binder.LinkedBindingBuilder;
import com.google.inject.matcher.Matcher;
import com.google.inject.spi.Message;
import com.google.inject.spi.ProvisionListener;
import com.google.inject.spi.TypeConverter;
import com.google.inject.spi.TypeListener;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
/**
* A module whose configuration information is hidden from its environment by default. Only bindings
* that are explicitly exposed will be available to other modules and to the users of the injector.
* This module may expose the bindings it creates and the bindings of the modules it installs.
*
* <p>A private module can be nested within a regular module or within another private module using
* {@link Binder#install install()}. Its bindings live in a new environment that inherits bindings,
* type converters, scopes, and interceptors from the surrounding ("parent") environment. When you
* nest multiple private modules, the result is a tree of environments where the injector's
* environment is the root.
*
* <p>Guice EDSL bindings can be exposed with {@link #expose(Class) expose()}. {@literal @}{@link
* com.google.inject.Provides Provides} bindings can be exposed with the {@literal @}{@link
* Exposed} annotation:
*
* <pre>
* public class FooBarBazModule extends PrivateModule {
* protected void configure() {
* bind(Foo.class).to(RealFoo.class);
* expose(Foo.class);
*
* install(new TransactionalBarModule());
* expose(Bar.class).annotatedWith(Transactional.class);
*
* bind(SomeImplementationDetail.class);
* install(new MoreImplementationDetailsModule());
* }
*
* {@literal @}Provides {@literal @}Exposed
* public Baz provideBaz() {
* return new SuperBaz();
* }
* }
* </pre>
*
* <p>Private modules are implemented using {@link Injector#createChildInjector(Module[]) parent
* injectors}. When it can satisfy their dependencies, just-in-time bindings will be created in the
* root environment. Such bindings are shared among all environments in the tree.
*
* <p>The scope of a binding is constrained to its environment. A singleton bound in a private
* module will be unique to its environment. But a binding for the same type in a different private
* module will yield a different instance.
*
* <p>A shared binding that injects the {@code Injector} gets the root injector, which only has
* access to bindings in the root environment. An explicit binding that injects the {@code Injector}
* gets access to all bindings in the child environment.
*
* <p>To promote a just-in-time binding to an explicit binding, bind it:
* <pre>
* bind(FooImpl.class);
* </pre>
*
* @author jessewilson@google.com (Jesse Wilson)
* @since 2.0
*/
public abstract class PrivateModule implements Module {
/** Like abstract module, the binder of the current private module */
private PrivateBinder binder;
/**
 * Implements {@link Module#configure(Binder)}. Final and synchronized so that the single
 * {@code binder} field cannot be clobbered by re-entrant or concurrent configuration;
 * subclasses contribute their bindings through {@link #configure()} instead.
 *
 * @param binder the binder supplied by the framework; cast to {@link PrivateBinder} below
 */
public final synchronized void configure(Binder binder) {
checkState(this.binder == null, "Re-entry is not allowed.");
// Guice treats PrivateModules specially and passes in a PrivateBinder automatically.
this.binder = (PrivateBinder) binder.skipSources(PrivateModule.class);
try {
configure();
} finally {
// Always clear the field, even on failure, so a later configure() sees a clean state.
this.binder = null;
}
}
/**
* Creates bindings and other configurations private to this module. Use {@link #expose(Class)
* expose()} to make the bindings in this module available externally.
*/
protected abstract void configure();
/** Makes the binding for {@code key} available to other modules and the injector. */
protected final <T> void expose(Key<T> key) {
binder().expose(key);
}
/**
* Makes a binding for {@code type} available to other modules and the injector. Use {@link
* AnnotatedElementBuilder#annotatedWith(Class) annotatedWith()} to expose {@code type} with a
* binding annotation.
*/
protected final AnnotatedElementBuilder expose(Class<?> type) {
return binder().expose(type);
}
/**
* Makes a binding for {@code type} available to other modules and the injector. Use {@link
* AnnotatedElementBuilder#annotatedWith(Class) annotatedWith()} to expose {@code type} with a
* binding annotation.
*/
protected final AnnotatedElementBuilder expose(TypeLiteral<?> type) {
return binder().expose(type);
}
// everything below is copied from AbstractModule
/**
* Returns the current binder.
*/
protected final PrivateBinder binder() {
// Only non-null while configure(Binder) is on the stack.
checkState(binder != null, "The binder can only be used inside configure()");
return binder;
}
/**
* @see Binder#bindScope(Class, Scope)
*/
protected final void bindScope(Class<? extends Annotation> scopeAnnotation, Scope scope) {
binder().bindScope(scopeAnnotation, scope);
}
/**
* @see Binder#bind(Key)
*/
protected final <T> LinkedBindingBuilder<T> bind(Key<T> key) {
return binder().bind(key);
}
/**
* @see Binder#bind(TypeLiteral)
*/
protected final <T> AnnotatedBindingBuilder<T> bind(TypeLiteral<T> typeLiteral) {
return binder().bind(typeLiteral);
}
/**
* @see Binder#bind(Class)
*/
protected final <T> AnnotatedBindingBuilder<T> bind(Class<T> clazz) {
return binder().bind(clazz);
}
/**
* @see Binder#bindConstant()
*/
protected final AnnotatedConstantBindingBuilder bindConstant() {
return binder().bindConstant();
}
/**
* @see Binder#install(Module)
*/
protected final void install(Module module) {
binder().install(module);
}
/**
* @see Binder#addError(String, Object[])
*/
protected final void addError(String message, Object... arguments) {
binder().addError(message, arguments);
}
/**
* @see Binder#addError(Throwable)
*/
protected final void addError(Throwable t) {
binder().addError(t);
}
/**
* @see Binder#addError(Message)
*/
protected final void addError(Message message) {
binder().addError(message);
}
/**
* @see Binder#requestInjection(Object)
*/
protected final void requestInjection(Object instance) {
binder().requestInjection(instance);
}
/**
* @see Binder#requestStaticInjection(Class[])
*/
protected final void requestStaticInjection(Class<?>... types) {
binder().requestStaticInjection(types);
}
/*if[AOP]*/
/**
* @see Binder#bindInterceptor(com.google.inject.matcher.Matcher, com.google.inject.matcher.Matcher, org.aopalliance.intercept.MethodInterceptor[])
*/
protected final void bindInterceptor(Matcher<? super Class<?>> classMatcher,
Matcher<? super Method> methodMatcher,
org.aopalliance.intercept.MethodInterceptor... interceptors) {
binder().bindInterceptor(classMatcher, methodMatcher, interceptors);
}
/*end[AOP]*/
/**
* Instructs Guice to require a binding to the given key.
*/
protected final void requireBinding(Key<?> key) {
// The returned provider is discarded: the getProvider() call itself records the requirement.
binder().getProvider(key);
}
/**
* Instructs Guice to require a binding to the given type.
*/
protected final void requireBinding(Class<?> type) {
// The returned provider is discarded: the getProvider() call itself records the requirement.
binder().getProvider(type);
}
/**
* @see Binder#getProvider(Key)
*/
protected final <T> Provider<T> getProvider(Key<T> key) {
return binder().getProvider(key);
}
/**
* @see Binder#getProvider(Class)
*/
protected final <T> Provider<T> getProvider(Class<T> type) {
return binder().getProvider(type);
}
/**
* @see Binder#convertToTypes(com.google.inject.matcher.Matcher, com.google.inject.spi.TypeConverter)
*/
protected final void convertToTypes(Matcher<? super TypeLiteral<?>> typeMatcher,
TypeConverter converter) {
binder().convertToTypes(typeMatcher, converter);
}
/**
* @see Binder#currentStage()
*/
protected final Stage currentStage() {
return binder().currentStage();
}
/**
* @see Binder#getMembersInjector(Class)
*/
protected <T> MembersInjector<T> getMembersInjector(Class<T> type) {
return binder().getMembersInjector(type);
}
/**
* @see Binder#getMembersInjector(TypeLiteral)
*/
protected <T> MembersInjector<T> getMembersInjector(TypeLiteral<T> type) {
return binder().getMembersInjector(type);
}
/**
* @see Binder#bindListener(com.google.inject.matcher.Matcher, com.google.inject.spi.TypeListener)
*/
protected void bindListener(Matcher<? super TypeLiteral<?>> typeMatcher,
TypeListener listener) {
binder().bindListener(typeMatcher, listener);
}
/**
* @see Binder#bindListener(Matcher, ProvisionListener...)
*/
protected void bindListener(Matcher<? super Binding<?>> bindingMatcher,
ProvisionListener... listeners) {
binder().bindListener(bindingMatcher, listeners);
}
}
| |
package hr.hrg.hipster.processor;
import static hr.hrg.javapoet.PoetUtil.*;
import java.io.*;
import java.util.*;
import java.util.Map.*;
import javax.annotation.processing.*;
import javax.lang.model.*;
import javax.lang.model.element.*;
import javax.lang.model.type.*;
import javax.lang.model.util.*;
import javax.persistence.*;
import javax.tools.*;
import javax.tools.Diagnostic.Kind;
import com.fasterxml.jackson.annotation.*;
import com.squareup.javapoet.*;
import com.squareup.javapoet.TypeSpec.*;
import hr.hrg.hipster.entity.*;
import hr.hrg.hipster.sql.*;
@SupportedAnnotationTypes("hr.hrg.hipster.entity.HipsterEntity")
@SupportedOptions({"hipster_proc_jackson","hipster_proc_builder", "hipster_proc_column_meta_class"})
public class HipsterDaoProcessor extends AbstractProcessor{

    /** Meta classes generated so far, grouped by package name (static: shared across rounds and processor instances). */
    private static Map<String, List<ClassName>> packageMetas = new HashMap<String, List<ClassName>>();

    /** Entity definition for each processed entity interface, keyed by its class name. */
    private static Map<ClassName, EntityDef> defMap = new HashMap<>();

    /** Diagnostic counter of init() invocations. */
    static int counter= 0;

    /** Code-generation options; seeded from processor options in init(), may be refined per package in makeDef(). */
    static GenOptions genOptions;

    @Override
    public SourceVersion getSupportedSourceVersion() {
        // We may claim to support the latest version, since we are not using
        // any version-specific extensions.
        return SourceVersion.latest();
    }

    @Override
    public synchronized void init(ProcessingEnvironment processingEnv) {
        super.init(processingEnv);
        processingEnv.getMessager().printMessage(Kind.NOTE, "INIT "+(++counter));
        // Each flag defaults to false unless the matching -Ahipster_proc_* option is "true".
        // NOTE(review): "hipster_proc_annotations" (and several others read below) are not
        // declared in @SupportedOptions above -- confirm whether they should be added there.
        boolean anno = "true".equalsIgnoreCase(processingEnv.getOptions().get("hipster_proc_annotations"));
        boolean jackson = "true".equalsIgnoreCase(processingEnv.getOptions().get("hipster_proc_jackson"));
        boolean genBuilder = "true".equalsIgnoreCase(processingEnv.getOptions().get("hipster_proc_builder"));
        boolean genVisitor = "true".equalsIgnoreCase(processingEnv.getOptions().get("hipster_proc_visitor"));
        boolean genUpdate = "true".equalsIgnoreCase(processingEnv.getOptions().get("hipster_proc_update"));
        boolean genMongo = "true".equalsIgnoreCase(processingEnv.getOptions().get("hipster_proc_mongo"));
        boolean mongoSkipNull = "true".equalsIgnoreCase(processingEnv.getOptions().get("hipster_proc_mongo_skip_null"));
        boolean mongoUseFieldName = "true".equalsIgnoreCase(processingEnv.getOptions().get("hipster_proc_mongo_use_field_name"));
        boolean genSql = "true".equalsIgnoreCase(processingEnv.getOptions().get("hipster_proc_sql"));
        genOptions = new GenOptions(anno, jackson,true, genVisitor, genUpdate, genBuilder, genSql, genMongo, mongoSkipNull, mongoUseFieldName);
    }

    /**
     * Processes every interface annotated with {@code @HipsterEntity}: builds its
     * {@link EntityDef}, generates the companion classes, and finally emits one
     * "AllEntitiesInPackage" aggregate per package that produced meta classes.
     */
    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        Set<? extends Element> elements = roundEnv.getElementsAnnotatedWith(HipsterEntity.class);
        // Packages (by name) for which an aggregate class must be generated this round.
        Map<String, Element> packageClasses = new HashMap<>();
        processingEnv.getMessager().printMessage(Kind.NOTE, "process classes "+elements);
        for (Element element : elements) {
            if (element.getKind() == ElementKind.INTERFACE) {
                EntityDef def = makeDef(element);
                if(def.genOptions.isGenMeta() && def.packageElement != null) packageClasses.put(def.packageName, def.packageElement);
                generateClass(def, processingEnv);
            }else{
                // Only interfaces are supported as entity sources.
                processingEnv.getMessager().printMessage(Kind.NOTE, "skip because not interface "+element);
            }
        }
        for(Entry<String, Element> entry: packageClasses.entrySet()) {
            generateAllEntitiesInPackage(entry.getKey(), entry.getValue(), processingEnv, roundEnv);
        }
        // Returning false lets other processors also see these annotations.
        return false;
    }

    /**
     * Builds the {@link EntityDef} for one annotated interface, applying any
     * package-level {@code @HipsterEntity} overrides to the generation options,
     * and registers it in the static lookup maps.
     */
    public EntityDef makeDef(Element _element) {
        TypeElement typeElement = (TypeElement) _element;
        Element packageElement = findPackage(typeElement);
        if(packageElement != null) {
            HipsterEntity annotation = packageElement.getAnnotation(HipsterEntity.class);
            if(annotation != null) genOptions = new GenOptions(genOptions,annotation);
        }
        processingEnv.getMessager().printMessage(
                Diagnostic.Kind.NOTE,
                "annotated class: " + typeElement.getQualifiedName());
        EntityDef def = new EntityDef(typeElement, processingEnv.getElementUtils(), genOptions);
        def.packageElement = packageElement;
        makeProps(typeElement, def);
        addDef(def);
        return def;
    }

    /**
     * Collects the properties of an entity interface: first recurses into any
     * super-interfaces that are themselves {@code @HipsterEntity}, then turns every
     * zero-argument, non-default getter ({@code getX}/{@code isX}) into a property.
     * A getter annotated {@code @Id} becomes the (single) primary property.
     */
    public void makeProps(TypeElement typeElement, EntityDef def) {
        List<? extends TypeMirror> interfaces = typeElement.getInterfaces();
        for (TypeMirror typeMirror : interfaces) {
            TypeElement typeElement2 = (TypeElement) processingEnv.getTypeUtils().asElement(typeMirror);
            HipsterEntity hipsterEntity = typeElement2.getAnnotation(HipsterEntity.class);
            if(hipsterEntity != null) {
                processingEnv.getMessager().printMessage(
                        Diagnostic.Kind.NOTE,
                        "annotated class: " + typeElement.getQualifiedName()+" with interface "+typeMirror.toString());
                makeProps(typeElement2, def);
            }
        }
        for (Element element : typeElement.getEnclosedElements()) {
            if(element.getKind() == ElementKind.METHOD) {
                ExecutableElement method = (ExecutableElement) element;
                String name = element.getSimpleName().toString();
                TypeName typeName = TypeName.get(method.getReturnType());
                String typeNameStr = typeName.toString();
                // only getters (getSth, isSth)
                if(method.getParameters().size()> 0) continue;
                if(method.isDefault()) continue;
                boolean getter = name.startsWith("get") || (name.startsWith("is") && ("boolean".equals(typeNameStr) || "java.lang.Boolean".equals(typeNameStr) )) ;
                if(!getter) {
                    processingEnv.getMessager().printMessage(
                            Diagnostic.Kind.NOTE,
                            "not a getter " + typeElement.getQualifiedName()+"."+name+":"+typeNameStr);
                    continue;
                }
                HipsterColumn hipsterColumn = element.getAnnotation(HipsterColumn.class);
                if(hipsterColumn != null && hipsterColumn.skip()) continue;
                Property prop = def.addProp(name, typeName, method.getReturnType(), method, processingEnv);
                prop.readOnly = method.getAnnotation(Id.class) != null;
                if(prop.readOnly) {
                    if(def.getPrimaryProp() != null) {
                        processingEnv.getMessager().printMessage(Kind.ERROR, "Second id field found, frist one was at "+def.getPrimaryProp().getterName+"()", method);
                    }else {
                        def.setPrimaryProp(prop);
                    }
                }
            }
        }
    }

    /** Registers a definition in both static maps, avoiding duplicates per package. */
    public void addDef(EntityDef def) {
        List<ClassName> list = packageMetas.get(def.packageName);
        if(list == null) {
            list = new ArrayList<>();
            packageMetas.put(def.packageName, list);
        }
        if(!list.contains(def.type)) list.add(def.type);
        defMap.put(def.type, def);
    }

    /**
     * Walks up the enclosing-element chain of the type looking for its package,
     * giving up after a few levels (nested types).
     *
     * @return the enclosing {@code PACKAGE} element, or {@code null} if not found
     */
    private Element findPackage(TypeElement typeElement) {
        Element parentElement = typeElement.getEnclosingElement();
        int i=0;
        while(parentElement != null) {
            if(parentElement.getKind() == ElementKind.PACKAGE) return parentElement;
            // BUG FIX: previously re-read typeElement.getEnclosingElement() here, so the
            // loop never actually climbed past the first enclosing element.
            parentElement = parentElement.getEnclosingElement();
            if(++i>2) break;
        }
        return null;
    }

    /**
     * Generates the per-package "AllEntitiesInPackage" class holding two constant
     * arrays: ALL_META (generated meta classes) and ALL_ENTITIES (entity interfaces).
     */
    private void generateAllEntitiesInPackage(String packageName, Element packageElement, ProcessingEnvironment processingEnv, RoundEnvironment roundEnv) {
        ClassName className = ClassName.get(packageName,"AllEntitiesInPackage");
        // Make sure every annotated type in the package has a definition, even if it
        // was not part of this round's annotated-element set.
        packageElement.accept(new ElementScanner8<Void, Void>() {
            @Override
            public Void visitType(TypeElement e, Void p) {
                HipsterEntity annotation = e.getAnnotation(HipsterEntity.class);
                if(annotation != null) {
                    ClassName entityClassName = ClassName.get(e);
                    EntityDef def = defMap.get(entityClassName);
                    if(def == null) def = makeDef(e); // makeDef registers the definition as a side effect
                }
                return super.visitType(e, p);
            }
        }, null);
        if(packageName == null || !packageMetas.containsKey(packageName)) return;
        List<EntityDef> list = new ArrayList<>();
        for(ClassName cName: packageMetas.get(packageName)) {
            EntityDef def = defMap.get(cName);
            if(def.genOptions.isGenMeta()) list.add(def);
        }
        TypeSpec.Builder cp = classBuilder(PUBLIC(),className);
        com.squareup.javapoet.CodeBlock.Builder codeBlock = CodeBlock.builder();
        codeBlock.add("$T.toArray(\n", HipsterSqlUtil.class);
        codeBlock.indent();
        boolean first = true;
        for(int i=0; i<list.size(); i++) {
            EntityDef def = list.get(i);
            if(!def.genOptions.isGenMeta()) continue; // defensive; list is already filtered above
            if(!first) codeBlock.add(",\n");
            codeBlock.add("$T.class", def.typeMeta);
            first = false;
        }
        codeBlock.unindent();
        final CodeBlock code1 = codeBlock.add("\n)").build();
        // Class<? extends IEntityMeta>[] ALL_META = HipsterSqlUtil.toArray(...);
        addField(cp,
                ArrayTypeName.of(parametrized(Class.class, WildcardTypeName.subtypeOf(IEntityMeta.class))),
                "ALL_META",
                field -> {
                    field.addModifiers(PUBLIC().STATIC().FINAL().toArray());
                    field.initializer(code1);
                } );
        codeBlock = CodeBlock.builder();
        codeBlock.add("$T.toArray(\n", HipsterSqlUtil.class);
        codeBlock.indent();
        first = true;
        for(int i=0; i<list.size(); i++) {
            EntityDef def = list.get(i);
            if(!def.genOptions.isGenMeta()) continue; // defensive; list is already filtered above
            if(!first) codeBlock.add(",\n");
            codeBlock.add("$T.class", def.type);
            first = false;
        }
        codeBlock.unindent();
        final CodeBlock code2 = codeBlock.add("\n)").build();
        // Class[] ALL_ENTITIES = HipsterSqlUtil.toArray(...);
        addField(cp,
                ArrayTypeName.of(Class.class),
                "ALL_ENTITIES",
                field -> {
                    field.addModifiers(PUBLIC().STATIC().FINAL().toArray());
                    field.initializer(code2);
                } );
        write(className,cp.build(),processingEnv);
    }

    /**
     * Generates all companion classes for one entity definition (immutable, builder,
     * update, meta, visitor) according to its generation options. Errors are reported
     * through the messager instead of aborting the whole round.
     */
    private EntityDef generateClass(EntityDef def, ProcessingEnvironment processingEnv) {
        try {
            String[] className = HipsterProcessorUtil.splitClassName(processingEnv.getOptions().getOrDefault("hipster_proc_column_meta_class",ColumnMeta.class.getName()));
            ClassName columnMetaBase = ClassName.get(className[0],className[1]);
            Builder builder = new GenImmutable(columnMetaBase).gen2(def);
            write(def.typeImmutable, builder.build(), processingEnv);
            if(def.genOptions.isGenBuilder()){
                builder = new GenBuilder(columnMetaBase).gen2(def);
                write(def.typeBuilder, builder.build(), processingEnv);
            }
            if(def.genOptions.isGenUpdate()){
                builder = new GenUpdate(columnMetaBase).gen2(def);
                write(def.typeUpdate, builder.build(), processingEnv);
            }
            if(def.genOptions.isGenMeta()){
                builder = new GenMeta().gen(def,columnMetaBase, processingEnv);
                JavaFile javaFile = JavaFile.builder(def.typeDelta.packageName(), builder.build())
                        .addStaticImport(HipsterSqlUtil.class,"annotation")
                        .build();
                write(def.typeDelta.packageName(), javaFile, processingEnv);
                if(genOptions.isGenVisitor()) {
                    builder = new GenVisitor().gen2(def);
                    write(def.typeDelta, builder.build(), processingEnv);
                }
            }
        } catch (Throwable e) {
            // Surface generation failures as compile errors attached to the entity class.
            processingEnv.getMessager().printMessage(Kind.ERROR, e.getMessage()+"\n"+getTrace(e), def.clazz);
        }
        return def;
    }

    /** Writes a generated type into the package of the given class name. */
    public void write(ClassName type, TypeSpec spec, ProcessingEnvironment processingEnv) {
        write(type.packageName(), spec, processingEnv);
    }

    /** Wraps a TypeSpec into a JavaFile and writes it to the given package. */
    public void write(String packageName, TypeSpec spec, ProcessingEnvironment processingEnv) {
        write(packageName, JavaFile.builder(packageName, spec).build(), processingEnv);
    }

    /**
     * Writes a JavaFile through the annotation-processing Filer.
     * Failures are printed but deliberately not rethrown (best-effort generation).
     */
    public void write(String packageName, JavaFile javaFile, ProcessingEnvironment processingEnv) {
        try {
            JavaFileObject jfo = processingEnv.getFiler().createSourceFile(packageName+"."+javaFile.typeSpec.name);
            try ( OutputStream out = jfo.openOutputStream();
                    PrintWriter pw = new PrintWriter(out);
            ){
                javaFile.writeTo(pw);
                pw.flush();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Renders a throwable's stack trace to a string (for messager output). */
    String getTrace(Throwable e){
        try(StringWriter sw = new StringWriter();
                PrintWriter pw = new PrintWriter(sw);) {
            e.printStackTrace(pw);
            return sw.toString();
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        return null;
    }
}
| |
/*
* File: SimplifiedSequentialMinimalOptimization.java
* Authors: Justin Basilico
* Company: Sandia National Laboratories
* Project: Cognitive Foundry Learning Core
*
* Copyright July 19, 2010, Sandia Corporation.
* Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
* license for use of this work by or on behalf of the U.S. Government. Export
* of this program may require a license from the United States Government.
*/
package gov.sandia.cognition.learning.algorithm.svm;
import gov.sandia.cognition.annotation.PublicationReference;
import gov.sandia.cognition.annotation.PublicationType;
import gov.sandia.cognition.learning.algorithm.AbstractAnytimeSupervisedBatchLearner;
import gov.sandia.cognition.learning.data.InputOutputPair;
import gov.sandia.cognition.learning.function.categorization.KernelBinaryCategorizer;
import gov.sandia.cognition.learning.function.kernel.Kernel;
import gov.sandia.cognition.learning.function.kernel.KernelContainer;
import gov.sandia.cognition.util.DefaultWeightedValue;
import gov.sandia.cognition.util.Randomized;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Random;
/**
* This is a simplified version of the Sequential Minimization Algorithm (SMO)
* that was used as a stepping-stone in the full SMO implementation.
*
* @author Justin Basilico
* @since 3.1
* @see SequentialMinimalOptimization
*/
@PublicationReference(
title="The Simplified SMO Algorithm",
author="Andrew Ng",
year=2009,
type=PublicationType.WebPage,
url="http://www.stanford.edu/class/cs229/materials/smo.pdf")
public class SimplifiedSequentialMinimalOptimization<InputType>
extends AbstractAnytimeSupervisedBatchLearner<InputType, Boolean, KernelBinaryCategorizer<InputType, DefaultWeightedValue<InputType>>>
implements KernelContainer<InputType>, Randomized
{
public static final int DEFAULT_MAX_ITERATIONS = 1000;
public static final int DEFAULT_MAX_STEPS_WITHOUT_CHANGE = 10;
/** The default maximum penalty is infinite, which means that it is
* hard-assignment. */
public static final double DEFAULT_MAX_PENALTY = Double.POSITIVE_INFINITY;
/** The default error tolerance is 0.001, which is what was recommended in
* the original Sequential Minimal Optimization paper. */
public static final double DEFAULT_ERROR_TOLERANCE = 0.001;
/** The default effective value for zero is {@value}. */
public static final double DEFAULT_EFFECTIVE_ZERO = 1.0e-10;
/** The kernel to use. */
private Kernel<? super InputType> kernel;
private double maxPenalty;
private double errorTolerance;
private int maxStepsWithoutChange;
private double effectiveZero;
private Random random;
/** The result categorizer. */
private transient KernelBinaryCategorizer<InputType, DefaultWeightedValue<InputType>> result;
private transient ArrayList<InputOutputPair<? extends InputType, Boolean>> dataList;
private transient int dataSize;
/** The number of items changed on the most recent iteration. */
private transient int changeCount;
private transient int stepsWithoutChange;
/** The mapping of weight objects to non-zero weighted examples
* (support vectors). */
private transient LinkedHashMap<Integer, DefaultWeightedValue<InputType>> supportsMap;
public SimplifiedSequentialMinimalOptimization()
{
this(null, DEFAULT_MAX_PENALTY, DEFAULT_ERROR_TOLERANCE,
DEFAULT_MAX_STEPS_WITHOUT_CHANGE, DEFAULT_EFFECTIVE_ZERO,
DEFAULT_MAX_ITERATIONS, new Random());
}
public SimplifiedSequentialMinimalOptimization(
Kernel<? super InputType> kernel,
final double maxPenalty,
double errorTolerance,
int maxStepsWithoutChange,
double effectiveZero,
final int maxIterations,
Random random)
{
super(maxIterations);
this.setKernel(kernel);
this.setMaxPenalty(maxPenalty);
this.setErrorTolerance(errorTolerance);
this.setMaxStepsWithoutChange(maxStepsWithoutChange);
this.setEffectiveZero(effectiveZero);
this.setRandom(random);
}
@Override
protected boolean initializeAlgorithm()
{
this.result = null;
if (this.getData() == null)
{
// Error: No data to learn on.
return false;
}
this.dataList = new ArrayList<InputOutputPair<? extends InputType, Boolean>>(
this.getData().size());
int positives = 0;
for (InputOutputPair<? extends InputType, Boolean> example : this.getData())
{
if (example != null && example.getInput() != null && example.getOutput() != null)
{
this.dataList.add(example);
if (example.getOutput())
{
positives++;
}
}
}
this.dataSize = this.dataList.size();
if (this.dataSize <= 0)
{
// Error: No valid data to learn from.
this.dataList = null;
return false;
}
else if (positives <= 0 || positives >= this.dataSize)
{
throw new IllegalArgumentException("Data is all one category");
}
this.changeCount = this.getData().size();
this.stepsWithoutChange = 0;
this.supportsMap = new LinkedHashMap<Integer, DefaultWeightedValue<InputType>>();
// initialize alpha array to all zero
// initialize threshold to zero
this.result = new KernelBinaryCategorizer<InputType, DefaultWeightedValue<InputType>>(
this.kernel, this.supportsMap.values(), 0.0);
return true;
}
@Override
protected boolean step()
{
System.out.println("Iteration: " + this.getIteration());
final double tol = this.errorTolerance;
final double C = this.maxPenalty;
this.changeCount = 0;
for (int i = 0; i < this.dataSize; i++)
{
//System.out.println();
//System.out.println(" i: " + i);
final double yI = this.getTarget(i);
final double eI = this.getSVMOutput(i) - yI;
double alphaI = this.getAlpha(i);
//System.out.println(" yi: " + yi);
//System.out.println(" Ei: " + Ei);
//System.out.println(" alphai: " + alphai);
final double yITimesEI = yI * eI;
if ( ((yITimesEI < -tol) && (alphaI < C))
|| ((yITimesEI > +tol) && (alphaI > 0)))
{
// Select a random j != i
int j = this.random.nextInt(this.dataSize - 1);
if (j >= i)
{
j += 1;
}
//for (int j = 0; j < dataSize; j++)
//{
// if (i == j) continue;
if (this.takeStep(i, j))
{
changeCount++;
}
}
//}
}
/*
System.out.println("Change count: " + changeCount);
System.out.println("Result " + result);
for (WeightedValue<?> support : result.getExamples())
{
System.out.println(" " + support.getWeight() + " " + support.getValue());
}
System.out.println("Bias: " + result.getBias());
*/
if (this.changeCount <= 0)
{
this.stepsWithoutChange++;
}
else
{
this.stepsWithoutChange = 0;
}
return this.stepsWithoutChange < this.maxStepsWithoutChange;
}
private boolean takeStep(
final int i,
final int j)
{
if (i == j)
{
// This is a sanity check. It cannot take a step if the two
// examples are exactly the same.
return false;
}
final double C = this.maxPenalty;
final double epsilon = this.effectiveZero;
final double CMinusEpsilon = C - epsilon;
final double yI = this.getTarget(i);
final double eI = this.getSVMOutput(i) - yI;
final double oldAlphaI = this.getAlpha(i);
// double alphaI = this.getAlpha(i);
final double yJ = this.getTarget(j);
final double eJ = this.getSVMOutput(j) - yJ;
final double oldAlphaJ = this.getAlpha(j);
// double alphaJ = this.getAlpha(j);
//System.out.println(" i: " + i);
//System.out.println(" yi: " + yi);
//System.out.println(" Ei: " + Ei);
//System.out.println(" alphai: " + alphai);
//System.out.println(" j: " + j);
//System.out.println(" Ej: " + Ej);
//System.out.println(" yj: " + yj);
//System.out.println(" alphaj: " + alphaj);
// Compute the lower and upper bounds to solve for new values of
// alphaI and alphaJ.
final double lowerBound;
final double upperBound;
if (yI != yJ)
{
final double alphaJMinusAlphaI = oldAlphaJ - oldAlphaI;
lowerBound = Math.max(0, alphaJMinusAlphaI);
upperBound = Math.min(C, alphaJMinusAlphaI + C);
}
else
{
final double alphaIPlusAlphaJ = oldAlphaI + oldAlphaJ;
lowerBound = Math.max(0, alphaIPlusAlphaJ - C);
upperBound = Math.min(C, alphaIPlusAlphaJ);
}
//System.out.println(" L: " + L);
//System.out.println(" H: " + H);
if (lowerBound >= upperBound)
{
return false;
}
// Evaluate the kernels between the values, using the property that by
// kernel symmetry: k(i,j) == k(j,i)
final double kII = this.evaluateKernel(i, i);
final double kIJ = this.evaluateKernel(i, j);
final double kJI = kIJ;
final double kJJ = this.evaluateKernel(j, j);
final double eta = kIJ + kJI - kII - kJJ;
//System.out.println(" eta: " + eta);
if (eta >= 0.0)
{
return false;
}
double newAlphaJ = oldAlphaJ - (yJ * (eI - eJ)) / eta;
if (newAlphaJ <= lowerBound)
{
newAlphaJ = lowerBound;
}
else if (newAlphaJ >= upperBound)
{
newAlphaJ = upperBound;
}
// If the new alpha is close enough to 0.0 or the maximum alpha, just
// set it to that value.
if (newAlphaJ < epsilon)
{
newAlphaJ = 0.0;
}
else if (newAlphaJ > CMinusEpsilon)
{
newAlphaJ = C;
}
//System.out.println(" alphajnew: " + alphaj);
if (Math.abs(newAlphaJ - oldAlphaJ) < epsilon)
{
return false;
}
double newAlphaI = oldAlphaI + yI * yJ * (oldAlphaJ - newAlphaJ);
// If the new alpha is close enough to 0.0 or the maximum alpha, just
// set it to that value.
if (newAlphaI < epsilon)
{
newAlphaI = 0.0;
}
else if (newAlphaI > CMinusEpsilon)
{
newAlphaI = C;
}
final double oldBias = this.getBias();
final double b1 = oldBias - eI
- yI * (newAlphaI - oldAlphaI) * kII
- yJ * (newAlphaJ - oldAlphaJ) * kIJ;
final double b2 = oldBias - eJ
- yI * (newAlphaI - oldAlphaI) * kJI
- yJ * (newAlphaJ - oldAlphaJ) * kJJ;
final double newBias;
if (newAlphaI > epsilon && newAlphaI < CMinusEpsilon)
{
newBias = b1;
}
else if (newAlphaJ > epsilon && newAlphaJ < CMinusEpsilon)
{
newBias = b2;
}
else
{
newBias = (b1 + b2) / 2.0;
}
//System.out.println(" alphai: " + alphai);
//System.out.println(" alphaj: " + alphaj);
//System.out.println(" b: " + b);
this.setAlpha(i, newAlphaI);
this.setAlpha(j, newAlphaJ);
this.setBias(newBias);
return true;
}
@Override
protected void cleanupAlgorithm()
{
this.dataList = null;
this.supportsMap = null;
}
private double evaluateKernel(
final int i,
final int j)
{
return this.kernel.evaluate(this.getPoint(i), this.getPoint(j));
}
private double getSVMOutput(
final InputType input)
{
return this.result.evaluateAsDouble(input);
}
private double getSVMOutput(
final int i)
{
return this.getSVMOutput(this.getPoint(i));
}
private InputType getPoint(
final int i)
{
return this.dataList.get(i).getInput();
}
private double getTarget(
final int i)
{
return this.dataList.get(i).getOutput() ? +1.0 : -1.0;
}
private double getAlpha(
final int i)
{
final DefaultWeightedValue<InputType> support = this.supportsMap.get(i);
if (support == null)
{
return 0.0;
}
else
{
// The weight is the label (+1 or -1) times alpha. Alpha is always
// greater than zero, so we just take the absolute value of the
// weight to get it.
return Math.abs(support.getWeight());
}
}
private void setAlpha(
int i,
double alpha)
{
if (alpha == 0.0)
{
this.supportsMap.remove(i);
}
else
{
// The weight is the label times alpha.
final double weight = this.getTarget(i) * alpha;
DefaultWeightedValue<InputType> support = this.supportsMap.get(i);
if (support == null)
{
support = new DefaultWeightedValue<InputType>(
this.getPoint(i), weight);
supportsMap.put(i, support);
}
else
{
support.setWeight(weight);
}
}
}
private double getBias()
{
return this.result.getBias();
}
private void setBias(
final double b)
{
this.result.setBias(b);
}
public KernelBinaryCategorizer<InputType, DefaultWeightedValue<InputType>> getResult()
{
return this.result;
}
public Kernel<? super InputType> getKernel()
{
return kernel;
}
public void setKernel(
final Kernel<? super InputType> kernel)
{
this.kernel = kernel;
}
public double getMaxPenalty()
{
return maxPenalty;
}
/**
 * Sets the maximum penalty parameter (commonly called C).
 *
 * @param maxPenalty the new maximum penalty; must be positive
 * @throws IllegalArgumentException if maxPenalty is not positive
 */
public void setMaxPenalty(final double maxPenalty)
{
    if (maxPenalty <= 0.0)
    {
        throw new IllegalArgumentException("maxPenalty must be positive.");
    }
    this.maxPenalty = maxPenalty;
}
/**
 * Gets the error tolerance used when checking KKT conditions.
 *
 * @return the error tolerance
 */
public double getErrorTolerance()
{
    return this.errorTolerance;
}
/**
 * Sets the error tolerance used when checking KKT conditions.
 *
 * @param errorTolerance the new error tolerance; must be non-negative
 * @throws IllegalArgumentException if errorTolerance is negative
 */
public void setErrorTolerance(final double errorTolerance)
{
    if (errorTolerance < 0.0)
    {
        throw new IllegalArgumentException("errorTolerance cannot be negative.");
    }
    this.errorTolerance = errorTolerance;
}
/**
 * Gets the number of consecutive non-improving steps allowed before the
 * algorithm stops.
 *
 * @return the maximum steps without change
 */
public int getMaxStepsWithoutChange()
{
    return this.maxStepsWithoutChange;
}
/**
 * Sets the number of consecutive non-improving steps allowed before the
 * algorithm stops.
 *
 * @param maxStepsWithoutChange the new limit; must be positive
 * @throws IllegalArgumentException if maxStepsWithoutChange is not positive
 */
public void setMaxStepsWithoutChange(final int maxStepsWithoutChange)
{
    if (maxStepsWithoutChange <= 0)
    {
        throw new IllegalArgumentException("maxStepsWithoutChange must be positive");
    }
    this.maxStepsWithoutChange = maxStepsWithoutChange;
}
/**
 * Gets the threshold below which a value is treated as zero.
 *
 * @return the effective-zero threshold
 */
public double getEffectiveZero()
{
    return effectiveZero;
}
/**
 * Sets the threshold below which a value is treated as zero.
 *
 * @param effectiveZero the new threshold; must be non-negative
 * @throws IllegalArgumentException if effectiveZero is negative
 */
public void setEffectiveZero(final double effectiveZero)
{
    if (effectiveZero < 0.0)
    {
        throw new IllegalArgumentException("effectiveZero cannot be negative.");
    }
    this.effectiveZero = effectiveZero;
}
/**
 * Gets the random number generator used by the algorithm.
 *
 * @return the random number generator
 */
public Random getRandom()
{
    return random;
}
/**
 * Sets the random number generator used by the algorithm.
 *
 * @param random the random number generator to use
 */
public void setRandom(final Random random)
{
    this.random = random;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.raptor.storage;
import com.facebook.presto.metadata.InMemoryNodeManager;
import com.facebook.presto.orc.LongVector;
import com.facebook.presto.orc.OrcDataSource;
import com.facebook.presto.orc.OrcRecordReader;
import com.facebook.presto.orc.SliceVector;
import com.facebook.presto.raptor.RaptorColumnHandle;
import com.facebook.presto.raptor.backup.BackupStore;
import com.facebook.presto.raptor.backup.FileBackupStore;
import com.facebook.presto.raptor.metadata.ColumnStats;
import com.facebook.presto.raptor.metadata.DatabaseShardManager;
import com.facebook.presto.raptor.metadata.ShardDelta;
import com.facebook.presto.raptor.metadata.ShardInfo;
import com.facebook.presto.raptor.metadata.ShardManager;
import com.facebook.presto.spi.ConnectorPageSource;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.NodeManager;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.TupleDomain;
import com.facebook.presto.spi.type.SqlDate;
import com.facebook.presto.spi.type.SqlTimestamp;
import com.facebook.presto.spi.type.SqlVarbinary;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.testing.MaterializedResult;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import io.airlift.json.JsonCodec;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import org.joda.time.Days;
import org.joda.time.chrono.ISOChronology;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.IDBI;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.RowPagesBuilder.rowPagesBuilder;
import static com.facebook.presto.raptor.storage.OrcTestingUtil.createReader;
import static com.facebook.presto.raptor.storage.OrcTestingUtil.octets;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.TimeZoneKey.UTC_KEY;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.testing.MaterializedResult.materializeSourceDataStream;
import static com.facebook.presto.testing.MaterializedResult.resultBuilder;
import static com.google.common.io.Files.createTempDir;
import static io.airlift.json.JsonCodec.jsonCodec;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.slice.Slices.wrappedBuffer;
import static io.airlift.testing.FileUtils.deleteRecursively;
import static io.airlift.units.DataSize.Unit.BYTE;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static org.joda.time.DateTimeZone.UTC;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import static org.testng.FileAssert.assertFile;
/**
 * Tests for {@code OrcStorageManager}: shard file layout, writing/committing
 * shards (including backup and recovery), reading shards back with optional
 * tuple-domain pruning, per-column shard statistics, and the row-count and
 * file-size limits of the storage page sink.
 */
@Test(singleThreaded = true)
public class TestOrcStorageManager
{
    private static final JsonCodec<ShardDelta> SHARD_DELTA_CODEC = jsonCodec(ShardDelta.class);
    private static final ISOChronology UTC_CHRONOLOGY = ISOChronology.getInstance(UTC);
    // Reference instant used to convert calendar dates to day counts for DATE values.
    private static final DateTime EPOCH = new DateTime(0, UTC_CHRONOLOGY);
    private static final ConnectorSession SESSION = new ConnectorSession("user", UTC_KEY, ENGLISH, System.currentTimeMillis(), null);
    private static final String CURRENT_NODE = "node";
    private static final DataSize ORC_MAX_MERGE_DISTANCE = new DataSize(1, MEGABYTE);
    private static final DataSize ORC_MAX_READ_SIZE = new DataSize(1, MEGABYTE);
    private static final DataSize ORC_STREAM_BUFFER_SIZE = new DataSize(1, MEGABYTE);
    private static final Duration SHARD_RECOVERY_TIMEOUT = new Duration(30, TimeUnit.SECONDS);
    private static final DataSize MAX_BUFFER_SIZE = new DataSize(256, MEGABYTE);
    private static final int MAX_SHARD_ROWS = 100;
    private static final DataSize MAX_FILE_SIZE = new DataSize(1, MEGABYTE);
    private static final Duration MISSING_SHARD_DISCOVERY = new Duration(5, TimeUnit.MINUTES);
    private final NodeManager nodeManager = new InMemoryNodeManager();
    // Keeping this handle open keeps the in-memory H2 metadata database alive
    // for the lifetime of the test class.
    private Handle dummyHandle;
    private File temporary;
    private StorageService storageService;
    private ShardRecoveryManager recoveryManager;
    private FileBackupStore fileBackupStore;
    private Optional<BackupStore> backupStore;
    // Creates the temporary data/backup directories, the in-memory shard
    // metadata database, and the recovery manager shared by all tests.
    @BeforeClass
    public void setup()
            throws Exception
    {
        temporary = createTempDir();
        File directory = new File(temporary, "data");
        storageService = new FileStorageService(directory);
        storageService.start();
        File backupDirectory = new File(temporary, "backup");
        fileBackupStore = new FileBackupStore(backupDirectory);
        fileBackupStore.start();
        backupStore = Optional.of(fileBackupStore);
        IDBI dbi = new DBI("jdbc:h2:mem:test" + System.nanoTime());
        dummyHandle = dbi.open();
        ShardManager shardManager = new DatabaseShardManager(dbi);
        Duration discoveryInterval = new Duration(5, TimeUnit.MINUTES);
        recoveryManager = new ShardRecoveryManager(storageService, backupStore, nodeManager, shardManager, discoveryInterval, 10);
    }
    // Closes the metadata database and removes all temporary files.
    @AfterClass(alwaysRun = true)
    public void tearDown()
            throws Exception
    {
        if (dummyHandle != null) {
            dummyHandle.close();
        }
        deleteRecursively(temporary);
    }
    // Verifies the on-disk layout: storage files are sharded by UUID prefix,
    // staging files are flat, and backup files mirror the storage layout.
    @Test
    public void testShardFiles()
            throws Exception
    {
        UUID uuid = UUID.fromString("701e1a79-74f7-4f56-b438-b41e8e7d019d");
        assertEquals(
                new File(temporary, "data/storage/701/e1a/701e1a79-74f7-4f56-b438-b41e8e7d019d.orc"),
                storageService.getStorageFile(uuid));
        assertEquals(
                new File(temporary, "data/staging/701e1a79-74f7-4f56-b438-b41e8e7d019d.orc"),
                storageService.getStagingFile(uuid));
        assertEquals(
                new File(temporary, "backup/701/e1a/701e1a79-74f7-4f56-b438-b41e8e7d019d.orc"),
                fileBackupStore.getBackupFile(uuid));
    }
    // Writes a shard, checks its metadata and backup copy, deletes the primary
    // file, restores it from backup, and reads the data back with a raw ORC reader.
    @Test
    public void testWriter()
            throws Exception
    {
        OrcStorageManager manager = createOrcStorageManager();
        List<Long> columnIds = ImmutableList.of(3L, 7L);
        List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, VARCHAR);
        StoragePageSink sink = manager.createStoragePageSink(columnIds, columnTypes);
        List<Page> pages = rowPagesBuilder(columnTypes)
                .row(123, "hello")
                .row(456, "bye")
                .build();
        sink.appendPages(pages);
        List<ShardInfo> shards = sink.commit();
        assertEquals(shards.size(), 1);
        ShardInfo shardInfo = Iterables.getOnlyElement(shards);
        UUID shardUuid = shardInfo.getShardUuid();
        File file = storageService.getStorageFile(shardUuid);
        File backupFile = fileBackupStore.getBackupFile(shardUuid);
        assertEquals(shardInfo.getRowCount(), 2);
        assertEquals(shardInfo.getCompressedSize(), file.length());
        // verify primary and backup shard exist
        assertFile(file, "primary shard");
        assertFile(backupFile, "backup shard");
        // remove primary shard to force recovery from backup
        assertTrue(file.delete());
        assertTrue(file.getParentFile().delete());
        assertFalse(file.exists());
        recoveryManager.restoreFromBackup(shardUuid);
        try (OrcDataSource dataSource = manager.openShard(shardUuid)) {
            OrcRecordReader reader = createReader(dataSource, columnIds, columnTypes);
            assertEquals(reader.nextBatch(), 2);
            LongVector longVector = new LongVector(2);
            reader.readVector(0, longVector);
            assertEquals(longVector.isNull[0], false);
            assertEquals(longVector.isNull[1], false);
            assertEquals(longVector.vector[0], 123L);
            assertEquals(longVector.vector[1], 456L);
            SliceVector stringVector = new SliceVector(2);
            reader.readVector(1, stringVector);
            assertEquals(stringVector.vector[0], utf8Slice("hello"));
            assertEquals(stringVector.vector[1], utf8Slice("bye"));
            // -1 signals end of data
            assertEquals(reader.nextBatch(), -1);
        }
    }
    // Writes a shard covering all supported types (including nulls and special
    // double values) and reads it back through the page source, with and
    // without tuple-domain pruning on a bigint column.
    @Test
    public void testReader()
            throws Exception
    {
        OrcStorageManager manager = createOrcStorageManager();
        List<Long> columnIds = ImmutableList.of(2L, 4L, 6L, 7L, 8L, 9L);
        List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, VARCHAR, VARBINARY, DATE, BOOLEAN, DOUBLE);
        byte[] bytes1 = octets(0x00, 0xFE, 0xFF);
        byte[] bytes3 = octets(0x01, 0x02, 0x19, 0x80);
        StoragePageSink sink = manager.createStoragePageSink(columnIds, columnTypes);
        // Rows exercising the non-finite and boundary double values.
        Object[][] doubles = {
                {881, "-inf", null, null, null, Double.NEGATIVE_INFINITY},
                {882, "+inf", null, null, null, Double.POSITIVE_INFINITY},
                {883, "nan", null, null, null, Double.NaN},
                {884, "min", null, null, null, Double.MIN_VALUE},
                {885, "max", null, null, null, Double.MAX_VALUE},
                {886, "pzero", null, null, null, 0.0},
                {887, "nzero", null, null, null, -0.0},
        };
        List<Page> pages = rowPagesBuilder(columnTypes)
                .row(123, "hello", wrappedBuffer(bytes1), sqlDate(2001, 8, 22).getDays(), true, 123.45)
                .row(null, null, null, null, null, null)
                .row(456, "bye", wrappedBuffer(bytes3), sqlDate(2005, 4, 22).getDays(), false, 987.65)
                .rows(doubles)
                .build();
        sink.appendPages(pages);
        List<ShardInfo> shards = sink.commit();
        assertEquals(shards.size(), 1);
        UUID uuid = Iterables.getOnlyElement(shards).getShardUuid();
        MaterializedResult expected = resultBuilder(SESSION, columnTypes)
                .row(123, "hello", sqlBinary(bytes1), sqlDate(2001, 8, 22), true, 123.45)
                .row(null, null, null, null, null, null)
                .row(456, "bye", sqlBinary(bytes3), sqlDate(2005, 4, 22), false, 987.65)
                .rows(doubles)
                .build();
        // no tuple domain (all)
        TupleDomain<RaptorColumnHandle> tupleDomain = TupleDomain.all();
        try (ConnectorPageSource pageSource = manager.getPageSource(uuid, columnIds, columnTypes, tupleDomain)) {
            MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes);
            assertEquals(result.getRowCount(), expected.getRowCount());
            assertEquals(result, expected);
        }
        // tuple domain within the column range
        tupleDomain = TupleDomain.withFixedValues(ImmutableMap.<RaptorColumnHandle, Comparable<?>>builder()
                .put(new RaptorColumnHandle("test", "c1", 2, BIGINT), 124L)
                .build());
        try (ConnectorPageSource pageSource = manager.getPageSource(uuid, columnIds, columnTypes, tupleDomain)) {
            MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes);
            assertEquals(result.getRowCount(), expected.getRowCount());
        }
        // tuple domain outside the column range
        tupleDomain = TupleDomain.withFixedValues(ImmutableMap.<RaptorColumnHandle, Comparable<?>>builder()
                .put(new RaptorColumnHandle("test", "c1", 2, BIGINT), 122L)
                .build());
        try (ConnectorPageSource pageSource = manager.getPageSource(uuid, columnIds, columnTypes, tupleDomain)) {
            MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes);
            assertEquals(result.getRowCount(), 0);
        }
    }
    // min/max stats are recorded for bigint columns.
    @Test
    public void testShardStatsBigint()
    {
        List<ColumnStats> stats = columnStats(types(BIGINT),
                row(2L),
                row(-3L),
                row(5L));
        assertColumnStats(stats, 1, -3L, 5L);
    }
    // min/max stats are recorded for double columns.
    @Test
    public void testShardStatsDouble()
    {
        List<ColumnStats> stats = columnStats(types(DOUBLE),
                row(2.5),
                row(-4.1),
                row(6.6));
        assertColumnStats(stats, 1, -4.1, 6.6);
    }
    // Stats are tracked independently per column.
    @Test
    public void testShardStatsBigintDouble()
    {
        List<ColumnStats> stats = columnStats(types(BIGINT, DOUBLE),
                row(-3L, 6.6),
                row(5L, -4.1));
        assertColumnStats(stats, 1, -3L, 5L);
        assertColumnStats(stats, 2, -4.1, 6.6);
    }
    // Double MIN_VALUE/MAX_VALUE are finite and participate in min/max stats.
    @Test
    public void testShardStatsDoubleMinMax()
    {
        List<ColumnStats> stats = columnStats(types(DOUBLE),
                row(3.2),
                row(Double.MIN_VALUE),
                row(4.5));
        assertColumnStats(stats, 1, Double.MIN_VALUE, 4.5);
        stats = columnStats(types(DOUBLE),
                row(3.2),
                row(Double.MAX_VALUE),
                row(4.5));
        assertColumnStats(stats, 1, 3.2, Double.MAX_VALUE);
    }
    // Non-finite doubles suppress the affected bound (infinities) or are
    // skipped entirely (NaN) when computing min/max stats.
    @Test
    public void testShardStatsDoubleNotFinite()
    {
        List<ColumnStats> stats = columnStats(types(DOUBLE),
                row(3.2),
                row(Double.NEGATIVE_INFINITY),
                row(4.5));
        assertColumnStats(stats, 1, null, 4.5);
        stats = columnStats(types(DOUBLE),
                row(3.2),
                row(Double.POSITIVE_INFINITY),
                row(4.5));
        assertColumnStats(stats, 1, 3.2, null);
        stats = columnStats(types(DOUBLE),
                row(3.2),
                row(Double.NaN),
                row(4.5));
        assertColumnStats(stats, 1, 3.2, 4.5);
    }
    // min/max stats for varchar use lexicographic ordering.
    @Test
    public void testShardStatsVarchar()
    {
        List<ColumnStats> stats = columnStats(
                types(VARCHAR),
                row(utf8Slice("hello")),
                row(utf8Slice("bye")),
                row(utf8Slice("foo")));
        assertColumnStats(stats, 1, "bye", "hello");
    }
    // Varbinary columns get no min/max stats.
    @Test
    public void testShardStatsBigintVarbinary()
    {
        List<ColumnStats> stats = columnStats(types(BIGINT, VARBINARY),
                row(5L, wrappedBuffer(octets(0x00))),
                row(3L, wrappedBuffer(octets(0x01))));
        assertColumnStats(stats, 1, 3L, 5L);
        assertNoColumnStats(stats, 2);
    }
    // min/max stats are recorded for date and timestamp columns.
    @Test
    public void testShardStatsDateTimestamp()
    {
        long minDate = sqlDate(2001, 8, 22).getDays();
        long maxDate = sqlDate(2005, 4, 22).getDays();
        long maxTimestamp = sqlTimestamp(2002, 4, 13, 6, 7, 8).getMillisUtc();
        long minTimestamp = sqlTimestamp(2001, 3, 15, 9, 10, 11).getMillisUtc();
        List<ColumnStats> stats = columnStats(types(DATE, TIMESTAMP),
                row(minDate, maxTimestamp),
                row(maxDate, minTimestamp));
        assertColumnStats(stats, 1, minDate, maxDate);
        assertColumnStats(stats, 2, minTimestamp, maxTimestamp);
    }
    // A sink configured for at most 2 rows reports full after appending 2 rows.
    @Test
    public void testMaxShardRows()
            throws Exception
    {
        OrcStorageManager manager = createOrcStorageManager(storageService, backupStore, recoveryManager, 2, new DataSize(2, MEGABYTE));
        List<Long> columnIds = ImmutableList.of(3L, 7L);
        List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, VARCHAR);
        StoragePageSink sink = manager.createStoragePageSink(columnIds, columnTypes);
        List<Page> pages = rowPagesBuilder(columnTypes)
                .row(123, "hello")
                .row(456, "bye")
                .build();
        sink.appendPages(pages);
        assertTrue(sink.isFull());
    }
    // A sink configured with a tiny max file size reports full after any append.
    @Test
    public void testMaxFileSize()
            throws Exception
    {
        List<Long> columnIds = ImmutableList.of(3L, 7L);
        List<Type> columnTypes = ImmutableList.<Type>of(BIGINT, VARCHAR);
        List<Page> pages = rowPagesBuilder(columnTypes)
                .row(123, "hello")
                .row(456, "bye")
                .build();
        // Set maxFileSize to 1 byte, so adding any page makes the StoragePageSink full
        OrcStorageManager manager = createOrcStorageManager(storageService, backupStore, recoveryManager, 20, new DataSize(1, BYTE));
        StoragePageSink sink = manager.createStoragePageSink(columnIds, columnTypes);
        sink.appendPages(pages);
        assertTrue(sink.isFull());
    }
    // Builds a manager using this class's shared services and default limits.
    private OrcStorageManager createOrcStorageManager()
    {
        return createOrcStorageManager(storageService, backupStore, recoveryManager);
    }
    // Builds a fully self-contained manager (own storage, backup, recovery)
    // for use by other test classes.
    public static OrcStorageManager createOrcStorageManager(IDBI dbi, File temporary)
            throws IOException
    {
        File directory = new File(temporary, "data");
        StorageService storageService = new FileStorageService(directory);
        storageService.start();
        File backupDirectory = new File(temporary, "backup");
        FileBackupStore fileBackupStore = new FileBackupStore(backupDirectory);
        fileBackupStore.start();
        Optional<BackupStore> backupStore = Optional.of(fileBackupStore);
        ShardManager shardManager = new DatabaseShardManager(dbi);
        ShardRecoveryManager recoveryManager = new ShardRecoveryManager(
                storageService,
                backupStore,
                new InMemoryNodeManager(),
                shardManager,
                MISSING_SHARD_DISCOVERY,
                10);
        return createOrcStorageManager(storageService, backupStore, recoveryManager, MAX_SHARD_ROWS, MAX_FILE_SIZE);
    }
    // Convenience overload using the default row/size limits.
    public static OrcStorageManager createOrcStorageManager(
            StorageService storageService,
            Optional<BackupStore> backupStore,
            ShardRecoveryManager recoveryManager)
    {
        return createOrcStorageManager(storageService, backupStore, recoveryManager, MAX_SHARD_ROWS, MAX_FILE_SIZE);
    }
    // Central factory: all managers in these tests are built with the same
    // ORC tuning constants defined at the top of the class.
    public static OrcStorageManager createOrcStorageManager(
            StorageService storageService,
            Optional<BackupStore> backupStore,
            ShardRecoveryManager recoveryManager,
            int maxShardRows,
            DataSize maxFileSize)
    {
        return new OrcStorageManager(
                CURRENT_NODE,
                storageService,
                backupStore,
                SHARD_DELTA_CODEC,
                ORC_MAX_MERGE_DISTANCE,
                ORC_MAX_READ_SIZE,
                ORC_STREAM_BUFFER_SIZE,
                recoveryManager,
                SHARD_RECOVERY_TIMEOUT,
                maxShardRows,
                maxFileSize,
                MAX_BUFFER_SIZE);
    }
    // Asserts that stats for the given column exist with the expected min/max.
    private static void assertColumnStats(List<ColumnStats> list, long columnId, Object min, Object max)
    {
        for (ColumnStats stats : list) {
            if (stats.getColumnId() == columnId) {
                assertEquals(stats.getMin(), min);
                assertEquals(stats.getMax(), max);
                return;
            }
        }
        fail(format("no stats for column: %s: %s", columnId, list));
    }
    // Asserts that no stats entry exists for the given column.
    private static void assertNoColumnStats(List<ColumnStats> list, long columnId)
    {
        for (ColumnStats stats : list) {
            assertNotEquals(stats.getColumnId(), columnId);
        }
    }
    private static List<Type> types(Type... types)
    {
        return ImmutableList.copyOf(types);
    }
    private static Object[] row(Object... values)
    {
        return values;
    }
    // Writes the given rows as a single shard (column ids 1..n) and returns
    // the committed shard's column statistics.
    private List<ColumnStats> columnStats(List<Type> columnTypes, Object[]... rows)
    {
        ImmutableList.Builder<Long> list = ImmutableList.builder();
        for (long i = 1; i <= columnTypes.size(); i++) {
            list.add(i);
        }
        List<Long> columnIds = list.build();
        OrcStorageManager manager = createOrcStorageManager();
        StoragePageSink sink = manager.createStoragePageSink(columnIds, columnTypes);
        sink.appendPages(rowPagesBuilder(columnTypes).rows(rows).build());
        List<ShardInfo> shards = sink.commit();
        assertEquals(shards.size(), 1);
        return Iterables.getOnlyElement(shards).getColumnStats();
    }
    private static SqlVarbinary sqlBinary(byte[] bytes)
    {
        return new SqlVarbinary(bytes);
    }
    // Converts a UTC calendar date into a SqlDate (days since epoch).
    private static SqlDate sqlDate(int year, int month, int day)
    {
        DateTime date = new DateTime(year, month, day, 0, 0, 0, 0, UTC);
        return new SqlDate(Days.daysBetween(EPOCH, date).getDays());
    }
    // Converts a UTC calendar date-time into a SqlTimestamp.
    private static SqlTimestamp sqlTimestamp(int year, int month, int day, int hour, int minute, int second)
    {
        DateTime dateTime = new DateTime(year, month, day, hour, minute, second, 0, UTC);
        return new SqlTimestamp(dateTime.getMillis(), UTC_KEY);
    }
}
| |
package com.chequer.axboot.core.domain.base;
import com.chequer.axboot.core.code.AXBootTypes;
import com.chequer.axboot.core.db.mapper.ColumnToBeanPropertyRowMapper;
import com.chequer.axboot.core.parameter.RequestParams;
import com.querydsl.core.types.EntityPath;
import com.querydsl.core.types.OrderSpecifier;
import com.querydsl.core.types.Predicate;
import com.querydsl.jpa.impl.JPADeleteClause;
import com.querydsl.jpa.impl.JPAQuery;
import com.querydsl.jpa.impl.JPAUpdateClause;
import org.apache.commons.lang3.StringUtils;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.annotation.Transactional;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
public abstract class AXBootBaseService<T, ID extends Serializable> extends AXBootFilterService<T> {
public AXBootBaseService() {
}
protected AXBootJPAQueryDSLRepository<T, ID> repository;
public AXBootBaseService(AXBootJPAQueryDSLRepository<T, ID> repository) {
this.repository = repository;
}
public List<T> findAll() {
return repository.findAll();
}
public List<T> findAll(Sort sort) {
return repository.findAll(sort);
}
public Page<T> findAll(Pageable pageable, String searchParams) {
return filter(findAll(pageable.getSort()), pageable, searchParams);
}
public List<T> findAll(Iterable<ID> iterable) {
return repository.findAll(iterable);
}
public T findOne(Predicate predicate) {
return repository.findOne(predicate);
}
public List<T> findAll(Predicate predicate) {
return toList(repository.findAll(predicate));
}
public List<T> findAll(Predicate predicate, Sort sort) {
return toList(repository.findAll(predicate, sort));
}
public List<T> toList(Iterable<T> iterable) {
if (iterable != null) {
List<T> list = new ArrayList<>();
for (T item : iterable) {
list.add(item);
}
return list;
}
return Collections.emptyList();
}
public List<T> findAll(Predicate predicate, OrderSpecifier... orderSpecifiers) {
return toList(repository.findAll(predicate, orderSpecifiers));
}
public List<T> findAll(OrderSpecifier... orderSpecifiers) {
return toList(repository.findAll(orderSpecifiers));
}
public Page<T> findAll(Predicate predicate, Pageable pageable) {
return repository.findAll(predicate, pageable);
}
public long count(Predicate predicate) {
return repository.count(predicate);
}
public boolean exists(Predicate predicate) {
return repository.exists(predicate);
}
public void flush() {
repository.flush();
}
@Transactional
public <S extends T> S saveAndFlush(S object) {
return repository.saveAndFlush(object);
}
@Transactional
public void deleteInBatch(Iterable<T> iterable) {
repository.deleteInBatch(iterable);
}
@Transactional
public void deleteAllInBatch() {
repository.deleteAllInBatch();
}
public T getOne(ID id) {
return repository.getOne(id);
}
public Page<T> findAll(Pageable pageable) {
return repository.findAll(pageable);
}
@Transactional
public <S extends T> S save(S var) {
boolean deleted = false;
if (var instanceof AXBootCrudModel) {
AXBootCrudModel crudModel = (AXBootCrudModel) var;
if (crudModel.getDataStatus() == AXBootTypes.DataStatus.DELETED) {
deleted = true;
}
}
if (deleted) {
repository.delete(var);
} else {
repository.save(var);
}
return var;
}
@Transactional
public <S extends T> Collection<S> save(Collection<S> vars) {
vars.forEach(this::save);
return vars;
}
public T findOne(ID var1) {
return repository.findOne(var1);
}
public boolean exists(ID var1) {
return repository.exists(var1);
}
public long count() {
return repository.count();
}
@Transactional
public void delete(ID var1) {
repository.delete(var1);
}
@Transactional
public void delete(T var1) {
repository.delete(var1);
}
@Transactional
public void delete(Iterable<? extends T> var1) {
repository.delete(var1);
}
@Transactional
public void deleteAll() {
repository.deleteAll();
}
@PersistenceContext
protected EntityManager em;
@Inject
protected JdbcTemplate jdbcTemplate;
public int getInt(Integer integer) {
if (integer == null) {
return 0;
}
return integer;
}
public long getLong(Long _long) {
if (_long == null) {
return 0;
}
return _long;
}
public int getInt(BigDecimal bigDecimal) {
if (bigDecimal == null)
return 0;
return bigDecimal.intValue();
}
public long getLong(BigDecimal bigDecimal) {
if (bigDecimal == null)
return 0;
return bigDecimal.longValue();
}
protected String like(String field) {
return "%" + field + "%";
}
public boolean isNotEmpty(String value) {
return StringUtils.isNotEmpty(value);
}
public boolean isEmpty(String value) {
return StringUtils.isEmpty(value);
}
public boolean isEmpty(Collection<?> list) {
return list == null || list.size() == 0;
}
public boolean isNotEmpty(Collection<?> list) {
return !isEmpty(list);
}
public boolean equals(Object o1, Object o2) {
if (o1 == null) {
return false;
}
if (o2 == null) {
return false;
}
return o1.equals(o2);
}
public boolean notEquals(Object o1, Object o2) {
return !equals(o1, o2);
}
protected JPAQuery<T> select() {
return new JPAQuery<>(em);
}
protected JPAUpdateClause update(EntityPath<?> entityPath) {
return new JPAUpdateClause(em, entityPath);
}
protected JPADeleteClause delete(EntityPath<?> entityPath) {
return new JPADeleteClause(em, entityPath);
}
protected <T> List<T> queryList(String query, Class<T> clazz) {
return jdbcTemplate.query(query, new ColumnToBeanPropertyRowMapper<>(clazz));
}
protected <T> T query(String query, Class<T> clazz) {
return jdbcTemplate.queryForObject(query, new ColumnToBeanPropertyRowMapper<>(clazz));
}
protected <T> T queryObject(String query, Class<T> clazz) {
return jdbcTemplate.queryForObject(query, clazz);
}
public RequestParams<T> buildRequestParams() {
return new RequestParams<>();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.locator;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.ResourceWatcher;
import org.apache.cassandra.utils.WrappedRunnable;
/**
* Used to determine if two IP's are in the same datacenter or on the same rack.
* <p/>
* Based on a properties file in the following format:
*
* 10.0.0.13=DC1:RAC2
* 10.21.119.14=DC3:RAC2
* 10.20.114.15=DC2:RAC2
* default=DC1:r1
*/
/**
 * Snitch that determines datacenter/rack placement from the
 * cassandra-topology.properties file, reloading it when it changes on disk.
 */
public class PropertyFileSnitch extends AbstractNetworkTopologySnitch
{
    private static final Logger logger = LoggerFactory.getLogger(PropertyFileSnitch.class);
    public static final String SNITCH_PROPERTIES_FILENAME = "cassandra-topology.properties";
    // Published atomically by reloadConfiguration(); readers never see a
    // partially-loaded topology.
    private static volatile Map<InetAddress, String[]> endpointMap;
    private static volatile String[] defaultDCRack;
    private volatile boolean gossipStarted;

    /**
     * Loads the topology file and registers a watcher that reloads it every
     * 60 seconds if the file is a plain, watchable file.
     *
     * @throws ConfigurationException if the initial load fails
     */
    public PropertyFileSnitch() throws ConfigurationException
    {
        reloadConfiguration();
        try
        {
            FBUtilities.resourceToFile(SNITCH_PROPERTIES_FILENAME);
            Runnable runnable = new WrappedRunnable()
            {
                protected void runMayThrow() throws ConfigurationException
                {
                    reloadConfiguration();
                }
            };
            ResourceWatcher.watch(SNITCH_PROPERTIES_FILENAME, runnable, 60 * 1000);
        }
        catch (ConfigurationException ex)
        {
            logger.error("{} found, but does not look like a plain file. Will not watch it for changes", SNITCH_PROPERTIES_FILENAME);
        }
    }

    /**
     * Get the raw information about an end point
     *
     * @param endpoint endpoint to process
     * @return a array of string with the first index being the data center and the second being the rack
     * @throws RuntimeException if the endpoint is unknown and no default is configured
     */
    public String[] getEndpointInfo(InetAddress endpoint)
    {
        String[] rawEndpointInfo = getRawEndpointInfo(endpoint);
        if (rawEndpointInfo == null)
            throw new RuntimeException("Unknown host " + endpoint + " with no default configured");
        return rawEndpointInfo;
    }

    // Returns the configured {dc, rack} pair, falling back to the default
    // entry (which may itself be null) for unknown endpoints.
    private String[] getRawEndpointInfo(InetAddress endpoint)
    {
        String[] value = endpointMap.get(endpoint);
        if (value == null)
        {
            logger.debug("Could not find end point information for {}, will use default", endpoint);
            return defaultDCRack;
        }
        return value;
    }

    /**
     * Return the data center for which an endpoint resides in
     *
     * @param endpoint the endpoint to process
     * @return string of data center
     */
    public String getDatacenter(InetAddress endpoint)
    {
        String[] info = getEndpointInfo(endpoint);
        assert info != null : "No location defined for endpoint " + endpoint;
        return info[0];
    }

    /**
     * Return the rack for which an endpoint resides in
     *
     * @param endpoint the endpoint to process
     * @return string of rack
     */
    public String getRack(InetAddress endpoint)
    {
        String[] info = getEndpointInfo(endpoint);
        assert info != null : "No location defined for endpoint " + endpoint;
        return info[1];
    }

    /**
     * (Re)loads the topology file. The new endpoint map and default DC/rack
     * are built into locals and only published to the static fields after the
     * whole file has parsed and validated successfully, so a failed or
     * partial reload never leaves a half-updated topology, and a reload that
     * removes the "default" entry no longer leaves the stale old default in
     * place.
     *
     * @throws ConfigurationException if the file cannot be read or parsed, or
     *         if it defines neither this node's broadcast address nor a default
     */
    public void reloadConfiguration() throws ConfigurationException
    {
        HashMap<InetAddress, String[]> reloadedMap = new HashMap<InetAddress, String[]>();
        String[] reloadedDefaultDCRack = null;
        Properties properties = new Properties();
        InputStream stream = null;
        try
        {
            stream = getClass().getClassLoader().getResourceAsStream(SNITCH_PROPERTIES_FILENAME);
            properties.load(stream);
        }
        catch (Exception e)
        {
            throw new ConfigurationException("Unable to read " + SNITCH_PROPERTIES_FILENAME, e);
        }
        finally
        {
            FileUtils.closeQuietly(stream);
        }
        for (Map.Entry<Object, Object> entry : properties.entrySet())
        {
            String key = (String) entry.getKey();
            String value = (String) entry.getValue();
            if (key.equals("default"))
            {
                String[] newDefault = value.split(":");
                if (newDefault.length < 2)
                    reloadedDefaultDCRack = new String[] { "default", "default" };
                else
                    reloadedDefaultDCRack = new String[] { newDefault[0].trim(), newDefault[1].trim() };
            }
            else
            {
                InetAddress host;
                String hostString = key.replace("/", "");
                try
                {
                    host = InetAddress.getByName(hostString);
                }
                catch (UnknownHostException e)
                {
                    throw new ConfigurationException("Unknown host " + hostString, e);
                }
                String[] token = value.split(":");
                if (token.length < 2)
                    token = new String[] { "default", "default" };
                else
                    token = new String[] { token[0].trim(), token[1].trim() };
                reloadedMap.put(host, token);
            }
        }
        if (reloadedDefaultDCRack == null && !reloadedMap.containsKey(FBUtilities.getBroadcastAddress()))
            throw new ConfigurationException(String.format("Snitch definitions at %s do not define a location for this node's broadcast address %s, nor does it provides a default",
                                                           SNITCH_PROPERTIES_FILENAME, FBUtilities.getBroadcastAddress()));
        logger.debug("loaded network topology {}", FBUtilities.toString(reloadedMap));
        // Publish both pieces of state only after successful validation.
        defaultDCRack = reloadedDefaultDCRack;
        endpointMap = reloadedMap;
        if (StorageService.instance != null) // null check tolerates circular dependency; see CASSANDRA-4145
            StorageService.instance.getTokenMetadata().invalidateCachedRings();
        if (gossipStarted)
            StorageService.instance.gossipSnitchInfo();
    }

    @Override
    public void gossiperStarting()
    {
        gossipStarted = true;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.autoscalingplans.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Represents a CloudWatch metric of your choosing that can be used for dynamic scaling as part of a target tracking
* scaling policy.
* </p>
* <p>
* To create your customized scaling metric specification:
* </p>
* <ul>
* <li>
* <p>
* Add values for each required parameter from CloudWatch. You can use an existing metric, or a new metric that you
* create. To use your own metric, you must first publish the metric to CloudWatch. For more information, see <a
* href="https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/publishingMetrics.html">Publish Custom
* Metrics</a> in the <i>Amazon CloudWatch User Guide</i>.
* </p>
* </li>
* <li>
* <p>
* Choose a metric that changes proportionally with capacity. The value of the metric should increase or decrease in
* inverse proportion to the number of capacity units. That is, the value of the metric should decrease when capacity
* increases.
* </p>
* </li>
* </ul>
* <p>
* For information about terminology, available metrics, or how to publish new metrics, see <a
* href="https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html">Amazon CloudWatch
* Concepts</a> in the <i>Amazon CloudWatch User Guide</i>.
* </p>
*
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/autoscaling-plans-2018-01-06/CustomizedScalingMetricSpecification"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
/**
 * Represents a CloudWatch metric of your choosing that can be used for dynamic scaling as part of a target tracking
 * scaling policy.
 * <p>
 * Choose a metric that changes proportionally with capacity: its value should decrease when capacity increases. To use
 * your own metric you must first publish it to CloudWatch; see <a
 * href="https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/publishingMetrics.html">Publish Custom
 * Metrics</a> in the <i>Amazon CloudWatch User Guide</i>.
 * <p>
 * Plain mutable data holder in the generated-SDK style: each field has a setter, a getter and a fluent
 * {@code with*} variant returning {@code this} for chaining.
 *
 * @see <a
 *      href="http://docs.aws.amazon.com/goto/WebAPI/autoscaling-plans-2018-01-06/CustomizedScalingMetricSpecification"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CustomizedScalingMetricSpecification implements Serializable, Cloneable, StructuredPojo {

    /** The name of the metric. */
    private String metricName;
    /** The namespace of the metric. */
    private String namespace;
    /**
     * The dimensions of the metric. Conditional: if the metric was published with dimensions, the same dimensions
     * must be specified here.
     */
    private java.util.List<MetricDimension> dimensions;
    /** The statistic of the metric (see {@link MetricStatistic}). */
    private String statistic;
    /** The unit of the metric. */
    private String unit;

    /**
     * Sets the name of the metric.
     *
     * @param metricName
     *        The name of the metric.
     */
    public void setMetricName(String metricName) {
        this.metricName = metricName;
    }

    /**
     * Returns the name of the metric.
     *
     * @return The name of the metric.
     */
    public String getMetricName() {
        return this.metricName;
    }

    /**
     * Sets the name of the metric.
     *
     * @param metricName
     *        The name of the metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CustomizedScalingMetricSpecification withMetricName(String metricName) {
        setMetricName(metricName);
        return this;
    }

    /**
     * Sets the namespace of the metric.
     *
     * @param namespace
     *        The namespace of the metric.
     */
    public void setNamespace(String namespace) {
        this.namespace = namespace;
    }

    /**
     * Returns the namespace of the metric.
     *
     * @return The namespace of the metric.
     */
    public String getNamespace() {
        return this.namespace;
    }

    /**
     * Sets the namespace of the metric.
     *
     * @param namespace
     *        The namespace of the metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CustomizedScalingMetricSpecification withNamespace(String namespace) {
        setNamespace(namespace);
        return this;
    }

    /**
     * Returns the dimensions of the metric.
     * <p>
     * Conditional: If you published your metric with dimensions, you must specify the same dimensions in your
     * customized scaling metric specification.
     *
     * @return The dimensions of the metric.
     */
    public java.util.List<MetricDimension> getDimensions() {
        return dimensions;
    }

    /**
     * Sets the dimensions of the metric, replacing any previously set values. A defensive copy of the supplied
     * collection is stored; passing {@code null} clears the field.
     * <p>
     * Conditional: If you published your metric with dimensions, you must specify the same dimensions in your
     * customized scaling metric specification.
     *
     * @param dimensions
     *        The dimensions of the metric.
     */
    public void setDimensions(java.util.Collection<MetricDimension> dimensions) {
        if (dimensions == null) {
            this.dimensions = null;
            return;
        }
        this.dimensions = new java.util.ArrayList<MetricDimension>(dimensions);
    }

    /**
     * Adds dimensions to the metric.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setDimensions(java.util.Collection)} or {@link #withDimensions(java.util.Collection)} if you want to
     * override the existing values.
     *
     * @param dimensions
     *        The dimensions of the metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CustomizedScalingMetricSpecification withDimensions(MetricDimension... dimensions) {
        if (this.dimensions == null) {
            // Presize to the varargs length; elements are appended below.
            setDimensions(new java.util.ArrayList<MetricDimension>(dimensions.length));
        }
        for (MetricDimension ele : dimensions) {
            this.dimensions.add(ele);
        }
        return this;
    }

    /**
     * Sets the dimensions of the metric, replacing any previously set values.
     * <p>
     * Conditional: If you published your metric with dimensions, you must specify the same dimensions in your
     * customized scaling metric specification.
     *
     * @param dimensions
     *        The dimensions of the metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CustomizedScalingMetricSpecification withDimensions(java.util.Collection<MetricDimension> dimensions) {
        setDimensions(dimensions);
        return this;
    }

    /**
     * Sets the statistic of the metric.
     *
     * @param statistic
     *        The statistic of the metric.
     * @see MetricStatistic
     */
    public void setStatistic(String statistic) {
        this.statistic = statistic;
    }

    /**
     * Returns the statistic of the metric.
     *
     * @return The statistic of the metric.
     * @see MetricStatistic
     */
    public String getStatistic() {
        return this.statistic;
    }

    /**
     * Sets the statistic of the metric.
     *
     * @param statistic
     *        The statistic of the metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see MetricStatistic
     */
    public CustomizedScalingMetricSpecification withStatistic(String statistic) {
        setStatistic(statistic);
        return this;
    }

    /**
     * Sets the statistic of the metric from the enum value.
     *
     * @param statistic
     *        The statistic of the metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see MetricStatistic
     */
    public CustomizedScalingMetricSpecification withStatistic(MetricStatistic statistic) {
        // Delegate through the setter for consistency with the other with* methods.
        setStatistic(statistic.toString());
        return this;
    }

    /**
     * Sets the unit of the metric.
     *
     * @param unit
     *        The unit of the metric.
     */
    public void setUnit(String unit) {
        this.unit = unit;
    }

    /**
     * Returns the unit of the metric.
     *
     * @return The unit of the metric.
     */
    public String getUnit() {
        return this.unit;
    }

    /**
     * Sets the unit of the metric.
     *
     * @param unit
     *        The unit of the metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CustomizedScalingMetricSpecification withUnit(String unit) {
        setUnit(unit);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getMetricName() != null)
            sb.append("MetricName: ").append(getMetricName()).append(",");
        if (getNamespace() != null)
            sb.append("Namespace: ").append(getNamespace()).append(",");
        if (getDimensions() != null)
            sb.append("Dimensions: ").append(getDimensions()).append(",");
        if (getStatistic() != null)
            sb.append("Statistic: ").append(getStatistic()).append(",");
        if (getUnit() != null)
            sb.append("Unit: ").append(getUnit());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof CustomizedScalingMetricSpecification))
            return false;
        CustomizedScalingMetricSpecification other = (CustomizedScalingMetricSpecification) obj;
        // Objects.equals handles the null/null and null/non-null cases that the
        // generated XOR-based boilerplate spelled out field by field.
        return java.util.Objects.equals(other.getMetricName(), this.getMetricName())
                && java.util.Objects.equals(other.getNamespace(), this.getNamespace())
                && java.util.Objects.equals(other.getDimensions(), this.getDimensions())
                && java.util.Objects.equals(other.getStatistic(), this.getStatistic())
                && java.util.Objects.equals(other.getUnit(), this.getUnit());
    }

    @Override
    public int hashCode() {
        // Objects.hash implements exactly the 31-based accumulation (seed 1,
        // null -> 0) that the generated code wrote out by hand, so hash values
        // are unchanged.
        return java.util.Objects.hash(getMetricName(), getNamespace(), getDimensions(), getStatistic(), getUnit());
    }

    @Override
    public CustomizedScalingMetricSpecification clone() {
        try {
            return (CustomizedScalingMetricSpecification) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.autoscalingplans.model.transform.CustomizedScalingMetricSpecificationMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
package cz.metacentrum.perun.core.api;
import com.google.common.collect.Sets;
import cz.metacentrum.perun.core.AbstractPerunIntegrationTest;
import cz.metacentrum.perun.core.api.exceptions.AlreadyMemberException;
import cz.metacentrum.perun.core.api.exceptions.ExtendMembershipException;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.PrivilegeException;
import cz.metacentrum.perun.core.api.exceptions.UserNotAdminException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeValueException;
import cz.metacentrum.perun.core.api.exceptions.WrongReferenceAttributeValueException;
import cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl;
import cz.metacentrum.perun.core.impl.AuthzRoles;
import cz.metacentrum.perun.core.impl.PerunSessionImpl;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Integration tests of AuthzResolver
*
* @author Jiri Harazim <harazim@mail.muni.cz>
*/
public class AuthzResolverIntegrationTest extends AbstractPerunIntegrationTest {
// Prefix for the progress line each test prints via System.out.println.
private static final String CLASS_NAME = "AuthzResolver.";
// Shared LDAP ext source; presumably consumed by the member-creation helpers
// (createSomeMember etc.) defined later in this class — not visible in this chunk.
final ExtSource extSource = new ExtSource(0, "AuthzResolverExtSource", ExtSourcesManager.EXTSOURCE_LDAP);
// Monotonic counter, presumably used to generate unique user logins across tests
// — TODO confirm against the helper methods below this chunk.
private int userLoginSequence = 0;
/**
 * A freshly built session for an unprivileged internal principal must NOT be
 * authorized for the default policy.
 */
@Test
public void unauthorizedPerunAdmin() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedPerunAdmin");
PerunPrincipal plainPrincipal = new PerunPrincipal("pepa", ExtSourcesManager.EXTSOURCE_NAME_INTERNAL, ExtSourcesManager.EXTSOURCE_INTERNAL);
PerunSessionImpl anonymousSession = new PerunSessionImpl(perun, plainPrincipal, new PerunClient());
assertFalse(AuthzResolver.authorizedInternal(anonymousSession, "default_policy", Collections.emptyList()));
}
// Granting PERUNADMIN (no complementary object) should authorize the user for the default policy.
@Test
public void authorizedPerunAdmin() throws Exception {
System.out.println(CLASS_NAME + "authorizedPerunAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, null, Role.PERUNADMIN);
PerunSession session = getHisSession(createdMember);
// refreshAuthz must run after setRole so the new role is visible in the session.
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "default_policy", Collections.emptyList()));
}
// VOADMIN on the VO should satisfy a policy parameterized by that VO.
@Test
public void authorizedVoAdmin() throws Exception {
System.out.println(CLASS_NAME + "authorizedVoAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdVo, Role.VOADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_authorized_vo_admin", Arrays.asList(createdVo)));
}
// Without any role set, a cyclic VOADMIN policy must deny access.
@Test
public void unauthorizedVoAdminCycleAdmin() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedVoAdminCycleAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertFalse(AuthzResolver.authorizedInternal(session, "test_cycle_voadmin", Arrays.asList(createdVo)));
}
// GROUPADMIN alone should satisfy a group-OR-vo admin policy.
@Test
public void authorizedGroupOrVoAdmin() throws Exception {
System.out.println(CLASS_NAME + "authorizedGroupOrVoAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdGroup, Role.GROUPADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_authorized_group_admin", Arrays.asList(createdVo, createdGroup)));
}
// VOADMIN alone should also satisfy the group-OR-vo admin policy.
@Test
public void authorizedGroupOrVoAdmin2() throws Exception {
System.out.println(CLASS_NAME + "authorizedGroupOrVoAdmin2");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdVo, Role.VOADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_authorized_group_admin", Arrays.asList(createdVo, createdGroup)));
}
// VOADMIN on a DIFFERENT Vo must not satisfy the policy for this VO/group pair.
@Test
public void unauthorizedGroupOrVoAdmin3() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedGroupOrVoAdmin3");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Vo createdVo2 = perun.getVosManager().createVo(sess, new Vo(1,"test123444444","test123444444"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdVo2, Role.VOADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertFalse(AuthzResolver.authorizedInternal(session, "test_authorized_group_admin", Arrays.asList(createdVo, createdGroup)));
}
// A conjunctive (AND) policy passes when BOTH VOADMIN and GROUPADMIN are held.
@Test
public void authorizedGroupAndVoAdmin() throws Exception {
System.out.println(CLASS_NAME + "authorizedGroupAndVoAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdVo, Role.VOADMIN);
AuthzResolver.setRole(sess, createdUser, createdGroup, Role.GROUPADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_groupadmin_voadmin", Arrays.asList(createdVo, createdGroup)));
}
// AND policy fails with GROUPADMIN only.
@Test
public void unauthorizedGroupAndVoAdmin2() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedGroupAndVoAdmin2");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdGroup, Role.GROUPADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertFalse(AuthzResolver.authorizedInternal(session, "test_groupadmin_voadmin", Arrays.asList(createdVo, createdGroup)));
}
// AND policy fails with VOADMIN only.
@Test
public void unauthorizedGroupAndVoAdmin3() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedGroupAndVoAdmin3");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdVo, Role.VOADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertFalse(AuthzResolver.authorizedInternal(session, "test_groupadmin_voadmin", Arrays.asList(createdVo, createdGroup)));
}
// RESOURCEADMIN on the resource should satisfy the resource-admin policy.
@Test
public void authorizedResourceAdmin() throws Exception {
System.out.println(CLASS_NAME + "authorizedResourceAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
Facility createdFacility = setUpFacility();
Resource createdResource = setUpResource(createdVo, createdFacility);
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdResource, Role.RESOURCEADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_resource_admin", Arrays.asList(createdResource)));
}
// A policy that includes another policy transitively should also be satisfied by RESOURCEADMIN.
@Test
public void authorizedTransitive() throws Exception {
System.out.println(CLASS_NAME + "authorizedTransitive");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
Facility createdFacility = setUpFacility();
Resource createdResource = setUpResource(createdVo, createdFacility);
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdResource, Role.RESOURCEADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_transitive_one", Arrays.asList(createdResource)));
}
// Conjunctive policy passes with BOTH RESOURCEADMIN and FACILITYADMIN.
@Test
public void authorizedResourceAdminAndFacilityAdmin() throws Exception {
System.out.println(CLASS_NAME + "authorizedResourceAdminAndFacilityAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
Facility createdFacility = setUpFacility();
Resource createdResource = setUpResource(createdVo, createdFacility);
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdResource, Role.RESOURCEADMIN);
AuthzResolver.setRole(sess, createdUser, createdFacility, Role.FACILITYADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_resource_and_facility_admin", Arrays.asList(createdResource, createdFacility)));
}
// Fails with RESOURCEADMIN only.
@Test
public void unauthorizedResourceAdminAndFacilityAdmin2() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedResourceAdminAndFacilityAdmin2");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
Facility createdFacility = setUpFacility();
Resource createdResource = setUpResource(createdVo, createdFacility);
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdResource, Role.RESOURCEADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertFalse(AuthzResolver.authorizedInternal(session, "test_resource_and_facility_admin", Arrays.asList(createdResource, createdFacility)));
}
// Fails with FACILITYADMIN only.
@Test
public void unauthorizedResourceAdminAndFacilityAdmin3() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedResourceAdminAndFacilityAdmin3");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
Facility createdFacility = setUpFacility();
Resource createdResource = setUpResource(createdVo, createdFacility);
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdFacility, Role.FACILITYADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertFalse(AuthzResolver.authorizedInternal(session, "test_resource_and_facility_admin", Arrays.asList(createdResource, createdFacility)));
}
// Fails with neither role set.
@Test
public void unauthorizedResourceAdminAndFacilityAdmin4() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedResourceAdminAndFacilityAdmin4");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
Facility createdFacility = setUpFacility();
Resource createdResource = setUpResource(createdVo, createdFacility);
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertFalse(AuthzResolver.authorizedInternal(session, "test_resource_and_facility_admin", Arrays.asList(createdResource, createdFacility)));
}
// Disjunctive (OR) policy passes with GROUPADMIN only.
@Test
public void authorizedGroupAdminOrVoAdmin() throws Exception {
System.out.println(CLASS_NAME + "authorizedGroupAdminOrVoAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdGroup, Role.GROUPADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_group_or_vo", Arrays.asList(createdVo, createdGroup)));
}
// OR policy passes with VOADMIN only.
@Test
public void authorizedGroupAdminOrVoAdmin2() throws Exception {
System.out.println(CLASS_NAME + "authorizedGroupAdminOrVoAdmin2");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
AuthzResolver.setRole(sess, createdUser, createdVo, Role.VOADMIN);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_group_or_vo", Arrays.asList(createdVo, createdGroup)));
}
// OR policy fails with neither role set.
@Test
public void unauthorizedGroupAdminOrVoAdmin3() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedGroupAdminOrVoAdmin3");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertFalse(AuthzResolver.authorizedInternal(session, "test_group_or_vo", Arrays.asList(createdVo, createdGroup)));
}
// Security-team admin (added via SecurityTeamsManager, not setRole) authorizes the policy.
@Test
public void authorizedSecurityTeamAdmin() throws Exception {
System.out.println(CLASS_NAME + "authorizedSecurityTeamAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
SecurityTeam team = new SecurityTeam();
team.setName("a");
SecurityTeam createdTeam = perun.getSecurityTeamsManager().createSecurityTeam(sess, team);
perun.getSecurityTeamsManager().addAdmin(sess, createdTeam, createdUser);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_security_admin", Arrays.asList(createdTeam)));
}
// The same role denies when the complementary-object list is empty.
@Test
public void unauthorizedEmptyList() throws Exception {
System.out.println(CLASS_NAME + "unauthorizedEmptyList");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
SecurityTeam team = new SecurityTeam();
team.setName("a");
SecurityTeam createdTeam = perun.getSecurityTeamsManager().createSecurityTeam(sess, team);
perun.getSecurityTeamsManager().addAdmin(sess, createdTeam, createdUser);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertFalse(AuthzResolver.authorizedInternal(session, "test_security_admin", Arrays.asList()));
}
// Holding both VOOBSERVER and TOPGROUPCREATOR on the VO satisfies the combined policy.
@Test
public void authorizedVoobserverAndTopgroupcreator() throws Exception {
System.out.println(CLASS_NAME + "authorizedVoobserverAndTopgroupcreator");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
PerunSession session = getHisSession(createdMember);
AuthzResolver.setRole(sess, createdUser, createdVo, Role.TOPGROUPCREATOR);
AuthzResolver.setRole(sess, createdUser, createdVo, Role.VOOBSERVER);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_voobserver_and_topgroupcreator", Arrays.asList(createdVo)));
}
// CABINETADMIN (no complementary object) authorizes the cabinet policy.
@Test
public void authorizedCabinetAdmin() throws Exception {
System.out.println(CLASS_NAME + "authorizedCabinetAdmin");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
PerunSession session = getHisSession(createdMember);
AuthzResolver.setRole(sess, createdUser, null, Role.CABINETADMIN);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_cabinet", Arrays.asList()));
}
// SELF: a user is implicitly authorized for a policy targeting their own User object.
@Test
public void authorizedSelf() throws Exception {
System.out.println(CLASS_NAME + "authorizedSelf");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
PerunSession session = getHisSession(createdMember);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_self", Arrays.asList(createdUser)));
}
// SPONSOR on the VO authorizes the sponsor policy for a (vo, member) pair.
@Test
public void authorizedSponsor() throws Exception {
System.out.println(CLASS_NAME + "authorizedSponsor");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
PerunSession session = getHisSession(createdMember);
AuthzResolver.setRole(sess, createdUser, createdVo, Role.SPONSOR);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_sponsor", Arrays.asList(createdVo,createdMember)));
}
// RESOURCESELFSERVICE obtained via a group registered as the resource's self-service group.
@Test
public void authorizedResourceselfservice() throws Exception {
System.out.println(CLASS_NAME + "authorizedResourceselfservice");
final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
final Member createdMember = createSomeMember(createdVo);
Group createdGroup = setUpGroup(createdVo, createdMember);
final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
PerunSession session = getHisSession(createdMember);
AuthzResolver.setRole(sess, createdUser, createdGroup, Role.GROUPADMIN);
Facility facility = setUpFacility();
Resource resource = setUpResource(createdVo, facility);
perun.getResourcesManager().addResourceSelfServiceGroup(sess, resource, createdGroup);
AuthzResolver.refreshAuthz(session);
assertTrue(AuthzResolver.authorizedInternal(session, "test_resourceselfservice", Arrays.asList(resource, createdGroup)));
}
@Test
public void isAuthorizedInvalidPrincipal() throws Exception {
System.out.println(CLASS_NAME + "isAuthorizedInvalidPrincipal");
assertTrue(!
AuthzResolver.isAuthorized(new PerunSessionImpl(
perun,
new PerunPrincipal("pepa", ExtSourcesManager.EXTSOURCE_NAME_INTERNAL, ExtSourcesManager.EXTSOURCE_INTERNAL),
new PerunClient()
), Role.PERUNADMIN));
}
@Test
public void setRoleResourceSelfServiceForUser() throws Exception {
	System.out.println(CLASS_NAME + "setRoleResourceSelfServiceForUser");

	final Vo vo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
	final Member member = createSomeMember(vo);
	final User user = perun.getUsersManagerBl().getUserByMember(sess, member);
	final Resource resource = setUpResource(vo, setUpFacility());

	// RESOURCESELFSERVICE granted directly to the user must be visible after refresh.
	AuthzResolver.setRole(sess, user, resource, Role.RESOURCESELFSERVICE);

	PerunSession userSession = getHisSession(member);
	AuthzResolver.refreshAuthz(userSession);

	assertTrue(AuthzResolver.isAuthorized(userSession, Role.RESOURCESELFSERVICE, resource));
}
@Test
public void unsetRoleResourceSelfServiceForUser() throws Exception {
	System.out.println(CLASS_NAME + "unsetRoleResourceSelfServiceForUser");

	final Vo vo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
	final Member member = createSomeMember(vo);
	final User user = perun.getUsersManagerBl().getUserByMember(sess, member);
	final Resource resource = setUpResource(vo, setUpFacility());

	// Grant, refresh, revoke, refresh again — the role must be gone afterwards.
	AuthzResolver.setRole(sess, user, resource, Role.RESOURCESELFSERVICE);
	PerunSession userSession = getHisSession(member);
	AuthzResolver.refreshAuthz(userSession);

	AuthzResolver.unsetRole(sess, user, resource, Role.RESOURCESELFSERVICE);
	AuthzResolver.refreshAuthz(userSession);

	assertFalse(AuthzResolver.isAuthorized(userSession, Role.RESOURCESELFSERVICE, resource));
}
@Test
public void setRoleResourceSelfServiceForGroup() throws Exception {
	// BUGFIX: the printed name said "setRoleResourceSelfServiceForUser"
	// (copy-paste from the previous test); keep it in sync with the method.
	System.out.println(CLASS_NAME + "setRoleResourceSelfServiceForGroup");

	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
	final Member createdMember = createSomeMember(createdVo);
	final Resource resource = setUpResource(createdVo, setUpFacility());
	final Group group = setUpGroup(createdVo, createdMember);

	// The role is granted to the group; its member inherits it after refresh.
	AuthzResolver.setRole(sess, group, resource, Role.RESOURCESELFSERVICE);

	PerunSession userSession = getHisSession(createdMember);
	AuthzResolver.refreshAuthz(userSession);

	assertTrue(AuthzResolver.isAuthorized(userSession, Role.RESOURCESELFSERVICE, resource));
}
@Test
public void unsetRoleResourceSelfServiceForGroup() throws Exception {
	System.out.println(CLASS_NAME + "unsetRoleResourceSelfServiceForGroup");

	final Vo vo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
	final Member member = createSomeMember(vo);
	final Resource resource = setUpResource(vo, setUpFacility());
	final Group group = setUpGroup(vo, member);

	// Grant to the group, then revoke — the member must lose the inherited role.
	AuthzResolver.setRole(sess, group, resource, Role.RESOURCESELFSERVICE);
	PerunSession userSession = getHisSession(member);
	AuthzResolver.refreshAuthz(userSession);

	AuthzResolver.unsetRole(sess, group, resource, Role.RESOURCESELFSERVICE);
	AuthzResolver.refreshAuthz(userSession);

	assertFalse(AuthzResolver.isAuthorized(userSession, Role.RESOURCESELFSERVICE, resource));
}
@Test
public void setRoleVoAdmin() throws Exception {
	// CONSISTENCY: print the actual method name, as the other tests in this file do
	// (the original printed the generic "setRole").
	System.out.println(CLASS_NAME + "setRoleVoAdmin");

	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
	final Member createdMember = createSomeMember(createdVo);
	final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);

	AuthzResolver.setRole(sess, createdUser, createdVo, Role.VOADMIN);

	PerunSession sess1 = getHisSession(createdMember);
	AuthzResolver.refreshAuthz(sess1);

	assertTrue(AuthzResolver.isAuthorized(sess1, Role.VOADMIN, createdVo));
}
@Test
public void setRoleVoObserver() throws Exception {
	// CONSISTENCY: print the actual method name (original printed "setRole").
	System.out.println(CLASS_NAME + "setRoleVoObserver");

	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
	final Member createdMember = createSomeMember(createdVo);
	final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);

	AuthzResolver.setRole(sess, createdUser, createdVo, Role.VOOBSERVER);

	PerunSession sess1 = getHisSession(createdMember);
	AuthzResolver.refreshAuthz(sess1);

	assertTrue(AuthzResolver.isAuthorized(sess1, Role.VOOBSERVER, createdVo));
}
@Test
public void unsetRoleVoAdmin() throws Exception {
	// CONSISTENCY: print the actual method name (original printed "unsetRole").
	System.out.println(CLASS_NAME + "unsetRoleVoAdmin");

	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
	final Member createdMember = createSomeMember(createdVo);
	final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);

	AuthzResolver.setRole(sess, createdUser, createdVo, Role.VOADMIN);

	PerunSession sess1 = getHisSession(createdMember);
	AuthzResolver.refreshAuthz(sess1);
	assertTrue(AuthzResolver.isAuthorized(sess1, Role.VOADMIN, createdVo));

	AuthzResolver.unsetRole(sess, createdUser, createdVo, Role.VOADMIN);
	AuthzResolver.refreshAuthz(sess1);
	// assertFalse instead of assertTrue(!...) for a clearer failure message.
	assertFalse(AuthzResolver.isAuthorized(sess1, Role.VOADMIN, createdVo));
}
@Test (expected = UserNotAdminException.class)
public void unsetRoleWhichNotExists() throws Exception {
	// CONSISTENCY: print the actual method name (original printed "unsetRole").
	System.out.println(CLASS_NAME + "unsetRoleWhichNotExists");

	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
	final Member createdMember = createSomeMember(createdVo);
	final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);

	// The user never got VOADMIN, so unsetting it must throw UserNotAdminException.
	AuthzResolver.unsetRole(sess, createdUser, createdVo, Role.VOADMIN);
}
@Test (expected = UserNotAdminException.class)
public void setUnsuportedRole() throws Exception {
	// CONSISTENCY: print the actual method name (original printed "setRole").
	System.out.println(CLASS_NAME + "setUnsuportedRole");

	// NOTE(review): this body is identical to unsetRoleWhichNotExists() — it
	// UNSETS a valid role rather than SETTING an unsupported one, so the test
	// name does not match what it exercises. Looks like a copy-paste; confirm
	// the intended scenario with the authors before changing the behavior.
	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
	final Member createdMember = createSomeMember(createdVo);
	final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);

	AuthzResolver.unsetRole(sess, createdUser, createdVo, Role.VOADMIN);
}
@Test
public void isVoAdmin() throws Exception {
	System.out.println(CLASS_NAME + "isVoAdmin");

	// The test (admin) session itself is not a VO admin.
	assertFalse(AuthzResolver.isVoAdmin(sess));

	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"sdf","sdfh"));
	final Member createdMember = createSomeMember(createdVo);
	final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);

	PerunSession sess1 = getHisSession(createdMember);
	// Before being added as admin the member's session must not qualify...
	assertFalse(AuthzResolver.isVoAdmin(sess1));

	// ...and must qualify after addAdmin + refresh.
	perun.getVosManager().addAdmin(sess, createdVo, createdUser);
	AuthzResolver.refreshAuthz(sess1);
	assertTrue(AuthzResolver.isVoAdmin(sess1));
}
@Test
public void isGroupAdmin() {
	System.out.println(CLASS_NAME + "isGroupAdmin");

	// Deep-stubbed session whose principal claims the GROUPADMIN role.
	PerunSession mockedSession = mock(PerunSession.class, RETURNS_DEEP_STUBS);
	when(mockedSession.getPerunPrincipal().getRoles().hasRole(Role.GROUPADMIN)).thenReturn(true);
	sess = mockedSession;

	assertTrue(AuthzResolver.isGroupAdmin(sess));
}
@Test
public void isFacilityAdmin() {
	System.out.println(CLASS_NAME + "isFacilityAdmin");

	// Deep-stubbed session whose principal claims the FACILITYADMIN role.
	PerunSession mockedSession = mock(PerunSession.class, RETURNS_DEEP_STUBS);
	when(mockedSession.getPerunPrincipal().getRoles().hasRole(Role.FACILITYADMIN)).thenReturn(true);
	sess = mockedSession;

	assertTrue(AuthzResolver.isFacilityAdmin(sess));
}
// @Test
// public void isResourceAdmin() {
// System.out.println(CLASS_NAME + "isResourceAdmin");
//
// sess = mock(PerunSession.class, RETURNS_DEEP_STUBS);
// when(sess.getPerunPrincipal().getRoles().hasRole(Role.RESOURCEADMIN)).thenReturn(true);
//
// assertTrue(AuthzResolver.isResourceAdmin(sess));
// }
@Test
public void isVoAdminUnit() {
	System.out.println(CLASS_NAME + "isVoAdminUnit");

	// Deep-stubbed session whose principal claims the VOADMIN role.
	PerunSession mockedSession = mock(PerunSession.class, RETURNS_DEEP_STUBS);
	when(mockedSession.getPerunPrincipal().getRoles().hasRole(Role.VOADMIN)).thenReturn(true);
	sess = mockedSession;

	assertTrue(AuthzResolver.isVoAdmin(sess));
}
@Test
public void isPerunAdmin() {
	System.out.println(CLASS_NAME + "isPerunAdmin");

	// Deep-stubbed session whose principal claims the PERUNADMIN role.
	PerunSession mockedSession = mock(PerunSession.class, RETURNS_DEEP_STUBS);
	when(mockedSession.getPerunPrincipal().getRoles().hasRole(Role.PERUNADMIN)).thenReturn(true);
	sess = mockedSession;

	assertTrue(AuthzResolver.isPerunAdmin(sess));
}
@Test
public void isAuthorized() throws Exception {
	System.out.println(CLASS_NAME + "isAuthorized");

	// Make the member a VO admin and check the role via isAuthorized.
	final Vo vo = perun.getVosManager().createVo(sess, new Vo(0,"sdf","sdfh"));
	final Member member = createSomeMember(vo);
	final User user = perun.getUsersManagerBl().getUserByMember(sess, member);

	PerunSession memberSession = getHisSession(member);
	perun.getVosManager().addAdmin(sess, vo, user);
	AuthzResolver.refreshAuthz(memberSession);

	assertTrue(AuthzResolver.isAuthorized(memberSession, Role.VOADMIN, vo));
}
@Test
public void addAllSubgroupsToAuthzRoles() throws Exception {
	System.out.println(CLASS_NAME + "addAllSubgroupsToAuthzRoles");

	// Build a three-level group chain A > B > C and grant GROUPADMIN on A only.
	Vo testVo = new Vo(1000, "AuthzResolver-testVo", "AuthzResolver-testVo");
	testVo = perun.getVosManagerBl().createVo(sess, testVo);

	Group testGroupA = new Group("AuthzResolver-testGroupA", "testGroupA");
	Group testGroupB = new Group("AuthzResolver-testGroupB", "testGroupB");
	Group testGroupC = new Group("AuthzResolver-testGroupC", "testGroupC");
	testGroupA = perun.getGroupsManagerBl().createGroup(sess, testVo, testGroupA);
	testGroupB = perun.getGroupsManagerBl().createGroup(sess, testGroupA, testGroupB);
	testGroupC = perun.getGroupsManagerBl().createGroup(sess, testGroupB, testGroupC);

	HashMap<String, Set<Integer>> mapWithRights = new HashMap<>();
	Set<Integer> listWithIds = new HashSet<>();
	listWithIds.add(testGroupA.getId());
	mapWithRights.put("Vo", listWithIds);
	mapWithRights.put("Group", listWithIds);
	AuthzRoles authzRoles = new AuthzRoles(Role.GROUPADMIN, mapWithRights);

	authzRoles = AuthzResolverBlImpl.addAllSubgroupsToAuthzRoles(sess, authzRoles);

	// GROUPADMIN must now cover A and all its subgroups; the "Vo" entry stays as-is.
	assertTrue(authzRoles.hasRole(Role.GROUPADMIN));
	// assertFalse instead of assertTrue(!...) for a clearer failure report.
	assertFalse(authzRoles.hasRole(Role.VOADMIN));
	assertTrue(authzRoles.get(Role.GROUPADMIN).containsKey("Group"));
	assertTrue(authzRoles.get(Role.GROUPADMIN).containsKey("Vo"));
	assertTrue(authzRoles.get(Role.GROUPADMIN).get("Group").contains(testGroupA.getId()));
	assertTrue(authzRoles.get(Role.GROUPADMIN).get("Group").contains(testGroupB.getId()));
	assertTrue(authzRoles.get(Role.GROUPADMIN).get("Group").contains(testGroupC.getId()));
	assertTrue(authzRoles.get(Role.GROUPADMIN).get("Group").size() == 3);
	assertTrue(authzRoles.get(Role.GROUPADMIN).get("Vo").contains(testGroupA.getId()));
	assertTrue(authzRoles.get(Role.GROUPADMIN).get("Vo").size() == 1);
}
@Test
public void isAuthorizedInOtherVo() throws Exception {
	System.out.println(CLASS_NAME + "isAuthorizedInOtherVo");

	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"som3Vo","VoSom3Nam3"));
	final Member createdMemberKouril = createSomeMember(createdVo);
	final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMemberKouril);

	PerunSession sessKouril = getHisSession(createdMemberKouril);
	perun.getVosManager().addAdmin(sess, createdVo, createdUser);
	AuthzResolver.refreshAuthz(sessKouril);

	// Admin rights apply only to the VO the user administers.
	assertTrue("User is not authorized in own VO", AuthzResolver.isAuthorized(sessKouril, Role.VOADMIN, createdVo));

	final Vo otherVo = perun.getVosManager().createVo(sess, new Vo(0,"otherVo","bliblaVo"));
	// assertFalse instead of assertTrue(msg, !...) for a clearer failure report.
	assertFalse("User is authorized in foreign VO", AuthzResolver.isAuthorized(sessKouril, Role.VOADMIN, otherVo));
}
@Test
public void isAuthorizedWrongRole() throws Exception {
	System.out.println(CLASS_NAME + "isAuthorizedWrongRole");

	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"sdf","sdfh"));
	final Member createdMember = createSomeMember(createdVo);
	final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);

	PerunSession sess1 = getHisSession(createdMember);
	perun.getVosManager().addAdmin(sess, createdVo, createdUser);
	AuthzResolver.refreshAuthz(sess1);

	// A VO admin must NOT qualify for any other role on the same VO.
	// assertFalse instead of assertTrue(!...) for clearer failure reports.
	assertFalse(AuthzResolver.isAuthorized(sess1, Role.FACILITYADMIN, createdVo));
	assertFalse(AuthzResolver.isAuthorized(sess1, Role.GROUPADMIN, createdVo));
	assertFalse(AuthzResolver.isAuthorized(sess1, Role.SELF, createdVo));
	assertFalse(AuthzResolver.isAuthorized(sess1, Role.PERUNADMIN, createdVo));
}
@Test
public void getPrincipalRoleNames() throws Exception {
	System.out.println(CLASS_NAME + "getPrincipalRoleNames");

	// The internal "perunTests" principal is configured as PERUNADMIN.
	PerunPrincipal principal = new PerunPrincipal("perunTests", ExtSourcesManager.EXTSOURCE_NAME_INTERNAL, ExtSourcesManager.EXTSOURCE_INTERNAL);
	PerunSession session = new PerunSessionImpl(perun, principal, new PerunClient());

	List<String> roleNames = cz.metacentrum.perun.core.api.AuthzResolver.getPrincipalRoleNames(session);

	assertTrue(roleNames.contains(Role.PERUNADMIN));
}
@Test
public void isAuthorizedForAttributePublicReadVoFromUserAttribute() throws Exception {
	System.out.println(CLASS_NAME + "isAuthorizedForAttributePublicReadVoFromUserAttribute");

	// Session user and attribute owner live in DIFFERENT VOs.
	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"testvo1","testvo1"));
	final Vo otherVo = perun.getVosManager().createVo(sess, new Vo(0,"testvo2","testvo2"));
	final Member sessionMember = createSomeMember(createdVo);
	final User sessionUser = perun.getUsersManagerBl().getUserByMember(sess, sessionMember);
	final Member attributeMember = createSomeMember(otherVo);
	final User attributeUser = perun.getUsersManagerBl().getUserByMember(sess, attributeMember);

	// User attribute readable by SELF with READ_PUBLIC (no VO restriction).
	AttributeDefinition attrDef = new AttributeDefinition();
	attrDef.setNamespace(AttributesManager.NS_USER_ATTR_DEF);
	attrDef.setType(Integer.class.getName());
	attrDef.setFriendlyName("testUserAttr");
	attrDef.setDisplayName("test user attr");
	attrDef = perun.getAttributesManagerBl().createAttribute(sess, attrDef);

	List<AttributeRights> rights = new ArrayList<>();
	rights.add(new AttributeRights(attrDef.getId(), Role.SELF, Arrays.asList(ActionType.READ, ActionType.READ_PUBLIC)));
	perun.getAttributesManagerBl().setAttributeRights(sess, rights);

	Attribute userAttribute = new Attribute(attrDef, 2);
	perun.getAttributesManagerBl().setAttribute(sess, attributeUser, userAttribute);

	// Principal carries only SELF for sessionUser.
	// FIX: the isAuthzInitialized() stubbing was duplicated (copy-paste); stub it once.
	PerunPrincipal mockedPerunPrincipal = mock(PerunPrincipal.class, RETURNS_DEEP_STUBS);
	when(mockedPerunPrincipal.isAuthzInitialized()).thenReturn(true);
	when(mockedPerunPrincipal.getRoles()).thenReturn(new AuthzRoles(Role.SELF, sessionUser));
	when(mockedPerunPrincipal.getUser()).thenReturn(sessionUser);
	when(mockedPerunPrincipal.getUserId()).thenReturn(sessionUser.getId());

	PerunSessionImpl testSession = new PerunSessionImpl(sess.getPerun(), mockedPerunPrincipal, sess.getPerunClient());

	// READ_PUBLIC makes the attribute readable even across VOs.
	assertTrue(AuthzResolver.isAuthorizedForAttribute(testSession, ActionType.READ, attrDef, attributeUser));
}
@Test
public void isAuthorizedForAttributeValidSelfReadVoFromUserAttribute() throws Exception {
	System.out.println(CLASS_NAME + "isAuthorizedForAttributeValidSelfReadVoFromUserAttribute");

	// Session user and attribute owner live in the SAME VO.
	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"testvo1","testvo1"));
	final Member sessionMember = createSomeMember(createdVo);
	final User sessionUser = perun.getUsersManagerBl().getUserByMember(sess, sessionMember);
	final Member attributeMember = createSomeMember(createdVo);
	final User attributeUser = perun.getUsersManagerBl().getUserByMember(sess, attributeMember);

	// User attribute readable by SELF with READ_VO (same-VO restriction).
	AttributeDefinition attrDef = new AttributeDefinition();
	attrDef.setNamespace(AttributesManager.NS_USER_ATTR_DEF);
	attrDef.setType(Integer.class.getName());
	attrDef.setFriendlyName("testUserAttr");
	attrDef.setDisplayName("test user attr");
	attrDef = perun.getAttributesManagerBl().createAttribute(sess, attrDef);

	List<AttributeRights> rights = new ArrayList<>();
	rights.add(new AttributeRights(attrDef.getId(), Role.SELF, Arrays.asList(ActionType.READ, ActionType.READ_VO)));
	perun.getAttributesManagerBl().setAttributeRights(sess, rights);

	Attribute userAttribute = new Attribute(attrDef, 2);
	perun.getAttributesManagerBl().setAttribute(sess, attributeUser, userAttribute);

	// FIX: the isAuthzInitialized() stubbing was duplicated (copy-paste); stub it once.
	PerunPrincipal mockedPerunPrincipal = mock(PerunPrincipal.class, RETURNS_DEEP_STUBS);
	when(mockedPerunPrincipal.isAuthzInitialized()).thenReturn(true);
	when(mockedPerunPrincipal.getRoles()).thenReturn(new AuthzRoles(Role.SELF, sessionUser));
	when(mockedPerunPrincipal.getUser()).thenReturn(sessionUser);
	when(mockedPerunPrincipal.getUserId()).thenReturn(sessionUser.getId());

	PerunSessionImpl testSession = new PerunSessionImpl(sess.getPerun(), mockedPerunPrincipal, sess.getPerunClient());

	// Same VO => READ_VO allows the read.
	assertTrue(AuthzResolver.isAuthorizedForAttribute(testSession, ActionType.READ, attrDef, attributeUser));
}
@Test
public void isAuthorizedForAttributeInvalidSelfReadVoFromUserAttribute() throws Exception {
	System.out.println(CLASS_NAME + "isAuthorizedForAttributeInvalidSelfReadVoFromUserAttribute");

	// Session user and attribute owner live in DIFFERENT VOs.
	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"testvo1","testvo1"));
	final Vo otherVo = perun.getVosManager().createVo(sess, new Vo(0,"testvo2","testvo2"));
	final Member sessionMember = createSomeMember(createdVo);
	final User sessionUser = perun.getUsersManagerBl().getUserByMember(sess, sessionMember);
	final Member attributeMember = createSomeMember(otherVo);
	final User attributeUser = perun.getUsersManagerBl().getUserByMember(sess, attributeMember);

	// User attribute readable by SELF with READ_VO only (same-VO restriction).
	AttributeDefinition attrDef = new AttributeDefinition();
	attrDef.setNamespace(AttributesManager.NS_USER_ATTR_DEF);
	attrDef.setType(Integer.class.getName());
	attrDef.setFriendlyName("testUserAttr");
	attrDef.setDisplayName("test user attr");
	attrDef = perun.getAttributesManagerBl().createAttribute(sess, attrDef);

	List<AttributeRights> rights = new ArrayList<>();
	rights.add(new AttributeRights(attrDef.getId(), Role.SELF, Arrays.asList(ActionType.READ, ActionType.READ_VO)));
	perun.getAttributesManagerBl().setAttributeRights(sess, rights);

	Attribute userAttribute = new Attribute(attrDef, 2);
	perun.getAttributesManagerBl().setAttribute(sess, attributeUser, userAttribute);

	// FIX: the isAuthzInitialized() stubbing was duplicated (copy-paste); stub it once.
	PerunPrincipal mockedPerunPrincipal = mock(PerunPrincipal.class, RETURNS_DEEP_STUBS);
	when(mockedPerunPrincipal.isAuthzInitialized()).thenReturn(true);
	when(mockedPerunPrincipal.getRoles()).thenReturn(new AuthzRoles(Role.SELF, sessionUser));
	when(mockedPerunPrincipal.getUser()).thenReturn(sessionUser);
	when(mockedPerunPrincipal.getUserId()).thenReturn(sessionUser.getId());

	PerunSessionImpl testSession = new PerunSessionImpl(sess.getPerun(), mockedPerunPrincipal, sess.getPerunClient());

	// Different VO and no READ_PUBLIC => the read must be denied.
	assertFalse(AuthzResolver.isAuthorizedForAttribute(testSession, ActionType.READ, attrDef, attributeUser));
}
@Test
public void hasOneOfTheRolesForObjectSucceeds() throws Exception {
	System.out.println(CLASS_NAME + "hasOneOfTheRolesForObjectSucceeds");

	final Vo testVo = perun.getVosManager().createVo(sess, new Vo(0,"testvo1","testvo1"));
	final Group testGroup = perun.getGroupsManager().createGroup(sess, testVo, new Group("testGroup", "testg"));

	// Principal holds VOADMIN on the group's VO, which is one of the accepted roles.
	PerunPrincipal mockedPrincipal = mock(PerunPrincipal.class, RETURNS_DEEP_STUBS);
	when(mockedPrincipal.isAuthzInitialized()).thenReturn(true);
	when(mockedPrincipal.getRoles()).thenReturn(new AuthzRoles(Role.VOADMIN, testVo));
	PerunSession testSession = new PerunSessionImpl(sess.getPerun(), mockedPrincipal, sess.getPerunClient());

	boolean authorized = AuthzResolver.hasOneOfTheRolesForObject(
		testSession, testGroup, Sets.newHashSet(Role.PERUNADMIN, Role.VOADMIN));
	assertTrue(authorized);
}
@Test
public void hasOneOfTheRolesForObjectFails() throws Exception {
	System.out.println(CLASS_NAME + "hasOneOfTheRolesForObjectFails");

	final Vo testVo = perun.getVosManager().createVo(sess, new Vo(0,"testvo1","testvo1"));
	final Group testGroup = perun.getGroupsManager().createGroup(sess, testVo, new Group("testGroup", "testg"));

	// Principal holds no roles at all, so the check must fail.
	PerunPrincipal mockedPrincipal = mock(PerunPrincipal.class, RETURNS_DEEP_STUBS);
	when(mockedPrincipal.isAuthzInitialized()).thenReturn(true);
	when(mockedPrincipal.getRoles()).thenReturn(new AuthzRoles());
	PerunSession testSession = new PerunSessionImpl(sess.getPerun(), mockedPrincipal, sess.getPerunClient());

	boolean authorized = AuthzResolver.hasOneOfTheRolesForObject(
		testSession, testGroup, Sets.newHashSet(Role.PERUNADMIN, Role.VOADMIN));
	assertFalse(authorized);
}
@Test
public void setRoleGroupAdminSucceedsForVoAdmin() throws Exception {
	System.out.println(CLASS_NAME + "setRoleGroupAdminSucceedsForVoAdmin");

	final Vo testVo = perun.getVosManager().createVo(sess, new Vo(0,"testvo1","testvo1"));
	final Group testGroup = perun.getGroupsManager().createGroup(sess, testVo, new Group("testGroup", "testg"));
	final Member testMember = createSomeMember(testVo);
	final User testUser = perun.getUsersManagerBl().getUserByMember(sess, testMember);

	// An admin of the group's own VO may grant GROUPADMIN; no exception expected.
	PerunPrincipal mockedPrincipal = mock(PerunPrincipal.class, RETURNS_DEEP_STUBS);
	when(mockedPrincipal.isAuthzInitialized()).thenReturn(true);
	when(mockedPrincipal.getRoles()).thenReturn(new AuthzRoles(Role.VOADMIN, testVo));
	PerunSession testSession = new PerunSessionImpl(sess.getPerun(), mockedPrincipal, sess.getPerunClient());

	AuthzResolver.setRole(testSession, testUser, testGroup, Role.GROUPADMIN);
}
@Test
public void setRoleGroupAdminFailsWithoutSufficientRole() throws Exception {
	System.out.println(CLASS_NAME + "setRoleGroupAdminFailsWithoutSufficientRole");

	final Vo testVo = perun.getVosManager().createVo(sess, new Vo(0,"testvo1","testvo1"));
	final Vo otherVo = perun.getVosManager().createVo(sess, new Vo(1,"testvo2","testvo2"));
	final Group testGroup = perun.getGroupsManager().createGroup(sess, testVo, new Group("testGroup", "testg"));
	final Member testMember = createSomeMember(testVo);
	final User testUser = perun.getUsersManagerBl().getUserByMember(sess, testMember);

	// VOADMIN of a DIFFERENT Vo must not be able to grant GROUPADMIN here.
	PerunPrincipal mockedPrincipal = mock(PerunPrincipal.class, RETURNS_DEEP_STUBS);
	when(mockedPrincipal.isAuthzInitialized()).thenReturn(true);
	when(mockedPrincipal.getRoles()).thenReturn(new AuthzRoles(Role.VOADMIN, otherVo));
	PerunSession testSession = new PerunSessionImpl(sess.getPerun(), mockedPrincipal, sess.getPerunClient());

	assertThatExceptionOfType(PrivilegeException.class).isThrownBy(
		() -> AuthzResolver.setRole(testSession, testUser, testGroup, Role.GROUPADMIN));
}
@Test
public void roleExistsForExistingRole() {
	// CONSISTENCY: every other test in this class prints its name on start.
	System.out.println(CLASS_NAME + "roleExistsForExistingRole");

	// PERUNADMIN is a built-in role, so the lookup must succeed.
	assertTrue(AuthzResolver.roleExists("PERUNADMIN"));
}
@Test
public void roleExistsForNotExistingRole() {
	// CONSISTENCY: every other test in this class prints its name on start.
	System.out.println(CLASS_NAME + "roleExistsForNotExistingRole");

	// An undefined role name must not be reported as existing.
	assertFalse(AuthzResolver.roleExists("RANDOMROLE"));
}
// private methods ==============================================================
/**
 * Creates and persists a plain facility used by the resource-related tests.
 * (Removed a long-dead commented-out block that also created an Owner —
 * commented-out code should live in version control, not in the source.)
 */
private Facility setUpFacility() throws Exception {
	Facility facility = new Facility();
	facility.setName("ResourcesManagerTestFacility");
	return perun.getFacilitiesManager().createFacility(sess, facility);
}
/** Creates and persists a test resource bound to the given VO and facility. */
private Resource setUpResource(Vo vo, Facility facility) throws Exception {
	Resource template = new Resource();
	template.setName("ResourcesManagerTestResource");
	template.setDescription("Testovaci");
	return perun.getResourcesManagerBl().createResource(sess, template, vo, facility);
}
/** Creates a test group in the VO and puts the given member into it. */
private Group setUpGroup(Vo vo, Member member) throws Exception {
	Group created = perun.getGroupsManagerBl().createGroup(sess, vo, new Group("Test group", "test group"));
	perun.getGroupsManagerBl().addMember(sess, created, member);
	return created;
}
/**
 * Builds an in-memory candidate with fixed names, empty titles and an
 * ext source identified by the given login.
 */
private Candidate setUpCandidate(String login) {
	Candidate candidate = new Candidate();
	candidate.setId(0);
	candidate.setFirstName("FirstTest");
	candidate.setMiddleName("");
	candidate.setLastName("LastTest");
	candidate.setTitleBefore("");
	candidate.setTitleAfter("");
	candidate.setUserExtSource(new UserExtSource(extSource, login));
	candidate.setAttributes(new HashMap<>());
	return candidate;
}
/** Synchronously creates a fresh member (with a unique login) in the given VO. */
private Member createSomeMember(final Vo createdVo) throws ExtendMembershipException, AlreadyMemberException, WrongAttributeValueException, WrongReferenceAttributeValueException, InternalErrorException {
	return perun.getMembersManagerBl().createMemberSync(sess, createdVo, setUpCandidate("Login" + userLoginSequence++));
}
/**
 * Opens a Perun session acting as the given member's user.
 *
 * Prefers the member's LDAP ext source; otherwise falls back to the first
 * available one. BUGFIX: the original fell back to a blank
 * {@code new UserExtSource()}, which NPEs on {@code getExtSource().getName()}
 * below whenever no LDAP ext source exists.
 */
private PerunSession getHisSession(final Member createdMember) throws InternalErrorException {
	List<UserExtSource> ues = perun.getUsersManagerBl().getUserExtSources(sess, perun.getUsersManagerBl().getUserByMember(sess, createdMember));
	if (ues.isEmpty()) {
		throw new InternalErrorException("Empty userExtSource list");
	}
	UserExtSource ue = ues.get(0);
	for (UserExtSource u : ues) {
		if (u.getExtSource().getType().equals(ExtSourcesManager.EXTSOURCE_LDAP)) {
			ue = u;
			break;
		}
	}
	PerunPrincipal pp1 = new PerunPrincipal(ue.getLogin(), ue.getExtSource().getName(), ue.getExtSource().getType());
	return perun.getPerunSession(pp1, new PerunClient());
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/services/campaign_draft_service.proto
package com.google.ads.googleads.v10.services;
/**
* <pre>
* A single operation (create, update, remove) on a campaign draft.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.CampaignDraftOperation}
*/
public final class CampaignDraftOperation extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v10.services.CampaignDraftOperation)
CampaignDraftOperationOrBuilder {
private static final long serialVersionUID = 0L;
// Use CampaignDraftOperation.newBuilder() to construct.
// Builder-based constructor used by CampaignDraftOperation.newBuilder().
private CampaignDraftOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor for the default instance; all fields keep proto defaults.
private CampaignDraftOperation() {
}
// Used by the protobuf runtime to create fresh instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CampaignDraftOperation();
}
// Exposes fields that were present on the wire but unknown to this schema version.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-parsing constructor generated by protoc (older protobuf-java codegen
// style). Reads tag/value pairs until EOF (tag 0), filling the oneof
// "operation" (create=1, update=2, remove=3) and update_mask (field 4).
// Generated code — do not hand-edit the logic.
private CampaignDraftOperation(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// tag 10 = field 1 (create), length-delimited CampaignDraft message.
case 10: {
com.google.ads.googleads.v10.resources.CampaignDraft.Builder subBuilder = null;
if (operationCase_ == 1) {
subBuilder = ((com.google.ads.googleads.v10.resources.CampaignDraft) operation_).toBuilder();
}
operation_ =
input.readMessage(com.google.ads.googleads.v10.resources.CampaignDraft.parser(), extensionRegistry);
if (subBuilder != null) {
// Merge with any previously-parsed value of the same oneof case.
subBuilder.mergeFrom((com.google.ads.googleads.v10.resources.CampaignDraft) operation_);
operation_ = subBuilder.buildPartial();
}
operationCase_ = 1;
break;
}
// tag 18 = field 2 (update), length-delimited CampaignDraft message.
case 18: {
com.google.ads.googleads.v10.resources.CampaignDraft.Builder subBuilder = null;
if (operationCase_ == 2) {
subBuilder = ((com.google.ads.googleads.v10.resources.CampaignDraft) operation_).toBuilder();
}
operation_ =
input.readMessage(com.google.ads.googleads.v10.resources.CampaignDraft.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom((com.google.ads.googleads.v10.resources.CampaignDraft) operation_);
operation_ = subBuilder.buildPartial();
}
operationCase_ = 2;
break;
}
// tag 26 = field 3 (remove), UTF-8 resource-name string.
case 26: {
java.lang.String s = input.readStringRequireUtf8();
operationCase_ = 3;
operation_ = s;
break;
}
// tag 34 = field 4 (update_mask), google.protobuf.FieldMask message.
case 34: {
com.google.protobuf.FieldMask.Builder subBuilder = null;
if (updateMask_ != null) {
subBuilder = updateMask_.toBuilder();
}
updateMask_ = input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(updateMask_);
updateMask_ = subBuilder.buildPartial();
}
break;
}
// Unknown fields are preserved rather than dropped.
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Message descriptor from the generated CampaignDraftServiceProto holder class.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v10_services_CampaignDraftOperation_descriptor;
}
// Maps descriptor fields to this class's accessors for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v10_services_CampaignDraftOperation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.services.CampaignDraftOperation.class, com.google.ads.googleads.v10.services.CampaignDraftOperation.Builder.class);
}
// Discriminator and storage for the "operation" oneof:
// 0 = not set, 1 = create, 2 = update, 3 = remove (string).
private int operationCase_ = 0;
private java.lang.Object operation_;
// Type-safe view of the oneof case numbers above.
public enum OperationCase
implements com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
CREATE(1),
UPDATE(2),
REMOVE(3),
OPERATION_NOT_SET(0);
private final int value;
private OperationCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static OperationCase valueOf(int value) {
return forNumber(value);
}
// Returns null for numbers outside 0-3 (protobuf convention for open enums).
public static OperationCase forNumber(int value) {
switch (value) {
case 1: return CREATE;
case 2: return UPDATE;
case 3: return REMOVE;
case 0: return OPERATION_NOT_SET;
default: return null;
}
}
public int getNumber() {
return this.value;
}
};
// Current case of the "operation" oneof as an enum value.
public OperationCase
getOperationCase() {
return OperationCase.forNumber(
operationCase_);
}
// Field 4: optional FieldMask selecting which fields an update touches.
public static final int UPDATE_MASK_FIELD_NUMBER = 4;
private com.google.protobuf.FieldMask updateMask_;
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return updateMask_ != null;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
// Never returns null: falls back to the default (empty) FieldMask.
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return getUpdateMask();
}
// Oneof case 1: the CampaignDraft to create.
public static final int CREATE_FIELD_NUMBER = 1;
/**
* <pre>
* Create operation: No resource name is expected for the new campaign
* draft.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
* @return Whether the create field is set.
*/
@java.lang.Override
public boolean hasCreate() {
return operationCase_ == 1;
}
/**
* <pre>
* Create operation: No resource name is expected for the new campaign
* draft.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
* @return The create.
*/
@java.lang.Override
public com.google.ads.googleads.v10.resources.CampaignDraft getCreate() {
if (operationCase_ == 1) {
return (com.google.ads.googleads.v10.resources.CampaignDraft) operation_;
}
// Oneof holds a different case: return the default instance, never null.
return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
}
/**
* <pre>
* Create operation: No resource name is expected for the new campaign
* draft.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder getCreateOrBuilder() {
if (operationCase_ == 1) {
return (com.google.ads.googleads.v10.resources.CampaignDraft) operation_;
}
return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
}
  public static final int UPDATE_FIELD_NUMBER = 2;
  /**
   * <pre>
   * Update operation: The campaign draft is expected to have a valid
   * resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
   * @return Whether the update field is set.
   */
  @java.lang.Override
  public boolean hasUpdate() {
    return operationCase_ == 2;
  }
  /**
   * <pre>
   * Update operation: The campaign draft is expected to have a valid
   * resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
   * @return The update.
   */
  @java.lang.Override
  public com.google.ads.googleads.v10.resources.CampaignDraft getUpdate() {
    // operation_ holds the oneof payload; case 2 tags it as the update draft.
    if (operationCase_ == 2) {
       return (com.google.ads.googleads.v10.resources.CampaignDraft) operation_;
    }
    return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
  }
  /**
   * <pre>
   * Update operation: The campaign draft is expected to have a valid
   * resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder getUpdateOrBuilder() {
    if (operationCase_ == 2) {
       return (com.google.ads.googleads.v10.resources.CampaignDraft) operation_;
    }
    return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
  }
  public static final int REMOVE_FIELD_NUMBER = 3;
  /**
   * <pre>
   * Remove operation: The campaign draft is expected to have a valid
   * resource name, in this format:
   * `customers/{customer_id}/campaignDrafts/{base_campaign_id}~{draft_id}`
   * </pre>
   *
   * <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
   * @return Whether the remove field is set.
   */
  public boolean hasRemove() {
    return operationCase_ == 3;
  }
  /**
   * <pre>
   * Remove operation: The campaign draft is expected to have a valid
   * resource name, in this format:
   * `customers/{customer_id}/campaignDrafts/{base_campaign_id}~{draft_id}`
   * </pre>
   *
   * <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
   * @return The remove.
   */
  public java.lang.String getRemove() {
    java.lang.Object ref = "";
    if (operationCase_ == 3) {
      ref = operation_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire-format ByteString and cache the decoded String
      // back into operation_ so subsequent reads skip the UTF-8 conversion.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (operationCase_ == 3) {
        operation_ = s;
      }
      return s;
    }
  }
  /**
   * <pre>
   * Remove operation: The campaign draft is expected to have a valid
   * resource name, in this format:
   * `customers/{customer_id}/campaignDrafts/{base_campaign_id}~{draft_id}`
   * </pre>
   *
   * <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
   * @return The bytes for remove.
   */
  public com.google.protobuf.ByteString
      getRemoveBytes() {
    java.lang.Object ref = "";
    if (operationCase_ == 3) {
      ref = operation_;
    }
    if (ref instanceof java.lang.String) {
      // Mirror of getRemove(): encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      if (operationCase_ == 3) {
        operation_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so initialization always succeeds.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Exactly one of the oneof branches below can fire, keyed by operationCase_.
    if (operationCase_ == 1) {
      output.writeMessage(1, (com.google.ads.googleads.v10.resources.CampaignDraft) operation_);
    }
    if (operationCase_ == 2) {
      output.writeMessage(2, (com.google.ads.googleads.v10.resources.CampaignDraft) operation_);
    }
    if (operationCase_ == 3) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, operation_);
    }
    // update_mask is outside the oneof and serialized whenever present.
    if (updateMask_ != null) {
      output.writeMessage(4, getUpdateMask());
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized in memoizedSize; -1 marks "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (operationCase_ == 1) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, (com.google.ads.googleads.v10.resources.CampaignDraft) operation_);
    }
    if (operationCase_ == 2) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, (com.google.ads.googleads.v10.resources.CampaignDraft) operation_);
    }
    if (operationCase_ == 3) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, operation_);
    }
    if (updateMask_ != null) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(4, getUpdateMask());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v10.services.CampaignDraftOperation)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v10.services.CampaignDraftOperation other = (com.google.ads.googleads.v10.services.CampaignDraftOperation) obj;

    // Compare the non-oneof field first, then the oneof case and its payload.
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask()
          .equals(other.getUpdateMask())) return false;
    }
    if (!getOperationCase().equals(other.getOperationCase())) return false;
    switch (operationCase_) {
      case 1:
        if (!getCreate()
            .equals(other.getCreate())) return false;
        break;
      case 2:
        if (!getUpdate()
            .equals(other.getUpdate())) return false;
        break;
      case 3:
        if (!getRemove()
            .equals(other.getRemove())) return false;
        break;
      case 0:
      default:
        // OPERATION_NOT_SET: nothing further to compare.
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    // Fold in the populated oneof member, keyed by its field number, so that
    // equal messages (per equals()) produce equal hashes.
    switch (operationCase_) {
      case 1:
        hash = (37 * hash) + CREATE_FIELD_NUMBER;
        hash = (53 * hash) + getCreate().hashCode();
        break;
      case 2:
        hash = (37 * hash) + UPDATE_FIELD_NUMBER;
        hash = (53 * hash) + getUpdate().hashCode();
        break;
      case 3:
        hash = (37 * hash) + REMOVE_FIELD_NUMBER;
        hash = (53 * hash) + getRemove().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. The byte[]/ByteString/ByteBuffer
  // overloads delegate directly to PARSER; the stream overloads route through
  // GeneratedMessageV3 helpers that translate protocol errors to IOException.
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.services.CampaignDraftOperation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods: all builders are derived from DEFAULT_INSTANCE
  // so that a fresh builder starts from the canonical empty message.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v10.services.CampaignDraftOperation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this is already the default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* A single operation (create, update, remove) on a campaign draft.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.CampaignDraftOperation}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.services.CampaignDraftOperation)
com.google.ads.googleads.v10.services.CampaignDraftOperationOrBuilder {
    /** Returns the proto descriptor for {@code CampaignDraftOperation} (shared with the message). */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v10.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v10_services_CampaignDraftOperation_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // Lazily binds the reflective field accessors to the message and builder classes.
      return com.google.ads.googleads.v10.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v10_services_CampaignDraftOperation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v10.services.CampaignDraftOperation.class, com.google.ads.googleads.v10.services.CampaignDraftOperation.Builder.class);
    }
    // Construct using com.google.ads.googleads.v10.services.CampaignDraftOperation.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Intentionally empty unless alwaysUseFieldBuilders is enabled: no nested
    // builders need eager initialization for this message.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset update_mask (dropping any live nested builder) and the oneof.
      if (updateMaskBuilder_ == null) {
        updateMask_ = null;
      } else {
        updateMask_ = null;
        updateMaskBuilder_ = null;
      }
      operationCase_ = 0;
      operation_ = null;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v10.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v10_services_CampaignDraftOperation_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v10.services.CampaignDraftOperation getDefaultInstanceForType() {
      return com.google.ads.googleads.v10.services.CampaignDraftOperation.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v10.services.CampaignDraftOperation build() {
      com.google.ads.googleads.v10.services.CampaignDraftOperation result = buildPartial();
      // Unlike buildPartial(), build() enforces the initialization contract.
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v10.services.CampaignDraftOperation buildPartial() {
      com.google.ads.googleads.v10.services.CampaignDraftOperation result = new com.google.ads.googleads.v10.services.CampaignDraftOperation(this);
      // Snapshot update_mask from either the raw field or its live builder.
      if (updateMaskBuilder_ == null) {
        result.updateMask_ = updateMask_;
      } else {
        result.updateMask_ = updateMaskBuilder_.build();
      }
      // Copy whichever oneof member is active; fields 1/2 may come from a
      // nested builder, field 3 (remove) is a plain String/ByteString.
      if (operationCase_ == 1) {
        if (createBuilder_ == null) {
          result.operation_ = operation_;
        } else {
          result.operation_ = createBuilder_.build();
        }
      }
      if (operationCase_ == 2) {
        if (updateBuilder_ == null) {
          result.operation_ = operation_;
        } else {
          result.operation_ = updateBuilder_.build();
        }
      }
      if (operationCase_ == 3) {
        result.operation_ = operation_;
      }
      result.operationCase_ = operationCase_;
      onBuilt();
      return result;
    }
    // The overrides below exist solely to narrow the return type to this
    // Builder for fluent chaining; all behavior comes from the superclass.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merges; otherwise fall back to reflective merge.
      if (other instanceof com.google.ads.googleads.v10.services.CampaignDraftOperation) {
        return mergeFrom((com.google.ads.googleads.v10.services.CampaignDraftOperation)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v10.services.CampaignDraftOperation other) {
      if (other == com.google.ads.googleads.v10.services.CampaignDraftOperation.getDefaultInstance()) return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      // Oneof merge semantics: message members field-merge via merge*();
      // the string member (remove) simply replaces the current payload.
      switch (other.getOperationCase()) {
        case CREATE: {
          mergeCreate(other.getCreate());
          break;
        }
        case UPDATE: {
          mergeUpdate(other.getUpdate());
          break;
        }
        case REMOVE: {
          operationCase_ = 3;
          operation_ = other.operation_;
          onChanged();
          break;
        }
        case OPERATION_NOT_SET: {
          break;
        }
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so a builder is always buildable.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v10.services.CampaignDraftOperation parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Recover the partially parsed message so its fields survive the error,
        // then rethrow as the underlying IOException.
        parsedMessage = (com.google.ads.googleads.v10.services.CampaignDraftOperation) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Which oneof member is set (0 = none); mirrors OperationCase numbers.
    private int operationCase_ = 0;
    // Payload of the active oneof member: CampaignDraft for cases 1/2,
    // String/ByteString for case 3.
    private java.lang.Object operation_;
    /** Identifies which member of the {@code operation} oneof is populated. */
    public OperationCase
        getOperationCase() {
      return OperationCase.forNumber(
          operationCase_);
    }

    /** Clears the entire {@code operation} oneof. */
    public Builder clearOperation() {
      operationCase_ = 0;
      operation_ = null;
      onChanged();
      return this;
    }
    // update_mask state: exactly one of the raw field or the nested builder is live.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_;
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 4;</code>
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return updateMaskBuilder_ != null || updateMask_ != null;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 4;</code>
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 4;</code>
     * @param value the mask to set; must not be null
     * @return this builder, for chaining
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
        onChanged();
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 4;</code>
     * @param builderForValue builder whose built message becomes the mask
     * @return this builder, for chaining
     */
    public Builder setUpdateMask(
        com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
        onChanged();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 4;</code>
     * Field-merges {@code value} into the current mask; if no mask is set,
     * {@code value} is adopted as-is.
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (updateMask_ != null) {
          updateMask_ =
            com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial();
        } else {
          updateMask_ = value;
        }
        onChanged();
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 4;</code>
     * Clears the mask and discards any live nested builder.
     */
    public Builder clearUpdateMask() {
      if (updateMaskBuilder_ == null) {
        updateMask_ = null;
        onChanged();
      } else {
        updateMask_ = null;
        updateMaskBuilder_ = null;
      }
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 4;</code>
     * Returns a mutable builder for the mask, creating the nested builder on
     * first use (which marks the field as set).
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 4;</code>
     * Read-only view that avoids instantiating a nested builder.
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null ?
            com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
      }
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 4;</code>
     * Lazily creates the single-field builder, seeding it with the current
     * mask; ownership moves to the builder (updateMask_ is nulled).
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> 
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(),
                getParentForChildren(),
                isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Nested builder for the create member of the oneof; only meaningful
    // while operationCase_ == 1.
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v10.resources.CampaignDraft, com.google.ads.googleads.v10.resources.CampaignDraft.Builder, com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder> createBuilder_;
    /**
     * <pre>
     * Create operation: No resource name is expected for the new campaign
     * draft.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
     * @return Whether the create field is set.
     */
    @java.lang.Override
    public boolean hasCreate() {
      return operationCase_ == 1;
    }
    /**
     * <pre>
     * Create operation: No resource name is expected for the new campaign
     * draft.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
     * @return The create.
     */
    @java.lang.Override
    public com.google.ads.googleads.v10.resources.CampaignDraft getCreate() {
      if (createBuilder_ == null) {
        if (operationCase_ == 1) {
          return (com.google.ads.googleads.v10.resources.CampaignDraft) operation_;
        }
        return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
      } else {
        if (operationCase_ == 1) {
          return createBuilder_.getMessage();
        }
        return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
      }
    }
    /**
     * <pre>
     * Create operation: No resource name is expected for the new campaign
     * draft.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
     * @param value the draft to set; must not be null
     * @return this builder, for chaining
     */
    public Builder setCreate(com.google.ads.googleads.v10.resources.CampaignDraft value) {
      if (createBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        operation_ = value;
        onChanged();
      } else {
        createBuilder_.setMessage(value);
      }
      // Setting any oneof member switches the active case.
      operationCase_ = 1;
      return this;
    }
    /**
     * <pre>
     * Create operation: No resource name is expected for the new campaign
     * draft.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
     * @param builderForValue builder whose built message becomes the create payload
     * @return this builder, for chaining
     */
    public Builder setCreate(
        com.google.ads.googleads.v10.resources.CampaignDraft.Builder builderForValue) {
      if (createBuilder_ == null) {
        operation_ = builderForValue.build();
        onChanged();
      } else {
        createBuilder_.setMessage(builderForValue.build());
      }
      operationCase_ = 1;
      return this;
    }
/**
* <pre>
* Create operation: No resource name is expected for the new campaign
* draft.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
*/
public Builder mergeCreate(com.google.ads.googleads.v10.resources.CampaignDraft value) {
if (createBuilder_ == null) {
if (operationCase_ == 1 &&
operation_ != com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance()) {
operation_ = com.google.ads.googleads.v10.resources.CampaignDraft.newBuilder((com.google.ads.googleads.v10.resources.CampaignDraft) operation_)
.mergeFrom(value).buildPartial();
} else {
operation_ = value;
}
onChanged();
} else {
if (operationCase_ == 1) {
createBuilder_.mergeFrom(value);
}
createBuilder_.setMessage(value);
}
operationCase_ = 1;
return this;
}
    /**
     * <pre>
     * Create operation: No resource name is expected for the new campaign
     * draft.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
     * Clears the create member; the oneof is reset only if create was active.
     */
    public Builder clearCreate() {
      if (createBuilder_ == null) {
        if (operationCase_ == 1) {
          operationCase_ = 0;
          operation_ = null;
          onChanged();
        }
      } else {
        if (operationCase_ == 1) {
          operationCase_ = 0;
          operation_ = null;
        }
        createBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Create operation: No resource name is expected for the new campaign
     * draft.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
     * Returns a mutable builder for the create payload, switching the oneof
     * to the create case as a side effect (via getCreateFieldBuilder()).
     */
    public com.google.ads.googleads.v10.resources.CampaignDraft.Builder getCreateBuilder() {
      return getCreateFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Create operation: No resource name is expected for the new campaign
     * draft.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
     * Read-only view; does not switch the oneof or allocate a builder.
     */
    @java.lang.Override
    public com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder getCreateOrBuilder() {
      if ((operationCase_ == 1) && (createBuilder_ != null)) {
        return createBuilder_.getMessageOrBuilder();
      } else {
        if (operationCase_ == 1) {
          return (com.google.ads.googleads.v10.resources.CampaignDraft) operation_;
        }
        return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
      }
    }
/**
* <pre>
* Create operation: No resource name is expected for the new campaign
* draft.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignDraft create = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v10.resources.CampaignDraft, com.google.ads.googleads.v10.resources.CampaignDraft.Builder, com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder>
getCreateFieldBuilder() {
if (createBuilder_ == null) {
if (!(operationCase_ == 1)) {
operation_ = com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
}
createBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v10.resources.CampaignDraft, com.google.ads.googleads.v10.resources.CampaignDraft.Builder, com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder>(
(com.google.ads.googleads.v10.resources.CampaignDraft) operation_,
getParentForChildren(),
isClean());
operation_ = null;
}
operationCase_ = 1;
onChanged();;
return createBuilder_;
}
    // Nested builder for the update member of the oneof; only meaningful
    // while operationCase_ == 2.
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v10.resources.CampaignDraft, com.google.ads.googleads.v10.resources.CampaignDraft.Builder, com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder> updateBuilder_;
    /**
     * <pre>
     * Update operation: The campaign draft is expected to have a valid
     * resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
     * @return Whether the update field is set.
     */
    @java.lang.Override
    public boolean hasUpdate() {
      return operationCase_ == 2;
    }
    /**
     * <pre>
     * Update operation: The campaign draft is expected to have a valid
     * resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
     * @return The update.
     */
    @java.lang.Override
    public com.google.ads.googleads.v10.resources.CampaignDraft getUpdate() {
      if (updateBuilder_ == null) {
        if (operationCase_ == 2) {
          return (com.google.ads.googleads.v10.resources.CampaignDraft) operation_;
        }
        return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
      } else {
        if (operationCase_ == 2) {
          return updateBuilder_.getMessage();
        }
        return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
      }
    }
    /**
     * <pre>
     * Update operation: The campaign draft is expected to have a valid
     * resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
     * @param value the draft to set; must not be null
     * @return this builder, for chaining
     */
    public Builder setUpdate(com.google.ads.googleads.v10.resources.CampaignDraft value) {
      if (updateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        operation_ = value;
        onChanged();
      } else {
        updateBuilder_.setMessage(value);
      }
      // Setting any oneof member switches the active case.
      operationCase_ = 2;
      return this;
    }
    /**
     * <pre>
     * Update operation: The campaign draft is expected to have a valid
     * resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
     * @param builderForValue builder whose built message becomes the update payload
     * @return this builder, for chaining
     */
    public Builder setUpdate(
        com.google.ads.googleads.v10.resources.CampaignDraft.Builder builderForValue) {
      if (updateBuilder_ == null) {
        operation_ = builderForValue.build();
        onChanged();
      } else {
        updateBuilder_.setMessage(builderForValue.build());
      }
      operationCase_ = 2;
      return this;
    }
/**
* <pre>
* Update operation: The campaign draft is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
*/
public Builder mergeUpdate(com.google.ads.googleads.v10.resources.CampaignDraft value) {
if (updateBuilder_ == null) {
if (operationCase_ == 2 &&
operation_ != com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance()) {
operation_ = com.google.ads.googleads.v10.resources.CampaignDraft.newBuilder((com.google.ads.googleads.v10.resources.CampaignDraft) operation_)
.mergeFrom(value).buildPartial();
} else {
operation_ = value;
}
onChanged();
} else {
if (operationCase_ == 2) {
updateBuilder_.mergeFrom(value);
}
updateBuilder_.setMessage(value);
}
operationCase_ = 2;
return this;
}
    /**
     * <pre>
     * Update operation: The campaign draft is expected to have a valid
     * resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
     * Clears the update member; the oneof is reset only if update was active.
     */
    public Builder clearUpdate() {
      if (updateBuilder_ == null) {
        if (operationCase_ == 2) {
          operationCase_ = 0;
          operation_ = null;
          onChanged();
        }
      } else {
        if (operationCase_ == 2) {
          operationCase_ = 0;
          operation_ = null;
        }
        updateBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Update operation: The campaign draft is expected to have a valid
     * resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
     * Returns a mutable builder for the update payload, switching the oneof
     * to the update case as a side effect (via getUpdateFieldBuilder()).
     */
    public com.google.ads.googleads.v10.resources.CampaignDraft.Builder getUpdateBuilder() {
      return getUpdateFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Update operation: The campaign draft is expected to have a valid
     * resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
     * Read-only view; does not switch the oneof or allocate a builder.
     */
    @java.lang.Override
    public com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder getUpdateOrBuilder() {
      if ((operationCase_ == 2) && (updateBuilder_ != null)) {
        return updateBuilder_.getMessageOrBuilder();
      } else {
        if (operationCase_ == 2) {
          return (com.google.ads.googleads.v10.resources.CampaignDraft) operation_;
        }
        return com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
      }
    }
/**
* <pre>
* Update operation: The campaign draft is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignDraft update = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v10.resources.CampaignDraft, com.google.ads.googleads.v10.resources.CampaignDraft.Builder, com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder>
getUpdateFieldBuilder() {
if (updateBuilder_ == null) {
if (!(operationCase_ == 2)) {
operation_ = com.google.ads.googleads.v10.resources.CampaignDraft.getDefaultInstance();
}
updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v10.resources.CampaignDraft, com.google.ads.googleads.v10.resources.CampaignDraft.Builder, com.google.ads.googleads.v10.resources.CampaignDraftOrBuilder>(
(com.google.ads.googleads.v10.resources.CampaignDraft) operation_,
getParentForChildren(),
isClean());
operation_ = null;
}
operationCase_ = 2;
onChanged();;
return updateBuilder_;
}
    /**
     * <pre>
     * Remove operation: The campaign draft is expected to have a valid
     * resource name, in this format:
     * `customers/{customer_id}/campaignDrafts/{base_campaign_id}~{draft_id}`
     * </pre>
     *
     * <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
     * @return Whether the remove field is set.
     */
    @java.lang.Override
    public boolean hasRemove() {
      return operationCase_ == 3;
    }
/**
 * <pre>
 * Remove operation: The campaign draft is expected to have a valid
 * resource name, in this format:
 * `customers/{customer_id}/campaignDrafts/{base_campaign_id}~{draft_id}`
 * </pre>
 *
 * <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
 * @return The remove.
 */
@java.lang.Override
public java.lang.String getRemove() {
  java.lang.Object ref = "";
  if (operationCase_ == 3) {
    ref = operation_;
  }
  // operation_ may still hold the wire-format ByteString; decode it
  // lazily and cache the decoded String back into the oneof slot so the
  // UTF-8 conversion happens at most once.
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (operationCase_ == 3) {
      operation_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <pre>
 * Remove operation: The campaign draft is expected to have a valid
 * resource name, in this format:
 * `customers/{customer_id}/campaignDrafts/{base_campaign_id}~{draft_id}`
 * </pre>
 *
 * <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
 * @return The bytes for remove.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getRemoveBytes() {
  java.lang.Object ref = "";
  if (operationCase_ == 3) {
    ref = operation_;
  }
  // Mirror of getRemove(): if the slot holds a String, encode it to a
  // ByteString once and cache the encoded form back into the oneof slot.
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    if (operationCase_ == 3) {
      operation_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * <pre>
 * Remove operation: The campaign draft is expected to have a valid
 * resource name, in this format:
 * `customers/{customer_id}/campaignDrafts/{base_campaign_id}~{draft_id}`
 * </pre>
 *
 * <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
 * @param value The remove to set.
 * @return This builder for chaining.
 */
public Builder setRemove(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Store the value and mark the oneof as holding the 'remove' case.
  operation_ = value;
  operationCase_ = 3;
  onChanged();
  return this;
}
/**
 * <pre>
 * Remove operation: The campaign draft is expected to have a valid
 * resource name, in this format:
 * `customers/{customer_id}/campaignDrafts/{base_campaign_id}~{draft_id}`
 * </pre>
 *
 * <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
 * @return This builder for chaining.
 */
public Builder clearRemove() {
  // Nothing to clear unless the oneof currently holds 'remove'.
  if (operationCase_ != 3) {
    return this;
  }
  operationCase_ = 0;
  operation_ = null;
  onChanged();
  return this;
}
/**
 * <pre>
 * Remove operation: The campaign draft is expected to have a valid
 * resource name, in this format:
 * `customers/{customer_id}/campaignDrafts/{base_campaign_id}~{draft_id}`
 * </pre>
 *
 * <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
 * @param value The bytes for remove to set.
 * @return This builder for chaining.
 */
public Builder setRemoveBytes(
    com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject bytes that are not valid UTF-8 before accepting them.
  checkByteStringIsUtf8(value);
  // Store the raw bytes and mark the oneof as holding the 'remove' case.
  operation_ = value;
  operationCase_ = 3;
  onChanged();
  return this;
}
// Plain delegations to the generated superclass; present so the builder's
// fluent return type is this concrete Builder.
@java.lang.Override
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.services.CampaignDraftOperation)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v10.services.CampaignDraftOperation)
// Eagerly-initialized singleton default instance for this message type.
private static final com.google.ads.googleads.v10.services.CampaignDraftOperation DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v10.services.CampaignDraftOperation();
}
public static com.google.ads.googleads.v10.services.CampaignDraftOperation getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// NOTE(review): this is the legacy parse-in-constructor gencode pattern
// (the message constructor consumes the CodedInputStream directly);
// newer protobuf-java generators emit reflection-based parsing instead.
private static final com.google.protobuf.Parser<CampaignDraftOperation>
    PARSER = new com.google.protobuf.AbstractParser<CampaignDraftOperation>() {
  @java.lang.Override
  public CampaignDraftOperation parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new CampaignDraftOperation(input, extensionRegistry);
  }
};
public static com.google.protobuf.Parser<CampaignDraftOperation> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CampaignDraftOperation> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v10.services.CampaignDraftOperation getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
package com.navisens.pojostick.navisensleafletmaps;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.JavascriptInterface;
import android.webkit.WebResourceRequest;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import com.navisens.motiondnaapi.MotionDna;
import com.navisens.motiondnaapi.MotionDnaApplication;
import com.navisens.motiondnaapi.MotionDnaInterface;
import java.util.Locale;
/**
 * Created by Joseph on 6/23/17.
 *
 * Navisens MotionDna map support via leaflet.
 *
 * <p>A {@link Fragment} hosting a {@link WebView} that renders a leaflet map
 * (loaded from {@code file:///android_asset/index.html}) and streams location
 * updates from the Navisens MotionDna SDK into it via
 * {@code evaluateJavascript}. Configuration methods ({@code addMap},
 * {@code addControls}, {@code useLocalOnly}, ...) queue JavaScript that is
 * executed once the page finishes loading.</p>
 *
 * <p>NOTE(review): {@code motionDna}, {@code lastLocation}, {@code devKey},
 * {@code customLocation} and {@code shouldRestart} are static, so their state
 * is shared across all instances and survives Fragment recreation —
 * presumably intentional to keep the SDK session alive across config
 * changes, but confirm; a static reference chain reaching an Activity would
 * leak it.</p>
 */
public class MotionDnaMaps extends Fragment {
// TODO: test if GPS faster when pass into js as map center
// https://stackoverflow.com/questions/10524381/gps-android-get-positioning-only-once
/**
 * Usable map types are:
 * <ul>
 * No API key required
 * <li>{@link #OSM_Mapnik}</li>
 * <li>{@link #OSM_France}</li>
 * <br>
 * API key required, and custom styles available
 * <li>{@link #Thunderforest}</li>
 * <li>{@link #Mapbox}</li>
 * <br>
 * WIP no key required
 * <li>{@link #Esri}</li>
 * </ul>
 */
@SuppressWarnings({"WeakerAccess", "unused"})
public enum Maps {
/**
 * Open Street Maps, does not require a key, no custom map style
 */
OSM_Mapnik("OpenStreetMap_Mapnik"),
/**
 * Open Street Maps, does not require a key, custom map style is France, slighty higher zoom compared to {@link #OSM_Mapnik}
 */
OSM_France("OpenStreetMap_France"),
/**
 * Thunderforest tiling servers, requires a key, default style is 'outdoors'
 */
Thunderforest("Thunderforest"),
/**
 * Mapbox tiling servers, requires a key, default style is 'mapbox.streets'
 */
Mapbox("Mapbox"),
/**
 * Esri tiling servers, not fully implemented yet, current access does not require key, but has missing tiles at high zooms
 */
Esri("Esri");
// Suffix used to build the JS function name "addMap_<name>(...)".
private final String name;
Maps(String s) {
name = s;
}
@Override
public String toString() {
return this.name;
}
}
// Request code passed to ActivityCompat.requestPermissions.
private static final int REQUEST_MDNA_PERMISSIONS = 1;
// JS call used when the caller never configured a map explicitly.
private static final String DEFAULT_MAP = "addMap_OpenStreetMap_Mapnik();";
// Scale factor applied to local cartesian coordinates before plotting.
private static final double LOCAL_SCALING = Math.pow(2, -17);
// Shared across instances; see class-level NOTE(review).
private static MotionDnaApplication motionDna;
private static MotionDna.LocationStatus lastLocation = MotionDna.LocationStatus.UNINITIALIZED;
private static String devKey;
private static boolean customLocation = false, shouldRestart = true;
private WebView webView;
private MotionDnaService motionDnaService;
private boolean useDefaultMap = true, useLocal = false;
public MotionDnaMaps () {
super();
motionDnaService = new MotionDnaService();
}
// NOTE(review): non-default Fragment constructor — state is lost if the
// framework recreates the Fragment reflectively; devKey being static
// presumably mitigates this. Confirm.
@SuppressLint("ValidFragment")
public MotionDnaMaps (String devkey) {
this();
MotionDnaMaps.devKey = devkey;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// Hand the host Activity to the service, which starts the SDK on first call.
motionDnaService.loadedActivity(this.getActivity());
}
@SuppressLint("SetJavaScriptEnabled")
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
webView = new WebView(getActivity()) {
// https://stackoverflow.com/a/44278258
@Override
public boolean onTouchEvent(MotionEvent ev) {
int action = ev.getAction();
switch (action) {
case MotionEvent.ACTION_DOWN:
// Disallow ScrollView to intercept touch events.
this.getParent().requestDisallowInterceptTouchEvent(true);
break;
case MotionEvent.ACTION_UP:
// Allow ScrollView to intercept touch events.
this.getParent().requestDisallowInterceptTouchEvent(false);
break;
}
// Handle MapView's touch events.
super.onTouchEvent(ev);
return true;
}
};
WebSettings webSettings = webView.getSettings();
webSettings.setJavaScriptEnabled(true);
webSettings.setDomStorageEnabled(true);
// Exposes customLocationInitialized(...) to the page as JSInterface.
webView.addJavascriptInterface(new JavaScriptInterface(), "JSInterface");
webView.loadUrl("file:///android_asset/index.html");
this.setRetainInstance(true);
webView.setWebViewClient(new WebViewClient() {
@SuppressWarnings("deprecation")
@Override
public boolean shouldOverrideUrlLoading(WebView view, String url) {
redirectUrl(url);
return true;
}
@TargetApi(Build.VERSION_CODES.N)
@Override
public boolean shouldOverrideUrlLoading(WebView view, WebResourceRequest request) {
redirectUrl(request.getUrl().toString());
return true;
}
@Override
public void onPageFinished(WebView view, String url) {
// First page load clears any stale saved state, then the queued
// configuration JS is run against the freshly loaded page.
if (shouldRestart) {
shouldRestart = false;
restart();
}
motionDnaService.evaluateJS(view);
}
// Opens any in-page link in an external browser instead of the WebView.
private void redirectUrl(String url) {
try {
Uri uri = Uri.parse(url);
Intent intent = new Intent(Intent.ACTION_VIEW, uri);
startActivity(intent);
} catch (Exception e) {
// ignore bad url requests
}
}
});
return webView;
}
@Override
public void onPause() {
super.onPause();
// Persist map/position state in the page before backgrounding.
save();
}
// NOTE(review): requestCode is not checked against
// REQUEST_MDNA_PERMISSIONS and super.onRequestPermissionsResult is not
// invoked — any permission result reaching this Fragment triggers the
// check below. Confirm this is intended.
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if (!MotionDnaApplication.checkMotionDnaPermissions(getActivity().getApplicationContext())) {
System.err.println("ERROR: Insufficient permissions.");
stop();
}
}
/**
 * Add a map which does not require credentials
 * @param name The type of map to add
 * @return a reference to this object
 */
@SuppressWarnings("unused")
public MotionDnaMaps addMap (Maps name) {
motionDnaService.appendJS(
String.format("addMap_%s();",
name));
useDefaultMap = false;
return this;
}
/**
 * Add a map with credentials, but use the default style
 * @param name The type of map to add
 * @param key Login credentials key for the given map type
 * @return a reference to this object
 */
@SuppressWarnings("unused")
public MotionDnaMaps addMap (Maps name, String key) {
motionDnaService.appendJS(
String.format("addMap_%s('%s');",
name, key));
useDefaultMap = false;
return this;
}
/**
 * Add a map with credentials, and specify a map style
 * @param name The type of map to add
 * @param key Login credentials key for the given map type
 * @param mapid The map style, for example Mapbox maps define the style 'mapbox.streets'
 * @return a reference to this object
 */
@SuppressWarnings("unused")
public MotionDnaMaps addMap (Maps name, String key, String mapid) {
motionDnaService.appendJS(
String.format("addMap_%s('%s', '%s');",
name, key, mapid));
useDefaultMap = false;
return this;
}
/**
 * Add a custom user-defined map
 * @param url The url to the tiling server
 * @param jsonOptions A JavaScript object stored as JSON, including any parameters required by the url. See leaflet TileLayer for more documentation details
 * @return a reference to this object
 */
@SuppressWarnings("unused")
public MotionDnaMaps addMap (String url, String jsonOptions) {
motionDnaService.appendJS(
String.format("addMap('%s', '%s');",
url, jsonOptions));
useDefaultMap = false;
return this;
}
/**
 * Enable user control over the map. This gives the user the ability to set custom location and heading, which will disable NavisensLocation when in global mode.
 * <br>
 * NOTE: At the current time, user controls are not supported while in local mode, and calls to this method will be ignored if {@link #useLocalOnly()} is called.
 * @return a reference to this object
 */
@SuppressWarnings("unused")
public MotionDnaMaps addControls () {
motionDnaService.appendJS("UI();");
return this;
}
/**
 * Prevent maps from reinitializing, using the last saved state as the starting point.
 * <br>
 * NOTE: This method will not destroy save state on restart and may cause Maps to run out of memory. Make sure to call the {@link #restart()} function as necessary.
 * @return a reference to this object
 */
@SuppressWarnings("unused")
public MotionDnaMaps preventRestart () {
shouldRestart = false;
return this;
}
/**
 * Only provide the local cartesian coordinates. The user will begin at coordinates (0, 0), and location services will begin immediately. No default map will be used.
 * <br>
 * Normally, when using global mode by default, location services will require both GPS and user movement (about 1-2 blocks of walking) before location is fully initialized.
 * <br>
 * NOTE: At the current time, local mode does not support enabling controls, and will disregard calls to {@link #addControls()}.
 * @return a reference to this object
 */
@SuppressWarnings("unused")
public MotionDnaMaps useLocalOnly () {
// setSimple() must run before the other queued JS, hence the prepend.
motionDnaService.javascript = "setSimple();" + motionDnaService.javascript;
useLocal = true;
useDefaultMap = false;
return this;
}
/**
 * Hide plotting markers, and prevent user from accessing
 * @return a reference to this object
 */
@SuppressWarnings("unused")
public MotionDnaMaps hideMarkers () {
motionDnaService.appendJS("hideClustering()");
return this;
}
/**
 * Pause the location services
 * @return whether location services were paused successfully
 */
@SuppressWarnings("unused")
public boolean pause () {
if (motionDna != null)
motionDna.pause();
return motionDna != null;
}
/**
 * Resume providing location services
 * @return whether location services were resumed successfully
 */
@SuppressWarnings("unused")
public boolean resume () {
if (motionDna != null)
motionDna.resume();
return motionDna != null;
}
/**
 * Save the current display, user location, and map zoom temporarily. When the activity restarts, this will be cleared unless {@link #preventRestart()} is called.
 * @return whether save was called successfully
 */
@SuppressWarnings("unused")
public boolean save () {
if (webView != null)
webView.evaluateJavascript("if (typeof SAVE !== 'undefined') SAVE();", null);
return webView != null;
}
/**
 * Restart Maps cache to clear the currently displayed location and path
 * @return whether restart was called successfully
 */
@SuppressWarnings("unused")
public boolean restart () {
if (webView != null)
webView.evaluateJavascript("START();", null);
return webView != null;
}
/**
 * Signals all components to terminate.
 */
@SuppressWarnings("unused")
public void stop () {
if (motionDna != null) {
motionDna.stop();
motionDna = null;
}
if (webView != null)
webView.evaluateJavascript("STOP();", null);
webView = null;
}
// Bridges MotionDna SDK callbacks into JavaScript calls on the WebView.
// Non-static inner class: it holds the hosting Activity and WebView, and
// is itself reachable from the static motionDna field — see class NOTE.
private class MotionDnaService implements MotionDnaInterface {
Activity activity;
WebView webview;
// Queue of configuration JS; "%b" is filled with whether the location
// status is still UNINITIALIZED when evaluateJS runs.
String javascript = "RUN(%b);";
// Last local-mode coordinates/heading (scaled), kept for JS callbacks.
double x, y, h;
void appendJS(String js) {
javascript += js;
}
void loadedActivity(Activity act) {
this.activity = act;
if (motionDna == null) {
// First launch: create the SDK instance, request runtime
// permissions, and start it with the developer key.
motionDna = new MotionDnaApplication(this);
ActivityCompat.requestPermissions(this.activity, MotionDnaApplication.needsRequestingPermissions(), REQUEST_MDNA_PERMISSIONS);
motionDna.runMotionDna(devKey);
if (useLocal)
customLocation = true;
else
motionDna.setLocationNavisens();
motionDna.setCallbackUpdateRateInMs(100);
motionDna.setMapCorrectionEnabled(true);
motionDna.setExternalPositioningState(MotionDna.ExternalPositioningState.HIGH_ACCURACY);
} else {
// NOTE(review): writes a field literally named 'motionDna' on the
// SDK object to repoint its callback interface at this new
// service instance — confirm against the MotionDnaApplication API.
motionDna.motionDna = this;
}
}
void evaluateJS(WebView view) {
this.webview = view;
// Flush the queued configuration JS into the freshly loaded page.
this.webview.evaluateJavascript(
String.format(javascript, lastLocation == MotionDna.LocationStatus.UNINITIALIZED),
null);
if (useDefaultMap)
this.webview.evaluateJavascript(DEFAULT_MAP, null);
}
@Override
public void receiveMotionDna(MotionDna motionDna) {
if (this.webview != null) {
MotionDna.Location location = motionDna.getLocation();
if (useLocal) {
// Local mode: plot scaled cartesian coordinates directly.
x = location.localLocation.x * LOCAL_SCALING;
y = location.localLocation.y * LOCAL_SCALING;
h = location.heading;
// System.out.println(x + ", " + y + ", " + h);
this.webview.evaluateJavascript(
String.format(Locale.ENGLISH, "if (typeof SESSION_RELOADED !== 'undefined') addPoint(%.7f, %.7f, %d);",
y,
x,
motionDna.getMotion().primaryMotion.ordinal()),
null
);
this.webview.evaluateJavascript(
String.format(Locale.ENGLISH, "if (typeof SESSION_RELOADED !== 'undefined') move(%.7f, %.7f, %.7f, %d);",
y,
x,
h,
motionDna.getMotion().primaryMotion.ordinal()),
null
);
} else {
// Global mode: notify the page on status transitions, then plot
// global lat/lng once localization is usable.
if (lastLocation != location.locationStatus) {
switch (location.locationStatus) {
case NAVISENS_INITIALIZING:
this.webview.evaluateJavascript("acquiredGPS();", null);
break;
case NAVISENS_INITIALIZED:
this.webview.evaluateJavascript("acquiredLocation();", null);
}
lastLocation = location.locationStatus;
}
if (customLocation || location.locationStatus == MotionDna.LocationStatus.NAVISENS_INITIALIZED) {
this.webview.evaluateJavascript(
String.format(Locale.ENGLISH, "if (typeof SESSION_RELOADED !== 'undefined') addPoint(%.7f, %.7f, %d);",
location.globalLocation.latitude,
location.globalLocation.longitude,
motionDna.getMotion().primaryMotion.ordinal()),
null
);
}
this.webview.evaluateJavascript(
String.format(Locale.ENGLISH, "if (typeof SESSION_RELOADED !== 'undefined') move(%.7f, %.7f, %.7f, %d);",
location.globalLocation.latitude,
location.globalLocation.longitude,
location.heading,
motionDna.getMotion().primaryMotion.ordinal()),
null
);
}
}
}
// Remaining MotionDnaInterface callbacks are intentionally no-ops.
@Override
public void failureToAuthenticate(String s) {}
@Override
public void reportSensorMissing(String s) {}
@Override
public void reportSensorTiming(double v, String s) {}
@Override
public void errorOccurred(Exception e, String s) {}
@Override
public Context getAppContext() {
return this.activity.getApplicationContext();
}
@Override
public PackageManager getPkgManager() {
return this.activity.getPackageManager();
}
}
// JS -> Java bridge, registered on the WebView as "JSInterface".
private class JavaScriptInterface {
@SuppressWarnings("unused")
@JavascriptInterface
public void customLocationInitialized(double lat, double lng, double heading) {
if (useLocal) {
// motionDna.setCartesianOffsetInMeters(lng - motionDnaService.x, lat - motionDnaService.y);
// motionDna.setHeadingInDegrees(heading);
// motionDna.setLocalHeadingOffsetInDegrees(heading - motionDnaService.h);
} else {
motionDna.setLocationLatitudeLongitudeAndHeadingInDegrees(lat, lng, heading);
}
customLocation = true;
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.