gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package rtrk.pnrs1.ra174_2014.taskmanager.AddTaskMainScreen;

import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v7.app.AppCompatActivity;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.TimePicker;
import android.widget.Toast;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Calendar;

import rtrk.pnrs1.ra174_2014.taskmanager.ListAdapterStuff.ListAdapter;
import rtrk.pnrs1.ra174_2014.taskmanager.ListAdapterStuff.ListData;
import rtrk.pnrs1.ra174_2014.taskmanager.R;
import rtrk.pnrs1.ra174_2014.taskmanager.TaskManagerMainScreen.StartScreen;

/**
 * Screen for creating or editing a single task.
 *
 * <p>When launched with an "EDIT" extra (a serialized {@link ListData}) the form is
 * pre-populated and the activity acts as an editor; otherwise it creates a new task.
 * Results are reported to the caller via {@code setResult} with a "Task" extra
 * (add/edit confirmed), a "Deleted" extra (delete confirmed), or RESULT_CANCELED.
 *
 * <p>Priority encoding used throughout this class: 0 = none picked,
 * 1 = green, 2 = yellow, 3 = red (see the color-button click listeners).
 * Months are stored 1-based inside {@link ListData} (DatePicker.getMonth() + 1).
 */
public class AddTaskView extends AppCompatActivity implements AddTaskModel.View {

    Button btnAddTask;
    Button btnCancelTask;
    Button btnGreen;
    Button btnRed;
    Button btnYellow;
    EditText txtTaskName;
    EditText txtTaskDescription;
    TimePicker taskTimePicker;
    CheckBox chkReminder;
    DatePicker taskDatePicker;
    ListData listItem;
    ArrayList<ListData> listOfTasks;
    int pickedColor;      // 0 = no priority chosen yet; add button stays disabled
    boolean canEdit;      // true when launched with an "EDIT" extra
    Intent intent;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_add_task_view);
        initStuff();
    }

    /** Re-validates the add button on every text change in name/description fields. */
    private class MyTextWatcher implements TextWatcher {
        @Override
        public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            CheckAddTaskButton();
        }

        @Override
        public void onTextChanged(CharSequence s, int start, int before, int count) {
            CheckAddTaskButton();
        }

        @Override
        public void afterTextChanged(Editable s) {
            CheckAddTaskButton();
        }
    }

    /**
     * Binds views, seeds pickers with "now", pre-fills the form in edit mode,
     * and wires all click/text listeners.
     */
    @Override
    public void initStuff() {
        txtTaskName = (EditText) findViewById(R.id.txtTaskName);
        txtTaskDescription = (EditText) findViewById(R.id.txtTaskDescription);
        taskTimePicker = (TimePicker) findViewById(R.id.timePicker);
        taskDatePicker = (DatePicker) findViewById(R.id.datePicker);
        chkReminder = (CheckBox) findViewById(R.id.chkReminder);
        btnRed = (Button) findViewById(R.id.btnRed);
        btnGreen = (Button) findViewById(R.id.btnGreen);
        btnYellow = (Button) findViewById(R.id.btnYellow);
        btnCancelTask = (Button) findViewById(R.id.btnCancelTask);
        btnAddTask = (Button) findViewById(R.id.btnAddTask);

        btnAddTask.setEnabled(false);   // enabled only once name, description and priority are set
        pickedColor = 0;
        taskTimePicker.setIs24HourView(true);
        taskTimePicker.setCurrentHour(Calendar.getInstance().get(Calendar.HOUR_OF_DAY));
        taskDatePicker.setMinDate(System.currentTimeMillis());

        // Caller supplies the button captions (e.g. "Add"/"Cancel" vs "Save"/"Delete").
        btnAddTask.setText(getIntent().getStringExtra(getResources().getString(R.string.btn1)));
        btnCancelTask.setText(getIntent().getStringExtra(getResources().getString(R.string.btn2)));

        listOfTasks = new ArrayList<>();
        intent = getIntent();

        if (intent.hasExtra("EDIT")) {
            canEdit = true;
            listItem = (ListData) intent.getSerializableExtra("EDIT");
            txtTaskName.setText(listItem.taskName);
            // ListData stores a 1-based month (see the getMonth()+1 calls below),
            // while DatePicker.updateDate() expects a 0-based month.
            // The original code added 1 again (off by two); subtract 1 instead.
            taskDatePicker.updateDate(listItem.year, listItem.month - 1, listItem.day);
            taskTimePicker.setCurrentHour(listItem.hour);
            taskTimePicker.setCurrentMinute(listItem.minute);
            txtTaskDescription.setText(listItem.taskDescription);
            chkReminder.setChecked(listItem.reminder);

            // Re-apply the stored priority: gray out the two non-selected buttons.
            switch (listItem.priority) {
                case 1:   // green
                    btnRed.setEnabled(false);
                    btnRed.setBackgroundColor(Color.GRAY);
                    btnYellow.setEnabled(false);
                    btnYellow.setBackgroundColor(Color.GRAY);
                    pickedColor = 1;
                    break;
                case 2:   // yellow
                    btnRed.setEnabled(false);
                    btnRed.setBackgroundColor(Color.GRAY);
                    btnGreen.setEnabled(false);
                    btnGreen.setBackgroundColor(Color.GRAY);
                    pickedColor = 2;
                    break;
                case 3:   // red
                    btnGreen.setEnabled(false);
                    btnGreen.setBackgroundColor(Color.GRAY);
                    btnYellow.setEnabled(false);
                    btnYellow.setBackgroundColor(Color.GRAY);
                    pickedColor = 3;
                    break;
                default:
                    pickedColor = 0;
                    break;
            }
            CheckAddTaskButton();
        } else {
            canEdit = false;
        }

        // Initialize Listeners!
        txtTaskName.addTextChangedListener(new MyTextWatcher());
        txtTaskDescription.addTextChangedListener(new MyTextWatcher());

        btnCancelTask.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                ShowCancelToast();
                Intent intent = new Intent(getBaseContext(), StartScreen.class);
                if (btnCancelTask.getText().equals(
                        getResources().getString(R.string.delete_list_element))) {
                    listItem = new ListData(txtTaskName.getText().toString(),
                            txtTaskDescription.getText().toString(), pickedColor,
                            taskDatePicker.getDayOfMonth(), taskDatePicker.getMonth() + 1,
                            taskDatePicker.getYear(), taskTimePicker.getCurrentHour(),
                            taskTimePicker.getCurrentMinute(), false, chkReminder.isChecked());
                    intent.putExtra("Deleted", listItem);
                    setResult(RESULT_OK, intent);
                    finish();
                    // Bug fix: without this return the code below ran as well and
                    // replaced the RESULT_OK delete result with RESULT_CANCELED.
                    return;
                }
                setResult(RESULT_CANCELED, intent);
                finish();
            }
        });

        btnAddTask.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (btnAddTask.isEnabled()) {
                    Intent intent = new Intent(getBaseContext(), StartScreen.class);
                    listItem = new ListData(txtTaskName.getText().toString(),
                            txtTaskDescription.getText().toString(), pickedColor,
                            taskDatePicker.getDayOfMonth(), taskDatePicker.getMonth() + 1,
                            taskDatePicker.getYear(), taskTimePicker.getCurrentHour(),
                            taskTimePicker.getCurrentMinute(), false, chkReminder.isChecked());
                    listOfTasks.add(listItem);
                    intent.putExtra("Task", listItem);
                    ShowToast();
                    setResult(RESULT_OK, intent);
                    finish();
                }
            }
        });

        // Each color button toggles its priority: picking it grays out the other
        // two; clicking again (while selected) restores them and clears the pick.
        btnGreen.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (pickedColor == 0) {
                    btnRed.setEnabled(false);
                    btnRed.setBackgroundColor(Color.GRAY);
                    btnYellow.setEnabled(false);
                    btnYellow.setBackgroundColor(Color.GRAY);
                    pickedColor = 1;
                    CheckAddTaskButton();
                } else if (pickedColor == 1) {
                    btnRed.setEnabled(true);
                    btnRed.setBackgroundColor(Color.RED);
                    btnYellow.setEnabled(true);
                    btnYellow.setBackgroundColor(Color.YELLOW);
                    pickedColor = 0;
                    CheckAddTaskButton();
                }
            }
        });

        btnRed.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (pickedColor == 0) {
                    btnGreen.setEnabled(false);
                    btnGreen.setBackgroundColor(Color.GRAY);
                    btnYellow.setEnabled(false);
                    btnYellow.setBackgroundColor(Color.GRAY);
                    pickedColor = 3;
                    CheckAddTaskButton();
                } else if (pickedColor == 3) {
                    btnGreen.setEnabled(true);
                    btnGreen.setBackgroundColor(Color.GREEN);
                    btnYellow.setEnabled(true);
                    btnYellow.setBackgroundColor(Color.YELLOW);
                    pickedColor = 0;
                    CheckAddTaskButton();
                }
            }
        });

        btnYellow.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (pickedColor == 0) {
                    btnRed.setEnabled(false);
                    btnRed.setBackgroundColor(Color.GRAY);
                    btnGreen.setEnabled(false);
                    btnGreen.setBackgroundColor(Color.GRAY);
                    pickedColor = 2;
                    CheckAddTaskButton();
                } else if (pickedColor == 2) {
                    btnRed.setEnabled(true);
                    btnRed.setBackgroundColor(Color.RED);
                    btnGreen.setEnabled(true);
                    btnGreen.setBackgroundColor(Color.GREEN);
                    pickedColor = 0;
                    CheckAddTaskButton();
                }
            }
        });
    }

    /** Builds a ListData from the current form state and finishes with RESULT_OK. */
    @Override
    public void proceedToNextActivity() {
        Intent intent = new Intent(getBaseContext(), StartScreen.class);
        listItem = new ListData(txtTaskName.getText().toString(),
                txtTaskDescription.getText().toString(), pickedColor,
                taskDatePicker.getDayOfMonth(), taskDatePicker.getMonth() + 1,
                taskDatePicker.getYear(), taskTimePicker.getCurrentHour(),
                taskTimePicker.getCurrentMinute(), false, chkReminder.isChecked());
        listOfTasks.add(listItem);
        intent.putExtra("Task", listItem);
        setResult(RESULT_OK, intent);
        finish();
    }

    /** Back press behaves like cancel: toast + RESULT_CANCELED. */
    @Override
    public void onBackPressed() {
        super.onBackPressed();
        ShowCancelToast();
        Intent intent = new Intent(getBaseContext(), StartScreen.class);
        setResult(RESULT_CANCELED, intent);
        finish();
    }

    /**
     * Enables the add/save button only when a priority is picked and both the
     * name and the description fields are non-empty.
     */
    @Override
    public void CheckAddTaskButton() {
        // Dead Calendar/hour/minute computation removed — the original read the
        // current time here but never used it.
        boolean complete = pickedColor >= 1
                && !txtTaskDescription.getText().toString().isEmpty()
                && !txtTaskName.getText().toString().isEmpty();
        btnAddTask.setEnabled(complete);
    }

    public void ShowToast() {
        Toast.makeText(this, R.string.toast_add_task, Toast.LENGTH_LONG).show();
    }

    public void ShowCancelToast() {
        Toast.makeText(this, R.string.toast_cancel_task, Toast.LENGTH_LONG).show();
    }
}
package net.coding.program.maopao.third;

import android.graphics.Bitmap;

/**
 * Created by paveld on 3/6/14.
 *
 * Single-method utility applying Mario Klingemann's "Stack Blur" to an Android
 * Bitmap — a fast approximation between Gaussian and box blur.
 */
public class FastBlur {

    /**
     * Blurs {@code sentBitmap} with the given radius.
     *
     * @param sentBitmap       source bitmap; its alpha channel is preserved.
     * @param radius           blur radius in pixels; values &lt; 1 yield {@code null}.
     * @param canReuseInBitmap when true the source bitmap is blurred IN PLACE
     *                         (the caller's bitmap is mutated); when false a copy
     *                         is blurred and returned.
     * @return the blurred bitmap, or {@code null} if {@code radius < 1}.
     */
    public static Bitmap doBlur(Bitmap sentBitmap, int radius, boolean canReuseInBitmap) {

        // Stack Blur v1.0 from
        // http://www.quasimondo.com/StackBlurForCanvas/StackBlurDemo.html
        //
        // Java Author: Mario Klingemann <mario at quasimondo.com>
        // http://incubator.quasimondo.com
        // created Feburary 29, 2004
        // Android port : Yahel Bouaziz <yahel at kayenko.com>
        // http://www.kayenko.com
        // ported april 5th, 2012

        // This is a compromise between Gaussian Blur and Box blur
        // It creates much better looking blurs than Box Blur, but is
        // 7x faster than my Gaussian Blur implementation.
        //
        // I called it Stack Blur because this describes best how this
        // filter works internally: it creates a kind of moving stack
        // of colors whilst scanning through the image. Thereby it
        // just has to add one new block of color to the right side
        // of the stack and remove the leftmost color. The remaining
        // colors on the topmost layer of the stack are either added on
        // or reduced by one, depending on if they are on the right or
        // on the left side of the stack.
        //
        // If you are using this algorithm in your code please add
        // the following line:
        //
        // Stack Blur Algorithm by Mario Klingemann <mario@quasimondo.com>

        Bitmap bitmap;
        if (canReuseInBitmap) {
            bitmap = sentBitmap;
        } else {
            bitmap = sentBitmap.copy(sentBitmap.getConfig(), true);
        }

        // NOTE(review): when canReuseInBitmap is false the copy above is made
        // before this guard, so a radius < 1 wastes one bitmap copy.
        if (radius < 1) {
            return (null);
        }

        int w = bitmap.getWidth();
        int h = bitmap.getHeight();

        // Whole image as one ARGB int array; all work happens on this buffer.
        int[] pix = new int[w * h];
        bitmap.getPixels(pix, 0, w, 0, 0, w, h);

        int wm = w - 1;   // max x index (used to clamp at edges)
        int hm = h - 1;   // max y index
        int wh = w * h;
        int div = radius + radius + 1;   // sliding-window width

        // Per-pixel channel sums from the horizontal pass, reused by the vertical pass.
        int r[] = new int[wh];
        int g[] = new int[wh];
        int b[] = new int[wh];
        int rsum, gsum, bsum, x, y, i, p, yp, yi, yw;
        int vmin[] = new int[Math.max(w, h)];

        // dv[] is a precomputed division table: dv[sum] == sum / divsum,
        // trading one table lookup per channel for an integer division.
        int divsum = (div + 1) >> 1;
        divsum *= divsum;
        int dv[] = new int[256 * divsum];
        for (i = 0; i < 256 * divsum; i++) {
            dv[i] = (i / divsum);
        }

        yw = yi = 0;

        // The "stack": div entries of {r,g,b}, cycled as the window slides.
        int[][] stack = new int[div][3];
        int stackpointer;
        int stackstart;
        int[] sir;
        int rbs;
        int r1 = radius + 1;
        int routsum, goutsum, boutsum;   // sums of the outgoing (left) half of the stack
        int rinsum, ginsum, binsum;      // sums of the incoming (right) half

        // ---- Horizontal pass: blur each row into r[], g[], b[] ----
        for (y = 0; y < h; y++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            // Prime the stack with the window centered on x == 0 (edge pixels clamped).
            for (i = -radius; i <= radius; i++) {
                p = pix[yi + Math.min(wm, Math.max(i, 0))];
                sir = stack[i + radius];
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);
                rbs = r1 - Math.abs(i);   // triangular weight: center counts most
                rsum += sir[0] * rbs;
                gsum += sir[1] * rbs;
                bsum += sir[2] * rbs;
                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }
            }
            stackpointer = radius;

            for (x = 0; x < w; x++) {

                r[yi] = dv[rsum];
                g[yi] = dv[gsum];
                b[yi] = dv[bsum];

                // Slide window right: drop the leftmost stack entry...
                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;

                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];

                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];

                if (y == 0) {
                    // Clamped source index for the incoming pixel; same for every row,
                    // so it is computed once on the first row and cached in vmin[].
                    vmin[x] = Math.min(x + radius + 1, wm);
                }
                p = pix[yw + vmin[x]];

                // ...and load the incoming pixel into the recycled slot.
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);

                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];

                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;

                // Rotate: yesterday's incoming edge becomes part of the outgoing half.
                stackpointer = (stackpointer + 1) % div;
                sir = stack[(stackpointer) % div];

                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];

                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];

                yi++;
            }
            yw += w;
        }

        // ---- Vertical pass: blur each column of r[]/g[]/b[] back into pix[] ----
        for (x = 0; x < w; x++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            yp = -radius * w;   // row offset of the window's top, clamped below
            for (i = -radius; i <= radius; i++) {
                yi = Math.max(0, yp) + x;

                sir = stack[i + radius];

                sir[0] = r[yi];
                sir[1] = g[yi];
                sir[2] = b[yi];

                rbs = r1 - Math.abs(i);

                rsum += r[yi] * rbs;
                gsum += g[yi] * rbs;
                bsum += b[yi] * rbs;

                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }

                if (i < hm) {
                    yp += w;
                }
            }
            yi = x;
            stackpointer = radius;
            for (y = 0; y < h; y++) {
                // Preserve alpha channel: ( 0xff000000 & pix[yi] )
                pix[yi] = (0xff000000 & pix[yi]) | (dv[rsum] << 16) | (dv[gsum] << 8) | dv[bsum];

                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;

                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];

                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];

                if (x == 0) {
                    // Cache the clamped incoming-row offset on the first column.
                    vmin[y] = Math.min(y + r1, hm) * w;
                }
                p = x + vmin[y];

                sir[0] = r[p];
                sir[1] = g[p];
                sir[2] = b[p];

                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];

                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;

                stackpointer = (stackpointer + 1) % div;
                sir = stack[stackpointer];

                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];

                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];

                yi += w;
            }
        }

        bitmap.setPixels(pix, 0, w, 0, 0, w, h);

        return (bitmap);
    }
}
package flaxbeard.sprockets.blocks.tiles;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import mcmultipart.multipart.PartSlot;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.play.server.SPacketUpdateTileEntity;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.tileentity.TileEntityFurnace;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumParticleTypes;
import net.minecraft.util.Tuple;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Vec3i;
import flaxbeard.sprockets.api.IMechanicalConsumer;
import flaxbeard.sprockets.blocks.SprocketsBlocks;
import flaxbeard.sprockets.lib.LibConstants;
import flaxbeard.sprockets.multiparts.SprocketsMultiparts;

/**
 * Tile entity for a friction heater block: a mechanical consumer that, when
 * spun by the sprocket network, speeds up the burn of an adjacent furnace.
 *
 * Multiple heaters around one furnace form a group: exactly one of them is the
 * "parent" (isParent == 1) which drives the furnace on behalf of all members;
 * the others are children (isParent == 0). A heater with isParent == -1 has not
 * yet been assigned a role. filled[i] on the parent records which of the six
 * faces of the furnace currently carry a heater.
 */
public class TileEntityFrictionHeater extends TileEntitySprocketBase implements IMechanicalConsumer {

    // Per-facing connection sets, precomputed once for all 6 orientations.
    private static final ArrayList<HashSet<Tuple<Vec3i, PartSlot>>> CIS;
    private static final List<Set<Vec3i>> BLOCK_CIS;

    public int facing = -1;            // side index into EnumFacing.VALUES; -1 = not yet known
    public int numChilds = 0;          // group size, maintained on the parent only
    boolean[] filled = new boolean[6]; // which furnace faces have a heater (parent's bookkeeping)
    public byte isParent = -1;         // -1 unassigned, 1 parent, 0 child
    private int ticksExisted = 0;      // modular tick counter used to pace furnace boosts
    private static final Vec3i UP = EnumFacing.UP.getDirectionVec();

    static {
        CIS = new ArrayList<HashSet<Tuple<Vec3i, PartSlot>>>();
        BLOCK_CIS = new ArrayList<Set<Vec3i>>();
        // Rotate the canonical "above me, bottom slot" connection into each of
        // the 6 facings so lookups in (multipart)CisConnections are O(1).
        for (int side = 0; side < 6; side++) {
            CIS.add(SprocketsMultiparts.rotatePartFacing(side, new Tuple(new Vec3i(0, 1, 0), PartSlot.DOWN)));
            BLOCK_CIS.add(SprocketsMultiparts.rotateFacing(side, UP));
        }
    }

    /**
     * Called when this heater becomes parent candidate on placement: if another
     * heater on the same furnace is already parent, either steal parenthood
     * (when its side index precedes ours) or register as its child.
     */
    public void updateParentPlace() {
        BlockPos furn = pos.add(EnumFacing.VALUES[facing].getDirectionVec());
        // Sides before ours: if one of them is parent, we take over its role
        // and inherit its bookkeeping (NOTE(review): aliasing — `this.filled`
        // shares the array with the old parent afterwards).
        for (int i = 0; i < facing; i++) {
            BlockPos otherHeaterPos = furn.subtract(EnumFacing.VALUES[i].getDirectionVec());
            TileEntity heat = worldObj.getTileEntity(otherHeaterPos);
            if (heat instanceof TileEntityFrictionHeater) {
                TileEntityFrictionHeater fH = (TileEntityFrictionHeater) heat;
                if (fH.isParent == 1) {
                    fH.isParent = 0;
                    this.isParent = 1;
                    this.numChilds = fH.numChilds + 1;
                    this.filled = fH.filled;
                    this.filled[facing] = true;
                    IBlockState state = worldObj.getBlockState(otherHeaterPos);
                    worldObj.notifyBlockUpdate(otherHeaterPos, state, state, 2);
                    return;
                }
            }
        }
        // Sides after ours: if one of them is parent, join it as a child.
        for (int i = facing + 1; i < 6; i++) {
            BlockPos otherHeaterPos = furn.subtract(EnumFacing.VALUES[i].getDirectionVec());
            TileEntity heat = worldObj.getTileEntity(otherHeaterPos);
            if (heat instanceof TileEntityFrictionHeater) {
                TileEntityFrictionHeater fH = (TileEntityFrictionHeater) heat;
                if (fH.isParent == 1) {
                    this.isParent = 0;
                    fH.numChilds++;
                    fH.filled[facing] = true;
                    IBlockState state = worldObj.getBlockState(otherHeaterPos);
                    worldObj.notifyBlockUpdate(otherHeaterPos, state, state, 2);
                    return;
                }
            }
        }
    }

    /**
     * Per-tick update. Server: assign parent role on first tick, and (parent
     * only) accumulate the speed of all member heaters' networks to periodically
     * bump the adjacent furnace's burn-time field. Client: spawn lava particles
     * when spinning fast.
     */
    @Override
    public void update() {
        super.update();
        if (facing != -1) {
            BlockPos furn = pos.add(EnumFacing.VALUES[facing].getDirectionVec());
            TileEntity te = worldObj.getTileEntity(furn);
            if (isParent == -1 && !worldObj.isRemote) {
                // First server tick after placement: assume parenthood, then let
                // updateParentPlace() demote us if a parent already exists.
                isParent = 1;
                numChilds = 1;
                filled[facing] = true;
                updateParentPlace();
                IBlockState state = worldObj.getBlockState(pos);
                worldObj.notifyBlockUpdate(pos, state, state, 2);
            }
            if (te != null && te instanceof TileEntityFurnace) {
                if (isParent == 1) {
                    // Sum |speed| over every non-jammed member of the group.
                    float speed = 0;
                    for (int i = 0; i < 6; i++) {
                        if (filled[i]) {
                            BlockPos otherHeaterPos = furn.subtract(EnumFacing.VALUES[i].getDirectionVec());
                            // NOTE(review): cast assumes filled[] is always in sync with
                            // the world — a stale entry would NPE/CCE here; confirm.
                            TileEntityFrictionHeater heat = (TileEntityFrictionHeater) worldObj.getTileEntity(otherHeaterPos);
                            if (heat.getNetwork() != null) {
                                if (!heat.getNetwork().isJammed()) {
                                    speed += Math.abs(heat.getNetwork().getSpeedForConduit(heat));
                                }
                            }
                        }
                    }
                    if (speed > 0) {
                        // Faster spin -> shorter period between boosts (floor of 2 ticks).
                        int num = Math.max(2, (int) (30. / Math.sqrt(speed * 2)));
                        TileEntityFurnace furnaceTE = (TileEntityFurnace) te;
                        ticksExisted = (ticksExisted + 1) % num;
                        if (!worldObj.isRemote) {
                            if (ticksExisted == 0 && furnaceTE.getField(0) > 0) {
                                furnaceTE.setField(0, furnaceTE.getField(0) + 1); // increase burn time
                            }
                        } else {
                            // Client-side cosmetics: at max speed, splash lava particles
                            // from every member heater a few times a second.
                            if ((30. / Math.sqrt(speed * 2)) < 2 && Minecraft.getMinecraft().thePlayer.ticksExisted % 15 == 0) {
                                for (int i = 0; i < 6; i++) {
                                    if (filled[i]) {
                                        BlockPos otherHeaterPos = furn.subtract(EnumFacing.VALUES[i].getDirectionVec());
                                        worldObj.spawnParticle(EnumParticleTypes.LAVA,
                                                otherHeaterPos.getX() + .5F,
                                                otherHeaterPos.getY() + .5F,
                                                otherHeaterPos.getZ() + .5F,
                                                .25F * (worldObj.rand.nextFloat() - .5F),
                                                .25F * (worldObj.rand.nextFloat() - .5F),
                                                .25F * (worldObj.rand.nextFloat() - .5F),
                                                new int[] { 5, 255, 255 });
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Called on removal: if we were the parent, hand the role (and bookkeeping)
     * to another member; otherwise just deregister from the current parent.
     */
    public void updateParentRemove() {
        BlockPos furn = pos.add(EnumFacing.VALUES[facing].getDirectionVec());
        if (isParent == 1) {
            // Promote the first heater found on a lower side index.
            for (int i = facing - 1; i >= 0; i--) {
                BlockPos otherHeaterPos = furn.subtract(EnumFacing.VALUES[i].getDirectionVec());
                TileEntity heat = worldObj.getTileEntity(otherHeaterPos);
                if (heat instanceof TileEntityFrictionHeater) {
                    TileEntityFrictionHeater fH = (TileEntityFrictionHeater) heat;
                    fH.isParent = 1;
                    fH.numChilds = this.numChilds - 1;
                    fH.filled = filled;
                    fH.filled[facing] = false;
                    IBlockState state = worldObj.getBlockState(otherHeaterPos);
                    worldObj.notifyBlockUpdate(otherHeaterPos, state, state, 2);
                    return;
                }
            }
        } else {
            // We are a child: find the parent (on a higher side index, per the
            // placement ordering above) and decrement its bookkeeping.
            for (int i = 5; i > facing; i--) {
                BlockPos otherHeaterPos = furn.subtract(EnumFacing.VALUES[i].getDirectionVec());
                TileEntity heat = worldObj.getTileEntity(otherHeaterPos);
                if (heat instanceof TileEntityFrictionHeater) {
                    TileEntityFrictionHeater fH = (TileEntityFrictionHeater) heat;
                    if (fH.isParent == 1) {
                        fH.numChilds = fH.numChilds - 1;
                        fH.filled[facing] = false;
                        IBlockState state = worldObj.getBlockState(otherHeaterPos);
                        worldObj.notifyBlockUpdate(otherHeaterPos, state, state, 2);
                        return;
                    }
                }
            }
        }
    }

    @Override
    public void invalidate() {
        // Hand off/deregister parenthood before the tile entity dies.
        updateParentRemove();
        super.invalidate();
    }

    @Override
    public boolean isNegativeDirection() {
        // Odd EnumFacing indices are the negative-axis directions.
        return this.facing % 2 == 1;
    }

    @Override
    public HashSet<Tuple<Vec3i, PartSlot>> multipartCisConnections() {
        // Lazily recover facing from block metadata if it was never set.
        if (facing == -1 && worldObj != null && worldObj.getBlockState(getPosMC()) != null
                && worldObj.getBlockState(getPosMC()).getBlock() == SprocketsBlocks.frictionHeater) {
            facing = SprocketsBlocks.frictionHeater.getMetaFromState(worldObj.getBlockState(getPosMC()));
        }
        if (facing == -1) {
            return new HashSet<Tuple<Vec3i, PartSlot>>();
        }
        return CIS.get(facing);
    }

    @Override
    public HashSet<Tuple<Vec3i, PartSlot>> multipartTransConnections() {
        // Friction heaters have no trans (perpendicular) multipart connections.
        return new HashSet<Tuple<Vec3i, PartSlot>>();
    }

    @Override
    public Set<Vec3i> cisConnections() {
        // Same lazy facing recovery as multipartCisConnections().
        if (facing == -1 && worldObj != null && worldObj.getBlockState(getPosMC()) != null
                && worldObj.getBlockState(getPosMC()).getBlock() == SprocketsBlocks.frictionHeater) {
            facing = SprocketsBlocks.frictionHeater.getMetaFromState(worldObj.getBlockState(getPosMC()));
        }
        if (facing == -1) {
            return new HashSet<Vec3i>();
        }
        return BLOCK_CIS.get(facing);
    }

    @Override
    public HashSet<Vec3i> transConnections() {
        return new HashSet<Vec3i>();
    }

    @Override
    public void readFromNBT(NBTTagCompound compound) {
        super.readFromNBT(compound);
        isParent = compound.getByte("isParent");
        numChilds = compound.getInteger("numChilds");
        ticksExisted = compound.getInteger("ticksExisted");
        for (int i = 0; i < 6; i++) {
            filled[i] = compound.getBoolean("filled" + i);
        }
    }

    @Override
    public NBTTagCompound writeToNBT(NBTTagCompound compound) {
        compound = super.writeToNBT(compound);
        compound.setByte("isParent", isParent);
        compound.setInteger("numChilds", numChilds);
        compound.setInteger("ticksExisted", ticksExisted);
        for (int i = 0; i < 6; i++) {
            compound.setBoolean("filled" + i, filled[i]);
        }
        return compound;
    }

    @Override
    public void onDataPacket(NetworkManager net, SPacketUpdateTileEntity pkt) {
        // Sync from server: load state, then refresh the network's physics.
        NBTTagCompound data = pkt.getNbtCompound();
        this.readFromNBT(data);
        if (getNetwork() != null) {
            getNetwork().updateNetworkSpeedAndTorque();
        }
    }

    @Override
    public SPacketUpdateTileEntity getUpdatePacket() {
        NBTTagCompound data = new NBTTagCompound();
        this.writeToNBT(data);
        return new SPacketUpdateTileEntity(pos, 0, data);
    }

    @Override
    public float torqueCost() {
        return LibConstants.FRICTION_HEATER_TORQUE;
    }
}
/*
Copyright 2009-2019 Igor Polevoy

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.javalite.json;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Convenience class to convert JSON strings to and from objects.
 *
 * @author Igor Polevoy on 5/26/16.
 */
public class JSONHelper {

    private static final ObjectMapper mapper = new ObjectMapper();

    static {
        mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
    }

    /**
     * Convert a JSON map to a Java Map
     *
     * @param json JSON map
     * @return Java Map.
     */
    public static Map toMap(String json) {
        try {
            return mapper.readValue(json, Map.class);
        } catch (Exception e) {
            throw new JSONParseException("Failed to parse JSON string into a Java Map", e);
        }
    }

    /**
     * Convenience method to convert String to {@link JSONMap}.
     *
     * @param json String content of some JSON object.
     * @return instance of {@link JSONMap}.
     */
    public static JSONMap toJSONMap(String json) {
        return new JSONMap(toMap(json));
    }

    /**
     * Convenience method to convert String to {@link JSONList}.
     *
     * @param json String content of some JSON array.
     * @return instance of {@link JSONList}.
     */
    public static JSONList toJSONList(String json) {
        return new JSONList(toList(json));
    }

    /**
     * Convert JSON Array to Java array of maps.
     *
     * @param json JSON array
     * @return Java array.
     */
    public static Map[] toMaps(String json) {
        try {
            return mapper.readValue(json, Map[].class);
        } catch (Exception e) {
            throw new JSONParseException("Failed to parse JSON string into a Java Maps", e);
        }
    }

    /**
     * Convert Java object to a JSON string.
     *
     * @param val Java object
     * @return JSON string.
     */
    public static String toJsonString(Object val) {
        return toJsonString(val, false);
    }

    /**
     * Convert Java object to a JSON string.
     *
     * @param val Java object
     * @param pretty enable/disable pretty print
     * @return JSON string.
     */
    public static String toJsonString(Object val, boolean pretty) {
        try {
            return pretty
                    ? mapper.writerWithDefaultPrettyPrinter()
                            .with(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS)
                            .writeValueAsString(val)
                    : mapper.writeValueAsString(val);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Converts input into a JSON object.
     *
     * <p>Non-number values are quoted; numbers and {@code null} are emitted bare.
     * Note: names and values are NOT escaped — callers must not pass strings
     * containing {@code "} (see {@link #escapeControlChars(String)}).
     *
     * @param namesAndValues - expected sequence of corresponding name and value pairs (number of parameters must be even ).
     * @return new string {name:value,name1:value1, etc.}; {@code "{}"} for no arguments.
     * @throws IllegalArgumentException if an odd number of arguments is passed.
     */
    public static String toJsonObject(Object... namesAndValues) {
        if (namesAndValues.length % 2 != 0) {
            throw new IllegalArgumentException("number or arguments must be even");
        }
        // Bug fix: zero arguments is a valid (even) input; the original fell
        // into the loop and threw ArrayIndexOutOfBoundsException.
        if (namesAndValues.length == 0) {
            return "{}";
        }
        StringBuilder sb = new StringBuilder("{");
        int count = 0;
        while (true) {
            Object name = namesAndValues[count];
            sb.append("\"").append(name).append("\":");
            if (!(namesAndValues[count + 1] instanceof Number)) {
                if (namesAndValues[count + 1] == null) {
                    sb.append("null");
                } else {
                    sb.append("\"").append(namesAndValues[count + 1].toString()).append("\"");
                }
            } else {
                sb.append(namesAndValues[count + 1].toString());
            }
            if (count < (namesAndValues.length - 2)) {
                sb.append(",");
                count += 2;
            } else {
                sb.append("}");
                break;
            }
        }
        return sb.toString();
    }

    /**
     * Convert JSON array tp Java List
     *
     * @param json JSON array string.
     * @return Java List instance.
     */
    public static List toList(String json) {
        try {
            return mapper.readValue(json, List.class);
        } catch (Exception e) {
            throw new JSONParseException("Failed to parse JSON string into a Java List", e);
        }
    }

    /**
     * Clean control characters in a string.
     *
     * @param value string to escape
     * @return escaped version
     */
    public static String cleanControlChars(String value) {
        return sanitize(value, true);
    }

    /**
     * Escapes control characters in a string.
     *
     * @param value string to escape
     * @return escaped version
     * @see #sanitize(String)
     */
    public static String escapeControlChars(String value) {
        return sanitize(value, false);
    }

    /**
     * Escapes control characters in a string.
     *
     * @param value string to escape
     * @return escaped version
     * @see #escapeControlChars(String)
     */
    public static String sanitize(String value) {
        return sanitize(value, false);
    }

    public static String sanitize(String value, boolean clean) {
        return sanitize(value, clean, null);
    }

    /**
     * Escapes control characters in a string when you need to
     * generate JSON.
     *
     * @param value input string
     * @param clean if true will remove characters that match, if false will escape
     * @param toEscape array of characters to escape. If not provided, it will escape or clean <code>'"','\\', '\t', '\b', '\n', '\r' '\f'</code>.
     *                 This method will only escape or clean if provided chars are from this list.
     * @return input string with control characters escaped or removed, depending on the <code>clean</code> flag.
     */
    public static String sanitize(String value, boolean clean, Character... toEscape) {
        StringBuilder builder = new StringBuilder();
        Map<Character, String> replacements = clean ? CLEAN_CHARS : REPLACEMENT_CHARS;
        for (int i = 0; i < value.length(); i++) {
            char c = value.charAt(i);
            if (toEscape == null) {
                // Default mode: apply the full replacement table.
                if (replacements.containsKey(c)) {
                    builder.append(replacements.get(c));
                } else {
                    builder.append(c);
                }
            } else {
                // Restricted mode: only touch characters the caller listed
                // (and that are also in the table).
                if (replacements.containsKey(c) && contains(toEscape, c)) {
                    builder.append(replacements.get(c));
                } else {
                    builder.append(c);
                }
            }
        }
        return builder.toString();
    }

    /** Linear membership test over the caller-supplied escape list. */
    private static boolean contains(Character[] toEscape, char c) {
        for (char escapeChar : toEscape) {
            if (escapeChar == c) {
                return true;
            }
        }
        return false;
    }

    // Escape table (clean == false): control chars -> \\uXXXX, plus JSON specials.
    private static final Map<Character, String> REPLACEMENT_CHARS = new HashMap<>();
    // Removal table (clean == true): same keys, mostly mapped to "".
    private static final Map<Character, String> CLEAN_CHARS = new HashMap<>();

    static {
        for (int i = 0; i <= 0x1f; i++) {
            REPLACEMENT_CHARS.put((char) i, String.format("\\u%04x", (int) i));
        }
        REPLACEMENT_CHARS.put('\u2028', "\\u2028");
        REPLACEMENT_CHARS.put('\u2029', "\\u2029");
        REPLACEMENT_CHARS.put('"', "\\\"");
        REPLACEMENT_CHARS.put('\\', "\\\\");
        REPLACEMENT_CHARS.put('\t', "\\t");
        REPLACEMENT_CHARS.put('\b', "\\b");
        REPLACEMENT_CHARS.put('\n', "\\n");
        REPLACEMENT_CHARS.put('\r', "\\r");
        REPLACEMENT_CHARS.put('\f', "\\f");

        // NOTE(review): control chars 0x00-0x1f are still *escaped* here (not
        // removed) even in clean mode — preserved as-is from the original.
        for (int i = 0; i <= 0x1f; i++) {
            CLEAN_CHARS.put((char) i, String.format("\\u%04x", (int) i));
        }
        CLEAN_CHARS.put('\u2028', "");
        CLEAN_CHARS.put('\u2029', "");
        CLEAN_CHARS.put('\\', "");
        CLEAN_CHARS.put('\t', "");
        CLEAN_CHARS.put('\b', "");
        CLEAN_CHARS.put('\n', "");
        CLEAN_CHARS.put('\r', "");
        CLEAN_CHARS.put('\f', "");
    }
}
package org.holoeverywhere.slider; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.os.Parcel; import android.os.Parcelable; import android.text.TextUtils; import org.holoeverywhere.app.Fragment; import org.holoeverywhere.drawable.DrawableCompat; import java.lang.ref.WeakReference; class BaseSliderItem<T extends BaseSliderItem<T>> implements Parcelable { int mCustomLayout = 0; Fragment.SavedState mSavedState; SliderMenu mSliderMenu; WeakReference<Fragment> mLastFragment; boolean mSaveState = true; boolean mClickable = true; int mBackgroundColor = 0; Bundle mFragmentArguments; Class<? extends Fragment> mFragmentClass; CharSequence mLabel; int mSelectionHandlerColor = 0; String mTag; int mTextAppereance = 0; int mTextAppereanceInverse = 0; Drawable mIcon; BaseSliderItem() { } BaseSliderItem(Parcel source) throws Exception { String classname = source.readString(); if (classname != null) { mFragmentClass = (Class<? 
extends Fragment>) Class.forName(classname); } mSavedState = source.readParcelable(Fragment.SavedState.class.getClassLoader()); mSaveState = source.readInt() == 1; mClickable = source.readInt() == 1; mCustomLayout = source.readInt(); mBackgroundColor = source.readInt(); mSelectionHandlerColor = source.readInt(); mTextAppereance = source.readInt(); mTextAppereanceInverse = source.readInt(); mLabel = TextUtils.CHAR_SEQUENCE_CREATOR.createFromParcel(source); } @Override public int describeContents() { return 0; } public int getBackgroundColor() { return mBackgroundColor; } public T setBackgroundColor(int backgroundColor) { mBackgroundColor = backgroundColor; return (T) this; } public int getCustomLayout() { return mCustomLayout; } public T setCustomLayout(int customLayout) { mCustomLayout = customLayout; return (T) this; } public Bundle getFragmentArguments() { return mFragmentArguments; } public T setFragmentArguments(Bundle fragmentArguments) { mFragmentArguments = fragmentArguments; return (T) this; } public Class<? extends Fragment> getFragmentClass() { return mFragmentClass; } public T setFragmentClass(Class<? 
extends Fragment> fragmentClass) { if (mFragmentClass == fragmentClass) { return (T) this; } mFragmentClass = fragmentClass; mSavedState = null; return (T) this; } public CharSequence getLabel() { return mLabel; } public T setLabel(CharSequence label) { mLabel = label; invalidate(); return (T) this; } public Drawable getIcon() { return mIcon; } public T setIconAttr(int attrId) { TypedArray a = mSliderMenu.getActivity().obtainStyledAttributes(new int[]{attrId}); final Drawable drawable = a.getDrawable(0); a.recycle(); return setIcon(drawable); } public T setIcon(int resId) { if (mSliderMenu == null) { throw new IllegalStateException("You cannot provide icon before adding item to SliderMenu"); } return setIcon(DrawableCompat.getDrawable(mSliderMenu.getActivity().getResources(), resId)); } public T setIcon(Drawable icon) { mIcon = icon; invalidate(); return (T) this; } public int getSelectionHandlerColor() { return mSelectionHandlerColor; } public T setSelectionHandlerColor(int selectionHandlerColor) { mSelectionHandlerColor = selectionHandlerColor; return (T) this; } public String getTag() { return mTag; } public T setTag(String tag) { mTag = tag; return (T) this; } public int getTextAppereance() { return mTextAppereance; } public T setTextAppereance(int textAppereance) { mTextAppereance = textAppereance; return (T) this; } public int getTextAppereanceInverse() { return mTextAppereanceInverse; } public T setTextAppereanceInverse(int textAppereanceInverse) { mTextAppereanceInverse = textAppereanceInverse; return (T) this; } private void invalidate() { if (mSliderMenu != null) { mSliderMenu.invalidate(); } } public boolean isSaveState() { return mSaveState; } public T setSaveState(boolean saveState) { if (mSaveState == saveState) { return (T) this; } mSaveState = saveState; if (!saveState) { mSavedState = null; } return (T) this; } public T clickable(boolean clickable) { mClickable = clickable; return (T) this; } public boolean isClickable() { return mClickable; } 
/**
 * Serializes this item into a Parcel.
 * <p>
 * The write order MUST mirror the read order in the Parcel constructor:
 * fragment class name, saved fragment state, save-state flag, clickable
 * flag, custom layout, background color, selection-handler color, text
 * appearance, inverse text appearance, label.
 */
@Override
public void writeToParcel(Parcel dest, int flags) {
    dest.writeString(mFragmentClass == null ? null : mFragmentClass.getName());
    // Only persist the fragment state when state saving is enabled.
    dest.writeParcelable(mSaveState ? mSavedState : null, flags);
    dest.writeInt(mSaveState ? 1 : 0);
    // BUGFIX: mClickable was never written, but the Parcel constructor reads
    // it (`mClickable = source.readInt() == 1`) right after the save-state
    // flag, shifting every subsequent int by one slot on deserialization.
    dest.writeInt(mClickable ? 1 : 0);
    dest.writeInt(mCustomLayout);
    dest.writeInt(mBackgroundColor);
    dest.writeInt(mSelectionHandlerColor);
    dest.writeInt(mTextAppereance);
    dest.writeInt(mTextAppereanceInverse);
    TextUtils.writeToParcel(mLabel, dest, flags);
}

/**
 * Applies a background / selection-handler color pair given as an array of
 * two color resource ids. Silently ignored when the array is null, shorter
 * than two entries, or the item is not yet attached to a SliderMenu.
 *
 * @param colors array whose first two entries are color resource ids
 * @return this item, for chaining
 */
public T fillColors(int[] colors) {
    if (colors != null && colors.length >= 2 && mSliderMenu != null) {
        final Resources res = mSliderMenu.getActivity().getResources();
        fillColors(res.getColor(colors[0]), res.getColor(colors[1]));
    }
    return (T) this;
}

/**
 * Applies already-resolved (ARGB) background and selection-handler colors.
 *
 * @return this item, for chaining
 */
public T fillColors(int backgroundColor, int selectionHandlerColor) {
    setBackgroundColor(backgroundColor);
    setSelectionHandlerColor(selectionHandlerColor);
    return (T) this;
}

/** @return true when a fragment class is set, i.e. this item can show a page. */
public boolean hasVisiblePage() {
    return mFragmentClass != null;
}
}
package com.platform; import android.Manifest; import android.app.Activity; import android.content.Context; import android.content.pm.PackageManager; import android.location.Location; import android.location.LocationListener; import android.location.LocationManager; import android.os.Bundle; import android.support.v4.app.ActivityCompat; import android.util.Log; import com.breadwallet.BreadApp; import com.breadwallet.tools.util.Utils; import com.google.firebase.crash.FirebaseCrash; import org.eclipse.jetty.continuation.Continuation; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.websocket.api.Session; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import javax.servlet.http.HttpServletResponse; /** * BreadWallet * <p/> * Created by Mihail Gutan on <mihail@breadwallet.com> 1/12/17. * Copyright (c) 2017 breadwallet LLC * <p/> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p/> * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * <p/> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ public class GeoLocationManager { private static final String TAG = GeoLocationManager.class.getName(); private Session session; private Continuation continuation; private Request baseRequest; private LocationManager locationManager; private static GeoLocationManager instance; public static GeoLocationManager getInstance() { if (instance == null) instance = new GeoLocationManager(); return instance; } public void getOneTimeGeoLocation(Continuation cont, Request req) { this.continuation = cont; this.baseRequest = req; final Activity app = BreadApp.getBreadContext(); if (app == null) return; locationManager = (LocationManager) app.getSystemService(Context.LOCATION_SERVICE); if (locationManager == null) { Log.e(TAG, "getOneTimeGeoLocation: locationManager is null!"); return; } app.runOnUiThread(new Runnable() { @Override public void run() { if (ActivityCompat.checkSelfPermission(app, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(app, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) { RuntimeException ex = new RuntimeException("getOneTimeGeoLocation, can't happen"); Log.e(TAG, "run: getOneTimeGeoLocation, can't happen"); FirebaseCrash.report(ex); return; } locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 0, 0, locationListener); locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, locationListener); } }); } public void startGeoSocket(Session sess) { session = sess; final Activity app = BreadApp.getBreadContext(); if (app == null) return; final LocationManager locationManager = (LocationManager) app.getSystemService(Context.LOCATION_SERVICE); app.runOnUiThread(new Runnable() { @Override public void run() { if (ActivityCompat.checkSelfPermission(app, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(app, Manifest.permission.ACCESS_COARSE_LOCATION) != 
PackageManager.PERMISSION_GRANTED) { RuntimeException ex = new RuntimeException("startGeoSocket, can't happen"); Log.e(TAG, "run: startGeoSocket, can't happen"); FirebaseCrash.report(ex); return; } locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 1000, 0, socketLocationListener); locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 1000, 0, socketLocationListener); } }); } public void stopGeoSocket() { final Activity app = BreadApp.getBreadContext(); if (app == null) return; final LocationManager locationManager = (LocationManager) app.getSystemService(Context.LOCATION_SERVICE); app.runOnUiThread(new Runnable() { @Override public void run() { if (ActivityCompat.checkSelfPermission(app, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(app, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) { Log.e(TAG, "stopGeoSocket, can't happen"); RuntimeException ex = new RuntimeException("stopGeoSocket, can't happen"); FirebaseCrash.report(ex); throw ex; } locationManager.removeUpdates(socketLocationListener); } }); } // Define a listener that responds to location updates private LocationListener socketLocationListener = new LocationListener() { private boolean sending; public void onLocationChanged(Location location) { // Called when a new location is found by the network location provider. 
if (sending) return; sending = true; if (session != null && session.isOpen()) { final String jsonLocation = getJsonLocation(location); new Thread(new Runnable() { @Override public void run() { try { session.getRemote().sendString(jsonLocation); } catch (IOException e) { e.printStackTrace(); } finally { sending = false; } } }).start(); } else { sending = false; } } public void onStatusChanged(String provider, int status, Bundle extras) { } public void onProviderEnabled(String provider) { } public void onProviderDisabled(String provider) { } }; private LocationListener locationListener = new LocationListener() { private boolean processing; public void onLocationChanged(final Location location) { if (processing) return; processing = true; new Thread(new Runnable() { @Override public void run() { // Called when a new location is found by the network location provider. if (continuation != null && baseRequest != null) { String jsonLocation = getJsonLocation(location); try { if (!Utils.isNullOrEmpty(jsonLocation)) { try { ((HttpServletResponse) continuation.getServletResponse()).setStatus(200); continuation.getServletResponse().getOutputStream().write(jsonLocation.getBytes("UTF-8")); baseRequest.setHandled(true); continuation.complete(); continuation = null; } catch (IOException e) { e.printStackTrace(); } } else { try { ((HttpServletResponse) continuation.getServletResponse()).sendError(500); baseRequest.setHandled(true); continuation.complete(); continuation = null; } catch (IOException e) { e.printStackTrace(); } FirebaseCrash.report(new NullPointerException("onLocationChanged: " + jsonLocation)); Log.e(TAG, "onLocationChanged: WARNING respStr is null or empty: " + jsonLocation); } } catch (Exception e) { e.printStackTrace(); } finally { processing = false; Activity app = BreadApp.getBreadContext(); if (app == null || ActivityCompat.checkSelfPermission(app, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && 
ActivityCompat.checkSelfPermission(app, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) { Log.e(TAG, "onLocationChanged: PERMISSION DENIED for removeUpdates"); } else { locationManager.removeUpdates(locationListener); } } } } }).start(); } public void onStatusChanged(String provider, int status, Bundle extras) { } public void onProviderEnabled(String provider) { } public void onProviderDisabled(String provider) { } }; public static String getJsonLocation(Location location) { try { JSONObject responseJson = new JSONObject(); JSONObject coordObj = new JSONObject(); coordObj.put("latitude", location.getLatitude()); coordObj.put("longitude", location.getLongitude()); responseJson.put("timestamp", location.getTime()); responseJson.put("coordinate", coordObj); responseJson.put("altitude", location.getAltitude()); responseJson.put("horizontal_accuracy", location.getAccuracy()); responseJson.put("description", ""); return responseJson.toString(); } catch (JSONException e) { Log.e(TAG, "handleLocation: Failed to create json response"); e.printStackTrace(); } return null; } }
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.aorura.android.weather.app; import android.content.Context; import android.content.SharedPreferences; import android.preference.PreferenceManager; import android.text.format.Time; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; public class Utility { //dongwook2.shin for BT LED public static final int THUNDERSTORM = 200; public static final int DRIZZLE = 300; public static final int RAIN = 500; public static final int SNOW = 600; public static final int ATMOSPHERE = 700; public static final int CLOUDS = 800; public static final int CLEAR = 801; public static String getPreferredRainbow(Context context) { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context); return prefs.getString(context.getString(R.string.pref_rainbow_key), context.getString(R.string.pref_rainbow_default)); } public static String getPreferredLocation(Context context) { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context); return prefs.getString(context.getString(R.string.pref_location_key), context.getString(R.string.pref_location_default)); } public static boolean isMetric(Context context) { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context); return prefs.getString(context.getString(R.string.pref_units_key), context.getString(R.string.pref_units_metric)) 
.equals(context.getString(R.string.pref_units_metric)); } public static String formatTemperature(Context context, double temperature) { // Data stored in Celsius by default. If user prefers to see in Fahrenheit, convert // the values here. //String suffix = "\u00B0"; if (!isMetric(context)) { temperature = (temperature * 1.8) + 32; } // For presentation, assume the user doesn't care about tenths of a degree. return String.format(context.getString(R.string.format_temperature), temperature); } public static String formatTemperatureWithoutDegree(Context context, double temperature) { // Data stored in Celsius by default. If user prefers to see in Fahrenheit, convert // the values here. //String suffix = "\u00B0"; if (!isMetric(context)) { temperature = (temperature * 1.8) + 32; } // For presentation, assume the user doesn't care about tenths of a degree. return String.format(context.getString(R.string.format_temperature_without_degree), temperature); } static String formatDate(long dateInMilliseconds) { Date date = new Date(dateInMilliseconds); return DateFormat.getDateInstance().format(date); } // Format used for storing dates in the database. ALso used for converting those strings // back into date objects for comparison/processing. public static final String DATE_FORMAT = "yyyyMMdd"; /** * Helper method to convert the database representation of the date into something to display * to users. As classy and polished a user experience as "20140102" is, we can do better. * * @param context Context to use for resource localization * @param dateInMillis The date in milliseconds * @return a user-friendly representation of the date. 
*/ public static String getFriendlyDayString(Context context, long dateInMillis) { // The day string for forecast uses the following logic: // For today: "Today, June 8" // For tomorrow: "Tomorrow" // For the next 5 days: "Wednesday" (just the day name) // For all days after that: "Mon Jun 8" Time time = new Time(); time.setToNow(); long currentTime = System.currentTimeMillis(); int julianDay = Time.getJulianDay(dateInMillis, time.gmtoff); int currentJulianDay = Time.getJulianDay(currentTime, time.gmtoff); // If the date we're building the String for is today's date, the format // is "Today, June 24" if (julianDay == currentJulianDay) { String today = context.getString(R.string.today); int formatId = R.string.format_full_friendly_date; return String.format(context.getString( formatId, today, getFormattedMonthDay(context, dateInMillis))); } else if ( julianDay < currentJulianDay + 7 ) { // If the input date is less than a week in the future, just return the day name. return getDayName(context, dateInMillis); } else { // Otherwise, use the form "Mon Jun 3" SimpleDateFormat shortenedDateFormat = new SimpleDateFormat("EEE MMM dd"); return shortenedDateFormat.format(dateInMillis); } } /** * Given a day, returns just the name to use for that day. * E.g "today", "tomorrow", "wednesday". * * @param context Context to use for resource localization * @param dateInMillis The date in milliseconds * @return */ public static String getDayName(Context context, long dateInMillis) { // If the date is today, return the localized version of "Today" instead of the actual // day name. 
Time t = new Time(); t.setToNow(); int julianDay = Time.getJulianDay(dateInMillis, t.gmtoff); int currentJulianDay = Time.getJulianDay(System.currentTimeMillis(), t.gmtoff); if (julianDay == currentJulianDay) { return context.getString(R.string.today); } else if ( julianDay == currentJulianDay +1 ) { return context.getString(R.string.tomorrow); } else { Time time = new Time(); time.setToNow(); // Otherwise, the format is just the day of the week (e.g "Wednesday". SimpleDateFormat dayFormat = new SimpleDateFormat("EEEE"); return dayFormat.format(dateInMillis); } } /** * Converts db date format to the format "Month day", e.g "June 24". * @param context Context to use for resource localization * @param dateInMillis The db formatted date string, expected to be of the form specified * in Utility.DATE_FORMAT * @return The day in the form of a string formatted "December 6" */ public static String getFormattedMonthDay(Context context, long dateInMillis ) { Time time = new Time(); time.setToNow(); SimpleDateFormat dbDateFormat = new SimpleDateFormat(Utility.DATE_FORMAT); SimpleDateFormat monthDayFormat = new SimpleDateFormat("MMMM dd"); String monthDayString = monthDayFormat.format(dateInMillis); return monthDayString; } public static String getFormattedWind(Context context, float windSpeed, float degrees) { int windFormat; if (Utility.isMetric(context)) { windFormat = R.string.format_wind_kmh; } else { windFormat = R.string.format_wind_mph; windSpeed = .621371192237334f * windSpeed; } // From wind direction in degrees, determine compass direction as a string (e.g NW) // You know what's fun, writing really long if/else statements with tons of possible // conditions. Seriously, try it! 
String direction = "Unknown"; if (degrees >= 337.5 || degrees < 22.5) { direction = "N"; } else if (degrees >= 22.5 && degrees < 67.5) { direction = "NE"; } else if (degrees >= 67.5 && degrees < 112.5) { direction = "E"; } else if (degrees >= 112.5 && degrees < 157.5) { direction = "SE"; } else if (degrees >= 157.5 && degrees < 202.5) { direction = "S"; } else if (degrees >= 202.5 && degrees < 247.5) { direction = "SW"; } else if (degrees >= 247.5 && degrees < 292.5) { direction = "W"; } else if (degrees >= 292.5 && degrees < 337.5) { direction = "NW"; } return String.format(context.getString(windFormat), windSpeed, direction); } /** * Helper method to provide the icon resource id according to the weather condition id returned * by the OpenWeatherMap call. * @param weatherId from OpenWeatherMap API response * @return resource id for the corresponding icon. -1 if no relation is found. */ public static int getIconResourceForWeatherCondition(int weatherId) { // Based on weather code data found at: // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes if (weatherId >= 200 && weatherId <= 232) { return R.drawable.ic_storm; } else if (weatherId >= 300 && weatherId <= 321) { return R.drawable.ic_light_rain; } else if (weatherId >= 500 && weatherId <= 504) { return R.drawable.ic_rain; } else if (weatherId == 511) { return R.drawable.ic_snow; } else if (weatherId >= 520 && weatherId <= 531) { return R.drawable.ic_rain; } else if (weatherId >= 600 && weatherId <= 622) { return R.drawable.ic_snow; } else if (weatherId >= 701 && weatherId <= 761) { return R.drawable.ic_fog; } else if (weatherId == 761 || weatherId == 781) { return R.drawable.ic_storm; } else if (weatherId == 800) { return R.drawable.ic_clear; } else if (weatherId == 801) { return R.drawable.ic_light_clouds; } else if (weatherId >= 802 && weatherId <= 804) { return R.drawable.ic_cloudy; } return -1; } /** * Helper method to provide the art resource id according to the weather condition id 
returned * by the OpenWeatherMap call. * @param weatherId from OpenWeatherMap API response * @return resource id for the corresponding icon. -1 if no relation is found. */ public static int getArtResourceForWeatherCondition(int weatherId) { // Based on weather code data found at: // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes if (weatherId >= 200 && weatherId <= 232) { return R.drawable.art_storm; } else if (weatherId >= 300 && weatherId <= 321) { return R.drawable.art_light_rain; } else if (weatherId >= 500 && weatherId <= 504) { return R.drawable.art_rain; } else if (weatherId == 511) { return R.drawable.art_snow; } else if (weatherId >= 520 && weatherId <= 531) { return R.drawable.art_rain; } else if (weatherId >= 600 && weatherId <= 622) { return R.drawable.art_snow; } else if (weatherId >= 701 && weatherId <= 761) { return R.drawable.art_fog; } else if (weatherId == 761 || weatherId == 781) { return R.drawable.art_storm; } else if (weatherId == 800) { return R.drawable.art_clear; } else if (weatherId == 801) { return R.drawable.art_light_clouds; } else if (weatherId >= 802 && weatherId <= 804) { return R.drawable.art_clouds; } return -1; } //dongwook2.shin for BT LED public static int getWeatherConditionForBT(int weatherId) { // Based on weather code data found at: // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes if (weatherId >= 200 && weatherId <= 232) { return Utility.THUNDERSTORM; } else if (weatherId >= 300 && weatherId <= 321) { return Utility.DRIZZLE; } else if (weatherId >= 500 && weatherId <= 504) { return Utility.RAIN; } else if (weatherId == 511) { return Utility.SNOW; } else if (weatherId >= 520 && weatherId <= 531) { return Utility.RAIN; } else if (weatherId >= 600 && weatherId <= 622) { return Utility.SNOW; } else if (weatherId >= 701 && weatherId <= 761) { return Utility.ATMOSPHERE; } else if (weatherId == 761 || weatherId == 781) { return Utility.THUNDERSTORM; } else if (weatherId == 800) 
{ return Utility.CLEAR; } else if (weatherId == 801) { return Utility.CLOUDS; } else if (weatherId >= 802 && weatherId <= 804) { return Utility.CLOUDS; } return -1; } public static byte getDigit (int num) { if(num > 9) { return 0x09; } switch (num) { case 1: return 0x01; case 2: return 0x02; case 3: return 0x03; case 4: return 0x04; case 5: return 0x05; case 6: return 0x06; case 7: return 0x07; case 8: return 0x08; case 9: return 0x09; case 0: return 0x00; } return 0x09; } }
/* Copyright 2013 Peter Laird Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.storageroomapp.client.field; import java.util.HashMap; import java.util.Map; import org.json.simple.JSONObject; import com.storageroomapp.client.Collection; import com.storageroomapp.client.util.JsonSimpleUtil; abstract public class GenericField<T extends GenericValue<?>> implements Cloneable { /* * MY FRIEND: * * I was hoping you wouldn't have to come down in here, as the higher level * classes (com.storageroomapp.client.*) are fairly nice and neat. They match * well with the SR concepts, and more or less are light and airy. * * When it comes to Fields, I am sorry but here is where we get nasty. * This class here, GenericField, is the epicenter of some decent complexity. * * 1. I used Java generics * * Most folks frankly just aren't that familiar with generics outside their use * with Java collections, so I was not planning to use them. But, the Field * solution was made much better to the public API user because of them, so I * yielded and used them. Sorry. Generics kinda suck from a readability point * of view. * * 2. I unified the metadata and data fields * * A controversial choice perhaps, but I used the same class hierarchy for metadata * fields (those that describe what fields are defined for a collection, that can * have a default value) and data fields (the fields associated with an Entry, * and can have an actual value). 
This saved a ton of code, but makes working at this * level a little harder as you have to always keep in mind what flavor of field you * are working with. The 'isFieldDefinition' member will remind you, but still. * * 3. Some StorageRoom fields are Chimeras * * Field types like File and Image have entirely different json structures depending * on whether they are being downloaded from SR in a GET, or uploaded to SR in * a PUT or POST. Ouch. * * 4. My use case is solved, I am losing interest in continuing the javadoc * * Sadly, I am about ready to be done here. I did a decent job of documenting the * higher classes, but I need to move on. Providing good docs for the Fields package * would take a ton of time, so I leave you on your own... */ protected Collection parentCollection = null; // this is used by subclasses to differentiate protected String type = "all"; // the core properties of a Field protected String name = "undefined"; protected String identifier = "undefined"; // the rest of the properties protected Map<String, String> additionalProperties = new HashMap<String, String>(); // is this a compound field? 
File, Image, Location, Association protected boolean isCompoundFieldType = false; // If isFieldDefinition is true, this object is a metadata field, meaning it // is associated with a Collection, not an Entry protected boolean isFieldDefinition = true; // Field 'value' should only be used if isFieldDefinition is false protected T value = null; protected GenericField(String type) { this.type = type; } // METADATA FIELD CONSTRUCTION (a field definition for a collection; no value is allowed) // we are using the prototype pattern here // each Field should know how to create Field instances for specific Collections on demand @SuppressWarnings("unchecked") private GenericField<T> build() { GenericField<T> clone = null; try { clone = (GenericField<T>)this.clone(); } catch (CloneNotSupportedException cns) { // nothing } return clone; } /** * Copies a metadata field from one collection to another. This is * used when copying over the standard fields that occur in all collections * into a newly created collection. 
*/ protected GenericField<T> cloneAsMetadataField(Collection otherCollection) { GenericField<T> clone = build(); clone.parentCollection = otherCollection; return clone; } protected GenericField<T> cloneAsMetadataField(Collection otherCollection, String name, String identifier) { GenericField<T> clone = build(); clone.parentCollection = otherCollection; clone.type = type; clone.name = name; clone.identifier = identifier; return clone; } protected GenericField<T> cloneAsMetadataField(Collection otherCollection, JSONObject jsonObj) { GenericField<T> clone = build(); clone.parentCollection = otherCollection; clone.type = JsonSimpleUtil.parseJsonStringValue(jsonObj, "@type"); clone.name = JsonSimpleUtil.parseJsonStringValue(jsonObj, "name"); clone.identifier = JsonSimpleUtil.parseJsonStringValue(jsonObj, "identifier"); return clone; } // DATA FIELD CONSTRUCTION (a field associated with an entry in the collection) protected GenericField<T> cloneAsDataField(T value) { GenericField<T> clone = build(); clone.isFieldDefinition = false; clone.value = value; return clone; } /* "@type":"IntegerField", "name":"InStock", "identifier":"in_stock", "show_in_interface":true, "edit_in_interface":true, "input_type":"text_field", */ protected GenericField<T> parseValueJsonForField(JSONObject entry, String identifier) { Object valueJson = entry.get(identifier); if (valueJson == null) { return null; } T valueJava = deserializeJsonValue(valueJson); GenericField<T> instance = null; if (valueJava != null) { instance = cloneAsDataField(valueJava); } return instance; } abstract protected T deserializeJsonValue(Object jsonValue); // GETTERS and SETTERS /** * Gets the StorageRoom type label for this field. Examples include * StringField, TimeField, IntegerField, etc. * @return the String type */ public String getType() { return type; } /** * Gets the name of this field, provided by the admin user when creating * the field in the SR UI. 
* @return the String name */ public String getName() { return name; } /** * The internal identifier used for this field in API calls. Generally, * it is the lowercase version of the name, with spaces replaced with * underscores. * * @return the String id */ public String getIdentifier() { return identifier; } /** * Returns whether this field represents a compound type. This is useful * for StorageRoom UI apps to know when to show a complicated value editor. * Compound types are File, Image, Location and others. * @return true if it is a compound type, false if not. */ public boolean isCompoundFieldType() { return isCompoundFieldType; } /** * Returns the wrapper object that carrys the value of this field. * This will never return null, even if the underlying value is * not set. * <p> * This bears some more explanation. * <p> * Because this Java client is largely a json serialization/deserialization * tool (at least when we are down to the Field level), there is a lot of * machinery that is responsible for converting objects into StorageRoom * json and back. The value wrapper returned by this method is critical * to that capability. * <p> * If this Field object is a metadata field (i.e. associated with a Collection * not an Entry), then calling this method will result in an exception being * thrown. This radical approach is meant to root out serious coding bugs that * can result by mistaking Collection as Entry fields. * * @return the wrapper GenericField subclass appropriate for the field type */ public T getValueWrapper() { if (isFieldDefinition) { throw new IllegalArgumentException(); } return value; } /** * A convenience method to quickly get the value of this field as a String. 
* * @return the String value */ public String getValueAsString() { if (isFieldDefinition) { throw new IllegalArgumentException(); } String valueStr = null; if (value != null) { valueStr = value.toString(); } return valueStr; } /** * When planning to update an Entry, this method allows you to set the * Field value to a new value. * <p> * Note, if you wish to nullify a field value, do not pass null to this method. * Instead, instantiate the proper GenericValue subclass (e.g. StringValue) using * the default constructor, which sets the inner value to null. Then, pass that * constructed GenericValue into this method. * * @param newValue the new value for the field. It should never be null. */ public void setValue(T newValue) { if (isFieldDefinition) { throw new IllegalArgumentException(); } value = newValue; } /** * Determines if two fields refer to the same Field in the same Collection. * It does NOT check for value equality, entry equality, or anything else. * @param match the field to match * @return true if they are the same field in the collection */ public boolean isSameField(GenericField<?> match) { if (match == null) { return false; } if (!this.parentCollection.equals(match.parentCollection)) { return false; } if (!this.identifier.equals(match.identifier)) { return false; } return true; } // Serialization @Override public String toString() { StringBuilder sb = new StringBuilder(); if (isFieldDefinition) { sb.append("{ type ["); sb.append(type); sb.append("] name ["); sb.append(name); sb.append("] id ["); sb.append(identifier); sb.append("] value ["); sb.append(value); sb.append("]"); sb.append("}"); } else { String valueStr = "null"; if (value != null) { valueStr = value.toString(); } sb.append(identifier); sb.append(" ["); sb.append(valueStr); sb.append("] "); } return sb.toString(); } public String toJSONString() { StringBuilder sb = new StringBuilder(); if (isFieldDefinition) { sb.append("{ \"@type\": \""); sb.append(type); sb.append("\", \"name\": \""); 
sb.append(name); sb.append("\", \"identifier\": \""); sb.append(identifier); sb.append("\""); sb.append(" }"); } else { if (value == null) { return null; } sb.append("\""); sb.append(identifier); sb.append("\": "); sb.append(value.toJSONString()); } return sb.toString(); } }
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.client.file; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import static org.mockito.Matchers.anyInt; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyLong; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import alluxio.AlluxioURI; import alluxio.ClientContext; import alluxio.ConfigurationTestUtils; import alluxio.client.block.AlluxioBlockStore; import alluxio.client.block.BlockWorkerInfo; import alluxio.client.block.stream.BlockInStream; import alluxio.client.block.stream.BlockInStream.BlockInStreamSource; import alluxio.client.block.stream.TestBlockInStream; import alluxio.client.file.options.InStreamOptions; import alluxio.client.util.ClientTestUtils; import alluxio.conf.InstancedConfiguration; import alluxio.conf.PropertyKey; import alluxio.exception.PreconditionMessage; import alluxio.exception.status.UnavailableException; import alluxio.grpc.OpenFilePOptions; import alluxio.grpc.ReadPType; import alluxio.util.io.BufferUtils; import alluxio.wire.BlockInfo; import alluxio.wire.FileBlockInfo; import alluxio.wire.FileInfo; import alluxio.wire.WorkerNetAddress; import org.junit.After; import org.junit.Before; import org.junit.Test; import 
org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.modules.junit4.PowerMockRunnerDelegate;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * Tests for the {@link AlluxioFileInStream} class.
 *
 * It is a parameterized test that checks different caching behaviors when the blocks are located at
 * different locations.
 */
@RunWith(PowerMockRunner.class)
@PowerMockRunnerDelegate(Parameterized.class)
@PrepareForTest({FileSystemContext.class, AlluxioBlockStore.class, BlockInStream.class})
public final class AlluxioFileInStreamTest {
  private static final long BLOCK_LENGTH = 100L;
  private static final long FILE_LENGTH = 350L;
  // FILE_LENGTH is not a multiple of BLOCK_LENGTH, so the last stream is a partial block.
  private static final long NUM_STREAMS = ((FILE_LENGTH - 1) / BLOCK_LENGTH) + 1;

  private AlluxioBlockStore mBlockStore;
  private BlockInStreamSource mBlockSource;
  private FileSystemContext mContext;
  private FileInfo mInfo;
  private URIStatus mStatus;
  private static InstancedConfiguration sConf = ConfigurationTestUtils.defaults();

  private List<TestBlockInStream> mInStreams;

  private FileInStream mTestStream;

  /**
   * @return a list of all sources of where the blocks reside
   */
  @Parameterized.Parameters
  public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][] {
        {BlockInStreamSource.PROCESS_LOCAL},
        {BlockInStreamSource.NODE_LOCAL},
        {BlockInStreamSource.UFS},
        {BlockInStreamSource.REMOTE}
    });
  }

  /**
   * @param blockSource the source of the block to read
   */
  public AlluxioFileInStreamTest(BlockInStreamSource blockSource) {
    mBlockSource = blockSource;
  }

  /**
   * @param streamId the index of the block stream
   * @return the length of the block backing the given stream (the last block is partial)
   */
  private long getBlockLength(int streamId) {
    return streamId == NUM_STREAMS - 1 ? 50 : BLOCK_LENGTH;
  }

  /**
   * Sets up the context and streams before a test runs.
   */
  @Before
  public void before() throws Exception {
    sConf = ConfigurationTestUtils.defaults();
    mInfo = new FileInfo().setBlockSizeBytes(BLOCK_LENGTH).setLength(FILE_LENGTH);

    ClientTestUtils.setSmallBufferSizes(sConf);
    // Short retry intervals so retry-path tests finish quickly.
    sConf.set(PropertyKey.USER_BLOCK_READ_RETRY_SLEEP_MIN, "1ms");
    sConf.set(PropertyKey.USER_BLOCK_READ_RETRY_SLEEP_MAX, "5ms");
    sConf.set(PropertyKey.USER_BLOCK_READ_RETRY_MAX_DURATION, "1s");

    mContext = PowerMockito.mock(FileSystemContext.class);
    when(mContext.getClientContext()).thenReturn(ClientContext.create(sConf));
    when(mContext.getClusterConf()).thenReturn(sConf);
    when(mContext.getPathConf(any(AlluxioURI.class))).thenReturn(sConf);
    PowerMockito.when(mContext.getNodeLocalWorker()).thenReturn(new WorkerNetAddress());
    mBlockStore = mock(AlluxioBlockStore.class);
    PowerMockito.mockStatic(AlluxioBlockStore.class);
    PowerMockito.when(AlluxioBlockStore.create(mContext)).thenReturn(mBlockStore);
    when(mContext.getCachedWorkers()).thenReturn(new ArrayList<>());

    // Set up BufferedBlockInStreams and caching streams
    mInStreams = new ArrayList<>();
    List<Long> blockIds = new ArrayList<>();
    List<FileBlockInfo> fileBlockInfos = new ArrayList<>();
    for (int i = 0; i < NUM_STREAMS; i++) {
      blockIds.add((long) i);
      FileBlockInfo fbInfo = new FileBlockInfo().setBlockInfo(new BlockInfo().setBlockId(i));
      fileBlockInfos.add(fbInfo);
      // Block i holds the increasing byte sequence starting at i * BLOCK_LENGTH.
      final byte[] input = BufferUtils
          .getIncreasingByteArray((int) (i * BLOCK_LENGTH), (int) getBlockLength(i));
      mInStreams.add(new TestBlockInStream(input, i, input.length, false, mBlockSource));
      when(mContext.getCachedWorkers())
          .thenReturn(Arrays.asList(new BlockWorkerInfo(new WorkerNetAddress(), 0, 0)));
      // If the cached stream has been closed, hand out a fresh stream over the same data so a
      // block can be re-opened after a seek/close.
      when(mBlockStore.getInStream(eq((long) i), any(InStreamOptions.class), any()))
          .thenAnswer(invocation -> {
            long blockId = (Long) invocation.getArguments()[0];
            return mInStreams.get((int) blockId).isClosed()
                ? new TestBlockInStream(input, blockId, input.length, false, mBlockSource)
                : mInStreams.get((int) blockId);
          });
      when(mBlockStore.getInStream(eq(new BlockInfo().setBlockId(i)), any(InStreamOptions.class),
          any())).thenAnswer(invocation -> {
            long blockId = ((BlockInfo) invocation.getArguments()[0]).getBlockId();
            return mInStreams.get((int) blockId).isClosed()
                ? new TestBlockInStream(input, blockId, input.length, false, mBlockSource)
                : mInStreams.get((int) blockId);
          });
    }
    mInfo.setBlockIds(blockIds);
    mInfo.setFileBlockInfos(fileBlockInfos);

    mStatus = new URIStatus(mInfo);
    OpenFilePOptions readOptions =
        OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
    mTestStream = new AlluxioFileInStream(mStatus, new InStreamOptions(mStatus, readOptions,
        sConf), mContext);
  }

  @After
  public void after() {
    ClientTestUtils.resetClient(sConf);
  }

  /**
   * Tests that reading through the file one byte at a time will yield the correct data.
   */
  @Test
  public void singleByteRead() throws Exception {
    for (int i = 0; i < FILE_LENGTH; i++) {
      assertEquals(i & 0xff, mTestStream.read());
    }
    mTestStream.close();
  }

  /**
   * Tests that reading half of a file works.
   */
  @Test
  public void readHalfFile() throws Exception {
    testReadBuffer((int) (FILE_LENGTH / 2));
  }

  /**
   * Tests that reading a part of a file works.
   */
  @Test
  public void readPartialBlock() throws Exception {
    testReadBuffer((int) (BLOCK_LENGTH / 2));
  }

  /**
   * Tests that reading the complete block works.
   */
  @Test
  public void readBlock() throws Exception {
    testReadBuffer((int) BLOCK_LENGTH);
  }

  /**
   * Tests that reading the complete block works and the BlockInStream is closed.
   */
  @Test
  public void readBlockStreamCloseOnEnd() throws Exception {
    int dataRead = (int) BLOCK_LENGTH;
    byte[] buffer = new byte[dataRead];
    mTestStream.read(buffer);
    assertEquals(true, mInStreams.get(0).isClosed());
    mTestStream.close();
    assertArrayEquals(BufferUtils.getIncreasingByteArray(dataRead), buffer);
  }

  /**
   * Tests that reading the complete file works.
   */
  @Test
  public void readFile() throws Exception {
    testReadBuffer((int) FILE_LENGTH);
  }

  /**
   * Tests that reading the complete file works and all streams are closed when to the end of file.
   */
  @Test
  public void readFileStreamCloseOnEnd() throws Exception {
    int dataRead = (int) FILE_LENGTH;
    byte[] buffer = new byte[dataRead];
    mTestStream.read(buffer);
    for (int i = 0; i < NUM_STREAMS; i++) {
      assertEquals(true, mInStreams.get(i).isClosed());
    }
    mTestStream.close();
    assertArrayEquals(BufferUtils.getIncreasingByteArray(dataRead), buffer);
  }

  /**
   * Tests that reading a buffer at an offset writes the bytes to the correct places.
   */
  @Test
  public void readOffset() throws IOException {
    int offset = (int) (BLOCK_LENGTH / 3);
    int len = (int) BLOCK_LENGTH;
    byte[] buffer = new byte[offset + len];
    // Create expectedBuffer containing `offset` 0's followed by `len` increasing bytes
    byte[] expectedBuffer = new byte[offset + len];
    System.arraycopy(BufferUtils.getIncreasingByteArray(len), 0, expectedBuffer, offset, len);
    mTestStream.read(buffer, offset, len);
    assertArrayEquals(expectedBuffer, buffer);
  }

  /**
   * Read through the file in small chunks and verify each chunk.
   */
  @Test
  public void readManyChunks() throws IOException {
    int chunksize = 10;
    // chunksize must divide FILE_LENGTH evenly for this test to work
    assertEquals(0, FILE_LENGTH % chunksize);
    byte[] buffer = new byte[chunksize];
    int offset = 0;
    for (int i = 0; i < FILE_LENGTH / chunksize; i++) {
      mTestStream.read(buffer, 0, chunksize);
      assertArrayEquals(BufferUtils.getIncreasingByteArray(offset, chunksize), buffer);
      offset += chunksize;
    }
    mTestStream.close();
  }

  /**
   * Tests that {@link FileInStream#remaining()} is correctly updated during reads, skips, and
   * seeks.
   */
  @Test
  public void testRemaining() throws IOException {
    assertEquals(FILE_LENGTH, mTestStream.remaining());
    mTestStream.read();
    assertEquals(FILE_LENGTH - 1, mTestStream.remaining());
    mTestStream.read(new byte[150]);
    assertEquals(FILE_LENGTH - 151, mTestStream.remaining());
    mTestStream.skip(140);
    assertEquals(FILE_LENGTH - 291, mTestStream.remaining());
    mTestStream.seek(310);
    assertEquals(FILE_LENGTH - 310, mTestStream.remaining());
    mTestStream.seek(130);
    assertEquals(FILE_LENGTH - 130, mTestStream.remaining());
  }

  /**
   * Tests seek, particularly that seeking over part of a block will cause us not to cache it, and
   * cancels the existing cache stream.
   */
  @Test
  public void testSeek() throws IOException {
    int seekAmount = (int) (BLOCK_LENGTH / 2);
    int readAmount = (int) (BLOCK_LENGTH * 2);
    byte[] buffer = new byte[readAmount];
    // Seek halfway into block 1
    mTestStream.seek(seekAmount);
    // Read two blocks from 0.5 to 2.5
    mTestStream.read(buffer);
    assertArrayEquals(BufferUtils.getIncreasingByteArray(seekAmount, readAmount), buffer);

    // NOTE(review): an unused local (`byte[] expected`, derived from mBlockSource) was removed
    // here; it was computed but never asserted on — likely a leftover from a removed
    // cache-stream assertion.

    // Seek to current position (does nothing)
    mTestStream.seek(seekAmount + readAmount);
    // Seek a short way past start of block 3
    mTestStream.seek((long) (BLOCK_LENGTH * 3.1));
    assertEquals(BufferUtils.byteToInt((byte) (BLOCK_LENGTH * 3.1)), mTestStream.read());
    mTestStream.seek(FILE_LENGTH);
  }

  /**
   * Tests seeking back to the beginning of a block after the block's remaining is 0.
   */
  @Test
  public void seekToBeginningAfterReadingWholeBlock() throws IOException {
    // Read the whole block.
    int blockSize = (int) BLOCK_LENGTH;
    byte[] block = new byte[blockSize];
    mTestStream.read(block);
    assertArrayEquals(BufferUtils.getIncreasingByteArray(0, blockSize), block);

    // Seek to the beginning of the current block, then read half of it.
    mTestStream.seek(0);
    int halfBlockSize = blockSize / 2;
    byte[] halfBlock = new byte[halfBlockSize];
    mTestStream.read(halfBlock);
    assertArrayEquals(BufferUtils.getIncreasingByteArray(0, halfBlockSize), halfBlock);
  }

  /**
   * Tests seeking to the beginning of the last block after reaching EOF.
   */
  @Test
  public void seekToLastBlockAfterReachingEOF() throws IOException {
    mTestStream.read(new byte[(int) FILE_LENGTH]);
    mTestStream.seek(FILE_LENGTH - BLOCK_LENGTH);
    byte[] block = new byte[(int) BLOCK_LENGTH];
    mTestStream.read(block);
    assertArrayEquals(BufferUtils.getIncreasingByteArray(
        (int) (FILE_LENGTH - BLOCK_LENGTH), (int) BLOCK_LENGTH), block);
  }

  /**
   * Tests seeking to EOF, then seeking to position 0 and read the whole file.
   */
  @Test
  public void seekToEOFBeforeReadingFirstBlock() throws IOException {
    mTestStream.seek(FILE_LENGTH);
    mTestStream.seek(0);
    byte[] block = new byte[(int) BLOCK_LENGTH];
    mTestStream.read(block);
    assertArrayEquals(
        BufferUtils.getIncreasingByteArray(0, (int) BLOCK_LENGTH), block);
  }

  /**
   * Tests seeking with incomplete block caching enabled. It seeks backward for more than a block.
   */
  @Test
  public void longSeekBackwardCachingPartiallyReadBlocks() throws IOException {
    OpenFilePOptions options =
        OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
    mTestStream = new AlluxioFileInStream(mStatus, new InStreamOptions(mStatus, options, sConf),
        mContext);
    int seekAmount = (int) (BLOCK_LENGTH / 4 + BLOCK_LENGTH);
    int readAmount = (int) (BLOCK_LENGTH * 3 - BLOCK_LENGTH / 2);
    byte[] buffer = new byte[readAmount];
    mTestStream.read(buffer);

    // Seek backward.
    mTestStream.seek(readAmount - seekAmount);

    // Block 2 is cached though it is not fully read.
    validatePartialCaching(2, (int) BLOCK_LENGTH / 2);
  }

  /**
   * Tests reading and seeking with no local worker. Nothing should be cached.
   */
  @Test
  public void testSeekWithNoLocalWorker() throws IOException {
    // Overrides the get local worker call
    PowerMockito.when(mContext.getNodeLocalWorker()).thenReturn(null);
    OpenFilePOptions options =
        OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
    mTestStream = new AlluxioFileInStream(mStatus, new InStreamOptions(mStatus, options, sConf),
        mContext);
    int readAmount = (int) (BLOCK_LENGTH / 2);
    byte[] buffer = new byte[readAmount];
    // read and seek several times
    mTestStream.read(buffer);
    assertEquals(readAmount, mInStreams.get(0).getBytesRead());
    mTestStream.seek(BLOCK_LENGTH + BLOCK_LENGTH / 2);
    mTestStream.seek(0);

    // only reads the read amount, regardless of block source
    assertEquals(readAmount, mInStreams.get(0).getBytesRead());
    assertEquals(0, mInStreams.get(1).getBytesRead());
  }

  @Test
  public void seekAndClose() throws IOException {
    OpenFilePOptions options =
        OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
    mTestStream = new AlluxioFileInStream(mStatus, new InStreamOptions(mStatus, options, sConf),
        mContext);
    int seekAmount = (int) (BLOCK_LENGTH / 2);
    mTestStream.seek(seekAmount);
    mTestStream.close();

    // Block 0 is cached though it is not fully read.
    validatePartialCaching(0, 0);
  }

  /**
   * Tests seeking with incomplete block caching enabled. It seeks backward within 1 block.
   */
  @Test
  public void shortSeekBackwardCachingPartiallyReadBlocks() throws IOException {
    OpenFilePOptions options =
        OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
    mTestStream = new AlluxioFileInStream(mStatus, new InStreamOptions(mStatus, options, sConf),
        mContext);
    int seekAmount = (int) (BLOCK_LENGTH / 4);
    int readAmount = (int) (BLOCK_LENGTH * 2 - BLOCK_LENGTH / 2);
    byte[] buffer = new byte[readAmount];
    mTestStream.read(buffer);

    // Seek backward.
    mTestStream.seek(readAmount - seekAmount);

    // Block 1 is cached though it is not fully read.
    validatePartialCaching(1, (int) BLOCK_LENGTH / 2);

    // Seek many times. It will cache block 1 only once.
    for (int i = 0; i <= seekAmount; i++) {
      mTestStream.seek(readAmount - seekAmount - i);
    }
    validatePartialCaching(1, (int) BLOCK_LENGTH / 2);
  }

  /**
   * Tests seeking with incomplete block caching enabled. It seeks forward for more than a block.
   */
  @Test
  public void longSeekForwardCachingPartiallyReadBlocks() throws IOException {
    OpenFilePOptions options =
        OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
    mTestStream = new AlluxioFileInStream(mStatus, new InStreamOptions(mStatus, options, sConf),
        mContext);
    int seekAmount = (int) (BLOCK_LENGTH / 4 + BLOCK_LENGTH);
    int readAmount = (int) (BLOCK_LENGTH / 2);
    byte[] buffer = new byte[readAmount];
    mTestStream.read(buffer);

    // Seek backward.
    mTestStream.seek(readAmount + seekAmount);

    // Block 0 is cached though it is not fully read.
    validatePartialCaching(0, readAmount);

    // Block 1 is being cached though its prefix it not read.
    validatePartialCaching(1, 0);
    mTestStream.close();
    validatePartialCaching(1, 0);
  }

  /**
   * Tests seeking with incomplete block caching enabled. It seeks forward within a block.
   */
  @Test
  public void shortSeekForwardCachingPartiallyReadBlocks() throws IOException {
    OpenFilePOptions options =
        OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
    mTestStream = new AlluxioFileInStream(mStatus, new InStreamOptions(mStatus, options, sConf),
        mContext);
    int seekAmount = (int) (BLOCK_LENGTH / 4);
    int readAmount = (int) (BLOCK_LENGTH * 2 - BLOCK_LENGTH / 2);
    byte[] buffer = new byte[readAmount];
    mTestStream.read(buffer);

    // Seek backward.
    mTestStream.seek(readAmount + seekAmount);

    // Block 1 (till seek pos) is being cached.
    validatePartialCaching(1, (int) BLOCK_LENGTH / 2);

    // Seek forward many times. The prefix is always cached.
    for (int i = 0; i < seekAmount; i++) {
      mTestStream.seek(readAmount + seekAmount + i);
      validatePartialCaching(1, (int) BLOCK_LENGTH / 2);
    }
  }

  /**
   * Tests skipping backwards when the seek buffer size is smaller than block size.
   */
  @Test
  public void seekBackwardSmallSeekBuffer() throws IOException {
    OpenFilePOptions options =
        OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
    mTestStream = new AlluxioFileInStream(mStatus, new InStreamOptions(mStatus, options, sConf),
        mContext);
    int readAmount = (int) (BLOCK_LENGTH / 2);
    byte[] buffer = new byte[readAmount];
    mTestStream.read(buffer);

    mTestStream.seek(readAmount - 1);

    validatePartialCaching(0, readAmount);
  }

  /**
   * Tests seeking with incomplete block caching enabled. It seeks forward for more than a block
   * and then seek to the file beginning.
   */
  @Test
  public void seekBackwardToFileBeginning() throws IOException {
    OpenFilePOptions options =
        OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build();
    mTestStream = new AlluxioFileInStream(mStatus, new InStreamOptions(mStatus, options, sConf),
        mContext);
    int seekAmount = (int) (BLOCK_LENGTH / 4 + BLOCK_LENGTH);

    // Seek forward.
    mTestStream.seek(seekAmount);

    // Block 1 is partially cached though it is not fully read.
    validatePartialCaching(1, 0);

    // Seek backward.
    mTestStream.seek(0);

    // Block 1 is fully cached though it is not fully read.
    validatePartialCaching(1, 0);
    mTestStream.close();

    // block 0 is cached
    validatePartialCaching(0, 0);
  }

  /**
   * Tests skip, particularly that skipping the start of a block will cause us not to cache it, and
   * cancels the existing cache stream.
   */
  @Test
  public void testSkip() throws IOException {
    int skipAmount = (int) (BLOCK_LENGTH / 2);
    int readAmount = (int) (BLOCK_LENGTH * 2);
    byte[] buffer = new byte[readAmount];
    // Skip halfway into block 1
    mTestStream.skip(skipAmount);
    // Read two blocks from 0.5 to 2.5
    mTestStream.read(buffer);
    assertArrayEquals(BufferUtils.getIncreasingByteArray(skipAmount, readAmount), buffer);

    assertEquals(0, mTestStream.skip(0));
    // Skip the next half block, bringing us to block 3
    assertEquals(BLOCK_LENGTH / 2, mTestStream.skip(BLOCK_LENGTH / 2));
    assertEquals(BufferUtils.byteToInt((byte) (BLOCK_LENGTH * 3)), mTestStream.read());
  }

  /**
   * Tests that {@link IOException}s thrown by the {@link AlluxioBlockStore} are properly
   * propagated.
   */
  @Test
  public void failGetInStream() throws IOException {
    when(mBlockStore.getInStream(any(BlockInfo.class), any(InStreamOptions.class), any()))
        .thenThrow(new UnavailableException("test exception"));
    try {
      mTestStream.read();
      fail("block store should throw exception");
    } catch (IOException e) {
      assertEquals("test exception", e.getMessage());
    }
  }

  /**
   * Tests that reading out of bounds properly returns -1.
   */
  @Test
  public void readOutOfBounds() throws IOException {
    mTestStream.read(new byte[(int) FILE_LENGTH]);
    assertEquals(-1, mTestStream.read());
    assertEquals(-1, mTestStream.read(new byte[10]));
  }

  /**
   * Tests that specifying an invalid offset/length for a buffer read throws the right exception.
   */
  @Test
  public void readBadBuffer() throws IOException {
    try {
      mTestStream.read(new byte[10], 5, 6);
      fail("the buffer read of invalid offset/length should fail");
    } catch (IllegalArgumentException e) {
      assertEquals(String.format(PreconditionMessage.ERR_BUFFER_STATE.toString(), 10, 5, 6),
          e.getMessage());
    }
  }

  /**
   * Tests that seeking to a negative position will throw the right exception.
   */
  @Test
  public void seekNegative() throws IOException {
    try {
      mTestStream.seek(-1);
      fail("seeking negative position should fail");
    } catch (IllegalArgumentException e) {
      assertEquals(String.format(PreconditionMessage.ERR_SEEK_NEGATIVE.toString(), -1),
          e.getMessage());
    }
  }

  /**
   * Tests that seeking past the end of the stream will throw the right exception.
   */
  @Test
  public void seekPastEnd() throws IOException {
    try {
      mTestStream.seek(FILE_LENGTH + 1);
      fail("seeking past the end of the stream should fail");
    } catch (IllegalArgumentException e) {
      assertEquals(String.format(PreconditionMessage.ERR_SEEK_PAST_END_OF_FILE.toString(),
          FILE_LENGTH + 1), e.getMessage());
    }
  }

  /**
   * Tests that skipping a negative amount correctly reports that 0 bytes were skipped.
   */
  @Test
  public void skipNegative() throws IOException {
    assertEquals(0, mTestStream.skip(-10));
  }

  @Test
  public void positionedRead() throws IOException {
    byte[] b = new byte[(int) BLOCK_LENGTH];
    mTestStream.positionedRead(BLOCK_LENGTH, b, 0, b.length);
    assertArrayEquals(BufferUtils.getIncreasingByteArray((int) BLOCK_LENGTH, (int) BLOCK_LENGTH),
        b);
  }

  /**
   * Tests the BlockInStream is closed when reading to the end of the block.
   */
  @Test
  public void positionedReadStreamCloseOnEnd() throws IOException {
    byte[] b = new byte[(int) BLOCK_LENGTH];
    mTestStream.positionedRead(0, b, 0, b.length);
    assertEquals(true, mInStreams.get(0).isClosed());
    assertArrayEquals(BufferUtils.getIncreasingByteArray((int) 0, (int) BLOCK_LENGTH), b);
  }

  @Test
  public void multiBlockPositionedRead() throws IOException {
    byte[] b = new byte[(int) BLOCK_LENGTH * 2];
    mTestStream.positionedRead(BLOCK_LENGTH / 2, b, 0, b.length);
    assertArrayEquals(BufferUtils.getIncreasingByteArray((int) BLOCK_LENGTH / 2,
        (int) BLOCK_LENGTH * 2), b);
  }

  @Test
  public void readOneRetry() throws Exception {
    long offset = 37;
    // Setups a broken stream for the first block to throw an exception.
    TestBlockInStream workingStream = mInStreams.get(0);
    TestBlockInStream brokenStream = mock(TestBlockInStream.class);
    when(mBlockStore
        .getInStream(any(BlockInfo.class), any(InStreamOptions.class), any()))
        .thenReturn(brokenStream).thenReturn(workingStream);
    when(brokenStream.read()).thenThrow(new UnavailableException("test exception"));
    when(brokenStream.getPos()).thenReturn(offset);

    mTestStream.seek(offset);
    int b = mTestStream.read();

    // Re-stub before verifying so the verify itself doesn't trip the thenThrow.
    doReturn(0).when(brokenStream).read();
    verify(brokenStream, times(1)).read();
    // The data is an increasing byte array, so the byte at `offset` equals `offset`.
    assertEquals(offset, b);
  }

  @Test
  public void readBufferRetry() throws Exception {
    TestBlockInStream workingStream = mInStreams.get(0);
    TestBlockInStream brokenStream = mock(TestBlockInStream.class);
    when(mBlockStore
        .getInStream(any(BlockInfo.class), any(InStreamOptions.class), any()))
        .thenReturn(brokenStream).thenReturn(workingStream);
    when(brokenStream.read(any(ByteBuffer.class), anyInt(), anyInt()))
        .thenThrow(new UnavailableException("test exception"));
    when(brokenStream.getPos()).thenReturn(BLOCK_LENGTH / 2);

    mTestStream.seek(BLOCK_LENGTH / 2);
    byte[] b = new byte[(int) BLOCK_LENGTH * 2];
    mTestStream.read(b, 0, b.length);

    doReturn(0).when(brokenStream).read(any(ByteBuffer.class), anyInt(), anyInt());
    verify(brokenStream, times(1))
        .read(any(ByteBuffer.class), anyInt(), anyInt());
    assertArrayEquals(BufferUtils.getIncreasingByteArray((int) BLOCK_LENGTH / 2,
        (int) BLOCK_LENGTH * 2), b);
  }

  @Test
  public void positionedReadRetry() throws Exception {
    TestBlockInStream workingStream = mInStreams.get(0);
    TestBlockInStream brokenStream = mock(TestBlockInStream.class);
    when(mBlockStore
        .getInStream(eq(0L), any(InStreamOptions.class), any()))
        .thenReturn(brokenStream).thenReturn(workingStream);
    when(brokenStream.positionedRead(anyLong(), any(byte[].class), anyInt(), anyInt()))
        .thenThrow(new UnavailableException("test exception"));

    byte[] b = new byte[(int) BLOCK_LENGTH * 2];
    mTestStream.positionedRead(BLOCK_LENGTH / 2, b, 0, b.length);

    doReturn(0)
        .when(brokenStream).positionedRead(anyLong(), any(byte[].class), anyInt(), anyInt());
    verify(brokenStream, times(1))
        .positionedRead(anyLong(), any(byte[].class), anyInt(), anyInt());
    assertArrayEquals(BufferUtils.getIncreasingByteArray((int) BLOCK_LENGTH / 2,
        (int) BLOCK_LENGTH * 2), b);
  }

  /**
   * Tests that when the underlying blocks are inconsistent with the metadata in terms of block
   * length, an exception is thrown rather than client hanging indefinitely. This case may happen if
   * the file in Alluxio and UFS is out of sync.
   */
  @Test
  public void blockInStreamOutOfSync() throws Exception {
    when(mBlockStore.getInStream(any(BlockInfo.class), any(InStreamOptions.class), any()))
        .thenAnswer(new Answer<BlockInStream>() {
          @Override
          public BlockInStream answer(InvocationOnMock invocation) throws Throwable {
            // Stream claims BLOCK_LENGTH bytes but only backs 1 byte of data.
            return new TestBlockInStream(new byte[1], 0, BLOCK_LENGTH, false, mBlockSource);
          }
        });
    byte[] buffer = new byte[(int) BLOCK_LENGTH];
    try {
      mTestStream.read(buffer, 0, (int) BLOCK_LENGTH);
      fail("BlockInStream is inconsistent, an Exception is expected");
    } catch (IllegalStateException e) {
      // expect an exception to throw
    }
  }

  /**
   * Tests that reading dataRead bytes into a buffer will properly write those bytes to the cache
   * streams and that the correct bytes are read from the {@link FileInStream}.
   *
   * @param dataRead the bytes to read
   */
  private void testReadBuffer(int dataRead) throws Exception {
    byte[] buffer = new byte[dataRead];
    mTestStream.read(buffer);
    mTestStream.close();

    assertArrayEquals(BufferUtils.getIncreasingByteArray(dataRead), buffer);
  }

  /**
   * Validates the partial caching behavior. This function
   * verifies the block at the given index is read for the given sizes.
   */
  // TODO(binfan): with better netty RPC mocking, verify that async cache request for the target
  // block is sent to the netty channel
  private void validatePartialCaching(int index, int readSize) {
    assertEquals(readSize, mInStreams.get(index).getBytesRead());
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.update; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.instance.TransportInstanceSingleOperationAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.PlainShardIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.engine.DocumentAlreadyExistsException; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndexAlreadyExistsException; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.Collections; import java.util.Map; /** */ public class TransportUpdateAction extends TransportInstanceSingleOperationAction<UpdateRequest, UpdateResponse> { private final TransportDeleteAction deleteAction; private final TransportIndexAction indexAction; private final AutoCreateIndex autoCreateIndex; private final TransportCreateIndexAction createIndexAction; private final UpdateHelper updateHelper; private final IndicesService indicesService; @Inject public TransportUpdateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, TransportIndexAction indexAction, TransportDeleteAction deleteAction, TransportCreateIndexAction createIndexAction, UpdateHelper updateHelper, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService, AutoCreateIndex autoCreateIndex) { super(settings, UpdateAction.NAME, threadPool, clusterService, transportService, actionFilters, 
indexNameExpressionResolver, UpdateRequest.class);
    this.indexAction = indexAction;
    this.deleteAction = deleteAction;
    this.createIndexAction = createIndexAction;
    this.updateHelper = updateHelper;
    this.indicesService = indicesService;
    this.autoCreateIndex = autoCreateIndex;
}

// Update operations run on the index thread pool.
@Override
protected String executor() {
    return ThreadPool.Names.INDEX;
}

@Override
protected UpdateResponse newResponse() {
    return new UpdateResponse();
}

// Retry the whole action when the target shard is (temporarily) unavailable.
@Override
protected boolean retryOnFailure(Throwable e) {
    return TransportActions.isShardNotAvailableException(e);
}

// Resolves routing from cluster metadata and validates that required routing is present.
@Override
protected boolean resolveRequest(ClusterState state, UpdateRequest request, ActionListener<UpdateResponse> listener) {
    request.routing((state.metaData().resolveIndexRouting(request.routing(), request.index())));
    // Fail fast on the node that received the request, rather than failing when translating on the index or delete request.
    if (request.routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.type())) {
        throw new RoutingMissingException(request.concreteIndex(), request.type(), request.id());
    }
    return true;
}

// Entry point: optionally auto-creates the target index, then delegates to the base implementation.
@Override
protected void doExecute(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
    // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API
    if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) {
        createIndexAction.execute(new CreateIndexRequest(request).index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
            @Override
            public void onResponse(CreateIndexResponse result) {
                innerExecute(request, listener);
            }

            @Override
            public void onFailure(Throwable e) {
                if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
                    // we have the index, do it
                    try {
                        innerExecute(request, listener);
                    } catch (Throwable e1) {
                        listener.onFailure(e1);
                    }
                } else {
                    listener.onFailure(e);
                }
            }
        });
    } else {
        innerExecute(request, listener);
    }
}

private void innerExecute(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
    super.doExecute(request, listener);
}

// Picks an iterator over the primary shard for the request; an explicit shardId wins over routing.
@Override
protected ShardIterator shards(ClusterState clusterState, UpdateRequest request) {
    if (request.shardId() != -1) {
        return clusterState.routingTable().index(request.concreteIndex()).shard(request.shardId()).primaryShardIt();
    }
    ShardIterator shardIterator = clusterService.operationRouting()
            .indexShards(clusterState, request.concreteIndex(), request.type(), request.id(), request.routing());
    ShardRouting shard;
    while ((shard = shardIterator.nextOrNull()) != null) {
        if (shard.primary()) {
            return new PlainShardIterator(shardIterator.shardId(), Collections.singletonList(shard));
        }
    }
    // No primary found: return an empty iterator for the shard id.
    return new PlainShardIterator(shardIterator.shardId(), Collections.<ShardRouting>emptyList());
}

@Override
protected void shardOperation(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
    shardOperation(request, listener, 0);
}

// Translates the update into an UPSERT/INDEX/DELETE/NONE operation and executes it.
// Version conflicts are retried up to request.retryOnConflict() times (retryCount tracks attempts).
protected void shardOperation(final UpdateRequest request, final ActionListener<UpdateResponse> listener, final int retryCount) {
    IndexService indexService = indicesService.indexServiceSafe(request.concreteIndex());
    IndexShard indexShard = indexService.shardSafe(request.shardId());
    final UpdateHelper.Result result = updateHelper.prepare(request, indexShard);
    switch (result.operation()) {
        case UPSERT:
            IndexRequest upsertRequest = new IndexRequest((IndexRequest)result.action(), request);
            // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
            final BytesReference upsertSourceBytes = upsertRequest.source();
            indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
                @Override
                public void onResponse(IndexResponse response) {
                    UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
                    // Only materialize the source map when the caller asked for fields back.
                    if (request.fields() != null && request.fields().length > 0) {
                        Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
                        update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
                    } else {
                        update.setGetResult(null);
                    }
                    listener.onResponse(update);
                }

                @Override
                public void onFailure(Throwable e) {
                    e = ExceptionsHelper.unwrapCause(e);
                    // An upsert may also race with a concurrent create, hence DocumentAlreadyExistsException is retriable here.
                    if (e instanceof VersionConflictEngineException || e instanceof DocumentAlreadyExistsException) {
                        if (retryCount < request.retryOnConflict()) {
                            threadPool.executor(executor()).execute(new ActionRunnable<UpdateResponse>(listener) {
                                @Override
                                protected void doRun() {
                                    shardOperation(request, listener, retryCount + 1);
                                }
                            });
                            return;
                        }
                    }
                    listener.onFailure(e);
                }
            });
            break;
        case INDEX:
            IndexRequest indexRequest = new IndexRequest((IndexRequest)result.action(), request);
            // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
            final BytesReference indexSourceBytes = indexRequest.source();
            indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
                @Override
                public void onResponse(IndexResponse response) {
                    UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
                    update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes));
                    listener.onResponse(update);
                }

                @Override
                public void onFailure(Throwable e) {
                    e = ExceptionsHelper.unwrapCause(e);
                    if (e instanceof VersionConflictEngineException) {
                        if (retryCount < request.retryOnConflict()) {
                            threadPool.executor(executor()).execute(new ActionRunnable<UpdateResponse>(listener) {
                                @Override
                                protected void doRun() {
                                    shardOperation(request, listener, retryCount + 1);
                                }
                            });
                            return;
                        }
                    }
                    listener.onFailure(e);
                }
            });
            break;
        case DELETE:
            DeleteRequest deleteRequest = new DeleteRequest((DeleteRequest)result.action(), request);
            deleteAction.execute(deleteRequest, new ActionListener<DeleteResponse>() {
                @Override
                public void onResponse(DeleteResponse response) {
                    // A delete never creates a document, hence created == false.
                    UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), false);
                    update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null));
                    listener.onResponse(update);
                }

                @Override
                public void onFailure(Throwable e) {
                    e = ExceptionsHelper.unwrapCause(e);
                    if (e instanceof VersionConflictEngineException) {
                        if (retryCount < request.retryOnConflict()) {
                            threadPool.executor(executor()).execute(new ActionRunnable<UpdateResponse>(listener) {
                                @Override
                                protected void doRun() {
                                    shardOperation(request, listener, retryCount + 1);
                                }
                            });
                            return;
                        }
                    }
                    listener.onFailure(e);
                }
            });
            break;
        case NONE:
            UpdateResponse update = result.action();
            IndexService indexServiceOrNull = indicesService.indexService(request.concreteIndex());
            // NOTE(review): the null check is performed on indexServiceOrNull, but the shard lookup
            // below uses indexService (the non-null reference obtained via indexServiceSafe above).
            // Both refer to the same service here, so behavior is unaffected, but the lookup looks
            // intended for indexServiceOrNull - confirm.
            if (indexServiceOrNull != null) {
                IndexShard shard = indexService.shard(request.shardId());
                if (shard != null) {
                    // Record a no-op update in the shard's indexing stats.
                    shard.indexingService().noopUpdate(request.type());
                }
            }
            listener.onResponse(update);
            break;
        default:
            throw new IllegalStateException("Illegal operation " + result.operation());
    }
}
}
package com.mikepenz.materialdrawer.model; import android.content.Context; import android.graphics.drawable.Drawable; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.CompoundButton; import android.widget.ImageView; import android.widget.TextView; import android.widget.ToggleButton; import com.mikepenz.iconics.IconicsDrawable; import com.mikepenz.materialdrawer.R; import com.mikepenz.materialdrawer.model.interfaces.OnCheckedChangeListener; import com.mikepenz.materialdrawer.util.PressedEffectStateListDrawable; import com.mikepenz.materialdrawer.util.UIUtils; /** * Created by mikepenz on 03.02.15. */ public class ToggleDrawerItem extends BaseDrawerItem<ToggleDrawerItem> { private String description; private int descriptionRes = -1; private boolean checked = false; private OnCheckedChangeListener onCheckedChangeListener = null; public ToggleDrawerItem withDescription(String description) { this.description = description; return this; } public ToggleDrawerItem withDescription(int descriptionRes) { this.descriptionRes = descriptionRes; return this; } public ToggleDrawerItem withChecked(boolean checked) { this.checked = checked; return this; } public ToggleDrawerItem withOnCheckedChangeListener(OnCheckedChangeListener onCheckedChangeListener) { this.onCheckedChangeListener = onCheckedChangeListener; return this; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public int getDescriptionRes() { return descriptionRes; } public void setDescriptionRes(int descriptionRes) { this.descriptionRes = descriptionRes; } public boolean isChecked() { return checked; } public void setChecked(boolean checked) { this.checked = checked; } public OnCheckedChangeListener getOnCheckedChangeListener() { return onCheckedChangeListener; } public void setOnCheckedChangeListener(OnCheckedChangeListener onCheckedChangeListener) { 
this.onCheckedChangeListener = onCheckedChangeListener; } @Override public String getType() { return "TOGGLE_ITEM"; } @Override public int getLayoutRes() { return R.layout.material_drawer_item_toggle; } @Override public View convertView(LayoutInflater inflater, View convertView, ViewGroup parent) { Context ctx = parent.getContext(); final ViewHolder viewHolder; if (convertView == null) { convertView = inflater.inflate(getLayoutRes(), parent, false); viewHolder = new ViewHolder(convertView); convertView.setTag(viewHolder); } else { viewHolder = (ViewHolder) convertView.getTag(); } int selected_color = getSelectedColor(); if (selected_color == 0 && getSelectedColorRes() != -1) { selected_color = ctx.getResources().getColor(getSelectedColorRes()); } else if (selected_color == 0) { selected_color = UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_selected, R.color.material_drawer_selected); } UIUtils.setBackground(viewHolder.view, UIUtils.getDrawerItemBackground(selected_color)); if (this.getNameRes() != -1) { viewHolder.name.setText(this.getNameRes()); } else { viewHolder.name.setText(this.getName()); } viewHolder.description.setVisibility(View.VISIBLE); if (this.getDescriptionRes() != -1) { viewHolder.description.setText(this.getDescriptionRes()); } else if (this.getDescription() != null) { viewHolder.description.setText(this.getDescription()); } else { viewHolder.description.setVisibility(View.GONE); } viewHolder.view.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { viewHolder.toggle.setChecked(!viewHolder.toggle.isChecked()); } }); viewHolder.toggle.setChecked(checked); viewHolder.toggle.setOnCheckedChangeListener(checkedChangeListener); //get the correct color for the text int color; int selected_text = getSelectedTextColor(); if (selected_text == 0 && getSelectedTextColorRes() != -1) { selected_text = ctx.getResources().getColor(getSelectedTextColorRes()); } else if (selected_text == 0) { selected_text = 
UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_selected_text, R.color.material_drawer_selected_text); } if (this.isEnabled()) { color = getTextColor(); if (color == 0 && getTextColorRes() != -1) { color = ctx.getResources().getColor(getTextColorRes()); } else if (color == 0) { color = UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_primary_text, R.color.material_drawer_primary_text); } } else { color = getDisabledTextColor(); if (color == 0 && getDisabledTextColorRes() != -1) { color = ctx.getResources().getColor(getDisabledTextColorRes()); } else if (color == 0) { color = UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_hint_text, R.color.material_drawer_hint_text); } } //get the correct color for the icon int iconColor; int selected_icon = getSelectedIconColor(); if (selected_icon == 0 && getSelectedIconColorRes() != -1) { selected_icon = ctx.getResources().getColor(getSelectedIconColorRes()); } else if (selected_icon == 0) { selected_icon = UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_selected_text, R.color.material_drawer_selected_text); } if (this.isEnabled()) { iconColor = getIconColor(); if (iconColor == 0 && getIconColorRes() != -1) { iconColor = ctx.getResources().getColor(getIconColorRes()); } else if (iconColor == 0) { iconColor = UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_primary_icon, R.color.material_drawer_primary_icon); } } else { iconColor = getDisabledIconColor(); if (iconColor == 0 && getDisabledIconColorRes() != -1) { iconColor = ctx.getResources().getColor(getDisabledIconColorRes()); } else if (iconColor == 0) { iconColor = UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_hint_text, R.color.material_drawer_hint_text); } } viewHolder.name.setTextColor(UIUtils.getTextColor(color, selected_text)); viewHolder.description.setTextColor(UIUtils.getTextColor(color, selected_text)); if (getTypeface() != null) { viewHolder.name.setTypeface(getTypeface()); 
viewHolder.description.setTypeface(getTypeface()); } Drawable icon = null; Drawable selectedIcon = null; if (this.getIcon() != null) { icon = this.getIcon(); if (this.getSelectedIcon() != null) { selectedIcon = this.getSelectedIcon(); } else if (this.isSelectedIconTinted()) { icon = new PressedEffectStateListDrawable(icon, selected_icon); } } else if (this.getIIcon() != null) { icon = new IconicsDrawable(ctx, this.getIIcon()).color(iconColor).actionBarSize().paddingDp(1); selectedIcon = new IconicsDrawable(ctx, this.getIIcon()).color(selected_icon).actionBarSize().paddingDp(1); } else if (this.getIconRes() > -1) { icon = UIUtils.getCompatDrawable(ctx, getIconRes()); if (this.getSelectedIconRes() > -1) { selectedIcon = UIUtils.getCompatDrawable(ctx, getSelectedIconRes()); } else if (this.isSelectedIconTinted()) { icon = new PressedEffectStateListDrawable(icon, selected_icon); } } if (icon != null) { if (selectedIcon != null) { viewHolder.icon.setImageDrawable(UIUtils.getIconColor(icon, selectedIcon)); } else { viewHolder.icon.setImageDrawable(icon); } viewHolder.icon.setVisibility(View.VISIBLE); } else { viewHolder.icon.setVisibility(View.GONE); } return convertView; } private static class ViewHolder { private View view; private ImageView icon; private TextView name; private TextView description; private ToggleButton toggle; private ViewHolder(View view) { this.view = view; this.icon = (ImageView) view.findViewById(R.id.icon); this.name = (TextView) view.findViewById(R.id.name); this.description = (TextView) view.findViewById(R.id.description); this.toggle = (ToggleButton) view.findViewById(R.id.toggle); } } private CompoundButton.OnCheckedChangeListener checkedChangeListener = new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if (getOnCheckedChangeListener() != null) { getOnCheckedChangeListener().onCheckedChanged(ToggleDrawerItem.this, buttonView, isChecked); } } }; }
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.07 at 08:01:35 PM IST
//


package com.mozu.qbintegration.model.qbmodel.allgen;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;


/**
 * JAXB binding for the anonymous {@code ItemLineRet} complex type: one item line of a
 * QuickBooks transaction response.
 *
 * <p>Schema constraints: {@code TxnLineID} is required; every other element is optional.
 * {@code SerialNumber} (max 4095 chars) and {@code LotNumber} (max 40 chars) form a choice -
 * at most one may be present. {@code Desc} is limited to 4095 chars and {@code UnitOfMeasure}
 * to 31 chars. {@code DataExtRet} may repeat (unbounded).
 *
 * <p>Generated code - do not edit by hand; changes are lost when the schema is recompiled.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "txnLineID",
    "itemRef",
    "inventorySiteRef",
    "inventorySiteLocationRef",
    "serialNumber",
    "lotNumber",
    "desc",
    "quantity",
    "unitOfMeasure",
    "overrideUOMSetRef",
    "cost",
    "amount",
    "taxAmount",
    "customerRef",
    "classRef",
    "salesTaxCodeRef",
    "billableStatus",
    "salesRepRef",
    "dataExtRet"
})
@XmlRootElement(name = "ItemLineRet")
public class ItemLineRet {

    @XmlElement(name = "TxnLineID", required = true)
    protected String txnLineID;
    @XmlElement(name = "ItemRef")
    protected ItemRef itemRef;
    @XmlElement(name = "InventorySiteRef")
    protected InventorySiteRef inventorySiteRef;
    @XmlElement(name = "InventorySiteLocationRef")
    protected InventorySiteLocationRef inventorySiteLocationRef;
    @XmlElement(name = "SerialNumber")
    protected String serialNumber;
    @XmlElement(name = "LotNumber")
    protected String lotNumber;
    @XmlElement(name = "Desc")
    protected String desc;
    @XmlElement(name = "Quantity")
    protected String quantity;
    @XmlElement(name = "UnitOfMeasure")
    protected String unitOfMeasure;
    @XmlElement(name = "OverrideUOMSetRef")
    protected OverrideUOMSetRef overrideUOMSetRef;
    @XmlElement(name = "Cost")
    protected String cost;
    @XmlElement(name = "Amount")
    protected String amount;
    @XmlElement(name = "TaxAmount")
    protected String taxAmount;
    @XmlElement(name = "CustomerRef")
    protected CustomerRef customerRef;
    @XmlElement(name = "ClassRef")
    protected ClassRef classRef;
    @XmlElement(name = "SalesTaxCodeRef")
    protected SalesTaxCodeRef salesTaxCodeRef;
    @XmlElement(name = "BillableStatus")
    protected String billableStatus;
    @XmlElement(name = "SalesRepRef")
    protected SalesRepRef salesRepRef;
    @XmlElement(name = "DataExtRet")
    protected List<DataExtRet> dataExtRet;

    /** Returns the required transaction line id, possibly null when unset. */
    public String getTxnLineID() {
        return txnLineID;
    }

    /** Sets the required transaction line id. */
    public void setTxnLineID(String value) {
        this.txnLineID = value;
    }

    /** Returns the referenced item, or null. */
    public ItemRef getItemRef() {
        return itemRef;
    }

    /** Sets the referenced item. */
    public void setItemRef(ItemRef value) {
        this.itemRef = value;
    }

    /** Returns the inventory site reference, or null. */
    public InventorySiteRef getInventorySiteRef() {
        return inventorySiteRef;
    }

    /** Sets the inventory site reference. */
    public void setInventorySiteRef(InventorySiteRef value) {
        this.inventorySiteRef = value;
    }

    /** Returns the inventory site location reference, or null. */
    public InventorySiteLocationRef getInventorySiteLocationRef() {
        return inventorySiteLocationRef;
    }

    /** Sets the inventory site location reference. */
    public void setInventorySiteLocationRef(InventorySiteLocationRef value) {
        this.inventorySiteLocationRef = value;
    }

    /** Returns the serial number (mutually exclusive with lot number), or null. */
    public String getSerialNumber() {
        return serialNumber;
    }

    /** Sets the serial number (max 4095 chars per schema). */
    public void setSerialNumber(String value) {
        this.serialNumber = value;
    }

    /** Returns the lot number (mutually exclusive with serial number), or null. */
    public String getLotNumber() {
        return lotNumber;
    }

    /** Sets the lot number (max 40 chars per schema). */
    public void setLotNumber(String value) {
        this.lotNumber = value;
    }

    /** Returns the line description, or null. */
    public String getDesc() {
        return desc;
    }

    /** Sets the line description (max 4095 chars per schema). */
    public void setDesc(String value) {
        this.desc = value;
    }

    /** Returns the quantity as its raw string representation, or null. */
    public String getQuantity() {
        return quantity;
    }

    /** Sets the quantity (raw string representation). */
    public void setQuantity(String value) {
        this.quantity = value;
    }

    /** Returns the unit of measure, or null. */
    public String getUnitOfMeasure() {
        return unitOfMeasure;
    }

    /** Sets the unit of measure (max 31 chars per schema). */
    public void setUnitOfMeasure(String value) {
        this.unitOfMeasure = value;
    }

    /** Returns the unit-of-measure set override reference, or null. */
    public OverrideUOMSetRef getOverrideUOMSetRef() {
        return overrideUOMSetRef;
    }

    /** Sets the unit-of-measure set override reference. */
    public void setOverrideUOMSetRef(OverrideUOMSetRef value) {
        this.overrideUOMSetRef = value;
    }

    /** Returns the cost as its raw string representation, or null. */
    public String getCost() {
        return cost;
    }

    /** Sets the cost (raw string representation). */
    public void setCost(String value) {
        this.cost = value;
    }

    /** Returns the amount as its raw string representation, or null. */
    public String getAmount() {
        return amount;
    }

    /** Sets the amount (raw string representation). */
    public void setAmount(String value) {
        this.amount = value;
    }

    /** Returns the tax amount as its raw string representation, or null. */
    public String getTaxAmount() {
        return taxAmount;
    }

    /** Sets the tax amount (raw string representation). */
    public void setTaxAmount(String value) {
        this.taxAmount = value;
    }

    /** Returns the customer reference, or null. */
    public CustomerRef getCustomerRef() {
        return customerRef;
    }

    /** Sets the customer reference. */
    public void setCustomerRef(CustomerRef value) {
        this.customerRef = value;
    }

    /** Returns the class reference, or null. */
    public ClassRef getClassRef() {
        return classRef;
    }

    /** Sets the class reference. */
    public void setClassRef(ClassRef value) {
        this.classRef = value;
    }

    /** Returns the sales tax code reference, or null. */
    public SalesTaxCodeRef getSalesTaxCodeRef() {
        return salesTaxCodeRef;
    }

    /** Sets the sales tax code reference. */
    public void setSalesTaxCodeRef(SalesTaxCodeRef value) {
        this.salesTaxCodeRef = value;
    }

    /** Returns the billable status, or null. */
    public String getBillableStatus() {
        return billableStatus;
    }

    /** Sets the billable status. */
    public void setBillableStatus(String value) {
        this.billableStatus = value;
    }

    /** Returns the sales rep reference, or null. */
    public SalesRepRef getSalesRepRef() {
        return salesRepRef;
    }

    /** Sets the sales rep reference. */
    public void setSalesRepRef(SalesRepRef value) {
        this.salesRepRef = value;
    }

    /**
     * Returns the live (never-null, lazily created) list of custom data extensions.
     * This is a reference to the internal list, not a snapshot: mutations made to the
     * returned list are reflected in this object, which is why there is no setter.
     */
    public List<DataExtRet> getDataExtRet() {
        if (dataExtRet == null) {
            dataExtRet = new ArrayList<DataExtRet>();
        }
        return this.dataExtRet;
    }

}
/**
 * Copyright 2014 Confluent Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.confluent.kafka.formatter;

import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.util.Utf8;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.errors.SerializationException;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
import kafka.common.KafkaException;
import kafka.producer.KeyedMessage;
import kafka.tools.ConsoleProducer.MessageReader;
import io.confluent.kafka.serializers.AbstractKafkaAvroSerializer;

/**
 * A {@link MessageReader} for kafka-console-producer.sh that parses each input line as JSON,
 * converts it to Avro using the configured schema(s) and serializes it with the schema registry.
 *
 * Example usage. To use AvroMessageReader, first make sure that Zookeeper, Kafka and the schema
 * registry server are all started. Second, make sure the jar for AvroMessageReader and its
 * dependencies are included in the classpath of kafka-console-producer.sh. Then run the
 * following command.
 *
 * 1. Send Avro string as value. (make sure there is no space in the schema string)
 * bin/kafka-console-producer.sh --broker-list localhost:9092 --topic t1 \
 *   --line-reader io.confluent.kafka.formatter.AvroMessageReader \
 *   --property schema.registry.url=http://localhost:8081 \
 *   --property value.schema='{"type":"string"}'
 *
 * In the shell, type in the following.
 * "a"
 * "b"
 *
 * 2. Send Avro record as value.
 * bin/kafka-console-producer.sh --broker-list localhost:9092 --topic t1 \
 *   --line-reader io.confluent.kafka.formatter.AvroMessageReader \
 *   --property schema.registry.url=http://localhost:8081 \
 *   --property value.schema='{"type":"record","name":"myrecord","fields":[{"name":"f1","type":"string"}]}'
 *
 * In the shell, type in the following.
 * {"f1": "value1"}
 *
 * 3. Send Avro string as key and Avro record as value.
 * bin/kafka-console-producer.sh --broker-list localhost:9092 --topic t1 \
 *   --line-reader io.confluent.kafka.formatter.AvroMessageReader \
 *   --property schema.registry.url=http://localhost:8081 \
 *   --property parse.key=true \
 *   --property key.schema='{"type":"string"}' \
 *   --property value.schema='{"type":"record","name":"myrecord","fields":[{"name":"f1","type":"string"}]}'
 *
 * In the shell, type in the following.
 * "key1" \t {"f1": "value1"}
 */
public class AvroMessageReader extends AbstractKafkaAvroSerializer implements MessageReader {

  private String topic = null;
  private BufferedReader reader = null;
  // Primitive boolean: the boxed Boolean of the original served no purpose.
  private boolean parseKey = false;
  private String keySeparator = "\t";
  private boolean ignoreError = false;
  private final DecoderFactory decoderFactory = DecoderFactory.get();
  private Schema keySchema = null;
  private Schema valueSchema = null;
  private String keySubject = null;
  private String valueSubject = null;

  /**
   * Constructor needed by kafka console producer.
   */
  public AvroMessageReader() {
  }

  /**
   * For testing only.
   */
  AvroMessageReader(SchemaRegistryClient schemaRegistryClient, Schema keySchema,
                    Schema valueSchema, String topic, boolean parseKey, BufferedReader reader) {
    this.schemaRegistry = schemaRegistryClient;
    this.keySchema = keySchema;
    this.valueSchema = valueSchema;
    this.topic = topic;
    this.keySubject = topic + "-key";
    this.valueSubject = topic + "-value";
    this.parseKey = parseKey;
    this.reader = reader;
  }

  /**
   * Initializes the reader from console-producer properties: topic, optional parse.key /
   * key.separator / ignore.error flags, the schema registry url and the Avro schema strings.
   *
   * @throws ConfigException when the registry url or a required schema property is missing
   */
  @Override
  public void init(java.io.InputStream inputStream, java.util.Properties props) {
    topic = props.getProperty("topic");
    if (props.containsKey("parse.key")) {
      // Boolean.parseBoolean is case-insensitive and locale-safe, matching the
      // original trim().toLowerCase().equals("true") semantics.
      parseKey = Boolean.parseBoolean(props.getProperty("parse.key").trim());
    }
    if (props.containsKey("key.separator")) {
      keySeparator = props.getProperty("key.separator");
    }
    if (props.containsKey("ignore.error")) {
      ignoreError = Boolean.parseBoolean(props.getProperty("ignore.error").trim());
    }
    reader = new BufferedReader(new InputStreamReader(inputStream));
    String url = props.getProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG);
    if (url == null) {
      throw new ConfigException("Missing schema registry url!");
    }
    schemaRegistry = new CachedSchemaRegistryClient(
        url, AbstractKafkaAvroSerDeConfig.MAX_SCHEMAS_PER_SUBJECT_DEFAULT);
    if (!props.containsKey("value.schema")) {
      throw new ConfigException("Must provide the Avro schema string in value.schema");
    }
    String valueSchemaString = props.getProperty("value.schema");
    Schema.Parser parser = new Schema.Parser();
    valueSchema = parser.parse(valueSchemaString);
    if (parseKey) {
      if (!props.containsKey("key.schema")) {
        throw new ConfigException("Must provide the Avro schema string in key.schema");
      }
      String keySchemaString = props.getProperty("key.schema");
      keySchema = parser.parse(keySchemaString);
    }
    keySubject = topic + "-key";
    valueSubject = topic + "-value";
  }

  /**
   * Reads one line, converts it to Avro and returns the serialized message; null at end of input.
   * In key-parsing mode the line is split on the first key separator; a missing separator is an
   * error unless ignore.error is set, in which case the whole line is treated as the value.
   *
   * @throws KafkaException on read errors or a missing key when one is required
   */
  @Override
  public kafka.producer.KeyedMessage readMessage() {
    try {
      String line = reader.readLine();
      if (line == null) {
        // End of input stream.
        return null;
      }
      if (!parseKey) {
        return valueOnlyMessage(line);
      }
      int keyIndex = line.indexOf(keySeparator);
      if (keyIndex < 0) {
        if (ignoreError) {
          // Best-effort mode: treat the whole line as the value.
          return valueOnlyMessage(line);
        }
        throw new KafkaException("No key found in line " + line);
      }
      String keyString = line.substring(0, keyIndex);
      String valueString = (keyIndex + keySeparator.length() > line.length())
                           ? ""
                           : line.substring(keyIndex + keySeparator.length());
      Object key = jsonToAvro(keyString, keySchema);
      byte[] serializedKey = serializeImpl(keySubject, key);
      Object value = jsonToAvro(valueString, valueSchema);
      byte[] serializedValue = serializeImpl(valueSubject, value);
      return new KeyedMessage<byte[], byte[]>(topic, serializedKey, serializedValue);
    } catch (IOException e) {
      throw new KafkaException("Error reading from input", e);
    }
  }

  // Serializes a line that carries only a value (no key).
  private KeyedMessage<byte[], byte[]> valueOnlyMessage(String line) {
    Object value = jsonToAvro(line, valueSchema);
    byte[] serializedValue = serializeImpl(valueSubject, value);
    return new KeyedMessage<byte[], byte[]>(topic, serializedValue);
  }

  /**
   * Decodes a JSON string into an Avro datum for the given schema.
   * String-schema results are normalized from Avro's Utf8 to java.lang.String.
   *
   * @throws SerializationException when the JSON does not match the schema
   */
  private Object jsonToAvro(String jsonString, Schema schema) {
    try {
      DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
      Object object = reader.read(null, decoderFactory.jsonDecoder(schema, jsonString));
      // Guarded instanceof instead of a blind cast: the datum reader may already
      // yield a java.lang.String depending on schema string-type configuration.
      if (schema.getType() == Schema.Type.STRING && object instanceof Utf8) {
        object = object.toString();
      }
      return object;
    } catch (IOException e) {
      throw new SerializationException(
          String.format("Error deserializing json %s to Avro of schema %s", jsonString, schema), e);
    } catch (AvroRuntimeException e) {
      throw new SerializationException(
          String.format("Error deserializing json %s to Avro of schema %s", jsonString, schema), e);
    }
  }

  @Override
  public void close() {
    // nothing to do
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.client.impl; import com.google.common.collect.Sets; import io.netty.buffer.ByteBuf; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CancellationException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.apache.bookkeeper.mledger.ManagedLedger; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.pulsar.broker.auth.MockedPulsarServiceBaseTest; import org.apache.pulsar.broker.service.persistent.PersistentTopic; import org.apache.pulsar.client.api.MessageId; import org.apache.pulsar.client.api.MessageRoutingMode; import org.apache.pulsar.client.api.Producer; import org.apache.pulsar.client.api.RawMessage; import org.apache.pulsar.client.api.RawReader; import org.apache.pulsar.common.protocol.Commands; import org.apache.pulsar.common.api.proto.PulsarApi.MessageMetadata; import org.apache.pulsar.common.policies.data.ClusterData; import org.apache.pulsar.common.policies.data.TenantInfo; import org.testng.Assert; import 
org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

/**
 * Integration tests for {@code RawReader} against an in-process Pulsar broker:
 * sequential reads, seeking, flow control, batch handling, cumulative
 * acknowledgement with properties, and cancellation of pending reads on close.
 */
public class RawReaderTest extends MockedPulsarServiceBaseTest {

    // NOTE(review): not referenced in the visible tests — presumably a leftover
    // batching limit; confirm before removing.
    private static final int BATCH_MAX_MESSAGES = 10;
    private static final String subscription = "foobar-sub";

    /** Boots the broker and creates the cluster/tenant/namespace used by every test. */
    @BeforeMethod
    @Override
    public void setup() throws Exception {
        super.internalSetup();

        admin.clusters().createCluster("test",
                new ClusterData("http://127.0.0.1:" + BROKER_WEBSERVICE_PORT));
        admin.tenants().createTenant("my-property",
                new TenantInfo(Sets.newHashSet("appid1", "appid2"), Sets.newHashSet("test")));
        admin.namespaces().createNamespace("my-property/my-ns", Sets.newHashSet("test"));
    }

    @AfterMethod
    @Override
    public void cleanup() throws Exception {
        super.internalCleanup();
    }

    /**
     * Publishes {@code count} non-batched messages keyed "key0".."key{count-1}"
     * and returns the set of keys. Waits only on the last send future; earlier
     * sends are ordered before it on the single partition.
     */
    private Set<String> publishMessages(String topic, int count) throws Exception {
        Set<String> keys = new HashSet<>();

        try (Producer<byte[]> producer = pulsarClient.newProducer()
            .enableBatching(false)
            .messageRoutingMode(MessageRoutingMode.SinglePartition)
            .maxPendingMessages(count)
            .topic(topic)
            .create()) {
            Future<?> lastFuture = null;
            for (int i = 0; i < count; i++) {
                String key = "key"+i;
                byte[] data = ("my-message-" + i).getBytes();
                lastFuture = producer.newMessage().key(key).value(data).sendAsync();
                keys.add(key);
            }
            // Single partition: once the last future completes, all prior sends have too.
            lastFuture.get();
        }
        return keys;
    }

    /** Parses the raw headers+payload buffer and returns the message's partition key. */
    public static String extractKey(RawMessage m) throws Exception {
        ByteBuf headersAndPayload = m.getHeadersAndPayload();
        MessageMetadata msgMetadata = Commands.parseMessageMetadata(headersAndPayload);
        return msgMetadata.getPartitionKey();
    }

    /** Reads every published message exactly once, stopping at the last message id. */
    @Test
    public void testRawReader() throws Exception {
        int numKeys = 10;

        String topic = "persistent://my-property/my-ns/my-raw-topic";

        Set<String> keys = publishMessages(topic, numKeys);

        RawReader reader = RawReader.create(pulsarClient, topic, subscription).get();
        MessageId lastMessageId = reader.getLastMessageIdAsync().get();
        while (true) {
            try (RawMessage m = reader.readNextAsync().get()) {
                // Each key must be seen exactly once (remove fails on duplicates).
                Assert.assertTrue(keys.remove(extractKey(m)));
                if (lastMessageId.compareTo(m.getMessageId()) == 0) {
                    break;
                }
            }
        }
        Assert.assertTrue(keys.isEmpty());
    }

    /** After reading to the end, seeking to earliest must replay all keys. */
    @Test
    public void testSeekToStart() throws Exception {
        int numKeys = 10;
        String topic = "persistent://my-property/my-ns/my-raw-topic";

        publishMessages(topic, numKeys);

        Set<String> readKeys = new HashSet<>();
        RawReader reader = RawReader.create(pulsarClient, topic, subscription).get();
        MessageId lastMessageId = reader.getLastMessageIdAsync().get();
        while (true) {
            try (RawMessage m = reader.readNextAsync().get()) {
                readKeys.add(extractKey(m));
                if (lastMessageId.compareTo(m.getMessageId()) == 0) {
                    break;
                }
            }
        }
        Assert.assertEquals(readKeys.size(), numKeys);

        // seek to start, read all keys again,
        // assert that we read all keys we had read previously
        reader.seekAsync(MessageId.earliest).get();
        while (true) {
            try (RawMessage m = reader.readNextAsync().get()) {
                Assert.assertTrue(readKeys.remove(extractKey(m)));
                if (lastMessageId.compareTo(m.getMessageId()) == 0) {
                    break;
                }
            }
        }
        Assert.assertTrue(readKeys.isEmpty());
    }

    /** Seeking back to the mid-point message must replay exactly the second half. */
    @Test
    public void testSeekToMiddle() throws Exception {
        int numKeys = 10;
        String topic = "persistent://my-property/my-ns/my-raw-topic";

        publishMessages(topic, numKeys);

        Set<String> readKeys = new HashSet<>();
        RawReader reader = RawReader.create(pulsarClient, topic, subscription).get();
        int i = 0;
        MessageId seekTo = null;
        MessageId lastMessageId = reader.getLastMessageIdAsync().get();

        while (true) {
            try (RawMessage m = reader.readNextAsync().get()) {
                i++;
                if (i > numKeys/2) {
                    // Remember the first message of the second half as the seek target.
                    if (seekTo == null) {
                        seekTo = m.getMessageId();
                    }
                    readKeys.add(extractKey(m));
                }
                if (lastMessageId.compareTo(m.getMessageId()) == 0) {
                    break;
                }
            }
        }
        Assert.assertEquals(readKeys.size(), numKeys/2);

        // seek to middle, read all keys again,
        // assert that we read all keys we had read previously
        reader.seekAsync(seekTo).get();
        while (true) { // should break out with TimeoutException
            try (RawMessage m = reader.readNextAsync().get()) {
                Assert.assertTrue(readKeys.remove(extractKey(m)));
                if (lastMessageId.compareTo(m.getMessageId()) == 0) {
                    break;
                }
            }
        }
        Assert.assertTrue(readKeys.isEmpty());
    }

    /**
     * Try to fill the receiver queue, and drain it multiple times
     */
    @Test
    public void testFlowControl() throws Exception {
        int numMessages = RawReaderImpl.DEFAULT_RECEIVER_QUEUE_SIZE * 5;
        String topic = "persistent://my-property/my-ns/my-raw-topic";

        publishMessages(topic, numMessages);

        RawReader reader = RawReader.create(pulsarClient, topic, subscription).get();
        List<Future<RawMessage>> futures = new ArrayList<>();
        Set<String> keys = new HashSet<>();

        // +1 to make sure we read past the end
        for (int i = 0; i < numMessages + 1; i++) {
            futures.add(reader.readNextAsync());
        }
        int timeouts = 0;
        for (Future<RawMessage> f : futures) {
            try (RawMessage m = f.get(1, TimeUnit.SECONDS)) {
                // Assert each key is unique
                String key = extractKey(m);
                Assert.assertTrue(
                    keys.add(key),
                    "Received duplicated key '" + key + "' : already received keys = " + keys);
            } catch (TimeoutException te) {
                timeouts++;
            }
        }
        // Exactly the one extra read should have timed out.
        Assert.assertEquals(timeouts, 1);
        Assert.assertEquals(keys.size(), numMessages);
    }

    /** A 3-message batch must expose its three ids (ordered) and keys. */
    @Test
    public void testBatchingExtractKeysAndIds() throws Exception {
        String topic = "persistent://my-property/my-ns/my-raw-topic";

        try (Producer<byte[]> producer = pulsarClient.newProducer().topic(topic)
            .maxPendingMessages(3)
            .enableBatching(true)
            .batchingMaxMessages(3)
            .batchingMaxPublishDelay(1, TimeUnit.HOURS)
            .messageRoutingMode(MessageRoutingMode.SinglePartition)
            .create()) {
            // Long publish delay + max 3 messages forces all three into one batch,
            // flushed by the blocking send() of the third.
            producer.newMessage().key("key1").value("my-content-1".getBytes()).sendAsync();
            producer.newMessage().key("key2").value("my-content-2".getBytes()).sendAsync();
            producer.newMessage().key("key3").value("my-content-3".getBytes()).send();
        }

        RawReader reader = RawReader.create(pulsarClient, topic, subscription).get();
        try (RawMessage m = reader.readNextAsync().get()) {
            List<ImmutablePair<MessageId,String>> idsAndKeys = RawBatchConverter.extractIdsAndKeys(m);

            Assert.assertEquals(idsAndKeys.size(), 3);

            // assert message ids are in correct order
            Assert.assertTrue(idsAndKeys.get(0).getLeft().compareTo(idsAndKeys.get(1).getLeft()) < 0);
            Assert.assertTrue(idsAndKeys.get(1).getLeft().compareTo(idsAndKeys.get(2).getLeft()) < 0);

            // assert keys are as expected
            Assert.assertEquals(idsAndKeys.get(0).getRight(), "key1");
            Assert.assertEquals(idsAndKeys.get(1).getRight(), "key2");
            Assert.assertEquals(idsAndKeys.get(2).getRight(), "key3");
        } finally {
            reader.closeAsync().get();
        }
    }

    /** Rebatching with a key filter must keep only the matching entry. */
    @Test
    public void testBatchingRebatch() throws Exception {
        String topic = "persistent://my-property/my-ns/my-raw-topic";

        try (Producer<byte[]> producer = pulsarClient.newProducer().topic(topic)
            .maxPendingMessages(3)
            .enableBatching(true)
            .batchingMaxMessages(3)
            .batchingMaxPublishDelay(1, TimeUnit.HOURS)
            .messageRoutingMode(MessageRoutingMode.SinglePartition)
            .create()) {
            producer.newMessage().key("key1").value("my-content-1".getBytes()).sendAsync();
            producer.newMessage().key("key2").value("my-content-2".getBytes()).sendAsync();
            producer.newMessage().key("key3").value("my-content-3".getBytes()).send();
        }

        RawReader reader = RawReader.create(pulsarClient, topic, subscription).get();
        try {
            RawMessage m1 = reader.readNextAsync().get();
            // Keep only "key2" from the 3-entry batch.
            RawMessage m2 = RawBatchConverter.rebatchMessage(m1, (key, id) -> key.equals("key2")).get();
            List<ImmutablePair<MessageId,String>> idsAndKeys = RawBatchConverter.extractIdsAndKeys(m2);
            Assert.assertEquals(idsAndKeys.size(), 1);
            Assert.assertEquals(idsAndKeys.get(0).getRight(), "key2");
            m2.close();
        } finally {
            reader.closeAsync().get();
        }
    }

    /**
     * Cumulative ack with properties must eventually surface those properties
     * on the subscription's managed-ledger cursor.
     */
    @Test
    public void testAcknowledgeWithProperties() throws Exception {
        int numKeys = 10;

        String topic = "persistent://my-property/my-ns/my-raw-topic";

        Set<String> keys = publishMessages(topic, numKeys);

        RawReader reader = RawReader.create(pulsarClient, topic, subscription).get();
        MessageId lastMessageId = reader.getLastMessageIdAsync().get();
        while (true) {
            try (RawMessage m = reader.readNextAsync().get()) {
                Assert.assertTrue(keys.remove(extractKey(m)));
                if (lastMessageId.compareTo(m.getMessageId()) == 0) {
                    break;
                }
            }
        }
        Assert.assertTrue(keys.isEmpty());

        Map<String,Long> properties = new HashMap<>();
        properties.put("foobar", 0xdeadbeefdecaL);
        reader.acknowledgeCumulativeAsync(lastMessageId, properties).get();

        PersistentTopic topicRef = (PersistentTopic) pulsar.getBrokerService().getTopicReference(topic).get();
        ManagedLedger ledger = topicRef.getManagedLedger();

        // Poll (up to ~3s) for the ack's properties to propagate to the cursor.
        for (int i = 0; i < 30; i++) {
            if (ledger.openCursor(subscription).getProperties().get("foobar") == Long.valueOf(0xdeadbeefdecaL)) {
                break;
            }
            Thread.sleep(100);
        }
        Assert.assertEquals(ledger.openCursor(subscription).getProperties().get("foobar"),
                Long.valueOf(0xdeadbeefdecaL));
    }

    /** Closing the reader must cancel all pending (unfulfillable) read futures. */
    @Test
    public void testReadCancellationOnClose() throws Exception {
        int numKeys = 10;

        String topic = "persistent://my-property/my-ns/my-raw-topic";
        // Only half the reads can ever complete.
        publishMessages(topic, numKeys/2);

        RawReader reader = RawReader.create(pulsarClient, topic, subscription).get();
        List<Future<RawMessage>> futures = new ArrayList<>();
        for (int i = 0; i < numKeys; i++) {
            futures.add(reader.readNextAsync());
        }

        for (int i = 0; i < numKeys/2; i++) {
            futures.remove(0).get(); // complete successfully
        }
        reader.closeAsync().get();
        while (!futures.isEmpty()) {
            try {
                futures.remove(0).get();
                Assert.fail("Should have been cancelled");
            } catch (CancellationException ee) {
                // correct behaviour
            }
        }
    }
}
/******************************************************************************* * Copyright 2014 Rafael Garcia Moreno. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.bladecoder.engine.ui; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.BitmapFont; import com.badlogic.gdx.graphics.g2d.GlyphLayout; import com.badlogic.gdx.math.Vector3; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.Touchable; import com.badlogic.gdx.scenes.scene2d.utils.Drawable; import com.badlogic.gdx.utils.Align; import com.badlogic.gdx.utils.ObjectMap; import com.bladecoder.engine.model.Text; import com.bladecoder.engine.model.TextManager; import com.bladecoder.engine.model.World; import com.bladecoder.engine.util.DPIUtils; import com.bladecoder.engine.util.StringUtils; /** * TextManagerUI draws texts and dialogs on screen. * * For now, only one subtitle is displayed in the screen. 
*
 * @author rgarcia
 */
public class TextManagerUI extends Actor {
    private static final float PADDING = DPIUtils.getMarginSize();

    // Max text widths (pixels), recomputed in resize().
    private float maxRectangleWidth;
    private float maxTalkWidth;

    private SceneScreen sceneScreen;
    // Scratch vector for scene->screen coordinate conversion.
    private final Vector3 unprojectTmp = new Vector3();

    private ObjectMap<String, TextManagerUIStyle> styles;

    // Subtitle currently laid out; null when nothing is shown.
    private Text subtitle;
    private final GlyphLayout layout = new GlyphLayout();

    // Screen x at which the font is drawn (distinct from the actor's x,
    // which includes padding/background).
    private float fontX = 0;

    public TextManagerUI(SceneScreen sceneScreen) {
        this.sceneScreen = sceneScreen;
        setTouchable(Touchable.disabled);

        styles = sceneScreen.getUI().getSkin().getAll(TextManagerUIStyle.class);

        // Enable color/markup tags in every style's font.
        for (TextManagerUIStyle style : styles.values()) {
            style.font.getData().markupEnabled = true;
        }

        setVisible(false);
    }

    /**
     * Re-lays-out the actor whenever the current subtitle changes: toggles
     * visibility, converts scene coordinates to screen space, and positions
     * the text (with special handling for TALK bubbles and screen edges).
     */
    @Override
    public void act(float delta) {
        super.act(delta);

        Text currentSubtitle = World.getInstance().getTextManager().getCurrentSubtitle();

        if (subtitle != currentSubtitle) {
            subtitle = currentSubtitle;

            if (currentSubtitle == null && isVisible()) {
                setVisible(false);
            } else if (currentSubtitle != null && !isVisible()) {
                setVisible(true);
            }

            if (isVisible()) {
                float posx = currentSubtitle.x;
                float posy = currentSubtitle.y;

                unprojectTmp.set(posx, posy, 0);
                World.getInstance().getSceneCamera().scene2screen(sceneScreen.getViewport(), unprojectTmp);

                float maxWidth = currentSubtitle.type == Text.Type.TALK?maxTalkWidth:maxRectangleWidth;

                final TextManagerUIStyle style = getStyle(currentSubtitle);

                layout.setText(style.font, currentSubtitle.str, currentSubtitle.color, maxWidth, Align.center, true);

                // Sentinel positions mean "center horizontally".
                if (posx == TextManager.POS_CENTER || posx == TextManager.POS_SUBTITLE) {
                    posx = (sceneScreen.getViewport().getScreenWidth() - layout.width)/2;
                    fontX = (sceneScreen.getViewport().getScreenWidth() - maxWidth)/2;
                } else {
                    posx = unprojectTmp.x;
                    fontX = unprojectTmp.x;
                }

                if (posy == TextManager.POS_CENTER) {
                    posy = (sceneScreen.getViewport().getScreenHeight() - layout.height)/2;
                } else if (posy == TextManager.POS_SUBTITLE) {
                    // Subtitle strip near the bottom of the screen.
                    posy = sceneScreen.getViewport().getScreenHeight() - layout.height - DPIUtils.getMarginSize() * 4;
                } else {
                    posy = unprojectTmp.y;
                }

                setPosition(posx - PADDING, posy - PADDING);
                setSize(layout.width + PADDING * 2, layout.height + PADDING * 2);

                if (currentSubtitle.type == Text.Type.TALK) {
                    if (style.talkBubble != null) {
                        // Leave room below the text for the talk bubble tail.
                        setY(getY() + DPIUtils.getTouchMinSize() / 3 + PADDING);
                    }

                    // TALK text is centered on the speaker's x.
                    setX(getX() - layout.width / 2);
                    fontX = posx - maxWidth / 2;

                    // check if the text exits the screen
                    if (getX() < 0 && getX() > -getWidth()) {
                        setX(0);
                        fontX = getX() + PADDING;
                    } else if (getX() + getWidth() > sceneScreen.getViewport().getScreenWidth() &&
                            getX() + getWidth() < sceneScreen.getViewport().getScreenWidth() + getWidth()) {
                        setX(sceneScreen.getViewport().getScreenWidth() - getWidth());
                        fontX = sceneScreen.getViewport().getScreenWidth() - layout.width / 2 - PADDING - maxWidth / 2;
                    }

                    if (getY() + getHeight() > sceneScreen.getViewport().getScreenHeight()) {
                        setY(sceneScreen.getViewport().getScreenHeight() - getHeight());
                    }
                }
            }
        }
    }

    /** Draws the background (talk bubble or rectangle) and then the laid-out text. */
    @Override
    public void draw(Batch batch, float alpha) {
        batch.setColor(Color.WHITE);

        final TextManagerUIStyle style = getStyle(subtitle);

        if (subtitle.type == Text.Type.TALK) {
            if (style.talkBubble != null) {
                float scale = DPIUtils.getTouchMinSize() / 4 / style.talkBubble.getMinHeight();
                // float bubbleX = getX() + (getWidth()  - style.talkBubble.getMinWidth() * scale)/ 2;

                unprojectTmp.set(subtitle.x, subtitle.y, 0);
                World.getInstance().getSceneCamera().scene2screen(sceneScreen.getViewport(), unprojectTmp);

                float bubbleX = unprojectTmp.x  - style.talkBubble.getMinWidth() * scale / 2;
                float bubbleY = getY() - style.talkBubble.getMinHeight() * scale + 2;

                // Only draw the bubble tail if it falls within the text background.
                if(bubbleX + style.talkBubble.getMinWidth() * scale < getX() + getWidth() && bubbleX > getX())
                    style.talkBubble.draw(batch, bubbleX, bubbleY, style.talkBubble.getMinWidth() * scale,
                        style.talkBubble.getMinHeight() * scale);
            }

            if (style.talkBackground != null) {
                style.talkBackground.draw(batch, getX(), getY(), getWidth(), getHeight());
            }

        } else if (subtitle.type == Text.Type.RECTANGLE) {
            if (style.rectBackground != null) {
                style.rectBackground.draw(batch, getX(), getY(), getWidth(), getHeight());
            }
        }

        style.font.draw(batch, layout, fontX, getY() + PADDING + layout.height);
    }

    /** Recomputes max text widths from the new screen width and the style's font metrics. */
    public void resize(int width, int height) {
        final Text currentSubtitle = subtitle != null ? subtitle : World.getInstance().getTextManager().getCurrentSubtitle();
        final TextManagerUIStyle style = getStyle(currentSubtitle);

        maxRectangleWidth = Math.min(width - DPIUtils.getMarginSize() * 2, style.font.getSpaceWidth() * 80);
        maxTalkWidth = Math.min(width - DPIUtils.getMarginSize() * 2, style.font.getSpaceWidth() * 35);
    }

    /** Resolves the style for a text, falling back to "default" when unset/empty. */
    private TextManagerUIStyle getStyle(Text text) {
        String key = "default";

        if (text != null) {
            key = text.font;
        }

        if (StringUtils.isEmpty(key)) {
            key = "default";
        }

        return styles.get(key);
    }

    /** The style for the TextManagerUI */
    static public class TextManagerUIStyle {
        /** Optional. */
        public Drawable rectBackground;
        public Drawable talkBackground;
        public Drawable talkBubble;
        public BitmapFont font;

        public TextManagerUIStyle() {
        }

        public TextManagerUIStyle(TextManagerUIStyle style) {
            rectBackground = style.rectBackground;
            talkBackground = style.talkBackground;
            talkBubble = style.talkBubble;
            font = style.font;
        }
    }
}
/** Copyright Ryan Ylitalo and BytePerceptions LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.byteperceptions.require.gui.stockpurchase;

import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.List;

import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.border.BevelBorder;
import javax.swing.border.LineBorder;
import javax.swing.border.SoftBevelBorder;
import javax.swing.border.TitledBorder;

import com.byteperceptions.require.model.BankStockRegistry;
import com.byteperceptions.require.model.HotelChain;
import com.byteperceptions.require.model.ManualPlayer;
import com.byteperceptions.require.model.StockShare;
import com.byteperceptions.require.model.TileRegistry;

/**
 * Modal dialog that lets a player buy up to 3 hotel stocks per turn,
 * tracking remaining cash and optionally triggering end-of-game.
 *
 * @author Ryan Ylitalo
 */
public class BuyStocksDialog extends JDialog implements ActionListener
{
  private static final long serialVersionUID = 1L;
  private static final Dimension PREFERRED_SIZE = new Dimension(300, 300);
  private JButton okButton;
  private JButton cancelButton;
  private JButton endGameButton;
  private JLabel availableCashLabel;
  // The three "slots" a player may fill with purchases this turn.
  private List<StocksPurchaseButton> stocksToPurchaseButtons = new ArrayList<StocksPurchaseButton>(
      3);
  // One button per active hotel chain that still has shares available.
  private List<HotelPurchaseButton> availableHotelStocksToPurchase = new ArrayList<HotelPurchaseButton>(
      7);
  private ManualPlayer player;
  // Running cash total as the player adds/removes purchases (UI-only until OK).
  private int totalPlayerCash;

  //This is only a mechanism to return the purchased stocks from the dialog
  private transient List<StockShare> purchasedShares;

  /** Builds the full dialog layout for the given player. Modal, no parent frame. */
  public BuyStocksDialog(ManualPlayer player)
  {
    super((Frame) null, true);
    this.player = player;
    totalPlayerCash = player.getCash();

    this.okButton = new JButton("OK");
    this.okButton.addActionListener(this);
    this.cancelButton = new JButton("Cancel");
    this.cancelButton.addActionListener(this);
    this.endGameButton = new JButton("End Game");
    this.endGameButton.addActionListener(this);

    JPanel stocksPanel = new JPanel();
    stocksPanel.setLayout(new GridLayout(1, 2));
    stocksPanel.add(createAvailableStocksButtonPanel());
    stocksPanel.add(createCurrentlyPurchasedStocksButton());
    stocksPanel.setPreferredSize(PREFERRED_SIZE);
    stocksPanel.setMinimumSize(PREFERRED_SIZE);
    stocksPanel.setSize(PREFERRED_SIZE);
    stocksPanel.setMaximumSize(PREFERRED_SIZE);

    JPanel rootPanel = new JPanel();
    rootPanel.setLayout(new GridBagLayout());
    GridBagConstraints constraints = new GridBagConstraints();
    constraints.gridx = 0;
    constraints.gridy = 0;
    constraints.gridheight = 4;
    constraints.gridwidth = 4;
    constraints.fill = GridBagConstraints.BOTH;
    rootPanel.add(stocksPanel, constraints);

    constraints = new GridBagConstraints();
    constraints.gridx = 0;
    constraints.gridy = 4;
    constraints.gridheight = 1;
    constraints.gridwidth = 4;
    constraints.fill = GridBagConstraints.BOTH;
    rootPanel.add(createAvailableCashPanel(), constraints);

    constraints = new GridBagConstraints();
    constraints.gridx = 0;
    constraints.gridy = 5;
    constraints.gridheight = 1;
    constraints.gridwidth = 4;
    constraints.fill = GridBagConstraints.BOTH;
    rootPanel.add(createButtonPanel(), constraints);

    rootPanel.setPreferredSize(PREFERRED_SIZE);
    rootPanel.setMinimumSize(PREFERRED_SIZE);
    rootPanel.setSize(PREFERRED_SIZE);
    rootPanel.setMaximumSize(PREFERRED_SIZE);

    setContentPane(rootPanel);
    Dimension dialogDimension = new Dimension(325, 400);
    this.setPreferredSize(dialogDimension);
    this.setMinimumSize(dialogDimension);
    this.setSize(dialogDimension);
    this.setMaximumSize(dialogDimension);
  }

  /** Panel showing the player's remaining cash; label is updated on each purchase change. */
  private Component createAvailableCashPanel()
  {
    JPanel buttonPanel = new JPanel();
    availableCashLabel = new JLabel("Cash Available : $" + totalPlayerCash);
    buttonPanel.add(availableCashLabel);
    return buttonPanel;
  }

  /**
   * @return panel holding the three purchase "slot" buttons
   */
  private Component createCurrentlyPurchasedStocksButton()
  {
    JPanel stocksPurchasedPanel = new JPanel();
    stocksPurchasedPanel.setPreferredSize(PREFERRED_SIZE);
    stocksPurchasedPanel.setMinimumSize(PREFERRED_SIZE);
    stocksPurchasedPanel.setSize(PREFERRED_SIZE);
    stocksPurchasedPanel.setMaximumSize(PREFERRED_SIZE);
    stocksPurchasedPanel.setLayout(new GridLayout(3, 0));
    stocksPurchasedPanel.setBorder(new TitledBorder(new SoftBevelBorder(
        BevelBorder.RAISED), "Stock Purchases"));
    // Max of 3 stock purchases per turn.
    for (int i = 0; i < 3; i++)
    {
      StocksPurchaseButton button = new StocksPurchaseButton();
      button.addActionListener(this);
      stocksPurchasedPanel.add(button);
      stocksToPurchaseButtons.add(button);
    }
    return stocksPurchasedPanel;
  }

  /** OK/Cancel row; "End Game" appears only when the registry says the game may end. */
  private JPanel createButtonPanel()
  {
    JPanel buttonPanel = new JPanel();
    buttonPanel.setLayout(new GridLayout(1, 3));
    buttonPanel.setBorder(new TitledBorder(new SoftBevelBorder(
        BevelBorder.RAISED), ""));
    buttonPanel.add(okButton);
    buttonPanel.add(cancelButton);
    if( TileRegistry.getInstance().canEndGame())
    {
      buttonPanel.add(endGameButton);
    }
    return buttonPanel;
  }

  /**
   * @return panel with one purchase button per active chain that has shares in the bank
   */
  private JPanel createAvailableStocksButtonPanel()
  {
    JPanel availableStocksButtonPanel = new JPanel();
    availableStocksButtonPanel.setPreferredSize(PREFERRED_SIZE);
    availableStocksButtonPanel.setMinimumSize(PREFERRED_SIZE);
    availableStocksButtonPanel.setSize(PREFERRED_SIZE);
    availableStocksButtonPanel.setMaximumSize(PREFERRED_SIZE);
    availableStocksButtonPanel.setLayout(new GridLayout(0, 1));
    availableStocksButtonPanel.setBorder(new TitledBorder(
        new SoftBevelBorder(BevelBorder.RAISED), "Available Hotels"));
    for (HotelChain chain : TileRegistry.getInstance()
        .getActiveHotelChains())
    {
      if (BankStockRegistry.getInstance().getNumberOfShares(chain) > 0)
      {
        HotelPurchaseButton button = createStockPurchaseButton(chain);
        setButtonVisibility(button);
        availableHotelStocksToPurchase.add(button);
        availableStocksButtonPanel.add(button);
      }
    }
    return availableStocksButtonPanel;
  }

  /** Enables/disables a hotel button and sets its label based on shares and player cash. */
  private void setButtonVisibility(HotelPurchaseButton button)
  {
    //American is hard to see with black text.
    if(button.getHotelChain() == HotelChain.AMERICAN)
    {
      button.setForeground(Color.WHITE);
    }
    if (button.getSharesRemaining() == 0)
    {
      button.setEnabled(false);
      button.setText("None Available");
    }
    else if (totalPlayerCash < button.getHotelChain().getPrice())
    {
      button.setEnabled(false);
      button.setText("Insufficient Funds");
    }
    else
    {
      button.setEnabled(true);
      button.setText(button.getHotelChain().getLabel() + " $"
          + button.getHotelChain().getPrice());
    }
  }

  /** Creates a bordered purchase button for a chain, wired to this dialog. */
  private HotelPurchaseButton createStockPurchaseButton(HotelChain hotelChain)
  {
    HotelPurchaseButton button = new HotelPurchaseButton(hotelChain);
    button.setBorder(new LineBorder(Color.GRAY));
    button.addActionListener(this);
    return button;
  }

  /**
   * Handles: hotel button -> fill first empty slot and deduct price;
   * slot button -> undo that purchase and refund; OK/Cancel/End Game -> commit/close.
   *
   * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
   */
  @Override
  public void actionPerformed(ActionEvent e)
  {
    if (e.getSource() instanceof HotelPurchaseButton)
    {
      HotelPurchaseButton button = (HotelPurchaseButton) e.getSource();
      HotelChain hotelChain = button.getHotelChain();
      for (StocksPurchaseButton purchaseButton : stocksToPurchaseButtons)
      {
        if (purchaseButton.getHotelPurchaseButton() == null)
        {
          purchaseButton.setHotelPurchaseButton(button);
          totalPlayerCash -= hotelChain.getPrice();
          refreshPurchaseButtons();
          // Only one slot is filled per click.
          return;
        }
      }
    }
    else if (e.getSource() instanceof StocksPurchaseButton)
    {
      StocksPurchaseButton stocksPurchaseButton = (StocksPurchaseButton) e
          .getSource();
      if (stocksPurchaseButton.getHotelPurchaseButton() != null)
      {
        // Undo: return the share and refund the price.
        stocksPurchaseButton.getHotelPurchaseButton()
            .incrementSharesRemaining();
        totalPlayerCash += stocksPurchaseButton
            .getHotelPurchaseButton().getHotelChain().getPrice();
      }
      stocksPurchaseButton.setHotelPurchaseButton(null);
      refreshPurchaseButtons();
    }
    else if (e.getSource() == cancelButton)
    {
      this.dispose();
    }
    else if (e.getSource() == okButton)
    {
      saveStockPurchases();
      this.dispose();
    }
    else if(e.getSource() == endGameButton)
    {
      saveStockPurchases();
      player.setShouldEndGame(true);
      this.dispose();
    }
  }

  /** Re-evaluates every hotel button's state and updates the cash label. */
  private void refreshPurchaseButtons()
  {
    for (HotelPurchaseButton button : availableHotelStocksToPurchase)
    {
      setButtonVisibility(button);
    }
    availableCashLabel.setText("Cash Available : $" + totalPlayerCash);
  }

  /** Commits each filled slot as a brokered share purchase against the bank registry. */
  private void saveStockPurchases()
  {
    purchasedShares = new ArrayList<StockShare>(3);
    for (StocksPurchaseButton purchaseButton : stocksToPurchaseButtons)
    {
      if (purchaseButton.getHotelPurchaseButton() != null)
      {
        StockShare purchasedShare =
            BankStockRegistry.getInstance().brokerSharePurchase(
                purchaseButton.getHotelPurchaseButton().getHotelChain(), player);
        purchasedShares.add(purchasedShare);
      }
    }
  }

  /** @return shares committed via OK/End Game; null if the dialog was cancelled */
  public List<StockShare> getPurchasedShares()
  {
    return purchasedShares;
  }
}
/* Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.aries.blueprint.itests;

import static org.junit.Assert.assertNotNull;
import static org.ops4j.pax.exam.CoreOptions.equinox;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Arrays;

import junit.framework.Assert;

import org.apache.aries.blueprint.testquiescebundle.TestBean;
import org.apache.aries.itest.AbstractIntegrationTest;
import org.apache.aries.quiesce.manager.QuiesceCallback;
import org.apache.aries.quiesce.participant.QuiesceParticipant;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.CoreOptions;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
import org.ops4j.pax.exam.options.BootDelegationOption;
import org.ops4j.pax.exam.options.MavenArtifactProvisionOption;
import org.osgi.framework.Bundle;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.Version;

import static org.apache.aries.itest.ExtraOptions.*;

/**
 * Pax Exam integration tests for blueprint bundle quiescing: bundles with
 * in-flight requests must only report quiesced after those requests complete.
 * NOTE(review): assertions are calibrated against Thread.sleep timings, so the
 * tests are inherently timing-sensitive.
 */
@RunWith(JUnit4TestRunner.class)
public class QuiesceBlueprintTest extends AbstractIntegrationTest {

  /** Callback that counts how many times the quiesce manager reports completion. */
  private static class TestQuiesceCallback implements QuiesceCallback{

    private int calls = 0;

    public synchronized void bundleQuiesced(Bundle... bundlesQuiesced) {
      System.out.println("bundleQuiesced "+ Arrays.toString(bundlesQuiesced));
      calls++;
    }

    public synchronized int getCalls() {
      return calls;
    }
  }

  /** Finds the QuiesceParticipant service registered by the named bundle, or null. */
  private QuiesceParticipant getParticipant(String bundleName) throws InvalidSyntaxException {
    ServiceReference[] refs = bundleContext.getServiceReferences(QuiesceParticipant.class.getName(), null);

    if(refs != null) {
      for(ServiceReference ref : refs) {
        if(ref.getBundle().getSymbolicName().equals(bundleName))
          return (QuiesceParticipant) bundleContext.getService(ref);
        else
          System.out.println(ref.getBundle().getSymbolicName());
      }
    }

    return null;
  }

  /** Pax Exam container configuration: blueprint + quiesce API + test bundles on Equinox. */
  @org.ops4j.pax.exam.junit.Configuration
  public static Option[] configuration() {
    return testOptions(
        paxLogging("DEBUG"),
        transactionBootDelegation(),
        Helper.blueprintBundles(),
        mavenBundle("org.apache.aries.quiesce", "org.apache.aries.quiesce.api"),
        mavenBundle("org.apache.aries.blueprint", "org.apache.aries.blueprint.testbundlea").noStart(),
        mavenBundle("org.apache.aries.blueprint", "org.apache.aries.blueprint.testbundleb").noStart(),
        mavenBundle("org.apache.aries.blueprint", "org.apache.aries.blueprint.testquiescebundle"),
        equinox().version("3.5.0"));
  }

  protected Bundle getBundle(String symbolicName) {
    return getBundle(symbolicName, null);
  }

  /** Looks up an installed bundle by symbolic name and (optionally) exact version. */
  protected Bundle getBundle(String bundleSymbolicName, String version) {
    Bundle result = null;
    for (Bundle b : bundleContext.getBundles()) {
      if (b.getSymbolicName().equals(bundleSymbolicName)) {
        if (version == null
            || b.getVersion().equals(Version.parseVersion(version))) {
          result = b;
          break;
        }
      }
    }
    return result;
  }

  public static BootDelegationOption bootDelegation() {
    return new BootDelegationOption("org.apache.aries.unittest.fixture");
  }

  /** Provisions a bundle at the version resolved from the project build. */
  public static MavenArtifactProvisionOption mavenBundle(String groupId,
      String artifactId) {
    return CoreOptions.mavenBundle().groupId(groupId).artifactId(artifactId)
        .versionAsInProject();
  }

  @Test
  public void testBasicQuieseEmptyCounter() throws Exception {
    //This test checks that a single bundle when called will not quiesce while
    //there is an active request (method sleeps), but will quiesce after the
    //request is completed.

    System.out.println("In testBasicQuieseEmptyCounter");

    Object obj = context().getService(TestBean.class);
    if (obj != null) {
      QuiesceParticipant participant = getParticipant("org.apache.aries.blueprint.core");
      if (participant != null) {
        System.out.println(obj.getClass().getName());
        TestQuiesceCallback callback = new TestQuiesceCallback();

        Bundle bundle = getBundle("org.apache.aries.blueprint.testquiescebundle");

        System.out.println("Got the bundle");

        List<Bundle> bundles = new ArrayList<Bundle>();
        bundles.add(bundle);

        // Keep a request in flight for 2s so quiesce cannot complete immediately.
        Thread t = new Thread(new TestBeanClient((TestBean)obj, 2000));
        t.start();

        System.out.println("Thread Started");

        participant.quiesce(callback, bundles);

        System.out.println("Called Quiesce");

        Thread.sleep(1000);

        Assert.assertTrue("Quiesce callback should not have occurred yet; calls should be 0, but it is "+callback.getCalls(), callback.getCalls()==0);

        t.join();

        System.out.println("After join");

        Assert.assertTrue("Quiesce callback should have occurred once; calls should be 1, but it is "+callback.getCalls(), callback.getCalls()==1);

      } else {
        throw new Exception("No Quiesce Participant found for the blueprint service");
      }
      System.out.println("done");
    } else {
      throw new Exception("No Service returned for " + TestBean.class);
    }
  }

  @Test
  public void testNoServicesQuiesce() throws Exception {
    //This test covers the case where one of the bundles being asked to quiesce has no
    //services. It should be quiesced immediately.

    System.out.println("In testNoServicesQuiesce");
    Object obj = context().getService(TestBean.class);
    if (obj != null) {
      QuiesceParticipant participant = getParticipant("org.apache.aries.blueprint.core");
      if (participant != null) {
        TestQuiesceCallback callbackA = new TestQuiesceCallback();
        TestQuiesceCallback callbackB = new TestQuiesceCallback();

        //bundlea provides the ns handlers, bean processors, interceptors etc for this test.
        Bundle bundlea = getBundle("org.apache.aries.blueprint.testbundlea");
        assertNotNull(bundlea);
        bundlea.start();

        //bundleb has no services and makes use of the extensions provided by bundlea
        Bundle bundleb = getBundle("org.apache.aries.blueprint.testbundleb");
        assertNotNull(bundleb);
        bundleb.start();

        Helper.getBlueprintContainerForBundle(context(), "org.apache.aries.blueprint.testbundleb");

        participant.quiesce(callbackB, Collections.singletonList(getBundle(
            "org.apache.aries.blueprint.testbundleb")));

        System.out.println("Called Quiesce");

        Thread.sleep(200);

        Assert.assertTrue("Quiesce callback B should have occurred; calls should be 1, but it is "+callbackB.getCalls(), callbackB.getCalls()==1);
        Assert.assertTrue("Quiesce callback A should not have occurred yet; calls should be 0, but it is "+callbackA.getCalls(), callbackA.getCalls()==0);

        bundleb.stop();

        participant.quiesce(callbackA, Collections.singletonList(getBundle(
            "org.apache.aries.blueprint.testbundlea")));

        Thread.sleep(1000);

        System.out.println("After second sleep");

        Assert.assertTrue("Quiesce callback A should have occurred once; calls should be 1, but it is "+callbackA.getCalls(), callbackA.getCalls()==1);
        Assert.assertTrue("Quiesce callback B should have occurred once; calls should be 1, but it is "+callbackB.getCalls(), callbackB.getCalls()==1);

      }else{
        throw new Exception("No Quiesce Participant found for the blueprint service");
      }
    }else{
      throw new Exception("No Service returned for " + TestBean.class);
    }
  }

  @Test
  public void testMultiBundleQuiesce() throws Exception {
    //This test covers the case where two bundles are quiesced at the same time.
    //Bundle A should quiesce immediately, quiesce bundle should quiesce after the
    //request has completed.

    System.out.println("In testMultiBundleQuiesce");
    Object obj = context().getService(TestBean.class);
    if (obj != null) {
      QuiesceParticipant participant = getParticipant("org.apache.aries.blueprint.core");
      if (participant != null) {
        TestQuiesceCallback callback = new TestQuiesceCallback();

        //bundlea provides the ns handlers, bean processors, interceptors etc for this test.
        Bundle bundlea = getBundle("org.apache.aries.blueprint.testbundlea");
        assertNotNull(bundlea);
        bundlea.start();

        //quiesce bundle will sleep for a second so will quiesce after that
        Bundle bundleq = getBundle("org.apache.aries.blueprint.testquiescebundle");

        System.out.println("Got the bundle");

        List<Bundle> bundles = new ArrayList<Bundle>();
        bundles.add(bundlea);
        bundles.add(bundleq);

        Thread t = new Thread(new TestBeanClient((TestBean)obj, 1500));
        t.start();

        Thread.sleep(200);
        participant.quiesce(callback, bundles);

        System.out.println("Called Quiesce");

        Thread.sleep(500);

        Assert.assertTrue("Quiesce callback should have occurred once for bundle a but not for bundle q; calls should be 1, but it is "+callback.getCalls(), callback.getCalls()==1);

        Thread.sleep(1500);

        System.out.println("After second sleep");

        Assert.assertTrue("Quiesce callback should have occurred twice, once for bundle a and q respectively; calls should be 2, but it is "+callback.getCalls(), callback.getCalls()==2);

      }else{
        throw new Exception("No Quiesce Participant found for the blueprint service");
      }
    }else{
      throw new Exception("No Service returned for " + TestBean.class);
    }
  }

  @Test
  public void testMultiRequestQuiesce() throws Exception {
    //This test covers the case where we have two active requests when
    //the bundle is being quiesced.

    System.out.println("In testMultiRequestQuiesce");
    Object obj = context().getService(TestBean.class);
    if (obj != null) {
      QuiesceParticipant participant = getParticipant("org.apache.aries.blueprint.core");
      if (participant != null) {
        TestQuiesceCallback callback = new TestQuiesceCallback();
        TestBeanClient client = new TestBeanClient((TestBean)obj, 1500);

        //quiesce bundle will sleep for a second so will quiesce after that
        Bundle bundle = getBundle("org.apache.aries.blueprint.testquiescebundle");

        System.out.println("Got the bundle");

        List<Bundle> bundles = new ArrayList<Bundle>();
        bundles.add(bundle);

        Thread t = new Thread(client);
        t.start();

        participant.quiesce(callback, bundles);

        System.out.println("Called Quiesce, putting in a new request");
        // Second request arrives after quiesce has started; it must still be
        // drained before the bundle reports quiesced.
        Thread t2 = new Thread(client);
        t2.start();

        Thread.sleep(5000);

        Assert.assertTrue("Quiesce callback should have occurred once; calls should be 1, but it is "+callback.getCalls(), callback.getCalls()==1);

      }else{
        throw new Exception("No Quiesce Participant found for the blueprint service");
      }
    }else{
      throw new Exception("No Service returned for " + TestBean.class);
    }
  }

  /** Runnable that holds a TestBean request open for {@code time} ms. */
  private class TestBeanClient implements Runnable {
    private final TestBean myService;
    private final int time;

    public TestBeanClient(TestBean myService, int time) {
      this.myService = myService;
      this.time = time;
    }

    public void run() {
      try {
        System.out.println("In Test Bean Client - Sleeping zzzzzzz");
        myService.sleep(time);
        System.out.println("Woken up");
      } catch (InterruptedException ie) {
        ie.printStackTrace();
      }
    }
  }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.core.operations.modify; import static org.apache.directory.server.core.integ.IntegrationUtils.getSchemaContext; import static org.apache.directory.server.core.integ.IntegrationUtils.getSystemContext; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import javax.naming.NameNotFoundException; import javax.naming.NamingException; import javax.naming.NoPermissionException; import javax.naming.directory.Attribute; import javax.naming.directory.AttributeInUseException; import javax.naming.directory.Attributes; import javax.naming.directory.BasicAttribute; import javax.naming.directory.BasicAttributes; import javax.naming.directory.DirContext; import javax.naming.directory.InvalidAttributeValueException; import javax.naming.directory.InvalidAttributesException; import javax.naming.directory.ModificationItem; import javax.naming.directory.SchemaViolationException; import javax.naming.ldap.LdapContext; import org.apache.directory.api.util.StringConstants; import org.apache.directory.server.core.annotations.ApplyLdifs; import org.apache.directory.server.core.annotations.CreateDS; 
import org.apache.directory.server.core.integ.AbstractLdapTestUnit;
import org.apache.directory.server.core.integ.FrameworkRunner;
import org.junit.Test;
import org.junit.runner.RunWith;


/**
 * Tests the modify() methods of the provider.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
@RunWith ( FrameworkRunner.class )
@CreateDS(name = "ModifyAddIT")
@ApplyLdifs(
    {
        // A custom integer attribute type, plus a set of test entries that the
        // individual tests below modify.
        "dn: m-oid=2.2.0, ou=attributeTypes, cn=apachemeta, ou=schema",
        "objectclass: metaAttributeType",
        "objectclass: metaTop",
        "objectclass: top",
        "m-oid: 2.2.0",
        "m-name: integerAttribute",
        "m-description: the precursor for all integer attributes",
        "m-equality: integerMatch",
        "m-ordering: integerOrderingMatch",
        "m-syntax: 1.3.6.1.4.1.1466.115.121.1.27",
        "m-length: 0",
        "",
        "dn: ou=testing00,ou=system",
        "objectClass: top",
        "objectClass: organizationalUnit",
        "objectClass: extensibleObject",
        "ou: testing00",
        "integerAttribute: 0",
        "",
        "dn: ou=testing01,ou=system",
        "objectClass: top",
        "objectClass: organizationalUnit",
        "objectClass: extensibleObject",
        "ou: testing01",
        "integerAttribute: 1",
        "",
        "dn: ou=testing02,ou=system",
        "objectClass: top",
        "objectClass: organizationalUnit",
        "objectClass: extensibleObject",
        "ou: testing02",
        "integerAttribute: 2",
        "",
        "dn: ou=testing03,ou=system",
        "objectClass: top",
        "objectClass: organizationalUnit",
        "objectClass: extensibleObject",
        "ou: testing03",
        "integerAttribute: 3",
        "",
        "dn: ou=testing04,ou=system",
        "objectClass: top",
        "objectClass: organizationalUnit",
        "objectClass: extensibleObject",
        "ou: testing04",
        "integerAttribute: 4",
        "",
        "dn: ou=testing05,ou=system",
        "objectClass: top",
        "objectClass: organizationalUnit",
        "objectClass: extensibleObject",
        "ou: testing05",
        "integerAttribute: 5",
        "",
        "dn: ou=subtest,ou=testing01,ou=system",
        "objectClass: top",
        "objectClass: organizationalUnit",
        "ou: subtest",
        "",
        "dn: cn=Heather Nova, ou=system",
        "objectClass: top",
        "objectClass: person",
        "cn: Heather Nova",
        "sn: Nova",
        "telephoneNumber: 1 801 555 1212 ",
        "description: an American singer-songwriter",
        "",
        "dn: cn=with-dn, ou=system",
        "objectClass: top",
        "objectClass: person",
        "objectClass: organizationalPerson",
        "objectClass: inetorgPerson",
        "cn: singer",
        "sn: manager",
        "telephoneNumber: 1 801 555 1212 ",
        "manager: cn=Heather Nova, ou=system"
    } )
public class ModifyAddIT extends AbstractLdapTestUnit
{
    // Must match the "description" value of the cn=Heather Nova LDIF entry above.
    private static final String PERSON_DESCRIPTION = "an American singer-songwriter";
    private static final String RDN_HEATHER_NOVA = "cn=Heather Nova";


    /**
     * Sanity-checks the entries injected by @ApplyLdifs, enables the nis schema
     * if it is disabled, and creates a handful of posixGroup entries used below.
     *
     * @param sysRoot the system root to add entries to
     * @throws Exception on errors
     */
    protected void createData( LdapContext sysRoot ) throws Exception
    {
        /*
         * Check ou=testing00,ou=system
         */
        DirContext ctx = ( DirContext ) sysRoot.lookup( "ou=testing00" );
        assertNotNull( ctx );
        Attributes attributes = ctx.getAttributes( "" );
        assertNotNull( attributes );
        assertEquals( "testing00", attributes.get( "ou" ).get() );
        Attribute attribute = attributes.get( "objectClass" );
        assertNotNull( attribute );
        assertTrue( attribute.contains( "top" ) );
        assertTrue( attribute.contains( "organizationalUnit" ) );

        /*
         * check ou=testing01,ou=system
         */
        ctx = ( DirContext ) sysRoot.lookup( "ou=testing01" );
        assertNotNull( ctx );
        attributes = ctx.getAttributes( "" );
        assertNotNull( attributes );
        assertEquals( "testing01", attributes.get( "ou" ).get() );
        attribute = attributes.get( "objectClass" );
        assertNotNull( attribute );
        assertTrue( attribute.contains( "top" ) );
        assertTrue( attribute.contains( "organizationalUnit" ) );

        /*
         * Check ou=testing02,ou=system
         */
        ctx = ( DirContext ) sysRoot.lookup( "ou=testing02" );
        assertNotNull( ctx );
        attributes = ctx.getAttributes( "" );
        assertNotNull( attributes );
        assertEquals( "testing02", attributes.get( "ou" ).get() );
        attribute = attributes.get( "objectClass" );
        assertNotNull( attribute );
        assertTrue( attribute.contains( "top" ) );
        assertTrue( attribute.contains( "organizationalUnit" ) );

        /*
         * Check ou=subtest,ou=testing01,ou=system
         */
        ctx = ( DirContext ) sysRoot.lookup( "ou=subtest,ou=testing01" );
        assertNotNull( ctx );
        attributes = ctx.getAttributes( "" );
        assertNotNull( attributes );
        assertEquals( "subtest", attributes.get( "ou" ).get() );
        attribute = attributes.get( "objectClass" );
        assertNotNull( attribute );
        assertTrue( attribute.contains( "top" ) );
        assertTrue( attribute.contains( "organizationalUnit" ) );

        /*
         * Check entry cn=Heather Nova, ou=system
         */
        ctx = ( DirContext ) sysRoot.lookup( RDN_HEATHER_NOVA );
        assertNotNull( ctx );

        // -------------------------------------------------------------------
        // Enable the nis schema
        // -------------------------------------------------------------------

        // check if nis is disabled
        LdapContext schemaRoot = getSchemaContext( getService() );
        Attributes nisAttrs = schemaRoot.getAttributes( "cn=nis" );
        boolean isNisDisabled = false;

        if ( nisAttrs.get( "m-disabled" ) != null )
        {
            isNisDisabled = ( ( String ) nisAttrs.get( "m-disabled" ).get() ).equalsIgnoreCase( "TRUE" );
        }

        // if nis is disabled then enable it
        if ( isNisDisabled )
        {
            // Removing the m-disabled attribute re-enables the schema.
            Attribute disabled = new BasicAttribute( "m-disabled" );
            ModificationItem[] mods = new ModificationItem[]
                { new ModificationItem( DirContext.REMOVE_ATTRIBUTE, disabled ) };
            schemaRoot.modifyAttributes( "cn=nis", mods );
        }

        // -------------------------------------------------------------------
        // Add a bunch of nis groups
        // -------------------------------------------------------------------
        addNisPosixGroup( "testGroup0", 0 );
        addNisPosixGroup( "testGroup1", 1 );
        addNisPosixGroup( "testGroup2", 2 );
        addNisPosixGroup( "testGroup4", 4 );
        addNisPosixGroup( "testGroup5", 5 );
    }


    /**
     * Create a NIS group
     */
    private DirContext addNisPosixGroup( String name, int gid ) throws Exception
    {
        Attributes attrs = new BasicAttributes( "objectClass", "top", true );
        attrs.get( "objectClass" ).add( "posixGroup" );
        attrs.put( "cn", name );
        attrs.put( "gidNumber", String.valueOf( gid ) );

        return getSystemContext( getService() ).createSubcontext( "cn=" + name + ",ou=groups", attrs );
    }


    //---------------------------------------------------------------------------------------------
    // Add operation
    //---------------------------------------------------------------------------------------------
    // 1 Entry exists
    //  1.1 AT does not exist.
    //    - new valid Ava, new AT is in MAY
    //    - new valid Ava, new AT is not in MAY => error
    //    - new valid Ava, new AT is not in MAY, but OC contains extensibleOC
    //    - new valid Ava, new AT is not in MAY, but OC contains extensibleOC, legal empty value
    //    - new invalid Ava, not existing AT => error
    //    - new invalid Ava, existing AT, two values in a single valued AT => error
    //    - new invalid Ava, not existing AT, extensibleObject in OCs => error
    //    - new invalid Ava (Value is invalid per syntax), AT is in MAY => error
    //    - new invalid Ava (Value is invalid per syntax), AT is not in MAY, but OC contains extensibleOC => error
    //    - new OperationalAttribute => error
    //    - new OperationalAttribute, OC contains extensibleOC => error
    //---------------------------------------------------------------------------------------------

    /**
     * Add a new AT with a valid Value in the entry, the AT is part of the MAY
     */
    @Test
    public void testModifyAddExistingEntryNotExistingATValidAVA() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // A new description attribute value
        String newValue = "ou=test";

        Attributes attrs = new BasicAttributes( "seeAlso", newValue, true );

        sysRoot.modifyAttributes( RDN_HEATHER_NOVA, DirContext.ADD_ATTRIBUTE, attrs );

        // Verify that the attribute value has been added
        attrs = sysRoot.getAttributes( RDN_HEATHER_NOVA );
        Attribute attr = attrs.get( "seeAlso" );
        assertNotNull( attr );
        assertTrue( attr.contains( newValue ) );
        assertEquals( 1, attr.size() );
    }


    /**
     * Add a new AT with a valid Value in the entry, the AT is not part of the MAY or MUST,
     * and the OC does not contain the extensibleObject OC
     */
    @Test( expected = SchemaViolationException.class )
    public void testModifyAddExistingEntryNotExistingATNotInMayValidAVA() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // A valid AT not in MUST or MAY
        Attributes attrs = new BasicAttributes( "crossCertificatePair", "12345", true );

        sysRoot.modifyAttributes( RDN_HEATHER_NOVA, DirContext.ADD_ATTRIBUTE, attrs );
    }


    /**
     * Add a new AT with a valid Value in the entry, the AT is not part of the MAY or MUST,
     * and the OC contains the extensibleObject OC
     */
    @Test
    public void testModifyAddExistingEntryNotExistingATNotInMayExtensibleObjectOCValidAVA() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // A valid AT not in MUST or MAY, but the extensibleObject OC is present in the OCs
        Attributes attrs = new BasicAttributes( "crossCertificatePair", "12345", true );

        // Add the Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );

        // Verify that the attribute value has been added
        attrs = sysRoot.getAttributes( "ou=testing01" );
        Attribute attr = attrs.get( "crossCertificatePair" );
        assertNotNull( attr );
        // crossCertificatePair is binary, hence the getBytes() comparison.
        assertTrue( attr.contains( "12345".getBytes() ) );
        assertEquals( 1, attr.size() );
    }


    /**
     * Add a new AT with an empty Value in the entry, the AT is not part of the MAY or MUST,
     * and the OC contains the extensibleObject OC
     */
    @Test
    public void testModifyAddExistingEntryNotExistingAtEmptyValue() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // A valid AT not in MUST or MAY, but the extensibleObject OC is present in the OCs
        // The value is empty
        Attributes attrs = new BasicAttributes( "crossCertificatePair", StringConstants.EMPTY_BYTES, true );

        // Add the Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );

        // Verify that the attribute value has been added
        attrs = sysRoot.getAttributes( "ou=testing01" );
        Attribute attr = attrs.get( "crossCertificatePair" );
        assertNotNull( attr );
        assertTrue( attr.contains( StringConstants.EMPTY_BYTES ) );
        assertEquals( 1, attr.size() );
    }


    /**
     * Add a new single valued AT with 2 Values in the entry
     */
    @Test( expected = InvalidAttributeValueException.class )
    public void testModifyAddExistingEntrySingleValuedATWithTwoValues() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // Build a single-valued AT ("c", countryName) carrying two values.
        Attribute attr = new BasicAttribute( "c" );
        attr.add( "FR" );
        attr.add( "US" );
        Attributes attrs = new BasicAttributes( "c", true );
        attrs.put( attr );

        // Add the Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );
    }


    /**
     * Add a bad AT in the entry, the OC does not contain the extensibleObject OC
     */
    @Test( expected = InvalidAttributesException.class )
    public void testModifyAddExistingEntryNotExistingATInvalidAVA() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // An invalid AT
        Attributes attrs = new BasicAttributes( "badAttr", "12345", true );

        sysRoot.modifyAttributes( RDN_HEATHER_NOVA, DirContext.ADD_ATTRIBUTE, attrs );
    }


    /**
     * Add a bad AT in the entry, the OC contains the extensibleObject OC
     */
    @Test( expected = InvalidAttributesException.class )
    public void testModifyAddExistingEntryNotExistingATInvalidAVAExtensibleObjectInOcs() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // An invalid AT
        Attributes attrs = new BasicAttributes( "badAttr", "12345", true );

        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );
    }


    /**
     * Add a AT part of the MAY/MUST, with an invalid value
     */
    @Test( expected = InvalidAttributeValueException.class )
    public void testModifyAddExistingEntryExistingATInvalidValue() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // An invalid AT value ("seeAlso" requires a DN syntax; "AAA" is not a DN)
        Attributes attrs = new BasicAttributes( "seeAlso", "AAA", true );

        sysRoot.modifyAttributes( RDN_HEATHER_NOVA, DirContext.ADD_ATTRIBUTE, attrs );
    }


    /**
     * Add a AT not part of the MAY/MUST, with an invalid value, in an entry with the
     * extensibleObject OC
     */
    @Test( expected = InvalidAttributeValueException.class )
    public void testModifyAddExistingEntryExistingATInvalidValueExtensibleObjectInOcs() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // An invalid AT value
        Attributes attrs = new BasicAttributes( "mobile", "AAA", true );

        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );
    }


    /**
     * Add an operational AT in an entry with no extensibleObject OC
     */
    @Test( expected = NoPermissionException.class )
    public void testModifyAddExistingEntryOperationalAttribute() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // An operationalAttribute
        Attributes attrs = new BasicAttributes( "subschemaSubentry", "cn=anotherSchema", true );

        sysRoot.modifyAttributes( RDN_HEATHER_NOVA, DirContext.ADD_ATTRIBUTE, attrs );
    }


    /**
     * Add an operational AT in an entry the extensibleObject OC
     */
    @Test( expected = NoPermissionException.class )
    public void testModifyAddExistingEntryOperationalAttributeExtensibleObjectInOcs() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // An operational attribute
        Attributes attrs = new BasicAttributes( "subschemaSubentry", "cn=anotherSchema", true );

        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );
    }


    //---------------------------------------------------------------------------------------------
    // 1 Entry exists
    //  1.2 AT exists.
    //---------------------------------------------------------------------------------------------

    /**
     * Add a new AT with a valid Value in the entry, the AT is part of the MAY
     */
    @Test
    public void testModifyAddExistingEntryExistingATValidAVA() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // A new description attribute value
        String newValue = "test";

        Attributes attrs = new BasicAttributes( "description", newValue, true );

        sysRoot.modifyAttributes( RDN_HEATHER_NOVA, DirContext.ADD_ATTRIBUTE, attrs );

        // Verify that the attribute value has been added
        attrs = sysRoot.getAttributes( RDN_HEATHER_NOVA );
        Attribute attr = attrs.get( "description" );
        assertNotNull( attr );
        assertTrue( attr.contains( newValue ) );
        assertTrue( attr.contains( PERSON_DESCRIPTION ) );
        assertEquals( 2, attr.size() );
    }


    /**
     * Add a new AT with a valid Value in the entry, the AT is part of the MAY,
     * the value already exists
     */
    @Test( expected = AttributeInUseException.class )
    public void testModifyAddExistingEntryExistingATExistingValue() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        Attributes attrs = new BasicAttributes( "description", PERSON_DESCRIPTION, true );

        sysRoot.modifyAttributes( RDN_HEATHER_NOVA, DirContext.ADD_ATTRIBUTE, attrs );
    }


    /**
     * Add an empty value in an existing AT in the entry, the AT is not part of the MAY or MUST,
     * and the OC contains the extensibleObject OC
     */
    @Test
    public void testModifyAddExistingEntryExistingAtEmptyValue() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        Attributes attrs = new BasicAttributes( "crossCertificatePair", "12345".getBytes(), true );

        // Add the first Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );

        attrs = new BasicAttributes( "crossCertificatePair", StringConstants.EMPTY_BYTES, true );

        // Add the second Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );

        // Verify that the attribute value has been added
        attrs = sysRoot.getAttributes( "ou=testing01" );
        Attribute attr = attrs.get( "crossCertificatePair" );
        assertNotNull( attr );
        assertTrue( attr.contains( "12345".getBytes() ) );
        assertTrue( attr.contains( StringConstants.EMPTY_BYTES ) );
        assertEquals( 2, attr.size() );
    }


    /**
     * Add a new value in a single valued AT
     */
    @Test( expected = InvalidAttributeValueException.class )
    public void testModifyAddExistingEntryExistingSingleValuedAT() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // The initial value
        Attributes attrs = new BasicAttributes( "c", "FR", true );

        // Add the Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );

        // Add another value
        Attributes attrs2 = new BasicAttributes( "c", "US", true );

        // Add the Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs2 );
    }


    /**
     * Add the existing value in a single valued AT
     */
    @Test( expected = AttributeInUseException.class )
    public void testModifyAddExistingEntryExistingSingleValuedATExistingValue() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // The initial value
        Attributes attrs = new BasicAttributes( "c", "FR", true );

        // Add the Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );

        // Add another value
        Attributes attrs2 = new BasicAttributes( "c", "FR", true );

        // Add the Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs2 );
    }


    /**
     * Add an invalid value in an existing AT
     */
    @Test( expected = InvalidAttributeValueException.class )
    public void testModifyAddExistingEntryExistingATBadValue() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // The added value ("telephoneNumber" syntax rejects "BAD")
        Attributes attrs = new BasicAttributes( "telephoneNumber", "BAD", true );

        // Add the Ava
        sysRoot.modifyAttributes( "ou=testing01", DirContext.ADD_ATTRIBUTE, attrs );
    }


    //---------------------------------------------------------------------------------------------
    // 1 Entry exists
    //  1.3 Entry is an alias
    //---------------------------------------------------------------------------------------------

    //---------------------------------------------------------------------------------------------
    // 1 Entry exists
    //  1.4 Entry is a referral.
    //---------------------------------------------------------------------------------------------

    //---------------------------------------------------------------------------------------------
    // 1 Entry exists
    //  1.2 Entry is a schema element.
    //---------------------------------------------------------------------------------------------

    //---------------------------------------------------------------------------------------------
    // 1 Entry exists
    //  1.2 The added AT is ObjectClass.
    //---------------------------------------------------------------------------------------------

    //---------------------------------------------------------------------------------------------
    // 2 Entry does not exist
    //---------------------------------------------------------------------------------------------

    /**
     * Add an AT in an entry which does not exist
     */
    @Test( expected = NameNotFoundException.class )
    public void testModifyAddNotExistingEntry() throws Exception
    {
        LdapContext sysRoot = getSystemContext( getService() );
        createData( sysRoot );

        // An operational attribute
        Attributes attrs = new BasicAttributes( "cn", "test", true );

        sysRoot.modifyAttributes( "ou=absent", DirContext.ADD_ATTRIBUTE, attrs );
    }
}
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.plugins.camera; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.app.Activity; import android.content.Context; import android.graphics.ImageFormat; import android.graphics.SurfaceTexture; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraManager; import android.hardware.camera2.CameraMetadata; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.TotalCaptureResult; import android.hardware.camera2.params.OutputConfiguration; import android.hardware.camera2.params.SessionConfiguration; import android.media.CamcorderProfile; import android.media.Image; import android.media.ImageReader; import android.media.MediaRecorder; import android.os.Build; import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.os.Handler; import android.os.HandlerThread; import android.os.Looper; import android.util.Log; import android.util.Size; import android.view.Display; import android.view.Surface; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import io.flutter.embedding.engine.systemchannels.PlatformChannel; import io.flutter.plugin.common.EventChannel; import io.flutter.plugin.common.MethodChannel; import io.flutter.plugin.common.MethodChannel.Result; import io.flutter.plugins.camera.features.CameraFeature; import io.flutter.plugins.camera.features.CameraFeatureFactory; import io.flutter.plugins.camera.features.CameraFeatures; import io.flutter.plugins.camera.features.Point; import io.flutter.plugins.camera.features.autofocus.AutoFocusFeature; import 
io.flutter.plugins.camera.features.autofocus.FocusMode; import io.flutter.plugins.camera.features.exposurelock.ExposureLockFeature; import io.flutter.plugins.camera.features.exposurelock.ExposureMode; import io.flutter.plugins.camera.features.exposureoffset.ExposureOffsetFeature; import io.flutter.plugins.camera.features.exposurepoint.ExposurePointFeature; import io.flutter.plugins.camera.features.flash.FlashFeature; import io.flutter.plugins.camera.features.flash.FlashMode; import io.flutter.plugins.camera.features.focuspoint.FocusPointFeature; import io.flutter.plugins.camera.features.resolution.ResolutionFeature; import io.flutter.plugins.camera.features.resolution.ResolutionPreset; import io.flutter.plugins.camera.features.sensororientation.DeviceOrientationManager; import io.flutter.plugins.camera.features.sensororientation.SensorOrientationFeature; import io.flutter.plugins.camera.features.zoomlevel.ZoomLevelFeature; import io.flutter.plugins.camera.media.MediaRecorderBuilder; import io.flutter.plugins.camera.types.CameraCaptureProperties; import io.flutter.plugins.camera.types.CaptureTimeoutsWrapper; import io.flutter.view.TextureRegistry.SurfaceTextureEntry; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.Executors; @FunctionalInterface interface ErrorCallback { void onError(String errorCode, String errorMessage); } class Camera implements CameraCaptureCallback.CameraCaptureStateListener, ImageReader.OnImageAvailableListener { private static final String TAG = "Camera"; private static final HashMap<String, Integer> supportedImageFormats; // Current supported outputs. 
static {
  supportedImageFormats = new HashMap<>();
  supportedImageFormats.put("yuv420", ImageFormat.YUV_420_888);
  supportedImageFormats.put("jpeg", ImageFormat.JPEG);
}

/**
 * Holds all of the camera features/settings and will be used to update the request builder when
 * one changes.
 */
private final CameraFeatures cameraFeatures;

private final SurfaceTextureEntry flutterTexture;
private final boolean enableAudio;
private final Context applicationContext;
private final DartMessenger dartMessenger;
private final CameraProperties cameraProperties;
private final CameraFeatureFactory cameraFeatureFactory;
private final Activity activity;

/** A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture. */
private final CameraCaptureCallback cameraCaptureCallback;

/** A {@link Handler} for running tasks in the background. */
private Handler backgroundHandler;

/** An additional thread for running tasks that shouldn't block the UI. */
private HandlerThread backgroundHandlerThread;

// Null until open() succeeds; nulled again in close().
private CameraDevice cameraDevice;
private CameraCaptureSession captureSession;
// Reader for still (JPEG) captures.
private ImageReader pictureImageReader;
// Reader for the Dart-side image stream.
private ImageReader imageStreamReader;

/** {@link CaptureRequest.Builder} for the camera preview */
private CaptureRequest.Builder previewRequestBuilder;

private MediaRecorder mediaRecorder;

/** True when recording video. */
private boolean recordingVideo;

/** True when the preview is paused. */
private boolean pausedPreview;

// Destination for the in-flight still capture or video recording.
private File captureFile;

/** Holds the current capture timeouts */
private CaptureTimeoutsWrapper captureTimeouts;

/** Holds the last known capture properties */
private CameraCaptureProperties captureProps;

// Result for the in-flight takePicture() call; completed from onImageAvailable.
private MethodChannel.Result flutterResult;

/**
 * Creates the camera controller and immediately starts its background handler thread.
 *
 * @throws IllegalStateException if {@code activity} is null.
 */
public Camera(
    final Activity activity,
    final SurfaceTextureEntry flutterTexture,
    final CameraFeatureFactory cameraFeatureFactory,
    final DartMessenger dartMessenger,
    final CameraProperties cameraProperties,
    final ResolutionPreset resolutionPreset,
    final boolean enableAudio) {
  if (activity == null) {
    throw new IllegalStateException("No activity available!");
  }
  this.activity = activity;
  this.enableAudio = enableAudio;
  this.flutterTexture = flutterTexture;
  this.dartMessenger = dartMessenger;
  this.applicationContext = activity.getApplicationContext();
  this.cameraProperties = cameraProperties;
  this.cameraFeatureFactory = cameraFeatureFactory;
  this.cameraFeatures =
      CameraFeatures.init(
          cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset);

  // Create capture callback.
  captureTimeouts = new CaptureTimeoutsWrapper(3000, 3000);
  captureProps = new CameraCaptureProperties();
  cameraCaptureCallback = CameraCaptureCallback.create(this, captureTimeouts, captureProps);

  startBackgroundThread();
}

// AE precapture has converged; proceed with the actual still capture.
@Override
public void onConverged() {
  takePictureAfterPrecapture();
}

// Capture state machine requests the precapture sequence to be (re)run.
@Override
public void onPrecapture() {
  runPrecaptureSequence();
}

/**
 * Updates the builder settings with all of the available features.
 *
 * @param requestBuilder request builder to update.
 */
private void updateBuilderSettings(CaptureRequest.Builder requestBuilder) {
  for (CameraFeature feature : cameraFeatures.getAllFeatures()) {
    Log.d(TAG, "Updating builder with feature: " + feature.getDebugName());
    feature.updateBuilder(requestBuilder);
  }
}

/**
 * (Re)creates {@link #mediaRecorder} configured for the current recording profile, audio
 * setting and device/locked orientation. Any previous recorder is released first.
 *
 * @param outputFilePath destination file for the recording.
 * @throws IOException if the underlying MediaRecorderBuilder fails to prepare.
 */
private void prepareMediaRecorder(String outputFilePath) throws IOException {
  Log.i(TAG, "prepareMediaRecorder");

  if (mediaRecorder != null) {
    mediaRecorder.release();
  }

  final PlatformChannel.DeviceOrientation lockedOrientation =
      ((SensorOrientationFeature) cameraFeatures.getSensorOrientation())
          .getLockedCaptureOrientation();

  mediaRecorder =
      new MediaRecorderBuilder(getRecordingProfile(), outputFilePath)
          .setEnableAudio(enableAudio)
          .setMediaOrientation(
              lockedOrientation == null
                  ? getDeviceOrientationManager().getVideoOrientation()
                  : getDeviceOrientationManager().getVideoOrientation(lockedOrientation))
          .build();
}

/**
 * Opens the camera device asynchronously: creates the JPEG and stream image readers, then
 * requests the device from the {@link CameraManager}. Success/failure is reported to Dart via
 * {@link DartMessenger} from the state callback (on the background handler).
 *
 * @param imageFormatGroup requested stream format name ("yuv420"/"jpeg"); falls back to yuv420.
 * @throws CameraAccessException if the camera service cannot be reached.
 */
@SuppressLint("MissingPermission")
public void open(String imageFormatGroup) throws CameraAccessException {
  final ResolutionFeature resolutionFeature = cameraFeatures.getResolution();

  if (!resolutionFeature.checkIsSupported()) {
    // Tell the user that the camera they are trying to open is not supported,
    // as its {@link android.media.CamcorderProfile} cannot be fetched due to the name
    // not being a valid parsable integer.
    dartMessenger.sendCameraErrorEvent(
        "Camera with name \""
            + cameraProperties.getCameraName()
            + "\" is not supported by this plugin.");
    return;
  }

  // Always capture using JPEG format.
  pictureImageReader =
      ImageReader.newInstance(
          resolutionFeature.getCaptureSize().getWidth(),
          resolutionFeature.getCaptureSize().getHeight(),
          ImageFormat.JPEG,
          1);

  // For image streaming, use the provided image format or fall back to YUV420.
  Integer imageFormat = supportedImageFormats.get(imageFormatGroup);
  if (imageFormat == null) {
    Log.w(TAG, "The selected imageFormatGroup is not supported by Android. Defaulting to yuv420");
    imageFormat = ImageFormat.YUV_420_888;
  }
  imageStreamReader =
      ImageReader.newInstance(
          resolutionFeature.getPreviewSize().getWidth(),
          resolutionFeature.getPreviewSize().getHeight(),
          imageFormat,
          1);

  // Open the camera.
  CameraManager cameraManager = CameraUtils.getCameraManager(activity);
  cameraManager.openCamera(
      cameraProperties.getCameraName(),
      new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice device) {
          cameraDevice = device;
          try {
            startPreview();
            dartMessenger.sendCameraInitializedEvent(
                resolutionFeature.getPreviewSize().getWidth(),
                resolutionFeature.getPreviewSize().getHeight(),
                cameraFeatures.getExposureLock().getValue(),
                cameraFeatures.getAutoFocus().getValue(),
                cameraFeatures.getExposurePoint().checkIsSupported(),
                cameraFeatures.getFocusPoint().checkIsSupported());
          } catch (CameraAccessException e) {
            dartMessenger.sendCameraErrorEvent(e.getMessage());
            close();
          }
        }

        @Override
        public void onClosed(@NonNull CameraDevice camera) {
          Log.i(TAG, "open | onClosed");
          dartMessenger.sendCameraClosingEvent();
          super.onClosed(camera);
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
          Log.i(TAG, "open | onDisconnected");
          close();
          dartMessenger.sendCameraErrorEvent("The camera was disconnected.");
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int errorCode) {
          Log.i(TAG, "open | onError");
          close();
          // Map the Camera2 error constant to a human-readable message for Dart.
          String errorDescription;
          switch (errorCode) {
            case ERROR_CAMERA_IN_USE:
              errorDescription = "The camera device is in use already.";
              break;
            case ERROR_MAX_CAMERAS_IN_USE:
              errorDescription = "Max cameras in use";
              break;
            case ERROR_CAMERA_DISABLED:
              errorDescription = "The camera device could not be opened due to a device policy.";
              break;
            case ERROR_CAMERA_DEVICE:
              errorDescription = "The camera device has encountered a fatal error";
              break;
            case ERROR_CAMERA_SERVICE:
              errorDescription = "The camera service has encountered a fatal error.";
              break;
            default:
              errorDescription = "Unknown camera error";
          }
          dartMessenger.sendCameraErrorEvent(errorDescription);
        }
      },
      backgroundHandler);
}

// Convenience overload without a success callback.
private void createCaptureSession(int templateType, Surface... surfaces)
    throws CameraAccessException {
  createCaptureSession(templateType, null, surfaces);
}

/**
 * Tears down any existing session and builds a new capture session targeting the Flutter
 * texture surface plus the given surfaces. On successful configuration the preview repeating
 * request is (re)started and {@code onSuccessCallback} is run.
 */
private void createCaptureSession(
    int templateType, Runnable onSuccessCallback, Surface... surfaces)
    throws CameraAccessException {
  // Close any existing capture session.
  closeCaptureSession();

  // Create a new capture builder.
  previewRequestBuilder = cameraDevice.createCaptureRequest(templateType);

  // Build Flutter surface to render to.
  ResolutionFeature resolutionFeature = cameraFeatures.getResolution();
  SurfaceTexture surfaceTexture = flutterTexture.surfaceTexture();
  surfaceTexture.setDefaultBufferSize(
      resolutionFeature.getPreviewSize().getWidth(),
      resolutionFeature.getPreviewSize().getHeight());
  Surface flutterSurface = new Surface(surfaceTexture);
  previewRequestBuilder.addTarget(flutterSurface);

  List<Surface> remainingSurfaces = Arrays.asList(surfaces);
  if (templateType != CameraDevice.TEMPLATE_PREVIEW) {
    // If it is not preview mode, add all surfaces as targets.
    for (Surface surface : remainingSurfaces) {
      previewRequestBuilder.addTarget(surface);
    }
  }

  // Update camera regions.
  Size cameraBoundaries =
      CameraRegionUtils.getCameraBoundaries(cameraProperties, previewRequestBuilder);
  cameraFeatures.getExposurePoint().setCameraBoundaries(cameraBoundaries);
  cameraFeatures.getFocusPoint().setCameraBoundaries(cameraBoundaries);

  // Prepare the callback.
  CameraCaptureSession.StateCallback callback =
      new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(@NonNull CameraCaptureSession session) {
          // Camera was already closed.
          if (cameraDevice == null) {
            dartMessenger.sendCameraErrorEvent("The camera was closed during configuration.");
            return;
          }
          captureSession = session;

          Log.i(TAG, "Updating builder settings");
          updateBuilderSettings(previewRequestBuilder);

          refreshPreviewCaptureSession(
              onSuccessCallback, (code, message) -> dartMessenger.sendCameraErrorEvent(message));
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
          dartMessenger.sendCameraErrorEvent("Failed to configure camera session.");
        }
      };

  // Start the session. API 28+ uses SessionConfiguration; older releases use the
  // deprecated surface-list overload.
  if (VERSION.SDK_INT >= VERSION_CODES.P) {
    // Collect all surfaces to render to.
    List<OutputConfiguration> configs = new ArrayList<>();
    configs.add(new OutputConfiguration(flutterSurface));
    for (Surface surface : remainingSurfaces) {
      configs.add(new OutputConfiguration(surface));
    }
    createCaptureSessionWithSessionConfig(configs, callback);
  } else {
    // Collect all surfaces to render to.
    List<Surface> surfaceList = new ArrayList<>();
    surfaceList.add(flutterSurface);
    surfaceList.addAll(remainingSurfaces);
    createCaptureSession(surfaceList, callback);
  }
}

@TargetApi(VERSION_CODES.P)
private void createCaptureSessionWithSessionConfig(
    List<OutputConfiguration> outputConfigs, CameraCaptureSession.StateCallback callback)
    throws CameraAccessException {
  cameraDevice.createCaptureSession(
      new SessionConfiguration(
          SessionConfiguration.SESSION_REGULAR,
          outputConfigs,
          Executors.newSingleThreadExecutor(),
          callback));
}

@TargetApi(VERSION_CODES.LOLLIPOP)
@SuppressWarnings("deprecation")
private void createCaptureSession(
    List<Surface> surfaces, CameraCaptureSession.StateCallback callback)
    throws CameraAccessException {
  cameraDevice.createCaptureSession(surfaces, callback, backgroundHandler);
}

// Send a repeating request to refresh capture session.
private void refreshPreviewCaptureSession(
    @Nullable Runnable onSuccessCallback, @NonNull ErrorCallback onErrorCallback) {
  if (captureSession == null) {
    Log.i(
        TAG,
        "[refreshPreviewCaptureSession] captureSession not yet initialized, "
            + "skipping preview capture session refresh.");
    return;
  }

  try {
    // When the preview is paused the repeating request is intentionally not restarted.
    if (!pausedPreview) {
      captureSession.setRepeatingRequest(
          previewRequestBuilder.build(), cameraCaptureCallback, backgroundHandler);
    }

    if (onSuccessCallback != null) {
      onSuccessCallback.run();
    }
  } catch (CameraAccessException e) {
    onErrorCallback.onError("cameraAccess", e.getMessage());
  }
}

/**
 * Takes a still picture and completes {@code result} with the saved file path (via
 * {@link #onImageAvailable}). Rejects the call if a capture is already in flight.
 */
public void takePicture(@NonNull final Result result) {
  // Only take one picture at a time.
  if (cameraCaptureCallback.getCameraState() != CameraState.STATE_PREVIEW) {
    result.error("captureAlreadyActive", "Picture is currently already being captured", null);
    return;
  }

  flutterResult = result;

  // Create temporary file.
  final File outputDir = applicationContext.getCacheDir();
  try {
    captureFile = File.createTempFile("CAP", ".jpg", outputDir);
    captureTimeouts.reset();
  } catch (IOException | SecurityException e) {
    dartMessenger.error(flutterResult, "cannotCreateFile", e.getMessage(), null);
    return;
  }

  // Listen for picture being taken.
  pictureImageReader.setOnImageAvailableListener(this, backgroundHandler);

  final AutoFocusFeature autoFocusFeature = cameraFeatures.getAutoFocus();
  final boolean isAutoFocusSupported = autoFocusFeature.checkIsSupported();
  if (isAutoFocusSupported && autoFocusFeature.getValue() == FocusMode.auto) {
    runPictureAutoFocus();
  } else {
    runPrecaptureSequence();
  }
}

/**
 * Run the precapture sequence for capturing a still image. This method should be called when a
 * response is received in {@link #cameraCaptureCallback} from lockFocus().
 */
private void runPrecaptureSequence() {
  Log.i(TAG, "runPrecaptureSequence");
  try {
    // First set precapture state to idle or else it can hang in STATE_WAITING_PRECAPTURE_START.
    previewRequestBuilder.set(
        CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
        CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
    captureSession.capture(
        previewRequestBuilder.build(), cameraCaptureCallback, backgroundHandler);

    // Repeating request to refresh preview session.
    refreshPreviewCaptureSession(
        null,
        (code, message) -> dartMessenger.error(flutterResult, "cameraAccess", message, null));

    // Start precapture.
    cameraCaptureCallback.setCameraState(CameraState.STATE_WAITING_PRECAPTURE_START);

    previewRequestBuilder.set(
        CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
        CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);

    // Trigger one capture to start AE sequence.
    captureSession.capture(
        previewRequestBuilder.build(), cameraCaptureCallback, backgroundHandler);
  } catch (CameraAccessException e) {
    // NOTE(review): failure is only printed here, never surfaced to Dart — the pending
    // flutterResult is left unanswered. Consider dartMessenger.error(...) instead.
    e.printStackTrace();
  }
}

/**
 * Capture a still picture. This method should be called when a response is received {@link
 * #cameraCaptureCallback} from both lockFocus().
 */
private void takePictureAfterPrecapture() {
  Log.i(TAG, "captureStillPicture");
  cameraCaptureCallback.setCameraState(CameraState.STATE_CAPTURING);

  if (cameraDevice == null) {
    return;
  }
  // This is the CaptureRequest.Builder that is used to take a picture.
  CaptureRequest.Builder stillBuilder;
  try {
    stillBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
  } catch (CameraAccessException e) {
    dartMessenger.error(flutterResult, "cameraAccess", e.getMessage(), null);
    return;
  }
  stillBuilder.addTarget(pictureImageReader.getSurface());

  // Zoom. Carry over the crop region from the preview request.
  stillBuilder.set(
      CaptureRequest.SCALER_CROP_REGION,
      previewRequestBuilder.get(CaptureRequest.SCALER_CROP_REGION));

  // Have all features update the builder.
  updateBuilderSettings(stillBuilder);

  // Orientation.
  final PlatformChannel.DeviceOrientation lockedOrientation =
      ((SensorOrientationFeature) cameraFeatures.getSensorOrientation())
          .getLockedCaptureOrientation();
  stillBuilder.set(
      CaptureRequest.JPEG_ORIENTATION,
      lockedOrientation == null
          ? getDeviceOrientationManager().getPhotoOrientation()
          : getDeviceOrientationManager().getPhotoOrientation(lockedOrientation));

  CameraCaptureSession.CaptureCallback captureCallback =
      new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(
            @NonNull CameraCaptureSession session,
            @NonNull CaptureRequest request,
            @NonNull TotalCaptureResult result) {
          unlockAutoFocus();
        }
      };

  try {
    captureSession.stopRepeating();
    captureSession.abortCaptures();
    Log.i(TAG, "sending capture request");
    captureSession.capture(stillBuilder.build(), captureCallback, backgroundHandler);
  } catch (CameraAccessException e) {
    dartMessenger.error(flutterResult, "cameraAccess", e.getMessage(), null);
  }
}

@SuppressWarnings("deprecation")
private Display getDefaultDisplay() {
  return activity.getWindowManager().getDefaultDisplay();
}

/** Starts a background thread and its {@link Handler}. */
public void startBackgroundThread() {
  if (backgroundHandlerThread != null) {
    return;
  }

  backgroundHandlerThread = HandlerThreadFactory.create("CameraBackground");
  try {
    backgroundHandlerThread.start();
  } catch (IllegalThreadStateException e) {
    // Ignore exception in case the thread has already started.
  }
  backgroundHandler = HandlerFactory.create(backgroundHandlerThread.getLooper());
}

/** Stops the background thread and its {@link Handler}. */
public void stopBackgroundThread() {
  if (backgroundHandlerThread != null) {
    backgroundHandlerThread.quitSafely();
    try {
      backgroundHandlerThread.join();
    } catch (InterruptedException e) {
      // NOTE(review): interrupt status is not restored here
      // (Thread.currentThread().interrupt()) — confirm whether callers rely on it.
      dartMessenger.error(flutterResult, "cameraAccess", e.getMessage(), null);
    }
  }
  backgroundHandlerThread = null;
  backgroundHandler = null;
}

/** Start capturing a picture, doing autofocus first.
 */
private void runPictureAutoFocus() {
  Log.i(TAG, "runPictureAutoFocus");

  cameraCaptureCallback.setCameraState(CameraState.STATE_WAITING_FOCUS);
  lockAutoFocus();
}

/** Triggers a single AF lock capture; errors are forwarded to Dart. */
private void lockAutoFocus() {
  Log.i(TAG, "lockAutoFocus");
  if (captureSession == null) {
    // NOTE(review): log tag says "[unlockAutoFocus]" but this is lockAutoFocus —
    // looks like a copy/paste slip in the log message.
    Log.i(TAG, "[unlockAutoFocus] captureSession null, returning");
    return;
  }

  // Trigger AF to start.
  previewRequestBuilder.set(
      CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);

  try {
    captureSession.capture(previewRequestBuilder.build(), null, backgroundHandler);
  } catch (CameraAccessException e) {
    dartMessenger.sendCameraErrorEvent(e.getMessage());
  }
}

/** Cancel and reset auto focus state and refresh the preview session. */
private void unlockAutoFocus() {
  Log.i(TAG, "unlockAutoFocus");
  if (captureSession == null) {
    Log.i(TAG, "[unlockAutoFocus] captureSession null, returning");
    return;
  }
  try {
    // Cancel existing AF state.
    previewRequestBuilder.set(
        CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
    captureSession.capture(previewRequestBuilder.build(), null, backgroundHandler);

    // Set AF state to idle again.
    previewRequestBuilder.set(
        CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
    captureSession.capture(previewRequestBuilder.build(), null, backgroundHandler);
  } catch (CameraAccessException e) {
    dartMessenger.sendCameraErrorEvent(e.getMessage());
    return;
  }

  refreshPreviewCaptureSession(
      null,
      (errorCode, errorMessage) ->
          dartMessenger.error(flutterResult, errorCode, errorMessage, null));
}

/**
 * Starts recording video into a temp file in the cache dir; completes {@code result} with null
 * on success. Switches autofocus to video mode for the duration of the recording.
 */
public void startVideoRecording(@NonNull Result result) {
  final File outputDir = applicationContext.getCacheDir();
  try {
    captureFile = File.createTempFile("REC", ".mp4", outputDir);
  } catch (IOException | SecurityException e) {
    result.error("cannotCreateFile", e.getMessage(), null);
    return;
  }
  try {
    prepareMediaRecorder(captureFile.getAbsolutePath());
  } catch (IOException e) {
    recordingVideo = false;
    captureFile = null;
    result.error("videoRecordingFailed", e.getMessage(), null);
    return;
  }
  // Re-create autofocus feature so it's using video focus mode now.
  cameraFeatures.setAutoFocus(
      cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true));
  recordingVideo = true;
  try {
    // Recorder is started only once the capture session is successfully configured.
    createCaptureSession(
        CameraDevice.TEMPLATE_RECORD, () -> mediaRecorder.start(), mediaRecorder.getSurface());
    result.success(null);
  } catch (CameraAccessException e) {
    recordingVideo = false;
    captureFile = null;
    result.error("videoRecordingFailed", e.getMessage(), null);
  }
}

/**
 * Stops the active recording, restarts the preview, and completes {@code result} with the
 * recorded file's absolute path. A no-op success if not currently recording.
 */
public void stopVideoRecording(@NonNull final Result result) {
  if (!recordingVideo) {
    result.success(null);
    return;
  }
  // Re-create autofocus feature so it's using continuous capture focus mode now.
  cameraFeatures.setAutoFocus(
      cameraFeatureFactory.createAutoFocusFeature(cameraProperties, false));
  recordingVideo = false;
  try {
    captureSession.abortCaptures();
    mediaRecorder.stop();
  } catch (CameraAccessException | IllegalStateException e) {
    // Ignore exceptions and try to continue (chances are camera session already aborted capture).
  }
  mediaRecorder.reset();
  try {
    startPreview();
  } catch (CameraAccessException | IllegalStateException e) {
    result.error("videoRecordingFailed", e.getMessage(), null);
    return;
  }
  result.success(captureFile.getAbsolutePath());
  captureFile = null;
}

/** Pauses the active recording (API 24+); errors otherwise. No-op success if not recording. */
public void pauseVideoRecording(@NonNull final Result result) {
  if (!recordingVideo) {
    result.success(null);
    return;
  }

  try {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
      mediaRecorder.pause();
    } else {
      result.error("videoRecordingFailed", "pauseVideoRecording requires Android API +24.", null);
      return;
    }
  } catch (IllegalStateException e) {
    result.error("videoRecordingFailed", e.getMessage(), null);
    return;
  }

  result.success(null);
}

/** Resumes a paused recording (API 24+); errors otherwise. No-op success if not recording. */
public void resumeVideoRecording(@NonNull final Result result) {
  if (!recordingVideo) {
    result.success(null);
    return;
  }

  try {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
      mediaRecorder.resume();
    } else {
      result.error(
          "videoRecordingFailed", "resumeVideoRecording requires Android API +24.", null);
      return;
    }
  } catch (IllegalStateException e) {
    result.error("videoRecordingFailed", e.getMessage(), null);
    return;
  }

  result.success(null);
}

/**
 * Method handler for setting new flash modes.
 *
 * @param result Flutter result.
 * @param newMode new mode.
 */
public void setFlashMode(@NonNull final Result result, @NonNull FlashMode newMode) {
  // Save the new flash mode setting.
  final FlashFeature flashFeature = cameraFeatures.getFlash();
  flashFeature.setValue(newMode);
  flashFeature.updateBuilder(previewRequestBuilder);

  refreshPreviewCaptureSession(
      () -> result.success(null),
      (code, message) -> result.error("setFlashModeFailed", "Could not set flash mode.", null));
}

/**
 * Method handler for setting new exposure modes.
 *
 * @param result Flutter result.
 * @param newMode new mode.
 */
public void setExposureMode(@NonNull final Result result, @NonNull ExposureMode newMode) {
  final ExposureLockFeature exposureLockFeature = cameraFeatures.getExposureLock();
  exposureLockFeature.setValue(newMode);
  exposureLockFeature.updateBuilder(previewRequestBuilder);

  refreshPreviewCaptureSession(
      () -> result.success(null),
      (code, message) ->
          result.error("setExposureModeFailed", "Could not set exposure mode.", null));
}

/**
 * Sets new exposure point from dart.
 *
 * @param result Flutter result.
 * @param point The exposure point.
 */
public void setExposurePoint(@NonNull final Result result, @Nullable Point point) {
  final ExposurePointFeature exposurePointFeature = cameraFeatures.getExposurePoint();
  exposurePointFeature.setValue(point);
  exposurePointFeature.updateBuilder(previewRequestBuilder);

  refreshPreviewCaptureSession(
      () -> result.success(null),
      (code, message) ->
          result.error("setExposurePointFailed", "Could not set exposure point.", null));
}

/** Return the max exposure offset value supported by the camera to dart. */
public double getMaxExposureOffset() {
  return cameraFeatures.getExposureOffset().getMaxExposureOffset();
}

/** Return the min exposure offset value supported by the camera to dart. */
public double getMinExposureOffset() {
  return cameraFeatures.getExposureOffset().getMinExposureOffset();
}

/** Return the exposure offset step size to dart. */
public double getExposureOffsetStepSize() {
  return cameraFeatures.getExposureOffset().getExposureOffsetStepSize();
}

/**
 * Sets new focus mode from dart.
 *
 * <p>May be called with a null {@code result} (see setFocusPoint), in which case no reply is
 * sent.
 *
 * @param result Flutter result.
 * @param newMode New mode.
 */
public void setFocusMode(final Result result, @NonNull FocusMode newMode) {
  final AutoFocusFeature autoFocusFeature = cameraFeatures.getAutoFocus();
  autoFocusFeature.setValue(newMode);
  autoFocusFeature.updateBuilder(previewRequestBuilder);

  /*
   * For focus mode an extra step of actually locking/unlocking the
   * focus has to be done, in order to ensure it goes into the correct state.
   */
  if (!pausedPreview) {
    switch (newMode) {
      case locked:
        // Perform a single focus trigger.
        if (captureSession == null) {
          Log.i(TAG, "[unlockAutoFocus] captureSession null, returning");
          return;
        }
        lockAutoFocus();

        // Set AF state to idle again.
        previewRequestBuilder.set(
            CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);

        try {
          captureSession.setRepeatingRequest(
              previewRequestBuilder.build(), null, backgroundHandler);
        } catch (CameraAccessException e) {
          if (result != null) {
            result.error(
                "setFocusModeFailed", "Error setting focus mode: " + e.getMessage(), null);
          }
          return;
        }
        break;
      case auto:
        // Cancel current AF trigger and set AF to idle again.
        unlockAutoFocus();
        break;
    }
  }

  if (result != null) {
    result.success(null);
  }
}

/**
 * Sets new focus point from dart.
 *
 * @param result Flutter result.
 * @param point the new coordinates.
 */
public void setFocusPoint(@NonNull final Result result, @Nullable Point point) {
  final FocusPointFeature focusPointFeature = cameraFeatures.getFocusPoint();
  focusPointFeature.setValue(point);
  focusPointFeature.updateBuilder(previewRequestBuilder);

  refreshPreviewCaptureSession(
      () -> result.success(null),
      (code, message) -> result.error("setFocusPointFailed", "Could not set focus point.", null));

  // Re-apply the current focus mode so the new point takes effect (null result: no reply).
  this.setFocusMode(null, cameraFeatures.getAutoFocus().getValue());
}

/**
 * Sets a new exposure offset from dart. From dart the offset comes as a double, like +1.3 or
 * -1.3.
 *
 * @param result flutter result.
 * @param offset new value.
*/ public void setExposureOffset(@NonNull final Result result, double offset) { final ExposureOffsetFeature exposureOffsetFeature = cameraFeatures.getExposureOffset(); exposureOffsetFeature.setValue(offset); exposureOffsetFeature.updateBuilder(previewRequestBuilder); refreshPreviewCaptureSession( () -> result.success(exposureOffsetFeature.getValue()), (code, message) -> result.error("setExposureOffsetFailed", "Could not set exposure offset.", null)); } public float getMaxZoomLevel() { return cameraFeatures.getZoomLevel().getMaximumZoomLevel(); } public float getMinZoomLevel() { return cameraFeatures.getZoomLevel().getMinimumZoomLevel(); } /** Shortcut to get current recording profile. */ CamcorderProfile getRecordingProfile() { return cameraFeatures.getResolution().getRecordingProfile(); } /** Shortut to get deviceOrientationListener. */ DeviceOrientationManager getDeviceOrientationManager() { return cameraFeatures.getSensorOrientation().getDeviceOrientationManager(); } /** * Sets zoom level from dart. * * @param result Flutter result. * @param zoom new value. */ public void setZoomLevel(@NonNull final Result result, float zoom) throws CameraAccessException { final ZoomLevelFeature zoomLevel = cameraFeatures.getZoomLevel(); float maxZoom = zoomLevel.getMaximumZoomLevel(); float minZoom = zoomLevel.getMinimumZoomLevel(); if (zoom > maxZoom || zoom < minZoom) { String errorMessage = String.format( Locale.ENGLISH, "Zoom level out of bounds (zoom level should be between %f and %f).", minZoom, maxZoom); result.error("ZOOM_ERROR", errorMessage, null); return; } zoomLevel.setValue(zoom); zoomLevel.updateBuilder(previewRequestBuilder); refreshPreviewCaptureSession( () -> result.success(null), (code, message) -> result.error("setZoomLevelFailed", "Could not set zoom level.", null)); } /** * Lock capture orientation from dart. * * @param orientation new orientation. 
 */
public void lockCaptureOrientation(PlatformChannel.DeviceOrientation orientation) {
  cameraFeatures.getSensorOrientation().lockCaptureOrientation(orientation);
}

/** Unlock capture orientation from dart. */
public void unlockCaptureOrientation() {
  cameraFeatures.getSensorOrientation().unlockCaptureOrientation();
}

/** Pause the preview from dart. */
public void pausePreview() throws CameraAccessException {
  this.pausedPreview = true;
  this.captureSession.stopRepeating();
}

/** Resume the preview from dart. */
public void resumePreview() {
  this.pausedPreview = false;
  this.refreshPreviewCaptureSession(
      null, (code, message) -> dartMessenger.sendCameraErrorEvent(message));
}

/** Starts (or restarts) the preview capture session targeting the still-capture surface. */
public void startPreview() throws CameraAccessException {
  if (pictureImageReader == null || pictureImageReader.getSurface() == null) return;
  Log.i(TAG, "startPreview");

  createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface());
}

/**
 * Starts the preview with an additional image-stream output and wires the Dart-side
 * EventChannel to receive frames.
 */
public void startPreviewWithImageStream(EventChannel imageStreamChannel)
    throws CameraAccessException {
  createCaptureSession(CameraDevice.TEMPLATE_RECORD, imageStreamReader.getSurface());
  Log.i(TAG, "startPreviewWithImageStream");

  imageStreamChannel.setStreamHandler(
      new EventChannel.StreamHandler() {
        @Override
        public void onListen(Object o, EventChannel.EventSink imageStreamSink) {
          setImageStreamImageAvailableListener(imageStreamSink);
        }

        @Override
        public void onCancel(Object o) {
          imageStreamReader.setOnImageAvailableListener(null, backgroundHandler);
        }
      });
}

/**
 * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
 * still image is ready to be saved.
 */
@Override
public void onImageAvailable(ImageReader reader) {
  Log.i(TAG, "onImageAvailable");

  // Save the captured JPEG off the UI thread; flutterResult is completed from the callback.
  backgroundHandler.post(
      new ImageSaver(
          // Use acquireNextImage since image reader is only for one image.
          reader.acquireNextImage(),
          captureFile,
          new ImageSaver.Callback() {
            @Override
            public void onComplete(String absolutePath) {
              dartMessenger.finish(flutterResult, absolutePath);
            }

            @Override
            public void onError(String errorCode, String errorMessage) {
              dartMessenger.error(flutterResult, errorCode, errorMessage, null);
            }
          }));
  cameraCaptureCallback.setCameraState(CameraState.STATE_PREVIEW);
}

/**
 * Forwards each streamed frame to Dart as a map of plane buffers plus capture metadata,
 * delivered on the main looper.
 */
private void setImageStreamImageAvailableListener(final EventChannel.EventSink imageStreamSink) {
  imageStreamReader.setOnImageAvailableListener(
      reader -> {
        Image img = reader.acquireNextImage();
        // Use acquireNextImage since image reader is only for one image.
        if (img == null) return;

        List<Map<String, Object>> planes = new ArrayList<>();
        for (Image.Plane plane : img.getPlanes()) {
          ByteBuffer buffer = plane.getBuffer();

          byte[] bytes = new byte[buffer.remaining()];
          buffer.get(bytes, 0, bytes.length);

          Map<String, Object> planeBuffer = new HashMap<>();
          planeBuffer.put("bytesPerRow", plane.getRowStride());
          planeBuffer.put("bytesPerPixel", plane.getPixelStride());
          planeBuffer.put("bytes", bytes);

          planes.add(planeBuffer);
        }

        Map<String, Object> imageBuffer = new HashMap<>();
        imageBuffer.put("width", img.getWidth());
        imageBuffer.put("height", img.getHeight());
        imageBuffer.put("format", img.getFormat());
        imageBuffer.put("planes", planes);
        imageBuffer.put("lensAperture", this.captureProps.getLastLensAperture());
        imageBuffer.put("sensorExposureTime", this.captureProps.getLastSensorExposureTime());
        Integer sensorSensitivity = this.captureProps.getLastSensorSensitivity();
        // Dart side expects a double (or null) for sensitivity.
        imageBuffer.put(
            "sensorSensitivity", sensorSensitivity == null ? null : (double) sensorSensitivity);

        final Handler handler = new Handler(Looper.getMainLooper());
        handler.post(() -> imageStreamSink.success(imageBuffer));
        img.close();
      },
      backgroundHandler);
}

private void closeCaptureSession() {
  if (captureSession != null) {
    Log.i(TAG, "closeCaptureSession");

    captureSession.close();
    captureSession = null;
  }
}

/** Releases the capture session, camera device, readers, recorder and background thread. */
public void close() {
  Log.i(TAG, "close");
  closeCaptureSession();

  if (cameraDevice != null) {
    cameraDevice.close();
    cameraDevice = null;
  }
  if (pictureImageReader != null) {
    pictureImageReader.close();
    pictureImageReader = null;
  }
  if (imageStreamReader != null) {
    imageStreamReader.close();
    imageStreamReader = null;
  }
  if (mediaRecorder != null) {
    mediaRecorder.reset();
    mediaRecorder.release();
    mediaRecorder = null;
  }

  stopBackgroundThread();
}

/** Fully disposes the camera: close() plus releasing the Flutter texture and orientation listener. */
public void dispose() {
  Log.i(TAG, "dispose");

  close();
  flutterTexture.release();
  getDeviceOrientationManager().stop();
}

/** Factory class that assists in creating a {@link HandlerThread} instance. */
static class HandlerThreadFactory {
  /**
   * Creates a new instance of the {@link HandlerThread} class.
   *
   * <p>This method is visible for testing purposes only and should never be used outside this
   * class.
   *
   * @param name to give to the HandlerThread.
   * @return new instance of the {@link HandlerThread} class.
   */
  @VisibleForTesting
  public static HandlerThread create(String name) {
    return new HandlerThread(name);
  }
}

/** Factory class that assists in creating a {@link Handler} instance. */
static class HandlerFactory {
  /**
   * Creates a new instance of the {@link Handler} class.
   *
   * <p>This method is visible for testing purposes only and should never be used outside this
   * class.
   *
   * @param looper to give to the Handler.
   * @return new instance of the {@link Handler} class.
   */
  @VisibleForTesting
  public static Handler create(Looper looper) {
    return new Handler(looper);
  }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.pdmodel.interactive.form; import java.awt.geom.GeneralPath; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pdfbox.cos.COSArray; import org.apache.pdfbox.cos.COSBase; import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.PDPage; import org.apache.pdfbox.pdmodel.PDPageContentStream; import org.apache.pdfbox.pdmodel.PDPageContentStream.AppendMode; import org.apache.pdfbox.pdmodel.PDResources; import org.apache.pdfbox.pdmodel.common.COSArrayList; import org.apache.pdfbox.pdmodel.common.COSObjectable; import org.apache.pdfbox.pdmodel.common.PDRectangle; import org.apache.pdfbox.pdmodel.fdf.FDFCatalog; import org.apache.pdfbox.pdmodel.fdf.FDFDictionary; import org.apache.pdfbox.pdmodel.fdf.FDFDocument; import org.apache.pdfbox.pdmodel.fdf.FDFField; import 
org.apache.pdfbox.pdmodel.graphics.form.PDFormXObject;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAnnotation;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAnnotationWidget;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceStream;
import org.apache.pdfbox.util.Matrix;

/**
 * An interactive form, also known as an AcroForm.
 *
 * @author Ben Litchfield
 */
public final class PDAcroForm implements COSObjectable
{
    private static final Log LOG = LogFactory.getLog(PDAcroForm.class);

    // bit positions within the SigFlags entry of the AcroForm dictionary
    private static final int FLAG_SIGNATURES_EXIST = 1;
    private static final int FLAG_APPEND_ONLY = 1 << 1;

    private final PDDocument document;
    private final COSDictionary dictionary;

    // optional fully-qualified-name -> field lookup; only populated after setCacheFields(true)
    private Map<String, PDField> fieldCache;

    private ScriptingHandler scriptingHandler;

    /**
     * Constructor. Creates an empty AcroForm with an empty Fields array.
     *
     * @param doc The document that this form is part of.
     */
    public PDAcroForm(PDDocument doc)
    {
        document = doc;
        dictionary = new COSDictionary();
        dictionary.setItem(COSName.FIELDS, new COSArray());
    }

    /**
     * Constructor. Wraps an existing AcroForm dictionary.
     *
     * @param doc The document that this form is part of.
     * @param form The existing acroForm.
     */
    public PDAcroForm(PDDocument doc, COSDictionary form)
    {
        document = doc;
        dictionary = form;
    }

    /**
     * This will get the document associated with this form.
     *
     * @return The PDF document.
     */
    PDDocument getDocument()
    {
        return document;
    }

    @Override
    public COSDictionary getCOSObject()
    {
        return dictionary;
    }

    /**
     * This method will import an entire FDF document into the PDF document
     * that this acroform is part of.
     *
     * @param fdf The FDF document to import.
     *
     * @throws IOException If there is an error doing the import.
     */
    public void importFDF(FDFDocument fdf) throws IOException
    {
        List<FDFField> fields = fdf.getCatalog().getFDF().getFields();
        if (fields != null)
        {
            for (FDFField fdfField : fields)
            {
                // only fields that exist in this form are imported; unknown names are skipped
                PDField docField = getField(fdfField.getPartialFieldName());
                if (docField != null)
                {
                    docField.importFDF(fdfField);
                }
            }
        }
    }

    /**
     * This will export all FDF form data.
     *
     * @return An FDF document used to export the document.
     * @throws IOException If there is an error when exporting the document.
     */
    public FDFDocument exportFDF() throws IOException
    {
        FDFDocument fdf = new FDFDocument();
        FDFCatalog catalog = fdf.getCatalog();
        FDFDictionary fdfDict = new FDFDictionary();
        catalog.setFDF(fdfDict);
        List<FDFField> fdfFields = new ArrayList<>();
        for (PDField field : getFields())
        {
            fdfFields.add(field.exportFDF());
        }
        fdfDict.setID(document.getDocument().getDocumentID());
        if (!fdfFields.isEmpty())
        {
            fdfDict.setFields(fdfFields);
        }
        return fdf;
    }

    /**
     * This will flatten all form fields.
     *
     * <p>Flattening a form field will take the current appearance and make that part
     * of the pages content stream. All form fields and annotations associated are removed.</p>
     *
     * <p>Invisible and hidden fields will be skipped and will not become part of the
     * page content stream</p>
     *
     * <p>The appearances for the form fields widgets will <strong>not</strong> be generated<p>
     *
     * @throws IOException
     */
    public void flatten() throws IOException
    {
        // for dynamic XFA forms there is no flatten as this would mean to do a rendering
        // from the XFA content into a static PDF.
        if (xfaIsDynamic())
        {
            LOG.warn("Flatten for a dynamic XFA form is not supported");
            return;
        }
        List<PDField> fields = new ArrayList<>();
        for (PDField field : getFieldTree())
        {
            fields.add(field);
        }
        flatten(fields, false);
    }

    /**
     * This will flatten the specified form fields.
     *
     * <p>Flattening a form field will take the current appearance and make that part
     * of the pages content stream. All form fields and annotations associated are removed.</p>
     *
     * <p>Invisible and hidden fields will be skipped and will not become part of the
     * page content stream</p>
     *
     * @param fields
     * @param refreshAppearances if set to true the appearances for the form field widgets will be updated
     * @throws IOException
     */
    public void flatten(List<PDField> fields, boolean refreshAppearances) throws IOException
    {
        // Nothing to flatten if there are no fields provided
        if (fields.isEmpty())
        {
            return;
        }
        if (!refreshAppearances && getNeedAppearances())
        {
            LOG.warn("acroForm.getNeedAppearances() returns true, "
                    + "visual field appearances may not have been set");
            LOG.warn("call acroForm.refreshAppearances() or "
                    + "use the flatten() method with refreshAppearances parameter");
        }
        // for dynamic XFA forms there is no flatten as this would mean to do a rendering
        // from the XFA content into a static PDF.
        if (xfaIsDynamic())
        {
            LOG.warn("Flatten for a dynamic XFA form is not supported");
            return;
        }
        // refresh the appearances if set
        if (refreshAppearances)
        {
            refreshAppearances(fields);
        }
        Map<COSDictionary, Set<COSDictionary>> pagesWidgetsMap = buildPagesWidgetsMap(fields);
        // preserve all non widget annotations
        for (PDPage page : document.getPages())
        {
            Set<COSDictionary> widgetsForPageMap = pagesWidgetsMap.get(page.getCOSObject());
            // indicates if the original content stream
            // has been wrapped in a q...Q pair.
            boolean isContentStreamWrapped = false;
            List<PDAnnotation> annotations = new ArrayList<>();
            for (PDAnnotation annotation : page.getAnnotations())
            {
                if (widgetsForPageMap == null || !widgetsForPageMap.contains(annotation.getCOSObject()))
                {
                    // not a widget of one of the fields being flattened -> keep it on the page
                    annotations.add(annotation);
                }
                else if (isVisibleAnnotation(annotation))
                {
                    try (PDPageContentStream contentStream = new PDPageContentStream(
                            document, page, AppendMode.APPEND, true, !isContentStreamWrapped))
                    {
                        isContentStreamWrapped = true;
                        PDAppearanceStream appearanceStream = annotation.getNormalAppearanceStream();
                        PDFormXObject fieldObject = new PDFormXObject(appearanceStream.getCOSObject());
                        contentStream.saveGraphicsState();
                        // see https://stackoverflow.com/a/54091766/1729265 for an explanation
                        // of the steps required
                        // this will transform the appearance stream form object into the rectangle of the
                        // annotation bbox and map the coordinate systems
                        Matrix transformationMatrix = resolveTransformationMatrix(annotation, appearanceStream);
                        contentStream.transform(transformationMatrix);
                        contentStream.drawForm(fieldObject);
                        contentStream.restoreGraphicsState();
                    }
                }
            }
            page.setAnnotations(annotations);
        }
        // remove the fields
        removeFields(fields);
        // remove XFA for hybrid forms
        dictionary.removeItem(COSName.XFA);
    }

    /**
     * Tells whether the annotation would produce visible output when flattened:
     * it must not be marked invisible or hidden, must have a normal appearance
     * stream and that stream's BBox must have a positive extent.
     */
    private boolean isVisibleAnnotation(PDAnnotation annotation)
    {
        if (annotation.isInvisible() || annotation.isHidden())
        {
            return false;
        }
        PDAppearanceStream normalAppearanceStream = annotation.getNormalAppearanceStream();
        if (normalAppearanceStream == null)
        {
            return false;
        }
        PDRectangle bbox = normalAppearanceStream.getBBox();
        return bbox != null && bbox.getWidth() > 0 && bbox.getHeight() > 0;
    }

    /**
     * Refreshes the appearance streams and appearance dictionaries for
     * the widget annotations of all fields.
     *
     * @throws IOException
     */
    public void refreshAppearances() throws IOException
    {
        for (PDField field : getFieldTree())
        {
            // only terminal fields carry widgets with appearances
            if (field instanceof PDTerminalField)
            {
                ((PDTerminalField) field).constructAppearances();
            }
        }
    }

    /**
     * Refreshes the appearance streams and appearance dictionaries for
     * the widget annotations of the specified fields.
     *
     * @param fields
     * @throws IOException
     */
    public void refreshAppearances(List<PDField> fields) throws IOException
    {
        for (PDField field : fields)
        {
            if (field instanceof PDTerminalField)
            {
                ((PDTerminalField) field).constructAppearances();
            }
        }
    }

    /**
     * This will return all of the documents root fields.
     *
     * A field might have children that are fields (non-terminal field) or does not
     * have children which are fields (terminal fields).
     *
     * The fields within an AcroForm are organized in a tree structure. The documents root fields
     * might either be terminal fields, non-terminal fields or a mixture of both. Non-terminal fields
     * mark branches which contents can be retrieved using {@link PDNonTerminalField#getChildren()}.
     *
     * @return A list of the documents root fields, never null. If there are no fields then this
     * method returns an empty list.
     */
    public List<PDField> getFields()
    {
        COSArray cosFields = dictionary.getCOSArray(COSName.FIELDS);
        if (cosFields == null)
        {
            return Collections.emptyList();
        }
        List<PDField> pdFields = new ArrayList<>();
        for (int i = 0; i < cosFields.size(); i++)
        {
            // guard the cast: malformed PDFs may hold non-dictionary entries in /Fields,
            // which previously caused a ClassCastException here
            COSBase element = cosFields.getObject(i);
            if (element instanceof COSDictionary)
            {
                PDField field = PDField.fromDictionary(this, (COSDictionary) element, null);
                if (field != null)
                {
                    pdFields.add(field);
                }
            }
        }
        return new COSArrayList<>(pdFields, cosFields);
    }

    /**
     * Set the documents root fields.
     *
     * @param fields The fields that are part of the documents root fields.
     */
    public void setFields(List<PDField> fields)
    {
        dictionary.setItem(COSName.FIELDS, new COSArray(fields));
    }

    /**
     * Returns an iterator which walks all fields in the field tree, in order.
     */
    public Iterator<PDField> getFieldIterator()
    {
        return new PDFieldTree(this).iterator();
    }

    /**
     * Return the field tree representing all form fields
     */
    public PDFieldTree getFieldTree()
    {
        return new PDFieldTree(this);
    }

    /**
     * This will tell this form to cache the fields into a Map structure
     * for fast access via the getField method. The default is false. You would
     * want this to be false if you were changing the COSDictionary behind the scenes,
     * otherwise setting this to true is acceptable.
     *
     * @param cache A boolean telling if we should cache the fields.
     */
    public void setCacheFields(boolean cache)
    {
        if (cache)
        {
            fieldCache = new HashMap<>();
            for (PDField field : getFieldTree())
            {
                fieldCache.put(field.getFullyQualifiedName(), field);
            }
        }
        else
        {
            fieldCache = null;
        }
    }

    /**
     * This will tell if this acro form is caching the fields.
     *
     * @return true if the fields are being cached.
     */
    public boolean isCachingFields()
    {
        return fieldCache != null;
    }

    /**
     * This will get a field by name, possibly using the cache if setCache is true.
     *
     * @param fullyQualifiedName The name of the field to get.
     * @return The field with that name of null if one was not found.
     */
    public PDField getField(String fullyQualifiedName)
    {
        // get the field from the cache if there is one.
        if (fieldCache != null)
        {
            return fieldCache.get(fullyQualifiedName);
        }
        // get the field from the field tree
        for (PDField field : getFieldTree())
        {
            if (field.getFullyQualifiedName().equals(fullyQualifiedName))
            {
                return field;
            }
        }
        return null;
    }

    /**
     * Get the default appearance.
     *
     * @return the DA element of the dictionary object
     */
    public String getDefaultAppearance()
    {
        return dictionary.getString(COSName.DA, "");
    }

    /**
     * Set the default appearance.
     *
     * @param daValue a string describing the default appearance
     */
    public void setDefaultAppearance(String daValue)
    {
        dictionary.setString(COSName.DA, daValue);
    }

    /**
     * True if the viewing application should construct the appearances of all field widgets.
     * The default value is false.
     *
     * @return the value of NeedAppearances, false if the value isn't set
     */
    public boolean getNeedAppearances()
    {
        return dictionary.getBoolean(COSName.NEED_APPEARANCES, false);
    }

    /**
     * Set the NeedAppearances value. If this is false, PDFBox will create appearances for all field
     * widget.
     *
     * @param value the value for NeedAppearances; must not be null (auto-unboxed)
     */
    public void setNeedAppearances(Boolean value)
    {
        dictionary.setBoolean(COSName.NEED_APPEARANCES, value);
    }

    /**
     * This will get the default resources for the AcroForm.
     *
     * @return The default resources or null if there is none.
     */
    public PDResources getDefaultResources()
    {
        COSDictionary dr = dictionary.getCOSDictionary(COSName.DR);
        return dr != null ? new PDResources(dr, document.getResourceCache()) : null;
    }

    /**
     * This will set the default resources for the acroform.
     *
     * @param dr The new default resources.
     */
    public void setDefaultResources(PDResources dr)
    {
        dictionary.setItem(COSName.DR, dr);
    }

    /**
     * This will tell if the AcroForm has XFA content.
     *
     * @return true if the AcroForm is an XFA form
     */
    public boolean hasXFA()
    {
        return dictionary.containsKey(COSName.XFA);
    }

    /**
     * This will tell if the AcroForm is a dynamic XFA form.
     *
     * @return true if the AcroForm is a dynamic XFA form
     */
    public boolean xfaIsDynamic()
    {
        // a dynamic XFA form has XFA content but no AcroForm fields
        return hasXFA() && getFields().isEmpty();
    }

    /**
     * Get the XFA resource, the XFA resource is only used for PDF 1.5+ forms.
     *
     * @return The xfa resource or null if it does not exist.
     */
    public PDXFAResource getXFA()
    {
        COSBase base = dictionary.getDictionaryObject(COSName.XFA);
        return base != null ? new PDXFAResource(base) : null;
    }

    /**
     * Set the XFA resource, this is only used for PDF 1.5+ forms.
     *
     * @param xfa The xfa resource.
     */
    public void setXFA(PDXFAResource xfa)
    {
        dictionary.setItem(COSName.XFA, xfa);
    }

    /**
     * This will get the document-wide default value for the quadding/justification of variable text
     * fields.
     * <p>
     * 0 - Left(default)<br>
     * 1 - Centered<br>
     * 2 - Right<br>
     * See the QUADDING constants of {@link PDVariableText}.
     *
     * @return The justification of the variable text fields.
     */
    public int getQ()
    {
        return dictionary.getInt(COSName.Q, 0);
    }

    /**
     * This will set the document-wide default value for the quadding/justification of variable text
     * fields. See the QUADDING constants of {@link PDVariableText}.
     *
     * @param q The justification of the variable text fields.
     */
    public void setQ(int q)
    {
        dictionary.setInt(COSName.Q, q);
    }

    /**
     * Determines if SignaturesExist is set.
     *
     * @return true if the document contains at least one signature.
     */
    public boolean isSignaturesExist()
    {
        return dictionary.getFlag(COSName.SIG_FLAGS, FLAG_SIGNATURES_EXIST);
    }

    /**
     * Set the SignaturesExist bit.
     *
     * @param signaturesExist The value for SignaturesExist.
     */
    public void setSignaturesExist(boolean signaturesExist)
    {
        dictionary.setFlag(COSName.SIG_FLAGS, FLAG_SIGNATURES_EXIST, signaturesExist);
    }

    /**
     * Determines if AppendOnly is set.
     *
     * @return true if the document contains signatures that may be invalidated if the file is saved.
     */
    public boolean isAppendOnly()
    {
        return dictionary.getFlag(COSName.SIG_FLAGS, FLAG_APPEND_ONLY);
    }

    /**
     * Get the handler that supports JavaScript actions in the form.
     *
     * @return scriptingHandler
     */
    public ScriptingHandler getScriptingHandler()
    {
        return scriptingHandler;
    }

    /**
     * Set a handler to support JavaScript actions in the form.
     *
     * @param scriptingHandler
     */
    public void setScriptingHandler(ScriptingHandler scriptingHandler)
    {
        this.scriptingHandler = scriptingHandler;
    }

    /**
     * Set the AppendOnly bit.
     *
     * @param appendOnly The value for AppendOnly.
     */
    public void setAppendOnly(boolean appendOnly)
    {
        dictionary.setFlag(COSName.SIG_FLAGS, FLAG_APPEND_ONLY, appendOnly);
    }

    /**
     * Calculates the matrix needed to map an annotation's appearance stream
     * into the annotation's /Rect on the page.
     */
    private Matrix resolveTransformationMatrix(PDAnnotation annotation, PDAppearanceStream appearanceStream)
    {
        // 1st step transform appearance stream bbox with appearance stream matrix
        Rectangle2D transformedAppearanceBox = getTransformedAppearanceBBox(appearanceStream);
        PDRectangle annotationRect = annotation.getRectangle();
        // 2nd step calculate matrix to transform calculated rectangle into the annotation Rect boundaries
        Matrix transformationMatrix = new Matrix();
        transformationMatrix.translate(
                (float) (annotationRect.getLowerLeftX() - transformedAppearanceBox.getX()),
                (float) (annotationRect.getLowerLeftY() - transformedAppearanceBox.getY()));
        transformationMatrix.scale(
                (float) (annotationRect.getWidth() / transformedAppearanceBox.getWidth()),
                (float) (annotationRect.getHeight() / transformedAppearanceBox.getHeight()));
        return transformationMatrix;
    }

    /**
     * Calculate the transformed appearance box.
     *
     * Apply the Matrix (or an identity transform) to the BBox of
     * the appearance stream
     *
     * @param appearanceStream
     * @return the transformed rectangle
     */
    private Rectangle2D getTransformedAppearanceBBox(PDAppearanceStream appearanceStream)
    {
        Matrix appearanceStreamMatrix = appearanceStream.getMatrix();
        PDRectangle appearanceStreamBBox = appearanceStream.getBBox();
        GeneralPath transformedAppearanceBox = appearanceStreamBBox.transform(appearanceStreamMatrix);
        return transformedAppearanceBox.getBounds2D();
    }

    /**
     * Builds a map from page dictionary to the set of widget dictionaries (of the
     * given fields) that sit on that page. Falls back to scanning all page
     * annotations when at least one widget lacks a page reference.
     */
    private Map<COSDictionary, Set<COSDictionary>> buildPagesWidgetsMap(List<PDField> fields) throws IOException
    {
        Map<COSDictionary, Set<COSDictionary>> pagesAnnotationsMap = new HashMap<>();
        boolean hasMissingPageRef = false;
        for (PDField field : fields)
        {
            for (PDAnnotationWidget widget : field.getWidgets())
            {
                PDPage page = widget.getPage();
                if (page != null)
                {
                    fillPagesAnnotationMap(pagesAnnotationsMap, page, widget);
                }
                else
                {
                    hasMissingPageRef = true;
                }
            }
        }
        if (!hasMissingPageRef)
        {
            return pagesAnnotationsMap;
        }
        // If there is a widget with a missing page reference we need to build the map reverse i.e.
        // from the annotations to the widget.
        LOG.warn("There has been a widget with a missing page reference, will check all page annotations");
        for (PDPage page : document.getPages())
        {
            for (PDAnnotation annotation : page.getAnnotations())
            {
                if (annotation instanceof PDAnnotationWidget)
                {
                    fillPagesAnnotationMap(pagesAnnotationsMap, page, (PDAnnotationWidget) annotation);
                }
            }
        }
        return pagesAnnotationsMap;
    }

    /**
     * Adds the widget's dictionary to the per-page set, creating the set on first use.
     */
    private void fillPagesAnnotationMap(Map<COSDictionary, Set<COSDictionary>> pagesAnnotationsMap,
            PDPage page, PDAnnotationWidget widget)
    {
        // computeIfAbsent replaces the previous get/put dance and avoids a double lookup
        pagesAnnotationsMap
                .computeIfAbsent(page.getCOSObject(), key -> new HashSet<>())
                .add(widget.getCOSObject());
    }

    /**
     * Removes the given fields from the root field list or from their parent's
     * /Kids array. Entries whose containing array cannot be found are logged
     * and skipped instead of raising a NullPointerException.
     */
    private void removeFields(List<PDField> fields)
    {
        for (PDField field : fields)
        {
            COSArray array;
            if (field.getParent() == null)
            {
                // if the field has no parent, assume it is at root level list, remove it from there
                array = dictionary.getCOSArray(COSName.FIELDS);
            }
            else
            {
                // if the field has a parent, then remove from the list there
                array = field.getParent().getCOSObject().getCOSArray(COSName.KIDS);
            }
            if (array != null)
            {
                array.removeObject(field.getCOSObject());
            }
            else
            {
                LOG.warn("Could not remove field " + field.getFullyQualifiedName()
                        + " because its containing array was not found");
            }
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.sip.listener;

import java.text.ParseException;
import java.util.UUID;

import javax.sip.ClientTransaction;
import javax.sip.Dialog;
import javax.sip.DialogTerminatedEvent;
import javax.sip.IOExceptionEvent;
import javax.sip.RequestEvent;
import javax.sip.ResponseEvent;
import javax.sip.ServerTransaction;
import javax.sip.SipException;
import javax.sip.SipListener;
import javax.sip.SipProvider;
import javax.sip.TransactionTerminatedEvent;
import javax.sip.address.SipURI;
import javax.sip.header.EventHeader;
import javax.sip.header.SubscriptionStateHeader;
import javax.sip.header.ToHeader;
import javax.sip.message.Request;
import javax.sip.message.Response;

import org.apache.camel.component.sip.SipPresenceAgent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * SIP listener for the presence-agent side of the Camel SIP component:
 * answers SUBSCRIBE and PUBLISH requests and pushes NOTIFY messages to
 * the subscriber over the subscription dialog.
 */
public class SipPresenceAgentListener implements SipListener, SipMessageCodes {
    // 'transient' removed: it is meaningless on a static field and flagged by linters
    private static final Logger LOG = LoggerFactory.getLogger(SipPresenceAgentListener.class);
    protected Dialog dialog;
    protected int notifyCount;
    private SipPresenceAgent sipPresenceAgent;

    public SipPresenceAgentListener(SipPresenceAgent sipPresenceAgent) {
        this.sipPresenceAgent = sipPresenceAgent;
    }

    /**
     * Dispatches incoming requests: SUBSCRIBE and PUBLISH are handled,
     * everything else is only logged.
     */
    public void processRequest(RequestEvent requestEvent) {
        Request request = requestEvent.getRequest();
        ServerTransaction serverTransactionId = requestEvent.getServerTransaction();
        LOG.debug("Request: {}", request.getMethod());
        LOG.debug("Server Transaction Id: {}", serverTransactionId);

        if (request.getMethod().equals(Request.SUBSCRIBE)) {
            processSubscribe(requestEvent, serverTransactionId);
        } else if (request.getMethod().equals(Request.PUBLISH)) {
            processPublish(requestEvent, serverTransactionId);
        } else {
            // NOTE(review): "expected" here likely means "unexpected" — confirm before changing the message
            LOG.debug("Received expected request with method: {}. No further processing done", request.getMethod());
        }
    }

    /**
     * Sends a NOTIFY on the current dialog carrying the given body.
     *
     * @param eventHeader the Event header to echo back to the subscriber
     * @param isInitial   true for the first NOTIFY of a new subscription (state "pending"),
     *                    false afterwards (state "active")
     * @param body        the NOTIFY payload
     */
    private void sendNotification(EventHeader eventHeader, boolean isInitial, Object body) throws SipException, ParseException {
        /*
         * NOTIFY requests MUST contain a "Subscription-State" header with a
         * value of "active", "pending", or "terminated". The "active" value
         * indicates that the subscription has been accepted and has been
         * authorized (in most cases; see section 5.2.). The "pending" value
         * indicates that the subscription has been received, but that
         * policy information is insufficient to accept or deny the
         * subscription at this time. The "terminated" value indicates that
         * the subscription is not active.
         */
        Request notifyRequest = dialog.createRequest("NOTIFY");

        // Mark the contact header, to check that the remote contact is updated
        ((SipURI)sipPresenceAgent.getConfiguration().getContactHeader().getAddress().getURI()).setParameter(
                sipPresenceAgent.getConfiguration().getFromUser(), sipPresenceAgent.getConfiguration().getFromHost());
        SubscriptionStateHeader sstate;
        if (isInitial) {
            // Initial state is pending. The original code evaluated
            // "isInitial ? PENDING : TERMINATED" inside this branch, where isInitial
            // is always true, so the TERMINATED arm and its reason-code block were
            // dead code and have been removed.
            sstate = sipPresenceAgent.getConfiguration().getHeaderFactory()
                    .createSubscriptionStateHeader(SubscriptionStateHeader.PENDING);
        } else {
            sstate = sipPresenceAgent.getConfiguration().getHeaderFactory()
                    .createSubscriptionStateHeader(SubscriptionStateHeader.ACTIVE);
        }
        notifyRequest.addHeader(sstate);
        notifyRequest.setHeader(eventHeader);
        notifyRequest.setHeader(sipPresenceAgent.getConfiguration().getContactHeader());
        notifyRequest.setContent(body, sipPresenceAgent.getConfiguration().getContentTypeHeader());
        LOG.debug("Sending the following NOTIFY request to Subscriber: {}", notifyRequest);

        ClientTransaction clientTransactionId = sipPresenceAgent.getProvider().getNewClientTransaction(notifyRequest);
        dialog.sendRequest(clientTransactionId);
    }

    /**
     * Answers a PUBLISH request with 202 Accepted and forwards the published
     * state to the subscriber via NOTIFY.
     */
    private void processPublish(RequestEvent requestEvent, ServerTransaction serverTransactionId) {
        try {
            Request request = requestEvent.getRequest();
            LOG.debug("SipPresenceAgentListener: Received a Publish request, sending OK");
            LOG.debug("SipPresenceAgentListener request: {}", request);
            EventHeader eventHeader = (EventHeader) requestEvent.getRequest().getHeader(EventHeader.NAME);
            Response response = sipPresenceAgent.getConfiguration().getMessageFactory().createResponse(202, request);
            sipPresenceAgent.getProvider().sendResponse(response);

            // Send notification to subscriber
            sendNotification(eventHeader, false, request.getContent());
        } catch (Exception e) {
            LOG.error("Exception thrown during publish/notify processing in the Sip Presence Agent Listener", e);
        }
    }

    /**
     * Handles SUBSCRIBE: creates/refreshes the subscription dialog, replies
     * 202 (initial) or 200 (refresh/unsubscribe) and sends the first NOTIFY.
     */
    public void processSubscribe(RequestEvent requestEvent,
            ServerTransaction serverTransaction) {
        SipProvider sipProvider = (SipProvider) requestEvent.getSource();
        Request request = requestEvent.getRequest();
        try {
            LOG.debug("SipPresenceAgentListener: Received a Subscribe request, sending OK");
            LOG.debug("SipPresenceAgentListener request: {}", request);
            EventHeader eventHeader = (EventHeader) request.getHeader(EventHeader.NAME);
            if (eventHeader == null) {
                LOG.debug("Cannot find event header.... dropping request.");
                return;
            }

            // Always create a ServerTransaction, best as early as possible in the code
            Response response = null;
            ServerTransaction st = requestEvent.getServerTransaction();
            if (st == null) {
                st = sipProvider.getNewServerTransaction(request);
            }

            // Check if it is an initial SUBSCRIBE or a refresh / unsubscribe
            boolean isInitial = requestEvent.getDialog() == null;
            if (isInitial) {
                String toTag = UUID.randomUUID().toString();
                response = sipPresenceAgent.getConfiguration().getMessageFactory().createResponse(202, request);
                ToHeader toHeader = (ToHeader) response.getHeader(ToHeader.NAME);

                // Application is supposed to set.
                toHeader.setTag(toTag);
                this.dialog = st.getDialog();
                // subscribe dialogs do not terminate on bye.
                this.dialog.terminateOnBye(false);
            } else {
                response = sipPresenceAgent.getConfiguration().getMessageFactory().createResponse(200, request);
            }

            // Both 2xx response to SUBSCRIBE and NOTIFY need a Contact
            response.addHeader(sipPresenceAgent.getConfiguration().getContactHeader());

            // Expires header is mandatory in 2xx responses to SUBSCRIBE
            response.addHeader(sipPresenceAgent.getConfiguration().getExpiresHeader());
            st.sendResponse(response);

            LOG.debug("SipPresenceAgentListener: Sent OK Message");
            LOG.debug("SipPresenceAgentListener response: {}", response);
            sendNotification(eventHeader, isInitial, request.getContent());
        } catch (Throwable e) {
            LOG.error("Exception thrown during Notify processing in the SipPresenceAgentListener.", e);
        }
    }

    public synchronized void processResponse(ResponseEvent responseReceivedEvent) {
        Response response = responseReceivedEvent.getResponse();
        Integer statusCode = response.getStatusCode();
        if (SIP_MESSAGE_CODES.containsKey(statusCode)) {
            // parameterized logging: the rendered message is identical but the string
            // is only built when debug is enabled
            LOG.debug("{} received from Subscriber", SIP_MESSAGE_CODES.get(statusCode));
        }
    }

    public void processTimeout(javax.sip.TimeoutEvent timeoutEvent) {
        if (LOG.isWarnEnabled()) {
            LOG.warn("TimeoutEvent received at Sip Subscription Listener");
        }
    }

    public void processIOException(IOExceptionEvent exceptionEvent) {
        if (LOG.isWarnEnabled()) {
            LOG.warn("IOExceptionEvent received at SipPresenceAgentListener");
        }
    }

    public void processTransactionTerminated(
            TransactionTerminatedEvent transactionTerminatedEvent) {
        if (LOG.isWarnEnabled()) {
            LOG.warn("TransactionTerminatedEvent received at SipPresenceAgentListener");
        }
    }

    public void processDialogTerminated(
            DialogTerminatedEvent dialogTerminatedEvent) {
        if (LOG.isWarnEnabled()) {
            LOG.warn("DialogTerminatedEvent received at SipPresenceAgentListener");
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.clients.producer; import java.util.ArrayList; import java.util.HashSet; import java.util.Set; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.Metadata; import org.apache.kafka.clients.MockClient; import org.apache.kafka.clients.producer.internals.ProducerInterceptors; import org.apache.kafka.common.Cluster; import org.apache.kafka.common.KafkaException; import org.apache.kafka.common.Node; import org.apache.kafka.common.PartitionInfo; import org.apache.kafka.common.config.ConfigException; import org.apache.kafka.common.errors.InterruptException; import org.apache.kafka.common.errors.InvalidTopicException; import org.apache.kafka.common.errors.TimeoutException; import org.apache.kafka.common.header.internals.RecordHeader; import org.apache.kafka.common.internals.ClusterResourceListeners; import org.apache.kafka.common.metrics.Sensor; import org.apache.kafka.common.network.Selectable; import org.apache.kafka.common.serialization.ByteArraySerializer; import org.apache.kafka.common.serialization.ExtendedSerializer; import org.apache.kafka.common.serialization.Serializer; import org.apache.kafka.common.serialization.StringSerializer; import 
org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.test.MockMetricsReporter;
import org.apache.kafka.test.MockPartitioner;
import org.apache.kafka.test.MockProducerInterceptor;
import org.apache.kafka.test.MockSerializer;
import org.apache.kafka.test.TestUtils;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.easymock.PowerMock;
import org.powermock.api.support.membermodification.MemberModifier;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareOnlyThisForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Unit tests for {@link KafkaProducer} construction, close semantics, metadata
 * fetching, header handling and transactional edge cases. PowerMock is used so
 * that the producer's private {@code metadata}/{@code interceptors}/{@code time}
 * fields can be replaced with mocks via {@link MemberModifier}.
 */
@RunWith(PowerMockRunner.class)
@PowerMockIgnore("javax.management.*")
public class KafkaProducerTest {

    // A producer built with explicit serializers and a valid config must construct and close cleanly.
    @Test
    public void testConstructorWithSerializers() {
        Properties producerProps = new Properties();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
        new KafkaProducer<>(producerProps, new ByteArraySerializer(), new ByteArraySerializer()).close();
    }

    // Without serializers in either the config or the constructor, construction must fail fast.
    @Test(expected = ConfigException.class)
    public void testNoSerializerProvided() {
        Properties producerProps = new Properties();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
        new KafkaProducer(producerProps);
    }

    // When construction fails (unresolvable bootstrap host), resources already created
    // (here: the metrics reporter) must be closed again before the exception propagates.
    @Test
    public void testConstructorFailureCloseResource() {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.CLIENT_ID_CONFIG, "testConstructorClose");
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "some.invalid.hostname.foo.bar.local:9999");
        props.setProperty(ProducerConfig.METRIC_REPORTER_CLASSES_CONFIG, MockMetricsReporter.class.getName());

        final int oldInitCount = MockMetricsReporter.INIT_COUNT.get();
        final int oldCloseCount = MockMetricsReporter.CLOSE_COUNT.get();
        try (KafkaProducer<byte[], byte[]> ignored = new KafkaProducer<>(props, new ByteArraySerializer(), new ByteArraySerializer())) {
            fail("should have caught an exception and returned");
        } catch (KafkaException e) {
            // reporter was both initialised and closed exactly once during the failed construction
            assertEquals(oldInitCount + 1, MockMetricsReporter.INIT_COUNT.get());
            assertEquals(oldCloseCount + 1, MockMetricsReporter.CLOSE_COUNT.get());
            assertEquals("Failed to construct kafka producer", e.getMessage());
        }
    }

    // Closing the producer must close both the key and value serializer (hence +2 on each counter).
    @Test
    public void testSerializerClose() {
        Map<String, Object> configs = new HashMap<>();
        configs.put(ProducerConfig.CLIENT_ID_CONFIG, "testConstructorClose");
        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        configs.put(ProducerConfig.METRIC_REPORTER_CLASSES_CONFIG, MockMetricsReporter.class.getName());
        configs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, CommonClientConfigs.DEFAULT_SECURITY_PROTOCOL);

        final int oldInitCount = MockSerializer.INIT_COUNT.get();
        final int oldCloseCount = MockSerializer.CLOSE_COUNT.get();

        KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(
                configs, new MockSerializer(), new MockSerializer());
        assertEquals(oldInitCount + 2, MockSerializer.INIT_COUNT.get());
        assertEquals(oldCloseCount, MockSerializer.CLOSE_COUNT.get());

        producer.close();
        assertEquals(oldInitCount + 2, MockSerializer.INIT_COUNT.get());
        assertEquals(oldCloseCount + 2, MockSerializer.CLOSE_COUNT.get());
    }

    // Configured interceptors are instantiated on construction and closed on close().
    @Test
    public void testInterceptorConstructClose() {
        try {
            Properties props = new Properties();
            // test with client ID assigned by KafkaProducer
            props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
            props.setProperty(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, MockProducerInterceptor.class.getName());
            props.setProperty(MockProducerInterceptor.APPEND_STRING_PROP, "something");

            KafkaProducer<String, String> producer = new KafkaProducer<>(
                    props, new StringSerializer(), new StringSerializer());
            assertEquals(1, MockProducerInterceptor.INIT_COUNT.get());
            assertEquals(0, MockProducerInterceptor.CLOSE_COUNT.get());

            // Cluster metadata will only be updated on calling onSend.
            Assert.assertNull(MockProducerInterceptor.CLUSTER_META.get());

            producer.close();
            assertEquals(1, MockProducerInterceptor.INIT_COUNT.get());
            assertEquals(1, MockProducerInterceptor.CLOSE_COUNT.get());
        } finally {
            // cleanup since we are using mutable static variables in MockProducerInterceptor
            MockProducerInterceptor.resetCounters();
        }
    }

    // A custom partitioner is instantiated on construction and closed on close().
    @Test
    public void testPartitionerClose() {
        try {
            Properties props = new Properties();
            props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
            MockPartitioner.resetCounters();
            props.setProperty(ProducerConfig.PARTITIONER_CLASS_CONFIG, MockPartitioner.class.getName());

            KafkaProducer<String, String> producer = new KafkaProducer<>(
                    props, new StringSerializer(), new StringSerializer());
            assertEquals(1, MockPartitioner.INIT_COUNT.get());
            assertEquals(0, MockPartitioner.CLOSE_COUNT.get());

            producer.close();
            assertEquals(1, MockPartitioner.INIT_COUNT.get());
            assertEquals(1, MockPartitioner.CLOSE_COUNT.get());
        } finally {
            // cleanup since we are using mutable static variables in MockPartitioner
            MockPartitioner.resetCounters();
        }
    }

    // close() from a thread that gets interrupted must block until interrupted and then
    // surface an InterruptException rather than completing silently.
    @Test
    public void shouldCloseProperlyAndThrowIfInterrupted() throws Exception {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        props.setProperty(ProducerConfig.PARTITIONER_CLASS_CONFIG, MockPartitioner.class.getName());
        props.setProperty(ProducerConfig.BATCH_SIZE_CONFIG, "1");

        Time time = new MockTime();
        Cluster cluster = TestUtils.singletonCluster("topic", 1);
        Node node = cluster.nodes().get(0);

        Metadata metadata = new Metadata(0, Long.MAX_VALUE, true);
        metadata.update(cluster, Collections.emptySet(), time.milliseconds());

        MockClient client = new MockClient(time, metadata);
        client.setNode(node);

        final Producer<String, String> producer = new KafkaProducer<>(
                new ProducerConfig(ProducerConfig.addSerializerToConfig(props, new StringSerializer(), new StringSerializer())),
                new StringSerializer(), new StringSerializer(), metadata, client);
        ExecutorService executor = Executors.newSingleThreadExecutor();
        final AtomicReference<Exception> closeException = new AtomicReference<>();
        try {
            Future<?> future = executor.submit(() -> {
                producer.send(new ProducerRecord<>("topic", "key", "value"));
                try {
                    producer.close();
                    fail("Close should block and throw.");
                } catch (Exception e) {
                    closeException.set(e);
                }
            });

            // Close producer should not complete until send succeeds
            try {
                future.get(100, TimeUnit.MILLISECONDS);
                fail("Close completed without waiting for send");
            } catch (java.util.concurrent.TimeoutException expected) { /* ignore */ }

            // Ensure send has started
            client.waitForRequests(1, 1000);

            assertTrue("Close terminated prematurely", future.cancel(true));

            TestUtils.waitForCondition(() -> closeException.get() != null,
                    "InterruptException did not occur within timeout.");

            assertTrue("Expected exception not thrown " + closeException,
                    closeException.get() instanceof InterruptException);
        } finally {
            executor.shutdownNow();
        }
    }

    // The sentinel USE_DEFAULT_BUFFER_SIZE must be accepted for both socket buffer configs.
    @Test
    public void testOsDefaultSocketBufferSizes() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        config.put(ProducerConfig.SEND_BUFFER_CONFIG, Selectable.USE_DEFAULT_BUFFER_SIZE);
        config.put(ProducerConfig.RECEIVE_BUFFER_CONFIG, Selectable.USE_DEFAULT_BUFFER_SIZE);
        new KafkaProducer<>(config, new ByteArraySerializer(), new ByteArraySerializer()).close();
    }

    // Any negative buffer size other than the sentinel must be rejected at construction.
    @Test(expected = KafkaException.class)
    public void testInvalidSocketSendBufferSize() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        config.put(ProducerConfig.SEND_BUFFER_CONFIG, -2);
        new KafkaProducer<>(config, new ByteArraySerializer(), new ByteArraySerializer());
    }

    @Test(expected = KafkaException.class)
    public void testInvalidSocketReceiveBufferSize() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        config.put(ProducerConfig.RECEIVE_BUFFER_CONFIG, -2);
        new KafkaProducer<>(config, new ByteArraySerializer(), new ByteArraySerializer());
    }

    // send()/partitionsFor() must call metadata.fetch() exactly once per refresh attempt while
    // the topic is missing, and exactly once when the metadata is already available.
    @PrepareOnlyThisForTest(Metadata.class)
    @Test
    public void testMetadataFetch() throws Exception {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        KafkaProducer<String, String> producer = new KafkaProducer<>(props, new StringSerializer(), new StringSerializer());
        Metadata metadata = PowerMock.createNiceMock(Metadata.class);
        MemberModifier.field(KafkaProducer.class, "metadata").set(producer, metadata);

        String topic = "topic";
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, "value");
        Collection<Node> nodes = Collections.singletonList(new Node(0, "host1", 1000));
        final Cluster emptyCluster = new Cluster(null, nodes,
                Collections.emptySet(),
                Collections.emptySet(),
                Collections.emptySet());
        final Cluster cluster = new Cluster(
                "dummy",
                Collections.singletonList(new Node(0, "host1", 1000)),
                Collections.singletonList(new PartitionInfo(topic, 0, null, null, null)),
                Collections.emptySet(),
                Collections.emptySet());

        // Expect exactly one fetch for each attempt to refresh while topic metadata is not available
        final int refreshAttempts = 5;
        EasyMock.expect(metadata.fetch()).andReturn(emptyCluster).times(refreshAttempts - 1);
        EasyMock.expect(metadata.fetch()).andReturn(cluster).once();
        EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes();
        PowerMock.replay(metadata);
        producer.send(record);
        PowerMock.verify(metadata);

        // Expect exactly one fetch if topic metadata is available
        PowerMock.reset(metadata);
        EasyMock.expect(metadata.fetch()).andReturn(cluster).once();
        EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes();
        PowerMock.replay(metadata);
        producer.send(record, null);
        PowerMock.verify(metadata);

        // Expect exactly one fetch if topic metadata is available
        PowerMock.reset(metadata);
        EasyMock.expect(metadata.fetch()).andReturn(cluster).once();
        EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes();
        PowerMock.replay(metadata);
        producer.partitionsFor(topic);
        PowerMock.verify(metadata);
    }

    // Sending to a partition outside the currently-known range forces one extra metadata refresh;
    // the send succeeds only if the refreshed metadata actually contains that partition.
    @PrepareOnlyThisForTest(Metadata.class)
    @Test
    public void testMetadataFetchOnStaleMetadata() throws Exception {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        KafkaProducer<String, String> producer = new KafkaProducer<>(props, new StringSerializer(), new StringSerializer());
        Metadata metadata = PowerMock.createNiceMock(Metadata.class);
        MemberModifier.field(KafkaProducer.class, "metadata").set(producer, metadata);

        String topic = "topic";
        ProducerRecord<String, String> initialRecord = new ProducerRecord<>(topic, "value");
        // Create a record with a partition higher than the initial (outdated) partition range
        ProducerRecord<String, String> extendedRecord = new ProducerRecord<>(topic, 2, null, "value");
        Collection<Node> nodes = Collections.singletonList(new Node(0, "host1", 1000));
        final Cluster emptyCluster = new Cluster(null, nodes,
                Collections.emptySet(),
                Collections.emptySet(),
                Collections.emptySet());
        final Cluster initialCluster = new Cluster(
                "dummy",
                Collections.singletonList(new Node(0, "host1", 1000)),
                Collections.singletonList(new PartitionInfo(topic, 0, null, null, null)),
                Collections.emptySet(),
                Collections.emptySet());
        final Cluster extendedCluster = new Cluster(
                "dummy",
                Collections.singletonList(new Node(0, "host1", 1000)),
                Arrays.asList(
                        new PartitionInfo(topic, 0, null, null, null),
                        new PartitionInfo(topic, 1, null, null, null),
                        new PartitionInfo(topic, 2, null, null, null)),
                Collections.emptySet(),
                Collections.emptySet());

        // Expect exactly one fetch for each attempt to refresh while topic metadata is not available
        final int refreshAttempts = 5;
        EasyMock.expect(metadata.fetch()).andReturn(emptyCluster).times(refreshAttempts - 1);
        EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once();
        EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes();
        PowerMock.replay(metadata);
        producer.send(initialRecord);
        PowerMock.verify(metadata);

        // Expect exactly one fetch if topic metadata is available and records are still within range
        PowerMock.reset(metadata);
        EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once();
        EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes();
        PowerMock.replay(metadata);
        producer.send(initialRecord, null);
        PowerMock.verify(metadata);

        // Expect exactly two fetches if topic metadata is available but metadata response still returns
        // the same partition size (either because metadata are still stale at the broker too or because
        // there weren't any partitions added in the first place).
        PowerMock.reset(metadata);
        EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once();
        EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once();
        EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes();
        PowerMock.replay(metadata);
        try {
            producer.send(extendedRecord, null);
            fail("Expected KafkaException to be raised");
        } catch (KafkaException e) {
            // expected
        }
        PowerMock.verify(metadata);

        // Expect exactly two fetches if topic metadata is available but outdated for the given record
        PowerMock.reset(metadata);
        EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once();
        EasyMock.expect(metadata.fetch()).andReturn(extendedCluster).once();
        EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes();
        PowerMock.replay(metadata);
        producer.send(extendedRecord, null);
        PowerMock.verify(metadata);
    }

    // Even when partitionsFor() times out, the requested topic must stay registered in metadata
    // so a later refresh can still resolve it. A helper thread keeps answering update requests
    // with an empty cluster to force the timeout.
    @Test
    public void testTopicRefreshInMetadata() throws Exception {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        props.setProperty(ProducerConfig.MAX_BLOCK_MS_CONFIG, "600000");
        KafkaProducer<String, String> producer = new KafkaProducer<>(props, new StringSerializer(), new StringSerializer());
        long refreshBackoffMs = 500L;
        long metadataExpireMs = 60000L;
        final Metadata metadata = new Metadata(refreshBackoffMs, metadataExpireMs, true, true, new ClusterResourceListeners());
        final Time time = new MockTime();
        MemberModifier.field(KafkaProducer.class, "metadata").set(producer, metadata);
        MemberModifier.field(KafkaProducer.class, "time").set(producer, time);
        final String topic = "topic";

        Thread t = new Thread(() -> {
            long startTimeMs = System.currentTimeMillis();
            for (int i = 0; i < 10; i++) {
                while (!metadata.updateRequested() && System.currentTimeMillis() - startTimeMs < 1000)
                    Thread.yield();
                metadata.update(Cluster.empty(), Collections.singleton(topic), time.milliseconds());
                time.sleep(60 * 1000L);
            }
        });
        t.start();
        try {
            producer.partitionsFor(topic);
            fail("Expect TimeoutException");
        } catch (TimeoutException e) {
            // skip
        }
        Assert.assertTrue("Topic should still exist in metadata", metadata.containsTopic(topic));
    }

    @SuppressWarnings("unchecked") // safe as generic parameters won't vary
    @PrepareOnlyThisForTest(Metadata.class)
    @Test
    public void testHeadersWithExtendedClasses() throws Exception {
        doTestHeaders(ExtendedSerializer.class);
    }

    @SuppressWarnings("unchecked")
    @PrepareOnlyThisForTest(Metadata.class)
    @Test
    public void testHeaders() throws Exception {
        doTestHeaders(Serializer.class);
    }

    // Shared body for the two header tests above: headers remain mutable until send(), become
    // read-only afterwards, and are passed through to both serializers.
    private <T extends Serializer<String>> void doTestHeaders(Class<T> serializerClassToMock) throws Exception {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        T keySerializer = PowerMock.createNiceMock(serializerClassToMock);
        T valueSerializer = PowerMock.createNiceMock(serializerClassToMock);

        KafkaProducer<String, String> producer = new KafkaProducer<>(props, keySerializer, valueSerializer);
        Metadata metadata = PowerMock.createNiceMock(Metadata.class);
        MemberModifier.field(KafkaProducer.class, "metadata").set(producer, metadata);

        String topic = "topic";
        final Cluster cluster = new Cluster(
                "dummy",
                Collections.singletonList(new Node(0, "host1", 1000)),
                Collections.singletonList(new PartitionInfo(topic, 0, null, null, null)),
                Collections.emptySet(),
                Collections.emptySet());

        EasyMock.expect(metadata.fetch()).andReturn(cluster).anyTimes();

        PowerMock.replay(metadata);

        String value = "value";
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, value);
        EasyMock.expect(keySerializer.serialize(topic, record.headers(), null)).andReturn(null).once();
        EasyMock.expect(valueSerializer.serialize(topic, record.headers(), value)).andReturn(value.getBytes()).once();

        PowerMock.replay(keySerializer);
        PowerMock.replay(valueSerializer);

        //ensure headers can be mutated pre send.
        record.headers().add(new RecordHeader("test", "header2".getBytes()));
        producer.send(record, null);

        //ensure headers are closed and cannot be mutated post send
        try {
            record.headers().add(new RecordHeader("test", "test".getBytes()));
            fail("Expected IllegalStateException to be raised");
        } catch (IllegalStateException ise) {
            //expected
        }

        //ensure existing headers are not changed, and last header for key is still original value
        assertTrue(Arrays.equals(record.headers().lastHeader("test").value(), "header2".getBytes()));

        PowerMock.verify(valueSerializer);
        PowerMock.verify(keySerializer);
    }

    // Calling close() twice must be a no-op the second time, not an error.
    @Test
    public void closeShouldBeIdempotent() {
        Properties producerProps = new Properties();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
        Producer producer = new KafkaProducer<>(producerProps, new ByteArraySerializer(), new ByteArraySerializer());
        producer.close();
        producer.close();
    }

    // The metrics recording level defaults to INFO and honours an explicit DEBUG override.
    @Test
    public void testMetricConfigRecordingLevel() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
        try (KafkaProducer producer = new KafkaProducer<>(props, new ByteArraySerializer(), new ByteArraySerializer())) {
            assertEquals(Sensor.RecordingLevel.INFO, producer.metrics.config().recordLevel());
        }

        props.put(ProducerConfig.METRICS_RECORDING_LEVEL_CONFIG, "DEBUG");
        try (KafkaProducer producer = new KafkaProducer<>(props, new ByteArraySerializer(), new ByteArraySerializer())) {
            assertEquals(Sensor.RecordingLevel.DEBUG, producer.metrics.config().recordLevel());
        }
    }

    // When a record is rejected as too large, the interceptor chain must still be notified via
    // onSendError (with a non-null exception) after onSend ran.
    @PrepareOnlyThisForTest(Metadata.class)
    @Test
    public void testInterceptorPartitionSetOnTooLargeRecord() throws Exception {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        props.setProperty(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "1");
        String topic = "topic";
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, "value");

        KafkaProducer<String, String> producer = new KafkaProducer<>(props, new StringSerializer(), new StringSerializer());
        Metadata metadata = PowerMock.createNiceMock(Metadata.class);
        MemberModifier.field(KafkaProducer.class, "metadata").set(producer, metadata);
        final Cluster cluster = new Cluster(
                "dummy",
                Collections.singletonList(new Node(0, "host1", 1000)),
                Collections.singletonList(new PartitionInfo(topic, 0, null, null, null)),
                Collections.emptySet(),
                Collections.emptySet());
        EasyMock.expect(metadata.fetch()).andReturn(cluster).once();

        // Mock interceptors field
        @SuppressWarnings("unchecked") // it is safe to suppress, since this is a mock class
        ProducerInterceptors<String, String> interceptors = PowerMock.createMock(ProducerInterceptors.class);
        EasyMock.expect(interceptors.onSend(record)).andReturn(record);
        interceptors.onSendError(EasyMock.eq(record), EasyMock.notNull(), EasyMock.notNull());
        EasyMock.expectLastCall();
        MemberModifier.field(KafkaProducer.class, "interceptors").set(producer, interceptors);

        PowerMock.replay(metadata);
        EasyMock.replay(interceptors);
        producer.send(record);

        EasyMock.verify(interceptors);
    }

    // partitionsFor(null) must fail fast with an NPE rather than attempting a metadata fetch.
    @Test
    public void testPartitionsForWithNullTopic() {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
        try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(props, new ByteArraySerializer(), new ByteArraySerializer())) {
            producer.partitionsFor(null);
            fail("Expected NullPointerException to be raised");
        } catch (NullPointerException e) {
            // expected
        }
    }

    // With max.block.ms=5 and no reachable transaction coordinator, initTransactions must time out.
    @Test(expected = TimeoutException.class)
    public void testInitTransactionTimeout() {
        Properties props = new Properties();
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "bad-transaction");
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 5);
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");

        Time time = new MockTime();
        Cluster cluster = TestUtils.singletonCluster("topic", 1);
        Node node = cluster.nodes().get(0);

        Metadata metadata = new Metadata(0, Long.MAX_VALUE, true);
        metadata.update(cluster, Collections.emptySet(), time.milliseconds());

        MockClient client = new MockClient(time, metadata);
        client.setNode(node);

        try (Producer<String, String> producer = new KafkaProducer<>(
                new ProducerConfig(ProducerConfig.addSerializerToConfig(props, new StringSerializer(), new StringSerializer())),
                new StringSerializer(), new StringSerializer(), metadata, client)) {
            producer.initTransactions();
            fail("initTransactions() should have raised TimeoutException");
        }
    }

    // After initTransactions() times out, only close() is allowed; any other transactional call
    // (beginTransaction here) must raise a KafkaException.
    @Test(expected = KafkaException.class)
    public void testOnlyCanExecuteCloseAfterInitTransactionsTimeout() {
        Properties props = new Properties();
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "bad-transaction");
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 5);
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");

        Time time = new MockTime();
        Cluster cluster = TestUtils.singletonCluster("topic", 1);
        Node node = cluster.nodes().get(0);

        Metadata metadata = new Metadata(0, Long.MAX_VALUE, true);
        metadata.update(cluster, Collections.emptySet(), time.milliseconds());

        MockClient client = new MockClient(time, metadata);
        client.setNode(node);

        Producer<String, String> producer = new KafkaProducer<>(
                new ProducerConfig(ProducerConfig.addSerializerToConfig(props, new StringSerializer(), new StringSerializer())),
                new StringSerializer(), new StringSerializer(), metadata, client);
        try {
            producer.initTransactions();
        } catch (TimeoutException e) {
            // expected
        }
        // other transactional operations should not be allowed if we catch the error after initTransactions failed
        try {
            producer.beginTransaction();
        } finally {
            producer.close(0, TimeUnit.MILLISECONDS);
        }
    }

    // A topic name the broker rejects must surface as an InvalidTopicException on the send future
    // and be recorded in the client-side metadata's invalid-topic set.
    @Test
    public void testSendToInvalidTopic() throws Exception {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, "15000");

        Time time = new MockTime();
        Cluster cluster = TestUtils.singletonCluster();
        Node node = cluster.nodes().get(0);

        Metadata metadata = new Metadata(0, Long.MAX_VALUE, true);
        metadata.update(cluster, Collections.emptySet(), time.milliseconds());

        MockClient client = new MockClient(time, metadata);
        client.setNode(node);

        Producer<String, String> producer = new KafkaProducer<>(new ProducerConfig(
                ProducerConfig.addSerializerToConfig(props, new StringSerializer(), new StringSerializer())),
                new StringSerializer(), new StringSerializer(), metadata, client);

        String invalidTopicName = "topic abc"; // Invalid topic name due to space
        ProducerRecord<String, String> record = new ProducerRecord<>(invalidTopicName, "HelloKafka");

        Set<String> invalidTopic = new HashSet<>();
        invalidTopic.add(invalidTopicName);
        Cluster metaDataUpdateResponseCluster = new Cluster(cluster.clusterResource().clusterId(),
                cluster.nodes(),
                new ArrayList<>(0),
                Collections.emptySet(),
                invalidTopic,
                cluster.internalTopics(),
                cluster.controller());
        client.prepareMetadataUpdate(metaDataUpdateResponseCluster, Collections.emptySet());

        Future<RecordMetadata> future = producer.send(record);

        assertEquals("Cluster has incorrect invalid topic list.", metaDataUpdateResponseCluster.invalidTopics(),
                metadata.fetch().invalidTopics());
        TestUtils.assertFutureError(future, InvalidTopicException.class);
    }

    // A send() blocked waiting for metadata must abort with a KafkaException when the producer is
    // closed from another thread, instead of blocking for the full max.block.ms.
    @Test
    public void testCloseWhenWaitingForMetadataUpdate() throws InterruptedException {
        Properties props = new Properties();
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, Long.MAX_VALUE);
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");

        // Simulate a case where metadata for a particular topic is not available. This will cause KafkaProducer#send to
        // block in Metadata#awaitUpdate for the configured max.block.ms. When close() is invoked, KafkaProducer#send should
        // return with a KafkaException.
        String topicName = "test";
        Time time = new MockTime();
        Cluster cluster = TestUtils.singletonCluster();
        Node node = cluster.nodes().get(0);
        Metadata metadata = new Metadata(0, Long.MAX_VALUE, false);
        metadata.update(cluster, Collections.emptySet(), time.milliseconds());
        MockClient client = new MockClient(time, metadata);
        client.setNode(node);

        Producer<String, String> producer = new KafkaProducer<>(
                new ProducerConfig(ProducerConfig.addSerializerToConfig(props, new StringSerializer(), new StringSerializer())),
                new StringSerializer(), new StringSerializer(), metadata, client);

        ExecutorService executor = Executors.newSingleThreadExecutor();
        final AtomicReference<Exception> sendException = new AtomicReference<>();

        try {
            executor.submit(() -> {
                try {
                    // Metadata for topic "test" will not be available which will cause us to block indefinitely until
                    // KafkaProducer#close is invoked.
                    producer.send(new ProducerRecord<>(topicName, "key", "value"));
                    fail();
                } catch (Exception e) {
                    sendException.set(e);
                }
            });

            // Wait until metadata update for the topic has been requested
            TestUtils.waitForCondition(() -> metadata.containsTopic(topicName), "Timeout when waiting for topic to be added to metadata");
            producer.close(0, TimeUnit.MILLISECONDS);
            TestUtils.waitForCondition(() -> sendException.get() != null, "No producer exception within timeout");
            assertEquals(KafkaException.class, sendException.get().getClass());
        } finally {
            executor.shutdownNow();
        }
    }
}
/* * Copyright 2015 University of Oxford * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.ac.ox.it.ords.api.statistics.services.impl.hibernate; import java.math.BigInteger; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.util.List; import java.util.Properties; import javax.persistence.NonUniqueResultException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.criterion.Order; import uk.ac.ox.it.ords.api.statistics.model.OrdsStatistics; import uk.ac.ox.it.ords.api.statistics.services.MessagingService; import uk.ac.ox.it.ords.api.statistics.services.StatisticsService; import uk.ac.ox.it.ords.security.model.DatabaseServer; import uk.ac.ox.it.ords.security.services.ServerConfigurationService; public class StatisticsServiceImpl implements StatisticsService { Logger log = LoggerFactory.getLogger(StatisticsServiceImpl.class); private SessionFactory sessionFactory; public void setSessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; } public StatisticsServiceImpl(){ setSessionFactory(HibernateUtils.getSessionFactory()); } @Override public OrdsStatistics getStatistics() throws Exception { OrdsStatistics statistics = null; Session session = sessionFactory.getCurrentSession(); try { session.beginTransaction(); statistics = (OrdsStatistics) session.createCriteria(OrdsStatistics.class) 
.addOrder(Order.desc("statsId")) .setFirstResult(0) .setMaxResults(1) .uniqueResult(); session.getTransaction().commit(); } catch (NonUniqueResultException n){ log.error("Problem obtaining Statistics record; probably there are no records yet", n); session.getTransaction().rollback(); } catch (RuntimeException e) { log.error("Problem obtaining Statistics record", e); session.getTransaction().rollback(); } finally { HibernateUtils.closeSession(); } return statistics; } protected void create(OrdsStatistics stats){ Session session = sessionFactory.getCurrentSession(); try { session.beginTransaction(); session.save(stats); session.getTransaction().commit(); } catch (RuntimeException e) { log.error("Problem creating Statistics record", e); session.getTransaction().rollback(); } finally { HibernateUtils.closeSession(); } } /** * Gather project-level statistics * @param stats */ protected void getProjectStatistics(OrdsStatistics stats) { BigInteger count = null; Session session = sessionFactory.getCurrentSession(); try { session.beginTransaction(); count = (BigInteger) session.createSQLQuery("select count(*) from project where privateProject=false and deleted=false and trialproject=false").uniqueResult(); stats.setNumberOfOpenProjects(count.intValue()); count = (BigInteger) session.createSQLQuery("select count(*) from project where privateProject=true and deleted=false and trialproject=false").uniqueResult(); stats.setNumberOfClosedProjects(count.intValue()); count = (BigInteger) session.createSQLQuery("select count(*) from project where deleted=false and trialproject=false").uniqueResult(); stats.setNumberOfFullProjects(count.intValue()); count = (BigInteger) session.createSQLQuery("select count(*) from project where deleted=false and trialproject=true").uniqueResult(); stats.setNumberOfTrialProjects(count.intValue()); count = (BigInteger) session.createSQLQuery("select count(*) from project where deleted=false and age(datecreated) < '6 months'").uniqueResult(); 
stats.setNumberOfProjectsInLastSixMonths(count.intValue()); count = (BigInteger) session.createSQLQuery("select count(*) from project where deleted=false and age(datecreated) < '30 days'").uniqueResult(); stats.setNumberOfRecentProjects(count.intValue()); count = (BigInteger) session.createSQLQuery("select count(*) from project where deleted=false").uniqueResult(); stats.setNumberOfProjectsManagedByOrds(count.intValue()); session.getTransaction().commit(); } catch (RuntimeException e) { log.error("Problem creating Statistics record", e); session.getTransaction().rollback(); } finally { HibernateUtils.closeSession(); } } public void computeLatestStatistics() throws Exception { log.debug("computeLatestStats"); int numberOfRecords = 0; // Let's now find a server to use for (DatabaseServer server : ServerConfigurationService.Factory.getInstance().getAllDatabaseServers()) { numberOfRecords += getNumberOfRecordsForServer(server); } OrdsStatistics stats = new OrdsStatistics(); stats.setNumberOfRecordsManagedByOrds(numberOfRecords); // // Gather project stats // getProjectStatistics(stats); create(stats); log.debug("computeLatestStats:return"); } public int getNumberOfRecordsForServer(DatabaseServer server) throws Exception { Long numberOfRecords = 0L; // // Get database list for server // List<String> databases = getDatabases(server); // // Get row count for each database // for (String database : databases){ numberOfRecords += getRecordCountForDatabase(server, database); } return numberOfRecords.intValue(); } private int getRecordCountForDatabase(DatabaseServer server, String database) throws Exception{ int numberOfRecords = 0; String url = server.getUrl(database); Connection conn = null; Properties connectionProps = new Properties(); connectionProps.put("user", server.getUsername()); connectionProps.put("password", server.getPassword()); try { conn = DriverManager.getConnection(url, connectionProps); // // This is the query used internally by PostgreSQL for the ANALYZE 
command. Its // much more up to date than relying on PG_STAT, but also performs better than a // SELECT on each table. // ResultSet rs = conn.prepareStatement("SELECT SUM(c.reltuples) FROM pg_class C LEFT JOIN pg_namespace N ON (N.oid = C.relnamespace) WHERE nspname NOT IN ('pg_catalog', 'information_schema') AND relkind='r';").executeQuery(); rs.next(); numberOfRecords = rs.getInt(1); rs.close(); conn.close(); } finally { if (conn != null) conn.close(); } return numberOfRecords; } @SuppressWarnings("unchecked") private List<String> getDatabases(DatabaseServer server) { List<String> databases = null; Session session = sessionFactory.getCurrentSession(); try { session.beginTransaction(); databases = session.createSQLQuery("SELECT datname FROM pg_database WHERE datistemplate = false AND datname <> 'postgres';").list(); session.getTransaction().commit(); } catch (Exception e) { log.error("Problem creating Statistics record", e); session.getTransaction().rollback(); } finally { HibernateUtils.closeSession(); } return databases; } /* (non-Javadoc) * @see uk.ac.ox.it.ords.api.statistics.services.StatisticsService#generateAndSendStatsEmail() */ public void generateAndSendStatsEmail() throws Exception { log.debug("generateAndSendStatsEmail"); String messageToSend = generateStatsEmail(); MessagingService.Factory.getInstance().sendMessage(messageToSend); } protected String generateStatsEmail() throws Exception{ String messageToSend = "Hi Ords Admin. 
Here are the current statistics:\n\n"; OrdsStatistics stats = getStatistics(); messageToSend += String.format("There are %d projects defined.\n", stats.getNumberOfProjectsManagedByOrds()); messageToSend += String.format("Number of open projects:%d, Number of closed projects: %d.\n", stats.getNumberOfOpenProjects(), stats.getNumberOfClosedProjects()); messageToSend += String.format("Number of full projects:%d, Number of trial projects: %d\n", stats.getNumberOfFullProjects(), stats.getNumberOfTrialProjects()); messageToSend += String.format("%d projects have been created in the last 30 days\n", stats.getNumberOfRecentProjects()); messageToSend += String.format("%d projects have been created in the last 6 months\n", stats.getNumberOfProjectsInLastSixMonths()); messageToSend += "\n"; messageToSend += "\n\nTTFN\n\nYour humble ORDS servant"; return messageToSend; } }
package org.hive2hive.core.utils; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.security.KeyPair; import java.util.ArrayList; import java.util.List; import java.util.Random; import org.hive2hive.core.H2HConstants; import org.hive2hive.core.H2HJUnitTest; import org.hive2hive.core.H2HSession; import org.hive2hive.core.api.H2HNode; import org.hive2hive.core.api.configs.NetworkConfiguration; import org.hive2hive.core.api.interfaces.IFileConfiguration; import org.hive2hive.core.api.interfaces.IH2HNode; import org.hive2hive.core.api.interfaces.INetworkConfiguration; import org.hive2hive.core.exceptions.NoPeerConnectionException; import org.hive2hive.core.network.NetworkManager; import org.hive2hive.core.network.data.PublicKeyManager; import org.hive2hive.core.network.data.UserProfileManager; import org.hive2hive.core.network.data.download.DownloadManager; import org.hive2hive.core.network.data.vdht.LocationsManager; import org.hive2hive.core.processes.login.SessionParameters; import org.hive2hive.core.security.H2HDummyEncryption; import org.hive2hive.core.security.UserCredentials; import org.hive2hive.core.serializer.FSTSerializer; import org.hive2hive.core.utils.helper.TestFileAgent; /** * Helper class for testing. Provides methods for creating, shutdown nodes and some random generators. * * @author Seppi, Nico */ public class NetworkTestUtil { /** * Creates a network with the given number of nodes. First node in the list is the * initial node where all other nodes bootstrapped to him.<br> * <b>Important:</b> After usage please shutdown the network. 
See {@link NetworkTestUtil#shutdownNetwork} * * @param numberOfNodes * size of the network (has to be larger than one) * @return list containing all nodes where the first one is the bootstrapping node (initial) */ public static List<NetworkManager> createNetwork(int numberOfNodes) { IFileConfiguration fileConfig = new TestFileConfiguration(); List<NetworkManager> nodes = new ArrayList<NetworkManager>(numberOfNodes); // create the first node (initial) FSTSerializer serializer = new FSTSerializer(); H2HDummyEncryption encryption = new H2HDummyEncryption(); NetworkManager initial = new NetworkManager(encryption, serializer, fileConfig); INetworkConfiguration netConfig = NetworkConfiguration.createInitialLocalPeer("Node A"); initial.connect(netConfig); nodes.add(initial); // create the other nodes and bootstrap them to the initial peer char letter = 'A'; for (int i = 1; i < numberOfNodes; i++) { NetworkManager node = new NetworkManager(encryption, serializer, fileConfig); INetworkConfiguration otherNetConfig = NetworkConfiguration.createLocalPeer(String.format("Node %s", ++letter), initial.getConnection().getPeer().peer()); node.connect(otherNetConfig); nodes.add(node); } return nodes; } /** * Shutdown a network. * * @param network * list containing all nodes which has to be disconnected. */ public static void shutdownNetwork(List<NetworkManager> network) { if (network != null) { for (NetworkManager networkManager : network) { networkManager.disconnect(false); } } } /** * Generate and assign public/private key pairs to the nodes. 
* * @param network * list containing all nodes which have different key pairs * @throws NoPeerConnectionException * @throws IOException */ public static void setDifferentSessions(List<NetworkManager> network) throws NoPeerConnectionException, IOException { for (NetworkManager node : network) { KeyPair keyPair = H2HJUnitTest.generateRSAKeyPair(H2HConstants.KEYLENGTH_USER_KEYS); KeyPair protectionKeyPair = H2HJUnitTest.generateRSAKeyPair(H2HConstants.KEYLENGTH_PROTECTION); UserCredentials userCredentials = H2HJUnitTest.generateRandomCredentials(); UserProfileManager profileManager = new UserProfileManager(node.getDataManager(), userCredentials); PublicKeyManager keyManager = new PublicKeyManager(userCredentials.getUserId(), keyPair, protectionKeyPair, node.getDataManager()); DownloadManager downloadManager = new DownloadManager(node, new TestFileConfiguration()); LocationsManager locationsManager = new LocationsManager(node.getDataManager(), userCredentials.getUserId(), protectionKeyPair); SessionParameters params = new SessionParameters(new TestFileAgent()); params.setDownloadManager(downloadManager); params.setKeyManager(keyManager); params.setUserProfileManager(profileManager); params.setLocationsManager(locationsManager); node.setSession(new H2HSession(params)); } } /** * Generate and assign a public/private key pair to all nodes. 
* * @param network * list containing all nodes which need to have the same key pair * @throws NoPeerConnectionException * @throws IOException */ public static void setSameSession(List<NetworkManager> network) throws NoPeerConnectionException, IOException { KeyPair keyPair = H2HJUnitTest.generateRSAKeyPair(H2HConstants.KEYLENGTH_USER_KEYS); KeyPair protectionKeys = H2HJUnitTest.generateRSAKeyPair(H2HConstants.KEYLENGTH_USER_KEYS); UserCredentials userCredentials = H2HJUnitTest.generateRandomCredentials(); for (NetworkManager node : network) { UserProfileManager profileManager = new UserProfileManager(node.getDataManager(), userCredentials); PublicKeyManager keyManager = new PublicKeyManager(userCredentials.getUserId(), keyPair, protectionKeys, node.getDataManager()); DownloadManager downloadManager = new DownloadManager(node, new TestFileConfiguration()); LocationsManager locationsManager = new LocationsManager(node.getDataManager(), userCredentials.getUserId(), protectionKeys); SessionParameters params = new SessionParameters(new TestFileAgent()); params.setDownloadManager(downloadManager); params.setKeyManager(keyManager); params.setUserProfileManager(profileManager); params.setLocationsManager(locationsManager); node.setSession(new H2HSession(params)); } } /** * Creates a <code>Hive2Hive</code> network with the given number of nodes. First node in the list is the * initial node where all other nodes bootstrapped to him.<br> * <b>Important:</b> After usage please shutdown the network. 
See {@link NetworkTestUtil#shutdownNetwork} * * @param numberOfNodes * size of the network (has to be larger than one) * @return list containing all Hive2Hive nodes where the first one is the bootstrapping node (initial) */ public static List<IH2HNode> createH2HNetwork(int numberOfNodes) { if (numberOfNodes < 1) throw new IllegalArgumentException("Invalid network size."); List<IH2HNode> nodes = new ArrayList<IH2HNode>(numberOfNodes); // create initial peer FSTSerializer serializer = new FSTSerializer(); H2HDummyEncryption encryption = new H2HDummyEncryption(); IFileConfiguration fileConfig = new TestFileConfiguration(); IH2HNode initial = H2HNode.createNode(fileConfig, encryption, serializer); initial.connect(NetworkConfiguration.createInitial("initial")); nodes.add(initial); try { InetAddress bootstrapAddress = InetAddress.getLocalHost(); for (int i = 1; i < numberOfNodes; i++) { IH2HNode node = H2HNode.createNode(fileConfig, encryption, serializer); node.connect(NetworkConfiguration.create("node " + i, bootstrapAddress)); nodes.add(node); } } catch (UnknownHostException e) { // should not happen } return nodes; } /** * Shutdown a network. * * @param network * list containing all nodes which has to be disconnected. */ public static void shutdownH2HNetwork(List<IH2HNode> network) { for (IH2HNode node : network) { node.disconnect(); } } /** * Selects a random node of the given network * * @param network a list of online peers * @return a random node in the list */ public static NetworkManager getRandomNode(List<NetworkManager> network) { return network.get(new Random().nextInt(network.size())); } }
/* * The MIT License * * Copyright (c) 2004-2011, Sun Microsystems, Inc., Kohsuke Kawaguchi, * Erik Ramfelt, Martin Eigenbrodt, Stephen Connolly, Tom Huybrechts * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson.model; import com.google.common.collect.ImmutableSet; import hudson.DescriptorExtensionList; import hudson.EnvVars; import hudson.FilePath; import hudson.Launcher; import hudson.Launcher.RemoteLauncher; import hudson.Util; import hudson.cli.CLI; import hudson.model.Descriptor.FormException; import hudson.remoting.Callable; import hudson.remoting.Channel; import hudson.remoting.Which; import hudson.slaves.ComputerLauncher; import hudson.slaves.DumbSlave; import hudson.slaves.JNLPLauncher; import hudson.slaves.NodeDescriptor; import hudson.slaves.NodeProperty; import hudson.slaves.NodePropertyDescriptor; import hudson.slaves.RetentionStrategy; import hudson.slaves.SlaveComputer; import hudson.util.ClockDifference; import hudson.util.DescribableList; import hudson.util.FormValidation; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Set; import java.util.jar.JarFile; import java.util.jar.Manifest; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.servlet.ServletException; import jenkins.model.Jenkins; import jenkins.security.MasterToSlaveCallable; import jenkins.slaves.WorkspaceLocator; import jenkins.util.SystemProperties; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.NoExternalUse; import org.kohsuke.stapler.DataBoundSetter; import org.kohsuke.stapler.HttpResponse; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; /** * Information about a Hudson agent node. 
 *
 * <p>
 * Ideally this would have been in the {@code hudson.slaves} package,
 * but for compatibility reasons, it can't.
 *
 * <p>
 * TODO: move out more stuff to {@link DumbSlave}.
 *
 * On February, 2016 a general renaming was done internally: the "slave" term was replaced by
 * "Agent". This change was applied in: UI labels/HTML pages, javadocs and log messages.
 * Java classes, fields, methods, etc were not renamed to avoid compatibility issues.
 * See <a href="https://jenkins-ci.org/issue/27268">JENKINS-27268</a>.
 *
 * @author Kohsuke Kawaguchi
 */
public abstract class Slave extends Node implements Serializable {

    private static final Logger LOGGER = Logger.getLogger(Slave.class.getName());

    /**
     * Name of this agent node.
     */
    protected String name;

    /**
     * Description of this node.
     */
    private String description;

    /**
     * Path to the root of the workspace from the view point of this node, such as "/hudson", this need not
     * be absolute provided that the launcher establishes a consistent working directory, such as "./.jenkins-slave"
     * when used with an SSH launcher.
     *
     * NOTE: if the administrator is using a relative path they are responsible for ensuring that the launcher used
     * provides a consistent working directory
     */
    protected final String remoteFS;

    /**
     * Number of executors of this node.
     */
    private int numExecutors = 2;

    /**
     * Job allocation strategy.
     */
    private Mode mode = Mode.NORMAL;

    /**
     * Agent availability strategy.
     */
    private RetentionStrategy retentionStrategy;

    /**
     * The starter that will startup this agent.
     */
    private ComputerLauncher launcher;

    /**
     * Whitespace-separated labels.
     */
    private String label = "";

    private /*almost final*/ DescribableList<NodeProperty<?>, NodePropertyDescriptor> nodeProperties =
            new DescribableList<>(this);

    /**
     * Lazily computed set of labels from {@link #label}.
     */
    private transient volatile Set<Label> labels;

    /**
     * Id of user which creates this agent {@link User}.
     */
    private String userId;

    /**
     * Use {@link #Slave(String, String, ComputerLauncher)} and set the rest through setters.
     * @deprecated since FIXME
     */
    @Deprecated
    public Slave(String name, String nodeDescription, String remoteFS, String numExecutors,
                 Mode mode, String labelString, ComputerLauncher launcher,
                 RetentionStrategy retentionStrategy, List<? extends NodeProperty<?>> nodeProperties)
            throws FormException, IOException {
        // Delegates to the int-executors constructor; a malformed executor count falls back to 1.
        this(name, nodeDescription, remoteFS, Util.tryParseNumber(numExecutors, 1).intValue(),
                mode, labelString, launcher, retentionStrategy, nodeProperties);
    }

    /**
     * @deprecated since 2009-02-20.
     */
    @Deprecated
    public Slave(String name, String nodeDescription, String remoteFS, int numExecutors,
                 Mode mode, String labelString, ComputerLauncher launcher,
                 RetentionStrategy retentionStrategy) throws FormException, IOException {
        this(name, nodeDescription, remoteFS, numExecutors, mode, labelString, launcher,
                retentionStrategy, new ArrayList());
    }

    /**
     * Preferred constructor: sets only the mandatory fields; configure everything
     * else through the {@code @DataBoundSetter} setters (see the deprecated
     * constructors' javadoc).
     */
    public Slave(@Nonnull String name, String remoteFS, ComputerLauncher launcher)
            throws FormException, IOException {
        this.name = name;
        this.remoteFS = remoteFS;
        this.launcher = launcher;
    }

    /**
     * @deprecated as of 2.2
     *      Use {@link #Slave(String, String, ComputerLauncher)} and set the rest through setters.
     */
    @Deprecated
    public Slave(@Nonnull String name, String nodeDescription, String remoteFS, int numExecutors,
                 Mode mode, String labelString, ComputerLauncher launcher,
                 RetentionStrategy retentionStrategy, List<? extends NodeProperty<?>> nodeProperties)
            throws FormException, IOException {
        this.name = name;
        this.description = nodeDescription;
        this.numExecutors = numExecutors;
        this.mode = mode;
        this.remoteFS = Util.fixNull(remoteFS).trim();
        this.label = Util.fixNull(labelString).trim();
        this.launcher = launcher;
        this.retentionStrategy = retentionStrategy;
        getAssignedLabels();    // compute labels now
        this.nodeProperties.replaceBy(nodeProperties);
        // If a node with this name already exists, keep its creator id; otherwise
        // record the currently authenticated user (or "anonymous").
        Slave node = (Slave) Jenkins.get().getNode(name);
        if (node != null) {
            this.userId = node.getUserId(); //agent has already existed
        } else {
            User user = User.current();
            userId = user != null ? user.getId() : "anonymous";
        }
        if (name.equals(""))
            throw new FormException(Messages.Slave_InvalidConfig_NoName(), null);

//        if (remoteFS.equals(""))
//            throw new FormException(Messages.Slave_InvalidConfig_NoRemoteDir(name), null);

        if (this.numExecutors <= 0)
            throw new FormException(Messages.Slave_InvalidConfig_Executors(name), null);
    }

    /**
     * Return id of user which created this agent
     *
     * @return id of user
     */
    public String getUserId() {
        return userId;
    }

    /** Sets the id of the user who created this agent. */
    public void setUserId(String userId) {
        this.userId = userId;
    }

    public ComputerLauncher getLauncher() {
        // Lazy migration of the pre-1.216 "agentCommand" setting: wrap it in a
        // CommandLauncher (loaded reflectively from the plugin class loader) and persist.
        if (launcher == null && !StringUtils.isEmpty(agentCommand)) {
            try {
                launcher = (ComputerLauncher) Jenkins.get().getPluginManager().uberClassLoader
                        .loadClass("hudson.slaves.CommandLauncher")
                        .getConstructor(String.class, EnvVars.class)
                        .newInstance(agentCommand, null);
                agentCommand = null;
                save();
            } catch (Exception x) {
                LOGGER.log(Level.WARNING, "could not update historical agentCommand setting to CommandLauncher", x);
            }
        }
        // Default launcher does not use Work Directory
        return launcher == null ? new JNLPLauncher(false) : launcher;
    }

    public void setLauncher(ComputerLauncher launcher) {
        this.launcher = launcher;
    }

    public String getRemoteFS() {
        return remoteFS;
    }

    public String getNodeName() {
        return name;
    }

    @Override
    public String toString() {
        return getClass().getName() + "[" + name + "]";
    }

    public void setNodeName(String name) {
        this.name = name;
    }

    @DataBoundSetter
    public void setNodeDescription(String value) {
        this.description = value;
    }

    public String getNodeDescription() {
        return description;
    }

    public int getNumExecutors() {
        return numExecutors;
    }

    @DataBoundSetter
    public void setNumExecutors(int n) {
        this.numExecutors = n;
    }

    public Mode getMode() {
        return mode;
    }

    @DataBoundSetter
    public void setMode(Mode mode) {
        this.mode = mode;
    }

    public DescribableList<NodeProperty<?>, NodePropertyDescriptor> getNodeProperties() {
        assert nodeProperties != null;
        return nodeProperties;
    }

    @DataBoundSetter
    public void setNodeProperties(List<? extends NodeProperty<?>> properties) throws IOException {
        nodeProperties.replaceBy(properties);
    }

    public RetentionStrategy getRetentionStrategy() {
        // Fall back to the always-on strategy when none was configured.
        return retentionStrategy == null ? RetentionStrategy.Always.INSTANCE : retentionStrategy;
    }

    @DataBoundSetter
    public void setRetentionStrategy(RetentionStrategy availabilityStrategy) {
        this.retentionStrategy = availabilityStrategy;
    }

    public String getLabelString() {
        return Util.fixNull(label).trim();
    }

    @Override
    @DataBoundSetter
    public void setLabelString(String labelString) throws IOException {
        this.label = Util.fixNull(labelString).trim();
        // Compute labels now.
        getAssignedLabels();
    }

    @Override
    public Callable<ClockDifference, IOException> getClockDifferenceCallable() {
        return new GetClockDifference1();
    }

    public Computer createComputer() {
        return new SlaveComputer(this);
    }

    public FilePath getWorkspaceFor(TopLevelItem item) {
        // Give WorkspaceLocator extension points the first chance to choose a location.
        for (WorkspaceLocator l : WorkspaceLocator.all()) {
            FilePath workspace = l.locate(item, this);
            if (workspace != null) {
                return workspace;
            }
        }

        FilePath r = getWorkspaceRoot();
        if (r == null)  return null;    // offline
        return r.child(item.getFullName());
    }

    @CheckForNull
    public FilePath getRootPath() {
        final SlaveComputer computer = getComputer();
        if (computer == null) {
            // if computer is null then channel is null and thus we were going to return null anyway
            return null;
        } else {
            return createPath(StringUtils.defaultString(computer.getAbsoluteRemoteFs(), remoteFS));
        }
    }

    /**
     * Root directory on this agent where all the job workspaces are laid out.
     * @return
     *      null if not connected.
     */
    public @CheckForNull FilePath getWorkspaceRoot() {
        FilePath r = getRootPath();
        if (r == null) return null;
        return r.child(WORKSPACE_ROOT);
    }

    /**
     * Web-bound object used to serve jar files for inbound connections.
     */
    public static final class JnlpJar implements HttpResponse {
        private final String fileName;

        public JnlpJar(String fileName) {
            this.fileName = fileName;
        }

        public void doIndex(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
            URLConnection con = connect();
            // since we end up redirecting users to jnlpJars/foo.jar/, set the content disposition
            // so that browsers can download them in the right file name.
            // see http://support.microsoft.com/kb/260519 and http://www.boutell.com/newfaq/creating/forcedownload.html
            rsp.setHeader("Content-Disposition", "attachment; filename=" + fileName);
            InputStream in = con.getInputStream();
            rsp.serveFile(req, in, con.getLastModified(), con.getContentLength(), "*.jar");
            in.close();
        }

        public void generateResponse(StaplerRequest req, StaplerResponse rsp, Object node) throws IOException, ServletException {
            doIndex(req, rsp);
        }

        private URLConnection connect() throws IOException {
            URL res = getURL();
            return res.openConnection();
        }

        public URL getURL() throws IOException {
            String name = fileName;

            // Prevent the access to war contents & prevent the folder escaping (SECURITY-195)
            if (!ALLOWED_JNLPJARS_FILES.contains(name)) {
                throw new MalformedURLException("The specified file path " + fileName + " is not allowed due to security reasons");
            }

            if (name.equals("hudson-cli.jar") || name.equals("jenkins-cli.jar")) {
                File cliJar = Which.jarFile(CLI.class);
                if (cliJar.isFile()) {
                    name = "jenkins-cli.jar";
                } else {
                    // Running from exploded classes (dev mode); look for a sibling jar instead.
                    URL res = findExecutableJar(cliJar, CLI.class);
                    if (res != null) {
                        return res;
                    }
                }
            } else if (name.equals("agent.jar") || name.equals("slave.jar") || name.equals("remoting.jar")) {
                File remotingJar = Which.jarFile(hudson.remoting.Launcher.class);
                if (remotingJar.isFile()) {
                    name = "lib/" + remotingJar.getName();
                } else {
                    URL res = findExecutableJar(remotingJar, hudson.remoting.Launcher.class);
                    if (res != null) {
                        return res;
                    }
                }
            }

            URL res = Jenkins.get().servletContext.getResource("/WEB-INF/" + name);
            if (res == null) {
                throw new FileNotFoundException(name); // giving up
            } else {
                LOGGER.log(Level.FINE, "found {0}", res);
            }
            return res;
        }

        /** Useful for {@code JenkinsRule.createSlave}, {@code hudson-dev:run}, etc. */
        private @CheckForNull URL findExecutableJar(File notActuallyJAR, Class<?> mainClass) throws IOException {
            if (notActuallyJAR.getName().equals("classes")) {
                File[] siblings = notActuallyJAR.getParentFile().listFiles();
                if (siblings != null) {
                    for (File actualJar : siblings) {
                        if (actualJar.getName().endsWith(".jar")) {
                            // A sibling jar whose manifest Main-Class matches is the executable jar we want.
                            try (JarFile jf = new JarFile(actualJar, false)) {
                                Manifest mf = jf.getManifest();
                                if (mf != null && mainClass.getName().equals(mf.getMainAttributes().getValue("Main-Class"))) {
                                    LOGGER.log(Level.FINE, "found {0}", actualJar);
                                    return actualJar.toURI().toURL();
                                }
                            }
                        }
                    }
                }
            }
            return null;
        }

        public byte[] readFully() throws IOException {
            try (InputStream in = connect().getInputStream()) {
                return IOUtils.toByteArray(in);
            }
        }

    }

    /**
     * Creates a launcher for the agent.
     *
     * @return
     *      If there is no computer it will return a {@link hudson.Launcher.DummyLauncher}, otherwise it
     *      will return a {@link hudson.Launcher.RemoteLauncher} instead.
     */
    @Nonnull
    public Launcher createLauncher(TaskListener listener) {
        SlaveComputer c = getComputer();
        if (c == null) {
            listener.error("Issue with creating launcher for agent " + name + ". Computer has been disconnected");
            return new Launcher.DummyLauncher(listener);
        } else {
            // TODO: ideally all the logic below should be inside the SlaveComputer class with proper locking to prevent race conditions,
            // but so far there is no locks for setNode() hence it requires serious refactoring

            // Ensure that the Computer instance still points to this node
            // Otherwise we may end up running the command on a wrong (reconnected) Node instance.
            Slave node = c.getNode();
            if (node != this) {
                String message = "Issue with creating launcher for agent " + name + ". Computer has been reconnected";
                if (LOGGER.isLoggable(Level.WARNING)) {
                    LOGGER.log(Level.WARNING, message, new IllegalStateException("Computer has been reconnected, this Node instance cannot be used anymore"));
                }
                return new Launcher.DummyLauncher(listener);
            }

            // RemoteLauncher requires an active Channel instance to operate correctly
            final Channel channel = c.getChannel();
            if (channel == null) {
                reportLauncherCreateError("The agent has not been fully initialized yet",
                        "No remoting channel to the agent OR it has not been fully initialized yet", listener);
                return new Launcher.DummyLauncher(listener);
            }
            if (channel.isClosingOrClosed()) {
                reportLauncherCreateError("The agent is being disconnected",
                        "Remoting channel is either in the process of closing down or has closed down", listener);
                return new Launcher.DummyLauncher(listener);
            }
            final Boolean isUnix = c.isUnix();
            if (isUnix == null) {
                // isUnix is always set when the channel is not null, so it should never happen
                reportLauncherCreateError("The agent has not been fully initialized yet",
                        "Cannot determing if the agent is a Unix one, the System status request has not completed yet. " +
                        "It is an invalid channel state, please report a bug to Jenkins if you see it.",
                        listener);
                return new Launcher.DummyLauncher(listener);
            }

            return new RemoteLauncher(listener, channel, isUnix).decorateFor(this);
        }
    }

    /**
     * Reports a launcher-creation failure to both the listener and the system log.
     *
     * @param humanReadableMsg short message shown to the build listener
     * @param exceptionDetails extra diagnostics wrapped in the logged exception; may be null
     * @param listener sink for the user-facing error message
     */
    private void reportLauncherCreateError(@Nonnull String humanReadableMsg, @CheckForNull String exceptionDetails, @Nonnull TaskListener listener) {
        String message = "Issue with creating launcher for agent " + name + ". " + humanReadableMsg;
        listener.error(message);
        if (LOGGER.isLoggable(Level.WARNING)) {
            // Send stacktrace to the log as well in order to diagnose the root cause of issues like JENKINS-38527
            LOGGER.log(Level.WARNING, message + "Probably there is a race condition with Agent reconnection or disconnection, check other log entries",
                    new IllegalStateException(exceptionDetails != null ? exceptionDetails : humanReadableMsg));
        }
    }

    /**
     * Gets the corresponding computer object.
     *
     * @return
     *      this method can return null if there's no {@link Computer} object for this node,
     *      such as when this node has no executors at all.
     */
    @CheckForNull
    public SlaveComputer getComputer() {
        return (SlaveComputer) toComputer();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        final Slave that = (Slave) o;

        // Node identity is determined solely by the agent name.
        return name.equals(that.name);
    }

    @Override
    public int hashCode() {
        return name.hashCode();
    }

    /**
     * Invoked by XStream when this object is read into memory.
     */
    protected Object readResolve() {
        // nodeProperties can be null when deserializing data written before the
        // field existed; restore the non-null invariant asserted by getNodeProperties().
        if (nodeProperties == null)
            nodeProperties = new DescribableList<>(this);
        return this;
    }

    public SlaveDescriptor getDescriptor() {
        Descriptor d = Jenkins.get().getDescriptorOrDie(getClass());
        if (d instanceof SlaveDescriptor)
            return (SlaveDescriptor) d;
        throw new IllegalStateException(d.getClass() + " needs to extend from SlaveDescriptor");
    }

    public static abstract class SlaveDescriptor extends NodeDescriptor {
        public FormValidation doCheckNumExecutors(@QueryParameter String value) {
            return FormValidation.validatePositiveInteger(value);
        }

        /**
         * Performs syntactical check on the remote FS for agents.
         */
        public FormValidation doCheckRemoteFS(@QueryParameter String value) throws IOException, ServletException {
            if (Util.fixEmptyAndTrim(value) == null)
                return FormValidation.error(Messages.Slave_Remote_Director_Mandatory());

            // Warn (not error) on UNC paths / NFS automount paths and relative paths.
            if (value.startsWith("\\\\") || value.startsWith("/net/"))
                return FormValidation.warning(Messages.Slave_Network_Mounted_File_System_Warning());

            if (Util.isRelativePath(value)) {
                return FormValidation.warning(Messages.Slave_Remote_Relative_Path_Warning());
            }

            return FormValidation.ok();
        }

        /**
         * Returns the list of {@link ComputerLauncher} descriptors appropriate to the supplied {@link Slave}.
         *
         * @param it the {@link Slave} or {@code null} to assume the slave is of type {@link #clazz}.
         * @return the filtered list
         * @since 2.12
         */
        @Nonnull
        @Restricted(NoExternalUse.class) // intended for use by Jelly EL only (plus hack in DelegatingComputerLauncher)
        public final List<Descriptor<ComputerLauncher>> computerLauncherDescriptors(@CheckForNull Slave it) {
            DescriptorExtensionList<ComputerLauncher, Descriptor<ComputerLauncher>> all =
                    Jenkins.get().getDescriptorList(ComputerLauncher.class);
            return it == null ? DescriptorVisibilityFilter.applyType(clazz, all)
                    : DescriptorVisibilityFilter.apply(it, all);
        }

        /**
         * Returns the list of {@link RetentionStrategy} descriptors appropriate to the supplied {@link Slave}.
         *
         * @param it the {@link Slave} or {@code null} to assume the slave is of type {@link #clazz}.
         * @return the filtered list
         * @since 2.12
         */
        @Nonnull
        @SuppressWarnings("unchecked") // used by Jelly EL only
        @Restricted(NoExternalUse.class) // used by Jelly EL only
        public final List<Descriptor<RetentionStrategy<?>>> retentionStrategyDescriptors(@CheckForNull Slave it) {
            return it == null ? DescriptorVisibilityFilter.applyType(clazz, RetentionStrategy.all())
                    : DescriptorVisibilityFilter.apply(it, RetentionStrategy.all());
        }

        /**
         * Returns the list of {@link NodePropertyDescriptor} appropriate to the supplied {@link Slave}.
         *
         * @param it the {@link Slave} or {@code null} to assume the slave is of type {@link #clazz}.
         * @return the filtered list
         * @since 2.12
         */
        @Nonnull
        @SuppressWarnings("unchecked") // used by Jelly EL only
        @Restricted(NoExternalUse.class) // used by Jelly EL only
        public final List<NodePropertyDescriptor> nodePropertyDescriptors(@CheckForNull Slave it) {
            List<NodePropertyDescriptor> result = new ArrayList<>();
            Collection<NodePropertyDescriptor> list =
                    (Collection) Jenkins.get().getDescriptorList(NodeProperty.class);
            for (NodePropertyDescriptor npd : it == null ? DescriptorVisibilityFilter.applyType(clazz, list)
                    : DescriptorVisibilityFilter.apply(it, list)) {
                if (npd.isApplicable(clazz)) {
                    result.add(npd);
                }
            }
            return result;
        }

    }

    //
    // backward compatibility
    //

    /**
     * Command line to launch the agent, like
     * "ssh myslave java -jar /path/to/hudson-remoting.jar"
     * @deprecated in 1.216
     */
    @Deprecated
    private transient String agentCommand;

    /**
     * Obtains the clock difference between this side and that side of the channel.
     *
     * <p>
     * This is a hack to wrap the whole thing into a simple {@link Callable}.
     *
     * <ol>
     * <li>When the callable is sent to remote, we capture the time (on this side) in {@link GetClockDifference2#startTime}
     * <li>When the other side receives the callable it is {@link GetClockDifference2}.
     * <li>We capture the time on the other side and {@link GetClockDifference3} gets sent from the other side
     * <li>When it's read on this side as a return value, it morphs itself into {@link ClockDifference}.
     * </ol>
     */
    private static final class GetClockDifference1 extends MasterToSlaveCallable<ClockDifference, IOException> {
        public ClockDifference call() {
            // this method must be being invoked locally, which means the clock is in sync
            return new ClockDifference(0);
        }

        private Object writeReplace() {
            return new GetClockDifference2();
        }

        private static final long serialVersionUID = 1L;
    }

    private static final class GetClockDifference2 extends MasterToSlaveCallable<GetClockDifference3, IOException> {
        /**
         * Capture the time on the master when this object is sent to remote, which is when
         * {@link GetClockDifference1#writeReplace()} is run.
         */
        private final long startTime = System.currentTimeMillis();

        public GetClockDifference3 call() {
            return new GetClockDifference3(startTime);
        }

        private static final long serialVersionUID = 1L;
    }

    private static final class GetClockDifference3 implements Serializable {
        private final long remoteTime = System.currentTimeMillis();
        private final long startTime;

        public GetClockDifference3(long startTime) {
            this.startTime = startTime;
        }

        private Object readResolve() {
            long endTime = System.currentTimeMillis();
            // Estimate the master-side time at which remoteTime was sampled as the
            // midpoint of the round trip, then diff against the remote clock.
            return new ClockDifference((startTime + endTime)/2-remoteTime);
        }
    }

    /**
     * Determines the workspace root file name for those who really really need the shortest possible path name.
     */
    private static final String WORKSPACE_ROOT = SystemProperties.getString(Slave.class.getName()+".workspaceRoot","workspace");

    /**
     * Provides a collection of file names, which are accessible via /jnlpJars link.
     */
    private static final Set<String> ALLOWED_JNLPJARS_FILES = ImmutableSet.of("agent.jar", "slave.jar", "remoting.jar", "jenkins-cli.jar", "hudson-cli.jar");
}
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.enhanced_bookmarks;

import android.graphics.Bitmap;
import android.util.LruCache;
import android.util.Pair;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;
import org.chromium.base.ObserverList;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.components.bookmarks.BookmarkId;
import org.chromium.content_public.browser.WebContents;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Access gate to C++ side enhanced bookmarks functionalities.
 *
 * NOTE: method names annotated with {@code @CalledByNative} and all {@code native}
 * declarations are bound by generated JNI glue; do not rename them.
 */
@JNINamespace("enhanced_bookmarks::android")
public final class EnhancedBookmarksBridge {
    // Pointer to the native counterpart; 0 after destroy().
    private long mNativeEnhancedBookmarksBridge;
    private final ObserverList<FiltersObserver> mFilterObservers =
            new ObserverList<FiltersObserver>();
    private final ObserverList<SearchServiceObserver> mSearchObservers =
            new ObserverList<SearchServiceObserver>();
    // Maps a page URL to (image URL, bitmap); null when caching is disabled.
    private LruCache<String, Pair<String, Bitmap>> mSalientImageCache;

    /**
     * Interface for getting result back from SalientImageForUrl function.
     */
    public interface SalientImageCallback {
        /**
         * Callback method for fetching salient image.
         * @param image Salient image. This can be null if the image cannot be found.
         * @param imageUrl Url of the image. Note this is not the same as the url of the website
         *                 containing the image.
         */
        @CalledByNative("SalientImageCallback")
        void onSalientImageReady(Bitmap image, String imageUrl);
    }

    /**
     * Interface to provide consumers notifications to changes in clusters
     */
    public interface FiltersObserver {
        /**
         * Invoked when client detects that filters have been added/removed from the server.
         */
        void onFiltersChanged();
    }

    /**
     * Interface to provide consumers notifications to changes in search service results.
     */
    public interface SearchServiceObserver {
        /**
         * Invoked when client detects that search results have been updated. This callback is
         * guaranteed to be called only once and only for the most recent query.
         */
        void onSearchResultsReturned();
    }

    /**
     * Creates the bridge with an optional in-memory salient-image cache.
     * @param profile Profile used to initialize the native side.
     * @param maxCacheSize Cache budget; cost per entry is the image-URL string length plus the
     *                     bitmap's byte count. A size of 0 disables caching entirely.
     */
    public EnhancedBookmarksBridge(Profile profile, int maxCacheSize) {
        this(profile);
        // Do not initialize LruCache if cache size is set to 0.
        if (maxCacheSize != 0) {
            mSalientImageCache = new LruCache<String, Pair<String, Bitmap>>(maxCacheSize) {
                @Override
                protected int sizeOf(String key, Pair<String, Bitmap> urlImage) {
                    return urlImage.first.length() + urlImage.second.getByteCount();
                }
            };
        }
    }

    /** Creates the bridge without a salient-image cache. */
    public EnhancedBookmarksBridge(Profile profile) {
        mNativeEnhancedBookmarksBridge = nativeInit(profile);
    }

    /**
     * Tears down the native counterpart and releases all cached bitmaps.
     * Must not be called twice (asserted via the native pointer).
     */
    public void destroy() {
        assert mNativeEnhancedBookmarksBridge != 0;
        nativeDestroy(mNativeEnhancedBookmarksBridge);
        mNativeEnhancedBookmarksBridge = 0;

        if (mSalientImageCache != null) {
            // Recycle every cached bitmap before dropping the cache to free native pixel memory.
            for (Map.Entry<String, Pair<String, Bitmap>> entry :
                    mSalientImageCache.snapshot().entrySet()) {
                entry.getValue().second.recycle();
            }
            mSalientImageCache.evictAll();
        }
    }

    /**
     * Adds a folder to the EnhancedBookmarkModel
     * @param parent The parent of this folder
     * @param index The position this folder should appear within the parent
     * @param title The title of the bookmark
     * @return The ID of the newly created folder.
     */
    public BookmarkId addFolder(BookmarkId parent, int index, String title) {
        return nativeAddFolder(mNativeEnhancedBookmarksBridge, parent, index, title);
    }

    /**
     * Adds a Bookmark to the EnhancedBookmarkModel
     * @param parent The parent of this bookmark
     * @param index The position this bookmark should appear within the parent
     * @param title The title of the bookmark
     * @param url URL of the bookmark
     * @return The ID of the newly created bookmark
     */
    public BookmarkId addBookmark(BookmarkId parent, int index, String title, String url) {
        return nativeAddBookmark(mNativeEnhancedBookmarksBridge, parent, index, title, url);
    }

    /**
     * Moves a bookmark to another folder, and append it at the end of the list of all children.
     * @param bookmarkId The item to be be moved
     * @param newParentId The new parent of the item
     */
    public void moveBookmark(BookmarkId bookmarkId, BookmarkId newParentId) {
        nativeMoveBookmark(mNativeEnhancedBookmarksBridge, bookmarkId, newParentId);
    }

    /**
     * Get descriptions of a given bookmark.
     * @param id The id of the bookmark to look at.
     * @return Description of the bookmark. If given a partner bookmark, this method will return an
     *         empty list.
     */
    public String getBookmarkDescription(BookmarkId id) {
        return nativeGetBookmarkDescription(mNativeEnhancedBookmarksBridge, id.getId(),
                id.getType());
    }

    /**
     * Sets the description of the given bookmark.
     */
    public void setBookmarkDescription(BookmarkId id, String description) {
        nativeSetBookmarkDescription(mNativeEnhancedBookmarksBridge, id.getId(), id.getType(),
                description);
    }

    /**
     * Registers a FiltersObserver to listen for filter change notifications.
     * @param observer Observer to add
     */
    public void addFiltersObserver(FiltersObserver observer) {
        mFilterObservers.addObserver(observer);
    }

    /**
     * Unregisters a FiltersObserver from listening to filter change notifications.
     * @param observer Observer to remove
     */
    public void removeFiltersObserver(FiltersObserver observer) {
        mFilterObservers.removeObserver(observer);
    }

    /**
     * Gets all the bookmark ids associated with a filter string.
     * @param filter The filter string
     * @return List of bookmark ids
     */
    public List<BookmarkId> getBookmarksForFilter(String filter) {
        List<BookmarkId> list = new ArrayList<BookmarkId>();
        // The native side fills |list| via addToBookmarkIdList callbacks.
        nativeGetBookmarksForFilter(mNativeEnhancedBookmarksBridge, filter, list);
        return list;
    }

    /**
     * Sends request to search server for querying related bookmarks.
     * @param query Keyword used to find related bookmarks.
     */
    public void sendSearchRequest(String query) {
        nativeSendSearchRequest(mNativeEnhancedBookmarksBridge, query);
    }

    /**
     * Get list of bookmarks as result of a search request that was sent before in
     * {@link EnhancedBookmarksBridge#sendSearchRequest(String)}. Normally this function should be
     * called after {@link SearchServiceObserver#onSearchResultsReturned()}
     * @param query Keyword used to find related bookmarks.
     * @return List of BookmarkIds that are related to query. It will be null if the request is
     *         still on the fly, or empty list if there are no results for the query.
     */
    public List<BookmarkId> getSearchResultsForQuery(String query) {
        return nativeGetSearchResults(mNativeEnhancedBookmarksBridge, query);
    }

    /**
     * Registers a SearchObserver that listens to search request updates.
     * @param observer Observer to add
     */
    public void addSearchObserver(SearchServiceObserver observer) {
        mSearchObservers.addObserver(observer);
    }

    /**
     * Unregisters a SearchObserver that listens to search request updates.
     * @param observer Observer to remove
     */
    public void removeSearchObserver(SearchServiceObserver observer) {
        mSearchObservers.removeObserver(observer);
    }

    /**
     * Request bookmark salient image for the given URL. Please refer to
     * |BookmarkImageService::SalientImageForUrl|.
     * @return True if this method is executed synchronously. False if
     *         {@link SalientImageCallback#onSalientImageReady(Bitmap, String)} is called later
     *         (asynchronously).
     */
    public boolean salientImageForUrl(final String url, final SalientImageCallback callback) {
        assert callback != null;
        SalientImageCallback callbackWrapper = callback;

        if (mSalientImageCache != null) {
            // Cache hit: answer synchronously without touching the native side.
            Pair<String, Bitmap> cached = mSalientImageCache.get(url);
            if (cached != null) {
                callback.onSalientImageReady(cached.second, cached.first);
                return true;
            }
            // Cache miss: wrap the callback so the fetched image is stored before delivery.
            callbackWrapper = new SalientImageCallback() {
                @Override
                public void onSalientImageReady(Bitmap image, String imageUrl) {
                    if (image != null) {
                        mSalientImageCache.put(url, new Pair<String, Bitmap>(imageUrl, image));
                    }
                    callback.onSalientImageReady(image, imageUrl);
                }
            };
        }
        nativeSalientImageForUrl(mNativeEnhancedBookmarksBridge, url, callbackWrapper);
        return false;
    }

    /**
     * Parses the web content of a tab, and stores salient images to local database.
     * @param webContents Contents of the tab that the user is currently in.
     */
    public void fetchImageForTab(WebContents webContents) {
        nativeFetchImageForTab(mNativeEnhancedBookmarksBridge, webContents);
    }

    /**
     * Get all filters associated with the given bookmark.
     *
     * @param bookmark The bookmark to find filters for.
     * @return Array of Strings, each representing a filter. If given a partner bookmark, this
     *         method will return an empty array.
     */
    public String[] getFiltersForBookmark(BookmarkId bookmark) {
        return nativeGetFiltersForBookmark(mNativeEnhancedBookmarksBridge, bookmark.getId(),
                bookmark.getType());
    }

    /**
     * @return Current set of known auto-filters for bookmarks.
     */
    public List<String> getFilters() {
        List<String> list = Arrays.asList(nativeGetFilters(mNativeEnhancedBookmarksBridge));
        return list;
    }

    // Invoked from native code; fans the notification out to registered observers.
    @CalledByNative
    private void onFiltersChanged() {
        for (FiltersObserver observer : mFilterObservers) {
            observer.onFiltersChanged();
        }
    }

    // Invoked from native code when search results arrive. NOTE(review): the name is
    // singular ("Result") while the observer method is plural — the native binding relies
    // on this exact name, so it must stay as-is.
    @CalledByNative
    private void onSearchResultReturned() {
        for (SearchServiceObserver observer : mSearchObservers) {
            observer.onSearchResultsReturned();
        }
    }

    // Factory used by native code to build a Java list it can populate.
    @CalledByNative
    private static List<BookmarkId> createBookmarkIdList() {
        return new ArrayList<BookmarkId>();
    }

    // Used by native code to append one id to a list created via createBookmarkIdList().
    @CalledByNative
    private static void addToBookmarkIdList(List<BookmarkId> bookmarkIdList, long id, int type) {
        bookmarkIdList.add(new BookmarkId(id, type));
    }

    private native long nativeInit(Profile profile);
    private native void nativeDestroy(long nativeEnhancedBookmarksBridge);
    private native String nativeGetBookmarkDescription(long nativeEnhancedBookmarksBridge, long id,
            int type);
    private native void nativeSetBookmarkDescription(long nativeEnhancedBookmarksBridge, long id,
            int type, String description);
    private native void nativeGetBookmarksForFilter(long nativeEnhancedBookmarksBridge,
            String filter, List<BookmarkId> list);
    private native List<BookmarkId> nativeGetSearchResults(long nativeEnhancedBookmarksBridge,
            String query);
    private native String[] nativeGetFilters(long nativeEnhancedBookmarksBridge);
    private native String[] nativeGetFiltersForBookmark(long nativeEnhancedBookmarksBridge,
            long id, int type);
    private native BookmarkId nativeAddFolder(long nativeEnhancedBookmarksBridge, BookmarkId parent,
            int index, String title);
    private native void nativeMoveBookmark(long nativeEnhancedBookmarksBridge,
            BookmarkId bookmarkId, BookmarkId newParentId);
    private native BookmarkId nativeAddBookmark(long nativeEnhancedBookmarksBridge,
            BookmarkId parent, int index, String title, String url);
    private native void nativeSendSearchRequest(long nativeEnhancedBookmarksBridge, String query);
    private native void nativeSalientImageForUrl(long nativeEnhancedBookmarksBridge, String url,
            SalientImageCallback callback);
    private native void nativeFetchImageForTab(long nativeEnhancedBookmarksBridge,
            WebContents webContents);
}
/* Copyright (c) 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.wave.api;

import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.wave.api.JsonRpcConstant.ParamsProperty;
import com.google.wave.api.JsonRpcConstant.RequestProperty;
import com.google.wave.api.OperationRequest.Parameter;
import com.google.wave.api.WaveService.HttpFetcher;
import com.google.wave.api.WaveService.HttpResponse;
import com.google.wave.api.event.BlipContributorsChangedEvent;
import com.google.wave.api.event.BlipSubmittedEvent;
import com.google.wave.api.event.DocumentChangedEvent;
import com.google.wave.api.event.EventType;
import com.google.wave.api.event.WaveletTagsChangedEvent;
import com.google.wave.api.impl.EventMessageBundle;
import com.google.wave.api.impl.GsonFactory;
import com.google.wave.api.impl.WaveletData;

import junit.framework.TestCase;

import net.oauth.http.HttpMessage;

import org.mockito.Matchers;
import org.waveprotocol.wave.model.id.WaveId;
import org.waveprotocol.wave.model.id.WaveletId;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Test cases for {@link AbstractRobot}.
 */
public class AbstractRobotTest extends TestCase {

  private static final WaveId WAVE_1 = WaveId.of("example.com", "wave1");
  private static final WaveletId WAVELET_1 = WaveletId.of("example.com", "wavelet1");

  // Servlet paths the robot is expected to serve.
  private static final String PROFILE_PATH = "/basepath/_wave/robot/profile";
  private static final String CAPABILITIES_XML_PATH = "/basepath/_wave/capabilities.xml";
  private static final String JSONRPC_PATH = "/basepath/_wave/robot/jsonrpc";
  private static final String VERIFY_TOKEN_PATH = "/basepath/_wave/verify_token";

  /**
   * Minimal concrete robot used as a test fixture. Declares a @Capability on
   * onBlipSubmitted so the capabilities XML test has an annotated handler.
   */
  private class MockRobot extends AbstractRobot {

    public MockRobot() {
      super();
    }

    // This method provided to enable mock on HttpFetcher
    public List<JsonRpcResponse> submit(Wavelet wavelet, String rpcServerUrl, WaveService service)
        throws IOException {
      return service.submit(wavelet, rpcServerUrl);
    }

    @Override
    protected String getRobotName() {
      return "Foo";
    }

    @Override
    protected String getRobotProfilePageUrl() {
      return "http://foo.com";
    }

    @Override
    protected String getRobotAvatarUrl() {
      return "http://foo.com/foo.png";
    }

    @Capability(contexts = {Context.PARENT, Context.SELF, Context.CHILDREN}, filter=".*")
    @Override
    public void onBlipSubmitted(BlipSubmittedEvent e) {
      calledEvents.add(e.getType());
    }

    @Override
    public void onDocumentChanged(DocumentChangedEvent e) {
      calledEvents.add(e.getType());
    }
  }

  /**
   * PrintWriter that captures the last written string so tests can inspect
   * what the robot servlet wrote to the response.
   */
  private static class MockWriter extends PrintWriter {

    private String string;

    public MockWriter() {
      super(new StringWriter());
    }

    @Override
    public void write(String string) {
      this.string = string;
    }

    public String getString() {
      return string;
    }
  }

  // Typed wrapper around Matchers.any() for Map arguments.
  private static <K, V> Map<K, V> anyMapOf(Class<K> keyClass, Class<V> valueClass) {
    return Matchers.<Map<K, V>>any();
  }

  // Records which event handlers were invoked during a test.
  private final List<EventType> calledEvents = new ArrayList<EventType>();

  public void testSubmit() throws Exception {
    // Stub the fetcher so submit() gets a canned single-op JSON-RPC response.
    HttpFetcher fetcher = mock(HttpFetcher.class);
    when(fetcher.execute(any(HttpMessage.class), anyMapOf(String.class, Object.class)))
        .thenReturn(new HttpResponse("POST", new URL("http://foo.google.com"), 0,
            new ByteArrayInputStream("[{\"id\":\"op1\",\"data\":{}}]".getBytes())));

    MockRobot robot = new MockRobot();
    robot.setupOAuth("consumerKey", "consumerSecret", "http://gmodules.com/api/rpc");

    WaveService service = new WaveService(fetcher, robot.computeHash());
    service.setupOAuth("consumerKey", "consumerSecret", "http://gmodules.com/api/rpc");

    OperationQueue opQueue = new OperationQueue();
    opQueue.appendOperation(OperationType.ROBOT_NOTIFY,
        Parameter.of(ParamsProperty.CAPABILITIES_HASH, "123"));

    Wavelet wavelet = mock(Wavelet.class);
    when(wavelet.getOperationQueue()).thenReturn(opQueue);

    // submit() should flush the pending operation queue exactly once.
    assertEquals(1, opQueue.getPendingOperations().size());
    robot.submit(wavelet, "http://gmodules.com/api/rpc", service);
    assertEquals(0, opQueue.getPendingOperations().size());
    verify(fetcher, times(1)).execute(any(HttpMessage.class),
        anyMapOf(String.class, Object.class));
  }

  public void testServiceCapabilitiesRequest() throws Exception {
    AbstractRobot robot = new MockRobot() {
      @Override
      public void onBlipContributorsChanged(BlipContributorsChangedEvent e) {
        calledEvents.add(e.getType());
      }
    };

    MockWriter writer = new MockWriter();
    robot.doGet(makeMockRequest(CAPABILITIES_XML_PATH), makeMockResponse(writer));
    String capabilitiesXml = writer.getString();

    // Annotated handler: contexts and filter from the @Capability annotation appear.
    String expectedCapabilityTag =
        "<w:capability name=\"BLIP_SUBMITTED\" context=\"PARENT,SELF,CHILDREN\" filter=\".*\"/>\n";
    assertTrue(capabilitiesXml.contains(expectedCapabilityTag));

    // Un-annotated overrides: bare capability tag, no context/filter attributes.
    expectedCapabilityTag = "<w:capability name=\"DOCUMENT_CHANGED\"/>\n";
    assertTrue(capabilitiesXml.contains(expectedCapabilityTag));

    expectedCapabilityTag = "<w:capability name=\"BLIP_CONTRIBUTORS_CHANGED\"/>\n";
    assertTrue(capabilitiesXml.contains(expectedCapabilityTag));

    // Events the robot does not handle must not be advertised.
    expectedCapabilityTag = "<w:capability name=\"WAVELET_SELF_ADDED\"/>\n";
    assertFalse(capabilitiesXml.contains(expectedCapabilityTag));

    expectedCapabilityTag =
        "<w:capability name=\"WAVELET_SELF_ADDED\" context=\"ROOT,PARENT,CHILDREN\"/>\n";
    assertFalse(capabilitiesXml.contains(expectedCapabilityTag));
  }

  public void testServiceProfileRequest() throws Exception {
    AbstractRobot robot = new MockRobot();

    MockWriter writer = new MockWriter();
    robot.doGet(makeMockRequest(PROFILE_PATH), makeMockResponse(writer));
    String profileJson = writer.getString();
    String expectedProfileJson =
        "{\"address\":\"\",\"name\":\"Foo\",\"imageUrl\":\"http://foo.com/foo.png\","
        + "\"profileUrl\":\"http://foo.com\"}";
    assertEquals(expectedProfileJson, profileJson);
  }

  public void testServiceVerificationTokenRequest() throws Exception {
    AbstractRobot robot = new MockRobot();
    robot.setupVerificationToken("vertoken", "sectoken");

    // With the correct security token the verification token is echoed back.
    MockWriter writer = new MockWriter();
    robot.doGet(makeMockRequest(VERIFY_TOKEN_PATH, "st", "sectoken"), makeMockResponse(writer));
    assertEquals("vertoken", writer.getString());

    // Without the security token the request is rejected as unauthorized.
    HttpServletResponse response = makeMockResponse(new MockWriter());
    robot.doGet(makeMockRequest(VERIFY_TOKEN_PATH), response);
    verify(response).setStatus(HttpURLConnection.HTTP_UNAUTHORIZED);
  }

  // NOTE(review): the "refactor_" prefix keeps this method out of the JUnit 3 runner,
  // which only executes methods whose names start with "test" — presumably parked
  // mid-refactor; confirm whether it should be re-enabled.
  public void refactor_testServiceEventMessageBundleRequest() throws Exception {
    final List<EventType> calledEvents = new ArrayList<EventType>();
    AbstractRobot robot = new AbstractRobot() {
      @Override
      protected String getRobotName() {
        return "Foo";
      }

      @Override
      public String getRobotProfilePageUrl() {
        return "http://code.google.com/apis/wave/";
      }

      @Override
      public void onBlipSubmitted(BlipSubmittedEvent e) {
        calledEvents.add(e.getType());
      }

      @Override
      public void onDocumentChanged(DocumentChangedEvent e) {
        calledEvents.add(e.getType());
      }

      @Override
      public void onWaveletTagsChanged(WaveletTagsChangedEvent e) {
        calledEvents.add(e.getType());
      }

      @Override
      protected String computeHash() {
        return "hash1";
      }
    };

    // Build an incoming event bundle containing three events the robot handles.
    WaveletData waveletData = new WaveletData("google.com!wave1", "google.com!conv+root", "blip1",
        null);
    waveletData.addParticipant("foo@google.com");

    BlipSubmittedEvent event1 = new BlipSubmittedEvent(null, null, "foo@test.com", 1l, "blip1");
    DocumentChangedEvent event2 = new DocumentChangedEvent(null, null, "foo@test.com", 1l,
        "blip1");
    WaveletTagsChangedEvent event3 = new WaveletTagsChangedEvent(null, null, "foo@test.com", 1l,
        "blip1");

    EventMessageBundle bundle = new EventMessageBundle("Foo", "http://gmodules.com/api/rpc");
    bundle.addEvent(event1);
    bundle.addEvent(event2);
    bundle.addEvent(event3);
    bundle.setWaveletData(waveletData);
    String json = new GsonFactory().create().toJson(bundle);

    MockWriter mockWriter = new MockWriter();
    robot.doPost(
        makeMockRequest(JSONRPC_PATH, new BufferedReader(new StringReader(json))),
        makeMockResponse(mockWriter));

    // All three handlers should have fired, in bundle order.
    assertEquals(3, calledEvents.size());
    assertEquals(EventType.BLIP_SUBMITTED, calledEvents.get(0));
    assertEquals(EventType.DOCUMENT_CHANGED, calledEvents.get(1));
    assertEquals(EventType.WAVELET_TAGS_CHANGED, calledEvents.get(2));

    // Assert that the outgoing operation bundle contains robot.notify() op.
    JsonParser jsonParser = new JsonParser();
    JsonArray ops = jsonParser.parse(mockWriter.getString()).getAsJsonArray();
    assertEquals(1, ops.size());

    JsonObject op = ops.get(0).getAsJsonObject();
    assertEquals(OperationType.ROBOT_NOTIFY.method(),
        op.get(RequestProperty.METHOD.key()).getAsString());

    JsonObject params = op.get(RequestProperty.PARAMS.key()).getAsJsonObject();
    assertEquals("0.22", params.get(ParamsProperty.PROTOCOL_VERSION.key()).getAsString());
    assertEquals("hash1", params.get(ParamsProperty.CAPABILITIES_HASH.key()).getAsString());
  }

  public void testBlindWavelet() throws Exception {
    AbstractRobot robot = new MockRobot();
    Wavelet blindWavelet = robot.blindWavelet(WAVE_1, WAVELET_1);
    assertEquals(0, blindWavelet.getOperationQueue().getPendingOperations().size());

    // Mutations on a blind wavelet only enqueue operations; nothing is sent.
    blindWavelet.getParticipants().add("foo@test.com");
    blindWavelet.reply("\n");

    assertEquals(2, blindWavelet.getOperationQueue().getPendingOperations().size());
    assertEquals(OperationType.WAVELET_ADD_PARTICIPANT_NEWSYNTAX.method(),
        blindWavelet.getOperationQueue().getPendingOperations().get(0).getMethod());
    assertEquals(OperationType.WAVELET_APPEND_BLIP.method(),
        blindWavelet.getOperationQueue().getPendingOperations().get(1).getMethod());
  }

  public void testProxiedBlindWavelet() throws Exception {
    AbstractRobot robot = new MockRobot();
    Wavelet blindWavelet = robot.blindWavelet(WAVE_1, WAVELET_1, "proxyid");
    assertEquals(0, blindWavelet.getOperationQueue().getPendingOperations().size());

    blindWavelet.reply("\n");

    // Operations from a proxied wavelet carry the proxying-for id.
    List<OperationRequest> ops = blindWavelet.getOperationQueue().getPendingOperations();
    assertEquals(1, ops.size());
    assertEquals(OperationType.WAVELET_APPEND_BLIP.method(), ops.get(0).getMethod());
    assertEquals("proxyid", ops.get(0).getParameter(ParamsProperty.PROXYING_FOR));

    // Assert that proxy id should be valid.
    try {
      robot.blindWavelet(WAVE_1, WAVELET_1, "foo@bar.com");
      fail("Should have failed since proxy id is not valid.");
    } catch (IllegalArgumentException e) {
      // Expected.
    }
  }

  public void testInitRobot() throws Exception {
    AbstractRobot robot = new MockRobot();
    AbstractRobot spyRobot = spy(robot);
    spyRobot.initRobot();
    // initRobot() must derive the capability map and its hash exactly once each.
    verify(spyRobot).computeCapabilityMap();
    verify(spyRobot).computeHash();
  }

  // Mock request whose body is served from the given reader (used for JSON-RPC POSTs).
  private HttpServletRequest makeMockRequest(String path, BufferedReader reader)
      throws IOException {
    HttpServletRequest request = mock(HttpServletRequest.class);
    when(request.getRequestURI()).thenReturn(path);
    when(request.getReader()).thenReturn(reader);
    return request;
  }

  // Mock request carrying a single query parameter.
  private HttpServletRequest makeMockRequest(String path, String parameterKey,
      String parameterValue) {
    HttpServletRequest request = mock(HttpServletRequest.class);
    when(request.getRequestURI()).thenReturn(path);
    when(request.getParameter(parameterKey)).thenReturn(parameterValue);
    return request;
  }

  // Bare mock request for the given path.
  private HttpServletRequest makeMockRequest(String path) {
    HttpServletRequest request = mock(HttpServletRequest.class);
    when(request.getRequestURI()).thenReturn(path);
    return request;
  }

  // Mock response whose writer is the capturing MockWriter.
  private HttpServletResponse makeMockResponse(MockWriter writer) throws IOException {
    HttpServletResponse response = mock(HttpServletResponse.class);
    when(response.getWriter()).thenReturn(writer);
    return response;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.persistence.pagemem;

import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.pagemem.PageMemory;
import org.apache.ignite.internal.processors.cache.persistence.IgniteCacheDatabaseSharedManager;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Verifies that page memory for a data region is allocated lazily (on first cache use)
 * or eagerly (at node start), depending on {@code DataRegionConfiguration#setLazyMemoryAllocation}.
 */
public class PageMemoryLazyAllocationTest extends GridCommonAbstractTest {
    /** Name of the region whose allocation behavior is under test. */
    public static final String LAZY_REGION = "lazyRegion";

    /** Name of the region hosting the statically configured "my-cache". */
    public static final String EAGER_REGION = "eagerRegion";

    /** Toggled by each test before starting nodes; applied to both regions in getConfiguration(). */
    protected boolean lazyAllocation = true;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setDataStorageConfiguration(new DataStorageConfiguration()
            .setDataRegionConfigurations(
                new DataRegionConfiguration()
                    .setName(LAZY_REGION)
                    .setLazyMemoryAllocation(lazyAllocation)
                    .setPersistenceEnabled(persistenceEnabled()),
                new DataRegionConfiguration()
                    .setName(EAGER_REGION)
                    .setLazyMemoryAllocation(lazyAllocation)
                    .setPersistenceEnabled(persistenceEnabled())));

        // Static cache bound to EAGER_REGION — presumably this is what forces that
        // region to be allocated on servers even in the lazy tests; confirm.
        CacheConfiguration<?, ?> ccfg = new CacheConfiguration<>("my-cache")
            .setDataRegionName(EAGER_REGION);

        cfg.setCacheConfiguration(ccfg);

        return cfg;
    }

    /** @throws Exception If failed. */
    @Test
    public void testLazyMemoryAllocationOnServer() throws Exception {
        lazyAllocation = true;

        IgniteEx srv = startSrv()[0];

        IgniteCacheDatabaseSharedManager db = srv.context().cache().context().database();

        // The region used by the static cache is allocated; the unused one is not.
        checkMemoryAllocated(db.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryNotAllocated(db.dataRegion(LAZY_REGION).pageMemory());

        // First cache created in LAZY_REGION triggers its allocation.
        createCacheAndPut(srv);

        checkMemoryAllocated(db.dataRegion(LAZY_REGION).pageMemory());
    }

    /** @throws Exception If failed. */
    @Test
    public void testLazyMemoryAllocationOnClient() throws Exception {
        lazyAllocation = true;

        IgniteEx srv = startSrv()[0];

        IgniteCacheDatabaseSharedManager srvDb = srv.context().cache().context().database();

        checkMemoryAllocated(srvDb.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryNotAllocated(srvDb.dataRegion(LAZY_REGION).pageMemory());

        IgniteEx clnt = startClientGrid(2);

        IgniteCacheDatabaseSharedManager clntDb = clnt.context().cache().context().database();

        // Clients hold no data, so neither region is allocated on the client.
        checkMemoryNotAllocated(clntDb.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryNotAllocated(clntDb.dataRegion(LAZY_REGION).pageMemory());

        createCacheAndPut(clnt);

        // The cache's data lives on the server, so only the server's lazy region allocates.
        checkMemoryAllocated(srvDb.dataRegion(LAZY_REGION).pageMemory());

        checkMemoryNotAllocated(clntDb.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryNotAllocated(clntDb.dataRegion(LAZY_REGION).pageMemory());
    }

    /** @throws Exception If failed. */
    @Test
    public void testEagerMemoryAllocationOnServer() throws Exception {
        lazyAllocation = false;

        IgniteEx g = startSrv()[0];

        IgniteCacheDatabaseSharedManager db = g.context().cache().context().database();

        // With lazy allocation off, both regions are allocated at node start.
        checkMemoryAllocated(db.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryAllocated(db.dataRegion(LAZY_REGION).pageMemory());

        createCacheAndPut(g);
    }

    /** @throws Exception If failed. */
    @Test
    public void testEagerMemoryAllocationOnClient() throws Exception {
        lazyAllocation = false;

        IgniteEx srv = startSrv()[0];

        IgniteCacheDatabaseSharedManager srvDb = srv.context().cache().context().database();

        checkMemoryAllocated(srvDb.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryAllocated(srvDb.dataRegion(LAZY_REGION).pageMemory());

        IgniteEx clnt = startClientGrid(2);

        IgniteCacheDatabaseSharedManager clntDb = clnt.context().cache().context().database();

        checkMemoryNotAllocated(clntDb.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryNotAllocated(clntDb.dataRegion(LAZY_REGION).pageMemory());

        createCacheAndPut(clnt);

        // Even after cache creation, the client never allocates region memory here.
        checkMemoryNotAllocated(clntDb.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryNotAllocated(clntDb.dataRegion(LAZY_REGION).pageMemory());
    }

    /** @throws Exception If failed. */
    @Test
    public void testLocalCacheOnClientNodeWithLazyAllocation() throws Exception {
        lazyAllocation = true;

        IgniteEx srv = startSrv()[0];

        IgniteCacheDatabaseSharedManager srvDb = srv.context().cache().context().database();

        checkMemoryAllocated(srvDb.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryNotAllocated(srvDb.dataRegion(LAZY_REGION).pageMemory());

        IgniteEx clnt = startClientGrid(2);

        IgniteCacheDatabaseSharedManager clntDb = clnt.context().cache().context().database();

        checkMemoryNotAllocated(clntDb.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryNotAllocated(clntDb.dataRegion(LAZY_REGION).pageMemory());

        createCacheAndPut(clnt, CacheMode.LOCAL);

        checkMemoryNotAllocated(clntDb.dataRegion(EAGER_REGION).pageMemory());

        //LOCAL Cache was created in LAZY_REGION so it has to be allocated on client node.
        checkMemoryAllocated(clntDb.dataRegion(LAZY_REGION).pageMemory());
    }

    /** @throws Exception If failed. */
    @Test
    public void testStopNotAllocatedRegions() throws Exception {
        IgniteEx srv = startSrv()[0];

        IgniteCacheDatabaseSharedManager srvDb = srv.context().cache().context().database();

        checkMemoryAllocated(srvDb.dataRegion(EAGER_REGION).pageMemory());
        checkMemoryNotAllocated(srvDb.dataRegion(LAZY_REGION).pageMemory());

        // Stopping a node with a never-allocated region must not fail.
        stopGrid(0);
    }

    /** */
    @After
    public void after() {
        stopAllGrids();
    }

    /** */
    @Before
    public void before() throws Exception {
        cleanPersistenceDir();
    }

    /** Creates "my-cache-2" in LAZY_REGION with the default cache mode and puts one entry. */
    protected void createCacheAndPut(IgniteEx g) {
        createCacheAndPut(g, CacheConfiguration.DFLT_CACHE_MODE);
    }

    /** */
    private void createCacheAndPut(IgniteEx g, CacheMode cacheMode) {
        createCacheAndPut(g, cacheMode, null);
    }

    /** Creates "my-cache-2" in LAZY_REGION and verifies a basic put/get round trip. */
    private void createCacheAndPut(IgniteEx g, CacheMode cacheMode, IgnitePredicate<ClusterNode> fltr) {
        IgniteCache<Integer, String> cache =
            g.createCache(new CacheConfiguration<Integer, String>("my-cache-2")
                .setCacheMode(cacheMode)
                .setDataRegionName(LAZY_REGION)
                .setNodeFilter(fltr));

        cache.put(1, "test");

        // NOTE(review): arguments are swapped relative to the assertEquals(expected, actual)
        // convention; harmless for the check itself but failure messages will be inverted.
        assertEquals(cache.get(1), "test");
    }

    /** Asserts the region's internal segments array has been created (memory allocated). */
    protected void checkMemoryAllocated(PageMemory pageMem) {
        Object[] segments = GridTestUtils.getFieldValue(pageMem, "segments");

        assertNotNull(segments);
        assertTrue(segments.length > 0);
        assertNotNull(segments[0]);
    }

    /** Asserts the region's internal segments array is still null (memory not allocated). */
    protected void checkMemoryNotAllocated(PageMemory pageMem) {
        Object[] segments = GridTestUtils.getFieldValue(pageMem, "segments");

        assertNull(segments);
    }

    /** Starts two server nodes, activates the cluster and waits for rebalance. */
    protected IgniteEx[] startSrv() throws Exception {
        IgniteEx srv0 = startGrid(0);
        IgniteEx srv1 = startGrid(1);

        srv0.cluster().active(true);

        awaitPartitionMapExchange();

        return new IgniteEx[] {srv0, srv1};
    }

    /** Overridden by persistence-enabled subclasses; this test runs in-memory only. */
    protected boolean persistenceEnabled() {
        return false;
    }
}
/*
 * H2DomainReader.java
 *
 * This file is part of SQL Workbench/J, http://www.sql-workbench.net
 *
 * Copyright 2002-2015, Thomas Kellerer
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * To contact the author please send an email to: support@sql-workbench.net
 *
 */
package workbench.db.h2database;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import workbench.log.LogMgr;
import workbench.resource.Settings;

import workbench.db.ColumnIdentifier;
import workbench.db.DbMetadata;
import workbench.db.DbObject;
import workbench.db.DomainIdentifier;
import workbench.db.ObjectListExtender;
import workbench.db.WbConnection;

import workbench.storage.DataStore;

import workbench.util.CollectionUtil;
import workbench.util.SqlUtil;
import workbench.util.StringUtil;

/**
 * A class to read information about defined DOMAINs in H2.
 *
 * Domains are read from INFORMATION_SCHEMA.DOMAINS and exposed to the
 * object browser through the ObjectListExtender interface.
 *
 * @author Thomas Kellerer
 */
public class H2DomainReader
  implements ObjectListExtender
{
  final String baseSql = "SELECT domain_catalog, \n" +
    " domain_schema, \n" +
    " domain_name, \n" +
    " type_name, \n" +
    " data_type, \n" +
    " precision, \n" +
    " scale, \n" +
    " is_nullable as nullable, \n" +
    " column_default as default_value, \n" +
    " check_constraint as constraint_definition, \n" +
    " remarks \n" +
    " FROM information_schema.domains ";

  /**
   * Build the retrieval SQL, optionally restricted by a name prefix
   * and/or an exact schema name. Both values are quoted through the
   * connection's metadata before being embedded into the statement.
   */
  private String getSql(WbConnection connection, String schema, String name)
  {
    StringBuilder select = new StringBuilder(baseSql.length() + 40);
    select.append(baseSql);

    boolean hasWhere = false;

    if (StringUtil.isNonBlank(name))
    {
      // restrict by name prefix
      select.append(" WHERE domain_name like '");
      select.append(connection.getMetadata().quoteObjectname(name));
      select.append("%' ");
      hasWhere = true;
    }

    if (StringUtil.isNonBlank(schema))
    {
      select.append(hasWhere ? " AND " : " WHERE ");
      select.append(" domain_schema = '");
      select.append(connection.getMetadata().quoteObjectname(schema));
      select.append("'");
    }

    select.append(" ORDER BY 1, 2 ");

    if (Settings.getInstance().getDebugMetadataSql())
    {
      LogMgr.logDebug("H2DomainReader.getSql()", "Using SQL=\n" + select);
    }

    return select.toString();
  }

  /**
   * Retrieve all domains matching the given schema and name pattern.
   *
   * Errors are logged, not propagated; in that case the list collected
   * so far (possibly empty) is returned. A savepoint guards the query so
   * a failed statement does not invalidate the current transaction.
   */
  public List<DomainIdentifier> getDomainList(WbConnection connection, String schemaPattern, String namePattern)
  {
    List<DomainIdentifier> domains = new ArrayList<>();

    Savepoint sp = null;
    Statement stmt = null;
    ResultSet rs = null;
    try
    {
      sp = connection.setSavepoint();
      stmt = connection.createStatementForQuery();
      rs = stmt.executeQuery(getSql(connection, schemaPattern, namePattern));
      while (rs.next())
      {
        domains.add(readDomain(rs));
      }
      connection.releaseSavepoint(sp);
    }
    catch (SQLException e)
    {
      connection.rollback(sp);
      LogMgr.logError("H2DomainReader.getDomainList()", "Could not read domains", e);
    }
    finally
    {
      SqlUtil.closeAll(rs, stmt);
    }
    return domains;
  }

  /** Create a DomainIdentifier from the current row of the domain query. */
  private DomainIdentifier readDomain(ResultSet rs)
    throws SQLException
  {
    String cat = rs.getString("domain_catalog");
    String schema = rs.getString("domain_schema");
    String name = rs.getString("domain_name");
    DomainIdentifier domain = new DomainIdentifier(cat, schema, name);
    domain.setCheckConstraint(rs.getString("constraint_definition"));
    String typeName = rs.getString("type_name");
    int type = rs.getInt("data_type");
    int precision = rs.getInt("precision");
    int scale = rs.getInt("scale");
    domain.setDataType(SqlUtil.getSqlTypeDisplay(typeName, type, scale, precision));
    domain.setNullable(rs.getBoolean("nullable"));
    domain.setDefaultValue(rs.getString("default_value"));
    domain.setComment(rs.getString("remarks"));
    return domain;
  }

  @Override
  public boolean isDerivedType()
  {
    return false;
  }

  /**
   * Return the full definition of the domain identified by the given object,
   * or null if no such domain exists.
   */
  @Override
  public DomainIdentifier getObjectDefinition(WbConnection connection, DbObject object)
  {
    List<DomainIdentifier> domains = getDomainList(connection, object.getSchema(), object.getObjectName());
    return CollectionUtil.isEmpty(domains) ? null : domains.get(0);
  }

  /**
   * Generate a CREATE DOMAIN statement (plus an optional COMMENT ON)
   * that re-creates the given domain.
   *
   * @param domain the domain to generate the SQL for; may be null
   * @return the generated SQL, or null if domain is null
   */
  public String getDomainSource(DomainIdentifier domain)
  {
    if (domain == null) return null;

    StringBuilder source = new StringBuilder(50);
    source.append("CREATE DOMAIN ");
    source.append(domain.getObjectName());
    source.append(" AS ");
    source.append(domain.getDataType());

    if (domain.getDefaultValue() != null)
    {
      source.append("\n DEFAULT ");
      source.append(domain.getDefaultValue());
    }

    boolean hasCheck = StringUtil.isNonBlank(domain.getCheckConstraint());
    if (hasCheck || !domain.isNullable())
    {
      source.append("\n CHECK ");
      if (StringUtil.isNonBlank(domain.getConstraintName()))
      {
        source.append(domain.getConstraintName());
        source.append(" ");
      }
      if (!domain.isNullable())
      {
        source.append("NOT NULL ");
      }
      if (hasCheck)
      {
        source.append(domain.getCheckConstraint());
      }
    }
    source.append(";\n");

    if (StringUtil.isNonBlank(domain.getComment()))
    {
      source.append("\nCOMMENT ON DOMAIN ");
      source.append(domain.getObjectName());
      source.append(" IS '");
      source.append(SqlUtil.escapeQuotes(domain.getComment()));
      source.append("';\n");
    }
    return source.toString();
  }

  /**
   * Append one row per matching domain to the object list DataStore.
   *
   * @return true if at least one domain was added
   */
  @Override
  public boolean extendObjectList(WbConnection con, DataStore result, String catalog, String schema, String objects, String[] requestedTypes)
  {
    if (!DbMetadata.typeIncluded("DOMAIN", requestedTypes)) return false;

    List<DomainIdentifier> domains = getDomainList(con, schema, objects);
    if (domains.isEmpty()) return false;

    for (DomainIdentifier domain : domains)
    {
      int row = result.addRow();
      // H2 domains are listed without a catalog
      result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_CATALOG, null);
      result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_SCHEMA, domain.getSchema());
      result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_NAME, domain.getObjectName());
      result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_REMARKS, domain.getComment());
      result.setValue(row, DbMetadata.COLUMN_IDX_TABLE_LIST_TYPE, domain.getObjectType());
      result.getRow(row).setUserObject(domain);
    }
    return true;
  }

  @Override
  public boolean handlesType(String type)
  {
    return StringUtil.equalStringIgnoreCase("DOMAIN", type);
  }

  @Override
  public boolean handlesType(String[] types)
  {
    // null means "all types"
    if (types == null) return true;
    for (String type : types)
    {
      if (handlesType(type)) return true;
    }
    return false;
  }

  /**
   * Return a single-row DataStore describing the given domain,
   * or null if the object is not a known domain.
   */
  @Override
  public DataStore getObjectDetails(WbConnection con, DbObject object)
  {
    if (object == null) return null;
    if (!handlesType(object.getObjectType())) return null;

    DomainIdentifier domain = getObjectDefinition(con, object);
    if (domain == null) return null;

    String[] columns = new String[] { "DOMAIN", "DATA_TYPE", "NULLABLE", "CONSTRAINT", "REMARKS" };
    int[] types = new int[] { Types.VARCHAR, Types.VARCHAR, Types.BOOLEAN, Types.VARCHAR, Types.VARCHAR };
    int[] sizes = new int[] { 20, 10, 5, 30, 30 };
    DataStore details = new DataStore(columns, types, sizes);
    details.addRow();
    details.setValue(0, 0, domain.getObjectName());
    details.setValue(0, 1, domain.getDataType());
    details.setValue(0, 2, domain.isNullable());
    details.setValue(0, 3, domain.getCheckConstraint());
    details.setValue(0, 4, domain.getComment());
    return details;
  }

  @Override
  public List<String> supportedTypes()
  {
    return Collections.singletonList("DOMAIN");
  }

  @Override
  public String getObjectSource(WbConnection con, DbObject object)
  {
    DomainIdentifier domain = getObjectDefinition(con, object);
    return getDomainSource(domain);
  }

  @Override
  public List<ColumnIdentifier> getColumns(WbConnection con, DbObject object)
  {
    // domains have no columns
    return null;
  }

  @Override
  public boolean hasColumns()
  {
    return false;
  }
}
package org.apache.cassandra.io.util;
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */

import java.io.*;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;

/**
 * A {@link FileDataInput} that reads from one memory-mapped segment of a file.
 * All reads are absolute gets against the mapped buffer, tracked through a
 * local {@code position}; the buffer's own position/limit are never touched,
 * so several instances can safely share one buffer.
 *
 * The DataInput-style methods in the second half of the class were copied
 * from Apache Harmony's RandomAccessFile (see marker comment below).
 */
public class MappedFileDataInput extends InputStream implements FileDataInput
{
    // the mapped segment this input reads from; must be big-endian (asserted in the constructor)
    private final MappedByteBuffer buffer;
    // path of the underlying file, kept for getPath() / diagnostics only
    private final String filename;
    // current read offset WITHIN the mapped segment (not within the whole file)
    private int position;
    // offset remembered by mark(); bytesPastMark() measures from here
    private int markedPosition;
    // absolute offset in the underlying file at which this mapped segment starts
    private final long absoluteStartPosition;

    public MappedFileDataInput(MappedByteBuffer buffer, String filename, long absoluteStartPosition)
    {
        this(buffer, filename, absoluteStartPosition, 0);
    }

    public MappedFileDataInput(MappedByteBuffer buffer, String filename, long absoluteStartPosition, int position)
    {
        assert buffer != null;
        // the multi-byte readers below (getChar/getInt/getLong/...) rely on big-endian order
        assert buffer.order() == ByteOrder.BIG_ENDIAN;
        this.absoluteStartPosition = absoluteStartPosition;
        this.buffer = buffer;
        this.filename = filename;
        this.position = position;
    }

    /** @return the absolute position in the underlying file (segment start + local offset). */
    public long getAbsolutePosition()
    {
        return absoluteStartPosition + position;
    }

    // don't make this public, this is only for seeking WITHIN the current mapped segment
    private void seekInternal(int pos) throws IOException
    {
        position = pos;
    }

    @Override
    public boolean markSupported()
    {
        return true;
    }

    /** Remembers the current position; the read-ahead limit argument is ignored. */
    @Override
    public void mark(int ignored)
    {
        markedPosition = position;
    }

    /** Rewinds to the position remembered by the last mark(). */
    @Override
    public void reset() throws IOException
    {
        seekInternal(markedPosition);
    }

    /** Convenience mark() without a read-ahead limit (the limit is ignored anyway). */
    public void mark()
    {
        mark(-1);
    }

    /** @return the number of bytes read since the last mark(). */
    public int bytesPastMark()
    {
        assert position >= markedPosition;
        return position - markedPosition;
    }

    /** @return true when the read position has reached the end of the mapped segment. */
    public boolean isEOF() throws IOException
    {
        return position == buffer.capacity();
    }

    /**
     * @return true when fewer than {@code len} bytes remain in the segment.
     * NOTE(review): {@code position + len} could overflow int for very large len
     * close to Integer.MAX_VALUE — presumably callers never pass such values; verify.
     */
    public boolean isEOF(int len) throws IOException
    {
        return position + len > buffer.capacity();
    }

    /** @return the path of the underlying file. */
    public String getPath()
    {
        return filename;
    }

    /** Reads one byte as an unsigned value, or returns -1 at end of segment. */
    public int read() throws IOException
    {
        if (isEOF())
            return -1;
        return buffer.get(position++) & 0xFF;
    }

    /**
     * Skips up to {@code n} bytes, clamping at the end of the segment.
     * @return the number of bytes actually skipped (may be less than n at EOF).
     */
    public int skipBytes(int n) throws IOException
    {
        assert n >= 0 : "skipping negative bytes is illegal: " + n;
        if (n == 0)
            return 0;
        int oldPosition = position;
        assert ((long)oldPosition) + n <= Integer.MAX_VALUE;
        position = Math.min(buffer.capacity(), position + n);
        return position - oldPosition;
    }

    /**
     * Skips 8 bytes. If fewer than 8 remain, the position is clamped to the
     * end of the segment and EOFException is thrown.
     */
    public void skipLong() throws IOException
    {
        if ( (position += 8) > buffer.capacity())
        {
            position = buffer.capacity();
            throw new EOFException();
        };
    }

    /*
     !! DataInput methods below are copied from the implementation in Apache Harmony RandomAccessFile.
     */

    /**
     * Reads a boolean from the current position in this file. Any non-zero
     * byte is interpreted as {@code true}.
     *
     * @return the next boolean value from this file.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final boolean readBoolean() throws IOException
    {
        int temp = this.read();
        if (temp < 0)
        {
            throw new EOFException();
        }
        return temp != 0;
    }

    /**
     * Reads an 8-bit byte from the current position in this file.
     *
     * @return the next signed 8-bit byte value from this file.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final byte readByte() throws IOException
    {
        int temp = this.read();
        if (temp < 0)
        {
            throw new EOFException();
        }
        return (byte) temp;
    }

    /**
     * Reads a 16-bit (big-endian) character from the current position.
     * On EOF the position is advanced to the end of the segment first.
     *
     * @return the next char value from this file.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final char readChar() throws IOException
    {
        if ( isEOF(2) )
        {
            skipBytes(2);
            throw new EOFException();
        }
        char c = buffer.getChar(position);
        position += 2;
        return c;
    }

    /**
     * Reads a 64-bit double from the current position in this file.
     *
     * @return the next double value from this file.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final double readDouble() throws IOException
    {
        return Double.longBitsToDouble(readLong());
    }

    /**
     * Reads a 32-bit float from the current position in this file.
     *
     * @return the next float value from this file.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final float readFloat() throws IOException
    {
        return Float.intBitsToFloat(readInt());
    }

    /**
     * Fills {@code buffer} completely with bytes read from the current position.
     * (Note: the parameter shadows the mapped-buffer field of the same name.)
     *
     * @param buffer the buffer to read bytes into.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     * @throws NullPointerException if {@code buffer} is {@code null}.
     */
    public final void readFully(byte[] buffer) throws IOException
    {
        readFully(buffer, 0, buffer.length);
    }

    /**
     * Reads exactly {@code count} bytes into {@code buffer} starting at
     * {@code offset}.
     *
     * @param buffer the buffer to read bytes into.
     * @param offset the initial position in {@code buffer} to store the bytes.
     * @param count the number of bytes to store in {@code buffer}.
     * @throws EOFException if the end of this file is detected.
     * @throws IndexOutOfBoundsException if {@code offset < 0} or {@code count < 0},
     *         or if {@code offset + count} is greater than the length of {@code buffer}.
     * @throws IOException if this file is closed or another I/O error occurs.
     * @throws NullPointerException if {@code buffer} is {@code null}.
     */
    public final void readFully(byte[] buffer, int offset, int count) throws IOException
    {
        // avoid int overflow
        if (offset < 0 || offset > buffer.length || count < 0 || count > buffer.length - offset)
        {
            throw new IndexOutOfBoundsException();
        }

        if (isEOF(count))
        {
            // rare path: consume what is left before signalling EOF
            read(buffer, offset, count);
            throw new EOFException();
        }

        // fast common path: copy byte-by-byte from the mapped segment
        while (count-- > 0)
        {
            buffer[ offset ++ ] = this.buffer.get( position++ );
        }
    }

    /**
     * Reads a 32-bit (big-endian) integer from the current position.
     * On EOF the position is advanced to the end of the segment first.
     *
     * @return the next int value from this file.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final int readInt() throws IOException
    {
        if ( isEOF(4) )
        {
            skipBytes(4);
            throw new EOFException();
        }
        int i = buffer.getInt(position);
        position += 4;
        return i;
    }

    /**
     * Reads a line of text from the current position in this file. A line is
     * represented by zero or more characters followed by {@code '\n'},
     * {@code '\r'}, {@code "\r\n"} or the end of file marker. The string does
     * not include the line terminating sequence.
     *
     * @return the contents of the line or {@code null} if no characters have
     *         been read before the end of the file has been reached.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final String readLine() throws IOException
    {
        StringBuilder line = new StringBuilder(80); // Typical line length
        boolean foundTerminator = false;
        int unreadPosition = 0;
        while (true)
        {
            int nextByte = read();
            switch (nextByte)
            {
                case -1:
                    return line.length() != 0 ? line.toString() : null;
                case (byte) '\r':
                    if (foundTerminator)
                    {
                        // a second '\r' ends the previous "\r"-terminated line
                        seekInternal(unreadPosition);
                        return line.toString();
                    }
                    foundTerminator = true;
                    /* Have to be able to peek ahead one byte */
                    unreadPosition = position;
                    break;
                case (byte) '\n':
                    return line.toString();
                default:
                    if (foundTerminator)
                    {
                        // '\r' followed by a normal byte: un-read it and finish the line
                        seekInternal(unreadPosition);
                        return line.toString();
                    }
                    line.append((char) nextByte);
            }
        }
    }

    /**
     * Reads a 64-bit (big-endian) long from the current position.
     * On EOF the position is advanced to the end of the segment first.
     *
     * @return the next long value from this file.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final long readLong() throws IOException
    {
        if ( isEOF(8) )
        {
            skipBytes(8);
            throw new EOFException();
        }
        long l = buffer.getLong(position);
        position += 8;
        return l;
    }

    /**
     * Reads a 16-bit (big-endian) short from the current position.
     * On EOF the position is advanced to the end of the segment first.
     *
     * @return the next short value from this file.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final short readShort() throws IOException
    {
        if ( isEOF(2) )
        {
            skipBytes(2);
            throw new EOFException();
        }
        short s = buffer.getShort(position);
        position += 2;
        return s;
    }

    /**
     * Reads an unsigned 8-bit byte from the current position and returns it
     * as an integer.
     *
     * @return the next unsigned byte value from this file as an int.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final int readUnsignedByte() throws IOException
    {
        int temp = this.read();
        if (temp < 0)
        {
            throw new EOFException();
        }
        return temp;
    }

    /**
     * Reads an unsigned 16-bit (big-endian) short from the current position
     * and returns it as an integer.
     *
     * @return the next unsigned short value from this file as an int.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     */
    public final int readUnsignedShort() throws IOException
    {
        if ( isEOF(2) )
        {
            skipBytes(2);
            throw new EOFException();
        }
        int i = buffer.getShort(position) & 0xFFFF;
        position += 2;
        return i;
    }

    /**
     * Reads a string encoded in {@link DataInput modified UTF-8} from this
     * file. The byte length of the string is taken from the two bytes read
     * first. On a malformed sequence the remaining declared bytes are skipped
     * before throwing, so the stream stays positioned after the bad string.
     *
     * @return the next string encoded in {@link DataInput modified UTF-8}.
     * @throws EOFException if the end of this file is detected.
     * @throws IOException if this file is closed or another I/O error occurs.
     * @throws UTFDataFormatException if the bytes read cannot be decoded into
     *         a character string.
     */
    public final String readUTF() throws IOException
    {
        int utflen = readUnsignedShort();
        if (isEOF(utflen))
        {
            skipBytes(utflen);
            throw new EOFException();
        }

        char[] chars = null;
        chars = new char[utflen];
        int c, c2, c3;
        int count = 0;
        int chararr_count = 0;

        // fast path: consume leading pure-ASCII prefix without decoding state
        while (count < utflen)
        {
            c = this.buffer.get( position ) & 0xff;
            if (c > 127)
                break;
            chars[count++] = (char) c;
            position++;
        }
        if (count == utflen)
            return new String(chars);

        chararr_count = count;
        while (count < utflen)
        {
            c = this.buffer.get( position++ ) & 0xff;
            // dispatch on the top 4 bits of the lead byte
            switch (c >> 4)
            {
                case 0: case 1: case 2: case 3: case 4: case 5: case 6: case 7:
                    /* 0xxxxxxx*/
                    count++;
                    chars[chararr_count++] = (char) c;
                    break;
                case 12: case 13:
                    /* 110x xxxx   10xx xxxx*/
                    count += 2;
                    if (count > utflen)
                    {
                        skipBytes(1);
                        throw new UTFDataFormatException(
                            "malformed input: partial character at end");
                    }
                    c2 = this.buffer.get( position++ );
                    if ((c2 & 0xC0) != 0x80)
                    {
                        skipBytes(utflen - count);
                        throw new UTFDataFormatException(
                            "malformed input around byte " + count);
                    }
                    chars[chararr_count++] = (char) (((c & 0x1F) << 6) | (c2 & 0x3F));
                    break;
                case 14:
                    /* 1110 xxxx  10xx xxxx  10xx xxxx */
                    count += 3;
                    if (count > utflen)
                    {
                        skipBytes(2);
                        throw new UTFDataFormatException(
                            "malformed input: partial character at end");
                    }
                    c2 = this.buffer.get( position++ );
                    c3 = this.buffer.get( position++ );
                    if (((c2 & 0xC0) != 0x80) || ((c3 & 0xC0) != 0x80))
                    {
                        skipBytes( utflen - count );
                        throw new UTFDataFormatException(
                            "malformed input around byte " + (count-1));
                    }
                    chars[chararr_count++] = (char) (((c & 0x0F) << 12) |
                                                     ((c2 & 0x3F) << 6) |
                                                     ((c3 & 0x3F) << 0));
                    break;
                default:
                    /* 10xx xxxx,  1111 xxxx */
                    skipBytes( utflen - count );
                    throw new UTFDataFormatException(
                        "malformed input around byte " + count);
            }
        }
        // The number of chars produced may be less than utflen
        return new String(chars, 0, chararr_count);
    }
}
/*
 *
 *  *  Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
 *  *
 *  *  Licensed under the Apache License, Version 2.0 (the "License");
 *  *  you may not use this file except in compliance with the License.
 *  *  You may obtain a copy of the License at
 *  *
 *  *       http://www.apache.org/licenses/LICENSE-2.0
 *  *
 *  *  Unless required by applicable law or agreed to in writing, software
 *  *  distributed under the License is distributed on an "AS IS" BASIS,
 *  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  *  See the License for the specific language governing permissions and
 *  *  limitations under the License.
 *  *
 *  * For more information: http://www.orientechnologies.com
 *
 */
package com.orientechnologies.orient.core.storage;

import com.orientechnologies.common.concur.lock.OReadersWriterSpinLock;
import com.orientechnologies.common.concur.resource.*;
import com.orientechnologies.common.exception.OException;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.config.OStorageConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.OCurrentStorageComponentsFactory;
import com.orientechnologies.orient.core.exception.OSecurityException;
import com.orientechnologies.orient.core.metadata.OMetadata;
import com.orientechnologies.orient.core.metadata.OMetadataInternal;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.security.OSecurityShared;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage;

import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Base class for storage implementations: holds the storage name/URL/mode,
 * the shared state/data locks, the lifecycle status and the shared-resource
 * container, and provides common helpers (record counting, locked execution,
 * cluster permission checks).
 */
public abstract class OStorageAbstract implements OStorage, OSharedContainer {
  // Thread group under which all storage threads are created.
  public final static ThreadGroup storageThreadGroup;

  static {
    // Walk up from the current thread's group looking for Orient's own group,
    // so the storage group is parented OUTSIDE of it (one level above).
    // If Orient's group is not an ancestor, fall back to the original group.
    ThreadGroup parentThreadGroup = Thread.currentThread().getThreadGroup();

    final ThreadGroup parentThreadGroupBackup = parentThreadGroup;

    boolean found = false;

    while (parentThreadGroup.getParent() != null) {
      if (parentThreadGroup.equals(Orient.instance().getThreadGroup())) {
        parentThreadGroup = parentThreadGroup.getParent();
        found = true;
        break;
      } else
        parentThreadGroup = parentThreadGroup.getParent();
    }

    if (!found)
      parentThreadGroup = parentThreadGroupBackup;

    storageThreadGroup = new ThreadGroup(parentThreadGroup, "OrientDB Storage");
  }

  // Immutable identity of this storage.
  protected final String url;
  protected final String mode;
  // Guards data operations (shared/exclusive); see callInLock().
  protected final OSharedResourceAdaptiveExternal dataLock;
  // Guards storage lifecycle state transitions.
  protected final OReadersWriterSpinLock stateLock;

  protected volatile OStorageConfiguration configuration;
  protected volatile OCurrentStorageComponentsFactory componentsFactory;
  protected String name;
  // Monotonic change counter, exposed via getVersion().
  protected AtomicLong version = new AtomicLong();
  protected volatile STATUS status = STATUS.CLOSED;

  // Named shared resources attached to this storage (cleared on close()).
  private final OSharedContainerImpl sharedContainer = new OSharedContainerImpl();

  /**
   * @param name    storage name; anything before the last '/' is stripped
   * @param iURL    full URL the storage was opened with
   * @param mode    open mode (implementation specific)
   * @param timeout acquire timeout for the data lock
   * @throws IllegalArgumentException if the resulting name contains a ','
   */
  public OStorageAbstract(final String name, final String iURL, final String mode, final int timeout) {
    if (OStringSerializerHelper.contains(name, '/'))
      this.name = name.substring(name.lastIndexOf("/") + 1);
    else
      this.name = name;

    if (OStringSerializerHelper.contains(name, ','))
      throw new IllegalArgumentException("Invalid character in storage name: " + this.name);

    url = iURL;
    this.mode = mode;

    dataLock = new OSharedResourceAdaptiveExternal(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), timeout, true);
    stateLock = new OReadersWriterSpinLock();
  }

  public abstract OCluster getClusterByName(final String iClusterName);

  /** This storage is not a wrapper, so it is its own underlying storage. */
  public OStorage getUnderlying() {
    return this;
  }

  public OStorageConfiguration getConfiguration() {
    return configuration;
  }

  public boolean isClosed() {
    return status == STATUS.CLOSED;
  }

  /** A record position is valid when it exists and is not a tombstone. */
  public boolean checkForRecordValidity(final OPhysicalPosition ppos) {
    return ppos != null && !ppos.recordVersion.isTombstone();
  }

  public String getName() {
    return name;
  }

  public String getURL() {
    return url;
  }

  public void close() {
    close(false, false);
  }

  /** Releases all named shared resources; subclasses extend with real shutdown. */
  public void close(final boolean iForce, boolean onDelete) {
    sharedContainer.clearResources();
  }

  @Override
  public boolean existsResource(String iName) {
    return sharedContainer.existsResource(iName);
  }

  @Override
  public <T> T removeResource(String iName) {
    return sharedContainer.removeResource(iName);
  }

  @Override
  public <T> T getResource(String iName, Callable<T> iCallback) {
    return sharedContainer.getResource(iName, iCallback);
  }

  /**
   * Returns current storage's version as serial.
   */
  public long getVersion() {
    return version.get();
  }

  /** Drops a cluster by name, delegating to the id-based overload. */
  public boolean dropCluster(final String iClusterName, final boolean iTruncate) {
    return dropCluster(getClusterIdByName(iClusterName), iTruncate);
  }

  /** Sums live (non-tombstone) entries over all clusters. */
  public long countRecords() {
    long tot = 0;

    for (OCluster c : getClusterInstances())
      if (c != null)
        tot += c.getEntries() - c.getTombstonesCount();

    return tot;
  }

  /**
   * Executes the callable while holding the state read lock and the data lock
   * (shared or exclusive). Lock order: stateLock first, then dataLock;
   * released in reverse order. Checked exceptions are wrapped in OException.
   */
  public <V> V callInLock(final Callable<V> iCallable, final boolean iExclusiveLock) {
    stateLock.acquireReadLock();
    try {
      if (iExclusiveLock)
        dataLock.acquireExclusiveLock();
      else
        dataLock.acquireSharedLock();
      try {
        return iCallable.call();
      } catch (RuntimeException e) {
        throw e;
      } catch (Exception e) {
        throw new OException("Error on nested call in lock", e);
      } finally {
        if (iExclusiveLock)
          dataLock.releaseExclusiveLock();
        else
          dataLock.releaseSharedLock();
      }
    } finally {
      stateLock.releaseReadLock();
    }
  }

  @Override
  public String toString() {
    return url != null ? url : "?";
  }

  public STATUS getStatus() {
    return status;
  }

  /**
   * Rejects the operation if any class relying on the cluster has
   * record-level security (ORestricted) enabled.
   *
   * @throws OSecurityException if such a class is found
   */
  public void checkForClusterPermissions(final String iClusterName) {
    // CHECK FOR ORESTRICTED
    OMetadata metaData = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata();
    if (metaData != null) {
      final Set<OClass> classes = ((OMetadataInternal) metaData).getImmutableSchemaSnapshot().getClassesRelyOnCluster(iClusterName);
      for (OClass c : classes) {
        if (c.isSubClassOf(OSecurityShared.RESTRICTED_CLASSNAME))
          throw new OSecurityException("Class " + c.getName()
              + " cannot be truncated because has record level security enabled (extends " + OSecurityShared.RESTRICTED_CLASSNAME
              + ")");
      }
    }
  }

  @Override
  public boolean isDistributed() {
    return false;
  }

  @Override
  public boolean isAssigningClusterIds() {
    return true;
  }

  @Override
  public OCurrentStorageComponentsFactory getComponentsFactory() {
    return componentsFactory;
  }

  @Override
  public long getLastOperationId() {
    // no operation ids at this level; subclasses may override
    return 0;
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.search.Explanation; import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.nested.Nested; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import 
org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.tophits.TopHits; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.highlight.HighlightField; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.nestedQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.nested; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.aggregations.AggregationBuilders.topHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.arrayContaining; import static 
org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;

/**
 * Integration tests for the {@code top_hits} aggregation.
 *
 * Three suite-scoped indices are created once for all tests:
 * <ul>
 *   <li>{@code idx} — 50 "type" docs spread over five terms ("val0".."val4", 10 docs each)
 *       with a numeric sort field, plus nine "field-collapsing" docs grouped by a
 *       "group" keyword field;</li>
 *   <li>{@code empty} — no documents, used to verify the empty-index case;</li>
 *   <li>{@code articles} — docs with a nested "comments" field which itself contains a
 *       nested "reviewers" field, used for the nested top_hits tests.</li>
 * </ul>
 */
@ESIntegTestCase.SuiteScopeTestCase()
public class TopHitsIT extends ESIntegTestCase {

    /** Keyword field the terms aggregations bucket on. */
    private static final String TERMS_AGGS_FIELD = "terms";
    /** Numeric field used for sorting within top_hits. */
    private static final String SORT_FIELD = "sort";

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // Mock script engine backs the scriptField() fetch-feature tests.
        return Collections.singleton(MockScriptEngine.TestPlugin.class);
    }

    /** Randomly picks a terms-agg execution hint, or null for the default. */
    public static String randomExecutionHint() {
        return randomBoolean() ? null : randomFrom(ExecutionMode.values()).toString();
    }

    // Number of "articles" docs indexed with incrementally-sized comment arrays;
    // randomized per suite run but always a multiple of 5 (see setup below).
    static int numArticles;

    @Override
    public void setupSuiteScopeCluster() throws Exception {
        assertAcked(prepareCreate("idx").addMapping("type", TERMS_AGGS_FIELD, "type=keyword", "group", "type=keyword"));
        createIndex("empty");
        // "articles" maps comments as nested, with reviewers nested inside comments
        // (two levels of nesting); comments.message is stored with offsets so that
        // all highlighter types can run against it.
        assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject().startObject("article").startObject("properties")
                .startObject(TERMS_AGGS_FIELD)
                    .field("type", "keyword")
                .endObject()
                .startObject("comments")
                    .field("type", "nested")
                    .startObject("properties")
                        .startObject("user")
                            .field("type", "keyword")
                        .endObject()
                        .startObject("date")
                            .field("type", "long")
                        .endObject()
                        .startObject("message")
                            .field("type", "text")
                            .field("store", true)
                            .field("term_vector", "with_positions_offsets")
                            .field("index_options", "offsets")
                        .endObject()
                        .startObject("reviewers")
                            .field("type", "nested")
                            .startObject("properties")
                                .startObject("name")
                                    .field("type", "keyword")
                                .endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
                .endObject().endObject().endObject()));
        ensureGreen("idx", "empty", "articles");
        List<IndexRequestBuilder> builders = new ArrayList<>();
        // 50 docs: terms field cycles val0..val4 (10 docs each), sort field is 1..50.
        for (int i = 0; i < 50; i++) {
            builders.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource(jsonBuilder()
                    .startObject()
                    .field(TERMS_AGGS_FIELD, "val" + (i / 10))
                    .field(SORT_FIELD, i + 1)
                    .field("text", "some text to entertain")
                    .field("field1", 5)
                    .endObject()));
        }
        // Nine docs in three groups (a: 2, b: 4, c: 3) with varying "term"/"rare"
        // occurrences, used by the field-collapsing and track-scores tests.
        builders.add(client().prepareIndex("idx", "field-collapsing", "1").setSource(jsonBuilder()
                .startObject()
                .field("group", "a")
                .field("text", "term x y z b")
                .endObject()));
        builders.add(client().prepareIndex("idx", "field-collapsing", "2").setSource(jsonBuilder()
                .startObject()
                .field("group", "a")
                .field("text", "term x y z n rare")
                .field("value", 1)
                .endObject()));
        builders.add(client().prepareIndex("idx", "field-collapsing", "3").setSource(jsonBuilder()
                .startObject()
                .field("group", "b")
                .field("text", "x y z term")
                .endObject()));
        builders.add(client().prepareIndex("idx", "field-collapsing", "4").setSource(jsonBuilder()
                .startObject()
                .field("group", "b")
                .field("text", "x y term")
                .endObject()));
        builders.add(client().prepareIndex("idx", "field-collapsing", "5").setSource(jsonBuilder()
                .startObject()
                .field("group", "b")
                .field("text", "x term")
                .endObject()));
        builders.add(client().prepareIndex("idx", "field-collapsing", "6").setSource(jsonBuilder()
                .startObject()
                .field("group", "b")
                .field("text", "term rare")
                .field("value", 3)
                .endObject()));
        builders.add(client().prepareIndex("idx", "field-collapsing", "7").setSource(jsonBuilder()
                .startObject()
                .field("group", "c")
                .field("text", "x y z term")
                .endObject()));
        builders.add(client().prepareIndex("idx", "field-collapsing", "8").setSource(jsonBuilder()
                .startObject()
                .field("group", "c")
                .field("text", "x y term b")
                .endObject()));
        builders.add(client().prepareIndex("idx", "field-collapsing", "9").setSource(jsonBuilder()
                .startObject()
                .field("group", "c")
                .field("text", "rare x term")
                .field("value", 2)
                .endObject()));

        // Articles with date i and i comments each; a multiple of 5 so the histogram
        // test can bucket them evenly with interval 5.
        numArticles = scaledRandomIntBetween(10, 100);
        numArticles -= (numArticles % 5);
        for (int i = 0; i < numArticles; i++) {
            // Randomize the source content type so parsing is exercised across formats.
            XContentBuilder builder = randomFrom(jsonBuilder(), yamlBuilder(), smileBuilder());
            builder.startObject().field("date", i).startArray("comments");
            for (int j = 0; j < i; j++) {
                String user = Integer.toString(j);
                builder.startObject().field("id", j).field("user", user).field("message", "some text").endObject();
            }
            builder.endArray().endObject();
            builders.add(
                    client().prepareIndex("articles", "article").setCreate(true).setSource(builder)
            );
        }

        // Two fixed articles with reviewers nested inside comments, used by the
        // second-layer-nested and nested fetch-feature tests.
        builders.add(
                client().prepareIndex("articles", "article", "1")
                        .setSource(jsonBuilder().startObject().field("title", "title 1").field("body", "some text").startArray("comments")
                                .startObject()
                                        .field("user", "a").field("date", 1L).field("message", "some comment")
                                        .startArray("reviewers")
                                                .startObject().field("name", "user a").endObject()
                                                .startObject().field("name", "user b").endObject()
                                                .startObject().field("name", "user c").endObject()
                                        .endArray()
                                .endObject()
                                .startObject()
                                        .field("user", "b").field("date", 2L).field("message", "some other comment")
                                        .startArray("reviewers")
                                                .startObject().field("name", "user c").endObject()
                                                .startObject().field("name", "user d").endObject()
                                                .startObject().field("name", "user e").endObject()
                                        .endArray()
                                .endObject()
                                .endArray().endObject())
        );
        builders.add(
                client().prepareIndex("articles", "article", "2")
                        .setSource(jsonBuilder().startObject().field("title", "title 2").field("body", "some different text").startArray("comments")
                                .startObject()
                                        .field("user", "b").field("date", 3L).field("message", "some comment")
                                        .startArray("reviewers")
                                                .startObject().field("name", "user f").endObject()
                                        .endArray()
                                .endObject()
                                .startObject().field("user", "c").field("date", 4L).field("message", "some other comment").endObject()
                                .endArray().endObject())
        );

        indexRandom(true, builders);
        ensureSearchable();
    }

    /** Convenience accessor for a bucket's key as a string. */
    private String key(Terms.Bucket bucket) {
        return bucket.getKeyAsString();
    }

    /**
     * top_hits under a terms agg: each of the 5 buckets returns its default 3 hits,
     * sorted descending on the sort field (values 10,9,8 then 20,19,18, ...).
     */
    public void testBasics() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .setTypes("type")
                .addAggregation(terms("terms")
                        .executionHint(randomExecutionHint())
                        .field(TERMS_AGGS_FIELD)
                        .subAggregation(
                                topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))
                        )
                )
                .get();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(5));
        long higestSortValue = 0;
        for (int i = 0; i < 5; i++) {
            Terms.Bucket bucket = terms.getBucketByKey("val" + i);
            assertThat(bucket, notNullValue());
            assertThat(key(bucket), equalTo("val" + i));
            assertThat(bucket.getDocCount(), equalTo(10L));
            TopHits topHits = bucket.getAggregations().get("hits");
            SearchHits hits = topHits.getHits();
            assertThat(hits.totalHits(), equalTo(10L));
            assertThat(hits.getHits().length, equalTo(3));
            // Bucket "val<i>" covers sort values (10*i+1)..(10*i+10); top hit is the max.
            higestSortValue += 10;
            assertThat((Long) hits.getAt(0).sortValues()[0], equalTo(higestSortValue));
            assertThat((Long) hits.getAt(1).sortValues()[0], equalTo(higestSortValue - 1));
            assertThat((Long) hits.getAt(2).sortValues()[0], equalTo(higestSortValue - 2));
            assertThat(hits.getAt(0).sourceAsMap().size(), equalTo(4));
        }
    }

    /**
     * Regression test for issue #11119: top_hits must still be fed scores when the
     * main query requests size=0, and min_score must work in the same situation.
     */
    public void testIssue11119() throws Exception {
        // Test that top_hits aggregation is fed scores if query results size=0
        SearchResponse response = client()
                .prepareSearch("idx")
                .setTypes("field-collapsing")
                .setSize(0)
                .setQuery(matchQuery("text", "x y z"))
                .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits")))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(8L));
        assertThat(response.getHits().hits().length, equalTo(0));
        assertThat(response.getHits().maxScore(), equalTo(0f));
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(3));
        for (Terms.Bucket bucket : terms.getBuckets()) {
            assertThat(bucket, notNullValue());
            TopHits topHits = bucket.getAggregations().get("hits");
            SearchHits hits = topHits.getHits();
            // Hits inside each bucket must be score-ordered and carry real scores.
            float bestScore = Float.MAX_VALUE;
            for (int h = 0; h < hits.getHits().length; h++) {
                float score = hits.getAt(h).getScore();
                assertThat(score, lessThanOrEqualTo(bestScore));
                assertThat(score, greaterThan(0f));
                bestScore = hits.getAt(h).getScore();
            }
        }

        // Also check that min_score setting works when size=0
        // (technically not a test of top_hits but implementation details are
        // tied up with the need to feed scores into the agg tree even when
        // users don't want ranked set of query results.)
        response = client()
                .prepareSearch("idx")
                .setTypes("field-collapsing")
                .setSize(0)
                .setMinScore(0.0001f)
                .setQuery(matchQuery("text", "x y z"))
                .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group"))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(8L));
        assertThat(response.getHits().hits().length, equalTo(0));
        assertThat(response.getHits().maxScore(), equalTo(0f));
        terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(3));
    }

    /**
     * Requesting breadth_first collection with a top_hits sub-agg: the hint is
     * ignored (scores are needed) and results must still be correct.
     */
    public void testBreadthFirst() throws Exception {
        // breadth_first will be ignored since we need scores
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .addAggregation(terms("terms")
                        .executionHint(randomExecutionHint())
                        .collectMode(SubAggCollectionMode.BREADTH_FIRST)
                        .field(TERMS_AGGS_FIELD)
                        .subAggregation(topHits("hits").size(3))
                ).get();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(5));
        for (int i = 0; i < 5; i++) {
            Terms.Bucket bucket = terms.getBucketByKey("val" + i);
            assertThat(bucket, notNullValue());
            assertThat(key(bucket), equalTo("val" + i));
            assertThat(bucket.getDocCount(), equalTo(10L));
            TopHits topHits = bucket.getAggregations().get("hits");
            SearchHits hits = topHits.getHits();
            assertThat(hits.totalHits(), equalTo(10L));
            assertThat(hits.getHits().length, equalTo(3));
            assertThat(hits.getAt(0).sourceAsMap().size(), equalTo(4));
        }
    }

    /** getProperty on a containing agg must return the same top_hits instance. */
    public void testBasicsGetProperty() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(global("global").subAggregation(topHits("hits"))).execute().actionGet();
        assertSearchResponse(searchResponse);
        Global global = searchResponse.getAggregations().get("global");
        assertThat(global, notNullValue());
        assertThat(global.getName(), equalTo("global"));
        assertThat(global.getAggregations(), notNullValue());
        assertThat(global.getAggregations().asMap().size(), equalTo(1));
        TopHits topHits = global.getAggregations().get("hits");
        assertThat(topHits, notNullValue());
        assertThat(topHits.getName(), equalTo("hits"));
        assertThat((TopHits) global.getProperty("hits"), sameInstance(topHits));
    }

    /**
     * from/size inside top_hits must page exactly like a plain search filtered to the
     * same bucket ("val0"), compared hit-by-hit against that control search.
     */
    public void testPagination() throws Exception {
        int size = randomIntBetween(1, 10);
        int from = randomIntBetween(0, 10);
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .addAggregation(terms("terms")
                        .executionHint(randomExecutionHint())
                        .field(TERMS_AGGS_FIELD)
                        .subAggregation(
                                topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))
                                        .from(from)
                                        .size(size)
                        )
                )
                .get();
        assertSearchResponse(response);

        // Control: a regular paged search restricted to the same term ("val0").
        SearchResponse control = client().prepareSearch("idx")
                .setTypes("type")
                .setFrom(from)
                .setSize(size)
                .setPostFilter(QueryBuilders.termQuery(TERMS_AGGS_FIELD, "val0"))
                .addSort(SORT_FIELD, SortOrder.DESC)
                .get();
        assertSearchResponse(control);
        SearchHits controlHits = control.getHits();

        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(5));
        Terms.Bucket bucket = terms.getBucketByKey("val0");
        assertThat(bucket, notNullValue());
        assertThat(bucket.getDocCount(), equalTo(10L));
        TopHits topHits = bucket.getAggregations().get("hits");
        SearchHits hits = topHits.getHits();
        assertThat(hits.totalHits(), equalTo(controlHits.totalHits()));
        assertThat(hits.getHits().length, equalTo(controlHits.getHits().length));
        for (int i = 0; i < hits.getHits().length; i++) {
            logger.info("{}: top_hits: [{}][{}] control: [{}][{}]", i, hits.getAt(i).id(), hits.getAt(i).sortValues()[0], controlHits.getAt(i).id(), controlHits.getAt(i).sortValues()[0]);
            assertThat(hits.getAt(i).id(), equalTo(controlHits.getAt(i).id()));
            assertThat(hits.getAt(i).sortValues()[0], equalTo(controlHits.getAt(i).sortValues()[0]));
        }
    }

    /**
     * Buckets ordered by a sibling max agg (descending): val4..val0, and the top_hits
     * inside each bucket stay consistent with the bucket's max sort value.
     */
    public void testSortByBucket() throws Exception {
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .addAggregation(terms("terms")
                        .executionHint(randomExecutionHint())
                        .field(TERMS_AGGS_FIELD)
                        .order(Terms.Order.aggregation("max_sort", false))
                        .subAggregation(
                                topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).trackScores(true)
                        )
                        .subAggregation(
                                max("max_sort").field(SORT_FIELD)
                        )
                )
                .get();
        assertSearchResponse(response);

        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(5));
        long higestSortValue = 50;
        int currentBucket = 4;
        for (Terms.Bucket bucket : terms.getBuckets()) {
            assertThat(key(bucket), equalTo("val" + currentBucket--));
            assertThat(bucket.getDocCount(), equalTo(10L));
            TopHits topHits = bucket.getAggregations().get("hits");
            SearchHits hits = topHits.getHits();
            assertThat(hits.totalHits(), equalTo(10L));
            assertThat(hits.getHits().length, equalTo(3));
            assertThat((Long) hits.getAt(0).sortValues()[0], equalTo(higestSortValue));
            assertThat((Long) hits.getAt(1).sortValues()[0], equalTo(higestSortValue - 1));
            assertThat((Long) hits.getAt(2).sortValues()[0], equalTo(higestSortValue - 2));
            Max max = bucket.getAggregations().get("max_sort");
            assertThat(max.getValue(), equalTo(((Long) higestSortValue).doubleValue()));
            higestSortValue -= 10;
        }
    }

    /**
     * Field-collapsing pattern: terms on "group" ordered by max relevance ("value"),
     * one top hit per group — expects groups b, c, a with best docs 6, 9, 2.
     */
    public void testFieldCollapsing() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .setTypes("field-collapsing")
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                .setQuery(matchQuery("text", "term rare"))
                .addAggregation(
                        terms("terms").executionHint(randomExecutionHint()).field("group")
                                .order(Terms.Order.aggregation("max_score", false)).subAggregation(topHits("hits").size(1))
                                .subAggregation(max("max_score").field("value"))).get();
        assertSearchResponse(response);

        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(3));

        Iterator<Terms.Bucket> bucketIterator = terms.getBuckets().iterator();
        Terms.Bucket bucket = bucketIterator.next();
        assertThat(key(bucket), equalTo("b"));
        TopHits topHits = bucket.getAggregations().get("hits");
        SearchHits hits = topHits.getHits();
        assertThat(hits.totalHits(), equalTo(4L));
        assertThat(hits.getHits().length, equalTo(1));
        assertThat(hits.getAt(0).id(), equalTo("6"));

        bucket = bucketIterator.next();
        assertThat(key(bucket), equalTo("c"));
        topHits = bucket.getAggregations().get("hits");
        hits = topHits.getHits();
        assertThat(hits.totalHits(), equalTo(3L));
        assertThat(hits.getHits().length, equalTo(1));
        assertThat(hits.getAt(0).id(), equalTo("9"));

        bucket = bucketIterator.next();
        assertThat(key(bucket), equalTo("a"));
        topHits = bucket.getAggregations().get("hits");
        hits = topHits.getHits();
        assertThat(hits.totalHits(), equalTo(2L));
        assertThat(hits.getHits().length, equalTo(1));
        assertThat(hits.getAt(0).id(), equalTo("2"));
    }

    /**
     * All fetch-phase features inside top_hits at once: highlighting, explain,
     * stored field, fielddata field, script field, source filtering, version and
     * matched queries.
     */
    public void testFetchFeatures() {
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setQuery(matchQuery("text", "text").queryName("test"))
                .addAggregation(terms("terms")
                        .executionHint(randomExecutionHint())
                        .field(TERMS_AGGS_FIELD)
                        .subAggregation(
                                topHits("hits").size(1)
                                        .highlighter(new HighlightBuilder().field("text"))
                                        .explain(true)
                                        .field("text")
                                        .fieldDataField("field1")
                                        .scriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap()))
                                        .fetchSource("text", null)
                                        .version(true)
                        )
                )
                .get();
        assertSearchResponse(response);

        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(5));

        for (Terms.Bucket bucket : terms.getBuckets()) {
            TopHits topHits = bucket.getAggregations().get("hits");
            SearchHits hits = topHits.getHits();
            assertThat(hits.totalHits(), equalTo(10L));
            assertThat(hits.getHits().length, equalTo(1));

            SearchHit hit = hits.getAt(0);
            HighlightField highlightField = hit.getHighlightFields().get("text");
            assertThat(highlightField.getFragments().length, equalTo(1));
            assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>text</em> to entertain"));

            Explanation explanation = hit.explanation();
            assertThat(explanation.toString(), containsString("text:text"));

            long version = hit.version();
            assertThat(version, equalTo(1L));

            assertThat(hit.matchedQueries()[0], equalTo("test"));

            SearchHitField field = hit.field("field1");
            assertThat(field.getValue().toString(), equalTo("5"));

            assertThat(hit.getSource().get("text").toString(), equalTo("some text to entertain"));

            field = hit.field("script");
            assertThat(field.getValue().toString(), equalTo("5"));

            // fetchSource("text", null) filters _source down to the one field.
            assertThat(hit.sourceAsMap().size(), equalTo(1));
            assertThat(hit.sourceAsMap().get("text").toString(), equalTo("some text to entertain"));
        }
    }

    /** Sorting on an unmapped field inside top_hits must fail the search phase. */
    public void testInvalidSortField() throws Exception {
        try {
            client().prepareSearch("idx").setTypes("type")
                    .addAggregation(terms("terms")
                            .executionHint(randomExecutionHint())
                            .field(TERMS_AGGS_FIELD)
                            .subAggregation(
                                    topHits("hits").sort(SortBuilders.fieldSort("xyz").order(SortOrder.DESC))
                            )
                    ).get();
            fail();
        } catch (SearchPhaseExecutionException e) {
            assertThat(e.toString(), containsString("No mapping found for [xyz] in order to sort on"));
        }
    }

    /** A top-level top_hits on an empty index returns an empty, well-formed result. */
    public void testEmptyIndex() throws Exception {
        SearchResponse response = client().prepareSearch("empty").setTypes("type")
                .addAggregation(topHits("hits"))
                .get();
        assertSearchResponse(response);

        TopHits hits = response.getAggregations().get("hits");
        assertThat(hits, notNullValue());
        assertThat(hits.getName(), equalTo("hits"));
        assertThat(hits.getHits().totalHits(), equalTo(0L));
    }

    /**
     * With trackScores(true) hits carry real scores; with trackScores(false) the
     * max score and per-hit scores are NaN (hits are sorted on _uid, not score).
     */
    public void testTrackScores() throws Exception {
        boolean[] trackScores = new boolean[]{true, false};
        for (boolean trackScore : trackScores) {
            logger.info("Track score={}", trackScore);
            SearchResponse response = client().prepareSearch("idx").setTypes("field-collapsing")
                    .setQuery(matchQuery("text", "term rare"))
                    .addAggregation(terms("terms")
                            .field("group")
                            .subAggregation(
                                    topHits("hits")
                                            .trackScores(trackScore)
                                            .size(1)
                                            .sort("_uid", SortOrder.DESC)
                            )
                    )
                    .get();
            assertSearchResponse(response);

            Terms terms = response.getAggregations().get("terms");
            assertThat(terms, notNullValue());
            assertThat(terms.getName(), equalTo("terms"));
            assertThat(terms.getBuckets().size(), equalTo(3));

            Terms.Bucket bucket = terms.getBucketByKey("a");
            assertThat(key(bucket), equalTo("a"));
            TopHits topHits = bucket.getAggregations().get("hits");
            SearchHits hits = topHits.getHits();
            assertThat(hits.getMaxScore(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN));
            assertThat(hits.getAt(0).score(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN));

            bucket = terms.getBucketByKey("b");
            assertThat(key(bucket), equalTo("b"));
            topHits = bucket.getAggregations().get("hits");
            hits = topHits.getHits();
            assertThat(hits.getMaxScore(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN));
            assertThat(hits.getAt(0).score(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN));

            bucket = terms.getBucketByKey("c");
            assertThat(key(bucket), equalTo("c"));
            topHits = bucket.getAggregations().get("hits");
            hits = topHits.getHits();
            assertThat(hits.getMaxScore(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN));
            assertThat(hits.getAt(0).score(), trackScore ? not(equalTo(Float.NaN)) : equalTo(Float.NaN));
        }
    }

    /**
     * top_hits under a terms agg inside a single nested scope: each hit's nested
     * identity (field + offset) and source must match the underlying comment doc.
     */
    public void testTopHitsInNestedSimple() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("articles")
                .setQuery(matchQuery("title", "title"))
                .addAggregation(
                        nested("to-comments", "comments")
                                .subAggregation(
                                        terms("users")
                                                .field("comments.user")
                                                .subAggregation(
                                                        topHits("top-comments").sort("comments.date", SortOrder.ASC)
                                                )
                                )
                )
                .get();

        Nested nested = searchResponse.getAggregations().get("to-comments");
        assertThat(nested.getDocCount(), equalTo(4L));

        Terms terms = nested.getAggregations().get("users");
        Terms.Bucket bucket = terms.getBucketByKey("a");
        assertThat(bucket.getDocCount(), equalTo(1L));
        TopHits topHits = bucket.getAggregations().get("top-comments");
        SearchHits searchHits = topHits.getHits();
        assertThat(searchHits.totalHits(), equalTo(1L));
        assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        assertThat((Integer) searchHits.getAt(0).getSource().get("date"), equalTo(1));

        bucket = terms.getBucketByKey("b");
        assertThat(bucket.getDocCount(), equalTo(2L));
        topHits = bucket.getAggregations().get("top-comments");
        searchHits = topHits.getHits();
        assertThat(searchHits.totalHits(), equalTo(2L));
        assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
        assertThat((Integer) searchHits.getAt(0).getSource().get("date"), equalTo(2));
        assertThat(searchHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(searchHits.getAt(1).getNestedIdentity().getOffset(), equalTo(0));
        assertThat((Integer) searchHits.getAt(1).getSource().get("date"), equalTo(3));

        bucket = terms.getBucketByKey("c");
        assertThat(bucket.getDocCount(), equalTo(1L));
        topHits = bucket.getAggregations().get("top-comments");
        searchHits = topHits.getHits();
        assertThat(searchHits.totalHits(), equalTo(1L));
        assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
        assertThat((Integer) searchHits.getAt(0).getSource().get("date"), equalTo(4));
    }

    /**
     * top_hits in a doubly-nested scope (comments.reviewers): checks both levels of
     * the nested identity chain (comment offset + reviewer offset) for every hit.
     */
    public void testTopHitsInSecondLayerNested() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("articles")
                .setQuery(matchQuery("title", "title"))
                .addAggregation(
                        nested("to-comments", "comments")
                                .subAggregation(
                                        nested("to-reviewers", "comments.reviewers").subAggregation(
                                                // Also need to sort on _doc because there are two reviewers with the same name
                                                topHits("top-reviewers").sort("comments.reviewers.name", SortOrder.ASC).sort("_doc", SortOrder.DESC).size(7)
                                        )
                                )
                                .subAggregation(topHits("top-comments").sort("comments.date", SortOrder.DESC).size(4))
                ).get();
        assertNoFailures(searchResponse);

        Nested toComments = searchResponse.getAggregations().get("to-comments");
        assertThat(toComments.getDocCount(), equalTo(4L));

        TopHits topComments = toComments.getAggregations().get("top-comments");
        assertThat(topComments.getHits().totalHits(), equalTo(4L));
        assertThat(topComments.getHits().getHits().length, equalTo(4));

        assertThat(topComments.getHits().getAt(0).getId(), equalTo("2"));
        assertThat(topComments.getHits().getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topComments.getHits().getAt(0).getNestedIdentity().getOffset(), equalTo(1));
        assertThat(topComments.getHits().getAt(0).getNestedIdentity().getChild(), nullValue());

        assertThat(topComments.getHits().getAt(1).getId(), equalTo("2"));
        assertThat(topComments.getHits().getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topComments.getHits().getAt(1).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(topComments.getHits().getAt(1).getNestedIdentity().getChild(), nullValue());

        assertThat(topComments.getHits().getAt(2).getId(), equalTo("1"));
        assertThat(topComments.getHits().getAt(2).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topComments.getHits().getAt(2).getNestedIdentity().getOffset(), equalTo(1));
        assertThat(topComments.getHits().getAt(2).getNestedIdentity().getChild(), nullValue());

        assertThat(topComments.getHits().getAt(3).getId(), equalTo("1"));
        assertThat(topComments.getHits().getAt(3).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topComments.getHits().getAt(3).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(topComments.getHits().getAt(3).getNestedIdentity().getChild(), nullValue());

        Nested toReviewers = toComments.getAggregations().get("to-reviewers");
        assertThat(toReviewers.getDocCount(), equalTo(7L));

        TopHits topReviewers = toReviewers.getAggregations().get("top-reviewers");
        assertThat(topReviewers.getHits().totalHits(), equalTo(7L));
        assertThat(topReviewers.getHits().getHits().length, equalTo(7));

        assertThat(topReviewers.getHits().getAt(0).getId(), equalTo("1"));
        assertThat((String) topReviewers.getHits().getAt(0).sourceAsMap().get("name"), equalTo("user a"));
        assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
        assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));

        assertThat(topReviewers.getHits().getAt(1).getId(), equalTo("1"));
        assertThat((String) topReviewers.getHits().getAt(1).sourceAsMap().get("name"), equalTo("user b"));
        assertThat(topReviewers.getHits().getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topReviewers.getHits().getAt(1).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(topReviewers.getHits().getAt(1).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
        assertThat(topReviewers.getHits().getAt(1).getNestedIdentity().getChild().getOffset(), equalTo(1));

        assertThat(topReviewers.getHits().getAt(2).getId(), equalTo("1"));
        assertThat((String) topReviewers.getHits().getAt(2).sourceAsMap().get("name"), equalTo("user c"));
        assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
        assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getOffset(), equalTo(2));

        assertThat(topReviewers.getHits().getAt(3).getId(), equalTo("1"));
        assertThat((String) topReviewers.getHits().getAt(3).sourceAsMap().get("name"), equalTo("user c"));
        assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getOffset(), equalTo(1));
        assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
        assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getOffset(), equalTo(0));

        assertThat(topReviewers.getHits().getAt(4).getId(), equalTo("1"));
        assertThat((String) topReviewers.getHits().getAt(4).sourceAsMap().get("name"), equalTo("user d"));
        assertThat(topReviewers.getHits().getAt(4).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topReviewers.getHits().getAt(4).getNestedIdentity().getOffset(), equalTo(1));
        assertThat(topReviewers.getHits().getAt(4).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
        assertThat(topReviewers.getHits().getAt(4).getNestedIdentity().getChild().getOffset(), equalTo(1));

        assertThat(topReviewers.getHits().getAt(5).getId(), equalTo("1"));
        assertThat((String) topReviewers.getHits().getAt(5).sourceAsMap().get("name"), equalTo("user e"));
        assertThat(topReviewers.getHits().getAt(5).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topReviewers.getHits().getAt(5).getNestedIdentity().getOffset(), equalTo(1));
        assertThat(topReviewers.getHits().getAt(5).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
        assertThat(topReviewers.getHits().getAt(5).getNestedIdentity().getChild().getOffset(), equalTo(2));

        assertThat(topReviewers.getHits().getAt(6).getId(), equalTo("2"));
        assertThat((String) topReviewers.getHits().getAt(6).sourceAsMap().get("name"), equalTo("user f"));
        // Fixed copy/paste error: these four assertions previously re-checked
        // getAt(0) instead of getAt(6). "user f" reviews article 2's first comment,
        // so comment offset 0 / reviewer offset 0 are the expected values here too.
        assertThat(topReviewers.getHits().getAt(6).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(topReviewers.getHits().getAt(6).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(topReviewers.getHits().getAt(6).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
        assertThat(topReviewers.getHits().getAt(6).getNestedIdentity().getChild().getOffset(), equalTo(0));
    }

    /**
     * Fetch-phase features on a nested top_hits: highlighting (random highlighter
     * type), explain (not supported for nested hits → no match), fielddata field,
     * script field, source filtering, version and matched queries.
     */
    public void testNestedFetchFeatures() {
        String hlType = randomFrom("plain", "fvh", "postings");
        HighlightBuilder.Field hlField = new HighlightBuilder.Field("comments.message")
                .highlightQuery(matchQuery("comments.message", "comment"))
                .forceSource(randomBoolean()) // randomly from stored field or _source
                .highlighterType(hlType);

        SearchResponse searchResponse = client()
                .prepareSearch("articles")
                .setQuery(nestedQuery("comments", matchQuery("comments.message", "comment").queryName("test")))
                .addAggregation(
                        nested("to-comments", "comments").subAggregation(
                                topHits("top-comments").size(1).highlighter(new HighlightBuilder().field(hlField)).explain(true)
                                        .fieldDataField("comments.user")
                                        .scriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())).fetchSource("message", null)
                                        .version(true).sort("comments.date", SortOrder.ASC))).get();
        assertHitCount(searchResponse, 2);
        Nested nested = searchResponse.getAggregations().get("to-comments");
        assertThat(nested.getDocCount(), equalTo(4L));

        SearchHits hits = ((TopHits) nested.getAggregations().get("top-comments")).getHits();
        assertThat(hits.totalHits(), equalTo(4L));
        SearchHit searchHit = hits.getAt(0);
        assertThat(searchHit.getId(), equalTo("1"));
        assertThat(searchHit.getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(searchHit.getNestedIdentity().getOffset(), equalTo(0));

        HighlightField highlightField = searchHit.getHighlightFields().get("comments.message");
        assertThat(highlightField.getFragments().length, equalTo(1));
        assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>comment</em>"));

        // Can't explain nested hit with the main query, since both are in a different scopes, also the nested doc may not even have matched with the main query
        // If top_hits would have a query option then we can explain that query
        Explanation explanation = searchHit.explanation();
        assertFalse(explanation.isMatch());

        // Returns the version of the root document. Nested docs don't have a separate version
        long version = searchHit.version();
        assertThat(version, equalTo(1L));

        assertThat(searchHit.matchedQueries(), arrayContaining("test"));

        SearchHitField field = searchHit.field("comments.user");
        assertThat(field.getValue().toString(), equalTo("a"));

        field = searchHit.field("script");
        assertThat(field.getValue().toString(), equalTo("5"));

        assertThat(searchHit.sourceAsMap().size(), equalTo(1));
        assertThat(searchHit.sourceAsMap().get("message").toString(), equalTo("some comment"));
    }

    /**
     * top_hits inside nested inside a histogram whose buckets are ordered by the
     * nested doc count; verifies nested identities, sources and highlighting per
     * histogram bucket.
     */
    public void testTopHitsInNested() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("articles")
                .addAggregation(
                        histogram("dates")
                                .field("date")
                                .interval(5)
                                .order(Histogram.Order.aggregation("to-comments", true))
                                .subAggregation(
                                        nested("to-comments", "comments")
                                                .subAggregation(topHits("comments")
                                                        .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("comments.message").highlightQuery(matchQuery("comments.message", "text"))))
                                                        .sort("comments.id", SortOrder.ASC))
                                )
                )
                .get();

        Histogram histogram = searchResponse.getAggregations().get("dates");
        for (int i = 0; i < numArticles; i += 5) {
            Histogram.Bucket bucket = histogram.getBuckets().get(i / 5);
            assertThat(bucket.getDocCount(), equalTo(5L));

            // Articles i..i+4 have i..i+4 comments respectively: 5i + (0+1+2+3+4).
            long numNestedDocs = 10 + (5 * i);
            Nested nested = bucket.getAggregations().get("to-comments");
            assertThat(nested.getDocCount(), equalTo(numNestedDocs));

            TopHits hits = nested.getAggregations().get("comments");
            SearchHits searchHits = hits.getHits();
            assertThat(searchHits.totalHits(), equalTo(numNestedDocs));
            for (int j = 0; j < 3; j++) {
                assertThat(searchHits.getAt(j).getNestedIdentity().getField().string(), equalTo("comments"));
                assertThat(searchHits.getAt(j).getNestedIdentity().getOffset(), equalTo(0));
                assertThat((Integer) searchHits.getAt(j).sourceAsMap().get("id"), equalTo(0));

                HighlightField highlightField = searchHits.getAt(j).getHighlightFields().get("comments.message");
                assertThat(highlightField.getFragments().length, equalTo(1));
                assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>text</em>"));
            }
        }
    }

    /** A near-max top_hits size must not blow up with huge allocations. */
    public void testDontExplode() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .setTypes("type")
                .addAggregation(terms("terms")
                        .executionHint(randomExecutionHint())
                        .field(TERMS_AGGS_FIELD)
                        .subAggregation(
                                topHits("hits").size(ArrayUtil.MAX_ARRAY_LENGTH - 1).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))
                        )
                )
                .get();
        assertNoFailures(response);
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/bigtable/admin/table/v1/bigtable_table_service_messages.proto
//
// NOTE(review): generated code. Do not hand-edit logic; change the .proto and
// regenerate instead. The comments below were added for review readability
// only — all code tokens are unchanged.

package com.google.bigtable.admin.table.v1;

/**
 * Protobuf type {@code google.bigtable.admin.table.v1.ListTablesResponse}
 */
public final class ListTablesResponse extends
    com.google.protobuf.GeneratedMessage implements
    // @@protoc_insertion_point(message_implements:google.bigtable.admin.table.v1.ListTablesResponse)
    ListTablesResponseOrBuilder {
  // Use ListTablesResponse.newBuilder() to construct.
  private ListTablesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
  }
  // Default (empty) instance: the repeated field starts as an immutable empty list.
  private ListTablesResponse() {
    tables_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    // This generated variant does not retain unknown fields; always empty.
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  // Wire-format parsing constructor, invoked only via PARSER below.
  private ListTablesResponse(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry) {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        // NOTE(review): the 'default' clause appears before 'case 10'; Java
        // switch semantics make clause order irrelevant, so behavior is the
        // same as the usual generated ordering.
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // Field 1 (tables), wire type 2. Bit 0 of mutable_bitField0_
            // tracks whether the mutable ArrayList has been allocated yet.
            if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
              tables_ = new java.util.ArrayList<com.google.bigtable.admin.table.v1.Table>();
              mutable_bitField0_ |= 0x00000001;
            }
            tables_.add(input.readMessage(com.google.bigtable.admin.table.v1.Table.parser(), extensionRegistry));
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Wrapped in RuntimeException because a constructor cannot declare the
      // checked exception; PARSER.parsePartialFrom unwraps it again below.
      throw new RuntimeException(e.setUnfinishedMessage(this));
    } catch (java.io.IOException e) {
      throw new RuntimeException(
          new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this));
    } finally {
      // Runs even on failure so the partially-parsed message is immutable.
      if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
        tables_ = java.util.Collections.unmodifiableList(tables_);
      }
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesResponse_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.bigtable.admin.table.v1.ListTablesResponse.class, com.google.bigtable.admin.table.v1.ListTablesResponse.Builder.class);
  }

  public static final int TABLES_FIELD_NUMBER = 1;
  // Immutable after construction (see parsing ctor / buildPartial).
  private java.util.List<com.google.bigtable.admin.table.v1.Table> tables_;
  /**
   * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
   *
   * <pre>
   * The tables present in the requested cluster.
   * At present, only the names of the tables are populated.
   * </pre>
   */
  public java.util.List<com.google.bigtable.admin.table.v1.Table> getTablesList() {
    return tables_;
  }
  /**
   * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
   *
   * <pre>
   * The tables present in the requested cluster.
   * At present, only the names of the tables are populated.
   * </pre>
   */
  public java.util.List<? extends com.google.bigtable.admin.table.v1.TableOrBuilder>
      getTablesOrBuilderList() {
    return tables_;
  }
  /**
   * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
   *
   * <pre>
   * The tables present in the requested cluster.
   * At present, only the names of the tables are populated.
   * </pre>
   */
  public int getTablesCount() {
    return tables_.size();
  }
  /**
   * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
   *
   * <pre>
   * The tables present in the requested cluster.
   * At present, only the names of the tables are populated.
   * </pre>
   */
  public com.google.bigtable.admin.table.v1.Table getTables(int index) {
    return tables_.get(index);
  }
  /**
   * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
   *
   * <pre>
   * The tables present in the requested cluster.
   * At present, only the names of the tables are populated.
   * </pre>
   */
  public com.google.bigtable.admin.table.v1.TableOrBuilder getTablesOrBuilder(
      int index) {
    return tables_.get(index);
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    // Memoized: -1 unknown, 0 false, 1 true. No required fields, so always true.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < tables_.size(); i++) {
      output.writeMessage(1, tables_.get(i));
    }
  }

  public int getSerializedSize() {
    // memoizedSize (inherited) caches the computed size; -1 means not computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < tables_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, tables_.get(i));
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static com.google.bigtable.admin.table.v1.ListTablesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }

  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.bigtable.admin.table.v1.ListTablesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // Avoids an unnecessary mergeFrom when converting the default instance.
    return this == DEFAULT_INSTANCE ?
        new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code google.bigtable.admin.table.v1.ListTablesResponse}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.bigtable.admin.table.v1.ListTablesResponse)
      com.google.bigtable.admin.table.v1.ListTablesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.bigtable.admin.table.v1.ListTablesResponse.class, com.google.bigtable.admin.table.v1.ListTablesResponse.Builder.class);
    }

    // Construct using com.google.bigtable.admin.table.v1.ListTablesResponse.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly creates the nested-builder support when reflection-style
      // field access is in use (alwaysUseFieldBuilders).
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        getTablesFieldBuilder();
      }
    }
    public Builder clear() {
      super.clear();
      if (tablesBuilder_ == null) {
        tables_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        tablesBuilder_.clear();
      }
      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesResponse_descriptor;
    }

    public com.google.bigtable.admin.table.v1.ListTablesResponse getDefaultInstanceForType() {
      return com.google.bigtable.admin.table.v1.ListTablesResponse.getDefaultInstance();
    }

    public com.google.bigtable.admin.table.v1.ListTablesResponse build() {
      com.google.bigtable.admin.table.v1.ListTablesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public com.google.bigtable.admin.table.v1.ListTablesResponse buildPartial() {
      com.google.bigtable.admin.table.v1.ListTablesResponse result = new com.google.bigtable.admin.table.v1.ListTablesResponse(this);
      // from_bitField0_ is unused here (no singular fields); kept as generated.
      int from_bitField0_ = bitField0_;
      if (tablesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          // Freeze the list and hand ownership to the message; the builder
          // will copy-on-write via ensureTablesIsMutable if mutated again.
          tables_ = java.util.Collections.unmodifiableList(tables_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.tables_ = tables_;
      } else {
        result.tables_ = tablesBuilder_.build();
      }
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.bigtable.admin.table.v1.ListTablesResponse) {
        return mergeFrom((com.google.bigtable.admin.table.v1.ListTablesResponse)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.bigtable.admin.table.v1.ListTablesResponse other) {
      if (other == com.google.bigtable.admin.table.v1.ListTablesResponse.getDefaultInstance()) return this;
      if (tablesBuilder_ == null) {
        if (!other.tables_.isEmpty()) {
          if (tables_.isEmpty()) {
            // Share the other message's immutable list; bit 0 cleared so any
            // later mutation copies it first.
            tables_ = other.tables_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTablesIsMutable();
            tables_.addAll(other.tables_);
          }
          onChanged();
        }
      } else {
        if (!other.tables_.isEmpty()) {
          if (tablesBuilder_.isEmpty()) {
            tablesBuilder_.dispose();
            tablesBuilder_ = null;
            tables_ = other.tables_;
            bitField0_ = (bitField0_ & ~0x00000001);
            tablesBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                    getTablesFieldBuilder() : null;
          } else {
            tablesBuilder_.addAllMessages(other.tables_);
          }
        }
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.bigtable.admin.table.v1.ListTablesResponse parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage = (com.google.bigtable.admin.table.v1.ListTablesResponse) e.getUnfinishedMessage();
        throw e;
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    private java.util.List<com.google.bigtable.admin.table.v1.Table> tables_ =
        java.util.Collections.emptyList();
    private void ensureTablesIsMutable() {
      // Bit 0 of bitField0_ means "tables_ is a private mutable copy".
      if (!((bitField0_ & 0x00000001) == 0x00000001)) {
        tables_ = new java.util.ArrayList<com.google.bigtable.admin.table.v1.Table>(tables_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilder<
        com.google.bigtable.admin.table.v1.Table, com.google.bigtable.admin.table.v1.Table.Builder, com.google.bigtable.admin.table.v1.TableOrBuilder> tablesBuilder_;

    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public java.util.List<com.google.bigtable.admin.table.v1.Table> getTablesList() {
      if (tablesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(tables_);
      } else {
        return tablesBuilder_.getMessageList();
      }
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public int getTablesCount() {
      if (tablesBuilder_ == null) {
        return tables_.size();
      } else {
        return tablesBuilder_.getCount();
      }
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public com.google.bigtable.admin.table.v1.Table getTables(int index) {
      if (tablesBuilder_ == null) {
        return tables_.get(index);
      } else {
        return tablesBuilder_.getMessage(index);
      }
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public Builder setTables(
        int index, com.google.bigtable.admin.table.v1.Table value) {
      if (tablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTablesIsMutable();
        tables_.set(index, value);
        onChanged();
      } else {
        tablesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public Builder setTables(
        int index, com.google.bigtable.admin.table.v1.Table.Builder builderForValue) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.set(index, builderForValue.build());
        onChanged();
      } else {
        tablesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public Builder addTables(com.google.bigtable.admin.table.v1.Table value) {
      if (tablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTablesIsMutable();
        tables_.add(value);
        onChanged();
      } else {
        tablesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public Builder addTables(
        int index, com.google.bigtable.admin.table.v1.Table value) {
      if (tablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTablesIsMutable();
        tables_.add(index, value);
        onChanged();
      } else {
        tablesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public Builder addTables(
        com.google.bigtable.admin.table.v1.Table.Builder builderForValue) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.add(builderForValue.build());
        onChanged();
      } else {
        tablesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public Builder addTables(
        int index, com.google.bigtable.admin.table.v1.Table.Builder builderForValue) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.add(index, builderForValue.build());
        onChanged();
      } else {
        tablesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public Builder addAllTables(
        java.lang.Iterable<? extends com.google.bigtable.admin.table.v1.Table> values) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, tables_);
        onChanged();
      } else {
        tablesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public Builder clearTables() {
      if (tablesBuilder_ == null) {
        tables_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        tablesBuilder_.clear();
      }
      return this;
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public Builder removeTables(int index) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.remove(index);
        onChanged();
      } else {
        tablesBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public com.google.bigtable.admin.table.v1.Table.Builder getTablesBuilder(
        int index) {
      return getTablesFieldBuilder().getBuilder(index);
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public com.google.bigtable.admin.table.v1.TableOrBuilder getTablesOrBuilder(
        int index) {
      if (tablesBuilder_ == null) {
        return tables_.get(index);
      } else {
        return tablesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public java.util.List<? extends com.google.bigtable.admin.table.v1.TableOrBuilder>
        getTablesOrBuilderList() {
      if (tablesBuilder_ != null) {
        return tablesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(tables_);
      }
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public com.google.bigtable.admin.table.v1.Table.Builder addTablesBuilder() {
      return getTablesFieldBuilder().addBuilder(
          com.google.bigtable.admin.table.v1.Table.getDefaultInstance());
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public com.google.bigtable.admin.table.v1.Table.Builder addTablesBuilder(
        int index) {
      return getTablesFieldBuilder().addBuilder(
          index, com.google.bigtable.admin.table.v1.Table.getDefaultInstance());
    }
    /**
     * <code>repeated .google.bigtable.admin.table.v1.Table tables = 1;</code>
     *
     * <pre>
     * The tables present in the requested cluster.
     * At present, only the names of the tables are populated.
     * </pre>
     */
    public java.util.List<com.google.bigtable.admin.table.v1.Table.Builder>
        getTablesBuilderList() {
      return getTablesFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilder; once created, tables_ is
    // nulled and all access goes through the builder.
    private com.google.protobuf.RepeatedFieldBuilder<
        com.google.bigtable.admin.table.v1.Table, com.google.bigtable.admin.table.v1.Table.Builder, com.google.bigtable.admin.table.v1.TableOrBuilder>
        getTablesFieldBuilder() {
      if (tablesBuilder_ == null) {
        tablesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
            com.google.bigtable.admin.table.v1.Table, com.google.bigtable.admin.table.v1.Table.Builder, com.google.bigtable.admin.table.v1.TableOrBuilder>(
                tables_,
                ((bitField0_ & 0x00000001) == 0x00000001),
                getParentForChildren(),
                isClean());
        tables_ = null;
      }
      return tablesBuilder_;
    }
    // Unknown fields are intentionally discarded in this variant (no-ops).
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:google.bigtable.admin.table.v1.ListTablesResponse)
  }

  // @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.ListTablesResponse)
  private static final com.google.bigtable.admin.table.v1.ListTablesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.bigtable.admin.table.v1.ListTablesResponse();
  }

  public static com.google.bigtable.admin.table.v1.ListTablesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListTablesResponse>
      PARSER = new com.google.protobuf.AbstractParser<ListTablesResponse>() {
    public ListTablesResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      try {
        return new ListTablesResponse(input, extensionRegistry);
      } catch (RuntimeException e) {
        // Unwrap the checked exception re-thrown by the parsing constructor.
        if (e.getCause() instanceof
            com.google.protobuf.InvalidProtocolBufferException) {
          throw (com.google.protobuf.InvalidProtocolBufferException)
              e.getCause();
        }
        throw e;
      }
    }
  };

  public static com.google.protobuf.Parser<ListTablesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListTablesResponse> getParserForType() {
    return PARSER;
  }

  public com.google.bigtable.admin.table.v1.ListTablesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
package fr.city.core; import static fr.city.core.TypeBuilding.BUILDING; import static fr.city.core.TypeBuilding.ROAD; import static fr.city.core.TypeBuilding.TRANSPORT; import java.awt.Color; import java.io.IOException; import java.io.StringWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import org.codehaus.jackson.map.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import fr.graph.api.GraphEntry; import fr.graph.api.InfoAddress; import fr.graph.api.InfoNode; import fr.graph.core.Graph; import fr.graph.core.RelTypes; import fr.network.transport.api.Coord2D; import fr.network.transport.api.PathInfo; import fr.network.transport.network.Address; public class CityBase implements City { private static final Logger LOG = LoggerFactory.getLogger(CityBase.class); private static CityBase city; private static ObserverCity observerCity; private String cityName = "My city"; private static int max = 200; private AtomicLong id = new AtomicLong(); private GraphEntry graphBuilding; private TerrainLayout terrainLayout; private Map<String, Building> transportsMap = new HashMap<>(); private Object monitorMaptransportsMap = new Object(); private static Object monitor = new Object(); private CityBase() { this.terrainLayout = new TerrainLayout(max); } public static CityBase getInstance(ObserverCity myObserverCity, String repo, boolean test) { if (city == null) { synchronized (monitor) { if (city == null) { LOG.info("City getInstance"); city = new CityBase(); city.graphBuilding = new GraphEntry(repo, test, BUILDING.name(), ROAD.name()); observerCity = myObserverCity; return city; } } } return city; } /* * (non-Javadoc) * * @see fr.city.core.City#destroy() */ @Override public void destroy() { synchronized (monitor) { city.graphBuilding.shutdown(); city = null; } } /* * (non-Javadoc) * * @see fr.city.core.City#getAllRoads() */ @Override public List<Road> 
getAllRoads() { List<Road> lstBs = new ArrayList<>(); List<Object> allBuildings = graphBuilding .findAllRelationProperty(RelTypes.EVENT); ObjectMapper mapper = new ObjectMapper(); for (Object o : allBuildings) { Road b = null; try { b = mapper.readValue((String) o, Road.class); } catch (IOException e) { LOG.error("convert object to Json", e); } if (b == null) { throw new IllegalArgumentException("Building is null"); } lstBs.add(b); } LOG.info("getAllRoads size " + allBuildings.size()); return lstBs; } /* * (non-Javadoc) * * @see fr.city.core.City#getAllBuildings() */ @Override public List<Building> getAllBuildings() { List<Building> lstBs = new ArrayList<>(); List<Object> allBuildings = graphBuilding.findAll(BUILDING.name(), Graph.CUSTOM); ObjectMapper mapper = new ObjectMapper(); for (Object o : allBuildings) { Building b = null; try { b = mapper.readValue((String) o, Building.class); } catch (IOException e) { LOG.error("convert object to Json", e); } if (b == null) { throw new IllegalArgumentException("Building is null"); } lstBs.add(b); } LOG.info("getAllBuildings size " + allBuildings.size()); return lstBs; } /* * (non-Javadoc) * * @see fr.city.core.City#removeBuilding(java.lang.String) */ @Override public void removeBuilding(String name) { if (name == null || name.equals("")) { throw new IllegalArgumentException("name must not be null or empty"); } String jsonBuilding = (String) graphBuilding.find(BUILDING.name(), name, Graph.CUSTOM); ObjectMapper mapper = new ObjectMapper(); Building bOld = null; try { bOld = mapper.readValue(jsonBuilding, Building.class); } catch (IOException e) { LOG.error("convert object to Json", e); } if (bOld == null) { throw new IllegalArgumentException("Doesn't exist " + name); } removeBuilding(bOld); } public List<PathInfo> findPath(Address origin, Address destination) { if (origin == null) { throw new IllegalArgumentException( "origin must not be null or empty"); } if (destination == null) { throw new IllegalArgumentException( 
"destination must not be null or empty"); } InfoAddress iA = new InfoAddress(); iA.setRoadName(origin.getRoadName()); iA.setX(origin.getX()); iA.setZ(origin.getZ()); InfoAddress iB = new InfoAddress(); iB.setRoadName(destination.getRoadName()); iB.setX(destination.getX()); iB.setZ(destination.getZ()); List<PathInfo> path = new ArrayList<>(); List<InfoNode> infonodes = graphBuilding.findPath(ROAD.name(), iA, iB); for (InfoNode i : infonodes) { LOG.info("InfoNode " + i); PathInfo pathInfo = new PathInfo(); pathInfo.setX(i.getX()); pathInfo.setZ(i.getZ()); pathInfo.setRoadName(i.getName()); path.add(pathInfo); } return path; } /* * (non-Javadoc) * * @see fr.city.core.City#removeBuilding(fr.city.core.Building) */ @Override public void removeBuilding(Building bOld) { if (bOld == null) { throw new IllegalArgumentException("The building dosen't exist "); } LOG.info("remove Building x:" + bOld.getName()); graphBuilding.remove(BUILDING.name(), bOld.getName()); observerCity.removeBuilding(bOld); } /* * (non-Javadoc) * * @see fr.city.core.City#createBuilding(int, int, int, java.lang.String) */ @Override public Building createBuilding(int x, int z, int height, String color) { if (x < 0 || z < 0 || x > max || z > max) { throw new IllegalArgumentException("The range is 0 to " + (max - 1)); } if (height < 0) { throw new IllegalArgumentException("The height must be positive"); } String jsonBuilding = (String) graphBuilding.findEventXYNodeProperty( BUILDING.name(), x, z, Graph.CUSTOM); if (jsonBuilding != null) { throw new IllegalArgumentException("The building already exist " + x + ":" + z); } Building b = new Building(); b.setX(x); b.setZ(z); b.setHeight(height); b.setName(graphBuilding.getNodeName(BUILDING.name(), String.valueOf(id.getAndIncrement()))); b.setColor(color); int y = terrainLayout.getHeight(x, z); b.setY(y); ObjectMapper mapper = new ObjectMapper(); StringWriter writer = new StringWriter(); try { mapper.writeValue(writer, b); } catch (Exception e) { 
// NOTE(review): this chunk begins mid-method. The statements down to the
// first closing brace are the tail of a create/persist method whose
// signature lies outside this view: it logs the building's JSON form,
// stores it in the graph and notifies the observer. Code unchanged.
            LOG.error("convert object to Json", e);
        }
        LOG.debug(writer.toString());
        // persist the serialized building under its name/coordinates and
        // broadcast the creation to the observer
        graphBuilding.create(BUILDING.name(), b.getName(), b.getX(), b.getZ(),
                writer.toString());
        observerCity.createBuilding(b);
        return b;
    }

    /**
     * Finds the road connecting (x, z) to (xD, zD), or {@code null} when the
     * graph holds no such relationship.
     *
     * @see fr.city.core.City#findRoad(int, int, int, int)
     */
    @Override
    public Road findRoad(int x, int z, int xD, int zD) {
        if (graphBuilding.checkIfRoadExists(ROAD.name(), x, z, xD, zD)) {
            // the relationship name is derived from the two endpoints
            return findRoadByName(graphBuilding.getRelationShipName(x, z, xD, zD));
        }
        return null;
    }

    /**
     * Loads a road by its relationship name and deserializes it from the
     * stored JSON. Returns {@code null} when the graph has no entry.
     *
     * @see fr.city.core.City#findRoadByName(java.lang.String)
     * @throws IllegalArgumentException if the stored JSON could not be mapped
     *             to a {@code Road}
     */
    @Override
    public Road findRoadByName(String name) {
        Object jsonBuilding = graphBuilding.findRoad(ROAD.name(), name);
        Road b = null;
        if (jsonBuilding != null) {
            LOG.debug("find " + (String) jsonBuilding);
            ObjectMapper mapper = new ObjectMapper();
            try {
                // NOTE(review): the log message below says "object to Json"
                // but this call is the reverse (JSON -> object); the runtime
                // string is left untouched.
                b = mapper.readValue((String) jsonBuilding, Road.class);
            } catch (IOException e) {
                LOG.error("convert object to Json", e);
            }
            if (b == null) {
                throw new IllegalArgumentException("Road is null");
            }
        }
        return b;
    }

    /**
     * Loads a building by name and deserializes it from the stored JSON.
     * Returns {@code null} when the graph has no node for the name.
     *
     * @see fr.city.core.City#findBuildingByName(java.lang.String)
     */
    @Override
    public Building findBuildingByName(String name) {
        Building b = null;
        Object jsonBuilding = graphBuilding.find(BUILDING.name(), name,
                Graph.CUSTOM);
        if (jsonBuilding != null) {
            LOG.debug("findBuildingByName " + (String) jsonBuilding);
            ObjectMapper mapper = new ObjectMapper();
            try {
                b = mapper.readValue((String) jsonBuilding, Building.class);
            } catch (IOException e) {
                LOG.error("convert object to Json", e);
            }
            if (b == null) {
                throw new IllegalArgumentException("Building is null");
            }
        }
        return b;
    }

    /**
     * @see fr.city.core.City#checkIfRoadExists(int, int, int, int)
     */
    @Override
    public boolean checkIfRoadExists(int x, int z, int xD, int zD) {
        return graphBuilding.checkIfRoadExists(ROAD.name(), x, z, xD, zD);
    }

    /**
     * Registers a traffic source at (x, z).
     *
     * @see fr.city.core.City#createSource(int, int)
     * @throws IllegalArgumentException if a coordinate is outside the map
     */
    @Override
    public void createSource(int x, int z) {
        // NOTE(review): the check accepts values up to and including max,
        // while the message advertises "0 to (max - 1)" — check and message
        // disagree by one. Same pattern is repeated in every method below;
        // left unchanged, confirm which bound is intended.
        if (x < 0 || z < 0 || x > max || z > max) {
            throw new IllegalArgumentException("The range is 0 to " + (max - 1)
                    + " " + x + " " + z);
        }
        graphBuilding.createSource(ROAD.name(), x, z);
    }

    /**
     * @see fr.city.core.City#checkIfSourceExists(int, int)
     * @throws IllegalArgumentException if a coordinate is outside the map
     */
    @Override
    public boolean checkIfSourceExists(int x, int z) {
        if (x < 0 || z < 0 || x > max || z > max) {
            throw new IllegalArgumentException("The range is 0 to " + (max - 1)
                    + " " + x + " " + z);
        }
        return graphBuilding.checkIfSourceExists(ROAD.name(), x, z);
    }

    /**
     * Creates an axis-aligned road between (x, z) and (xD, zD) with the given
     * colour, stores it as JSON on the relationship and notifies the
     * observer.
     *
     * @see fr.city.core.City#createRoad(int, int, int, int, java.awt.Color)
     * @throws IllegalArgumentException if an endpoint is out of range or the
     *             segment is neither horizontal nor vertical
     */
    @Override
    public Road createRoad(int x, int z, int xD, int zD, Color color) {
        if (x < 0 || z < 0 || x > max || z > max) {
            throw new IllegalArgumentException("The range is 0 to " + (max - 1)
                    + " " + x + " " + z);
        }
        if (xD < 0 || zD < 0 || xD > max || zD > max) {
            throw new IllegalArgumentException("The range is 0 to " + (max - 1)
                    + " " + xD + " " + zD);
        }
        if (x != xD && z != zD) {
            // roads must be straight lines along one axis
            throw new IllegalArgumentException(
                    "you must have x == xD or z == zD");
        }
        Road r = new Road();
        r.setXa(x);
        r.setZa(z);
        r.setXb(xD);
        r.setZb(zD);
        r.setBlue(color.getBlue());
        r.setGreen(color.getGreen());
        r.setRed(color.getRed());
        r.setName(graphBuilding.getRelationShipName(x, z, xD, zD));
        ObjectMapper mapper = new ObjectMapper();
        StringWriter writer = new StringWriter();
        try {
            mapper.writeValue(writer, r);
        } catch (Exception e) {
            LOG.error("convert object to Json", e);
        }
        graphBuilding.createRoad(ROAD.name(), x, z, xD, zD, writer.toString());
        observerCity.createRoad(r);
        return r;
    }

    /**
     * Updates the building stored at (x, z) when one exists, otherwise
     * creates a new one there.
     *
     * @see fr.city.core.City#updateOrCreateBuilding(int, int, int,
     *      java.lang.String)
     * @throws IllegalArgumentException if the coordinates are out of range,
     *             the height is negative, or a stored node cannot be
     *             deserialized
     */
    @Override
    public Building updateOrCreateBuilding(int x, int z, int height,
            String color) {
        if (x < 0 || z < 0 || x > max || z > max) {
            throw new IllegalArgumentException("The range is 0 to " + (max - 1)
                    + " " + x + " " + z);
        }
        if (height < 0) {
            throw new IllegalArgumentException("The height must be positive "
                    + height);
        }
        Object jsonBuilding = graphBuilding.findEventXYNodeProperty(
                BUILDING.name(), x, z, Graph.CUSTOM);
        if (jsonBuilding != null) {
            LOG.debug("find " + (String) jsonBuilding);
            ObjectMapper mapper = new ObjectMapper();
            Building b = null;
            try {
                b = mapper.readValue((String) jsonBuilding, Building.class);
            } catch (IOException e) {
                LOG.error("convert object to Json", e);
            }
            if (b == null) {
                throw new IllegalArgumentException("Building is null");
            }
            return updateBuilding(b.getName(), height, color);
        } else {
            return createBuilding(x, z, height, color);
        }
    }

    /**
     * Updates an existing building identified by its name.
     *
     * @throws IllegalArgumentException if no building exists for the name
     */
    @Override
    public Building updateBuilding(String name, int height, String color) {
        Building b = findBuildingByName(name);
        if (b == null) {
            throw new IllegalArgumentException("The building doesn't exist "
                    + name);
        }
        return updateBuilding(b, height, color);
    }

    // Applies the new height/colour, re-serializes the building, updates the
    // graph node and re-publishes it to the observer (remove then create).
    private Building updateBuilding(Building bOld, int height, String color) {
        observerCity.removeBuilding(bOld);
        bOld.setHeight(height);
        bOld.setColor(color);
        ObjectMapper mapper = new ObjectMapper();
        StringWriter writer = new StringWriter();
        try {
            mapper.writeValue(writer, bOld);
        } catch (Exception e) {
            LOG.error("convert object to Json", e);
        }
        LOG.debug(writer.toString());
        graphBuilding
                .update(BUILDING.name(), bOld.getName(), writer.toString());
        observerCity.createBuilding(bOld);
        return bOld;
    }

    /**
     * Finds the building stored at (x, z), or {@code null} when none exists.
     *
     * @see fr.city.core.City#findBuilding(int, int)
     */
    @Override
    public Building findBuilding(int x, int z) {
        if (x < 0 || z < 0 || x > max || z > max) {
            throw new IllegalArgumentException("The range is 0 to " + (max - 1)
                    + " " + x + " " + z);
        }
        Object jsonBuilding = graphBuilding.findEventXYNodeProperty(
                BUILDING.name(), x, z, Graph.CUSTOM);
        Building b = null;
        if (jsonBuilding != null) {
            LOG.debug("find " + (String) jsonBuilding);
            ObjectMapper mapper = new ObjectMapper();
            try {
                b = mapper.readValue((String) jsonBuilding, Building.class);
            } catch (IOException e) {
                LOG.error("convert object to Json", e);
            }
            if (b == null) {
                throw new IllegalArgumentException("Building is null");
            }
        }
        return b;
    }

    /**
     * Move the transport to the coordinates.
     *
     * @param name
     *            name of the transport
     * @param coord
     *            destination coordinates
     * @throws IllegalArgumentException if the name is null/empty, the
     *             coordinates are out of range, or no transport is registered
     *             under the name
     */
    public void moveTransport(String name, Address coord) {
        if (name == null || name.equals("")) {
            throw new IllegalArgumentException("name must not be null or empty");
        }
        if (coord.getX() < 0 || coord.getZ() < 0 || coord.getX() > max
                || coord.getZ() > max) {
            throw new IllegalArgumentException("The range is 0 to " + (max - 1)
                    + " " + coord.getX() + " " + coord.getZ());
        }
        Building br = null;
        synchronized (monitorMaptransportsMap) {
            Building b = transportsMap.get(name);
            if (b == null) {
                throw new IllegalArgumentException("No transport for name "
                        + name);
            }
            b.setX(coord.getX());
            b.setZ(coord.getZ());
            // snap the transport to the terrain height at the new position
            int y = terrainLayout.getHeight(coord.getX(), coord.getZ());
            b.setY(y);
            // copy of the building to avoid concurrency
            br = copyBuilding(b);
        }
        observerCity.moveTransport(br);
    }

    // Field-by-field snapshot so observers never see the map's live instance.
    private Building copyBuilding(Building b) {
        Building br = new Building();
        br.setHeight(b.getHeight());
        br.setName(b.getName());
        br.setX(b.getX());
        br.setY(b.getY());
        br.setZ(b.getZ());
        br.setColor(b.getColor());
        return br;
    }

    /**
     * @see fr.city.core.City#removeTransport(java.lang.String)
     * @throws IllegalArgumentException if the name is null or empty
     */
    @Override
    public void removeTransport(String name) {
        if (name == null || name.equals("")) {
            throw new IllegalArgumentException("name must not be null or empty");
        }
        Building br = null;
        synchronized (monitorMaptransportsMap) {
            Building b = transportsMap.remove(name);
            // NOTE(review): unlike moveTransport/findTransportByName there is
            // no null check here — removing an unknown name would NPE inside
            // copyBuilding. Left unchanged; confirm intended behaviour.
            // copy of the building to avoid concurrency
            br = copyBuilding(b);
        }
        observerCity.removeTransport(br);
    }

    /**
     * @see fr.city.core.City#findTransportByName(java.lang.String)
     * @throws IllegalArgumentException if no transport exists for the name
     */
    @Override
    public Building findTransportByName(String name) {
        Building br = null;
        synchronized (monitorMaptransportsMap) {
            Building b = transportsMap.get(name);
            if (b == null) {
                throw new IllegalArgumentException("No transport for name "
                        + name);
            }
            // copy of the building to avoid concurrency
            br = copyBuilding(b);
        }
        return br;
    }

    /**
     * Creates a transport at the given coordinates with a generated name,
     * registers it in the in-memory map and notifies the observer.
     *
     * @see fr.city.core.City#createTransport(fr.network.transport.api.Coord2D,
     *      java.lang.String)
     * @throws IllegalArgumentException if coord is null or out of range
     */
    @Override
    public Building createTransport(Coord2D coord, String color) {
        if (coord == null) {
            throw new IllegalArgumentException("coord must not be null");
        }
        if (coord.getX() < 0 || coord.getZ() < 0 || coord.getX() > max
                || coord.getZ() > max) {
            throw new IllegalArgumentException("The range is 0 to " + (max - 1)
                    + " " + coord.getX() + " " + coord.getZ());
        }
        Building b = new Building();
        b.setX(coord.getX());
        b.setZ(coord.getZ());
        // transports use a fixed height of 5; Y follows the terrain below
        b.setHeight(5);
        int y = terrainLayout.getHeight(coord.getX(), coord.getZ());
        b.setY(y);
        b.setColor(color);
        // 'id' is declared outside this chunk — presumably an atomic counter
        // (it has getAndIncrement), which keeps generated names unique
        String name = graphBuilding.getNodeName(TRANSPORT.name(),
                String.valueOf(id.getAndIncrement()));
        b.setName(name);
        LOG.info("new Transport x:" + coord.getX() + " z:" + coord.getZ()
                + " name:" + name);
        Building br = null;
        synchronized (monitorMaptransportsMap) {
            transportsMap.put(b.getName(), b);
            // copy of the building to avoid concurrency
            br = copyBuilding(b);
        }
        observerCity.createTransport(br);
        return br;
    }

    /**
     * @see fr.city.core.City#getCityName()
     */
    @Override
    public String getCityName() {
        return cityName;
    }

    public void setCityName(String cityName) {
        this.cityName = cityName;
    }

    /**
     * @see fr.city.core.City#getMax()
     */
    @Override
    public int getMax() {
        return max;
    }

    public TerrainLayout getTerrainLayout() {
        return terrainLayout;
    }

    public ObserverCity getObserverCity() {
        return observerCity;
    }
}
package twilightforest.client.model; import net.minecraft.client.model.ModelBase; import net.minecraft.client.model.ModelRenderer; import net.minecraft.entity.Entity; import net.minecraft.util.MathHelper; public class ModelTFSlimeBeetle extends ModelBase { ModelRenderer head; ModelRenderer RearEnd; ModelRenderer Leg6; ModelRenderer Leg4; ModelRenderer Leg2; ModelRenderer Leg5; ModelRenderer Leg3; ModelRenderer Leg1; ModelRenderer connector1; ModelRenderer antenna1; ModelRenderer antenna2; ModelRenderer eye1; ModelRenderer eye2; ModelRenderer slimeCube; ModelRenderer tail1; ModelRenderer tail2; ModelRenderer mouth; ModelRenderer slimeCenter; boolean renderPassModel = false; public ModelTFSlimeBeetle() { this(false); } public ModelTFSlimeBeetle(boolean renderpass) { renderPassModel = renderpass; field_78090_t = 64; field_78089_u = 64; connector1 = new ModelRenderer(this, 0, 12); connector1.func_78789_a(-3.0F, -3.0F, -1.0F, 6, 6, 1); connector1.func_78793_a(0.0F, 19.0F, -4.0F); RearEnd = new ModelRenderer(this, 31, 6); RearEnd.func_78789_a(-4.0F, -11.0F, -4.0F, 8, 10, 8); RearEnd.func_78793_a(0.0F, 18.0F, 7.0F); setRotation(RearEnd, 1.570796F, 0.0F, 0.0F); Leg6 = new ModelRenderer(this, 40, 0); Leg6.func_78789_a(-1.0F, -1.0F, -1.0F, 10, 2, 2); Leg6.func_78793_a(2.0F, 21.0F, -4.0F); setRotation(Leg6, 0.0F, 0.2792527F, 0.3490659F); Leg5 = new ModelRenderer(this, 40, 0); Leg5.field_78809_i = true; Leg5.func_78789_a(-9.0F, -1.0F, -1.0F, 10, 2, 2); Leg5.func_78793_a(-2.0F, 21.0F, -4.0F); setRotation(Leg5, 0.0F, -0.2792527F, -0.3490659F); Leg4 = new ModelRenderer(this, 40, 0); Leg4.func_78789_a(-1.0F, -1.0F, -1.0F, 10, 2, 2); Leg4.func_78793_a(2.0F, 21.0F, -1.0F); setRotation(Leg4, 0.0F, -0.2792527F, 0.3490659F); Leg2 = new ModelRenderer(this, 40, 0); Leg2.func_78789_a(-1.0F, -1.0F, -1.0F, 10, 2, 2); Leg2.func_78793_a(2.0F, 21.0F, 4.0F); setRotation(Leg2, 0.0F, -0.6981317F, 0.3490659F); Leg3 = new ModelRenderer(this, 40, 0); Leg3.field_78809_i = true; 
Leg3.func_78789_a(-9.0F, -1.0F, -1.0F, 10, 2, 2); Leg3.func_78793_a(-2.0F, 21.0F, -1.0F); setRotation(Leg3, 0.0F, 0.2792527F, -0.3490659F); Leg1 = new ModelRenderer(this, 40, 0); Leg1.field_78809_i = true; Leg1.func_78789_a(-9.0F, -1.0F, -1.0F, 10, 2, 2); Leg1.func_78793_a(-2.0F, 21.0F, 4.0F); Leg1.func_78787_b(64, 32); setRotation(Leg1, 0.0F, 0.6981317F, -0.3490659F); head = new ModelRenderer(this, 0, 0); head.func_78789_a(-4.0F, -4.0F, -6.0F, 8, 6, 6); head.func_78793_a(0.0F, 19.0F, -5.0F); antenna1 = new ModelRenderer(this, 38, 4); antenna1.func_78789_a(0.0F, -0.5F, -0.5F, 12, 1, 1); antenna1.func_78793_a(1.0F, -3.0F, -5.0F); setRotation(antenna1, 0.0F, 1.047198F, -0.296706F); antenna2 = new ModelRenderer(this, 38, 4); antenna2.func_78789_a(0.0F, -0.5F, -0.5F, 12, 1, 1); antenna2.func_78793_a(-1.0F, -3.0F, -5.0F); setRotation(antenna2, 0.0F, 2.094395F, 0.296706F); eye1 = new ModelRenderer(this, 15, 12); eye1.func_78789_a(-1.5F, -1.5F, -1.5F, 3, 3, 3); eye1.func_78793_a(-3.0F, -2.0F, -5.0F); eye2 = new ModelRenderer(this, 15, 12); eye2.func_78789_a(-1.5F, -1.5F, -1.5F, 3, 3, 3); eye2.func_78793_a(3.0F, -2.0F, -5.0F); mouth = new ModelRenderer(this, 17, 12); mouth.func_78789_a(-1.0F, -1.0F, -1.0F, 2, 2, 1); mouth.func_78793_a(0.0F, 1.0F, -6.0F); head.func_78792_a(antenna1); head.func_78792_a(antenna2); head.func_78792_a(eye1); head.func_78792_a(eye2); head.func_78792_a(mouth); tail1 = new ModelRenderer(this, 0, 20); tail1.func_78789_a(-3.0F, -3.0F, -3.0F, 6, 6, 6); tail1.func_78793_a(0.0F, 19.0F, 9.0F); tail2 = new ModelRenderer(this, 0, 20); tail2.func_78789_a(-3.0F, -6.0F, -3.0F, 6, 6, 6); tail2.func_78793_a(0.0F, -3.0F, 2.0F); slimeCube = new ModelRenderer(this, 0, 40); slimeCube.func_78789_a(-6.0F, -12.0F, -9.0F, 12, 12, 12); slimeCube.func_78793_a(0.0F, -6.0F, 0.0F); slimeCenter = new ModelRenderer(this, 32, 24); slimeCenter.func_78789_a(-4.0F, -10.0F, -7.0F, 8, 8, 8); slimeCenter.func_78793_a(0.0F, -6.0F, 0.0F); tail1.func_78792_a(tail2); if 
(renderPassModel) { tail2.func_78792_a(slimeCube); } else { tail2.func_78792_a(slimeCenter); } } public void func_78088_a(Entity entity, float f, float f1, float f2, float f3, float f4, float f5) { func_78087_a(f, f1, f2, f3, f4, f5, entity); tail1.func_78785_a(f5); if (!renderPassModel) { head.func_78785_a(f5); RearEnd.func_78785_a(f5); Leg6.func_78785_a(f5); Leg4.func_78785_a(f5); Leg2.func_78785_a(f5); Leg5.func_78785_a(f5); Leg3.func_78785_a(f5); Leg1.func_78785_a(f5); connector1.func_78785_a(f5); } } private void setRotation(ModelRenderer model, float x, float y, float z) { field_78795_f = x; field_78796_g = y; field_78808_h = z; } public void func_78087_a(float par1, float par2, float par3, float par4, float par5, float par6, Entity par7Entity) { head.field_78796_g = (par4 / 57.295776F); head.field_78795_f = (par5 / 57.295776F); float legZ = 0.28559935F; Leg1.field_78808_h = (-legZ); Leg2.field_78808_h = legZ; Leg3.field_78808_h = (-legZ * 0.74F); Leg4.field_78808_h = (legZ * 0.74F); Leg5.field_78808_h = (-legZ); Leg6.field_78808_h = legZ; float var9 = -0.0F; float var10 = 0.3926991F; Leg1.field_78796_g = (var10 * 2.0F + var9); Leg2.field_78796_g = (-var10 * 2.0F - var9); Leg3.field_78796_g = (var10 * 1.0F + var9); Leg4.field_78796_g = (-var10 * 1.0F - var9); Leg5.field_78796_g = (-var10 * 2.0F + var9); Leg6.field_78796_g = (var10 * 2.0F - var9); float var11 = -(MathHelper.func_76134_b(par1 * 0.6662F * 2.0F + 0.0F) * 0.4F) * par2; float var12 = -(MathHelper.func_76134_b(par1 * 0.6662F * 2.0F + 3.1415927F) * 0.4F) * par2; float var14 = -(MathHelper.func_76134_b(par1 * 0.6662F * 2.0F + 4.712389F) * 0.4F) * par2; float var15 = Math.abs(MathHelper.func_76126_a(par1 * 0.6662F + 0.0F) * 0.4F) * par2; float var16 = Math.abs(MathHelper.func_76126_a(par1 * 0.6662F + 3.1415927F) * 0.4F) * par2; float var18 = Math.abs(MathHelper.func_76126_a(par1 * 0.6662F + 4.712389F) * 0.4F) * par2; Leg1.field_78796_g += var11; Leg2.field_78796_g += -var11; Leg3.field_78796_g += 
var12; Leg4.field_78796_g += -var12; Leg5.field_78796_g += var14; Leg6.field_78796_g += -var14; Leg1.field_78808_h += var15; Leg2.field_78808_h += -var15; Leg3.field_78808_h += var16; Leg4.field_78808_h += -var16; Leg5.field_78808_h += var18; Leg6.field_78808_h += -var18; tail1.field_78795_f = (MathHelper.func_76134_b(par3 * 0.3335F) * 0.15F); tail2.field_78795_f = (MathHelper.func_76134_b(par3 * 0.4445F) * 0.2F); slimeCube.field_78795_f = (MathHelper.func_76134_b(par3 * 0.5555F) * 0.25F); slimeCenter.field_78795_f = (MathHelper.func_76134_b(par3 * 0.5555F + 0.25F) * 0.25F); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.serde2.lazy;

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.junit.Test;

/**
 * TestLazyPrimitive.
 *
 * Unit tests for the Lazy* primitive wrappers: each case initializes a lazy
 * object from a raw byte range and checks the parsed writable. Malformed,
 * out-of-range or truncated input is expected to parse to null.
 */
public class TestLazyPrimitive {

  /**
   * Initialize the LazyObject with the parameters, wrapping the byte[]
   * automatically.
   */
  public static void initLazyObject(LazyObject lo, byte[] data, int start,
      int length) {
    ByteArrayRef b = new ByteArrayRef();
    b.setData(data);
    lo.init(b, start, length);
  }

  /**
   * Test the LazyByte class.
   */
  @Test
  public void testLazyByte() throws Throwable {
    try {
      LazyByte b = new LazyByte(
          LazyPrimitiveObjectInspectorFactory.LAZY_BYTE_OBJECT_INSPECTOR);
      // zero-length range parses to null
      initLazyObject(b, new byte[] {'0'}, 0, 0);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'0'}, 0, 1);
      assertEquals(new ByteWritable((byte) 0), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '0'}, 0, 2);
      assertEquals(new ByteWritable((byte) 0), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '0'}, 0, 2);
      assertEquals(new ByteWritable((byte) 0), b.getWritableObject());
      // start/length select a sub-range of the buffer
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 1);
      assertEquals(new ByteWritable((byte) 1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '-', '1'}, 1, 2);
      assertEquals(new ByteWritable((byte) -1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '+', '1'}, 1, 2);
      assertEquals(new ByteWritable((byte) 1), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '8'}, 0, 4);
      assertEquals(new ByteWritable((byte) -128), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '7'}, 0, 4);
      assertEquals(new ByteWritable((byte) 127), b.getWritableObject());
      // trailing garbage, overflow past byte range, and truncated ranges
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 2);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '8'}, 0, 4);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '9'}, 0, 4);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  }

  /**
   * Test the LazyShort class.
   */
  @Test
  public void testLazyShort() throws Throwable {
    try {
      LazyShort b = new LazyShort(
          LazyPrimitiveObjectInspectorFactory.LAZY_SHORT_OBJECT_INSPECTOR);
      initLazyObject(b, new byte[] {'0'}, 0, 0);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'0'}, 0, 1);
      assertEquals(new ShortWritable((short) 0), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '0'}, 0, 2);
      assertEquals(new ShortWritable((short) 0), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '0'}, 0, 2);
      assertEquals(new ShortWritable((short) 0), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 1);
      assertEquals(new ShortWritable((short) 1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '-', '1'}, 1, 2);
      assertEquals(new ShortWritable((short) -1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '+', '1'}, 1, 2);
      assertEquals(new ShortWritable((short) 1), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '8'}, 0, 4);
      assertEquals(new ShortWritable((short) -128), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '7'}, 0, 4);
      assertEquals(new ShortWritable((short) 127), b.getWritableObject());
      // short boundary values
      initLazyObject(b, new byte[] {'-', '3', '2', '7', '6', '8'}, 0, 6);
      assertEquals(new ShortWritable((short) -32768), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '3', '2', '7', '6', '7'}, 0, 6);
      assertEquals(new ShortWritable((short) 32767), b.getWritableObject());
      // garbage, overflow past short range, truncated ranges
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 2);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '3', '2', '7', '6', '9'}, 0, 6);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '3', '2', '7', '6', '8'}, 0, 6);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  }

  /**
   * Test the LazyInteger class.
   */
  @Test
  public void testLazyInteger() throws Throwable {
    try {
      LazyInteger b = new LazyInteger(
          LazyPrimitiveObjectInspectorFactory.LAZY_INT_OBJECT_INSPECTOR);
      initLazyObject(b, new byte[] {'0'}, 0, 0);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'0'}, 0, 1);
      assertEquals(new IntWritable(0), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '0'}, 0, 2);
      assertEquals(new IntWritable(0), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '0'}, 0, 2);
      assertEquals(new IntWritable(0), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 1);
      assertEquals(new IntWritable(1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '-', '1'}, 1, 2);
      assertEquals(new IntWritable(-1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '+', '1'}, 1, 2);
      assertEquals(new IntWritable(1), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '8'}, 0, 4);
      assertEquals(new IntWritable(-128), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '7'}, 0, 4);
      assertEquals(new IntWritable(127), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '3', '2', '7', '6', '8'}, 0, 6);
      assertEquals(new IntWritable(-32768), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '3', '2', '7', '6', '7'}, 0, 6);
      assertEquals(new IntWritable(32767), b.getWritableObject());
      // int boundary values
      initLazyObject(b, new byte[] {'-', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '8'}, 0, 11);
      assertEquals(new IntWritable(-2147483648), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '7'}, 0, 11);
      assertEquals(new IntWritable(2147483647), b.getWritableObject());
      // garbage, overflow past int range, truncated ranges
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 2);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '9'}, 0, 11);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '8'}, 0, 11);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  }

  /**
   * Test the LazyLong class.
   */
  @Test
  public void testLazyLong() throws Throwable {
    try {
      LazyLong b = new LazyLong(
          LazyPrimitiveObjectInspectorFactory.LAZY_LONG_OBJECT_INSPECTOR);
      initLazyObject(b, new byte[] {'0'}, 0, 0);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'0'}, 0, 1);
      assertEquals(new LongWritable(0), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '0'}, 0, 2);
      assertEquals(new LongWritable(0), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '0'}, 0, 2);
      assertEquals(new LongWritable(0), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 1);
      assertEquals(new LongWritable(1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '-', '1'}, 1, 2);
      assertEquals(new LongWritable(-1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '+', '1'}, 1, 2);
      assertEquals(new LongWritable(1), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '8'}, 0, 4);
      assertEquals(new LongWritable(-128), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '7'}, 0, 4);
      assertEquals(new LongWritable(127), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '3', '2', '7', '6', '8'}, 0, 6);
      assertEquals(new LongWritable(-32768), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '3', '2', '7', '6', '7'}, 0, 6);
      assertEquals(new LongWritable(32767), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '8'}, 0, 11);
      assertEquals(new LongWritable(-2147483648), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '7'}, 0, 11);
      assertEquals(new LongWritable(2147483647), b.getWritableObject());
      // long boundary values
      initLazyObject(b, new byte[] {'-', '9', '2', '2', '3', '3', '7', '2',
          '0', '3', '6', '8', '5', '4', '7', '7', '5', '8', '0', '8'}, 0, 20);
      assertEquals(new LongWritable(-9223372036854775808L), b
          .getWritableObject());
      initLazyObject(b, new byte[] {'+', '9', '2', '2', '3', '3', '7', '2',
          '0', '3', '6', '8', '5', '4', '7', '7', '5', '8', '0', '7'}, 0, 20);
      assertEquals(new LongWritable(9223372036854775807L), b
          .getWritableObject());
      // garbage, overflow past long range, truncated ranges
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 2);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '9', '2', '2', '3', '3', '7', '2',
          '0', '3', '6', '8', '5', '4', '7', '7', '5', '8', '0', '9'}, 0, 20);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '9', '2', '2', '3', '3', '7', '2',
          '0', '3', '6', '8', '5', '4', '7', '7', '5', '8', '0', '8'}, 0, 20);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  }

  /**
   * Test the LazyDouble class.
   */
  @Test
  public void testLazyDouble() throws Throwable {
    try {
      LazyDouble b = new LazyDouble(
          LazyPrimitiveObjectInspectorFactory.LAZY_DOUBLE_OBJECT_INSPECTOR);
      initLazyObject(b, new byte[] {'0'}, 0, 0);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'0'}, 0, 1);
      assertEquals(new DoubleWritable(0), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '0'}, 0, 2);
      assertEquals(new DoubleWritable(0), b.getWritableObject());
      // "-0" keeps its sign as a double
      initLazyObject(b, new byte[] {'-', '0'}, 0, 2);
      assertEquals(new DoubleWritable(-0.0), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 1);
      assertEquals(new DoubleWritable(1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '-', '1'}, 1, 2);
      assertEquals(new DoubleWritable(-1), b.getWritableObject());
      initLazyObject(b, new byte[] {'a', '+', '1'}, 1, 2);
      assertEquals(new DoubleWritable(1), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '8'}, 0, 4);
      assertEquals(new DoubleWritable(-128), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '7'}, 0, 4);
      assertEquals(new DoubleWritable(127), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '3', '2', '7', '6', '8'}, 0, 6);
      assertEquals(new DoubleWritable(-32768), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '3', '2', '7', '6', '7'}, 0, 6);
      assertEquals(new DoubleWritable(32767), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '8'}, 0, 11);
      assertEquals(new DoubleWritable(-2147483648), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '7'}, 0, 11);
      assertEquals(new DoubleWritable(2147483647), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '9', '2', '2', '3', '3', '7', '2',
          '0', '3', '6', '8', '5', '4', '7', '7', '5', '8', '0', '8'}, 0, 20);
      assertEquals(new DoubleWritable(-9223372036854775808L), b
          .getWritableObject());
      initLazyObject(b, new byte[] {'+', '9', '2', '2', '3', '3', '7', '2',
          '0', '3', '6', '8', '5', '4', '7', '7', '5', '8', '0', '7'}, 0, 20);
      assertEquals(new DoubleWritable(9223372036854775807L), b
          .getWritableObject());
      // decimal points and exponents
      initLazyObject(b, new byte[] {'-', '3', '.', '7', '6', '8'}, 0, 6);
      assertEquals(new DoubleWritable(-3.768), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '3', '.', '7', '6', '7'}, 0, 6);
      assertEquals(new DoubleWritable(3.767), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '2', '.', '4', '7', '4', '8', '3',
          '6', 'e', '8'}, 0, 11);
      assertEquals(new DoubleWritable(-2.474836e8), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '2', '.', '4', '7', '4', '8', '3',
          'E', '-', '7'}, 0, 11);
      assertEquals(new DoubleWritable(2.47483E-7), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '.', '4', '7', '4', '8', '3', '6',
          'e', '8'}, 0, 10);
      assertEquals(new DoubleWritable(-.474836e8), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '.', '4', '7', '4', '8', '3', 'E',
          '-', '7'}, 0, 10);
      assertEquals(new DoubleWritable(.47483E-7), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '.'}, 0, 11);
      assertEquals(new DoubleWritable(-214748364.), b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '2', '1', '4', '7', '4', '8', '3',
          '6', '4', '.'}, 0, 11);
      assertEquals(new DoubleWritable(+214748364.), b.getWritableObject());
      initLazyObject(b, new byte[] {'.', '0'}, 0, 2);
      assertEquals(new DoubleWritable(.0), b.getWritableObject());
      initLazyObject(b, new byte[] {'0', '.'}, 0, 2);
      assertEquals(new DoubleWritable(0.), b.getWritableObject());
      // garbage and truncated ranges
      initLazyObject(b, new byte[] {'a', '1', 'b'}, 1, 2);
      assertNull(b.getWritableObject());
      // NOTE(review): duplicated assertion in the original — kept as-is.
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'.', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', '2', '3'}, 0, 1);
      assertNull(b.getWritableObject());
      // huge exponents overflow to +/-infinity, tiny ones underflow to zero
      initLazyObject(b, new byte[] {'-', '1', 'e', '3', '3', '3', '3', '3',
          '3'}, 0, 9);
      assertEquals(new DoubleWritable(Double.NEGATIVE_INFINITY), b
          .getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', 'e', '3', '3', '3', '3', '3',
          '3'}, 0, 9);
      assertEquals(new DoubleWritable(Double.POSITIVE_INFINITY), b
          .getWritableObject());
      initLazyObject(b, new byte[] {'+', '1', 'e', '-', '3', '3', '3', '3',
          '3'}, 0, 8);
      assertEquals(new DoubleWritable(0), b.getWritableObject());
      initLazyObject(b, new byte[] {'-', '1', 'e', '-', '3', '3', '3', '3',
          '3'}, 0, 8);
      assertEquals(new DoubleWritable(-0.0), b.getWritableObject());
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  }

  /**
   * Test the LazyString class.
   */
  @Test
  public void testLazyString() throws Throwable {
    try {
      LazyString b = new LazyString(LazyPrimitiveObjectInspectorFactory
          .getLazyStringObjectInspector(false, (byte) 0));
      initLazyObject(b, new byte[] {'0'}, 0, 0);
      assertEquals(new Text(""), b.getWritableObject());
      initLazyObject(b, new byte[] {'0'}, 0, 1);
      assertEquals(new Text("0"), b.getWritableObject());
      initLazyObject(b, new byte[] {'0', '1', '2'}, 1, 1);
      assertEquals(new Text("1"), b.getWritableObject());
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  }

  @Test
  public void testLazyBinary() {
    LazyBinary ba = new LazyBinary(
        LazyPrimitiveObjectInspectorFactory.LAZY_BINARY_OBJECT_INSPECTOR);
    initLazyObject(ba, new byte[] {}, 0, 0);
    assertEquals(new BytesWritable(), ba.getWritableObject());
    initLazyObject(ba, new byte[] {'%'}, 0, 1);
    assertEquals(new BytesWritable(new byte[] {'%'}), ba.getWritableObject());
    initLazyObject(ba, new byte[] {'2', '>', '3'}, 1, 1);
    assertEquals(new BytesWritable(new byte[] {'>'}), ba.getWritableObject());
    initLazyObject(ba, new byte[] {'2', '?', '3'}, 0, 3);
    assertEquals(new BytesWritable(new byte[] {'2', '?', '3'}),
        ba.getWritableObject());
    initLazyObject(ba, new byte[] {'\n'}, 0, 1);
    assertEquals(new BytesWritable(new byte[] {'\n'}),
        ba.getWritableObject());
  }

  @Test
  public void testLazyTimestamp() throws Throwable {
    LazyTimestamp t = new LazyTimestamp(
        LazyPrimitiveObjectInspectorFactory.LAZY_TIMESTAMP_OBJECT_INSPECTOR);
    // the literal "NULL" marker yields a null timestamp
    String nullDate = "NULL";
    byte[] nullBytes = nullDate.getBytes();
    initLazyObject(t, nullBytes, 0, nullBytes.length);
    assertEquals(true, t.isNull);
    String sampleDate = "2013-02-12 21:04:58";
    byte[] good2013 = sampleDate.getBytes();
    initLazyObject(t, good2013, 0, good2013.length);
    assertEquals(false, t.isNull);
    assertEquals(Timestamp.valueOf(sampleDate),
        t.getWritableObject().getTimestamp());
    // unparseable seconds field -> null
    String badDate = "2013-02-12 21:04:XX";
    byte[] bad2013 = badDate.getBytes();
    initLazyObject(t, bad2013, 0, bad2013.length);
    assertEquals(true, t.isNull);
  }

  @Test
  public void testLazyDate() throws Throwable {
    LazyDate t = new LazyDate(
        LazyPrimitiveObjectInspectorFactory.LAZY_DATE_OBJECT_INSPECTOR);
    // the literal "NULL" marker yields a null date
    String nullDate = "NULL";
    byte[] nullBytes = nullDate.getBytes();
    initLazyObject(t, nullBytes, 0, nullBytes.length);
    assertEquals(true, t.isNull);
    String sampleDate = "2013-02-12";
    byte[] good2013 = sampleDate.getBytes();
    initLazyObject(t, good2013, 0, good2013.length);
    assertEquals(false, t.isNull);
    assertEquals(Date.valueOf(sampleDate), t.getWritableObject().get());
    // unparseable year field -> null
    String badDate = "X013-02-12";
    byte[] bad2013 = badDate.getBytes();
    initLazyObject(t, bad2013, 0, bad2013.length);
    assertEquals(true, t.isNull);
  }

  // writeUTF8 must round-trip with String.valueOf, including boundary ints
  @Test
  public void testLazyIntegerWrite() throws Throwable {
    try {
      ByteStream.Output out = new ByteStream.Output();
      int[] tests = {0, -1, 1, -10, 10, -123, 123, Integer.MIN_VALUE,
          Integer.MIN_VALUE + 1, Integer.MAX_VALUE, Integer.MAX_VALUE - 1};
      for (int v : tests) {
        out.reset();
        LazyInteger.writeUTF8(out, v);
        Text t = new Text();
        t.set(out.getData(), 0, out.getLength());
        assertEquals(String.valueOf(v), t.toString());
      }
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  }

  // writeUTF8 must round-trip with String.valueOf, including boundary longs
  @Test
  public void testLazyLongWrite() throws Throwable {
    try {
      ByteStream.Output out = new ByteStream.Output();
      long[] tests = {0L, -1, 1, -10, 10, -123, 123, Long.MIN_VALUE,
          Long.MIN_VALUE + 1, Long.MAX_VALUE, Long.MAX_VALUE - 1};
      for (long v : tests) {
        out.reset();
        LazyLong.writeUTF8(out, v);
        Text t = new Text();
        t.set(out.getData(), 0, out.getLength());
        assertEquals(String.valueOf(v), t.toString());
      }
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  }

  // helper: parseInt must succeed and produce intVal
  private void testIntCaseWithPass(String strVal, int intVal, boolean trim) {
    Text text = new Text(strVal);
    assertEquals(
        intVal,
        LazyInteger.parseInt(text.getBytes(), 0, text.getLength(), 10, trim));
  }

  // helper: parseInt must throw NumberFormatException
  private void testIntCaseWithFail(String strVal, boolean trim) {
    Text text = new Text(strVal);
    try {
      LazyInteger.parseInt(text.getBytes(), 0, text.getLength(), 10, trim);
      fail("Expected to fail while parsing '" + strVal + "'");
    } catch (NumberFormatException err) {
      // Error was expected
    }
  }

  // helper: parseLong must succeed and produce longVal
  private void testLongCaseWithPass(String strVal, long longVal, boolean trim) {
    Text text = new Text(strVal);
    assertEquals(
        longVal,
        LazyLong.parseLong(text.getBytes(), 0, text.getLength(), 10, trim));
  }

  // helper: parseLong must throw NumberFormatException
  private void testLongCaseWithFail(String strVal, boolean trim) {
    Text text = new Text(strVal);
    try {
      LazyLong.parseLong(text.getBytes(), 0, text.getLength(), 10, trim);
      fail("Expected to fail while parsing '" + strVal + "'");
    } catch (NumberFormatException err) {
      // Error was expected
    }
  }

  @Test
  public void testLazyIntWithSpaces() throws Throwable {
    Object[][] casesWithoutSpaces = {
        {"0", 0}, {"-128", -128}, {"128", 128}, {"+128", 128},
        {"-2147483648", -2147483648}, {"2147483647", 2147483647},
        {"+2147483647", 2147483647},
    };
    Object[][] casesWithSpaces = {
        {" 0", 0}, {"0 ", 0}, {" 0 ", 0},
        {" -128", -128}, {"-128 ", -128}, {" -128 ", -128},
        {" 128", 128}, {"128 ", 128}, {" 128 ", 128},
        {" +128", 128}, {"+128 ", 128}, {" +128 ", 128}, {" +128 ", 128},
        {" -2147483648", -2147483648}, {"-2147483648 ", -2147483648},
        {" -2147483648 ", -2147483648},
        {" 2147483647", 2147483647}, {"2147483647 ", 2147483647},
        {" 2147483647 ", 2147483647},
        {" +2147483647", 2147483647}, {"+2147483647 ", 2147483647},
        {" +2147483647 ", 2147483647},
    };
    String[] casesWithErrors = {
        "", " ", "one", " one ", "123:", "123a", " 123a ", "a123", " a123 ",
        // Exceeds MAX_VALUE
        "2147483648", "-2147483649",
    };
    //
    // trim=false
    //
    boolean trim = false;
    for (Object[] testCase : casesWithoutSpaces) {
      testIntCaseWithPass((String) testCase[0],
          ((Number) testCase[1]).intValue(), trim);
    }
    for (Object[] testCase : casesWithSpaces) {
      // With trim=false, parsing cannot handle spaces
      testIntCaseWithFail((String) testCase[0], trim);
    }
    for (String testCase : casesWithErrors) {
      testIntCaseWithFail(testCase, trim);
    }
    //
    // trim=true
    //
    trim = true;
    for (Object[] testCase : casesWithoutSpaces) {
      testIntCaseWithPass((String) testCase[0],
          ((Number) testCase[1]).intValue(), trim);
    }
    for (Object[] testCase : casesWithSpaces) {
      // With trim=true, parsing can handle spaces
      testIntCaseWithPass((String) testCase[0],
          ((Number) testCase[1]).intValue(), trim);
    }
    for (String testCase : casesWithErrors) {
      testIntCaseWithFail(testCase, trim);
    }
  }

  @Test
  public void testLazyLongWithSpaces() throws Throwable {
    Object[][] casesWithoutSpaces = {
        {"0", 0}, {"-128", -128}, {"128", 128}, {"+128", 128},
        {"-9223372036854775808", -9223372036854775808L},
        {"9223372036854775807", 9223372036854775807L},
        {"+9223372036854775807", 9223372036854775807L},
    };
    Object[][] casesWithSpaces = {
        {" 0", 0}, {"0 ", 0}, {" 0 ", 0},
        {" -128", -128}, {"-128 ", -128}, {" -128 ", -128},
        {" 128", 128}, {"128 ", 128}, {" 128 ", 128},
        {" +128", 128}, {"+128 ", 128}, {" +128 ", 128}, {" +128 ", 128},
        {" -9223372036854775808", -9223372036854775808L},
        {"-9223372036854775808 ", -9223372036854775808L},
        {" -9223372036854775808 ", -9223372036854775808L},
        {" 9223372036854775807", 9223372036854775807L},
        {"9223372036854775807 ", 9223372036854775807L},
        {" 9223372036854775807 ", 9223372036854775807L},
        {" +9223372036854775807", 9223372036854775807L},
        {"+9223372036854775807 ", 9223372036854775807L},
        {" +9223372036854775807 ", 9223372036854775807L},
    };
    String[] casesWithErrors = {
        "", " ", "one", " one ", "123:", "123a", " 123a ", "a123", " a123 ",
        // Exceeds max value
        "9223372036854775808", "9223372036854775809",
    };
    //
    // trim=false
    //
    boolean trim = false;
    for (Object[] testCase : casesWithoutSpaces) {
      testLongCaseWithPass((String) testCase[0],
          ((Number) testCase[1]).longValue(), trim);
    }
    for (Object[] testCase : casesWithSpaces) {
      // With trim=false, parsing cannot handle spaces
      testLongCaseWithFail((String) testCase[0], trim);
    }
    for (String testCase : casesWithErrors) {
      testLongCaseWithFail(testCase, trim);
    }
    //
    // trim=true
    //
    trim = true;
    for (Object[] testCase : casesWithoutSpaces) {
      testLongCaseWithPass((String) testCase[0],
          ((Number) testCase[1]).longValue(), trim);
    }
    for (Object[] testCase : casesWithSpaces) {
      // With trim=true, parsing can handle spaces
      testLongCaseWithPass((String) testCase[0],
          ((Number) testCase[1]).longValue(), trim);
    }
    for (String testCase : casesWithErrors) {
      testLongCaseWithFail(testCase, trim);
    }
  }
}
/*
 * Artifactory is a binaries repository manager.
 * Copyright (C) 2012 JFrog Ltd.
 *
 * Artifactory is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Artifactory is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Artifactory.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.artifactory.storage.db;

import org.apache.commons.lang.StringUtils;
import org.apache.tomcat.jdbc.pool.jmx.ConnectionPool;
import org.artifactory.api.common.BasicStatusHolder;
import org.artifactory.api.context.ContextHelper;
import org.artifactory.common.ArtifactoryHome;
import org.artifactory.descriptor.config.CentralConfigDescriptor;
import org.artifactory.mbean.MBeanRegistrationService;
import org.artifactory.spring.Reloadable;
import org.artifactory.storage.StorageProperties;
import org.artifactory.storage.db.fs.dao.NodesDao;
import org.artifactory.storage.db.mbean.ManagedDataSource;
import org.artifactory.storage.db.properties.model.DbProperties;
import org.artifactory.storage.db.properties.service.ArtifactoryDbPropertiesService;
import org.artifactory.storage.db.spring.ArtifactoryDataSource;
import org.artifactory.storage.db.spring.ArtifactoryTomcatDataSource;
import org.artifactory.storage.db.util.DbUtils;
import org.artifactory.storage.db.util.IdGenerator;
import org.artifactory.storage.db.util.JdbcHelper;
import org.artifactory.storage.db.version.ArtifactoryDBVersion;
import org.artifactory.util.ResourceUtils;
import org.artifactory.version.CompoundVersionDetails;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import javax.annotation.PostConstruct;
import javax.sql.DataSource;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.concurrent.Callable;

/**
 * Default {@link DbService} implementation: bootstraps the database schema on first
 * start, initializes the unique-id generator, and exposes DB-related utilities
 * (transactions, Derby compression, MBean registration).
 *
 * @author Yossi Shaul
 */
@Repository
@Reloadable(beanClass = DbService.class)
public class DbServiceImpl implements DbService {
    private static final Logger log = LoggerFactory.getLogger(DbServiceImpl.class);

    // Minimum supported MySQL major.minor version (compared numerically; see
    // checkMySqlMinVersion for the caveat on this representation).
    private static final double MYSQL_MIN_VERSION = 5.5;

    @Autowired
    private JdbcHelper jdbcHelper;

    @Autowired
    @Qualifier("storageProperties")
    private StorageProperties storageProperties;

    @Autowired
    private IdGenerator idGenerator;

    @Autowired
    private ArtifactoryDbPropertiesService dbPropertiesService;

    /**
     * Runs after dependency injection: logs connection info, creates the schema if the
     * 'nodes' table is missing (checking MySQL version first when applicable), records
     * the installed version, and initializes the id generator.
     */
    @PostConstruct
    private void initDb() throws Exception {
        printConnectionInfo();
        // check if db tables exist and initialize if not
        if (!isSchemaExist()) {
            try (Connection con = jdbcHelper.getDataSource().getConnection()) {
                // if using mySQL, check version compatibility
                // NOTE(review): an unsupported version is only logged here, not fatal —
                // schema creation proceeds regardless of the check's result.
                if (storageProperties.getDbType() == DbType.MYSQL) {
                    checkMySqlMinVersion();
                }

                // read ddl from file and execute
                log.info("***Creating database schema***");
                DbUtils.executeSqlStream(con, getDbSchemaSql());
                updateDbProperties();
            }
        }
        // initialize id generator
        initializeIdGenerator();
    }

    /**
     * Persists the currently-running Artifactory version/revision with the current
     * time as the installation timestamp.
     */
    private void updateDbProperties() {
        // Update DBProperties
        long installTime = System.currentTimeMillis();
        CompoundVersionDetails versionDetails = ArtifactoryHome.get().readRunningArtifactoryVersion();
        String versionStr = versionDetails.getVersion().getValue();
        long timestamp = versionDetails.getTimestamp();
        int revisionInt = versionDetails.getRevisionInt();
        dbPropertiesService.updateDbProperties(new DbProperties(installTime, versionStr, revisionInt, timestamp));
    }

    @Override
    public void init() {
        registerDataSourceMBean();
    }

    @Override
    public DbType getDatabaseType() {
        return storageProperties.getDbType();
    }

    /** Returns the next unique id from the shared id generator. */
    @Override
    public long nextId() {
        return idGenerator.nextId();
    }

    @Override
    public void compressDerbyDb(BasicStatusHolder statusHolder) {
        DerbyUtils.compress(statusHolder);
    }

    /**
     * Runs {@code execute} under the current Spring-managed transaction, optionally
     * naming the transaction. Checked exceptions are wrapped in RuntimeException;
     * runtime exceptions propagate unchanged.
     */
    @Override
    public <T> T invokeInTransaction(String transactionName, Callable<T> execute) {
        if (StringUtils.isNotBlank(transactionName)) {
            TransactionSynchronizationManager.setCurrentTransactionName(transactionName);
        }
        try {
            return execute.call();
        } catch (Exception e) {
            if (e instanceof RuntimeException) {
                throw (RuntimeException) e;
            }
            throw new RuntimeException(e);
        }
    }

    //used via reflection by DbBaseTest
    public void initializeIdGenerator() throws SQLException {
        idGenerator.initializeIdGenerator();
    }

    /**
     * Loads the per-database-type DDL script from the classpath
     * (e.g. "/mysql/mysql.sql").
     *
     * @throws IOException if the resource is missing
     */
    private InputStream getDbSchemaSql() throws IOException {
        String dbTypeName = storageProperties.getDbType().toString();
        String resourcePath = "/" + dbTypeName + "/" + dbTypeName + ".sql";
        InputStream resource = ResourceUtils.getResource(resourcePath);
        if (resource == null) {
            throw new IOException("Database DDL resource not found at: '" + resourcePath + "'");
        }
        return resource;
    }

    /** Uses the presence of the 'nodes' table as the schema-exists marker. */
    private boolean isSchemaExist() throws SQLException {
        log.debug("Checking for database schema existence");
        try (Connection con = jdbcHelper.getDataSource().getConnection()) {
            DatabaseMetaData metaData = con.getMetaData();
            return tableExists(metaData, NodesDao.TABLE_NAME);
        }
    }

    public static boolean tableExists(DatabaseMetaData metaData, String tableName) throws SQLException {
        return DbUtils.tableExists(metaData, tableName);
    }

    /** Logs product/driver names and the connection URL; failures are non-fatal. */
    private void printConnectionInfo() throws SQLException {
        Connection connection = jdbcHelper.getDataSource().getConnection();
        try {
            DatabaseMetaData meta = connection.getMetaData();
            log.info("Database: {} {}. Driver: {} {}", meta.getDatabaseProductName(), meta.getDatabaseProductVersion(),
                    meta.getDriverName(), meta.getDriverVersion());
            log.info("Connection URL: {}", meta.getURL());
        } catch (SQLException e) {
            log.warn("Can not retrieve database and driver name / version", e);
        } finally {
            DbUtils.close(connection);
        }
    }

    /**
     * Registers the data source (and, if enabled, the Tomcat JDBC pool) as JMX MBeans
     * for monitoring.
     */
    private void registerDataSourceMBean() {
        DataSource dataSource = jdbcHelper.getDataSource();
        if (dataSource instanceof ArtifactoryDataSource) {
            ContextHelper.get().beanForType(MBeanRegistrationService.class).register(
                    new ManagedDataSource((ArtifactoryDataSource) dataSource, jdbcHelper), "Storage", "Data Source");
        }
        if (dataSource instanceof ArtifactoryTomcatDataSource) {
            // register the Tomcat JDBC pool JMX if enabled
            ArtifactoryTomcatDataSource tomcatDataSource = (ArtifactoryTomcatDataSource) dataSource;
            if (tomcatDataSource.isJmxEnabled()) {
                ConnectionPool jmxPool = tomcatDataSource.getPool().getJmxPool();
                ContextHelper.get().beanForType(MBeanRegistrationService.class)
                        .register(jmxPool, "Storage", "Connection Pool");
            }
        }
    }

    /**
     * Returns true when the connected MySQL server is at least MYSQL_MIN_VERSION.
     * Parses "major.minor" out of SELECT VERSION() and compares as a double.
     * NOTE(review): the double comparison misorders two-digit minors (e.g. "5.10"
     * parses as 5.1 &lt; 5.5) — confirm whether such versions need supporting before
     * relying on this check.
     */
    private boolean checkMySqlMinVersion() {
        log.debug("Checking MySQL version compatibility");
        ResultSet rs = null;
        try {
            rs = jdbcHelper.executeSelect("SELECT VERSION();");
            if (rs.next()) {
                String versionString = rs.getString(1);
                // cut at the second dot (keep "major.minor"); keep whole string if absent
                int i = StringUtils.ordinalIndexOf(versionString, ".", 2);
                if (i == -1) {
                    i = versionString.length();
                }
                Double mysqlVersion = Double.valueOf(versionString.substring(0, i));
                if (mysqlVersion >= MYSQL_MIN_VERSION) {
                    return true;
                } else {
                    log.error("Unsupported MySQL version found [" + versionString + "]. " +
                            "Minimum version required is " + MYSQL_MIN_VERSION + ". " +
                            "Please follow the requirements on the wiki page.");
                    return false;
                }
            }
        } catch (Exception e) {
            log.error("Could not determine MySQL version due to an exception", e);
        } finally {
            DbUtils.close(rs);
        }
        log.error("Could not determine MySQL version. Minimum version should be " + MYSQL_MIN_VERSION + " and above.");
        return false;
    }

    /** Applies DB schema conversions when upgrading between Artifactory versions. */
    @Override
    public void convert(CompoundVersionDetails source, CompoundVersionDetails target) {
        ArtifactoryDBVersion.convert(source.getVersion(), jdbcHelper, storageProperties.getDbType());
        updateDbProperties();
    }

    @Override
    public void reload(CentralConfigDescriptor oldDescriptor) {
    }

    @Override
    public void destroy() {
        jdbcHelper.destroy();
    }
}
/*
 * Copyright 2011 Benjamin Glatzel <benjamin.glatzel@me.com>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terasology.rendering.physics;

import com.bulletphysics.collision.broadphase.BroadphaseInterface;
import com.bulletphysics.collision.broadphase.DbvtBroadphase;
import com.bulletphysics.collision.dispatch.CollisionDispatcher;
import com.bulletphysics.collision.dispatch.CollisionObject;
import com.bulletphysics.collision.dispatch.DefaultCollisionConfiguration;
import com.bulletphysics.collision.shapes.BoxShape;
import com.bulletphysics.dynamics.DiscreteDynamicsWorld;
import com.bulletphysics.dynamics.RigidBody;
import com.bulletphysics.dynamics.RigidBodyConstructionInfo;
import com.bulletphysics.dynamics.constraintsolver.SequentialImpulseConstraintSolver;
import com.bulletphysics.linearmath.DefaultMotionState;
import com.bulletphysics.linearmath.Transform;
import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;
import org.terasology.game.Terasology;
import org.terasology.logic.characters.Player;
import org.terasology.logic.manager.AudioManager;
import org.terasology.logic.world.Chunk;
import org.terasology.model.blocks.Block;
import org.terasology.model.blocks.management.BlockManager;
import org.terasology.model.inventory.ItemBlock;
import org.terasology.rendering.interfaces.IGameObject;
import org.terasology.rendering.world.WorldRenderer;
import org.terasology.utilities.FastRandom;

import javax.vecmath.Matrix3f;
import javax.vecmath.Matrix4f;
import javax.vecmath.Vector3d;
import javax.vecmath.Vector3f;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;

/**
 * Renders blocks using the Bullet physics library.
 *
 * @author Benjamin Glatzel <benjamin.glatzel@me.com>
 */
public class BulletPhysicsRenderer implements IGameObject {

    /**
     * A rigid body representing a single loose block. Tracks the block type, its
     * creation time, and whether it is temporary (debris) or picked (being pulled
     * toward the player). Ordered oldest-first via compareTo.
     */
    private class BlockRigidBody extends RigidBody implements Comparable<BlockRigidBody> {
        private final byte _type;           // block type id, resolved via BlockManager
        private final long _createdAt;      // creation time in ms, used for aging out

        public boolean _temporary = false;  // debris block, removed after ~10s of inactivity
        public boolean _picked = false;     // marked once the player got close enough to loot

        public BlockRigidBody(RigidBodyConstructionInfo constructionInfo, byte type) {
            super(constructionInfo);
            _type = type;
            _createdAt = Terasology.getInstance().getTimeInMs();
        }

        /** Distance from this body's world position to the active player. */
        public float distanceToPlayer() {
            Transform t = new Transform();
            getMotionState().getWorldTransform(t);

            Matrix4f tMatrix = new Matrix4f();
            t.getMatrix(tMatrix);

            Player player = Terasology.getInstance().getActivePlayer();

            Vector3f blockPlayer = new Vector3f();
            tMatrix.get(blockPlayer);
            blockPlayer.sub(new Vector3f(player.getPosition()));

            return blockPlayer.length();
        }

        public long calcAgeInMs() {
            return Terasology.getInstance().getTimeInMs() - _createdAt;
        }

        public byte getType() {
            return _type;
        }

        /** Older blocks sort after newer ones (larger age => -1). */
        public int compareTo(BlockRigidBody blockRigidBody) {
            if (blockRigidBody.calcAgeInMs() == calcAgeInMs()) {
                return 0;
            }

            if (blockRigidBody.calcAgeInMs() > calcAgeInMs())
                return 1;
            else
                return -1;
        }
    }

    public enum BLOCK_SIZE {
        FULL_SIZE,
        HALF_SIZE,
        QUARTER_SIZE
    }

    // Cap on simultaneously simulated temporary (debris) blocks.
    private static final int MAX_TEMP_BLOCKS = 128;

    // Bodies created off the physics step are queued and added in addQueuedBodies().
    private final LinkedList<RigidBody> _insertionQueue = new LinkedList<RigidBody>();
    private final ArrayList<BlockRigidBody> _blocks = new ArrayList<BlockRigidBody>();

    // Chunk collision bodies currently registered with the dynamics world.
    private HashSet<RigidBody> _chunks = new HashSet<RigidBody>();

    // Shared collision shapes for the three block sizes (half-extents).
    private final BoxShape _blockShape = new BoxShape(new Vector3f(0.5f, 0.5f, 0.5f));
    private final BoxShape _blockShapeHalf = new BoxShape(new Vector3f(0.25f, 0.25f, 0.25f));
    private final BoxShape _blockShapeQuarter = new BoxShape(new Vector3f(0.125f, 0.125f, 0.125f));

    private final CollisionDispatcher _dispatcher;
    private final BroadphaseInterface _broadphase;
    private final DefaultCollisionConfiguration _defaultCollisionConfiguration;
    private final SequentialImpulseConstraintSolver _sequentialImpulseConstraintSolver;
    private final DiscreteDynamicsWorld _discreteDynamicsWorld;

    private final WorldRenderer _parent;

    /** Builds the Bullet world (DBVT broadphase, sequential impulse solver, gravity -10y). */
    public BulletPhysicsRenderer(WorldRenderer parent) {
        _broadphase = new DbvtBroadphase();
        _defaultCollisionConfiguration = new DefaultCollisionConfiguration();
        _dispatcher = new CollisionDispatcher(_defaultCollisionConfiguration);
        _sequentialImpulseConstraintSolver = new SequentialImpulseConstraintSolver();
        _discreteDynamicsWorld = new DiscreteDynamicsWorld(_dispatcher, _broadphase, _sequentialImpulseConstraintSolver, _defaultCollisionConfiguration);
        _discreteDynamicsWorld.setGravity(new Vector3f(0f, -10f, 0f));
        _parent = parent;
    }

    /**
     * Spawns quarter-size lootable debris blocks around {@code position}, thrown
     * upward with a fixed impulse.
     * NOTE(review): result is sized 8 but the loop runs getLootAmount() times —
     * an ArrayIndexOutOfBoundsException if a block's loot amount exceeds 8; confirm
     * the invariant on Block.getLootAmount().
     */
    public BlockRigidBody[] addLootableBlocks(Vector3f position, Block block) {
        FastRandom rand = Terasology.getInstance().getActiveWorldProvider().getRandom();
        BlockRigidBody result[] = new BlockRigidBody[8];

        for (int i = 0; i < block.getLootAmount(); i++) {
            // Position the smaller blocks
            Vector3f offsetPossition = new Vector3f((float) rand.randomDouble() * 0.5f, (float) rand.randomDouble() * 0.5f, (float) rand.randomDouble() * 0.5f);
            offsetPossition.add(position);

            result[i] = addBlock(offsetPossition, block.getId(), new Vector3f(0.0f, 4000f, 0.0f), BLOCK_SIZE.QUARTER_SIZE, false);
        }

        return result;
    }

    /** Adds a temporary (auto-expiring) block with no initial impulse. */
    public BlockRigidBody addTemporaryBlock(Vector3f position, byte type, BLOCK_SIZE size) {
        BlockRigidBody result = addBlock(position, type, size, true);
        return result;
    }

    /** Adds a temporary (auto-expiring) block with an initial impulse. */
    public BlockRigidBody addTemporaryBlock(Vector3f position, byte type, Vector3f impulse, BLOCK_SIZE size) {
        BlockRigidBody result = addBlock(position, type, impulse, size, true);
        return result;
    }

    /** Convenience overload: adds a block with zero impulse. */
    public BlockRigidBody addBlock(Vector3f position, byte type, BLOCK_SIZE size, boolean temporary) {
        return addBlock(position, type, new Vector3f(0f, 0f, 0f), size, temporary);
    }

    /**
     * Adds a new physics block to be rendered as a rigid body. Translucent blocks are ignored.
     *
     * @param position The position
     * @param type     The block type
     * @param impulse  An impulse
     * @param size     The size of the block
     * @return The created rigid body (if any) — null when the temp-block cap is hit
     *         or the block type is translucent
     */
    public synchronized BlockRigidBody addBlock(Vector3f position, byte type, Vector3f impulse, BLOCK_SIZE size, boolean temporary) {
        if (temporary && _blocks.size() > MAX_TEMP_BLOCKS)
            return null;

        BoxShape shape = _blockShape;
        Block block = BlockManager.getInstance().getBlock(type);
        if (block.isTranslucent())
            return null;

        if (size == BLOCK_SIZE.HALF_SIZE)
            shape = _blockShapeHalf;
        else if (size == BLOCK_SIZE.QUARTER_SIZE)
            shape = _blockShapeQuarter;

        Matrix3f rot = new Matrix3f();
        rot.setIdentity();

        DefaultMotionState blockMotionState = new DefaultMotionState(new Transform(new Matrix4f(rot, position, 1.0f)));

        Vector3f fallInertia = new Vector3f();
        shape.calculateLocalInertia(block.getMass(), fallInertia);

        RigidBodyConstructionInfo blockCI = new RigidBodyConstructionInfo(block.getMass(), blockMotionState, shape, fallInertia);

        BlockRigidBody rigidBlock = new BlockRigidBody(blockCI, type);
        rigidBlock.setRestitution(0.0f);
        rigidBlock.setAngularFactor(0.5f);
        rigidBlock.setFriction(0.5f);
        rigidBlock._temporary = temporary;

        // Apply impulse
        rigidBlock.applyImpulse(impulse, new Vector3f(0.0f, 0.0f, 0.0f));

        // Deferred: actually added to the world in addQueuedBodies() during update()
        _insertionQueue.add(rigidBlock);

        return rigidBlock;
    }

    /**
     * Syncs chunk collision bodies with the dynamics world: registers bodies for up
     * to 32 nearby chunks and removes bodies for chunks that left proximity.
     */
    public void updateChunks() {
        ArrayList<Chunk> chunks = Terasology.getInstance().getActiveWorldRenderer().getChunksInProximity();
        HashSet<RigidBody> newBodies = new HashSet<RigidBody>();

        for (int i = 0; i < 32 && i < chunks.size(); i++) {
            final Chunk chunk = chunks.get(i);

            if (chunk != null) {
                chunk.updateRigidBody();

                RigidBody c = chunk.getRigidBody();

                if (c != null) {
                    newBodies.add(c);

                    if (!_chunks.contains(c)) {
                        _discreteDynamicsWorld.addRigidBody(c);
                    }
                }
            }
        }

        // Drop bodies for chunks no longer in proximity
        for (RigidBody body : _chunks) {
            if (!newBodies.contains(body)) {
                _discreteDynamicsWorld.removeRigidBody(body);
            }
        }

        _chunks = newBodies;
    }

    /**
     * Renders every BlockRigidBody in the world, camera-relative, scaling half- and
     * quarter-size blocks via glScalef.
     * NOTE(review): 'player' is unused here; mBuffer is put/flipped each iteration
     * without clear() — works only if glMultMatrix leaves the position at 0; verify
     * against the LWJGL buffer contract.
     */
    public void render() {
        Player player = _parent.getPlayer();

        FloatBuffer mBuffer = BufferUtils.createFloatBuffer(16);
        float[] mFloat = new float[16];

        GL11.glPushMatrix();

        Vector3d cameraPosition = Terasology.getInstance().getActiveCamera().getPosition();
        GL11.glTranslated(-cameraPosition.x, -cameraPosition.y, -cameraPosition.z);

        List<CollisionObject> collisionObjects = _discreteDynamicsWorld.getCollisionObjectArray();

        for (CollisionObject co : collisionObjects) {
            if (co.getClass().equals(BlockRigidBody.class)) {
                BlockRigidBody br = (BlockRigidBody) co;
                Block block = BlockManager.getInstance().getBlock(br.getType());

                Transform t = new Transform();
                br.getMotionState().getWorldTransform(t);

                t.getOpenGLMatrix(mFloat);
                mBuffer.put(mFloat);
                mBuffer.flip();

                GL11.glPushMatrix();
                GL11.glMultMatrix(mBuffer);

                if (br.getCollisionShape() == _blockShapeHalf)
                    GL11.glScalef(0.5f, 0.5f, 0.5f);
                else if (br.getCollisionShape() == _blockShapeQuarter)
                    GL11.glScalef(0.25f, 0.25f, 0.25f);

                block.renderWithLightValue(Terasology.getInstance().getActiveWorldRenderer().getRenderingLightValueAt(new Vector3d(t.origin)));

                GL11.glPopMatrix();
            }
        }

        GL11.glPopMatrix();
    }

    /**
     * Per-frame tick: drains the insertion queue, steps the simulation (delta is in
     * ms, converted to seconds, max 3 substeps), then maintains chunks and blocks.
     */
    public void update(double delta) {
        addQueuedBodies();

        try {
            _discreteDynamicsWorld.stepSimulation((float) (delta / 1000.0), 3);
        } catch (Exception e) {
            // Best-effort: a solver exception must not kill the game loop
            Terasology.getInstance().getLogger().log(Level.WARNING, "Somehow Bullet Physics managed to throw an exception again. Go along: " + e.toString());
        }

        updateChunks();
        removeTemporaryBlocks();
        checkForLootedBlocks();
    }

    /** Moves queued bodies into the dynamics world (synchronized against addBlock). */
    private synchronized void addQueuedBodies() {
        while (!_insertionQueue.isEmpty()) {
            RigidBody body = _insertionQueue.poll();

            if (body instanceof BlockRigidBody)
                _blocks.add((BlockRigidBody) body);

            _discreteDynamicsWorld.addRigidBody(body);
        }
    }

    /**
     * Loot pickup pass over non-temporary blocks: marks blocks within 8 units as
     * picked, pulls picked blocks toward the player with an impulse, and transfers
     * a block into the inventory once it comes within 1 unit.
     */
    private void checkForLootedBlocks() {
        Player player = Terasology.getInstance().getActivePlayer();

        // iterate backwards so remove(i) is safe
        for (int i = _blocks.size() - 1; i >= 0; i--) {
            BlockRigidBody b = _blocks.get(i);

            if (b._temporary)
                continue;

            // Check if the block is close enough to the player
            if (b.distanceToPlayer() < 8.0f && !b._picked) {
                // Mark it as picked and remove it from the simulation
                b._picked = true;
            }

            // Block was marked as being picked
            if (b._picked && b.distanceToPlayer() < 32.0f) {
                // Animate the movement in direction of the player
                if (b.distanceToPlayer() > 1.0) {
                    Transform t = new Transform();
                    b.getMotionState().getWorldTransform(t);

                    Matrix4f tMatrix = new Matrix4f();
                    t.getMatrix(tMatrix);

                    Vector3f blockPlayer = new Vector3f();
                    tMatrix.get(blockPlayer);
                    blockPlayer.sub(new Vector3f(player.getPosition()));
                    blockPlayer.normalize();
                    blockPlayer.scale(-16000f);

                    b.applyCentralImpulse(blockPlayer);
                } else {
                    // Block was looted (and reached the player)
                    Block block = BlockManager.getInstance().getBlock(b.getType());
                    player.getInventory().addItem(new ItemBlock(block.getBlockGroup()), 1);
                    AudioManager.play("Loot");

                    _blocks.remove(i);
                    _discreteDynamicsWorld.removeRigidBody(b);
                }
            }
        }
    }

    /** Removes temporary blocks once inactive or older than 10 seconds. */
    private void removeTemporaryBlocks() {
        if (_blocks.size() > 0) {
            for (int i = _blocks.size() - 1; i >= 0; i--) {
                if (!_blocks.get(i)._temporary)
                    continue;

                if (!_blocks.get(i).isActive() || _blocks.get(i).calcAgeInMs() > 10000) {
                    _discreteDynamicsWorld.removeRigidBody(_blocks.get(i));
                    _blocks.remove(i);
                }
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.giraph.master; import org.apache.giraph.aggregators.Aggregator; import org.apache.giraph.combiner.Combiner; import org.apache.giraph.conf.DefaultImmutableClassesGiraphConfigurable; import org.apache.giraph.graph.Computation; import org.apache.giraph.graph.GraphState; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.Mapper; /** * Interface for defining a master vertex that can perform centralized * computation between supersteps. This class will be instantiated on the * master node and will run every superstep before the workers do. * * Communication with the workers should be performed via aggregators. The * values of the aggregators are broadcast to the workers before * vertex.compute() is called and collected by the master before * master.compute() is called. This means aggregator values used by the workers * are consistent with aggregator values from the master from the same * superstep and aggregator used by the master are consistent with aggregator * values from the workers from the previous superstep. 
*/ public abstract class MasterCompute extends DefaultImmutableClassesGiraphConfigurable implements MasterAggregatorUsage, Writable { /** If true, do not do anymore computation on this vertex. */ private boolean halt = false; /** Master aggregator usage */ private MasterAggregatorUsage masterAggregatorUsage; /** Graph state */ private GraphState graphState; /** Computation and Combiner class used, which can be switched by master */ private SuperstepClasses superstepClasses; /** * Must be defined by user to specify what the master has to do. */ public abstract void compute(); /** * Initialize the MasterCompute class, this is the place to register * aggregators. */ public abstract void initialize() throws InstantiationException, IllegalAccessException; /** * Retrieves the current superstep. * * @return Current superstep */ public final long getSuperstep() { return graphState.getSuperstep(); } /** * Get the total (all workers) number of vertices that * existed in the previous superstep. * * @return Total number of vertices (-1 if first superstep) */ public final long getTotalNumVertices() { return graphState.getTotalNumVertices(); } /** * Get the total (all workers) number of edges that * existed in the previous superstep. * * @return Total number of edges (-1 if first superstep) */ public final long getTotalNumEdges() { return graphState.getTotalNumEdges(); } /** * After this is called, the computation will stop, even if there are * still messages in the system or vertices that have not voted to halt. */ public final void haltComputation() { halt = true; } /** * Has the master halted? * * @return True if halted, false otherwise. */ public final boolean isHalted() { return halt; } /** * Get the mapper context * * @return Mapper context */ public final Mapper.Context getContext() { return graphState.getContext(); } /** * Set Computation class to be used * * @param computationClass Computation class */ public final void setComputation( Class<? 
extends Computation> computationClass) { superstepClasses.setComputationClass(computationClass); } /** * Get Computation class to be used * * @return Computation class */ public final Class<? extends Computation> getComputation() { // Might be called prior to classes being set, do not return NPE if (superstepClasses == null) { return null; } return superstepClasses.getComputationClass(); } /** * Set Combiner class to be used * * @param combinerClass Combiner class */ public final void setCombiner(Class<? extends Combiner> combinerClass) { superstepClasses.setCombinerClass(combinerClass); } /** * Get Combiner class to be used * * @return Combiner class */ public final Class<? extends Combiner> getCombiner() { // Might be called prior to classes being set, do not return NPE if (superstepClasses == null) { return null; } return superstepClasses.getCombinerClass(); } @Override public final <A extends Writable> boolean registerAggregator( String name, Class<? extends Aggregator<A>> aggregatorClass) throws InstantiationException, IllegalAccessException { return masterAggregatorUsage.registerAggregator(name, aggregatorClass); } @Override public final <A extends Writable> boolean registerPersistentAggregator( String name, Class<? 
extends Aggregator<A>> aggregatorClass) throws InstantiationException, IllegalAccessException { return masterAggregatorUsage.registerPersistentAggregator( name, aggregatorClass); } @Override public final <A extends Writable> A getAggregatedValue(String name) { return masterAggregatorUsage.<A>getAggregatedValue(name); } @Override public final <A extends Writable> void setAggregatedValue( String name, A value) { masterAggregatorUsage.setAggregatedValue(name, value); } final void setGraphState(GraphState graphState) { this.graphState = graphState; } final void setMasterAggregatorUsage(MasterAggregatorUsage masterAggregatorUsage) { this.masterAggregatorUsage = masterAggregatorUsage; } final void setSuperstepClasses(SuperstepClasses superstepClasses) { this.superstepClasses = superstepClasses; } }
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl;

import com.intellij.openapi.roots.FileIndexFacade;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Map;

/**
 * Maps PsiTypes into a different resolve scope: class references are re-resolved
 * by qualified name in {@code myResolveScope}, lazily and with identity-based
 * caching of already-corrected class types.
 *
 * @author peter
 */
class TypeCorrector extends PsiTypeMapper {
  // identity map: each original PsiClassType is corrected at most once
  private final Map<PsiClassType, PsiClassType> myResultMap = ContainerUtil.newIdentityHashMap();
  private final GlobalSearchScope myResolveScope;

  TypeCorrector(GlobalSearchScope resolveScope) {
    myResolveScope = resolveScope;
  }

  @Override
  public PsiType visitType(PsiType type) {
    // lambda/method-reference placeholder types carry no class reference to remap
    if (type instanceof PsiLambdaParameterType || type instanceof PsiLambdaExpressionType || type instanceof PsiMethodReferenceType) {
      return type;
    }
    return super.visitType(type);
  }

  /**
   * Returns a scope-corrected copy of {@code type}, or the type itself when
   * remapping would be a no-op (fast path for raw, unsubstituted class types
   * whose class maps to itself).
   */
  @Nullable
  public <T extends PsiType> T correctType(@NotNull T type) {
    if (type instanceof PsiClassType) {
      PsiClassType classType = (PsiClassType)type;
      if (classType.getParameterCount() == 0) {
        final PsiClassType.ClassResolveResult classResolveResult = classType.resolveGenerics();
        final PsiClass psiClass = classResolveResult.getElement();
        if (psiClass != null && classResolveResult.getSubstitutor() == PsiSubstitutor.EMPTY) {
          final PsiClass mappedClass = mapClass(psiClass);
          // nothing to correct: keep the original instance
          if (mappedClass == null || mappedClass == psiClass) return (T) classType;
        }
      }
    }

    return (T)type.accept(this);
  }

  /**
   * Corrects a class type: re-resolves its class in the target scope and wraps
   * it in a PsiCorrectedClassType whose substitutor is computed lazily.
   */
  @Override
  public PsiType visitClassType(final PsiClassType classType) {
    PsiClassType alreadyComputed = myResultMap.get(classType);
    if (alreadyComputed != null) {
      return alreadyComputed;
    }

    final PsiClassType.ClassResolveResult classResolveResult = classType.resolveGenerics();
    final PsiClass psiClass = classResolveResult.getElement();
    final PsiSubstitutor substitutor = classResolveResult.getSubstitutor();
    if (psiClass == null) return classType; // unresolved: leave untouched

    PsiUtilCore.ensureValid(psiClass);

    final PsiClass mappedClass = mapClass(psiClass);
    if (mappedClass == null) return classType;

    PsiClassType mappedType = new PsiCorrectedClassType(classType.getLanguageLevel(),
                                                       classType,
                                                       new CorrectedResolveResult(psiClass, mappedClass, substitutor, classResolveResult));
    myResultMap.put(classType, mappedType);
    return mappedType;
  }

  /**
   * Finds the equivalent of {@code psiClass} in myResolveScope by qualified name.
   * Returns the class itself when it is anonymous/local, non-physical, or not
   * under source/library roots; may return null when the scope has no match.
   */
  @Nullable
  private PsiClass mapClass(@NotNull PsiClass psiClass) {
    String qualifiedName = psiClass.getQualifiedName();
    if (qualifiedName == null) {
      return psiClass;
    }

    PsiFile file = psiClass.getContainingFile();
    if (file == null || !file.getViewProvider().isPhysical()) {
      return psiClass;
    }

    final VirtualFile vFile = file.getVirtualFile();
    if (vFile == null) {
      return psiClass;
    }

    final FileIndexFacade index = FileIndexFacade.getInstance(file.getProject());
    if (!index.isInSource(vFile) && !index.isInLibrarySource(vFile) && !index.isInLibraryClasses(vFile)) {
      return psiClass;
    }

    return JavaPsiFacade.getInstance(psiClass.getProject()).findClass(qualifiedName, myResolveScope);
  }

  /**
   * Translates a substitutor from the original class's type parameters onto the
   * mapped class's, recursively mapping each substituted type and the containing
   * class's substitutor for inner (non-static) classes. Falls back to the
   * original substitutor (or a raw one) when parameter lists disagree.
   */
  @NotNull
  private PsiSubstitutor mapSubstitutor(PsiClass originalClass, PsiClass mappedClass, PsiSubstitutor substitutor) {
    PsiTypeParameter[] typeParameters = mappedClass.getTypeParameters();
    PsiTypeParameter[] originalTypeParameters = originalClass.getTypeParameters();
    if (typeParameters.length != originalTypeParameters.length) {
      if (originalTypeParameters.length == 0) {
        // original was raw; produce a raw substitutor for the mapped class too
        return JavaPsiFacade.getElementFactory(mappedClass.getProject()).createRawSubstitutor(mappedClass);
      }
      return substitutor;
    }

    Map<PsiTypeParameter, PsiType> substitutionMap = substitutor.getSubstitutionMap();

    PsiSubstitutor mappedSubstitutor = PsiSubstitutor.EMPTY;
    for (int i = 0; i < originalTypeParameters.length; i++) {
      if (!substitutionMap.containsKey(originalTypeParameters[i])) continue;

      PsiType originalSubstitute = substitutor.substitute(originalTypeParameters[i]);
      if (originalSubstitute != null) {
        PsiType substitute = mapType(originalSubstitute);
        if (substitute == null) return substitutor; // a component failed to map: keep original

        mappedSubstitutor = mappedSubstitutor.put(typeParameters[i], substitute);
      }
      else {
        // explicit raw parameter (null substitution) is preserved
        mappedSubstitutor = mappedSubstitutor.put(typeParameters[i], null);
      }
    }

    if (mappedClass.hasModifierProperty(PsiModifier.STATIC)) {
      return mappedSubstitutor;
    }
    PsiClass mappedContaining = mappedClass.getContainingClass();
    PsiClass originalContaining = originalClass.getContainingClass();
    //noinspection DoubleNegation
    if ((mappedContaining != null) != (originalContaining != null)) {
      return substitutor;
    }

    if (mappedContaining != null) {
      // inner class: also map the outer class's substitutions
      return mappedSubstitutor.putAll(mapSubstitutor(originalContaining, mappedContaining, substitutor));
    }

    return mappedSubstitutor;
  }

  /**
   * A class type that delegates its textual representation to the original type
   * but resolves to the scope-corrected class, mapping type arguments on demand.
   */
  private class PsiCorrectedClassType extends PsiClassType.Stub {
    private final PsiClassType myDelegate;
    private final CorrectedResolveResult myResolveResult;

    public PsiCorrectedClassType(LanguageLevel languageLevel,
                                 PsiClassType delegate,
                                 CorrectedResolveResult resolveResult) {
      super(languageLevel, delegate.getAnnotationProvider());
      myDelegate = delegate;
      myResolveResult = resolveResult;
    }

    @NotNull
    @Override
    public String getCanonicalText(boolean annotated) {
      return myDelegate.getCanonicalText();
    }

    @NotNull
    @Override
    public PsiClass resolve() {
      return myResolveResult.myMappedClass;
    }

    @Override
    public String getClassName() {
      return myDelegate.getClassName();
    }

    @NotNull
    @Override
    public PsiType[] getParameters() {
      // map each type argument through the enclosing TypeCorrector
      return ContainerUtil.map2Array(myDelegate.getParameters(), PsiType.class, new Function<PsiType, PsiType>() {
        @Override
        public PsiType fun(PsiType type) {
          if (type == null) {
            LOG.error(myDelegate + " of " + myDelegate.getClass() + "; substitutor=" +
                      myDelegate.resolveGenerics().getSubstitutor());
            return null;
          }
          return mapType(type);
        }
      });
    }

    @Override
    public int getParameterCount() {
      return myDelegate.getParameters().length;
    }

    @NotNull
    @Override
    public ClassResolveResult resolveGenerics() {
      return myResolveResult;
    }

    @NotNull
    @Override
    public PsiClassType rawType() {
      PsiClass psiClass = resolve();
      PsiElementFactory factory = JavaPsiFacade.getElementFactory(psiClass.getProject());
      return factory.createType(psiClass, factory.createRawSubstitutor(psiClass));
    }

    @NotNull
    @Override
    public GlobalSearchScope getResolveScope() {
      return myResolveScope;
    }

    @NotNull
    @Override
    public LanguageLevel getLanguageLevel() {
      return myLanguageLevel;
    }

    @NotNull
    @Override
    public PsiClassType setLanguageLevel(@NotNull LanguageLevel languageLevel) {
      return new PsiCorrectedClassType(languageLevel, myDelegate, myResolveResult);
    }

    @NotNull
    @Override
    public String getPresentableText() {
      return myDelegate.getPresentableText();
    }

    @NotNull
    @Override
    public String getInternalCanonicalText() {
      return myDelegate.getInternalCanonicalText();
    }

    @Override
    public boolean isValid() {
      // valid only while both the delegate type and the corrected class are valid
      return myDelegate.isValid() && resolve().isValid();
    }

    @Override
    public boolean equalsToText(@NotNull @NonNls String text) {
      return myDelegate.equalsToText(text);
    }
  }

  /**
   * Resolve result pairing the original class with its scope-corrected mapping;
   * the mapped substitutor is computed lazily (declaration truncated at the end
   * of this chunk).
   */
  private class CorrectedResolveResult implements PsiClassType.ClassResolveResult {
    private final PsiClass myPsiClass;
    private final PsiClass myMappedClass;
    private final PsiSubstitutor mySubstitutor;
    private final PsiClassType.ClassResolveResult myClassResolveResult;
    private volatile PsiSubstitutor myLazySubstitutor;

    public CorrectedResolveResult(PsiClass psiClass, PsiClass mappedClass,
PsiSubstitutor substitutor, PsiClassType.ClassResolveResult classResolveResult) { myPsiClass = psiClass; myMappedClass = mappedClass; mySubstitutor = substitutor; myClassResolveResult = classResolveResult; } @NotNull @Override public PsiSubstitutor getSubstitutor() { PsiSubstitutor result = myLazySubstitutor; if (result == null) { myLazySubstitutor = result = mapSubstitutor(myPsiClass, myMappedClass, mySubstitutor); } return result; } @Override public PsiClass getElement() { return myMappedClass; } @Override public boolean isPackagePrefixPackageReference() { return myClassResolveResult.isPackagePrefixPackageReference(); } @Override public boolean isAccessible() { return myClassResolveResult.isAccessible(); } @Override public boolean isStaticsScopeCorrect() { return myClassResolveResult.isStaticsScopeCorrect(); } @Override public PsiElement getCurrentFileResolveScope() { return myClassResolveResult.getCurrentFileResolveScope(); } @Override public boolean isValidResult() { return myClassResolveResult.isValidResult(); } } }
package pl.bubson.notepadjw.activities; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.os.Build; import android.os.Bundle; import android.preference.ListPreference; import android.preference.Preference; import android.preference.PreferenceFragment; import android.preference.PreferenceManager; import android.view.Menu; import android.view.MenuItem; import android.widget.Toast; import androidx.annotation.NonNull; import androidx.appcompat.app.AlertDialog; import androidx.appcompat.app.AppCompatActivity; import pl.bubson.notepadjw.R; import pl.bubson.notepadjw.databases.BiblesDatabase; import pl.bubson.notepadjw.services.DownloadLanguageService; import pl.bubson.notepadjw.utils.Language; import pl.bubson.notepadjw.utils.Permissions; public class SettingsActivity extends AppCompatActivity { static Language chosenLanguage; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Display the fragment as the main content. 
getFragmentManager().beginTransaction() .replace(android.R.id.content, new SettingsFragment()) .commit(); } @Override public boolean onCreateOptionsMenu(Menu menu) { if (getSupportActionBar() != null) { getSupportActionBar().setDisplayHomeAsUpEnabled(true); } return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle item selection switch (item.getItemId()) { case android.R.id.home: finish(); return true; default: return super.onOptionsItemSelected(item); } } public static class SettingsFragment extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener { @Override public void onCreate(final Bundle savedInstanceState) { super.onCreate(savedInstanceState); loadPreferences(); checkIfSavedBibleIsAvailable(); // in case of restore settings after fresh installation (without downloaded Bibles) final ListPreference verseSizePreference = (ListPreference)findPreference(getString(R.string.verse_area_size_key)); final ListPreference versePositionPreference = (ListPreference)findPreference(getString(R.string.verse_position_key)); if(versePositionPreference.getValue().equals(getString(R.string.verse_position_top))) { verseSizePreference.setEnabled(true); } else { verseSizePreference.setEnabled(false); } } private void checkIfSavedBibleIsAvailable() { final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getActivity()); chosenLanguage = Language.valueOf(sharedPreferences.getString(getString(R.string.verse_language_key), getString(R.string.english_language))); downloadBibleIfNeeded(); } @Override public void onResume() { super.onResume(); // Set up a listener whenever a key changes getPreferenceScreen().getSharedPreferences() .registerOnSharedPreferenceChangeListener(this); } @Override public void onPause() { super.onPause(); // Unregister the listener whenever a key changes getPreferenceScreen().getSharedPreferences() .unregisterOnSharedPreferenceChangeListener(this); } private void 
loadPreferences() { // Load the preferences from an XML resource addPreferencesFromResource(R.xml.preferences); // Add preference change listener to language preference - to check if given language need to be downloaded first final Preference langPreference = findPreference(getString(R.string.verse_language_key)); langPreference.setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() { @Override public boolean onPreferenceChange(final Preference preference, final Object newValue) { try { chosenLanguage = Language.valueOf(newValue.toString()); if (downloadBibleIfNeeded()) return true; // setting will be saved } catch (Exception e) { e.printStackTrace(); } return false; // setting will not be saved } }); final ListPreference versePositionPreference = (ListPreference)findPreference(getString(R.string.verse_position_key)); final ListPreference verseSizePreference = (ListPreference)findPreference(getString(R.string.verse_area_size_key)); versePositionPreference.setOnPreferenceChangeListener(new Preference.OnPreferenceChangeListener() { @Override public boolean onPreferenceChange(final Preference preference, final Object newValue) { final String val = newValue.toString(); int index = versePositionPreference.findIndexOfValue(val); if(index==0) { verseSizePreference.setEnabled(true); } else { verseSizePreference.setEnabled(false); SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(getActivity()); SharedPreferences.Editor editor = preferences.edit(); editor.putString(getString(R.string.verse_area_size_key), getString(R.string.verse_area_size_auto)); editor.commit(); } return true; } }); } private boolean downloadBibleIfNeeded() { BiblesDatabase biblesDatabase = new BiblesDatabase(getActivity()); if (biblesDatabase.isBibleInDatabase(chosenLanguage)) { return true; } else { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); builder.setTitle(R.string.missing_language_dialog_title); 
builder.setMessage(R.string.missing_language_message); builder.setPositiveButton(R.string.yes, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { // User clicked OK button if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT) FileManagerActivity.askForPermissionsIfNotGranted(getActivity()); // Bibles are now saved to External Files for which permission needed only on < KITKAT (Android 4.4) Intent downloadServiceIntent = new Intent(getActivity(), DownloadLanguageService.class); downloadServiceIntent.putExtra("Language", chosenLanguage); getActivity().startService(downloadServiceIntent); } }); builder.setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { // User cancelled the dialog } }); builder.show(); return false; } } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { // Refresh language preference view - it's needed to see changes in current PreferenceFragment if (key.equals(getString(R.string.verse_language_key))) { Preference pref = findPreference(key); if (pref instanceof ListPreference) { ListPreference listPreferenceDialog = (ListPreference) pref; String savedLanguage = sharedPreferences.getString(key, getString(R.string.english_language)); // english as default value above can stay here, because there is no chance to use it: // onSharedPreferenceChanged() is executed only when this shared preference is set listPreferenceDialog.setValue(savedLanguage); pref.setSummary(listPreferenceDialog.getEntry()); } } } } // Used when user selects Bible to download and accept permissions @Override public void onRequestPermissionsResult(int requestCode, @NonNull String permissions[], @NonNull int[] grantResults) { switch (requestCode) { case Permissions.MY_REQUEST_PERMISSIONS_CODE: { // If request is cancelled, the result arrays are empty. 
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { Intent downloadServiceIntent = new Intent(this, DownloadLanguageService.class); downloadServiceIntent.putExtra("Language", chosenLanguage); this.startService(downloadServiceIntent); } else { Toast.makeText(this, R.string.permission_storage_not_granted, Toast.LENGTH_LONG).show(); } } } } }
package com.ark.android.arkwallpaper;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Movie;
import android.graphics.Paint;
import android.net.Uri;
import android.os.Environment;
import android.os.Handler;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.service.wallpaper.WallpaperService;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.widget.Toast;

import com.ark.android.arkanalytics.GATrackerManager;
import com.ark.android.arkwallpaper.utils.WallPaperUtils;
import com.ark.android.arkwallpaper.utils.uiutils.BitmapUtil;
import com.ark.android.arkwallpaper.utils.uiutils.GlideBlurringTransformation;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.resource.bitmap.BitmapResource;
import com.crashlytics.android.Crashlytics;

import java.io.FileNotFoundException;
import java.io.InputStream;

import static com.ark.android.arkwallpaper.Constants.CHANGE_CURRENT_ALBUM_ACTION;
import static com.ark.android.arkwallpaper.Constants.CHANGE_CURRENT_WALLPAPER_ACTION;
import static com.ark.android.arkwallpaper.Constants.FORCE_UPDATE;
import static com.ark.android.arkwallpaper.Constants.FORCE_UPDATE_URI;

/**
 * Live wallpaper service: this is where the wallpaper is actually drawn and changed.
 * Each engine instance renders static images (scaled/blurred bitmaps) or animated GIFs
 * onto the wallpaper surface, and reacts to preference changes, broadcasts and gestures.
 * Created by ahmed-basyouni on 5/9/17.
 */
public class ArkWallpaperService extends WallpaperService {

    // Shared handler used to schedule GIF frame redraws on the main thread.
    private final Handler mHandler = new Handler();

    @Override
    public Engine onCreateEngine() {
        return new WallpaperEngine(this);
    }

    class WallpaperEngine extends Engine implements SharedPreferences.OnSharedPreferenceChangeListener {

        // Canvas stuff
        private final Paint mPaint = new Paint();
        private final Matrix mScaler = new Matrix();
        private final Context mContext;
        private final SharedPreferences mPrefs;
        private int mWidth = 0;              // current surface width
        private int mHeight = 0;             // current surface height
        private float mXOffset = 0;          // home-screen scroll offset (0..1)
        private int mMinWidth = 0;           // virtual width used when scrolling is enabled
        private boolean mVisible = false;
        private Bitmap mBitmap = null;       // currently displayed (formatted) bitmap
        private String mBitmapPath = null;   // source path/URI of the current wallpaper
        private long mLastDrawTime = 0;      // 0 forces loading a new image on next drawFrame()
        private boolean mStorageReady = true;
        private boolean mScroll = false;     // scroll wallpaper with home screens
        private boolean mTouchEvents = false;  // change wallpaper on double tap
        private boolean mScreenWake = false;   // change wallpaper on screen unlock
        private int mGifDuration;
        float mScaleX;
        float mScaleY;
        int mWhen;    // current GIF frame time; -1 means "restart timing" (see tick())
        long mStart;  // uptime at which the current GIF loop started
        private boolean isGif;
        private boolean mFit = true;  // display mode: fit inside screen
        private boolean mFill;        // display mode: fill screen
        private int mOriginalWidth;
        private int mOriginalHeight;
        private Movie movie;          // decoded GIF, null for static images
        // Double tap listener
        private final GestureDetector doubleTapDetector;
        // Posted repeatedly while a GIF is visible to advance animation frames.
        private final Runnable repeatDrawing = new Runnable() {
            public void run() {
                drawFrame();
            }
        };
        private BroadcastReceiver storageReceiver;

        WallpaperEngine(Context context) {
            // Paint used only for the "no photos found" fallback text.
            final Paint paint = mPaint;
            paint.setColor(Color.WHITE);
            paint.setTextAlign(Paint.Align.CENTER);
            paint.setAntiAlias(true);
            paint.setTextSize(18f);
            mContext = context;
            mPrefs = PreferenceManager.getDefaultSharedPreferences(context);
            mPrefs.registerOnSharedPreferenceChangeListener(this);
            // Read the preferences (null key = load all settings, see onSharedPreferenceChanged)
            onSharedPreferenceChanged(mPrefs, null);
            doubleTapDetector = new GestureDetector(ArkWallpaperService.this,
                    new GestureDetector.SimpleOnGestureListener() {
                @Override
                public boolean onDoubleTap(MotionEvent e) {
                    // Double tap advances to the next wallpaper when the feature is enabled.
                    if (mTouchEvents) {
                        mLastDrawTime = 0;
                        showNotificationToast();
                        drawFrame();
                        return true;
                    }
                    return false;
                }

                @Override
                public boolean onSingleTapConfirmed(MotionEvent e) {
                    return super.onSingleTapConfirmed(e);
                }
            });
        }

        // Forces a wallpaper change; with FORCE_UPDATE the broadcast carries the exact
        // image URI to show, otherwise the next image is picked by WallPaperUtils.
        BroadcastReceiver changeWallpaperReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                showNotificationToast();
                if (intent.getBooleanExtra(FORCE_UPDATE, false)) {
                    movie = null;
                    mBitmapPath = intent.getStringExtra(FORCE_UPDATE_URI);
                    WallPaperUtils.changeCurrentWallpaperId(mBitmapPath);
                    if (mBitmap != null)
                        mBitmap.recycle();
                    drawFrame();
                } else {
                    mLastDrawTime = 0;
                    drawFrame();
                }
            }
        };

        // Album switched: redraw with a fresh image from the new album.
        BroadcastReceiver changeAlbumReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                showNotificationToast();
                mLastDrawTime = 0;
                drawFrame();
            }
        };

        @Override
        public void onCreate(SurfaceHolder surfaceHolder) {
            super.onCreate(surfaceHolder);
            // Preview engines (wallpaper chooser) don't register change receivers so the
            // real wallpaper keeps ownership of those broadcasts.
            if (!isPreview()) {
                WallPaperUtils.setLiveWallpaperIsRunning(true);
                registerReceiver(changeWallpaperReceiver, new IntentFilter(CHANGE_CURRENT_WALLPAPER_ACTION));
                registerReceiver(changeAlbumReceiver, new IntentFilter(CHANGE_CURRENT_ALBUM_ACTION));
                registerScreenWakeReceiver();
            }
            registerStorageReceiver();
        }

        // NOTE(review): this anonymous receiver is never unregistered (onDestroy only
        // removes callbacks and the prefs listener) — likely a receiver leak; confirm
        // and unregister it in onDestroy.
        private void registerScreenWakeReceiver() {
            registerReceiver(new BroadcastReceiver() {
                @Override
                public void onReceive(Context context, Intent intent) {
                    System.out.println(Intent.ACTION_SCREEN_ON);
                    if (mScreenWake) {
                        mLastDrawTime = 0;
                        drawFrame();
                    }
                }
            }, new IntentFilter(Intent.ACTION_SCREEN_ON));
        }

        // Tracks external-storage availability; drawing is suspended while storage
        // is unavailable. NOTE(review): storageReceiver is also never unregistered.
        private void registerStorageReceiver() {
            // Register receiver for media events
            IntentFilter filter = new IntentFilter();
            filter.addAction(Intent.ACTION_MEDIA_BAD_REMOVAL);
            filter.addAction(Intent.ACTION_MEDIA_CHECKING);
            filter.addAction(Intent.ACTION_MEDIA_MOUNTED);
            filter.addAction(Intent.ACTION_MEDIA_EJECT);
            filter.addAction(Intent.ACTION_MEDIA_NOFS);
            filter.addAction(Intent.ACTION_MEDIA_REMOVED);
            filter.addAction(Intent.ACTION_MEDIA_SHARED);
            filter.addAction(Intent.ACTION_MEDIA_UNMOUNTED);
            filter.addDataScheme("file");
            storageReceiver = new BroadcastReceiver() {
                @Override
                public void onReceive(Context context, Intent intent) {
                    String action = intent.getAction();
                    if (action.equals(Intent.ACTION_MEDIA_MOUNTED) || action.equals(Intent.ACTION_MEDIA_CHECKING)) {
                        mStorageReady = true;
                        setTouchEventsEnabled(true);
                        drawFrame();
                    } else {
                        mStorageReady = false;
                        setTouchEventsEnabled(false);
                        mHandler.removeCallbacks(repeatDrawing);
                    }
                }
            };
            registerReceiver(storageReceiver, filter);
            setTouchEventsEnabled(mStorageReady);
        }

        @Override
        public void onDestroy() {
            super.onDestroy();
            if (!isPreview())
                WallPaperUtils.setLiveWallpaperIsRunning(false);
            mHandler.removeCallbacks(repeatDrawing);
            mPrefs.unregisterOnSharedPreferenceChangeListener(this);
        }

        @Override
        public void onVisibilityChanged(boolean visible) {
            mVisible = visible;
            if (visible) {
                drawFrame();
            } else {
                // Stop GIF animation while hidden to save battery.
                mHandler.removeCallbacks(repeatDrawing);
            }
        }

        @Override
        public void onSurfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            super.onSurfaceChanged(holder, format, width, height);
            mWidth = width;
            mHeight = height;
            mMinWidth = width * 2; // cheap hack for scrolling
            // Current bitmap was sized for the old surface; recycle so drawFrame reloads it.
            if (mBitmap != null) {
                mBitmap.recycle();
            }
            drawFrame();
        }

        @Override
        public void onSurfaceCreated(SurfaceHolder holder) {
            super.onSurfaceCreated(holder);
            mLastDrawTime = 0;
        }

        @Override
        public void onSurfaceDestroyed(SurfaceHolder holder) {
            super.onSurfaceDestroyed(holder);
            mVisible = false;
            mHandler.removeCallbacks(repeatDrawing);
        }

        @Override
        public void onOffsetsChanged(float xOffset, float yOffset, float xStep, float yStep,
                                     int xPixels, int yPixels) {
            // Home-screen paging: remember the horizontal offset and redraw (used when mScroll).
            mXOffset = xOffset;
            drawFrame();
        }

        @Override
        public void onTouchEvent(MotionEvent event) {
            super.onTouchEvent(event);
            this.doubleTapDetector.onTouchEvent(event);
        }

        @Override
        public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
            // key == null is our own "load everything" call from the constructor.
            if (key == null) {
                mFit = WallPaperUtils.getDisplayMode() == Constants.DISPLAY_MODE.FIT.ordinal();
                mFill = WallPaperUtils.getDisplayMode() == Constants.DISPLAY_MODE.FILL.ordinal();
                mScroll = WallPaperUtils.isScrolling();
                mTouchEvents = WallPaperUtils.isChangedWithDoubleTap();
                mScreenWake = WallPaperUtils.isChangeWithUnlock();
            } else {
                if (key.equals(Constants.RANDOM_ORDER_KEY)) {
                    mLastDrawTime = 0;
                    drawFrame();
                } else if (key.equals(Constants.CHANGE_SCROLLING_KEY) && movie == null) {
                    // Scrolling changes the target bitmap width, so rebuild the bitmap.
                    mScroll = WallPaperUtils.isScrolling();
                    if (mBitmap != null)
                        mBitmap.recycle();
                    drawFrame();
                } else if (key.equals(Constants.GREY_SCALE_KEY) || key.equals(Constants.DIM_KEY)
                        || key.equals(Constants.BLURRING_KEY)) {
                    // Effects are baked into the bitmap; recycle to re-apply them.
                    if (mBitmap != null)
                        mBitmap.recycle();
                    drawFrame();
                } else if (key.equals(Constants.CHANGE_DISPLAY_MODE_KEY) && movie == null) {
                    if (WallPaperUtils.getDisplayMode() == Constants.DISPLAY_MODE.FIT.ordinal()) {
                        mFit = true;
                        mFill = false;
                    } else {
                        mFit = false;
                        mFill = true;
                    }
                    if (mBitmap != null)
                        mBitmap.recycle();
                    drawFrame();
                } else if (key.equals(Constants.CHANGE_WITH_UNLOCK_KEY)) {
                    mScreenWake = WallPaperUtils.isChangeWithUnlock();
                } else if (key.equals(Constants.CHANGE_WITH_DOUBLE_TAP_KEY)) {
                    mTouchEvents = WallPaperUtils.isChangedWithDoubleTap();
                }
            }
        }

        // Advances the GIF clock: mWhen becomes the frame time (ms) inside the
        // current loop, wrapping at mGifDuration.
        void tick() {
            if (mWhen == -1L) {
                mWhen = 0;
                mStart = SystemClock.uptimeMillis();
            } else {
                long mDiff = SystemClock.uptimeMillis() - mStart;
                mWhen = (int) (mDiff % mGifDuration);
            }
        }

        // Core render loop: (re)loads the image if needed, draws a bitmap frame or a
        // GIF frame onto the locked canvas, and re-schedules itself for GIFs.
        void drawFrame() {
            final SurfaceHolder holder = getSurfaceHolder();
            Canvas c = null;
            // Bail out entirely while external storage is unavailable.
            String state = Environment.getExternalStorageState();
            if (!state.equals(Environment.MEDIA_MOUNTED) && !state.equals(Environment.MEDIA_MOUNTED_READ_ONLY)) {
                return;
            }
            try {
                // Lock the canvas for writing
                c = holder.lockCanvas();
                boolean loadNewImage = false;
                if (mBitmapPath == null || (mBitmap == null && !isGif)) {
                    loadNewImage = true;
                } else if (mLastDrawTime == 0) {
                    // mLastDrawTime == 0 is the "force next image" flag set by callers.
                    loadNewImage = true;
                }
                // Get image to draw
                if (loadNewImage) {
                    movie = null;
                    mBitmapPath = WallPaperUtils.getImages();
                    if (mBitmapPath == null || mBitmapPath.isEmpty())
                        throw new FileNotFoundException();
                    if (isGif()) {
                        configGif();
                    } else {
                        isGif = false;
                        movie = null;
                        mBitmap = getFormattedBitmap(mBitmapPath);
                    }
                    // Save the current time
                    mLastDrawTime = System.currentTimeMillis();
                } else if (mBitmap != null && mBitmap.isRecycled()) {
                    // Bitmap was recycled by a settings change: reload the same path.
                    if (isGif()) {
                        configGif();
                    } else {
                        isGif = false;
                        movie = null;
                        mBitmap = getFormattedBitmap(mBitmapPath);
                    }
                }
            } catch (RuntimeException re) {
                GATrackerManager.getInstance().trackException(re);
                Crashlytics.logException(re);
                holder.unlockCanvasAndPost(c);
                return;
            } catch (FileNotFoundException e) {
                e.printStackTrace();
                drawNoImagesFound(c);
                holder.unlockCanvasAndPost(c);
                return;
            }
            try {
                if (c != null) {
                    int xPos = 0;
                    int yPos;
                    if (mScroll) {
                        // Shift the (double-width) bitmap left as the user pages home screens.
                        xPos = 10 - (int) (mWidth * mXOffset);
                    }
                    try {
                        if (!isGif) {
                            yPos = getYPos();
                            c.drawColor(Color.BLACK);
                            c.drawBitmap(mBitmap, xPos, yPos, mPaint);
                        } else {
                            // GIF: scale the movie to the surface width and draw the
                            // frame for the current tick, vertically centered.
                            c.drawColor(Color.BLACK);
                            tick();
                            c.save();
                            float scale = (float) mWidth / (float) movie.width();
                            c.scale(scale, scale);
                            movie.setTime(mWhen);
                            float movieY = (mHeight / 2 - ((float) movie.height() * scale / 2)) / scale;
                            movie.draw(c, 0, movieY);
                            c.restore();
                        }
                    } catch (Throwable t) {
                        t.printStackTrace();
                    }
                }
            } finally {
                if (c != null) {
                    holder.unlockCanvasAndPost(c);
                }
            }
            // Reschedule the next redraw (GIF animation only; ~20 fps)
            mHandler.removeCallbacks(repeatDrawing);
            if (mVisible && isGif) {
                mHandler.postDelayed(repeatDrawing, 50);
            }
        }

        // True when the current wallpaper path has a .gif extension.
        private boolean isGif() {
            return getFileExt(mBitmapPath).equalsIgnoreCase("gif");
        }

        // Vertical position for the bitmap: 0 for fill/portrait images, otherwise
        // centers a fit-mode landscape image using the same scale drawFrame used.
        private int getYPos() {
            int yPos = 0;
            if (mFill || mOriginalHeight > mOriginalWidth) {
                yPos = 0;
            } else if (mFit && mScroll) {
                float scale;
                if (mBitmap.getWidth() > mMinWidth)
                    scale = (float) mBitmap.getWidth() / (float) mMinWidth;
                else
                    scale = (float) mMinWidth / (float) mBitmap.getWidth();
                yPos = (int) ((mHeight / 2 - ((float) mBitmap.getHeight() * scale / 2)) / scale);
            } else if (mFit) {
                float scale;
                if (mBitmap.getWidth() > mWidth)
                    scale = (float) mBitmap.getWidth() / (float) mWidth;
                else
                    scale = (float) mWidth / (float) mBitmap.getWidth();
                yPos = (int) ((mHeight / 2 - ((float) mBitmap.getHeight() * scale / 2)) / scale);
            }
            return yPos;
        }

        // Decodes the current path as a GIF (once) and caches its duration.
        // NOTE(review): the InputStream is never closed — confirm and close after decode.
        private void configGif() throws FileNotFoundException {
            isGif = true;
            if (movie == null) {
                InputStream stream = getContentResolver().openInputStream(Uri.parse(mBitmapPath));
                movie = Movie.decodeStream(stream);
                mWhen = -1;
            }
            mGifDuration = movie.duration();
            mScaleX = 3f;
            mScaleY = 3f;
        }

        // Fallback screen shown when the selected album contains no photos.
        private void drawNoImagesFound(Canvas c) {
            c.drawColor(Color.BLACK);
            c.translate(0, 30);
            c.drawText("No photos found in selected folder, ", c.getWidth() / 2.0f,
                    (c.getHeight() / 2.0f) - 15, mPaint);
            c.drawText("press Settings to select a folder...", c.getWidth() / 2.0f,
                    (c.getHeight() / 2.0f) + 15, mPaint);
        }

        // Returns the extension after the last '.'; assumes the path contains one.
        private String getFileExt(String fileName) {
            return fileName.substring(fileName.lastIndexOf(".") + 1, fileName.length());
        }

        // Brief "changing wallpaper" toast shown on user-triggered changes.
        private void showNotificationToast() {
            Toast.makeText(ArkWallpaperService.this, getString(R.string.changeWallpaper),
                    Toast.LENGTH_SHORT).show();
        }

        /**
         * Loads, scales and post-processes (blur/greyscale/dim) the image at {@code file}
         * for the current surface and display mode. Returns a blank placeholder bitmap
         * when decoding fails. Side effect: records the decoded bitmap's dimensions in
         * mOriginalWidth/mOriginalHeight (used by getYPos()).
         */
        private Bitmap getFormattedBitmap(String file) {
            int targetWidth = (mScroll) ? mMinWidth : mWidth;
            int targetHeight = mHeight;
            Bitmap bitmap = BitmapUtil.makeBitmap(WallpaperApp.getWallpaperApp(),
                    Math.max(mMinWidth, mHeight), mMinWidth * mHeight, file, null);
            if (bitmap == null) {
                // Decode failed: show a black placeholder instead of crashing.
                return Bitmap.createBitmap(targetWidth, targetHeight, Bitmap.Config.ARGB_8888);
            }
            if (mFit && mScroll && bitmap.getWidth() > bitmap.getHeight()) {
                // Fit + scroll, landscape: stretch to the double width, keep height.
                targetWidth = mMinWidth;
                targetHeight = bitmap.getHeight();
            } else if (mFit && bitmap.getWidth() > bitmap.getHeight()) {
                // Fit, landscape: scale height proportionally to the width ratio.
                targetWidth = mWidth;
                float scale;
                if (bitmap.getWidth() > mWidth)
                    scale = (float) bitmap.getWidth() / (float) ((mScroll) ? mMinWidth : mWidth);
                else
                    scale = (float) ((mScroll) ? mMinWidth : mWidth) / (float) bitmap.getWidth();
                targetHeight = (int) (bitmap.getHeight() / scale);
            } else if (mFit && mScroll && bitmap.getHeight() > bitmap.getWidth()) {
                // Fit + scroll, portrait: just use the visible screen size.
                targetWidth = mWidth;
                targetHeight = mHeight;
            }
            mOriginalWidth = bitmap.getWidth();
            mOriginalHeight = bitmap.getHeight();
            // Scale bitmap
            if (mOriginalWidth != targetWidth || mOriginalHeight != targetHeight) {
                bitmap = BitmapUtil.transform(mScaler, bitmap, targetWidth, targetHeight, true, true);
            }
            // Apply user-configured blur / greyscale / dim as a Glide transformation.
            GlideBlurringTransformation glideBlurringTransformation = new GlideBlurringTransformation(
                    mContext, WallPaperUtils.getCurrentBlurring(),
                    WallPaperUtils.getCurrentGreyScale(), WallPaperUtils.getCurrentDim());
            bitmap = glideBlurringTransformation.transform(
                    BitmapResource.obtain(bitmap, Glide.get(mContext).getBitmapPool()),
                    bitmap.getWidth(), bitmap.getHeight()).get();
            return bitmap;
        }
    }
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.slm; import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ilm.OperationMode; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.slm.SnapshotLifecycleMetadata; import org.elasticsearch.xpack.core.slm.SnapshotLifecyclePolicy; import org.elasticsearch.xpack.core.slm.SnapshotLifecyclePolicyMetadata; import 
org.elasticsearch.xpack.core.slm.history.SnapshotHistoryItem; import org.elasticsearch.xpack.core.slm.history.SnapshotHistoryStore; import java.io.IOException; import java.time.ZoneId; import java.time.ZoneOffset; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.startsWith; public class SnapshotLifecycleTaskTests extends ESTestCase { public void testGetSnapMetadata() { final String id = randomAlphaOfLength(4); final SnapshotLifecyclePolicyMetadata slpm = makePolicyMeta(id); final SnapshotLifecycleMetadata meta = new SnapshotLifecycleMetadata(Collections.singletonMap(id, slpm), OperationMode.RUNNING, new SnapshotLifecycleStats()); final ClusterState state = ClusterState.builder(new ClusterName("test")) .metaData(MetaData.builder() .putCustom(SnapshotLifecycleMetadata.TYPE, meta) .build()) .build(); final Optional<SnapshotLifecyclePolicyMetadata> o = SnapshotLifecycleTask.getSnapPolicyMetadata(SnapshotLifecycleService.getJobId(slpm), state); assertTrue("the policy metadata should be retrieved from the cluster state", o.isPresent()); assertThat(o.get(), equalTo(slpm)); assertFalse(SnapshotLifecycleTask.getSnapPolicyMetadata("bad-jobid", state).isPresent()); } public void testSkipCreatingSnapshotWhenJobDoesNotMatch() { final String id = randomAlphaOfLength(4); final SnapshotLifecyclePolicyMetadata slpm = makePolicyMeta(id); final SnapshotLifecycleMetadata meta = new SnapshotLifecycleMetadata(Collections.singletonMap(id, slpm), OperationMode.RUNNING, new SnapshotLifecycleStats()); final ClusterState state = ClusterState.builder(new ClusterName("test")) .metaData(MetaData.builder() .putCustom(SnapshotLifecycleMetadata.TYPE, meta) 
                // NOTE(review): this is the tail of a test method whose start is above this
                // chunk — it verifies that an event with a NON-matching job id triggers
                // neither a snapshot request nor a history-store write (both callbacks fail()).
                .build())
            .build();
        final ThreadPool threadPool = new TestThreadPool("test");
        try (ClusterService clusterService = ClusterServiceUtils.createClusterService(state, threadPool);
             // Any client invocation at all means the task wrongly tried to snapshot.
             VerifyingClient client = new VerifyingClient(threadPool,
                 (a, r, l) -> {
                     fail("should not have tried to take a snapshot");
                     return null;
                 })) {
            // Likewise, any history write for a non-matching job is a failure.
            SnapshotHistoryStore historyStore = new VerifyingHistoryStore(null, ZoneOffset.UTC,
                item -> fail("should not have tried to store an item"));
            SnapshotLifecycleTask task = new SnapshotLifecycleTask(client, clusterService, historyStore);

            // Trigger the event, but since the job name does not match, it should
            // not run the function to create a snapshot
            task.triggered(new SchedulerEngine.Event("nonexistent-job",
                System.currentTimeMillis(), System.currentTimeMillis()));
        }
        threadPool.shutdownNow();
    }

    /**
     * Happy path: triggering the scheduler event whose job id matches the policy
     * must invoke the (mocked) create-snapshot client exactly once with a request
     * built from the policy, and record exactly one success item in the history store.
     */
    public void testCreateSnapshotOnTrigger() {
        final String id = randomAlphaOfLength(4);
        final SnapshotLifecyclePolicyMetadata slpm = makePolicyMeta(id);
        // A metadata map containing only this policy, with SLM in RUNNING mode.
        final SnapshotLifecycleMetadata meta = new SnapshotLifecycleMetadata(Collections.singletonMap(id, slpm),
            OperationMode.RUNNING, new SnapshotLifecycleStats());

        final ClusterState state = ClusterState.builder(new ClusterName("test"))
            .metaData(MetaData.builder()
                .putCustom(SnapshotLifecycleMetadata.TYPE, meta)
                .build())
            .build();

        final ThreadPool threadPool = new TestThreadPool("test");
        // Canned JSON for a fully successful snapshot; parsed below into a
        // CreateSnapshotResponse and returned by the verifying client.
        final String createSnapResponse = "{" +
            " \"snapshot\" : {" +
            " \"snapshot\" : \"snapshot_1\"," +
            " \"uuid\" : \"bcP3ClgCSYO_TP7_FCBbBw\"," +
            " \"version_id\" : " + Version.CURRENT.id + "," +
            " \"version\" : \"" + Version.CURRENT + "\"," +
            " \"indices\" : [ ]," +
            " \"include_global_state\" : true," +
            " \"state\" : \"SUCCESS\"," +
            " \"start_time\" : \"2019-03-19T22:19:53.542Z\"," +
            " \"start_time_in_millis\" : 1553033993542," +
            " \"end_time\" : \"2019-03-19T22:19:53.567Z\"," +
            " \"end_time_in_millis\" : 1553033993567," +
            " \"duration_in_millis\" : 25," +
            " \"failures\" : [ ]," +
            " \"shards\" : {" +
            " \"total\" : 0," +
            " \"failed\" : 0," +
            " \"successful\" : 0" +
            " }" +
            " }" +
            "}";

        final AtomicBoolean clientCalled = new AtomicBoolean(false);
        final SetOnce<String> snapshotName = new SetOnce<>();
        try (ClusterService clusterService = ClusterServiceUtils.createClusterService(state, threadPool);
             // This verifying client will verify that we correctly invoked
             // client.admin().createSnapshot(...) with the appropriate
             // request. It also returns a mock real response
             VerifyingClient client = new VerifyingClient(threadPool,
                 (action, request, listener) -> {
                     // getAndSet guards against the client being invoked more than once.
                     assertFalse(clientCalled.getAndSet(true));
                     assertThat(action, instanceOf(CreateSnapshotAction.class));
                     assertThat(request, instanceOf(CreateSnapshotRequest.class));

                     CreateSnapshotRequest req = (CreateSnapshotRequest) request;

                     SnapshotLifecyclePolicy policy = slpm.getPolicy();
                     // Snapshot names are generated from the policy name plus a suffix.
                     assertThat(req.snapshot(), startsWith(policy.getName() + "-"));
                     assertThat(req.repository(), equalTo(policy.getRepository()));
                     // Capture the generated name so the history-store callback can match it.
                     snapshotName.set(req.snapshot());
                     if (req.indices().length > 0) {
                         assertThat(Arrays.asList(req.indices()), equalTo(policy.getConfig().get("indices")));
                     }
                     // include_global_state defaults to true when absent from the policy config.
                     boolean globalState = policy.getConfig().get("include_global_state") == null ||
                         Boolean.parseBoolean((String) policy.getConfig().get("include_global_state"));
                     assertThat(req.includeGlobalState(), equalTo(globalState));

                     try {
                         return CreateSnapshotResponse.fromXContent(createParser(JsonXContent.jsonXContent, createSnapResponse));
                     } catch (IOException e) {
                         fail("failed to parse snapshot response");
                         return null;
                     }
                 })) {
            final AtomicBoolean historyStoreCalled = new AtomicBoolean(false);
            // The history item written after the snapshot must echo the policy fields
            // and the snapshot name captured by the client above.
            SnapshotHistoryStore historyStore = new VerifyingHistoryStore(null, ZoneOffset.UTC,
                item -> {
                    assertFalse(historyStoreCalled.getAndSet(true));
                    final SnapshotLifecyclePolicy policy = slpm.getPolicy();
                    assertEquals(policy.getId(), item.getPolicyId());
                    assertEquals(policy.getRepository(), item.getRepository());
                    assertEquals(policy.getConfig(), item.getSnapshotConfiguration());
                    assertEquals(snapshotName.get(), item.getSnapshotName());
                });
            SnapshotLifecycleTask task = new SnapshotLifecycleTask(client, clusterService, historyStore);

            // Trigger the event with a matching job name for the policy
            task.triggered(new SchedulerEngine.Event(SnapshotLifecycleService.getJobId(slpm),
                System.currentTimeMillis(), System.currentTimeMillis()));

            assertTrue("snapshot should be triggered once", clientCalled.get());
            assertTrue("history store should be called once", historyStoreCalled.get());
        }
        threadPool.shutdownNow();
    }

    /**
     * Partial failure: when the snapshot completes with shard failures (1 of 3 shards
     * failed here), the history item recorded must be a failure whose error details
     * include both the failure summary and the underlying shard failure reason.
     */
    public void testPartialFailureSnapshot() throws Exception {
        final String id = randomAlphaOfLength(4);
        final SnapshotLifecyclePolicyMetadata slpm = makePolicyMeta(id);
        final SnapshotLifecycleMetadata meta = new SnapshotLifecycleMetadata(Collections.singletonMap(id, slpm),
            OperationMode.RUNNING, new SnapshotLifecycleStats());

        final ClusterState state = ClusterState.builder(new ClusterName("test"))
            .metaData(MetaData.builder()
                .putCustom(SnapshotLifecycleMetadata.TYPE, meta)
                .build())
            .build();

        final ThreadPool threadPool = new TestThreadPool("test");
        final AtomicBoolean clientCalled = new AtomicBoolean(false);
        final SetOnce<String> snapshotName = new SetOnce<>();
        try (ClusterService clusterService = ClusterServiceUtils.createClusterService(state, threadPool);
             // Same request verification as the happy path, but the response is built
             // directly with one forced SnapshotShardFailure out of 3 total shards.
             VerifyingClient client = new VerifyingClient(threadPool,
                 (action, request, listener) -> {
                     assertFalse(clientCalled.getAndSet(true));
                     assertThat(action, instanceOf(CreateSnapshotAction.class));
                     assertThat(request, instanceOf(CreateSnapshotRequest.class));

                     CreateSnapshotRequest req = (CreateSnapshotRequest) request;

                     SnapshotLifecyclePolicy policy = slpm.getPolicy();
                     assertThat(req.snapshot(), startsWith(policy.getName() + "-"));
                     assertThat(req.repository(), equalTo(policy.getRepository()));
                     snapshotName.set(req.snapshot());
                     if (req.indices().length > 0) {
                         assertThat(Arrays.asList(req.indices()), equalTo(policy.getConfig().get("indices")));
                     }
                     boolean globalState = policy.getConfig().get("include_global_state") == null ||
                         Boolean.parseBoolean((String) policy.getConfig().get("include_global_state"));
                     assertThat(req.includeGlobalState(), equalTo(globalState));
                     return new CreateSnapshotResponse(
                         new SnapshotInfo(
                             new SnapshotId(req.snapshot(), "uuid"),
                             Arrays.asList(req.indices()),
                             randomNonNegativeLong(),
                             "snapshot started",
                             randomNonNegativeLong(),
                             3,
                             Collections.singletonList(
                                 new SnapshotShardFailure("nodeId", new ShardId("index", "uuid", 0), "forced failure")),
                             req.includeGlobalState(),
                             req.userMetadata()
                         ));
                 })) {
            final AtomicBoolean historyStoreCalled = new AtomicBoolean(false);
            SnapshotHistoryStore historyStore = new VerifyingHistoryStore(null, ZoneOffset.UTC,
                item -> {
                    assertFalse(historyStoreCalled.getAndSet(true));
                    final SnapshotLifecyclePolicy policy = slpm.getPolicy();
                    assertEquals(policy.getId(), item.getPolicyId());
                    assertEquals(policy.getRepository(), item.getRepository());
                    assertEquals(policy.getConfig(), item.getSnapshotConfiguration());
                    assertEquals(snapshotName.get(), item.getSnapshotName());
                    // The partially-failed snapshot must be recorded as a failure with
                    // both the summary and the shard-level reason in the error details.
                    assertFalse("item should be a failure", item.isSuccess());
                    assertThat(item.getErrorDetails(),
                        containsString("failed to create snapshot successfully, 1 out of 3 total shards failed"));
                    assertThat(item.getErrorDetails(), containsString("forced failure"));
                });
            SnapshotLifecycleTask task = new SnapshotLifecycleTask(client, clusterService, historyStore);

            // Trigger the event with a matching job name for the policy
            task.triggered(new SchedulerEngine.Event(SnapshotLifecycleService.getJobId(slpm),
                System.currentTimeMillis(), System.currentTimeMillis()));

            assertTrue("snapshot should be triggered once", clientCalled.get());
            assertTrue("history store should be called once", historyStoreCalled.get());
        }
        threadPool.shutdownNow();
    }

    /**
     * A client that delegates to a verifying function for action/request/listener
     */
    public static class VerifyingClient extends NoOpClient {

        // Invoked for every doExecute; its return value becomes the response.
        private final TriFunction<ActionType<?>, ActionRequest, ActionListener<?>, ActionResponse> verifier;

        VerifyingClient(ThreadPool threadPool,
                        TriFunction<ActionType<?>, ActionRequest, ActionListener<?>, ActionResponse> verifier) {
            super(threadPool);
            this.verifier = verifier;
        }

        @Override
        @SuppressWarnings("unchecked")
        protected <Request extends ActionRequest,
                   Response extends ActionResponse> void doExecute(ActionType<Response> action, Request request,
                                                                   ActionListener<Response> listener) {
            // Synchronously answer with whatever the verifier produced (may be null).
            listener.onResponse((Response) verifier.apply(action, request, listener));
        }
    }

    /**
     * Builds policy metadata (version 1, modified date 1) around the shared test
     * policy for {@code id}, with a random X-Opaque-ID header attached.
     */
    private SnapshotLifecyclePolicyMetadata makePolicyMeta(final String id) {
        SnapshotLifecyclePolicy policy = SnapshotLifecycleServiceTests.createPolicy(id);
        Map<String, String> headers = new HashMap<>();
        headers.put("X-Opaque-ID", randomAlphaOfLength(4));
        return SnapshotLifecyclePolicyMetadata.builder()
            .setPolicy(policy)
            .setHeaders(headers)
            .setVersion(1)
            .setModifiedDate(1)
            .build();
    }

    /**
     * History store whose async put is replaced by a synchronous consumer so tests
     * can assert on (or fail for) every history item the task tries to record.
     */
    public static class VerifyingHistoryStore extends SnapshotHistoryStore {

        Consumer<SnapshotHistoryItem> verifier;

        public VerifyingHistoryStore(Client client, ZoneId timeZone, Consumer<SnapshotHistoryItem> verifier) {
            super(Settings.EMPTY, client, null);
            this.verifier = verifier;
        }

        @Override
        public void putAsync(SnapshotHistoryItem item) {
            verifier.accept(item);
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.common.block; import io.airlift.slice.Slice; import io.airlift.slice.Slices; import javax.annotation.Nullable; import java.util.Arrays; import static io.airlift.slice.SizeOf.SIZE_OF_LONG; import static java.lang.Math.ceil; import static java.lang.Math.max; import static java.lang.Math.toIntExact; import static java.lang.String.format; import static java.util.Objects.requireNonNull; public final class BlockUtil { private static final double BLOCK_RESET_SKEW = 1.25; private static final int DEFAULT_CAPACITY = 64; // See java.util.ArrayList for an explanation static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; private BlockUtil() { } static void checkArrayRange(int[] array, int offset, int length) { requireNonNull(array, "array is null"); if (offset < 0 || length < 0 || offset + length > array.length) { throw new IndexOutOfBoundsException(format("Invalid offset %s and length %s in array with %s elements", offset, length, array.length)); } } static void checkArrayRange(boolean[] array, int offset, int length) { requireNonNull(array, "array is null"); if (offset < 0 || length < 0 || offset + length > array.length) { throw new IndexOutOfBoundsException(format("Invalid offset %s and length %s in array with %s elements", offset, length, array.length)); } } static void checkValidRegion(int positionCount, int positionOffset, int length) { if (positionOffset < 0 || length < 0 || positionOffset + length > 
positionCount) { throw new IndexOutOfBoundsException(format("Invalid position %s and length %s in block with %s positions", positionOffset, length, positionCount)); } } static void checkValidPositions(boolean[] positions, int positionCount) { if (positions.length != positionCount) { throw new IllegalArgumentException(format("Invalid positions array size %d, actual position count is %d", positions.length, positionCount)); } } static void checkValidPosition(int position, int positionCount) { if (position < 0 || position >= positionCount) { throw new IllegalArgumentException(format("Invalid position %s in block with %s positions", position, positionCount)); } } static int calculateNewArraySize(int currentSize) { // grow array by 50% long newSize = (long) currentSize + (currentSize >> 1); // verify new size is within reasonable bounds if (newSize < DEFAULT_CAPACITY) { newSize = DEFAULT_CAPACITY; } else if (newSize > MAX_ARRAY_SIZE) { newSize = MAX_ARRAY_SIZE; if (newSize == currentSize) { throw new IllegalArgumentException(format("Can not grow array beyond '%s'", MAX_ARRAY_SIZE)); } } return (int) newSize; } static int calculateBlockResetSize(int currentSize) { long newSize = (long) ceil(currentSize * BLOCK_RESET_SKEW); // verify new size is within reasonable bounds if (newSize < DEFAULT_CAPACITY) { newSize = DEFAULT_CAPACITY; } else if (newSize > MAX_ARRAY_SIZE) { newSize = MAX_ARRAY_SIZE; } return (int) newSize; } static int calculateNestedStructureResetSize(int currentNestedStructureSize, int currentNestedStructurePositionCount, int expectedPositionCount) { long newSize = max( (long) ceil(currentNestedStructureSize * BLOCK_RESET_SKEW), currentNestedStructurePositionCount == 0 ? currentNestedStructureSize : (long) currentNestedStructureSize * expectedPositionCount / currentNestedStructurePositionCount); if (newSize > MAX_ARRAY_SIZE) { return MAX_ARRAY_SIZE; } return toIntExact(newSize); } /** * Recalculate the <code>offsets</code> array for the specified range. 
* The returned <code>offsets</code> array contains <code>length + 1</code> integers * with the first value set to 0. * If the range matches the entire <code>offsets</code> array, the input array will be returned. */ static int[] compactOffsets(int[] offsets, int index, int length) { if (index == 0 && offsets.length == length + 1) { return offsets; } int[] newOffsets = new int[length + 1]; for (int i = 1; i <= length; i++) { newOffsets[i] = offsets[index + i] - offsets[index]; } return newOffsets; } /** * Returns a slice containing values in the specified range of the specified slice. * If the range matches the entire slice, the input slice will be returned. * Otherwise, a copy will be returned. */ static Slice compactSlice(Slice slice, int index, int length) { if (slice.isCompact() && index == 0 && length == slice.length()) { return slice; } return Slices.copyOf(slice, index, length); } /** * Returns an array containing elements in the specified range of the specified array. * If the range matches the entire array, the input array will be returned. * Otherwise, a copy will be returned. 
*/ public static boolean[] compactArray(boolean[] array, int index, int length) { if (index == 0 && length == array.length) { return array; } return Arrays.copyOfRange(array, index, index + length); } public static byte[] compactArray(byte[] array, int index, int length) { if (index == 0 && length == array.length) { return array; } return Arrays.copyOfRange(array, index, index + length); } public static short[] compactArray(short[] array, int index, int length) { if (index == 0 && length == array.length) { return array; } return Arrays.copyOfRange(array, index, index + length); } public static int[] compactArray(int[] array, int index, int length) { if (index == 0 && length == array.length) { return array; } return Arrays.copyOfRange(array, index, index + length); } public static long[] compactArray(long[] array, int index, int length) { if (index == 0 && length == array.length) { return array; } return Arrays.copyOfRange(array, index, index + length); } static int countUsedPositions(boolean[] positions) { int used = 0; for (boolean position : positions) { if (position) { used++; } } return used; } /** * Returns <tt>true</tt> if the two specified arrays contain the same object in every position. * Unlike the {@link Arrays#equals(Object[], Object[])} method, this method compares using reference equals. 
*/ static boolean arraySame(Object[] array1, Object[] array2) { if (array1 == null || array2 == null || array1.length != array2.length) { throw new IllegalArgumentException("array1 and array2 cannot be null and should have same length"); } for (int i = 0; i < array1.length; i++) { if (array1[i] != array2[i]) { return false; } } return true; } public static boolean internalPositionInRange(int internalPosition, int offset, int positionCount) { boolean withinRange = internalPosition >= offset && internalPosition < positionCount + offset; assert withinRange : format("internalPosition %s is not within range [%s, %s)", internalPosition, offset, positionCount + offset); return withinRange; } static boolean[] appendNullToIsNullArray(@Nullable boolean[] isNull, int offsetBase, int positionCount) { int desiredLength = offsetBase + positionCount + 1; boolean[] newIsNull = new boolean[desiredLength]; if (isNull != null) { checkArrayRange(isNull, offsetBase, positionCount); System.arraycopy(isNull, 0, newIsNull, 0, desiredLength - 1); } newIsNull[desiredLength - 1] = true; return newIsNull; } static int[] appendNullToOffsetsArray(int[] offsets, int offsetBase, int positionCount) { checkArrayRange(offsets, offsetBase, positionCount + 1); int desiredLength = offsetBase + positionCount + 2; int[] newOffsets = Arrays.copyOf(offsets, desiredLength); newOffsets[desiredLength - 1] = newOffsets[desiredLength - 2]; return newOffsets; } public static int getNum128Integers(int length) { int num128Integers = length / SIZE_OF_LONG / 2; if (num128Integers * SIZE_OF_LONG * 2 != length) { throw new IllegalArgumentException(format("length %d must be a multiple of 16.", length)); } return num128Integers; } /** * Returns the input blocks array if all blocks are already loaded, otherwise returns a new blocks array with all blocks loaded */ static Block[] ensureBlocksAreLoaded(Block[] blocks) { for (int i = 0; i < blocks.length; i++) { Block loaded = blocks[i].getLoadedBlock(); if (loaded != 
blocks[i]) { // Transition to new block creation mode after the first newly loaded block is encountered Block[] loadedBlocks = blocks.clone(); loadedBlocks[i++] = loaded; for (; i < blocks.length; i++) { loadedBlocks[i] = blocks[i].getLoadedBlock(); } return loadedBlocks; } } // No newly loaded blocks return blocks; } }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.12.23 at 02:06:11 PM CET // package org.openntf.base.jaxb.dxl; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * <p>Java class for agent complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="agent"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;group ref="{http://www.lotus.com/dxl}named.design.note.prolog"/> * &lt;element name="designchange" type="{http://www.lotus.com/dxl}designchange" minOccurs="0"/> * &lt;element name="trigger" type="{http://www.lotus.com/dxl}trigger"/> * &lt;element name="documentset" type="{http://www.lotus.com/dxl}documentset" minOccurs="0"/> * &lt;element name="code" type="{http://www.lotus.com/dxl}code" maxOccurs="unbounded" minOccurs="0"/> * &lt;element name="rundata" type="{http://www.lotus.com/dxl}rundata" minOccurs="0"/> * &lt;group ref="{http://www.lotus.com/dxl}note.epilog"/> * &lt;/sequence> * &lt;attGroup ref="{http://www.lotus.com/dxl}named.design.note.attrs"/> * &lt;attribute name="enabled" type="{http://www.w3.org/2001/XMLSchema}boolean" default="true" /> * &lt;attribute name="showinsearch" type="{http://www.w3.org/2001/XMLSchema}boolean" default="false" /> * &lt;attribute name="runaswebuser" type="{http://www.w3.org/2001/XMLSchema}boolean" default="false" /> * &lt;attribute name="runonbehalfof" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="activatable" type="{http://www.w3.org/2001/XMLSchema}boolean" /> * &lt;attribute name="unrestrictedoperations" type="{http://www.w3.org/2001/XMLSchema}boolean" /> * &lt;attribute name="clientbackgroundthread" type="{http://www.w3.org/2001/XMLSchema}boolean" default="false" /> * &lt;attribute name="allowremotedebugging" type="{http://www.w3.org/2001/XMLSchema}boolean" default="false" /> * &lt;attribute name="storehighlights" type="{http://www.w3.org/2001/XMLSchema}boolean" default="false" /> * &lt;attribute name="formulatype" type="{http://www.lotus.com/dxl}formula.types" default="modifydocs" /> * &lt;attribute name="restrictions" type="{http://www.lotus.com/dxl}agent.restrictions" default="restricted" /> * &lt;attribute name="profile" 
type="{http://www.w3.org/2001/XMLSchema}boolean" default="false" /> * &lt;attribute name="agenttype" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "agent", namespace = "http://www.lotus.com/dxl", propOrder = { "noteinfo", "updatedby", "revisions", "wassignedby", "textproperties", "designchange", "trigger", "documentset", "code", "rundata", "item" }) public class Agent { protected Noteinfo noteinfo; protected Updatedby updatedby; protected Revisions revisions; protected Wassignedby wassignedby; protected Textproperties textproperties; protected Designchange designchange; @XmlElement(required = true) protected Trigger trigger; protected Documentset documentset; protected List<Code> code; protected Rundata rundata; protected List<Item> item; @XmlAttribute protected Boolean enabled; @XmlAttribute protected Boolean showinsearch; @XmlAttribute protected Boolean runaswebuser; @XmlAttribute protected String runonbehalfof; @XmlAttribute protected Boolean activatable; @XmlAttribute protected Boolean unrestrictedoperations; @XmlAttribute protected Boolean clientbackgroundthread; @XmlAttribute protected Boolean allowremotedebugging; @XmlAttribute protected Boolean storehighlights; @XmlAttribute protected FormulaTypes formulatype; @XmlAttribute protected AgentRestrictions restrictions; @XmlAttribute protected Boolean profile; @XmlAttribute protected String agenttype; @XmlAttribute protected String name; @XmlAttribute protected String alias; @XmlAttribute protected String comment; @XmlAttribute protected List<DesignNoteHideTokens> hide; @XmlAttribute protected Boolean nocompose; @XmlAttribute protected Boolean noquery; @XmlAttribute protected Boolean norenderv3; @XmlAttribute protected Boolean insertdialog; @XmlAttribute protected Boolean newinsertdialog; @XmlAttribute protected Boolean otherdialog; @XmlAttribute protected Boolean noreplace; 
@XmlAttribute protected Boolean readonly; @XmlAttribute protected Boolean propagatenoreplace; @XmlAttribute protected Boolean useapplet; @XmlAttribute protected Boolean treatashtml; @XmlAttribute protected Boolean htmlallfields; @XmlAttribute protected Boolean publicaccess; @XmlAttribute protected String language; @XmlAttribute(name = "default") protected Boolean _default; @XmlAttribute(name = "private") protected Boolean _private; @XmlAttribute protected String designerversion; @XmlAttribute protected String fromtemplate; @XmlAttribute @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID @XmlSchemaType(name = "ID") protected String key; @XmlAttribute protected Float version; @XmlAttribute protected Float maintenanceversion; @XmlAttribute protected Float milestonebuild; @XmlAttribute protected String replicaid; /** * Gets the value of the noteinfo property. * * @return * possible object is * {@link Noteinfo } * */ public Noteinfo getNoteinfo() { return noteinfo; } /** * Sets the value of the noteinfo property. * * @param value * allowed object is * {@link Noteinfo } * */ public void setNoteinfo(Noteinfo value) { this.noteinfo = value; } /** * Gets the value of the updatedby property. * * @return * possible object is * {@link Updatedby } * */ public Updatedby getUpdatedby() { return updatedby; } /** * Sets the value of the updatedby property. * * @param value * allowed object is * {@link Updatedby } * */ public void setUpdatedby(Updatedby value) { this.updatedby = value; } /** * Gets the value of the revisions property. * * @return * possible object is * {@link Revisions } * */ public Revisions getRevisions() { return revisions; } /** * Sets the value of the revisions property. * * @param value * allowed object is * {@link Revisions } * */ public void setRevisions(Revisions value) { this.revisions = value; } /** * Gets the value of the wassignedby property. 
* * @return * possible object is * {@link Wassignedby } * */ public Wassignedby getWassignedby() { return wassignedby; } /** * Sets the value of the wassignedby property. * * @param value * allowed object is * {@link Wassignedby } * */ public void setWassignedby(Wassignedby value) { this.wassignedby = value; } /** * Gets the value of the textproperties property. * * @return * possible object is * {@link Textproperties } * */ public Textproperties getTextproperties() { return textproperties; } /** * Sets the value of the textproperties property. * * @param value * allowed object is * {@link Textproperties } * */ public void setTextproperties(Textproperties value) { this.textproperties = value; } /** * Gets the value of the designchange property. * * @return * possible object is * {@link Designchange } * */ public Designchange getDesignchange() { return designchange; } /** * Sets the value of the designchange property. * * @param value * allowed object is * {@link Designchange } * */ public void setDesignchange(Designchange value) { this.designchange = value; } /** * Gets the value of the trigger property. * * @return * possible object is * {@link Trigger } * */ public Trigger getTrigger() { return trigger; } /** * Sets the value of the trigger property. * * @param value * allowed object is * {@link Trigger } * */ public void setTrigger(Trigger value) { this.trigger = value; } /** * Gets the value of the documentset property. * * @return * possible object is * {@link Documentset } * */ public Documentset getDocumentset() { return documentset; } /** * Sets the value of the documentset property. * * @param value * allowed object is * {@link Documentset } * */ public void setDocumentset(Documentset value) { this.documentset = value; } /** * Gets the value of the code property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. 
* This is why there is not a <CODE>set</CODE> method for the code property. * * <p> * For example, to add a new item, do as follows: * <pre> * getCode().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Code } * * */ public List<Code> getCode() { if (code == null) { code = new ArrayList<Code>(); } return this.code; } /** * Gets the value of the rundata property. * * @return * possible object is * {@link Rundata } * */ public Rundata getRundata() { return rundata; } /** * Sets the value of the rundata property. * * @param value * allowed object is * {@link Rundata } * */ public void setRundata(Rundata value) { this.rundata = value; } /** * Gets the value of the item property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the item property. * * <p> * For example, to add a new item, do as follows: * <pre> * getItem().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Item } * * */ public List<Item> getItem() { if (item == null) { item = new ArrayList<Item>(); } return this.item; } /** * Gets the value of the enabled property. * * @return * possible object is * {@link Boolean } * */ public boolean isEnabled() { if (enabled == null) { return true; } else { return enabled; } } /** * Sets the value of the enabled property. * * @param value * allowed object is * {@link Boolean } * */ public void setEnabled(Boolean value) { this.enabled = value; } /** * Gets the value of the showinsearch property. * * @return * possible object is * {@link Boolean } * */ public boolean isShowinsearch() { if (showinsearch == null) { return false; } else { return showinsearch; } } /** * Sets the value of the showinsearch property. 
* * @param value * allowed object is * {@link Boolean } * */ public void setShowinsearch(Boolean value) { this.showinsearch = value; } /** * Gets the value of the runaswebuser property. * * @return * possible object is * {@link Boolean } * */ public boolean isRunaswebuser() { if (runaswebuser == null) { return false; } else { return runaswebuser; } } /** * Sets the value of the runaswebuser property. * * @param value * allowed object is * {@link Boolean } * */ public void setRunaswebuser(Boolean value) { this.runaswebuser = value; } /** * Gets the value of the runonbehalfof property. * * @return * possible object is * {@link String } * */ public String getRunonbehalfof() { return runonbehalfof; } /** * Sets the value of the runonbehalfof property. * * @param value * allowed object is * {@link String } * */ public void setRunonbehalfof(String value) { this.runonbehalfof = value; } /** * Gets the value of the activatable property. * * @return * possible object is * {@link Boolean } * */ public Boolean isActivatable() { return activatable; } /** * Sets the value of the activatable property. * * @param value * allowed object is * {@link Boolean } * */ public void setActivatable(Boolean value) { this.activatable = value; } /** * Gets the value of the unrestrictedoperations property. * * @return * possible object is * {@link Boolean } * */ public Boolean isUnrestrictedoperations() { return unrestrictedoperations; } /** * Sets the value of the unrestrictedoperations property. * * @param value * allowed object is * {@link Boolean } * */ public void setUnrestrictedoperations(Boolean value) { this.unrestrictedoperations = value; } /** * Gets the value of the clientbackgroundthread property. * * @return * possible object is * {@link Boolean } * */ public boolean isClientbackgroundthread() { if (clientbackgroundthread == null) { return false; } else { return clientbackgroundthread; } } /** * Sets the value of the clientbackgroundthread property. 
* * @param value * allowed object is * {@link Boolean } * */ public void setClientbackgroundthread(Boolean value) { this.clientbackgroundthread = value; } /** * Gets the value of the allowremotedebugging property. * * @return * possible object is * {@link Boolean } * */ public boolean isAllowremotedebugging() { if (allowremotedebugging == null) { return false; } else { return allowremotedebugging; } } /** * Sets the value of the allowremotedebugging property. * * @param value * allowed object is * {@link Boolean } * */ public void setAllowremotedebugging(Boolean value) { this.allowremotedebugging = value; } /** * Gets the value of the storehighlights property. * * @return * possible object is * {@link Boolean } * */ public boolean isStorehighlights() { if (storehighlights == null) { return false; } else { return storehighlights; } } /** * Sets the value of the storehighlights property. * * @param value * allowed object is * {@link Boolean } * */ public void setStorehighlights(Boolean value) { this.storehighlights = value; } /** * Gets the value of the formulatype property. * * @return * possible object is * {@link FormulaTypes } * */ public FormulaTypes getFormulatype() { if (formulatype == null) { return FormulaTypes.MODIFYDOCS; } else { return formulatype; } } /** * Sets the value of the formulatype property. * * @param value * allowed object is * {@link FormulaTypes } * */ public void setFormulatype(FormulaTypes value) { this.formulatype = value; } /** * Gets the value of the restrictions property. * * @return * possible object is * {@link AgentRestrictions } * */ public AgentRestrictions getRestrictions() { if (restrictions == null) { return AgentRestrictions.RESTRICTED; } else { return restrictions; } } /** * Sets the value of the restrictions property. * * @param value * allowed object is * {@link AgentRestrictions } * */ public void setRestrictions(AgentRestrictions value) { this.restrictions = value; } /** * Gets the value of the profile property. 
* * @return * possible object is * {@link Boolean } * */ public boolean isProfile() { if (profile == null) { return false; } else { return profile; } } /** * Sets the value of the profile property. * * @param value * allowed object is * {@link Boolean } * */ public void setProfile(Boolean value) { this.profile = value; } /** * Gets the value of the agenttype property. * * @return * possible object is * {@link String } * */ public String getAgenttype() { return agenttype; } /** * Sets the value of the agenttype property. * * @param value * allowed object is * {@link String } * */ public void setAgenttype(String value) { this.agenttype = value; } /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } /** * Gets the value of the alias property. * * @return * possible object is * {@link String } * */ public String getAlias() { return alias; } /** * Sets the value of the alias property. * * @param value * allowed object is * {@link String } * */ public void setAlias(String value) { this.alias = value; } /** * Gets the value of the comment property. * * @return * possible object is * {@link String } * */ public String getComment() { return comment; } /** * Sets the value of the comment property. * * @param value * allowed object is * {@link String } * */ public void setComment(String value) { this.comment = value; } /** * Gets the value of the hide property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the hide property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getHide().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link DesignNoteHideTokens } * * */ public List<DesignNoteHideTokens> getHide() { if (hide == null) { hide = new ArrayList<DesignNoteHideTokens>(); } return this.hide; } /** * Gets the value of the nocompose property. * * @return * possible object is * {@link Boolean } * */ public Boolean isNocompose() { return nocompose; } /** * Sets the value of the nocompose property. * * @param value * allowed object is * {@link Boolean } * */ public void setNocompose(Boolean value) { this.nocompose = value; } /** * Gets the value of the noquery property. * * @return * possible object is * {@link Boolean } * */ public Boolean isNoquery() { return noquery; } /** * Sets the value of the noquery property. * * @param value * allowed object is * {@link Boolean } * */ public void setNoquery(Boolean value) { this.noquery = value; } /** * Gets the value of the norenderv3 property. * * @return * possible object is * {@link Boolean } * */ public Boolean isNorenderv3() { return norenderv3; } /** * Sets the value of the norenderv3 property. * * @param value * allowed object is * {@link Boolean } * */ public void setNorenderv3(Boolean value) { this.norenderv3 = value; } /** * Gets the value of the insertdialog property. * * @return * possible object is * {@link Boolean } * */ public Boolean isInsertdialog() { return insertdialog; } /** * Sets the value of the insertdialog property. * * @param value * allowed object is * {@link Boolean } * */ public void setInsertdialog(Boolean value) { this.insertdialog = value; } /** * Gets the value of the newinsertdialog property. * * @return * possible object is * {@link Boolean } * */ public Boolean isNewinsertdialog() { return newinsertdialog; } /** * Sets the value of the newinsertdialog property. 
* * @param value * allowed object is * {@link Boolean } * */ public void setNewinsertdialog(Boolean value) { this.newinsertdialog = value; } /** * Gets the value of the otherdialog property. * * @return * possible object is * {@link Boolean } * */ public Boolean isOtherdialog() { return otherdialog; } /** * Sets the value of the otherdialog property. * * @param value * allowed object is * {@link Boolean } * */ public void setOtherdialog(Boolean value) { this.otherdialog = value; } /** * Gets the value of the noreplace property. * * @return * possible object is * {@link Boolean } * */ public boolean isNoreplace() { if (noreplace == null) { return false; } else { return noreplace; } } /** * Sets the value of the noreplace property. * * @param value * allowed object is * {@link Boolean } * */ public void setNoreplace(Boolean value) { this.noreplace = value; } /** * Gets the value of the readonly property. * * @return * possible object is * {@link Boolean } * */ public Boolean isReadonly() { return readonly; } /** * Sets the value of the readonly property. * * @param value * allowed object is * {@link Boolean } * */ public void setReadonly(Boolean value) { this.readonly = value; } /** * Gets the value of the propagatenoreplace property. * * @return * possible object is * {@link Boolean } * */ public Boolean isPropagatenoreplace() { return propagatenoreplace; } /** * Sets the value of the propagatenoreplace property. * * @param value * allowed object is * {@link Boolean } * */ public void setPropagatenoreplace(Boolean value) { this.propagatenoreplace = value; } /** * Gets the value of the useapplet property. * * @return * possible object is * {@link Boolean } * */ public boolean isUseapplet() { if (useapplet == null) { return false; } else { return useapplet; } } /** * Sets the value of the useapplet property. 
* * @param value * allowed object is * {@link Boolean } * */ public void setUseapplet(Boolean value) { this.useapplet = value; } /** * Gets the value of the treatashtml property. * * @return * possible object is * {@link Boolean } * */ public boolean isTreatashtml() { if (treatashtml == null) { return false; } else { return treatashtml; } } /** * Sets the value of the treatashtml property. * * @param value * allowed object is * {@link Boolean } * */ public void setTreatashtml(Boolean value) { this.treatashtml = value; } /** * Gets the value of the htmlallfields property. * * @return * possible object is * {@link Boolean } * */ public Boolean isHtmlallfields() { return htmlallfields; } /** * Sets the value of the htmlallfields property. * * @param value * allowed object is * {@link Boolean } * */ public void setHtmlallfields(Boolean value) { this.htmlallfields = value; } /** * Gets the value of the publicaccess property. * * @return * possible object is * {@link Boolean } * */ public Boolean isPublicaccess() { return publicaccess; } /** * Sets the value of the publicaccess property. * * @param value * allowed object is * {@link Boolean } * */ public void setPublicaccess(Boolean value) { this.publicaccess = value; } /** * Gets the value of the language property. * * @return * possible object is * {@link String } * */ public String getLanguage() { return language; } /** * Sets the value of the language property. * * @param value * allowed object is * {@link String } * */ public void setLanguage(String value) { this.language = value; } /** * Gets the value of the default property. * * @return * possible object is * {@link Boolean } * */ public boolean isDefault() { if (_default == null) { return false; } else { return _default; } } /** * Sets the value of the default property. * * @param value * allowed object is * {@link Boolean } * */ public void setDefault(Boolean value) { this._default = value; } /** * Gets the value of the private property. 
* * @return * possible object is * {@link Boolean } * */ public boolean isPrivate() { if (_private == null) { return false; } else { return _private; } } /** * Sets the value of the private property. * * @param value * allowed object is * {@link Boolean } * */ public void setPrivate(Boolean value) { this._private = value; } /** * Gets the value of the designerversion property. * * @return * possible object is * {@link String } * */ public String getDesignerversion() { return designerversion; } /** * Sets the value of the designerversion property. * * @param value * allowed object is * {@link String } * */ public void setDesignerversion(String value) { this.designerversion = value; } /** * Gets the value of the fromtemplate property. * * @return * possible object is * {@link String } * */ public String getFromtemplate() { return fromtemplate; } /** * Sets the value of the fromtemplate property. * * @param value * allowed object is * {@link String } * */ public void setFromtemplate(String value) { this.fromtemplate = value; } /** * Gets the value of the key property. * * @return * possible object is * {@link String } * */ public String getKey() { return key; } /** * Sets the value of the key property. * * @param value * allowed object is * {@link String } * */ public void setKey(String value) { this.key = value; } /** * Gets the value of the version property. * * @return * possible object is * {@link Float } * */ public Float getVersion() { return version; } /** * Sets the value of the version property. * * @param value * allowed object is * {@link Float } * */ public void setVersion(Float value) { this.version = value; } /** * Gets the value of the maintenanceversion property. * * @return * possible object is * {@link Float } * */ public Float getMaintenanceversion() { return maintenanceversion; } /** * Sets the value of the maintenanceversion property. 
* * @param value * allowed object is * {@link Float } * */ public void setMaintenanceversion(Float value) { this.maintenanceversion = value; } /** * Gets the value of the milestonebuild property. * * @return * possible object is * {@link Float } * */ public Float getMilestonebuild() { return milestonebuild; } /** * Sets the value of the milestonebuild property. * * @param value * allowed object is * {@link Float } * */ public void setMilestonebuild(Float value) { this.milestonebuild = value; } /** * Gets the value of the replicaid property. * * @return * possible object is * {@link String } * */ public String getReplicaid() { return replicaid; } /** * Sets the value of the replicaid property. * * @param value * allowed object is * {@link String } * */ public void setReplicaid(String value) { this.replicaid = value; } }
package com.gdn.venice.facade;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;

import javax.ejb.EJBException;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.Query;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.XMLConfiguration;
import org.apache.log4j.Logger;

import com.gdn.venice.facade.callback.SessionCallback;
import com.gdn.venice.facade.finder.FinderReturn;
import com.gdn.venice.persistence.LogApprovalStatus;

import com.djarum.raf.utilities.JPQLAdvancedQueryCriteria;
import com.djarum.raf.utilities.JPQLQueryStringBuilder;
import com.djarum.raf.utilities.Log4jLoggerFactory;

/**
 * Session Bean implementation class LogApprovalStatusSessionEJBBean.
 *
 * <p>Provides CRUD and finder operations for {@link LogApprovalStatus}
 * entities, with optional pre/post operation callbacks configured via the
 * Venice module configuration file.
 *
 * <p>
 * <b>author:</b> <a href="mailto:david@pwsindonesia.com">David Forden</a>
 * <p>
 * <b>version:</b> 1.0
 * <p>
 * <b>since:</b> 2011
 */
@Stateless(mappedName = "LogApprovalStatusSessionEJBBean")
public class LogApprovalStatusSessionEJBBean implements
		LogApprovalStatusSessionEJBRemote, LogApprovalStatusSessionEJBLocal {

	/*
	 * Implements an IOC model for pre/post callbacks to persist, merge, and
	 * remove operations. The onPrePersist, onPostPersist, onPreMerge,
	 * onPostMerge, onPreRemove and OnPostRemove operations must be implemented
	 * by the callback class.
	 */
	private String _sessionCallbackClassName = null;

	// A reference to the callback object that has been instantiated
	private SessionCallback _callback = null;

	protected static Logger _log = null;

	// The configuration file to use
	private String _configFile = System.getenv("VENICE_HOME")
			+ "/conf/module-config.xml";

	// The binding array used when binding variables into a JPQL query.
	// Populated by the findBy* methods and consumed (then cleared) by
	// queryByRange().
	private Object[] bindingArray = null;

	@PersistenceContext(unitName = "GDN-Venice-Persistence", type = PersistenceContextType.TRANSACTION)
	protected EntityManager em;

	/**
	 * Default constructor. Configures the bean from the Venice configuration
	 * file and, if successful, instantiates the session callback handler.
	 */
	public LogApprovalStatusSessionEJBBean() {
		super();
		Log4jLoggerFactory loggerFactory = new Log4jLoggerFactory();
		_log = loggerFactory
				.getLog4JLogger("com.gdn.venice.facade.LogApprovalStatusSessionEJBBean");
		// If the configuration is successful then instantiate the callback
		if (this.configure())
			this.instantiateTriggerCallback();
	}

	/**
	 * Reads the venice configuration file and configures the EJB's
	 * session callback class name.
	 *
	 * @return {@code Boolean.TRUE} on success, {@code Boolean.FALSE} when the
	 *         configuration file cannot be read
	 */
	private Boolean configure() {
		_log.debug("Venice Configuration File:" + _configFile);
		try {
			XMLConfiguration config = new XMLConfiguration(_configFile);
			/*
			 * Get the index entry for the adapter configuration from the
			 * configuration file - there will be multiple adapter
			 * configurations
			 */
			@SuppressWarnings({ "rawtypes" })
			List callbacks = config
					.getList("sessionBeanConfig.callback.[@name]");
			// Sentinel: if no entry matches this bean, the indexed getString()
			// below simply yields no callback class.
			int beanConfigIndex = Integer.MAX_VALUE;
			@SuppressWarnings("rawtypes")
			Iterator i = callbacks.iterator();
			while (i.hasNext()) {
				String beanName = (String) i.next();
				if (this.getClass().getSimpleName().equals(beanName)) {
					beanConfigIndex = callbacks.indexOf(beanName);
					_log.debug("Bean configuration for " + beanName
							+ " found at " + beanConfigIndex);
				}
			}
			this._sessionCallbackClassName = config
					.getString("sessionBeanConfig.callback(" + beanConfigIndex
							+ ").[@class]");
			_log.debug("Loaded configuration for _sessionCallbackClassName:"
					+ _sessionCallbackClassName);
		} catch (ConfigurationException e) {
			_log.error("A ConfigurationException occured when processing the configuration file"
					+ e.getMessage());
			e.printStackTrace();
			return Boolean.FALSE;
		}
		return Boolean.TRUE;
	}

	/**
	 * Instantiates the trigger callback handler class named by the
	 * configuration, if any.
	 *
	 * @return {@code Boolean.TRUE} if no callback is configured or it was
	 *         instantiated successfully, {@code Boolean.FALSE} otherwise
	 */
	Boolean instantiateTriggerCallback() {
		if (_sessionCallbackClassName != null
				&& !_sessionCallbackClassName.isEmpty())
			try {
				Class<?> c = Class.forName(_sessionCallbackClassName);
				_callback = (SessionCallback) c.newInstance();
			} catch (ClassNotFoundException e) {
				_log.error("A ClassNotFoundException occured when trying to instantiate:"
						+ this._sessionCallbackClassName);
				e.printStackTrace();
				return Boolean.FALSE;
			} catch (InstantiationException e) {
				_log.error("A InstantiationException occured when trying to instantiate:"
						+ this._sessionCallbackClassName);
				e.printStackTrace();
				return Boolean.FALSE;
			} catch (IllegalAccessException e) {
				_log.error("A IllegalAccessException occured when trying to instantiate:"
						+ this._sessionCallbackClassName);
				e.printStackTrace();
				return Boolean.FALSE;
			}
		return Boolean.TRUE;
	}

	/**
	 * Binds any pending positional variables into the given query.
	 * JPQL positional parameters are 1-based, hence {@code i + 1}.
	 *
	 * @param query the query to bind the current {@code bindingArray} into
	 */
	private void bindQueryVariables(Query query) {
		if (this.bindingArray != null) {
			for (int i = 0; i < bindingArray.length; ++i) {
				if (bindingArray[i] != null) {
					query.setParameter(i + 1, bindingArray[i]);
				}
			}
		}
	}

	/**
	 * Logs the elapsed time of a method call.
	 *
	 * <p>Fix: the original computed {@code startTime - endTime}, which always
	 * produced a negative duration.
	 *
	 * @param methodName the label to log, e.g. {@code "queryByRange()"}
	 * @param startTime  the {@link System#currentTimeMillis()} at method entry
	 */
	private void logDuration(String methodName, long startTime) {
		long duration = System.currentTimeMillis() - startTime;
		_log.debug(methodName + " duration:" + duration + "ms");
	}

	/**
	 * Logs and throws an {@link EJBException} for a failed callback operation.
	 *
	 * @param operation the callback operation name, e.g. {@code "onPrePersist"}
	 */
	private void raiseCallbackFailure(String operation) {
		String msg = "An " + operation + " callback operation failed for:"
				+ this._sessionCallbackClassName;
		_log.error(msg);
		throw new EJBException(msg);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.gdn.venice.facade.LogApprovalStatusSessionEJBRemote#queryByRange(java.lang
	 * .String, int, int)
	 */
	@Override
	@SuppressWarnings({ "unchecked" })
	public List<LogApprovalStatus> queryByRange(String jpqlStmt,
			int firstResult, int maxResults) {
		long startTime = System.currentTimeMillis();
		_log.debug("queryByRange()");
		Query query = null;
		try {
			query = em.createQuery(jpqlStmt);
			bindQueryVariables(query);
		} catch (Exception e) {
			_log.error("An exception occured when calling em.createQuery():"
					+ e.getMessage());
			throw new EJBException(e);
		}
		try {
			if (firstResult > 0) {
				query = query.setFirstResult(firstResult);
			}
			if (maxResults > 0) {
				query = query.setMaxResults(maxResults);
			}
		} catch (Exception e) {
			_log.error("An exception occured when accessing the result set of a query:"
					+ e.getMessage());
			throw new EJBException(e);
		}
		List<LogApprovalStatus> returnList = (List<LogApprovalStatus>) query
				.getResultList();
		// The bindings are one-shot; clear them so a subsequent plain query
		// does not accidentally reuse stale parameters.
		this.bindingArray = null;
		logDuration("queryByRange()", startTime);
		return returnList;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.gdn.venice.facade.LogApprovalStatusSessionEJBRemote#persistLogApprovalStatus(com
	 * .gdn.venice.persistence.LogApprovalStatus)
	 */
	@Override
	@TransactionAttribute(TransactionAttributeType.REQUIRED)
	public LogApprovalStatus persistLogApprovalStatus(
			LogApprovalStatus logApprovalStatus) {
		long startTime = System.currentTimeMillis();
		_log.debug("persistLogApprovalStatus()");
		// Call the onPrePersist() callback and throw an exception if it fails
		if (this._callback != null) {
			if (!this._callback.onPrePersist(logApprovalStatus)) {
				raiseCallbackFailure("onPrePersist");
			}
		}
		LogApprovalStatus existingLogApprovalStatus = null;
		if (logApprovalStatus != null
				&& logApprovalStatus.getApprovalStatusId() != null) {
			_log.debug("persistLogApprovalStatus:em.find()");
			try {
				existingLogApprovalStatus = em.find(LogApprovalStatus.class,
						logApprovalStatus.getApprovalStatusId());
			} catch (Exception e) {
				_log.error("An exception occured when calling em.find():"
						+ e.getMessage());
				throw new EJBException(e);
			}
		}
		if (existingLogApprovalStatus == null) {
			_log.debug("persistLogApprovalStatus:em.persist()");
			try {
				em.persist(logApprovalStatus);
			} catch (Exception e) {
				_log.error("An exception occured when calling em.persist():"
						+ e.getMessage());
				throw new EJBException(e);
			}
			_log.debug("persistLogApprovalStatus:em.flush()");
			try {
				em.flush();
				em.clear();
			} catch (Exception e) {
				_log.error("An exception occured when calling em.flush():"
						+ e.getMessage());
				throw new EJBException(e);
			}
			// Call the onPostPersist() callback and throw an exception if it fails
			if (this._callback != null) {
				if (!this._callback.onPostPersist(logApprovalStatus)) {
					raiseCallbackFailure("onPostPersist");
				}
			}
			logDuration("persistLogApprovalStatus()", startTime);
			return logApprovalStatus;
		} else {
			throw new EJBException("LogApprovalStatus exists!. LogApprovalStatus = "
					+ logApprovalStatus.getApprovalStatusId());
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.gdn.venice.facade.LogApprovalStatusSessionEJBRemote#persistLogApprovalStatusList
	 * (java.util.List)
	 */
	@Override
	@SuppressWarnings("rawtypes")
	@TransactionAttribute(TransactionAttributeType.REQUIRED)
	public ArrayList<LogApprovalStatus> persistLogApprovalStatusList(
			List<LogApprovalStatus> logApprovalStatusList) {
		_log.debug("persistLogApprovalStatusList()");
		Iterator i = logApprovalStatusList.iterator();
		while (i.hasNext()) {
			this.persistLogApprovalStatus((LogApprovalStatus) i.next());
		}
		return (ArrayList<LogApprovalStatus>) logApprovalStatusList;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.gdn.venice.facade.LogApprovalStatusSessionEJBRemote#mergeLogApprovalStatus(com.
	 * gdn.venice.persistence.LogApprovalStatus)
	 */
	@Override
	@TransactionAttribute(TransactionAttributeType.REQUIRED)
	public LogApprovalStatus mergeLogApprovalStatus(
			LogApprovalStatus logApprovalStatus) {
		long startTime = System.currentTimeMillis();
		_log.debug("mergeLogApprovalStatus()");
		// Call the onPreMerge() callback and throw an exception if it fails
		if (this._callback != null) {
			if (!this._callback.onPreMerge(logApprovalStatus)) {
				raiseCallbackFailure("onPreMerge");
			}
		}
		LogApprovalStatus existing = null;
		if (logApprovalStatus.getApprovalStatusId() != null) {
			_log.debug("mergeLogApprovalStatus:em.find()");
			existing = em.find(LogApprovalStatus.class,
					logApprovalStatus.getApprovalStatusId());
		}
		// Entities with no persistent counterpart fall through to persist
		if (existing == null) {
			return this.persistLogApprovalStatus(logApprovalStatus);
		} else {
			_log.debug("mergeLogApprovalStatus:em.merge()");
			try {
				em.merge(logApprovalStatus);
			} catch (Exception e) {
				_log.error("An exception occured when calling em.merge():"
						+ e.getMessage());
				throw new EJBException(e);
			}
			_log.debug("mergeLogApprovalStatus:em.flush()");
			try {
				em.flush();
				em.clear();
			} catch (Exception e) {
				_log.error("An exception occured when calling em.flush():"
						+ e.getMessage());
				throw new EJBException(e);
			}
			// Re-read and refresh so the returned entity reflects any
			// database-side changes (triggers, defaults, etc.)
			LogApprovalStatus newobject = em.find(LogApprovalStatus.class,
					logApprovalStatus.getApprovalStatusId());
			_log.debug("mergeLogApprovalStatus():em.refresh");
			try {
				em.refresh(newobject);
			} catch (Exception e) {
				_log.error("An exception occured when calling em.refresh():"
						+ e.getMessage());
				throw new EJBException(e);
			}
			// Call the onPostMerge() callback and throw an exception if it fails
			if (this._callback != null) {
				if (!this._callback.onPostMerge(newobject)) {
					raiseCallbackFailure("onPostMerge");
				}
			}
			logDuration("mergeLogApprovalStatus()", startTime);
			return newobject;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.gdn.venice.facade.LogApprovalStatusSessionEJBRemote#mergeLogApprovalStatusList(
	 * java.util.List)
	 */
	@Override
	@SuppressWarnings("rawtypes")
	@TransactionAttribute(TransactionAttributeType.REQUIRED)
	public ArrayList<LogApprovalStatus> mergeLogApprovalStatusList(
			List<LogApprovalStatus> logApprovalStatusList) {
		_log.debug("mergeLogApprovalStatusList()");
		Iterator i = logApprovalStatusList.iterator();
		while (i.hasNext()) {
			this.mergeLogApprovalStatus((LogApprovalStatus) i.next());
		}
		return (ArrayList<LogApprovalStatus>) logApprovalStatusList;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.gdn.venice.facade.LogApprovalStatusSessionEJBRemote#removeLogApprovalStatus(com.
	 * gdn.venice.persistence.LogApprovalStatus)
	 */
	@Override
	@TransactionAttribute(TransactionAttributeType.REQUIRED)
	public void removeLogApprovalStatus(LogApprovalStatus logApprovalStatus) {
		long startTime = System.currentTimeMillis();
		_log.debug("removeLogApprovalStatus()");
		// Call the onPreRemove() callback and throw an exception if it fails
		if (this._callback != null) {
			if (!this._callback.onPreRemove(logApprovalStatus)) {
				raiseCallbackFailure("onPreRemove");
			}
		}
		// Re-attach the entity before removal; if the entity is not found,
		// em.remove(null) fails inside the try and is rethrown as EJBException.
		_log.debug("removeLogApprovalStatus:em.find()");
		logApprovalStatus = em.find(LogApprovalStatus.class,
				logApprovalStatus.getApprovalStatusId());
		try {
			_log.debug("removeLogApprovalStatus:em.remove()");
			em.remove(logApprovalStatus);
		} catch (Exception e) {
			_log.error("An exception occured when calling em.remove():"
					+ e.getMessage());
			throw new EJBException(e);
		}
		// Call the onPostRemove() callback and throw an exception if it fails
		if (this._callback != null) {
			if (!this._callback.onPostRemove(logApprovalStatus)) {
				raiseCallbackFailure("onPostRemove");
			}
		}
		_log.debug("removeLogApprovalStatus:em.flush()");
		em.flush();
		em.clear();
		logDuration("removeLogApprovalStatus()", startTime);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.gdn.venice.facade.LogApprovalStatusSessionEJBRemote#removeLogApprovalStatusList(
	 * java.util.List)
	 */
	@Override
	@SuppressWarnings("rawtypes")
	@TransactionAttribute(TransactionAttributeType.REQUIRED)
	public void removeLogApprovalStatusList(
			List<LogApprovalStatus> logApprovalStatusList) {
		_log.debug("removeLogApprovalStatusList()");
		Iterator i = logApprovalStatusList.iterator();
		while (i.hasNext()) {
			this.removeLogApprovalStatus((LogApprovalStatus) i.next());
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.gdn.venice.facade.LogApprovalStatusSessionEJBRemote#findByLogApprovalStatusLike(
	 * com.gdn.venice.persistence.LogApprovalStatus, int, int)
	 */
	@Override
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public List<LogApprovalStatus> findByLogApprovalStatusLike(
			LogApprovalStatus logApprovalStatus,
			JPQLAdvancedQueryCriteria criteria, int firstResult, int maxResults) {
		long startTime = System.currentTimeMillis();
		_log.debug("findByLogApprovalStatusLike()");
		// Reject null criteria up front instead of first building a query
		// string from it (the original checked only after buildQueryString).
		if (criteria == null) {
			String errMsg = "A query has been initiated with null criteria.";
			_log.error(errMsg);
			throw new EJBException(errMsg);
		}
		JPQLQueryStringBuilder qb = new JPQLQueryStringBuilder(logApprovalStatus);
		HashMap complexTypeBindings = new HashMap();
		String stmt = qb.buildQueryString(complexTypeBindings, criteria);
		/*
		 * Get the binding array from the query builder and make it available
		 * to the queryByRange method
		 */
		this.bindingArray = qb.getBindingArray();
		for (int i = 0; i < qb.getBindingArray().length; i++) {
			_log.debug("Bindings:" + i + ":" + qb.getBindingArray()[i]);
		}
		List<LogApprovalStatus> logApprovalStatusList = this.queryByRange(stmt,
				firstResult, maxResults);
		logDuration("findByLogApprovalStatusLike()", startTime);
		return logApprovalStatusList;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.gdn.venice.facade.LogApprovalStatusSessionEJBRemote#findByLogApprovalStatusLikeFR(
	 * com.gdn.venice.persistence.LogApprovalStatus, int, int)
	 */
	@Override
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public FinderReturn findByLogApprovalStatusLikeFR(
			LogApprovalStatus logApprovalStatus,
			JPQLAdvancedQueryCriteria criteria, int firstResult, int maxResults) {
		long startTime = System.currentTimeMillis();
		_log.debug("findByLogApprovalStatusLikeFR()");
		// Reject null criteria up front (see findByLogApprovalStatusLike)
		if (criteria == null) {
			String errMsg = "A query has been initiated with null criteria.";
			_log.error(errMsg);
			throw new EJBException(errMsg);
		}
		JPQLQueryStringBuilder qb = new JPQLQueryStringBuilder(logApprovalStatus);
		HashMap complexTypeBindings = new HashMap();
		String stmt = qb.buildQueryString(complexTypeBindings, criteria);
		/*
		 * Get the binding array from the query builder and make it available
		 * to the queryByRange method
		 */
		this.bindingArray = qb.getBindingArray();
		for (int i = 0; i < qb.getBindingArray().length; i++) {
			_log.debug("Bindings:" + i + ":" + qb.getBindingArray()[i]);
		}
		// Set the finder return object with the count of the total query rows.
		// The count query must run before queryByRange(), which clears the
		// binding array.
		FinderReturn fr = new FinderReturn();
		String countStmt = "select count(o) "
				+ stmt.substring(stmt.indexOf("from"));
		Query query = null;
		try {
			query = em.createQuery(countStmt);
			bindQueryVariables(query);
			Long totalRows = (Long) query.getSingleResult();
			fr.setNumQueryRows(totalRows);
		} catch (Exception e) {
			_log.error("An exception occured when calling em.createQuery():"
					+ e.getMessage());
			throw new EJBException(e);
		}
		// Set the finder return object with the query list
		fr.setResultList(this.queryByRange(stmt, firstResult, maxResults));
		logDuration("findByLogApprovalStatusLikeFR()", startTime);
		return fr;
	}
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.bytecodeAnalysis.asm;

import org.jetbrains.org.objectweb.asm.Opcodes;
import org.jetbrains.org.objectweb.asm.Type;
import org.jetbrains.org.objectweb.asm.tree.*;
import org.jetbrains.org.objectweb.asm.tree.analysis.*;

import java.util.ArrayList;
import java.util.List;

/**
 * Extended version of {@link LiteAnalyzer}.
 * It handles frames <b>and</b> additional data.
 *
 * <p>Runs a work-list (fixed-point) dataflow analysis over a method's
 * instructions: each instruction gets a {@link Frame} of abstract values and
 * a per-instruction {@code Data} element; both are merged at control-flow
 * joins until no more changes occur.
 *
 * @author lambdamix
 */
public class LiteAnalyzerExt<V extends Value, Data, MyInterpreter extends Interpreter<V> & InterpreterExt<Data>> implements Opcodes {
  private final MyInterpreter interpreter;
  // data[i] holds the extra dataflow fact valid *before* instruction i;
  // data[0] is seeded with startData in the constructor
  private final Data[] data;
  // frames[i] is the abstract stack/locals state before instruction i
  private Frame<V>[] frames;
  // queued[i] is true while instruction i is pending in the work list
  private boolean[] queued;
  // work list of instruction indices; top is its current size
  private int[] queue;
  private int top;

  public LiteAnalyzerExt(MyInterpreter interpreter, Data[] data, Data startData) {
    this.interpreter = interpreter;
    this.data = data;
    if (data.length > 0) {
      data[0] = startData;
    }
  }

  public Data[] getData() {
    return data;
  }

  /**
   * Analyzes the given method, computing a frame and a {@code Data} element
   * for every instruction.
   *
   * @param owner internal name of the class declaring {@code m} (used to type
   *              the implicit {@code this} local of instance methods)
   * @param m     the method to analyze
   * @return the array of frames, one per instruction (empty for abstract or
   *         native methods)
   * @throws AnalyzerException if the interpreter fails on some instruction;
   *         the offending instruction index is included in the message
   */
  public Frame<V>[] analyze(String owner, MethodNode m) throws AnalyzerException {
    // Abstract and native methods have no bytecode to analyze
    if ((m.access & (ACC_ABSTRACT | ACC_NATIVE)) != 0) {
      frames = ASMUtils.newFrameArray(0);
      return frames;
    }

    // Abstract value pushed on the stack at exception-handler entry
    @SuppressWarnings("unchecked")
    V refV = (V)BasicValue.REFERENCE_VALUE;

    int n = m.instructions.size();
    InsnList insns = m.instructions;
    List<TryCatchBlockNode>[] handlers = ASMUtils.newListArray(n);
    frames = ASMUtils.newFrameArray(n);
    queued = new boolean[n];
    queue = new int[n];
    top = 0;

    // computes exception handlers for each instruction: handlers[j] lists the
    // try/catch blocks whose protected region covers instruction j
    for (int i = 0; i < m.tryCatchBlocks.size(); ++i) {
      TryCatchBlockNode tcb = m.tryCatchBlocks.get(i);
      int begin = insns.indexOf(tcb.start);
      int end = insns.indexOf(tcb.end);
      for (int j = begin; j < end; ++j) {
        List<TryCatchBlockNode> insnHandlers = handlers[j];
        if (insnHandlers == null) {
          insnHandlers = new ArrayList<>();
          handlers[j] = insnHandlers;
        }
        insnHandlers.add(tcb);
      }
    }

    // initializes the data structures for the control flow analysis:
    // build the entry frame from the method descriptor ('this' + parameters,
    // long/double parameters occupy two slots, remaining locals are empty)
    Frame<V> current = newFrame(m.maxLocals, m.maxStack);
    Frame<V> handler = newFrame(m.maxLocals, m.maxStack);
    current.setReturn(interpreter.newReturnTypeValue(Type.getReturnType(m.desc)));
    Type[] args = Type.getArgumentTypes(m.desc);
    int local = 0;
    boolean isInstanceMethod = (m.access & ACC_STATIC) == 0;
    if (isInstanceMethod) {
      Type ctype = Type.getObjectType(owner);
      current.setLocal(local, interpreter.newParameterValue(true, local, ctype));
      local++;
    }
    for (Type arg : args) {
      current.setLocal(local, interpreter.newParameterValue(isInstanceMethod, local, arg));
      local++;
      if (arg.getSize() == 2) {
        current.setLocal(local, interpreter.newEmptyValue(local));
        local++;
      }
    }
    while (local < m.maxLocals) {
      current.setLocal(local, interpreter.newEmptyValue(local));
      local++;
    }

    // Seed the analysis at instruction 0 with the entry frame/data
    interpreter.init(data[0]);
    merge(0, current);

    // control flow analysis: process the work list until a fixed point
    while (top > 0) {
      int insn = queue[--top];
      Frame<V> f = frames[insn];
      queued[insn] = false;

      AbstractInsnNode insnNode = null;
      try {
        insnNode = m.instructions.get(insn);
        int insnOpcode = insnNode.getOpcode();
        int insnType = insnNode.getType();

        // Pseudo-instructions (labels, line numbers, stack-map frames) do not
        // change the state — propagate it unchanged to the next instruction
        if (insnType == AbstractInsnNode.LABEL || insnType == AbstractInsnNode.LINE || insnType == AbstractInsnNode.FRAME) {
          interpreter.init(data[insn]);
          merge(insn + 1, f);
        }
        else {
          // delta: execute the instruction on a scratch copy of the frame,
          // then merge the result into every possible successor
          interpreter.init(data[insn]);
          current.init(f).execute(insnNode, interpreter);

          if (insnNode instanceof JumpInsnNode) {
            JumpInsnNode j = (JumpInsnNode)insnNode;
            // Conditional jumps also fall through to the next instruction
            if (insnOpcode != GOTO && insnOpcode != JSR) {
              merge(insn + 1, current);
            }
            int jump = insns.indexOf(j.label);
            merge(jump, current);
          }
          else if (insnNode instanceof LookupSwitchInsnNode) {
            LookupSwitchInsnNode lsi = (LookupSwitchInsnNode)insnNode;
            int jump = insns.indexOf(lsi.dflt);
            merge(jump, current);
            for (int j = 0; j < lsi.labels.size(); ++j) {
              LabelNode label = lsi.labels.get(j);
              jump = insns.indexOf(label);
              merge(jump, current);
            }
          }
          else if (insnNode instanceof TableSwitchInsnNode) {
            TableSwitchInsnNode tsi = (TableSwitchInsnNode)insnNode;
            int jump = insns.indexOf(tsi.dflt);
            merge(jump, current);
            for (int j = 0; j < tsi.labels.size(); ++j) {
              LabelNode label = tsi.labels.get(j);
              jump = insns.indexOf(label);
              merge(jump, current);
            }
          }
          else if (insnOpcode != ATHROW && (insnOpcode < IRETURN || insnOpcode > RETURN)) {
            // Any other non-terminating instruction falls through
            merge(insn + 1, current);
          }
        }

        // Every covered instruction may also transfer control to its
        // exception handlers: same locals, stack cleared except the thrown ref
        List<TryCatchBlockNode> insnHandlers = handlers[insn];
        if (insnHandlers != null) {
          for (TryCatchBlockNode tcb : insnHandlers) {
            int jump = insns.indexOf(tcb.handler);
            handler.init(f);
            handler.clearStack();
            handler.push(refV);
            merge(jump, handler);
          }
        }
      }
      catch (AnalyzerException e) {
        throw new AnalyzerException(e.node, "Error at instruction " + insn + ": " + e.getMessage(), e);
      }
      catch (Exception e) {
        throw new AnalyzerException(insnNode, "Error at instruction " + insn + ": " + e.getMessage(), e);
      }
    }

    return frames;
  }

  public Frame<V>[] getFrames() {
    return frames;
  }

  protected Frame<V> newFrame(int nLocals, int nStack) {
    return new Frame<>(nLocals, nStack);
  }

  protected Frame<V> newFrame(Frame<? extends V> src) {
    return new Frame<>(src);
  }

  // -------------------------------------------------------------------------

  /**
   * Merges a frame (and the interpreter's after-data) into the state stored
   * for instruction {@code insn}; re-enqueues the instruction when anything
   * changed so its successors are recomputed.
   */
  private void merge(int insn, Frame<V> frame) throws AnalyzerException {
    Frame<V> oldFrame = frames[insn];
    boolean changes;

    if (oldFrame == null) {
      // First time this instruction is reached — copy the incoming frame
      frames[insn] = newFrame(frame);
      changes = true;
    }
    else {
      changes = oldFrame.merge(frame, interpreter);
    }

    Data oldData = data[insn];
    Data newData = interpreter.getAfterData(insn);

    if (oldData == null) {
      data[insn] = newData;
      changes = true;
    }
    else if (newData != null) {
      // Join the previously stored data with the newly computed one; only a
      // value change (by equals) forces reprocessing
      Data mergedData = interpreter.merge(oldData, newData);
      data[insn] = mergedData;
      changes |= !oldData.equals(mergedData);
    }

    if (changes && !queued[insn]) {
      queued[insn] = true;
      queue[top++] = insn;
    }
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.hive;

import com.facebook.presto.hive.HiveSplitSourceProvider.HiveSplitSource;
import com.facebook.presto.hive.util.SuspendingExecutor;
import com.facebook.presto.spi.HostAddress;
import com.facebook.presto.spi.Split;
import com.google.common.util.concurrent.SettableFuture;
import org.testng.annotations.Test;

import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;

/**
 * Tests for {@link HiveSplitSource}: split accounting, back-pressure through
 * the {@link SuspendingExecutor} (suspend when the queue is full, resume when
 * it drains), failure propagation, and blocking {@code getNextBatch} reads.
 */
public class TestHiveSplitSource
{
    @Test
    public void testOutstandingSplitCount()
            throws Exception
    {
        SuspendingExecutor suspendingExecutor = createSuspendingExecutor();
        HiveSplitSource hiveSplitSource = new HiveSplitSource("test", 10, suspendingExecutor);

        // add 10 splits
        for (int i = 0; i < 10; i++) {
            hiveSplitSource.addToQueue(new TestSplit(i));
            assertEquals(hiveSplitSource.getOutstandingSplitCount(), i + 1);
        }

        // remove 1 split
        assertEquals(hiveSplitSource.getNextBatch(1).size(), 1);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 9);

        // remove 4 splits
        assertEquals(hiveSplitSource.getNextBatch(4).size(), 4);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 5);

        // try to remove 20 splits, and verify we only got 5
        assertEquals(hiveSplitSource.getNextBatch(20).size(), 5);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 0);
    }

    @Test
    public void testSuspendResume()
            throws Exception
    {
        SuspendingExecutor suspendingExecutor = createSuspendingExecutor();
        HiveSplitSource hiveSplitSource = new HiveSplitSource("test", 10, suspendingExecutor);

        // almost fill the source (capacity 10); executor must stay running
        for (int i = 0; i < 9; i++) {
            hiveSplitSource.addToQueue(new TestSplit(i));
            assertEquals(hiveSplitSource.getOutstandingSplitCount(), i + 1);
            assertFalse(suspendingExecutor.isSuspended());
        }

        // add one more split so the source is now full and verify that the executor is suspended
        hiveSplitSource.addToQueue(new TestSplit(10));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 10);
        assertTrue(suspendingExecutor.isSuspended());

        // remove one split so the source is no longer full and verify the executor is resumed
        assertEquals(hiveSplitSource.getNextBatch(1).size(), 1);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 9);
        assertFalse(suspendingExecutor.isSuspended());

        // add two more splits so the source is now full (over capacity) and verify suspension
        hiveSplitSource.addToQueue(new TestSplit(11));
        hiveSplitSource.addToQueue(new TestSplit(12));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 11);
        assertTrue(suspendingExecutor.isSuspended());

        // remove two splits so the source is no longer full and verify the executor is resumed
        assertEquals(hiveSplitSource.getNextBatch(2).size(), 2);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 9);
        assertFalse(suspendingExecutor.isSuspended());
    }

    @Test
    public void testFail()
            throws Exception
    {
        SuspendingExecutor suspendingExecutor = createSuspendingExecutor();
        HiveSplitSource hiveSplitSource = new HiveSplitSource("test", 10, suspendingExecutor);

        // add some splits
        for (int i = 0; i < 5; i++) {
            hiveSplitSource.addToQueue(new TestSplit(i));
            assertEquals(hiveSplitSource.getOutstandingSplitCount(), i + 1);
        }

        // remove a split and verify
        assertEquals(hiveSplitSource.getNextBatch(1).size(), 1);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // fail source; queued splits are retained but reads must now throw
        hiveSplitSource.fail(new RuntimeException("test"));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // try to remove a split and verify we got the expected exception
        try {
            hiveSplitSource.getNextBatch(1);
            fail("expected RuntimeException");
        }
        catch (RuntimeException e) {
            assertEquals(e.getMessage(), "test");
        }
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // attempt to add another split and verify it does not work
        hiveSplitSource.addToQueue(new TestSplit(99));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // fail source again; only the FIRST failure should stick
        hiveSplitSource.fail(new RuntimeException("another failure"));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // try to remove a split and verify we got the first exception
        try {
            hiveSplitSource.getNextBatch(1);
            fail("expected RuntimeException");
        }
        catch (RuntimeException e) {
            assertEquals(e.getMessage(), "test");
        }
    }

    @Test
    public void testReaderWaitsForSplits()
            throws Exception
    {
        SuspendingExecutor suspendingExecutor = createSuspendingExecutor();
        final HiveSplitSource hiveSplitSource = new HiveSplitSource("test", 10, suspendingExecutor);

        final SettableFuture<Split> splits = SettableFuture.create();

        // create a thread that will get a split
        final CountDownLatch started = new CountDownLatch(1);
        Thread getterThread = new Thread(new Runnable()
        {
            @Override
            public void run()
            {
                try {
                    started.countDown();
                    List<Split> batch = hiveSplitSource.getNextBatch(1);
                    assertEquals(batch.size(), 1);
                    splits.set(batch.get(0));
                }
                catch (Throwable e) {
                    splits.setException(e);
                }
            }
        });
        getterThread.start();

        try {
            // wait for the thread to be started
            assertTrue(started.await(1, TimeUnit.SECONDS));

            // sleep for a bit, and assure the thread is blocked
            TimeUnit.MILLISECONDS.sleep(200);
            assertFalse(splits.isDone());

            // add a split
            hiveSplitSource.addToQueue(new TestSplit(33));

            // wait for thread to get the split.
            // Use assertEquals, not assertSame: getInfo() returns a boxed
            // Integer, and reference identity with an autoboxed literal only
            // "works" inside the JVM's small-integer cache.
            Split split = splits.get(200, TimeUnit.MILLISECONDS);
            assertEquals(split.getInfo(), 33);
        }
        finally {
            // make sure the thread exits (and actually observe it exiting)
            getterThread.interrupt();
            getterThread.join(TimeUnit.SECONDS.toMillis(1));
        }
    }

    /**
     * Returns a {@link SuspendingExecutor} whose delegate refuses to run
     * anything: these tests only inspect {@code isSuspended()} and must never
     * actually execute a task.
     */
    private SuspendingExecutor createSuspendingExecutor()
    {
        return new SuspendingExecutor(new Executor()
        {
            @Override
            public void execute(Runnable command)
            {
                throw new UnsupportedOperationException();
            }
        });
    }

    /**
     * Minimal {@link Split}: the only live datum is the numeric id exposed
     * through {@link #getInfo()}; every other method is unsupported.
     */
    private static class TestSplit
            implements Split
    {
        private final int id;

        private TestSplit(int id)
        {
            this.id = id;
        }

        @Override
        public boolean isRemotelyAccessible()
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public List<HostAddress> getAddresses()
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public Object getInfo()
        {
            return id;
        }
    }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import java.util.Objects;

import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.RegisterImageRequestMarshaller;

/**
 * Request object for the EC2 RegisterImage operation. A mutable bean in the
 * standard SDK style: every property has a setter, a getter, and a fluent
 * {@code with...} variant returning {@code this} for chaining.
 */
public class RegisterImageRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable, DryRunSupportedRequest<RegisterImageRequest> {

    /** The full path to your AMI manifest in Amazon S3 storage. */
    private String imageLocation;

    /**
     * A name for your AMI. Constraints: 3-128 alphanumeric characters,
     * parentheses (()), square brackets ([]), spaces ( ), periods (.),
     * slashes (/), dashes (-), single quotes ('), at-signs (@), or
     * underscores(_).
     */
    private String name;

    /** A description for your AMI. */
    private String description;

    /**
     * The architecture of the AMI. Default: for Amazon EBS-backed AMIs,
     * <code>i386</code>; for instance store-backed AMIs, the architecture
     * specified in the manifest file.
     */
    private String architecture;

    /** The ID of the kernel. */
    private String kernelId;

    /** The ID of the RAM disk. */
    private String ramdiskId;

    /**
     * The name of the root device (for example, <code>/dev/sda1</code> or
     * <code>/dev/xvda</code>).
     */
    private String rootDeviceName;

    /** One or more block device mapping entries (lazily initialized). */
    private com.amazonaws.internal.SdkInternalList<BlockDeviceMapping> blockDeviceMappings;

    /** The type of virtualization. Default: <code>paravirtual</code>. */
    private String virtualizationType;

    /**
     * Set to <code>simple</code> to enable enhanced networking for the AMI and
     * any instances launched from it. There is no way to disable enhanced
     * networking at this time. Supported only for HVM AMIs; specifying it with
     * a PV AMI can make launched instances unreachable.
     */
    private String sriovNetSupport;

    /**
     * Default constructor. Callers should use the setters or fluent
     * {@code with...} methods to initialize the object after creating it.
     */
    public RegisterImageRequest() {
    }

    /**
     * Constructs a new request for the given manifest location.
     *
     * @param imageLocation
     *        The full path to your AMI manifest in Amazon S3 storage.
     */
    public RegisterImageRequest(String imageLocation) {
        setImageLocation(imageLocation);
    }

    /**
     * @param imageLocation
     *        The full path to your AMI manifest in Amazon S3 storage.
     */
    public void setImageLocation(String imageLocation) {
        this.imageLocation = imageLocation;
    }

    /** @return The full path to your AMI manifest in Amazon S3 storage. */
    public String getImageLocation() {
        return this.imageLocation;
    }

    /**
     * Fluent variant of {@link #setImageLocation(String)}.
     *
     * @param imageLocation
     *        The full path to your AMI manifest in Amazon S3 storage.
     * @return this, for method chaining.
     */
    public RegisterImageRequest withImageLocation(String imageLocation) {
        setImageLocation(imageLocation);
        return this;
    }

    /**
     * @param name
     *        A name for your AMI (see {@link #name} for constraints).
     */
    public void setName(String name) {
        this.name = name;
    }

    /** @return The name of the AMI. */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name
     *        A name for your AMI (see {@link #name} for constraints).
     * @return this, for method chaining.
     */
    public RegisterImageRequest withName(String name) {
        setName(name);
        return this;
    }

    /**
     * @param description
     *        A description for your AMI.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /** @return The description of the AMI. */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @param description
     *        A description for your AMI.
     * @return this, for method chaining.
     */
    public RegisterImageRequest withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * @param architecture
     *        The architecture of the AMI.
     * @see ArchitectureValues
     */
    public void setArchitecture(String architecture) {
        this.architecture = architecture;
    }

    /**
     * @return The architecture of the AMI.
     * @see ArchitectureValues
     */
    public String getArchitecture() {
        return this.architecture;
    }

    /**
     * Fluent variant of {@link #setArchitecture(String)}.
     *
     * @param architecture
     *        The architecture of the AMI.
     * @return this, for method chaining.
     * @see ArchitectureValues
     */
    public RegisterImageRequest withArchitecture(String architecture) {
        setArchitecture(architecture);
        return this;
    }

    /**
     * Typed overload: stores {@code architecture.toString()}.
     * (Doc fix: this method is void — the previous Javadoc wrongly claimed it
     * returned {@code this}.)
     *
     * @param architecture
     *        The architecture of the AMI.
     * @see ArchitectureValues
     */
    public void setArchitecture(ArchitectureValues architecture) {
        this.architecture = architecture.toString();
    }

    /**
     * Fluent variant of {@link #setArchitecture(ArchitectureValues)}.
     *
     * @param architecture
     *        The architecture of the AMI.
     * @return this, for method chaining.
     * @see ArchitectureValues
     */
    public RegisterImageRequest withArchitecture(ArchitectureValues architecture) {
        setArchitecture(architecture);
        return this;
    }

    /**
     * @param kernelId
     *        The ID of the kernel.
     */
    public void setKernelId(String kernelId) {
        this.kernelId = kernelId;
    }

    /** @return The ID of the kernel. */
    public String getKernelId() {
        return this.kernelId;
    }

    /**
     * Fluent variant of {@link #setKernelId(String)}.
     *
     * @param kernelId
     *        The ID of the kernel.
     * @return this, for method chaining.
     */
    public RegisterImageRequest withKernelId(String kernelId) {
        setKernelId(kernelId);
        return this;
    }

    /**
     * @param ramdiskId
     *        The ID of the RAM disk.
     */
    public void setRamdiskId(String ramdiskId) {
        this.ramdiskId = ramdiskId;
    }

    /** @return The ID of the RAM disk. */
    public String getRamdiskId() {
        return this.ramdiskId;
    }

    /**
     * Fluent variant of {@link #setRamdiskId(String)}.
     *
     * @param ramdiskId
     *        The ID of the RAM disk.
     * @return this, for method chaining.
     */
    public RegisterImageRequest withRamdiskId(String ramdiskId) {
        setRamdiskId(ramdiskId);
        return this;
    }

    /**
     * @param rootDeviceName
     *        The name of the root device (for example, <code>/dev/sda1</code>
     *        or <code>/dev/xvda</code>).
     */
    public void setRootDeviceName(String rootDeviceName) {
        this.rootDeviceName = rootDeviceName;
    }

    /** @return The name of the root device. */
    public String getRootDeviceName() {
        return this.rootDeviceName;
    }

    /**
     * Fluent variant of {@link #setRootDeviceName(String)}.
     *
     * @param rootDeviceName
     *        The name of the root device.
     * @return this, for method chaining.
     */
    public RegisterImageRequest withRootDeviceName(String rootDeviceName) {
        setRootDeviceName(rootDeviceName);
        return this;
    }

    /**
     * @return The block device mapping entries; never null (lazily initialized
     *         to an empty list).
     */
    public java.util.List<BlockDeviceMapping> getBlockDeviceMappings() {
        if (blockDeviceMappings == null) {
            blockDeviceMappings = new com.amazonaws.internal.SdkInternalList<BlockDeviceMapping>();
        }
        return blockDeviceMappings;
    }

    /**
     * Replaces the block device mappings with a copy of the given collection
     * (or clears them when passed null).
     *
     * @param blockDeviceMappings
     *        One or more block device mapping entries.
     */
    public void setBlockDeviceMappings(
            java.util.Collection<BlockDeviceMapping> blockDeviceMappings) {
        if (blockDeviceMappings == null) {
            this.blockDeviceMappings = null;
            return;
        }
        this.blockDeviceMappings = new com.amazonaws.internal.SdkInternalList<BlockDeviceMapping>(
                blockDeviceMappings);
    }

    /**
     * Appends the given mappings to the existing list (if any). Use
     * {@link #setBlockDeviceMappings(java.util.Collection)} or
     * {@link #withBlockDeviceMappings(java.util.Collection)} to override the
     * existing values instead.
     *
     * @param blockDeviceMappings
     *        One or more block device mapping entries.
     * @return this, for method chaining.
     */
    public RegisterImageRequest withBlockDeviceMappings(
            BlockDeviceMapping... blockDeviceMappings) {
        if (this.blockDeviceMappings == null) {
            setBlockDeviceMappings(new com.amazonaws.internal.SdkInternalList<BlockDeviceMapping>(
                    blockDeviceMappings.length));
        }
        for (BlockDeviceMapping ele : blockDeviceMappings) {
            this.blockDeviceMappings.add(ele);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setBlockDeviceMappings(java.util.Collection)}
     * (replaces, does not append).
     *
     * @param blockDeviceMappings
     *        One or more block device mapping entries.
     * @return this, for method chaining.
     */
    public RegisterImageRequest withBlockDeviceMappings(
            java.util.Collection<BlockDeviceMapping> blockDeviceMappings) {
        setBlockDeviceMappings(blockDeviceMappings);
        return this;
    }

    /**
     * @param virtualizationType
     *        The type of virtualization. Default: <code>paravirtual</code>.
     */
    public void setVirtualizationType(String virtualizationType) {
        this.virtualizationType = virtualizationType;
    }

    /** @return The type of virtualization. */
    public String getVirtualizationType() {
        return this.virtualizationType;
    }

    /**
     * Fluent variant of {@link #setVirtualizationType(String)}.
     *
     * @param virtualizationType
     *        The type of virtualization.
     * @return this, for method chaining.
     */
    public RegisterImageRequest withVirtualizationType(String virtualizationType) {
        setVirtualizationType(virtualizationType);
        return this;
    }

    /**
     * @param sriovNetSupport
     *        Set to <code>simple</code> to enable enhanced networking (see
     *        {@link #sriovNetSupport} for caveats).
     */
    public void setSriovNetSupport(String sriovNetSupport) {
        this.sriovNetSupport = sriovNetSupport;
    }

    /** @return The enhanced-networking setting. */
    public String getSriovNetSupport() {
        return this.sriovNetSupport;
    }

    /**
     * Fluent variant of {@link #setSriovNetSupport(String)}.
     *
     * @param sriovNetSupport
     *        Set to <code>simple</code> to enable enhanced networking.
     * @return this, for method chaining.
     */
    public RegisterImageRequest withSriovNetSupport(String sriovNetSupport) {
        setSriovNetSupport(sriovNetSupport);
        return this;
    }

    /**
     * This method is intended for internal use only. Returns the marshaled
     * request configured with additional parameters to enable operation
     * dry-run.
     */
    @Override
    public Request<RegisterImageRequest> getDryRunRequest() {
        Request<RegisterImageRequest> request = new RegisterImageRequestMarshaller()
                .marshall(this);
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null properties are included; output format is
     * unchanged from the generated implementation.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getImageLocation() != null)
            sb.append("ImageLocation: " + getImageLocation() + ",");
        if (getName() != null)
            sb.append("Name: " + getName() + ",");
        if (getDescription() != null)
            sb.append("Description: " + getDescription() + ",");
        if (getArchitecture() != null)
            sb.append("Architecture: " + getArchitecture() + ",");
        if (getKernelId() != null)
            sb.append("KernelId: " + getKernelId() + ",");
        if (getRamdiskId() != null)
            sb.append("RamdiskId: " + getRamdiskId() + ",");
        if (getRootDeviceName() != null)
            sb.append("RootDeviceName: " + getRootDeviceName() + ",");
        if (getBlockDeviceMappings() != null)
            sb.append("BlockDeviceMappings: " + getBlockDeviceMappings() + ",");
        if (getVirtualizationType() != null)
            sb.append("VirtualizationType: " + getVirtualizationType() + ",");
        if (getSriovNetSupport() != null)
            sb.append("SriovNetSupport: " + getSriovNetSupport());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof (not getClass) preserves the generated SDK semantics;
        // it also rejects null. Comparison goes through the getters so the
        // lazily-initialized block device mapping list behaves as before.
        if (!(obj instanceof RegisterImageRequest))
            return false;
        RegisterImageRequest other = (RegisterImageRequest) obj;
        return Objects.equals(getImageLocation(), other.getImageLocation())
                && Objects.equals(getName(), other.getName())
                && Objects.equals(getDescription(), other.getDescription())
                && Objects.equals(getArchitecture(), other.getArchitecture())
                && Objects.equals(getKernelId(), other.getKernelId())
                && Objects.equals(getRamdiskId(), other.getRamdiskId())
                && Objects.equals(getRootDeviceName(), other.getRootDeviceName())
                && Objects.equals(getBlockDeviceMappings(), other.getBlockDeviceMappings())
                && Objects.equals(getVirtualizationType(), other.getVirtualizationType())
                && Objects.equals(getSriovNetSupport(), other.getSriovNetSupport());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation with null -> 0 as
        // the previous hand-rolled chain, so hash values are unchanged.
        return Objects.hash(getImageLocation(), getName(), getDescription(),
                getArchitecture(), getKernelId(), getRamdiskId(),
                getRootDeviceName(), getBlockDeviceMappings(),
                getVirtualizationType(), getSriovNetSupport());
    }

    @Override
    public RegisterImageRequest clone() {
        return (RegisterImageRequest) super.clone();
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.runtime.commands; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.docgen.BlazeRuleHelpPrinter; import com.google.devtools.build.lib.Constants; import com.google.devtools.build.lib.analysis.BlazeVersionInfo; import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.packages.RuleClass; import com.google.devtools.build.lib.runtime.BlazeCommand; import com.google.devtools.build.lib.runtime.BlazeCommandUtils; import com.google.devtools.build.lib.runtime.BlazeModule; import com.google.devtools.build.lib.runtime.BlazeRuntime; import com.google.devtools.build.lib.runtime.Command; import com.google.devtools.build.lib.runtime.CommandEnvironment; import com.google.devtools.build.lib.util.ExitCode; import com.google.devtools.build.lib.util.io.OutErr; import com.google.devtools.common.options.Converters; import com.google.devtools.common.options.Option; import com.google.devtools.common.options.OptionsBase; import com.google.devtools.common.options.OptionsParser; import com.google.devtools.common.options.OptionsProvider; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; 
import java.util.Set; /** * The 'blaze help' command, which prints all available commands as well as * specific help pages. */ @Command(name = "help", options = { HelpCommand.Options.class }, allowResidue = true, mustRunInWorkspace = false, shortDescription = "Prints help for commands, or the index.", completion = "command|{startup_options,target-syntax,info-keys}", help = "resource:help.txt") public final class HelpCommand implements BlazeCommand { private static final Joiner SPACE_JOINER = Joiner.on(" "); public static class Options extends OptionsBase { @Option(name = "help_verbosity", category = "help", defaultValue = "medium", converter = Converters.HelpVerbosityConverter.class, help = "Select the verbosity of the help command.") public OptionsParser.HelpVerbosity helpVerbosity; @Option(name = "long", abbrev = 'l', defaultValue = "null", category = "help", expansion = {"--help_verbosity", "long"}, help = "Show full description of each option, instead of just its name.") public Void showLongFormOptions; @Option(name = "short", defaultValue = "null", category = "help", expansion = {"--help_verbosity", "short"}, help = "Show only the names of the options, not their types or meanings.") public Void showShortFormOptions; } /** * Returns a map that maps option categories to descriptive help strings for categories that * are not part of the Bazel core. 
*/ private ImmutableMap<String, String> getOptionCategories(BlazeRuntime runtime) { ImmutableMap.Builder<String, String> optionCategoriesBuilder = ImmutableMap.builder(); String name = Constants.PRODUCT_NAME; optionCategoriesBuilder .put("checking", String.format( "Checking options, which control %s's error checking and/or warnings", name)) .put("coverage", String.format( "Options that affect how %s generates code coverage information", name)) .put("experimental", "Experimental options, which control experimental (and potentially risky) features") .put("flags", "Flags options, for passing options to other tools") .put("help", "Help options") .put("host jvm startup", String.format( "Options that affect the startup of the %s server's JVM", name)) .put("misc", "Miscellaneous options") .put("package loading", "Options that specify how to locate packages") .put("query", String.format( "Options affecting the '%s query' dependency query command", name)) .put("run", String.format( "Options specific to '%s run'", name)) .put("semantics", "Semantics options, which affect the build commands and/or output file contents") .put("server startup", String.format( "Startup options, which affect the startup of the %s server", name)) .put("strategy", String.format( "Strategy options, which affect how %s will execute the build", name)) .put("testing", String.format( "Options that affect how %s runs tests", name)) .put("verbosity", String.format( "Verbosity options, which control what %s prints", name)) .put("version", "Version options, for selecting which version of other tools will be used") .put("what", "Output selection options, for determining what to build/test"); for (BlazeModule module : runtime.getBlazeModules()) { optionCategoriesBuilder.putAll(module.getOptionCategories()); } return optionCategoriesBuilder.build(); } @Override public void editOptions(CommandEnvironment env, OptionsParser optionsParser) {} @Override public ExitCode exec(CommandEnvironment env, OptionsProvider 
options) { BlazeRuntime runtime = env.getRuntime(); OutErr outErr = env.getReporter().getOutErr(); Options helpOptions = options.getOptions(Options.class); if (options.getResidue().isEmpty()) { emitBlazeVersionInfo(outErr); emitGenericHelp(runtime, outErr); return ExitCode.SUCCESS; } if (options.getResidue().size() != 1) { env.getReporter().handle(Event.error("You must specify exactly one command")); return ExitCode.COMMAND_LINE_ERROR; } String helpSubject = options.getResidue().get(0); if (helpSubject.equals("startup_options")) { emitBlazeVersionInfo(outErr); emitStartupOptions(outErr, helpOptions.helpVerbosity, runtime, getOptionCategories(runtime)); return ExitCode.SUCCESS; } else if (helpSubject.equals("target-syntax")) { emitBlazeVersionInfo(outErr); emitTargetSyntaxHelp(outErr, getOptionCategories(runtime)); return ExitCode.SUCCESS; } else if (helpSubject.equals("info-keys")) { emitInfoKeysHelp(runtime, outErr); return ExitCode.SUCCESS; } else if (helpSubject.equals("completion")) { emitCompletionHelp(runtime, outErr); return ExitCode.SUCCESS; } BlazeCommand command = runtime.getCommandMap().get(helpSubject); if (command == null) { ConfiguredRuleClassProvider provider = runtime.getRuleClassProvider(); RuleClass ruleClass = provider.getRuleClassMap().get(helpSubject); if (ruleClass != null && ruleClass.isDocumented()) { // There is a rule with a corresponding name outErr.printOut(BlazeRuleHelpPrinter.getRuleDoc(helpSubject, provider)); return ExitCode.SUCCESS; } else { env.getReporter().handle(Event.error( null, "'" + helpSubject + "' is neither a command nor a build rule")); return ExitCode.COMMAND_LINE_ERROR; } } emitBlazeVersionInfo(outErr); outErr.printOut(BlazeCommandUtils.getUsage( command.getClass(), getOptionCategories(runtime), helpOptions.helpVerbosity, runtime.getBlazeModules(), runtime.getRuleClassProvider())); return ExitCode.SUCCESS; } private void emitBlazeVersionInfo(OutErr outErr) { String releaseInfo = 
BlazeVersionInfo.instance().getReleaseName(); String line = String.format("[%s %s]", Constants.PRODUCT_NAME, releaseInfo); outErr.printOut(String.format("%80s\n", line)); } @SuppressWarnings("unchecked") // varargs generic array creation private void emitStartupOptions(OutErr outErr, OptionsParser.HelpVerbosity helpVerbosity, BlazeRuntime runtime, ImmutableMap<String, String> optionCategories) { outErr.printOut( BlazeCommandUtils.expandHelpTopic("startup_options", "resource:startup_options.txt", getClass(), BlazeCommandUtils.getStartupOptions(runtime.getBlazeModules()), optionCategories, helpVerbosity)); } private void emitCompletionHelp(BlazeRuntime runtime, OutErr outErr) { // First startup_options Iterable<BlazeModule> blazeModules = runtime.getBlazeModules(); ConfiguredRuleClassProvider ruleClassProvider = runtime.getRuleClassProvider(); Map<String, BlazeCommand> commandsByName = runtime.getCommandMap(); Set<String> commands = commandsByName.keySet(); outErr.printOutLn("BAZEL_COMMAND_LIST=\"" + SPACE_JOINER.join(commands) + "\""); outErr.printOutLn("BAZEL_INFO_KEYS=\""); for (InfoKey key : InfoKey.values()) { outErr.printOutLn(key.getName()); } outErr.printOutLn("\""); outErr.printOutLn("BAZEL_STARTUP_OPTIONS=\""); Iterable<Class<? 
extends OptionsBase>> options = BlazeCommandUtils.getStartupOptions(blazeModules); outErr.printOut(OptionsParser.newOptionsParser(options).getOptionsCompletion()); outErr.printOutLn("\""); for (String name : commands) { BlazeCommand command = commandsByName.get(name); String varName = name.toUpperCase().replace('-', '_'); Command annotation = command.getClass().getAnnotation(Command.class); if (!annotation.completion().isEmpty()) { outErr.printOutLn("BAZEL_COMMAND_" + varName + "_ARGUMENT=\"" + annotation.completion() + "\""); } options = BlazeCommandUtils.getOptions(command.getClass(), blazeModules, ruleClassProvider); outErr.printOutLn("BAZEL_COMMAND_" + varName + "_FLAGS=\""); outErr.printOut(OptionsParser.newOptionsParser(options).getOptionsCompletion()); outErr.printOutLn("\""); } } private void emitTargetSyntaxHelp(OutErr outErr, ImmutableMap<String, String> optionCategories) { outErr.printOut(BlazeCommandUtils.expandHelpTopic("target-syntax", "resource:target-syntax.txt", getClass(), ImmutableList.<Class<? extends OptionsBase>>of(), optionCategories, OptionsParser.HelpVerbosity.MEDIUM)); } private void emitInfoKeysHelp(BlazeRuntime runtime, OutErr outErr) { for (BlazeModule.InfoItem item : InfoCommand.getInfoItemMap(runtime, OptionsParser.newOptionsParser( ImmutableList.<Class<? 
extends OptionsBase>>of())).values()) { outErr.printOut(String.format("%-23s %s\n", item.getName(), item.getDescription())); } } private void emitGenericHelp(BlazeRuntime runtime, OutErr outErr) { outErr.printOut(String.format("Usage: %s <command> <options> ...\n\n", Constants.PRODUCT_NAME)); outErr.printOut("Available commands:\n"); Map<String, BlazeCommand> commandsByName = runtime.getCommandMap(); List<String> namesInOrder = new ArrayList<>(commandsByName.keySet()); Collections.sort(namesInOrder); for (String name : namesInOrder) { BlazeCommand command = commandsByName.get(name); Command annotation = command.getClass().getAnnotation(Command.class); if (annotation.hidden()) { continue; } String shortDescription = annotation.shortDescription(). replace("%{product}", Constants.PRODUCT_NAME); outErr.printOut(String.format(" %-19s %s\n", name, shortDescription)); } outErr.printOut("\n"); outErr.printOut("Getting more help:\n"); outErr.printOut(String.format(" %s help <command>\n", Constants.PRODUCT_NAME)); outErr.printOut(" Prints help and options for <command>.\n"); outErr.printOut(String.format(" %s help startup_options\n", Constants.PRODUCT_NAME)); outErr.printOut(String.format(" Options for the JVM hosting %s.\n", Constants.PRODUCT_NAME)); outErr.printOut(String.format(" %s help target-syntax\n", Constants.PRODUCT_NAME)); outErr.printOut(" Explains the syntax for specifying targets.\n"); outErr.printOut(String.format(" %s help info-keys\n", Constants.PRODUCT_NAME)); outErr.printOut(" Displays a list of keys used by the info command.\n"); } }
/** * Copyright (c) 2008-2012 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.osedu.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.profile2.tool.pages; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; import javax.servlet.http.Cookie; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.wicket.AttributeModifier; import org.apache.wicket.Component; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.AjaxLink; import org.apache.wicket.ajax.markup.html.form.AjaxButton; import org.apache.wicket.behavior.Behavior; import org.apache.wicket.extensions.ajax.markup.html.IndicatingAjaxButton; import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.CheckBox; import org.apache.wicket.markup.html.form.DropDownChoice; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.form.Radio; import org.apache.wicket.markup.html.form.RadioGroup; import org.apache.wicket.markup.html.form.TextField; import org.apache.wicket.markup.html.link.ExternalLink; import org.apache.wicket.markup.html.link.Link; import 
org.apache.wicket.markup.html.link.PopupSettings; import org.apache.wicket.markup.html.list.ListItem; import org.apache.wicket.markup.html.list.ListView; import org.apache.wicket.markup.html.list.PageableListView; import org.apache.wicket.markup.html.navigation.paging.PagingNavigator; import org.apache.wicket.model.IModel; import org.apache.wicket.model.LoadableDetachableModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.PropertyModel; import org.apache.wicket.model.ResourceModel; import org.apache.wicket.model.StringResourceModel; import org.apache.wicket.util.cookies.CookieUtils; import org.sakaiproject.profile2.model.Person; import org.sakaiproject.profile2.model.ProfileSearchTerm; import org.sakaiproject.profile2.tool.components.HashMapChoiceRenderer; import org.sakaiproject.profile2.tool.components.IconWithClueTip; import org.sakaiproject.profile2.tool.components.ProfileImage; import org.sakaiproject.profile2.tool.components.ProfileStatusRenderer; import org.sakaiproject.profile2.tool.models.FriendAction; import org.sakaiproject.profile2.tool.models.StringModel; import org.sakaiproject.profile2.tool.pages.windows.AddFriend; import org.sakaiproject.profile2.types.PrivacyType; import org.sakaiproject.profile2.util.ProfileConstants; import org.sakaiproject.profile2.util.ProfileUtils; import org.sakaiproject.site.api.Site; public class MySearch extends BasePage { private List<Person> results = new ArrayList<Person>(); private static final Logger log = LoggerFactory.getLogger(MySearch.class); private WebMarkupContainer numSearchResultsContainer; private Label numSearchResults; private WebMarkupContainer resultsContainer; private AjaxButton clearButton; private AjaxButton clearHistoryButton; private TextField<String> searchField; private RadioGroup<String> searchTypeRadioGroup; private CheckBox connectionsCheckBox; private CheckBox worksiteCheckBox; private DropDownChoice worksiteChoice; // Used independently of search history for current 
search, and // transient because Cookie isn't serializable private transient Cookie searchCookie = null; public MySearch() { log.debug("MySearch()"); disableLink(searchLink); //check for current search cookie CookieUtils utils = new CookieUtils(); searchCookie = utils.getCookie(ProfileConstants.SEARCH_COOKIE); //setup model to store the actions in the modal windows final FriendAction friendActionModel = new FriendAction(); //get current user info final String currentUserUuid = sakaiProxy.getCurrentUserId(); final String currentUserType = sakaiProxy.getUserType(currentUserUuid); /* * Combined search form */ //heading Label searchHeading = new Label("searchHeading", new ResourceModel("heading.search")); add(searchHeading); //setup form final StringModel searchStringModel = new StringModel(); Form<StringModel> searchForm = new Form<StringModel>("searchForm", new Model<StringModel>(searchStringModel)); searchForm.setOutputMarkupId(true); //search field searchForm.add(new Label("searchLabel", new ResourceModel("text.search.terms.label"))); searchField = new TextField<String>("searchField", new PropertyModel<String>(searchStringModel, "string")); searchField.setRequired(true); searchField.setMarkupId("searchinput"); searchField.setOutputMarkupId(true); searchForm.add(searchField); searchForm.add(new IconWithClueTip("searchToolTip", ProfileConstants.INFO_IMAGE, new ResourceModel("text.search.terms.tooltip"))); //by name or by interest radio group searchTypeRadioGroup = new RadioGroup<String>("searchTypeRadioGroup"); // so we can repaint after clicking on search history links searchTypeRadioGroup.setOutputMarkupId(true); searchTypeRadioGroup.setRenderBodyOnly(false); Radio<String> searchTypeRadioName = new Radio<String>("searchTypeName", new Model<String>(ProfileConstants.SEARCH_TYPE_NAME)); searchTypeRadioName.setMarkupId("searchtypenameinput"); searchTypeRadioName.setOutputMarkupId(true); searchTypeRadioName.add(new AttributeModifier("title", true, new 
ResourceModel("text.search.byname.tooltip"))); searchTypeRadioGroup.add(searchTypeRadioName); Radio<String> searchTypeRadioInterest = new Radio<String>("searchTypeInterest", new Model<String>(ProfileConstants.SEARCH_TYPE_INTEREST)); searchTypeRadioInterest.setMarkupId("searchtypeinterestinput"); searchTypeRadioInterest.setOutputMarkupId(true); searchTypeRadioInterest.add(new AttributeModifier("title", true, new ResourceModel("text.search.byinterest.tooltip"))); searchTypeRadioGroup.add(searchTypeRadioInterest); searchTypeRadioGroup.add(new Label("searchTypeNameLabel", new ResourceModel("text.search.byname.label"))); searchTypeRadioGroup.add(new Label("searchTypeInterestLabel", new ResourceModel("text.search.byinterest.label"))); searchForm.add(searchTypeRadioGroup); searchForm.add(new Label("connectionsLabel", new ResourceModel("text.search.include.connections"))); // model is true (include connections by default) connectionsCheckBox = new CheckBox("connectionsCheckBox", new Model<Boolean>(true)); connectionsCheckBox.setMarkupId("includeconnectionsinput"); connectionsCheckBox.setOutputMarkupId(true); //hide if connections disabled globally connectionsCheckBox.setVisible(sakaiProxy.isConnectionsEnabledGlobally()); searchForm.add(connectionsCheckBox); final List<Site> worksites = sakaiProxy.getUserSites(); final boolean hasWorksites = worksites.size() > 0; searchForm.add(new Label("worksiteLabel", new ResourceModel("text.search.include.worksite"))); // model is false (include all worksites by default) worksiteCheckBox = new CheckBox("worksiteCheckBox", new Model<Boolean>(false)); worksiteCheckBox.setMarkupId("limittositeinput"); worksiteCheckBox.setOutputMarkupId(true); worksiteCheckBox.setEnabled(hasWorksites); searchForm.add(worksiteCheckBox); final IModel<String> defaultWorksiteIdModel; if (hasWorksites) { defaultWorksiteIdModel = new Model<String>(worksites.get(0).getId()); } else { defaultWorksiteIdModel = new ResourceModel("text.search.no.worksite"); } final 
LinkedHashMap<String, String> worksiteMap = new LinkedHashMap<String, String>(); if (hasWorksites) { for (Site worksite : worksites) { worksiteMap.put(worksite.getId(), worksite.getTitle()); } } else { worksiteMap.put(defaultWorksiteIdModel.getObject(), defaultWorksiteIdModel.getObject()); } IModel worksitesModel = new Model() { public ArrayList<String> getObject() { return new ArrayList<String>(worksiteMap.keySet()); } }; worksiteChoice = new DropDownChoice("worksiteChoice", defaultWorksiteIdModel, worksitesModel, new HashMapChoiceRenderer(worksiteMap)); worksiteChoice.setMarkupId("worksiteselect"); worksiteChoice.setOutputMarkupId(true); worksiteChoice.setNullValid(false); worksiteChoice.setEnabled(hasWorksites); searchForm.add(worksiteChoice); /* * * RESULTS * */ //search results label/container numSearchResultsContainer = new WebMarkupContainer("numSearchResultsContainer"); numSearchResultsContainer.setOutputMarkupPlaceholderTag(true); numSearchResults = new Label("numSearchResults"); numSearchResults.setOutputMarkupId(true); numSearchResults.setEscapeModelStrings(false); numSearchResultsContainer.add(numSearchResults); //clear results button Form<Void> clearResultsForm = new Form<Void>("clearResults"); clearResultsForm.setOutputMarkupPlaceholderTag(true); clearButton = new AjaxButton("clearButton", clearResultsForm) { private static final long serialVersionUID = 1L; protected void onSubmit(AjaxRequestTarget target, Form<?> form) { // clear cookie if present if (null != searchCookie) { CookieUtils utils = new CookieUtils(); utils.remove(ProfileConstants.SEARCH_COOKIE); } //clear the fields, hide self, then repaint searchField.clearInput(); searchField.updateModel(); numSearchResultsContainer.setVisible(false); resultsContainer.setVisible(false); clearButton.setVisible(false); target.add(searchField); target.add(numSearchResultsContainer); target.add(resultsContainer); target.add(this); } }; clearButton.setOutputMarkupPlaceholderTag(true); if (null == 
searchCookie) { clearButton.setVisible(false); //invisible until we have something to clear } clearButton.setModel(new ResourceModel("button.search.clear")); clearResultsForm.add(clearButton); numSearchResultsContainer.add(clearResultsForm); add(numSearchResultsContainer); // model to wrap search results LoadableDetachableModel<List<Person>> resultsModel = new LoadableDetachableModel<List<Person>>(){ private static final long serialVersionUID = 1L; protected List<Person> load() { return results; } }; //container which wraps list resultsContainer = new WebMarkupContainer("searchResultsContainer"); resultsContainer.setOutputMarkupPlaceholderTag(true); if (null == searchCookie) { resultsContainer.setVisible(false); //hide initially } //connection window final ModalWindow connectionWindow = new ModalWindow("connectionWindow"); //search results final PageableListView<Person> resultsListView = new PageableListView<Person>("searchResults", resultsModel, sakaiProxy.getMaxSearchResultsPerPage()) { private static final long serialVersionUID = 1L; protected void populateItem(final ListItem<Person> item) { Person person = (Person)item.getModelObject(); //get basic values final String userUuid = person.getUuid(); final String displayName = person.getDisplayName(); final String userType = person.getType(); //get connection status int connectionStatus = connectionsLogic.getConnectionStatus(currentUserUuid, userUuid); boolean friend = (connectionStatus == ProfileConstants.CONNECTION_CONFIRMED) ? 
true : false; //image wrapper, links to profile Link<String> friendItem = new Link<String>("searchResultPhotoWrap") { private static final long serialVersionUID = 1L; public void onClick() { setResponsePage(new ViewProfile(userUuid)); } }; //image ProfileImage searchResultPhoto = new ProfileImage("searchResultPhoto", new Model<String>(userUuid)); searchResultPhoto.setSize(ProfileConstants.PROFILE_IMAGE_THUMBNAIL); friendItem.add(searchResultPhoto); item.add(friendItem); //name and link to profile (if allowed or no link) Link<String> profileLink = new Link<String>("searchResultProfileLink", new Model<String>(userUuid)) { private static final long serialVersionUID = 1L; public void onClick() { //if user found themself, go to own profile, else show other profile if(userUuid.equals(currentUserUuid)) { setResponsePage(new MyProfile()); } else { //gets userUuid of other user from the link's model setResponsePage(new ViewProfile((String)getModelObject())); } } }; profileLink.add(new Label("searchResultName", displayName)); item.add(profileLink); //status component ProfileStatusRenderer status = new ProfileStatusRenderer("searchResultStatus", person, "search-result-status-msg", "search-result-status-date") { @Override public boolean isVisible(){ return sakaiProxy.isProfileStatusEnabled(); } }; status.setOutputMarkupId(true); item.add(status); /* ACTIONS */ boolean isFriendsListVisible = privacyLogic.isActionAllowed(userUuid, currentUserUuid, PrivacyType.PRIVACY_OPTION_MYFRIENDS); boolean isConnectionAllowed = sakaiProxy.isConnectionAllowedBetweenUserTypes(userType, currentUserType); //ADD CONNECTION LINK final WebMarkupContainer c1 = new WebMarkupContainer("connectionContainer"); c1.setOutputMarkupId(true); if(!isConnectionAllowed && !sakaiProxy.isConnectionsEnabledGlobally()){ //add blank components - TODO turn this into an EmptyLink component AjaxLink<Void> emptyLink = new AjaxLink<Void>("connectionLink"){ private static final long serialVersionUID = 1L; public void 
onClick(AjaxRequestTarget target) {} }; emptyLink.add(new Label("connectionLabel")); c1.add(emptyLink); c1.setVisible(false); } else { //render the link final Label connectionLabel = new Label("connectionLabel"); connectionLabel.setOutputMarkupId(true); final AjaxLink<String> connectionLink = new AjaxLink<String>("connectionLink", new Model<String>(userUuid)) { private static final long serialVersionUID = 1L; public void onClick(AjaxRequestTarget target) { //get this item, reinit some values and set content for modal final String userUuid = (String)getModelObject(); connectionWindow.setContent(new AddFriend(connectionWindow.getContentId(), connectionWindow, friendActionModel, currentUserUuid, userUuid)); // connection modal window handler connectionWindow.setWindowClosedCallback(new ModalWindow.WindowClosedCallback() { private static final long serialVersionUID = 1L; public void onClose(AjaxRequestTarget target){ if(friendActionModel.isRequested()) { connectionLabel.setDefaultModel(new ResourceModel("text.friend.requested")); add(new AttributeModifier("class", true, new Model<String>("instruction icon connection-request"))); setEnabled(false); target.add(c1); } } }); //in preparation for the window being closed, update the text. 
this will only //be put into effect if its a successful model update from the window close //connectionLabel.setModel(new ResourceModel("text.friend.requested")); //this.add(new AttributeModifier("class", true, new Model("instruction"))); //this.setEnabled(false); //friendActionModel.setUpdateThisComponentOnSuccess(this); connectionWindow.show(target); target.appendJavaScript("fixWindowVertical();"); } }; connectionLink.add(connectionLabel); //setup 'add connection' link if(StringUtils.equals(userUuid, currentUserUuid)) { connectionLabel.setDefaultModel(new ResourceModel("text.friend.self")); connectionLink.add(new AttributeModifier("class", true, new Model<String>("instruction icon profile"))); connectionLink.setEnabled(false); } else if(friend) { connectionLabel.setDefaultModel(new ResourceModel("text.friend.confirmed")); connectionLink.add(new AttributeModifier("class", true, new Model<String>("instruction icon connection-confirmed"))); connectionLink.setEnabled(false); } else if (connectionStatus == ProfileConstants.CONNECTION_REQUESTED) { connectionLabel.setDefaultModel(new ResourceModel("text.friend.requested")); connectionLink.add(new AttributeModifier("class", true, new Model<String>("instruction icon connection-request"))); connectionLink.setEnabled(false); } else if (connectionStatus == ProfileConstants.CONNECTION_INCOMING) { connectionLabel.setDefaultModel(new ResourceModel("text.friend.pending")); connectionLink.add(new AttributeModifier("class", true, new Model<String>("instruction icon connection-request"))); connectionLink.setEnabled(false); } else { connectionLabel.setDefaultModel(new ResourceModel("link.friend.add")); } connectionLink.setOutputMarkupId(true); c1.add(connectionLink); } item.add(c1); //VIEW FRIENDS LINK WebMarkupContainer c2 = new WebMarkupContainer("viewFriendsContainer"); c2.setOutputMarkupId(true); final AjaxLink<String> viewFriendsLink = new AjaxLink<String>("viewFriendsLink") { private static final long serialVersionUID = 1L; 
public void onClick(AjaxRequestTarget target) { //if user found themself, go to MyFriends, else, ViewFriends if(userUuid.equals(currentUserUuid)) { setResponsePage(new MyFriends()); } else { setResponsePage(new ViewFriends(userUuid)); } } }; final Label viewFriendsLabel = new Label("viewFriendsLabel", new ResourceModel("link.view.friends")); viewFriendsLink.add(viewFriendsLabel); //hide if not allowed if(!isFriendsListVisible && !sakaiProxy.isConnectionsEnabledGlobally()) { viewFriendsLink.setEnabled(false); c2.setVisible(false); } viewFriendsLink.setOutputMarkupId(true); c2.add(viewFriendsLink); item.add(c2); WebMarkupContainer c3 = new WebMarkupContainer("emailContainer"); c3.setOutputMarkupId(true); ExternalLink emailLink = new ExternalLink("emailLink", "mailto:" + person.getProfile().getEmail(), new ResourceModel("profile.email").getObject()); c3.add(emailLink); if (StringUtils.isBlank(person.getProfile().getEmail()) || false == privacyLogic.isActionAllowed(person.getUuid(), currentUserUuid, PrivacyType.PRIVACY_OPTION_CONTACTINFO)) { c3.setVisible(false); } item.add(c3); WebMarkupContainer c4 = new WebMarkupContainer("websiteContainer"); c4.setOutputMarkupId(true); // TODO home page, university profile URL or academic/research URL (see PRFL-35) ExternalLink websiteLink = new ExternalLink("websiteLink", person.getProfile() .getHomepage(), new ResourceModel( "profile.homepage").getObject()).setPopupSettings(new PopupSettings()); c4.add(websiteLink); if (StringUtils.isBlank(person.getProfile().getHomepage()) || false == privacyLogic.isActionAllowed(person.getUuid(), currentUserUuid, PrivacyType.PRIVACY_OPTION_CONTACTINFO)) { c4.setVisible(false); } item.add(c4); // TODO personal, academic or business (see PRFL-35) if (true == privacyLogic.isActionAllowed( person.getUuid(), currentUserUuid, PrivacyType.PRIVACY_OPTION_BASICINFO)) { item.add(new Label("searchResultSummary", StringUtils.abbreviate(ProfileUtils.stripHtml( person.getProfile().getPersonalSummary()), 
200))); } else { item.add(new Label("searchResultSummary", "")); } } }; resultsListView.add(new MySearchCookieBehavior(resultsListView)); resultsContainer.add(resultsListView); final PagingNavigator searchResultsNavigator = new PagingNavigator("searchResultsNavigator", resultsListView); searchResultsNavigator.setOutputMarkupId(true); searchResultsNavigator.setVisible(false); resultsContainer.add(searchResultsNavigator); add(connectionWindow); //add results container add(resultsContainer); /* * SEARCH HISTORY */ final WebMarkupContainer searchHistoryContainer = new WebMarkupContainer("searchHistoryContainer"); searchHistoryContainer.setOutputMarkupPlaceholderTag(true); Label searchHistoryLabel = new Label("searchHistoryLabel", new ResourceModel("text.search.history")); searchHistoryContainer.add(searchHistoryLabel); IModel<List<ProfileSearchTerm>> searchHistoryModel = new LoadableDetachableModel<List<ProfileSearchTerm>>() { private static final long serialVersionUID = 1L; @Override protected List<ProfileSearchTerm> load() { List<ProfileSearchTerm> searchHistory = searchLogic.getSearchHistory(currentUserUuid); if (null == searchHistory) { return new ArrayList<ProfileSearchTerm>(); } else { return searchHistory; } } }; ListView<ProfileSearchTerm> searchHistoryList = new ListView<ProfileSearchTerm>("searchHistoryList", searchHistoryModel) { private static final long serialVersionUID = 1L; @Override protected void populateItem(final ListItem<ProfileSearchTerm> item) { AjaxLink<String> link = new AjaxLink<String>("previousSearchLink") { private static final long serialVersionUID = 1L; @Override public void onClick(AjaxRequestTarget target) { if (null != target) { // post view event sakaiProxy.postEvent(ProfileConstants.EVENT_SEARCH_BY_NAME, "/profile/"+currentUserUuid, false); ProfileSearchTerm searchTerm = item.getModelObject(); // this will update its position in list searchLogic.addSearchTermToHistory(currentUserUuid, searchTerm); 
searchStringModel.setString(searchTerm.getSearchTerm()); searchTypeRadioGroup.setModel(new Model<String>(searchTerm.getSearchType())); connectionsCheckBox.setModel(new Model<Boolean>(searchTerm.isConnections())); if (null == searchTerm.getWorksite()) { worksiteCheckBox.setModel(new Model<Boolean>(false)); worksiteChoice.setModel(new Model(defaultWorksiteIdModel)); } else { worksiteCheckBox.setModel(new Model<Boolean>(true)); worksiteChoice.setModel(new Model(searchTerm.getWorksite())); } setSearchCookie(searchTerm.getSearchType(), searchTerm.getSearchTerm(), searchTerm.getSearchPageNumber(), searchTerm.isConnections(), searchTerm.getWorksite()); if (ProfileConstants.SEARCH_TYPE_NAME.equals(searchTerm.getSearchType())) { searchByName(resultsListView, searchResultsNavigator, searchHistoryContainer, target, searchTerm.getSearchTerm(), searchTerm.isConnections(), searchTerm.getWorksite()); } else if (ProfileConstants.SEARCH_TYPE_INTEREST.equals(searchTerm.getSearchType())) { searchByInterest(resultsListView, searchResultsNavigator, searchHistoryContainer, target, searchTerm.getSearchTerm(), searchTerm.isConnections(), searchTerm.getWorksite()); } } } }; link.add(new Label("previousSearchLabel", item.getModelObject().getSearchTerm())); item.add(link); } }; searchHistoryContainer.add(searchHistoryList); add(searchHistoryContainer); if (null == searchLogic.getSearchHistory(currentUserUuid)) { searchHistoryContainer.setVisible(false); } //clear button Form<Void> clearHistoryForm = new Form<Void>("clearHistory"); clearHistoryForm.setOutputMarkupPlaceholderTag(true); clearHistoryButton = new AjaxButton("clearHistoryButton", clearHistoryForm) { private static final long serialVersionUID = 1L; protected void onSubmit(AjaxRequestTarget target, Form<?> form) { searchLogic.clearSearchHistory(currentUserUuid); //clear the fields, hide self, then repaint searchField.clearInput(); searchField.updateModel(); searchHistoryContainer.setVisible(false); 
clearHistoryButton.setVisible(false); target.add(searchField); target.add(searchHistoryContainer); target.add(this); } }; clearHistoryButton.setOutputMarkupPlaceholderTag(true); if (null == searchLogic.getSearchHistory(currentUserUuid)) { clearHistoryButton.setVisible(false); //invisible until we have something to clear } clearHistoryButton.setModel(new ResourceModel("button.search.history.clear")); clearHistoryForm.add(clearHistoryButton); searchHistoryContainer.add(clearHistoryForm); /* * Combined search submit */ IndicatingAjaxButton searchSubmitButton = new IndicatingAjaxButton("searchSubmit", searchForm) { private static final long serialVersionUID = 1L; protected void onSubmit(AjaxRequestTarget target, Form<?> form) { if(target != null) { //get the model and text entered StringModel model = (StringModel) form.getModelObject(); //PRFL-811 - dont strip this down, we will lose i18n chars. //And there is no XSS risk since its only for the current user. String searchText = model.getString(); //get search type String searchType = searchTypeRadioGroup.getModelObject(); log.debug("MySearch search by " + searchType + ": " + searchText); if(StringUtils.isBlank(searchText)){ return; } // save search terms ProfileSearchTerm searchTerm = new ProfileSearchTerm(); searchTerm.setUserUuid(currentUserUuid); searchTerm.setSearchType(searchType); searchTerm.setSearchTerm(searchText); searchTerm.setSearchPageNumber(0); searchTerm.setSearchDate(new Date()); searchTerm.setConnections(connectionsCheckBox.getModelObject()); // set to worksite or empty depending on value of checkbox searchTerm.setWorksite((worksiteCheckBox.getModelObject() == true) ? 
worksiteChoice.getValue() : null); searchLogic.addSearchTermToHistory(currentUserUuid, searchTerm); // set cookie for current search (page 0 when submitting new search) setSearchCookie(searchTerm.getSearchType(), URLEncoder.encode(searchTerm.getSearchTerm()), searchTerm.getSearchPageNumber(), searchTerm.isConnections(), searchTerm.getWorksite()); if (ProfileConstants.SEARCH_TYPE_NAME.equals(searchType)) { searchByName(resultsListView, searchResultsNavigator, searchHistoryContainer, target, searchTerm.getSearchTerm(), searchTerm.isConnections(), searchTerm.getWorksite()); //post view event sakaiProxy.postEvent(ProfileConstants.EVENT_SEARCH_BY_NAME, "/profile/"+currentUserUuid, false); } else if (ProfileConstants.SEARCH_TYPE_INTEREST.equals(searchType)) { searchByInterest(resultsListView, searchResultsNavigator, searchHistoryContainer, target, searchTerm.getSearchTerm(), searchTerm.isConnections(), searchTerm.getWorksite()); //post view event sakaiProxy.postEvent(ProfileConstants.EVENT_SEARCH_BY_INTEREST, "/profile/"+currentUserUuid, false); } } } }; searchSubmitButton.setModel(new ResourceModel("button.search.generic")); searchForm.add(searchSubmitButton); add(searchForm); if (null != searchCookie) { String searchString = getCookieSearchString(searchCookie.getValue()); searchStringModel.setString(searchString); Boolean filterConnections = getCookieFilterConnections(searchCookie.getValue()); String worksiteId = getCookieFilterWorksite(searchCookie.getValue()); Boolean filterWorksite = (null == worksiteId) ? false : true; connectionsCheckBox.setModel(new Model<Boolean>(filterConnections)); worksiteCheckBox.setModel(new Model<Boolean>(filterWorksite)); worksiteChoice.setModel(new Model((null == worksiteId) ? 
defaultWorksiteIdModel : worksiteId));

			// Restore the previous search (type prefix decides which search runs) and
			// re-execute it with a null AjaxRequestTarget (no partial repaint on page load).
			if (searchCookie.getValue().startsWith(ProfileConstants.SEARCH_TYPE_NAME)) {
				searchTypeRadioGroup.setModel(new Model<String>(ProfileConstants.SEARCH_TYPE_NAME));
				searchByName(resultsListView, searchResultsNavigator, searchHistoryContainer, null, searchString, filterConnections, worksiteId);
			} else if (searchCookie.getValue().startsWith(ProfileConstants.SEARCH_TYPE_INTEREST)) {
				searchTypeRadioGroup.setModel(new Model<String>(ProfileConstants.SEARCH_TYPE_INTEREST));
				searchByInterest(resultsListView, searchResultsNavigator, searchHistoryContainer, null, searchString, filterConnections, worksiteId);
			}
		} else {
			// default search type is name
			searchTypeRadioGroup.setModel(new Model<String>(ProfileConstants.SEARCH_TYPE_NAME));
		}
	}

	/**
	 * Runs a name/email search and updates the results list, the "number of results"
	 * label and the paging navigator accordingly.
	 *
	 * @param resultsListView         list view to page through the results
	 * @param searchResultsNavigator  paging navigator, shown only for multi-page results
	 * @param searchHistoryContainer  history panel, made visible after an AJAX-driven search
	 * @param target                  AJAX target to repaint into; pass null when replaying a
	 *                                search from the cookie (page render does the painting)
	 * @param searchTerm              raw search text (stripped of HTML only for display)
	 * @param connections             limit results to the current user's connections
	 * @param worksiteId              limit results to one worksite, or null for all
	 */
	// use null target when using cookie
	private void searchByName(
			final PageableListView<Person> resultsListView,
			final PagingNavigator searchResultsNavigator,
			final WebMarkupContainer searchHistoryContainer,
			AjaxRequestTarget target, String searchTerm, boolean connections, String worksiteId) {

		//search both UDP and SakaiPerson for matches.
		results = new ArrayList<Person>(searchLogic.findUsersByNameOrEmail(searchTerm, connections, worksiteId));
		Collections.sort(results);

		int numResults = results.size();
		int maxResults = sakaiProxy.getMaxSearchResults();
		int maxResultsPerPage = sakaiProxy.getMaxSearchResultsPerPage();

		// set current page if previously-viewed search
		int currentPage = getCurrentPageNumber();

		//show the label wrapper
		numSearchResultsContainer.setVisible(true);

		//text
		//Strip the chars for display purposes
		String cleanedSearchTerm = ProfileUtils.stripAndCleanHtml(searchTerm);

		// Pick the label and component visibility for each result-count case:
		// none / exactly one / capped at the configured maximum / paged / single page.
		if(numResults == 0) {
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.byname.no.results", null, new Object[]{ cleanedSearchTerm } ));
			resultsContainer.setVisible(false);
			searchResultsNavigator.setVisible(false);
		} else if (numResults == 1) {
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.byname.one.result", null, new Object[]{ cleanedSearchTerm } ));
			resultsContainer.setVisible(true);
			searchResultsNavigator.setVisible(false);
		} else if (numResults == maxResults) {
			// hit the hard cap, so there may be more matches than we can show
			resultsListView.setCurrentPage(currentPage);
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.toomany.results", null, new Object[]{ cleanedSearchTerm, maxResults, maxResults } ));
			resultsContainer.setVisible(true);
			searchResultsNavigator.setVisible(true);
		} else if (numResults > maxResultsPerPage) {
			resultsListView.setCurrentPage(currentPage);
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.byname.paged.results", null, new Object[]{ numResults, resultsListView.getViewSize(), cleanedSearchTerm } ));
			resultsContainer.setVisible(true);
			searchResultsNavigator.setVisible(true);
		} else {
			resultsListView.setCurrentPage(currentPage);
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.byname.all.results", null, new Object[]{ numResults, cleanedSearchTerm } ));
			resultsContainer.setVisible(true);
			searchResultsNavigator.setVisible(false);
		}

		// Null target means "replaying from cookie during page render" — skip AJAX repaint.
		if (null != target) {
			//repaint components
			target.add(searchField);
			target.add(searchTypeRadioGroup);
			target.add(connectionsCheckBox);
			target.add(worksiteCheckBox);
			target.add(worksiteChoice);
			target.add(clearButton);
			target.add(numSearchResultsContainer);
			clearButton.setVisible(true);
			target.add(resultsContainer);
			clearHistoryButton.setVisible(true);
			searchHistoryContainer.setVisible(true);
			target.add(searchHistoryContainer);
			target.appendJavaScript("setMainFrameHeight(window.name);");
		}
	}

	/**
	 * Runs an interest search and updates the results list, the "number of results"
	 * label and the paging navigator. Mirrors {@code searchByName} except for the
	 * search call and the resource-bundle keys used for the labels.
	 *
	 * @param resultsListView         list view to page through the results
	 * @param searchResultsNavigator  paging navigator, shown only for multi-page results
	 * @param searchHistoryContainer  history panel, made visible after an AJAX-driven search
	 * @param target                  AJAX target to repaint into; null when replaying from the cookie
	 * @param searchTerm              raw search text (stripped of HTML only for display)
	 * @param connections             limit results to the current user's connections
	 * @param worksiteId              limit results to one worksite, or null for all
	 */
	// use null target when using cookie
	private void searchByInterest(
			final PageableListView<Person> resultsListView,
			final PagingNavigator searchResultsNavigator,
			WebMarkupContainer searchHistoryContainer,
			AjaxRequestTarget target, String searchTerm, boolean connections, String worksiteId) {

		//search SakaiPerson for matches
		results = new ArrayList<Person>(searchLogic.findUsersByInterest(searchTerm, connections, worksiteId));
		Collections.sort(results);

		int numResults = results.size();
		int maxResults = sakaiProxy.getMaxSearchResults();
		int maxResultsPerPage = sakaiProxy.getMaxSearchResultsPerPage();

		// set current page if previously-viewed search
		int currentPage = getCurrentPageNumber();

		//show the label wrapper
		numSearchResultsContainer.setVisible(true);

		//text
		//Strip the chars for display purposes
		String cleanedSearchTerm = ProfileUtils.stripAndCleanHtml(searchTerm);

		// Same result-count cases as searchByName, with "byinterest" label keys.
		if(numResults == 0) {
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.byinterest.no.results", null, new Object[]{ cleanedSearchTerm } ));
			resultsContainer.setVisible(false);
			searchResultsNavigator.setVisible(false);
		} else if (numResults == 1) {
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.byinterest.one.result", null, new Object[]{ cleanedSearchTerm } ));
			resultsContainer.setVisible(true);
			searchResultsNavigator.setVisible(false);
		} else if (numResults == maxResults) {
			// hit the hard cap, so there may be more matches than we can show
			resultsListView.setCurrentPage(currentPage);
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.toomany.results", null, new Object[]{ cleanedSearchTerm, maxResults, maxResults } ));
			resultsContainer.setVisible(true);
			searchResultsNavigator.setVisible(true);
		} else if (numResults > maxResultsPerPage) {
			resultsListView.setCurrentPage(currentPage);
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.byinterest.paged.results", null, new Object[]{ numResults, resultsListView.getViewSize(), cleanedSearchTerm } ));
			resultsContainer.setVisible(true);
			searchResultsNavigator.setVisible(true);
		} else {
			resultsListView.setCurrentPage(currentPage);
			numSearchResults.setDefaultModel(new StringResourceModel("text.search.byinterest.all.results", null, new Object[]{ numResults, cleanedSearchTerm } ));
			resultsContainer.setVisible(true);
			searchResultsNavigator.setVisible(false);
		}

		// Null target means "replaying from cookie during page render" — skip AJAX repaint.
		if (null != target) {
			//repaint components
			target.add(searchField);
			target.add(searchTypeRadioGroup);
			target.add(connectionsCheckBox);
			target.add(worksiteCheckBox);
			target.add(worksiteChoice);
			target.add(clearButton);
			target.add(numSearchResultsContainer);
			clearButton.setVisible(true);
			target.add(resultsContainer);
			clearHistoryButton.setVisible(true);
			searchHistoryContainer.setVisible(true);
			target.add(searchHistoryContainer);
			target.appendJavaScript("setMainFrameHeight(window.name);");
		}
	}

	/**
	 * @return the page number stored in the search cookie, or 0 when there is no cookie.
	 */
	private int getCurrentPageNumber() {
		if (null == searchCookie) {
			return 0;
		} else {
			return getCookiePageNumber();
		}
	}

	/**
	 * Parses the page number out of the cookie value: the text between the PAGE marker
	 * and the SEARCH marker. Assumes the cookie was written by setSearchCookie.
	 */
	private int getCookiePageNumber() {
		return Integer.parseInt(searchCookie.getValue().substring(
				searchCookie.getValue().indexOf(ProfileConstants.SEARCH_COOKIE_VALUE_PAGE_MARKER) + 1,
				searchCookie.getValue().indexOf(ProfileConstants.SEARCH_COOKIE_VALUE_SEARCH_MARKER)));
	}

	// NOTE(review): intentionally a no-op until the cookie handling below is sorted out.
	private void updatePageNumber(long l, String cookieString) {
		/* TODO update the cookies
		setSearchCookie(getCookieSearchType(cookieString), getCookieSearchString(cookieString), l, getCookieFilterConnections(cookieString), getCookieFilterWorksite(cookieString));
		*/
	}

	/**
	 * Builds the session search cookie. Value layout (in order):
	 * typePrefix, CONNECTIONS marker + boolean, WORKSITE marker + worksiteId (the literal
	 * string "null" when absent), PAGE marker + page number, SEARCH marker + search text.
	 * The parser methods below rely on exactly this ordering.
	 */
	private void setSearchCookie(String searchCookieValuePrefix, String searchText, int searchPageNumber, boolean connections, String worksiteId) {
		searchCookie = new Cookie(
				ProfileConstants.SEARCH_COOKIE,
				searchCookieValuePrefix +
				ProfileConstants.SEARCH_COOKIE_VALUE_CONNECTIONS_MARKER + connections +
				ProfileConstants.SEARCH_COOKIE_VALUE_WORKSITE_MARKER + worksiteId +
				ProfileConstants.SEARCH_COOKIE_VALUE_PAGE_MARKER + searchPageNumber +
				ProfileConstants.SEARCH_COOKIE_VALUE_SEARCH_MARKER + searchText);
		// don't persist indefinitely
		searchCookie.setMaxAge(-1);
		//getWebRequestCycle().getWebResponse().addCookie(searchCookie);
		//TODO sort out the cookies
	}

	// NOTE(review): URLDecoder.decode(String) is deprecated and uses the platform default
	// charset; also, only one write path URL-encodes the term before storing it, so this
	// decode is asymmetric with the history-replay path — verify before relying on it.
	private String getCookieSearchString(String cookieString) {
		return URLDecoder.decode(cookieString.substring(cookieString.indexOf(ProfileConstants.SEARCH_COOKIE_VALUE_SEARCH_MARKER) + 1));
	}

	/** @return the search-type prefix (everything before the CONNECTIONS marker). */
	private String getCookieSearchType(String cookieString) {
		return cookieString.substring(0, cookieString.indexOf(ProfileConstants.SEARCH_COOKIE_VALUE_CONNECTIONS_MARKER));
	}

	/** @return the "limit to connections" flag stored between the CONNECTIONS and WORKSITE markers. */
	private boolean getCookieFilterConnections(String cookieString) {
		return Boolean.parseBoolean(
				cookieString.substring(cookieString.indexOf(ProfileConstants.SEARCH_COOKIE_VALUE_CONNECTIONS_MARKER) + 1,
				cookieString.indexOf(ProfileConstants.SEARCH_COOKIE_VALUE_WORKSITE_MARKER)));
	}

	/**
	 * @return the worksite id stored between the WORKSITE and PAGE markers, mapping the
	 *         literal string "null" (written by setSearchCookie for a null id) back to null.
	 */
	private String getCookieFilterWorksite(String cookieString) {
		String worksiteId = cookieString.substring(cookieString.indexOf(ProfileConstants.SEARCH_COOKIE_VALUE_WORKSITE_MARKER) + 1,
				cookieString.indexOf(ProfileConstants.SEARCH_COOKIE_VALUE_PAGE_MARKER));
		return (true == worksiteId.equals("null") ?
				null : worksiteId);
	}

	// behaviour so we can set the current search cookie when the navigator page changes
	private class MySearchCookieBehavior extends Behavior {

		private static final long serialVersionUID = 1L;

		// the paged results view whose current page we mirror into the cookie
		private PageableListView<Person> view;

		public MySearchCookieBehavior(PageableListView<Person> view) {
			this.view = view;
		}

		@Override
		public void beforeRender(Component component) {
			// updatePageNumber is currently a no-op (see TODO above), so this records nothing yet
			if (searchCookie != null) {
				updatePageNumber(view.getCurrentPage(), searchCookie.getValue());
			}
		}
	}

}
package smp.presenters.api.clipboard;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

import javafx.animation.FadeTransition;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.ObservableList;
import javafx.event.Event;
import javafx.event.EventHandler;
import javafx.geometry.Bounds;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.image.ImageView;
import javafx.scene.input.InputEvent;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.util.Duration;

import smp.ImageIndex;
import smp.ImageLoader;
import smp.components.InstrumentIndex;

/**
 * The set of instruments that clipboard operations (copy, delete, ...) apply to.
 * An empty set means "no filter": every instrument is allowed.
 *
 * Also owns the filter-icon overlays drawn on top of the instrument line: one
 * ImageView per instrument image, shown while the mouse hovers the line and
 * faded out afterwards. Pressing 'f' while hovering an instrument toggles that
 * instrument in the filter.
 */
public class StaffClipboardFilter extends HashSet<InstrumentIndex> {

    /** Required because HashSet is Serializable. */
    private static final long serialVersionUID = 1L;

    /** The HBox holding the instrument images this filter decorates. */
    private HBox instrumentLine;

    /** Sprite loader used to fetch the filter icon image. */
    private ImageLoader il;

    /** One overlay icon per instrument image, in child order of the instrument line. */
    private List<ImageView> filterImages = new ArrayList<>();

    /** One fade-out transition per overlay icon, same order as filterImages. */
    private List<FadeTransition> filterImagesFades = new ArrayList<>();

    //the instrument that will be toggled when entering an instrumentimage
    private InstrumentIndex instInFocus;

    /**
     * Wires up key and mouse handling on the instrument line.
     *
     * @param instLine the HBox of instrument images
     * @param im       image loader for the filter sprite
     */
    public StaffClipboardFilter(HBox instLine, ImageLoader im){
        super();
        instrumentLine = instLine;
        il = im;

        /*
         * wait for the scene to get initialized then add a keyevent handler
         * that listens for pressing 'f' to filter. this way we can avoid coding
         * requestfocus logic
         */
        instLine.sceneProperty().addListener(new ChangeListener<Scene>() {

            @Override
            public void changed(ObservableValue<? extends Scene> observable, Scene oldScene, Scene newScene) {
                if (oldScene == null && newScene != null) {
                    newScene.addEventHandler(KeyEvent.KEY_PRESSED, new EventHandler<KeyEvent>() {

                        @Override
                        public void handle(KeyEvent event) {
                            // 'f' toggles whichever instrument the mouse is currently over
                            if (event.getCode() == KeyCode.F) {
                                if(instInFocus != null)
                                    toggleInstrumentNoImage(instInFocus);
                            }
                        }
                    });
                }
            }
        });

        ObservableList<Node> instrumentImages = instLine.getChildren();
        for (int i = 0; i < instrumentImages.size(); i++) {
            final int index = i;
            final Node instrumentImage = instrumentImages.get(i);
            addFilterImage(instrumentImage);

            // Hovering an instrument: remember it as the 'f'-toggle target and
            // show all filter overlays at full opacity (pausing any fade-out).
            instrumentImage.setOnMouseEntered(new EventHandler<MouseEvent>() {

                @Override
                public void handle(MouseEvent event) {
                    instInFocus = indexToInst(index);
                    fadeFilterImages(false);
                    for(ImageView filterImage : filterImages)
                        filterImage.setOpacity(1.0);
                }});

            // Leaving an instrument: clear the toggle target and fade the overlays out.
            instrumentImage.setOnMouseExited(new EventHandler<MouseEvent>() {

                @Override
                public void handle(MouseEvent event) {
                    instInFocus = null;
                    fadeFilterImages(true);
                }});
        }
    }

    /**
     * Add a filter icon on top of the instrument icon.
     *
     * @param instrumentImage the instrument image node to overlay
     */
    private void addFilterImage(final Node instrumentImage) {
        ImageView filterImage = new ImageView();
        filterImages.add(filterImage);

        Pane instLinePane = (Pane) instrumentLine.getParent();
        instLinePane.getChildren().add(filterImage);

        // Position the overlay over the instrument image in scene coordinates.
        Bounds instrumentImageBounds = instrumentImage.localToScene(instrumentImage.getBoundsInLocal());
        // the bounds are off by 2 for some reason
        filterImage.setTranslateX(instrumentImageBounds.getMinX() - 2);
        filterImage.setTranslateY(instrumentImageBounds.getMinY() - 2);
        filterImage.setOpacity(0.0);

        FadeTransition ft = new FadeTransition(Duration.millis(2000), filterImage);
        ft.setFromValue(1.0);
        ft.setToValue(0.0);
        filterImagesFades.add(ft);

        // filterImage consumes events so pass the events off to the instrumentImage
        filterImage.addEventHandler(InputEvent.ANY, new EventHandler<Event>() {

            @Override
            public void handle(Event event) {
                instrumentImage.fireEvent(event);
            }
        });
    }

    /**
     * Start or pause the fade-out of every filter overlay.
     *
     * @param fadeThem true to (re)start the fade-outs, false to pause them
     */
    private void fadeFilterImages(boolean fadeThem) {
        for (FadeTransition fade : filterImagesFades)
            if(fadeThem)
                fade.playFromStart();
            else
                fade.pause();
    }

    /**
     * @param ind
     *            the instrument
     * @return if instrument is allowed copying, deleting, etc. (an empty filter
     *         allows everything)
     */
    public boolean isFiltered(InstrumentIndex ind) {
        return this.isEmpty() || this.contains(ind);
    }

    /**
     * turn instrument on/off in filter, display and fade the filter image
     *
     * @param ind
     *            instrument to filter
     * @return true if it now contains ind, false if it doesn't
     */
    public boolean toggleInstrument(InstrumentIndex ind) {
        // Previously both branches of an if/else repeated the fade call;
        // the fade happens unconditionally, so just forward the toggle result.
        boolean added = toggleInstrumentNoImage(ind);
        fadeFilterImages(true);
        return added;
    }

    /**
     * toggleInstrument but don't display image
     *
     * @param ind
     *            instrument to filter
     * @return true if it now contains ind, false if it doesn't
     */
    public boolean toggleInstrumentNoImage(InstrumentIndex ind) {
        int index = instToIndex(ind);
        if(!this.contains(ind)){
            this.add(ind);
            filterImages.get(index).setImage(il.getSpriteFX(ImageIndex.FILTER));
            return true;
        } else{
            this.remove(ind);
            filterImages.get(index).setImage(null);
            return false;
        }
    }

    /**
     * switch COIN and PIRANHA
     * (the two instruments' on-screen positions are swapped relative to their
     * channel order, so their overlay indices must be swapped too —
     * NOTE(review): relies on COIN and PIRANHA being adjacent; confirm against
     * InstrumentIndex)
     *
     * @param ind the instrument
     * @return index of this instrument's overlay in filterImages
     */
    private int instToIndex(InstrumentIndex ind) {
        switch(ind.imageIndex()) {
        case COIN:
            return (ind.getChannel() - 1) + 1;
        case PIRANHA:
            return (ind.getChannel() - 1) - 1;
        default:
            return (ind.getChannel() - 1);
        }
    }

    /**
     * switch COIN AND PIRANHA
     * (inverse of instToIndex — NOTE(review): assumes child order of the
     * instrument line matches InstrumentIndex ordinals; confirm)
     *
     * @param index child index in the instrument line
     * @return the instrument displayed at that index
     */
    private InstrumentIndex indexToInst(int index) {
        if(index == InstrumentIndex.COIN.ordinal())
            return InstrumentIndex.PIRANHA;
        if(index == InstrumentIndex.PIRANHA.ordinal())
            return InstrumentIndex.COIN;
        return InstrumentIndex.values()[index];
    }
}
/*
 * $HeadURL: http://svn.apache.org/repos/asf/httpcomponents/httpclient/trunk/module-client/src/main/java/org/apache/http/conn/ssl/SSLSocketFactory.java $
 * $Revision: 659194 $
 * $Date: 2008-05-22 11:33:47 -0700 (Thu, 22 May 2008) $
 *
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */
package org.ektorp.android.http;

import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.UnknownHostException;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.UnrecoverableKeyException;

import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;

import org.apache.http.conn.scheme.HostNameResolver;
import org.apache.http.conn.scheme.LayeredSocketFactory;
import org.apache.http.conn.scheme.SocketFactory;
import org.apache.http.conn.ssl.AllowAllHostnameVerifier;
import org.apache.http.conn.ssl.BrowserCompatHostnameVerifier;
import org.apache.http.conn.ssl.StrictHostnameVerifier;
import org.apache.http.conn.ssl.X509HostnameVerifier;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;

/**
 * Layered socket factory for TLS/SSL connections, based on JSSE.
 *
 * <p><strong>SECURITY WARNING (review):</strong> despite the documentation below
 * (inherited from the upstream HttpClient class this was forked from), the main
 * constructor of THIS class initializes its {@link SSLContext} with
 * {@link #TRUST_EVEYONE_MANAGER}, which accepts every client and server
 * certificate. Any trust managers derived from a supplied truststore are built
 * and then discarded. Server-certificate validation is therefore effectively
 * disabled and connections are vulnerable to man-in-the-middle attacks; only the
 * (configurable) hostname verifier is enforced. Do not use this factory against
 * untrusted networks without restoring real trust-manager validation.</p>
 *
 * <p>
 * SSLSocketFactory can be used to validate the identity of the HTTPS
 * server against a list of trusted certificates and to authenticate to
 * the HTTPS server using a private key.
 * </p>
 *
 * <p>
 * SSLSocketFactory will enable server authentication when supplied with
 * a {@link KeyStore truststore} file containing one or several trusted
 * certificates. The client secure socket will reject the connection during
 * the SSL session handshake if the target HTTPS server attempts to
 * authenticate itself with a non-trusted certificate.
 * </p>
 *
 * <p>
 * Use JDK keytool utility to import a trusted certificate and generate a truststore file:
 *    <pre>
 *     keytool -import -alias "my server cert" -file server.crt -keystore my.truststore
 *    </pre>
 * </p>
 *
 * <p>
 * SSLSocketFactory will enable client authentication when supplied with
 * a {@link KeyStore keystore} file containing a private key/public certificate
 * pair. The client secure socket will use the private key to authenticate
 * itself to the target HTTPS server during the SSL session handshake if
 * requested to do so by the server.
 * The target HTTPS server will in its turn verify the certificate presented
 * by the client in order to establish client's authenticity
 * </p>
 *
 * <p>
 * Use the following sequence of actions to generate a keystore file
 * </p>
 *   <ul>
 *     <li>
 *      <p>
 *      Use JDK keytool utility to generate a new key
 *      <pre>keytool -genkey -v -alias "my client key" -validity 365 -keystore my.keystore</pre>
 *      For simplicity use the same password for the key as that of the keystore
 *      </p>
 *     </li>
 *     <li>
 *      <p>
 *      Issue a certificate signing request (CSR)
 *      <pre>keytool -certreq -alias "my client key" -file mycertreq.csr -keystore my.keystore</pre>
 *     </p>
 *     </li>
 *     <li>
 *      <p>
 *      Send the certificate request to the trusted Certificate Authority for signature.
 *      One may choose to act as her own CA and sign the certificate request using a PKI
 *      tool, such as OpenSSL.
 *      </p>
 *     </li>
 *     <li>
 *      <p>
 *       Import the trusted CA root certificate
 *       <pre>keytool -import -alias "my trusted ca" -file caroot.crt -keystore my.keystore</pre>
 *      </p>
 *     </li>
 *     <li>
 *      <p>
 *       Import the PKCS#7 file containing the complete certificate chain
 *       <pre>keytool -import -alias "my client key" -file mycert.p7 -keystore my.keystore</pre>
 *      </p>
 *     </li>
 *     <li>
 *      <p>
 *       Verify the content the resultant keystore file
 *       <pre>keytool -list -v -keystore my.keystore</pre>
 *      </p>
 *     </li>
 *   </ul>
 * @author <a href="mailto:oleg at ural.ru">Oleg Kalnichevski</a>
 * @author Julius Davies
 */
@SuppressWarnings("deprecation")
public class AndroidSSLSocketFactory implements LayeredSocketFactory, SocketFactory {

    public static final String TLS   = "TLS";
    public static final String SSL   = "SSL";
    public static final String SSLV2 = "SSLv2";

    public static final X509HostnameVerifier ALLOW_ALL_HOSTNAME_VERIFIER
        = new AllowAllHostnameVerifier();

    public static final X509HostnameVerifier BROWSER_COMPATIBLE_HOSTNAME_VERIFIER
        = new BrowserCompatHostnameVerifier();

    public static final X509HostnameVerifier STRICT_HOSTNAME_VERIFIER
        = new StrictHostnameVerifier();

    /**
     * The factory using the default JVM settings for secure connections.
     */
    private static final AndroidSSLSocketFactory DEFAULT_FACTORY = new AndroidSSLSocketFactory();

    /**
     * Gets an singleton instance of the SSLProtocolSocketFactory.
     * @return a SSLProtocolSocketFactory
     */
    public static AndroidSSLSocketFactory getSocketFactory() {
        return DEFAULT_FACTORY;
    }

    // Null when this factory wraps an externally supplied JSSE socket factory.
    private final SSLContext sslcontext;
    private final javax.net.ssl.SSLSocketFactory socketfactory;
    // Optional custom host name resolution; null falls back to InetSocketAddress.
    private final HostNameResolver nameResolver;
    // The only certificate check actually enforced by this class (see class warning).
    private X509HostnameVerifier hostnameVerifier = BROWSER_COMPATIBLE_HOSTNAME_VERIFIER;

    /**
     * Full constructor.
     *
     * @param algorithm        SSLContext protocol (defaults to TLS when null)
     * @param keystore         keystore holding the client key pair, or null
     * @param keystorePassword password for the keystore's keys, or null
     * @param truststore       truststore — SECURITY: built into trust managers but then
     *                         IGNORED; the context is initialized with the
     *                         trust-everyone manager instead (see class warning)
     * @param random           source of randomness for the context, or null for default
     * @param nameResolver     custom host name resolver, or null
     */
    public AndroidSSLSocketFactory(
        String algorithm,
        final KeyStore keystore,
        final String keystorePassword,
        final KeyStore truststore,
        final SecureRandom random,
        final HostNameResolver nameResolver)
        throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException, UnrecoverableKeyException {
        super();
        if (algorithm == null) {
            algorithm = TLS;
        }
        KeyManager[] keymanagers = null;
        if (keystore != null) {
            keymanagers = createKeyManagers(keystore, keystorePassword);
        }
        // SECURITY: the trust managers created from the truststore below are never
        // passed to SSLContext.init — the @SuppressWarnings("unused") marks exactly
        // this. The context is initialized with TRUST_EVEYONE_MANAGER, so server
        // certificates are NOT validated.
        @SuppressWarnings("unused")
        TrustManager[] trustmanagers = null;
        if (truststore != null) {
            trustmanagers = createTrustManagers(truststore);
        }
        this.sslcontext = SSLContext.getInstance(algorithm);
        this.sslcontext.init(keymanagers, new TrustManager[]{TRUST_EVEYONE_MANAGER}, random);
        this.socketfactory = this.sslcontext.getSocketFactory();
        this.nameResolver = nameResolver;
    }

    public AndroidSSLSocketFactory(
            final KeyStore keystore,
            final String keystorePassword,
            final KeyStore truststore)
            throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException, UnrecoverableKeyException {
        this(TLS, keystore, keystorePassword, truststore, null, null);
    }

    public AndroidSSLSocketFactory(final KeyStore keystore, final String keystorePassword)
            throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException, UnrecoverableKeyException {
        this(TLS, keystore, keystorePassword, null, null, null);
    }

    public AndroidSSLSocketFactory(final KeyStore truststore)
            throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException, UnrecoverableKeyException {
        this(TLS, null, null, truststore, null, null);
    }

    /**
     * Constructs an HttpClient SSLSocketFactory backed by the given JSSE
     * SSLSocketFactory.
     *
     * @hide
     */
    public AndroidSSLSocketFactory(javax.net.ssl.SSLSocketFactory socketfactory) {
        super();
        this.sslcontext = null;
        this.socketfactory = socketfactory;
        this.nameResolver = null;
    }

    /**
     * Creates the default SSL socket factory.
     * This constructor is used exclusively to instantiate the factory for
     * {@link #getSocketFactory getSocketFactory}.
     */
    private AndroidSSLSocketFactory() {
        super();
        this.sslcontext = null;
        this.socketfactory = HttpsURLConnection.getDefaultSSLSocketFactory();
        this.nameResolver = null;
    }

    /**
     * Builds key managers from the given keystore (for client authentication).
     *
     * @param keystore non-null keystore holding the client key material
     * @param password key password, or null
     */
    private static KeyManager[] createKeyManagers(final KeyStore keystore, final String password)
        throws KeyStoreException, NoSuchAlgorithmException, UnrecoverableKeyException {
        if (keystore == null) {
            throw new IllegalArgumentException("Keystore may not be null");
        }
        KeyManagerFactory kmfactory = KeyManagerFactory.getInstance(
            KeyManagerFactory.getDefaultAlgorithm());
        kmfactory.init(keystore, password != null ? password.toCharArray(): null);
        return kmfactory.getKeyManagers();
    }

    /**
     * Builds trust managers from the given truststore.
     * NOTE(review): the result is currently discarded by the constructor above.
     */
    private static TrustManager[] createTrustManagers(final KeyStore keystore)
        throws KeyStoreException, NoSuchAlgorithmException {
        if (keystore == null) {
            throw new IllegalArgumentException("Keystore may not be null");
        }
        TrustManagerFactory tmfactory = TrustManagerFactory.getInstance(
            TrustManagerFactory.getDefaultAlgorithm());
        tmfactory.init(keystore);
        return tmfactory.getTrustManagers();
    }

    // non-javadoc, see interface org.apache.http.conn.SocketFactory
    public Socket createSocket()
        throws IOException {

        // the cast makes sure that the factory is working as expected
        return (SSLSocket) this.socketfactory.createSocket();
    }

    // non-javadoc, see interface org.apache.http.conn.SocketFactory
    // Binds locally if requested, connects with the configured timeouts, then runs
    // the hostname verifier (closing the socket on verification failure).
    public Socket connectSocket(
        final Socket sock,
        final String host,
        final int port,
        final InetAddress localAddress,
        int localPort,
        final HttpParams params
    ) throws IOException {

        if (host == null) {
            throw new IllegalArgumentException("Target host may not be null.");
        }
        if (params == null) {
            throw new IllegalArgumentException("Parameters may not be null.");
        }

        SSLSocket sslsock = (SSLSocket)
            ((sock != null) ? sock : createSocket());

        if ((localAddress != null) || (localPort > 0)) {

            // we need to bind explicitly
            if (localPort < 0)
                localPort = 0; // indicates "any"

            InetSocketAddress isa =
                new InetSocketAddress(localAddress, localPort);
            sslsock.bind(isa);
        }

        int connTimeout = HttpConnectionParams.getConnectionTimeout(params);
        int soTimeout = HttpConnectionParams.getSoTimeout(params);

        InetSocketAddress remoteAddress;
        if (this.nameResolver != null) {
            remoteAddress = new InetSocketAddress(this.nameResolver.resolve(host), port);
        } else {
            remoteAddress = new InetSocketAddress(host, port);
        }

        sslsock.connect(remoteAddress, connTimeout);

        sslsock.setSoTimeout(soTimeout);
        try {
            hostnameVerifier.verify(host, sslsock);
            // verifyHostName() didn't blowup - good!
        } catch (IOException iox) {
            // close the socket before re-throwing the exception
            try { sslsock.close(); } catch (Exception x) { /*ignore*/ }
            throw iox;
        }

        return sslsock;
    }

    /**
     * Checks whether a socket connection is secure.
     * This factory creates TLS/SSL socket connections
     * which, by default, are considered secure.
     * <br/>
     * Derived classes may override this method to perform
     * runtime checks, for example based on the cypher suite.
     *
     * @param sock      the connected socket
     *
     * @return  <code>true</code>
     *
     * @throws IllegalArgumentException if the argument is invalid
     */
    public boolean isSecure(Socket sock)
        throws IllegalArgumentException {

        if (sock == null) {
            throw new IllegalArgumentException("Socket may not be null.");
        }
        // This instanceof check is in line with createSocket() above.
        if (!(sock instanceof SSLSocket)) {
            throw new IllegalArgumentException
                ("Socket not created by this factory.");
        }
        // This check is performed last since it calls the argument object.
        if (sock.isClosed()) {
            throw new IllegalArgumentException("Socket is closed.");
        }

        return true;

    } // isSecure

    // non-javadoc, see interface LayeredSocketFactory
    // Layers TLS over an existing socket and verifies the hostname afterwards.
    public Socket createSocket(
        final Socket socket,
        final String host,
        final int port,
        final boolean autoClose
    ) throws IOException, UnknownHostException {
        SSLSocket sslSocket = (SSLSocket) this.socketfactory.createSocket(
              socket,
              host,
              port,
              autoClose
        );
        hostnameVerifier.verify(host, sslSocket);
        // verifyHostName() didn't blowup - good!
        return sslSocket;
    }

    public void setHostnameVerifier(X509HostnameVerifier hostnameVerifier) {
        if ( hostnameVerifier == null ) {
            throw new IllegalArgumentException("Hostname verifier may not be null");
        }
        this.hostnameVerifier = hostnameVerifier;
    }

    public X509HostnameVerifier getHostnameVerifier() {
        return hostnameVerifier;
    }

    /**
     * SECURITY: trust manager that accepts every client and server certificate
     * chain without any validation; used by the main constructor (see class
     * warning). Name typo ("EVEYONE") kept — it is part of the public API.
     * NOTE(review): getAcceptedIssuers returns null, whereas the
     * X509TrustManager contract calls for a non-null (possibly empty) array.
     */
    public final static TrustManager TRUST_EVEYONE_MANAGER = new X509TrustManager() {
        public void checkClientTrusted(
                java.security.cert.X509Certificate[] chain, String authType)
                throws java.security.cert.CertificateException {
            // Trust client
        }
        public void checkServerTrusted(
                java.security.cert.X509Certificate[] chain, String authType)
                throws java.security.cert.CertificateException {
            // Trust server
        }
        public java.security.cert.X509Certificate[] getAcceptedIssuers() {
            return null;
        }
    };
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.transactions;

import java.util.Collection;
import java.util.Map;
import javax.cache.Cache;
import javax.cache.processor.EntryProcessor;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheEntryPredicate;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheReturn;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.dr.GridCacheDrInfo;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.lang.GridInClosure3;
import org.jetbrains.annotations.Nullable;

/**
 * Local transaction API.
 */
public interface IgniteTxLocalEx extends IgniteInternalTx {
    /**
     * @return Minimum version involved in transaction.
     */
    public GridCacheVersion minVersion();

    /**
     * @return Commit error.
     */
    @Nullable public Throwable commitError();

    /**
     * @param e Commit error.
     */
    public void commitError(Throwable e);

    /**
     * @throws IgniteCheckedException If commit failed.
     */
    public void userCommit() throws IgniteCheckedException;

    /**
     * @throws IgniteCheckedException If rollback failed.
     */
    public void userRollback() throws IgniteCheckedException;

    /**
     * @param cacheCtx Cache context.
     * @param entryTopVer Entry topology version (may be {@code null}).
     * @param keys Keys to get.
     * @param deserializeBinary Deserialize binary flag.
     * @param skipVals Skip values flag.
     * @param keepCacheObjects Keep cache objects
     * @param skipStore Skip store flag.
     * @param needVer If {@code true} entry versions are required for returned values.
     * @return Future for this get.
     */
    public <K, V> IgniteInternalFuture<Map<K, V>> getAllAsync(
        GridCacheContext cacheCtx,
        @Nullable AffinityTopologyVersion entryTopVer,
        Collection<KeyCacheObject> keys,
        boolean deserializeBinary,
        boolean skipVals,
        boolean keepCacheObjects,
        boolean skipStore,
        boolean needVer);

    /**
     * @param cacheCtx Cache context.
     * @param entryTopVer Entry topology version (may be {@code null}).
     * @param map Map to put.
     * @param retval Flag indicating whether a value should be returned.
     * @return Future for put operation.
     */
    public <K, V> IgniteInternalFuture<GridCacheReturn> putAllAsync(
        GridCacheContext cacheCtx,
        @Nullable AffinityTopologyVersion entryTopVer,
        Map<? extends K, ? extends V> map,
        boolean retval);

    /**
     * @param cacheCtx Cache context.
     * @param entryTopVer Entry topology version (may be {@code null}).
     * @param key Key.
     * @param val Value.
     * @param retval Return value flag.
     * @param filter Filter.
     * @return Future for put operation.
     */
    public <K, V> IgniteInternalFuture<GridCacheReturn> putAsync(
        GridCacheContext cacheCtx,
        @Nullable AffinityTopologyVersion entryTopVer,
        K key,
        V val,
        boolean retval,
        CacheEntryPredicate filter);

    /**
     * @param cacheCtx Cache context.
     * @param entryTopVer Entry topology version (may be {@code null}).
     * @param key Key.
     * @param entryProcessor Entry processor.
     * @param invokeArgs Optional arguments for entry processor.
     * @return Operation future.
     */
    public <K, V> IgniteInternalFuture<GridCacheReturn> invokeAsync(
        GridCacheContext cacheCtx,
        @Nullable AffinityTopologyVersion entryTopVer,
        K key,
        EntryProcessor<K, V, Object> entryProcessor,
        Object... invokeArgs);

    /**
     * @param cacheCtx Cache context.
     * @param entryTopVer Entry topology version (may be {@code null}).
     * @param map Entry processors map.
     * @param invokeArgs Optional arguments for entry processor.
     * @return Operation future.
     */
    public <K, V, T> IgniteInternalFuture<GridCacheReturn> invokeAsync(
        GridCacheContext cacheCtx,
        @Nullable AffinityTopologyVersion entryTopVer,
        Map<? extends K, ? extends EntryProcessor<K, V, Object>> map,
        Object... invokeArgs);

    /**
     * @param cacheCtx Cache context.
     * @param entryTopVer Entry topology version (may be {@code null}).
     * @param keys Keys to remove.
     * @param retval Flag indicating whether a value should be returned.
     * @param filter Filter.
     * @param singleRmv {@code True} for single key remove operation ({@link Cache#remove(Object)}.
     * @return Future for asynchronous remove.
     */
    public <K, V> IgniteInternalFuture<GridCacheReturn> removeAllAsync(
        GridCacheContext cacheCtx,
        @Nullable AffinityTopologyVersion entryTopVer,
        Collection<? extends K> keys,
        boolean retval,
        CacheEntryPredicate filter,
        boolean singleRmv);

    /**
     * @param cacheCtx Cache context.
     * @param drMap DR map to put.
     * @return Future for DR put operation.
     */
    public IgniteInternalFuture<?> putAllDrAsync(
        GridCacheContext cacheCtx,
        Map<KeyCacheObject, GridCacheDrInfo> drMap);

    /**
     * @param cacheCtx Cache context.
     * @param drMap DR map.
     * @return Future for asynchronous remove.
     */
    public IgniteInternalFuture<?> removeAllDrAsync(
        GridCacheContext cacheCtx,
        Map<KeyCacheObject, GridCacheVersion> drMap);

    /**
     * Finishes transaction (either commit or rollback).
     *
     * @param commit {@code True} if commit, {@code false} if rollback.
     * @return {@code True} if state has been changed.
     * @throws IgniteCheckedException If finish failed.
     */
    public boolean finish(boolean commit) throws IgniteCheckedException;

    /**
     * @param cacheCtx Cache context.
     * @param topVer Topology version.
     * @param readThrough Read through flag.
     * @param async if {@code True}, then loading will happen in a separate thread.
     * @param keys Keys.
     * @param skipVals Skip values flag.
     * @param needVer If {@code true} version is required for loaded values.
     * @param keepBinary Keep binary flag.
     * @param c Closure to be applied for loaded values.
     * @return Future with {@code True} value if loading took place.
     */
    public IgniteInternalFuture<Void> loadMissing(
        GridCacheContext cacheCtx,
        AffinityTopologyVersion topVer,
        boolean readThrough,
        boolean async,
        Collection<KeyCacheObject> keys,
        boolean skipVals,
        boolean needVer,
        boolean keepBinary,
        GridInClosure3<KeyCacheObject, Object, GridCacheVersion> c);
}
package org.kie.dockerui.client.util;

import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.URL;
import com.google.gwt.i18n.client.DateTimeFormat;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.safehtml.shared.SafeUri;
import org.kie.dockerui.backend.servlet.KieArtifactsDownloadServlet;
import org.kie.dockerui.client.resources.bundles.Images;
import org.kie.dockerui.client.resources.i18n.Constants;
import org.kie.dockerui.shared.KieImageTypeManager;
import org.kie.dockerui.shared.model.*;
import org.kie.dockerui.shared.model.impl.*;
import org.kie.dockerui.shared.settings.Settings;
import org.kie.dockerui.shared.util.SharedUtils;

import java.util.*;

/**
 * Client-side helper routines: building container/image web and download
 * addresses, docker pull addresses, SSH commands, status icons/texts and a
 * few small collection utilities.
 */
public class ClientUtils {

    public static final KieContainerTemplates TEMPLATES = GWT.create(KieContainerTemplates.class);

    // Image tags are timestamps in this pattern (e.g. "20150131-235959").
    private static final DateTimeFormat IMAGE_TAG_DATE_FORMAT = DateTimeFormat.getFormat("yyyyMMdd-HHmmss");
    private static final String DOWNLOAD_SERVLET_MAPPING = "download";
    private static final long MILLISECONDS_IN_SECOND = 1000l;
    private static final long SECONDS_IN_MINUTE = 60l;
    private static final long MINUTES_IN_HOUR = 60l;
    private static final long HOURS_IN_DAY = 24l;
    private static final long MILLISECONDS_IN_DAY =
            MILLISECONDS_IN_SECOND * SECONDS_IN_MINUTE * MINUTES_IN_HOUR * HOURS_IN_DAY;

    /** Builds the docker pull address for the given container. */
    public static String getPullAddress(final KieContainer container,
                                        final org.kie.dockerui.shared.model.KieContainerDetails details,
                                        final Settings settings) {
        return TEMPLATES.pullAddress(settings.getPublicHost(),
                settings.getRegistryPort(),
                container.getImage()).asString();
    }

    /**
     * Builds the web application address for the container.
     *
     * @throws IllegalStateException if the container exposes no public HTTP port.
     */
    public static String getWebAddress(final KieContainer container,
                                       final org.kie.dockerui.shared.model.KieContainerDetails details,
                                       final Settings settings) throws IllegalStateException {
        final String protocol = settings.getProtocol();
        final String host = settings.getPublicHost();
        final int httpPublicPort = resolveHttpPort(container);
        final String contextPath = container.getType().getContextPath();
        return TEMPLATES.webAddress(protocol, host, httpPublicPort, contextPath).asString();
    }

    /**
     * Builds the address of the container's dependencies report page.
     *
     * @throws IllegalStateException if the container exposes no public HTTP port.
     */
    public static String getSiteAddress(final KieContainer container,
                                        final org.kie.dockerui.shared.model.KieContainerDetails details,
                                        final Settings settings) throws IllegalStateException {
        final String protocol = settings.getProtocol();
        final String host = settings.getPublicHost();
        final int httpPublicPort = resolveHttpPort(container);
        final String contextPath = "site/dependencies.html";
        return TEMPLATES.webAddress(protocol, host, httpPublicPort, contextPath).asString();
    }

    // Resolves the public mapping of the container's 8080 port, failing if absent.
    private static int resolveHttpPort(final KieContainer container) throws IllegalStateException {
        final int publicPort = SharedUtils.getPublicPort(8080, container);
        if (publicPort < 0) {
            throw new IllegalStateException("No ports available. Is the container running?");
        }
        return publicPort;
    }

    /** Download URL for an image's artifact; uses the image's first tag. */
    public static String getDownloadURL(final Settings settings, final KieImage image) {
        final String firstTag = image.getTags().iterator().next();
        return getDownloadURL(settings, image.getType(), image.getRepository(), firstTag);
    }

    /** Download URL for a container's artifact. */
    public static String getDownloadURL(final Settings settings, final KieContainer container) {
        return getDownloadURL(settings, container.getType(), container.getRepository(), container.getTag());
    }

    // Builds "<artifactsPath>/<tag>/<artifactId>-<tag>-<qualifier>.war" where the
    // qualifier is whatever follows the last '-' in the repository name.
    private static String getDownloadURL(final Settings settings,
                                         final KieImageType kieImageType,
                                         final String repository,
                                         final String tag) {
        final String artifactQualifier = repository.substring(repository.lastIndexOf("-") + 1, repository.length());
        final StringBuilder warPath = new StringBuilder(settings.getArtifactsPath())
                .append("/").append(tag)
                .append("/").append(kieImageType.getArtifactId())
                .append("-").append(tag)
                .append("-").append(artifactQualifier)
                .append(".war");
        return _getDownloadURL(warPath.toString());
    }

    /** Download URL for an already-located artifact file. */
    public static String getDownloadURL(final KieArtifact artifact) {
        return _getDownloadURL(artifact.getAbsoluteFilePath());
    }

    // Points the download servlet at the given absolute file path.
    private static String _getDownloadURL(final String absolutePath) {
        return GWT.getModuleBaseURL() + DOWNLOAD_SERVLET_MAPPING
                + "?" + KieArtifactsDownloadServlet.FILE_PATH_PARAM
                + "=" + URL.encodeQueryString(absolutePath);
    }

    /** SSH + nsenter command line for entering the container's namespace. */
    public static String getSSHCommand(final KieContainer container,
                                       final org.kie.dockerui.shared.model.KieContainerDetails details,
                                       final Settings settings) {
        final String host = settings.getPublicHost();
        final String user = settings.getUser();
        return TEMPLATES.sshSudoNsenterAddress(host, user, Integer.toString(details.getContainerPid())).asString();
    }

    /**
     * Maps a container type to its logo; falls back to the generic docker icon.
     * Returns null when no type is given.
     */
    public static SafeUri getImageUri(final KieImageType containerType) {
        if (containerType == null) {
            return null;
        }
        if (KieWorkbenchType.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.kieIde().getSafeUri();
        }
        if (KieDroolsWorkbenchType.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.drools().getSafeUri();
        }
        if (KieServerType.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.kie().getSafeUri();
        }
        if (UfDashbuilderType.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.dashbuilderLogo().getSafeUri();
        }
        if (WildflyType.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.wildfly().getSafeUri();
        }
        if (EAPType.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.jbossEAP().getSafeUri();
        }
        if (TomcatType.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.tomcat().getSafeUri();
        }
        if (H2Type.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.h2().getSafeUri();
        }
        if (MySQLType.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.mysql().getSafeUri();
        }
        if (PostgreSQLType.INSTANCE.equals(containerType)) {
            return Images.INSTANCE.postgresql().getSafeUri();
        }
        return Images.INSTANCE.dockerIcon().getSafeUri();
    }

    /** Presents container ports as (privatePort -> publicPort) string entries. */
    public static List<Map.Entry<String, String>> toMapEntries(final List<KieContainerPort> ports) {
        if (ports == null) {
            return null;
        }
        final List<Map.Entry<String, String>> entries = new LinkedList<Map.Entry<String, String>>();
        for (final KieContainerPort port : ports) {
            entries.add(entryOf(Integer.toString(port.getPrivatePort()),
                    Integer.toString(port.getPublicPort())));
        }
        return entries;
    }

    /**
     * Splits each string on the separator into a (key, value) entry; strings
     * that do not split into exactly two parts become (string, "").
     */
    public static List<Map.Entry<String, String>> toMapEntries(final String[] s, final String separator) {
        if (s == null) {
            return null;
        }
        final List<Map.Entry<String, String>> entries = new LinkedList<Map.Entry<String, String>>();
        for (final String item : s) {
            final String[] parts = item.split(separator);
            if (parts.length == 2) {
                entries.add(entryOf(parts[0], parts[1]));
            } else {
                entries.add(entryOf(item, ""));
            }
        }
        return entries;
    }

    // Read-only Map.Entry view; setValue is a no-op that returns null.
    private static Map.Entry<String, String> entryOf(final String key, final String value) {
        return new Map.Entry<String, String>() {
            @Override
            public String getKey() {
                return key;
            }

            @Override
            public String getValue() {
                return value;
            }

            @Override
            public String setValue(String ignored) {
                return null;
            }
        };
    }

    /**
     * Builds an insertion-ordered map from "key&lt;sep&gt;value" strings,
     * splitting at the first separator occurrence only; entries without a
     * separator map to the empty string.
     */
    public static Map<String, String> toMap(final String[] s, final String separator) {
        if (s == null) {
            return null;
        }
        final Map<String, String> result = new LinkedHashMap<String, String>();
        for (final String item : s) {
            // Split only at the first occurrence of the separator.
            final String[] parts = item.split(separator, 2);
            if (parts.length == 2) {
                result.put(parts[0], parts[1]);
            } else {
                result.put(item, "");
            }
        }
        return result;
    }

    /** Linear lookup of a key's value; null for a null map or a missing key. */
    public static String getValue(final Map<String, String> map, final String key) {
        if (map == null) {
            return null;
        }
        for (final Map.Entry<String, String> entry : map.entrySet()) {
            if (key.equals(entry.getKey())) {
                return entry.getValue();
            }
        }
        return null;
    }

    /**
     * Returns the date shifted back in time by whole days.
     * NOTE(review): subtracts {@code (days - 1)} days, not {@code days} —
     * confirm the off-by-one is intentional before changing.
     */
    public static Date goBack(final Date date, final int days) {
        final long offset = (days - 1) * MILLISECONDS_IN_DAY;
        return new Date(date.getTime() - offset);
    }

    /** Status indicator icon; grey "close" icon for null/unknown statuses. */
    public static ImageResource getStatusImage(final KieAppStatus status) {
        if (status == null) {
            return Images.INSTANCE.circleGreyCloseIcon();
        }
        switch (status) {
            case OK:
                return Images.INSTANCE.circleGreenIcon();
            case FAILED:
                return Images.INSTANCE.circleRedIcon();
            case NOT_EVALUATED:
                return Images.INSTANCE.circleGreyIcon();
            default:
                return Images.INSTANCE.circleGreyCloseIcon();
        }
    }

    /** Human-readable status text; "not applicable" for null/unknown statuses. */
    public static String getStatusText(final KieAppStatus status) {
        if (status == null) {
            return Constants.INSTANCE.statusNotApplicable();
        }
        switch (status) {
            case OK:
                return Constants.INSTANCE.statusRunnable();
            case FAILED:
                return Constants.INSTANCE.statusNotRunnable();
            case NOT_EVALUATED:
                return Constants.INSTANCE.statusNotEvaluated();
            default:
                return Constants.INSTANCE.statusNotApplicable();
        }
    }

    /** Configured docker image name for the given DBMS type, or null. */
    public static String getDbmsImageName(final KieImageType dbmsType, final Settings settings) {
        if (dbmsType == null) {
            return null;
        }
        if (dbmsType.equals(MySQLType.INSTANCE)) {
            return settings.getMysqlImage();
        }
        if (dbmsType.equals(PostgreSQLType.INSTANCE)) {
            return settings.getPostgresImage();
        }
        return null;
    }

    /** Parses a "yyyyMMdd-HHmmss" image tag into a Date; null for blank input. */
    public static Date parseImageDateTag(final String dateTagged) {
        if (dateTagged == null || dateTagged.trim().length() == 0) {
            return null;
        }
        return IMAGE_TAG_DATE_FORMAT.parse(dateTagged);
    }

    /** Formats a tag date with the locale's medium date format; null-safe. */
    public static String formatImageDateTag(final Date date) {
        if (date == null) {
            return null;
        }
        return DateTimeFormat.getFormat(DateTimeFormat.PredefinedFormat.DATE_MEDIUM).format(date);
    }

    /** Formats a tag date with a caller-supplied pattern; null-safe. */
    public static String formatImageDateTag(final Date date, final String pattern) {
        if (date == null) {
            return null;
        }
        return DateTimeFormat.getFormat(pattern).format(date);
    }
}
package edu.umass.cs.jfoley.coop.experiments.generic;

import au.com.bytecode.opencsv.CSVReader;
import au.com.bytecode.opencsv.CSVWriter;
import ciir.jfoley.chai.collections.Pair;
import ciir.jfoley.chai.collections.util.IterableFns;
import ciir.jfoley.chai.collections.util.MapFns;
import ciir.jfoley.chai.io.IO;
import ciir.jfoley.chai.io.LinesIterable;
import ciir.jfoley.chai.random.ReservoirSampler;
import ciir.jfoley.chai.string.StrUtil;
import ciir.jfoley.chai.xml.ChaiXML;
import ciir.jfoley.chai.xml.XNode;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import org.lemurproject.galago.core.eval.QuerySetJudgments;
import org.lemurproject.galago.core.eval.QuerySetResults;
import org.lemurproject.galago.core.parse.Document;
import org.lemurproject.galago.core.retrieval.LocalRetrieval;
import org.lemurproject.galago.utility.Parameters;
import org.lemurproject.galago.utility.json.JSONUtil;
import org.xml.sax.SAXException;

import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;

/**
 * Experiment tooling: pools the top-ranked documents of several runs into a
 * judgment CSV, cooks Mechanical Turk input files, and post-processes MTurk
 * results back into qrel files.
 *
 * @author jfoley
 */
public class PoolJudgmentsToCSV {
  /**
   * Pools documents ranked at or above --depth (default 5) across all --runs,
   * and prints "qid,doc,\"summary\"" lines to stdout. Summaries are pulled
   * from the --index when given, cached in-memory, and may be skipped when
   * null (--skipNull, defaults to whether an index was supplied).
   */
  public static void main(String[] args) throws IOException {
    Parameters argp = Parameters.parseArgs(args);
    List<String> runs = argp.getAsList("runs", String.class);
    int cutoff = argp.get("depth", 5);
    LocalRetrieval docStore = null;
    if(argp.isString("index")) {
      docStore=new LocalRetrieval(argp.getString("index"));
    }
    // qid -> set of doc names seen at rank <= cutoff in any run.
    Map<String, Set<String>> toJudgeByQuery = new HashMap<>();
    for (String run : runs) {
      QuerySetResults qres = new QuerySetResults(run);
      for (String qid : qres.getQueryIterator()) {
        qres.get(qid).forEach(evalDoc -> {
          if (evalDoc.getRank() <= cutoff) {
            toJudgeByQuery.computeIfAbsent(qid, missing -> new HashSet<>()).add(evalDoc.getName());
          }
        });
      }
    }
    boolean skipNull = argp.get("skipNull", docStore != null);
    final LocalRetrieval finalDocStore = docStore;
    // Cache document previews so a doc pooled for several queries is fetched once.
    Cache<String, String> docText = Caffeine.newBuilder().build();
    toJudgeByQuery.forEach((qid, docs) -> {
      for (String doc : docs) {
        String summary = docText.get(doc, missing -> {
          try {
            if (finalDocStore == null) {
              return null;
            }
            Document gdoc = finalDocStore.getDocument(doc, Document.DocumentComponents.JustText);
            if(gdoc == null) return null;
            // Strip TREC <TEXT> markers, truncate to 1024 chars, escape for CSV/JSON.
            return JSONUtil.escape(StrUtil.preview(gdoc.text.replace("<TEXT>", "").replace("</TEXT>", ""), 1024));
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        });
        if(skipNull && summary == null) continue;
        System.out.println(qid + "," + doc + ",\"" + summary+"\"");
      }
    });
  }

  /**
   * Cooks the Robust04 MTurk batch input CSV (mturk_robust.csv), 5 entities
   * per HIT row. NOTE(review): near-duplicate of CookClue12MTurkInputFile
   * below (only the input/output file names differ) — consider extracting a
   * shared helper.
   */
  public static class CookMTurkInputFile {
    //description,query,e1title,e1abs,e1dblink,e1wlink,e2title,e2abs,e2dblink,e2wlink,e3title,e3abs,e3dblink,e3wlink,e4title,e4abs,e4dblink,e4wlink,e5title,e5abs,e5dblink,e5wlink
    public static void main(String[] args) throws IOException {
      // qid -> query description / title, from tab-separated files.
      Map<String, String> descriptions = new HashMap<>();
      Map<String, String> titles = new HashMap<>();
      LinesIterable.fromFile("coop/ecir2016runs/mturk/rob04.descs.tsv").slurp().forEach((line) -> {
        String[] col = line.split("\t");
        descriptions.put(col[0], col[1]);
      });
      LinesIterable.fromFile("coop/ecir2016runs/mturk/rob04.titles.tsv").slurp().forEach((line) -> {
        String[] col = line.split("\t");
        titles.put(col[0], col[1]);
      });
      // Entities needing judgment: entity -> abstract, qid -> entities.
      // Assumes CSV columns are (qid, entity, abstract) — TODO confirm.
      Map<String, String> entToAbs = new HashMap<>();
      Map<String, List<String>> qidToEnt = new HashMap<>();
      try (CSVReader csv = new CSVReader(IO.openReader("coop/ecir2016runs/mturk/robust.needed.csv"))) {
        for (String[] row : csv.readAll()) {
          entToAbs.put(row[1], row[2]);
          qidToEnt.computeIfAbsent(row[0], missing -> new ArrayList<>()).add(row[1]);
        }
      }
      String[] headers = new String[] {
          "description","query","e1title","e1abs","e2title","e2abs","e3title","e3abs","e4title","e4abs","e5title","e5abs"
      };
      List<String[]> allJobs = new ArrayList<>();
      int numDuplicates = 0;
      int pageSize = 5;
      for (Map.Entry<String, List<String>> kv : qidToEnt.entrySet()) {
        String qid = kv.getKey();
        List<String> entitiesToJudge = kv.getValue();
        // Queries with fewer than a full page of entities are reported and skipped.
        if(entitiesToJudge.size() < pageSize) {
          System.err.println(qid+"\t"+descriptions.get(qid)+"\t"+titles.get(qid)); // title
          System.err.println("\t"+entitiesToJudge);
          continue;
        }
        // Pad the last page to a multiple of pageSize with random repeats.
        int amount = entitiesToJudge.size() % pageSize;
        if(amount > 0) {
          List<String> padLastPage = ReservoirSampler.take(pageSize - amount, entitiesToJudge);
          numDuplicates+= padLastPage.size();
          entitiesToJudge.addAll(padLastPage);
        }
        // assert we fixed the last page:
        assert(entitiesToJudge.size() % pageSize == 0);
        for (List<String> pageOfEntities : IterableFns.batches(entitiesToJudge, 5)) {
          List<String> pageData = new ArrayList<>();
          pageData.add(descriptions.get(qid)); // description
          pageData.add(titles.get(qid)); // title
          for (String ent : pageOfEntities) {
            pageData.add(ent); // e${n}title
            pageData.add(entToAbs.get(ent)); // e${n}abs
          }
          allJobs.add(pageData.toArray(new String[0]));
        }
      }
      System.err.println("Duplicates: "+numDuplicates);
      try (CSVWriter writer = new CSVWriter(IO.openPrintWriter("mturk_robust.csv"))) {
        writer.writeNext(headers);
        for (String[] job : allJobs) {
          writer.writeNext(job);
        }
      }
    }
  }

  /**
   * Extracts (qid, description) pairs from the ClueWeb12 2013/2014 topic XML
   * files (hard-coded local paths) into a tab-separated file.
   */
  public static class Clue12TopicXMLToDescriptions {
    public static void main(String[] args) throws ParserConfigurationException, SAXException, IOException {
      Map<String, String> descriptions = new HashMap<>();
      for (String path : Arrays.asList("/home/jfoley/code/queries/clue12/trec-2014.topics.refined.xml", "/home/jfoley/code/queries/clue12/trec2013-topics.xml")) {
        XNode xNode = ChaiXML.fromFile(path);
        for (XNode topic : xNode.selectByTag("topic")) {
          String qid = topic.attr("number");
          List<XNode> descs = topic.selectByTag("description");
          assert(descs.size() == 1);
          String desc = descs.get(0).getText();
          descriptions.put(qid, desc);
        }
      }
      System.out.println(descriptions.size());
      try(PrintWriter pw = IO.openPrintWriter("/home/jfoley/code/queries/clue12/web1314.descs.tsv")) {
        for (String qid : IterableFns.sorted(descriptions.keySet())) {
          pw.println(qid+"\t"+descriptions.get(qid));
        }
      }
    }
  }

  /**
   * Cooks the ClueWeb12 MTurk batch input CSV (mturk_clue12.csv).
   * NOTE(review): duplicate of CookMTurkInputFile except for file names.
   */
  public static class CookClue12MTurkInputFile {
    //description,query,e1title,e1abs,e1dblink,e1wlink,e2title,e2abs,e2dblink,e2wlink,e3title,e3abs,e3dblink,e3wlink,e4title,e4abs,e4dblink,e4wlink,e5title,e5abs,e5dblink,e5wlink
    public static void main(String[] args) throws IOException {
      Map<String, String> descriptions = new HashMap<>();
      Map<String, String> titles = new HashMap<>();
      LinesIterable.fromFile("coop/ecir2016runs/mturk/clue12.descs.tsv").slurp().forEach((line) -> {
        String[] col = line.split("\t");
        descriptions.put(col[0], col[1]);
      });
      LinesIterable.fromFile("coop/ecir2016runs/mturk/clue12.titles.tsv").slurp().forEach((line) -> {
        String[] col = line.split("\t");
        titles.put(col[0], col[1]);
      });
      Map<String, String> entToAbs = new HashMap<>();
      Map<String, List<String>> qidToEnt = new HashMap<>();
      try (CSVReader csv = new CSVReader(IO.openReader("coop/ecir2016runs/mturk/clue12.needed.csv"))) {
        for (String[] row : csv.readAll()) {
          entToAbs.put(row[1], row[2]);
          qidToEnt.computeIfAbsent(row[0], missing -> new ArrayList<>()).add(row[1]);
        }
      }
      String[] headers = new String[] {
          "description","query","e1title","e1abs","e2title","e2abs","e3title","e3abs","e4title","e4abs","e5title","e5abs"
      };
      List<String[]> allJobs = new ArrayList<>();
      int numDuplicates = 0;
      int pageSize = 5;
      for (Map.Entry<String, List<String>> kv : qidToEnt.entrySet()) {
        String qid = kv.getKey();
        List<String> entitiesToJudge = kv.getValue();
        if(entitiesToJudge.size() < pageSize) {
          System.err.println(qid+"\t"+descriptions.get(qid)+"\t"+titles.get(qid)); // title
          System.err.println("\t"+entitiesToJudge);
          continue;
        }
        int amount = entitiesToJudge.size() % pageSize;
        if(amount > 0) {
          List<String> padLastPage = ReservoirSampler.take(pageSize - amount, entitiesToJudge);
          numDuplicates+= padLastPage.size();
          entitiesToJudge.addAll(padLastPage);
        }
        // assert we fixed the last page:
        assert(entitiesToJudge.size() % pageSize == 0);
        for (List<String> pageOfEntities : IterableFns.batches(entitiesToJudge, 5)) {
          List<String> pageData = new ArrayList<>();
          pageData.add(descriptions.get(qid)); // description
          pageData.add(titles.get(qid)); // title
          for (String ent : pageOfEntities) {
            pageData.add(ent); // e${n}title
            pageData.add(entToAbs.get(ent)); // e${n}abs
          }
          allJobs.add(pageData.toArray(new String[0]));
        }
      }
      System.err.println("Duplicates: "+numDuplicates);
      try (CSVWriter writer = new CSVWriter(IO.openPrintWriter("mturk_clue12.csv"))) {
        writer.writeNext(headers);
        for (String[] job : allJobs) {
          writer.writeNext(job);
        }
      }
    }
  }

  /**
   * One worker's judgment for one (query, entity) pair.
   * Label semantics (from binarize/unsure below): -1 = unsure/blank,
   * 0 = not relevant, 1/2 = relevant.
   */
  public static class MturkJudgment {
    final String qid;
    final String entity;
    final int label;
    // Per-entity share of the HIT's WorkTimeInSeconds (HIT time / 5).
    final double approximateWorkTimeInSeconds;
    final String workerId;

    public MturkJudgment(String qid, String entity, int label, double approximateWorkTimeInSeconds, String workerId) {
      this.qid = qid;
      this.entity = entity;
      this.label = label;
      this.approximateWorkTimeInSeconds = approximateWorkTimeInSeconds;
      this.workerId = workerId;
    }

    @Override
    public String toString() {
      return workerId+": "+label;
    }

    /** Collapses the graded label to relevant (1, 2) vs not (everything else). */
    public boolean binarize() {
      switch (label) {
        default:
        case -1:
        case 0:
          return false;
        case 1:
        case 2:
          return true;
      }
    }

    /** True for the sentinel used when the worker left the answer blank. */
    public boolean unsure() {
      return label == -1;
    }
  }

  /**
   * Joins MTurk result CSVs with existing expert qrels, reports agreement
   * statistics, and writes a merged qrel (majority vote, ties broken toward
   * relevant). --robust selects the Robust04 inputs; otherwise ClueWeb12.
   */
  public static class ProcessRobustResults {
    public static void main(String[] args) throws IOException {
      Parameters argp = Parameters.parseArgs(args);
      // Reverse lookup: query title text -> qid.
      Map<String, String> rtitles = new HashMap<>();
      boolean robustNotClue = argp.get("robust", false);
      String titleQueries = robustNotClue ? "coop/ecir2016runs/mturk/rob04.titles.tsv" : "coop/ecir2016runs/mturk/clue12.titles.tsv";
      LinesIterable.fromFile(titleQueries).slurp().forEach((line) -> {
        String[] col = line.split("\t");
        rtitles.put(col[1].trim(), col[0]);
      });
      String existingJudgmentFile = robustNotClue ? "coop/ecir2016runs/qrels/robust04.x.ent.qrel" : "coop/ecir2016runs/qrels/clue12.x.ent.qrel";
      // (qid, entity) -> all binary votes, seeded from the expert qrel.
      Map<Pair<String,String>, List<Boolean>> binarized = new HashMap<>();
      QuerySetJudgments qrel = new QuerySetJudgments(existingJudgmentFile, true, true);
      qrel.forEach((qid, qj) -> {
        qj.forEach((ent, wt) -> {
          MapFns.extendListInMap(binarized, Pair.of(qid,ent), wt > 0);
        });
      });
      // Load the MTurk results CSV as one Parameters record per row.
      List<Parameters> entries = new ArrayList<>();
      try (CSVReader reader = new CSVReader(IO.openReader("coop/ecir2016runs/mturk/mturk_"+(robustNotClue ? "robust" : "clue12") +"_results.csv"))) {
        String[] header = reader.readNext();
        while(true) {
          String[] row = reader.readNext();
          if(row == null) break;
          Parameters entry = Parameters.create();
          for (int i = 0; i < row.length; i++) {
            entry.put(header[i], row[i]);
          }
          entries.add(entry);
        }
      }
      // Explode each HIT row into 5 per-entity judgments.
      List<MturkJudgment> judgments = new ArrayList<>();
      for (Parameters entry : entries) {
        String queryText = entry.getString("Input.query").trim();
        String qid = rtitles.get(queryText);
        // NOTE(review): an unmatched title only logs a warning; a null qid
        // still flows into the judgments below.
        if(qid == null) {
          System.err.println(queryText);
          System.err.println(rtitles.keySet());
        }
        double time = Double.parseDouble(entry.getString("WorkTimeInSeconds")) / 5.0;
        String worker = entry.getString("WorkerId");
        for (int i = 0; i < 5; i++) {
          int n = i+1;
          String ent = entry.getString("Input.e"+n+"title");
          String labelS = entry.getString("Answer.e"+n+"j");
          if(labelS.isEmpty()) {
            labelS = "-1"; // blank answer -> "unsure" sentinel
          }
          int label = Integer.parseInt(labelS);
          judgments.add(new MturkJudgment(qid, ent, label, time, worker));
        }
      }
      // Accumulate per-worker and per-label statistics, and join judgments
      // by (qid, entity) for agreement analysis.
      int skipped = 0;
      double totalTime = 0.0;
      TIntIntHashMap labelFreqs = new TIntIntHashMap();
      TObjectIntHashMap<String> uniqueWorkers = new TObjectIntHashMap<>();
      Map<Pair<String,String>, List<MturkJudgment>> joinedJudgments = new HashMap<>();
      for (MturkJudgment j : judgments) {
        if(j.label == -1) skipped++;
        uniqueWorkers.adjustOrPutValue(j.workerId, 1, 1);
        totalTime+=j.approximateWorkTimeInSeconds;
        labelFreqs.adjustOrPutValue(j.label, 1, 1);
        MapFns.extendListInMap(joinedJudgments, Pair.of(j.qid, j.entity), j);
        MapFns.extendListInMap(binarized, Pair.of(j.qid, j.entity), j.binarize());
      }
      // Inter-worker agreement: a pair "agrees" when nobody dissents
      // (unsure votes excluded); singletons are not counted.
      int agree = 0;
      int total = 0;
      for (Map.Entry<Pair<String, String>, List<MturkJudgment>> prjs : joinedJudgments.entrySet()) {
        List<MturkJudgment> js = prjs.getValue();
        if(js.size() == 1) continue;
        int voteTrue = 0;
        int voteFalse = 0;
        for (MturkJudgment jm : js) {
          if(jm.unsure()) continue;
          if(jm.binarize()) {
            voteTrue++;
          } else voteFalse++;
        }
        if(voteTrue == 0 || voteFalse == 0) {
          agree++;
        }
        total++;
      }
      // Merge expert + MTurk votes into a final qrel by majority vote.
      Map<String, Map<String, Boolean>> finalQrel = new HashMap<>();
      int xagree = 0;
      int xtotal = 0;
      for (Map.Entry<Pair<String, String>,List<Boolean>> prjs : binarized.entrySet()) {
        Pair<String,String> pr = prjs.getKey();
        String qid = pr.getKey();
        String ent = pr.getValue();
        List<Boolean> js = prjs.getValue();
        Map<String, Boolean> pqj = finalQrel.computeIfAbsent(qid, missing -> new HashMap<>());
        if(js.size() == 1) {
          pqj.put(ent, js.get(0));
        } else {
          int voteTrue = 0;
          int voteFalse = 0;
          for (boolean jm : js) {
            if (jm) {
              voteTrue++;
            } else voteFalse++;
          }
          // break ties toward relevance.
          pqj.put(ent, voteTrue >= voteFalse);
          if (voteTrue == 0 || voteFalse == 0) {
            xagree++;
          }
          xtotal++;
        }
      }
      System.out.println(uniqueWorkers);
      System.out.println(labelFreqs);
      System.out.println("Agreement: "+agree+"/"+total+" "+(agree / (double) total));
      System.out.println("With-Expert Agreement: "+xagree+"/"+xtotal+" "+(xagree / (double) xtotal));
      System.out.println("Skipped: "+skipped);
      System.out.println("Total Time: "+totalTime+"s");
      System.out.println("Total Time: "+(totalTime/60.0)+"m");
      System.out.println("Total Time: "+(totalTime/3600.0)+"h");
      // TREC qrel format: "qid 0 entity label".
      try (PrintWriter pw = IO.openPrintWriter(robustNotClue ? "robust.mturk.qrel" : "clue12.mturk.qrel")) {
        finalQrel.forEach((qid, pqdata) -> {
          pqdata.forEach((ent, truthy) -> {
            pw.println(qid+" 0 "+ent+" "+ (truthy ? "1" : "0"));
          });
        });
      }
    }
  }
}
/**
 * SIX OVAL - https://nakamura5akihito.github.io/
 * Copyright (C) 2010 Akihito Nakamura
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.opensec.six.oval.model.windows;

import io.opensec.six.oval.model.ComponentType;
import io.opensec.six.oval.model.ElementRef;
import io.opensec.six.oval.model.Family;
import io.opensec.six.oval.model.definitions.EntityObjectStringType;
import io.opensec.six.oval.model.definitions.Filter;
import io.opensec.six.oval.model.definitions.Set;
import io.opensec.six.oval.model.definitions.SystemObjectType;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;



/**
 * The service_object is used by a service_test to define the specific service(s)
 * to be evaluated.
 *
 * @author Akihito Nakamura, AIST
 * @see <a href="http://oval.mitre.org/language/">OVAL Language</a>
 */
public class ServiceObject
    extends SystemObjectType
{

    //TODO: XSD model.

    // Per the OVAL schema, either 'set' or 'service_name' identifies the
    // service(s); 'filter' optionally narrows the matched set.
    private Set  set;
    //{1..1}

    private EntityObjectStringType  service_name;
    //{1..1}

    private final Collection<Filter>  filter = new ArrayList<Filter>();
    //{0..*}



    /**
     * Constructor.
     * Creates an object with no ID and version 0.
     */
    public ServiceObject()
    {
        this( null, 0 );
    }


    /**
     * Constructor.
     *
     * @param id       the OVAL object ID.
     * @param version  the object version.
     */
    public ServiceObject(
                    final String id,
                    final int version
                    )
    {
        super( id, version );

        _oval_family = Family.WINDOWS;
        _oval_component = ComponentType.SERVICE;
    }



    /**
     * Sets the 'set' element, which references other objects.
     *
     * @param set  the set element.
     */
    public void setSet(
                    final Set set
                    )
    {
        this.set = set;
    }


    /**
     * @return  the 'set' element, or null if none was assigned.
     */
    public Set getSet()
    {
        return set;
    }



    /**
     * Sets the service_name entity, i.e. the name of the service(s) to evaluate.
     *
     * @param service_name  the service name entity.
     */
    public void setServiceName(
                    final EntityObjectStringType service_name
                    )
    {
        this.service_name = service_name;
    }


    /**
     * @return  the service_name entity, or null if none was assigned.
     */
    public EntityObjectStringType getServiceName()
    {
        return service_name;
    }



    /**
     * Replaces the filters of this object with the given collection.
     * Passing null or an empty collection simply clears the filters.
     *
     * @param filters  the new filters; the elements are copied, the collection
     *                 itself is not retained.
     */
    public void setFilter(
                    final Collection<? extends Filter> filters
                    )
    {
        // Guard against self-assignment: clearing first would lose the elements.
        if (filter != filters) {
            filter.clear();
            if (filters != null  &&  filters.size() > 0) {
                filter.addAll( filters );
            }
        }
    }


    /**
     * Appends a single filter.
     *
     * @param filter  the filter to add.
     * @return  true if the filter was added; false when null was given.
     */
    public boolean addFilter(
                    final Filter filter
                    )
    {
        if (filter == null) {
            return false;
        }

        return this.filter.add( filter );
    }


    /**
     * @return  the live (modifiable) collection of filters; never null.
     */
    public Collection<Filter> getFilter()
    {
        return filter;
    }


    /**
     * @return  an iterator over the filters.
     */
    public Iterator<Filter> iterateFilter()
    {
        return filter.iterator();
    }



    //*********************************************************************
    //  DefinitionsElement
    //*********************************************************************

    @Override
    public Collection<ElementRef> ovalGetElementRef()
    {
        Collection<ElementRef>  ref_list = new ArrayList<ElementRef>();

        // NOTE(review): if service_name was never set, this adds a null
        // entry to the list — presumably callers tolerate that; verify.
        ref_list.add( getServiceName() );
        ref_list.addAll( getFilter() );

        return ref_list;
    }



    //**************************************************************
    //  java.lang.Object
    //**************************************************************

    @Override
    public int hashCode()
    {
        return super.hashCode();
    }


    @Override
    public boolean equals(
                    final Object obj
                    )
    {
        if (!(obj instanceof ServiceObject)) {
            return false;
        }

        return super.equals( obj );
    }


    @Override
    public String toString()
    {
        // Fixed: the prefix previously read "file_object[", a copy/paste
        // leftover from FileObject; it must name this element type.
        return "service_object[" + super.toString()
                        + ", set=" + getSet()
                        + ", service_name=" + getServiceName()
                        + ", filter=" + getFilter()
                        + "]";
    }

}
//
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package hudson.model;

import hudson.model.Fingerprint.RangeSet;
import java.io.File;
import jenkins.fingerprints.FileFingerprintStorage;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * Unit tests for {@link Fingerprint}, chiefly the half-open interval
 * arithmetic of {@link Fingerprint.RangeSet} ([M,N) notation) and
 * fingerprint XML deserialization.
 *
 * @author Kohsuke Kawaguchi
 */
public class FingerprintTest {
    @Rule public TemporaryFolder tmp = new TemporaryFolder();

    /** Adding single build numbers grows/merges half-open ranges. */
    @Test
    public void rangeSet() {
        RangeSet rs = new RangeSet();
        assertFalse(rs.includes(0));
        assertFalse(rs.includes(3));
        assertFalse(rs.includes(5));

        rs.add(3);
        assertFalse(rs.includes(2));
        assertTrue(rs.includes(3));
        assertFalse(rs.includes(4));
        assertEquals("[3,4)",rs.toString());

        rs.add(4);
        assertFalse(rs.includes(2));
        assertTrue(rs.includes(3));
        assertTrue(rs.includes(4));
        assertFalse(rs.includes(5));
        assertEquals("[3,5)",rs.toString());

        rs.add(10);
        assertEquals("[3,5),[10,11)",rs.toString());

        rs.add(9);
        assertEquals("[3,5),[9,11)",rs.toString());

        rs.add(6);
        assertEquals("[3,5),[6,7),[9,11)",rs.toString());

        rs.add(5);
        // 5 bridges [3,5) and [6,7)... wait: it extends [3,5) to [3,6),
        // which then coalesces with [6,7) into [3,7).
        assertEquals("[3,7),[9,11)",rs.toString());
    }

    /** Merging a set that overlaps both of the target's ranges unifies them. */
    @Test
    public void merge() {
        RangeSet x = new RangeSet();
        x.add(1);
        x.add(2);
        x.add(3);
        x.add(5);
        x.add(6);
        assertEquals("[1,4),[5,7)",x.toString());

        RangeSet y = new RangeSet();
        y.add(3);
        y.add(4);
        y.add(5);
        assertEquals("[3,6)",y.toString());

        x.add(y);
        assertEquals("[1,7)",x.toString());
    }

    /** Merging a set that exactly fills the gap between two ranges. */
    @Test
    public void merge2() {
        RangeSet x = new RangeSet();
        x.add(1);
        x.add(2);
        x.add(5);
        x.add(6);
        assertEquals("[1,3),[5,7)",x.toString());

        RangeSet y = new RangeSet();
        y.add(3);
        y.add(4);
        assertEquals("[3,5)",y.toString());

        x.add(y);
        assertEquals("[1,7)",x.toString());
    }

    /** Merging disjoint sets keeps all the disjoint ranges. */
    @Test
    public void merge3() {
        RangeSet x = new RangeSet();
        x.add(1);
        x.add(5);
        assertEquals("[1,2),[5,6)",x.toString());

        RangeSet y = new RangeSet();
        y.add(3);
        y.add(5);
        y.add(7);
        assertEquals("[3,4),[5,6),[7,8)",y.toString());

        x.add(y);
        assertEquals("[1,2),[3,4),[5,6),[7,8)",x.toString());
    }

    /** retainAll = set intersection; returns true when x was modified. */
    @Test
    public void retainAll1() {
        RangeSet x = new RangeSet();
        RangeSet y = new RangeSet();

        x.addAll(1,2,3, 10,11, 20);
        y.addAll(  2,   11,12, 19,20,21);

        assertTrue(x.retainAll(y));

        RangeSet z = new RangeSet();
        z.addAll(2,11,20);
        assertEquals(x,z);
    }

    /** Intersection across many partially-overlapping ranges. */
    @Test
    public void retainAll2() {
        RangeSet x = new RangeSet();
        RangeSet y = new RangeSet();

        x.addAll(1,2,3,4,5,6,7,8,9,10, 13,14,15,16,17,18,19,20);
        y.addAll(  2,3,  5,6,  9,10,11,12,13, 15,16, 18,19);

        assertTrue(x.retainAll(y));

        RangeSet z = new RangeSet();
        z.addAll(2,3,5,6,9,10,13,15,16,18,19);
        assertEquals(x,z);
    }

    /** Intersecting with an empty set empties x (and reports modification). */
    @Test
    public void retainAll3() {
        RangeSet x = new RangeSet();
        RangeSet y = new RangeSet();

        x.addAll(1,2,3,4,5);

        assertTrue(x.retainAll(y));
        assertTrue(x.isEmpty());
    }

    /** removeAll = set difference; returns true when x was modified. */
    @Test
    public void removeAll1() {
        RangeSet x = new RangeSet();
        RangeSet y = new RangeSet();

        x.addAll(1,2,3, 10,11, 20);
        y.addAll(  2,   11,12, 19,20,21);

        assertTrue(x.removeAll(y));

        RangeSet z = new RangeSet();
        z.addAll(1,3,10);
        assertEquals(x,z);
    }

    /** Difference across many partially-overlapping ranges. */
    @Test
    public void removeAll2() {
        RangeSet x = new RangeSet();
        RangeSet y = new RangeSet();

        x.addAll(1,2,3,4,5,6,7,8,9,10, 13,14,15,16,17,18,19,20);
        y.addAll(  2,3,  5,6,  9,10,11,12,13, 15,16, 18,19);

        assertTrue(x.removeAll(y));

        RangeSet z = new RangeSet();
        z.addAll(1,4,7,8,14,17,20);
        assertEquals(x,z);
    }

    /** Removing an empty set is a no-op and must report no modification. */
    @Test
    public void removeAll3() {
        RangeSet x = new RangeSet();
        RangeSet y = new RangeSet();

        x.addAll(1,2,3,4,5);

        assertFalse(x.removeAll(y));
    }

    /**
     * Round-trips a fingerprint XML resource through
     * {@link FileFingerprintStorage#load} and pins its full toString form.
     */
    @Test
    public void deserialize() throws Exception {
        assertEquals("Fingerprint[" +
                "original=stapler/org.kohsuke.stapler:stapler-jelly #123," +
                "hash=069484c9e963cc615c51278327da8eab," +
                "fileName=org.kohsuke.stapler:stapler-jelly-1.207.jar," +
                "timestamp=2013-05-21 19:20:03.534 UTC," +
                "usages={stuff=[304,306),[307,324),[328,330), stuff/test:stuff=[2,67),[72,77),[84,223),[228,229),[232,268)}," +
                "facets=[]]",
                FileFingerprintStorage.load(new File(FingerprintTest.class.getResource("fingerprint.xml").toURI())).toString());
    }

    /** A fingerprint XML with no usages section must still load with non-null usages. */
    @Test
    public void loadFingerprintWithoutUsages() throws Exception {
        Fingerprint fp = FileFingerprintStorage.load(new File(FingerprintTest.class.getResource("fingerprintWithoutUsages.xml").toURI()));
        assertNotNull(fp);
        assertEquals("test:jenkinsfile-example-1.0-SNAPSHOT.jar", fp.getFileName());
        assertNotNull(fp.getUsages());
    }

    /**
     * Exhaustively exercises {@link RangeSet#fromString(String, boolean)}.
     * The boolean argument selects lenient parsing: with true, malformed
     * pieces are skipped; with false, they raise IllegalArgumentException
     * (checked here via {@link #expectIAE}).
     */
    @Test
    public void fromString() throws Exception {
        //
        // Single
        //

        // Numbers
        assertThat(RangeSet.fromString("1", true).toString(), equalTo("[1,2)"));
        assertThat(RangeSet.fromString("1", false).toString(), equalTo("[1,2)"));
        assertThat(RangeSet.fromString("+1", true).toString(), equalTo("[1,2)"));
        assertThat(RangeSet.fromString("+1", false).toString(), equalTo("[1,2)"));

        // Zero
        assertThat(RangeSet.fromString("0", true).toString(), equalTo("[0,1)"));
        assertThat(RangeSet.fromString("0", false).toString(), equalTo("[0,1)"));
        assertThat(RangeSet.fromString("+0", true).toString(), equalTo("[0,1)"));
        assertThat(RangeSet.fromString("+0", false).toString(), equalTo("[0,1)"));

        // Negative number
        assertThat(RangeSet.fromString("-1", true).toString(), equalTo(""));
        assertThat(expectIAE("-1", "Unable to parse '-1', expected string with a range M-N"), is(true));

        // Exceeded int number
        assertThat(RangeSet.fromString("2147483648", true).toString(), equalTo(""));
        assertThat(expectIAE("2147483648", "Unable to parse '2147483648', expected number"), is(true));

        // Invalid number
        assertThat(RangeSet.fromString("1a", true).toString(), equalTo(""));
        assertThat(expectIAE("1a", "Unable to parse '1a', expected number"), is(true));
        assertThat(RangeSet.fromString("aa", true).toString(), equalTo(""));
        assertThat(expectIAE("aa", "Unable to parse 'aa', expected number"), is(true));

        //Empty
        assertThat(RangeSet.fromString("", true).toString(), equalTo(""));
        assertThat(RangeSet.fromString("", false).toString(), equalTo(""));

        //Space
        assertThat(RangeSet.fromString(" ", true).toString(), equalTo(""));
        assertThat(expectIAE(" ", "Unable to parse ' ', expected number"), is(true));

        // Comma
        assertThat(RangeSet.fromString(",", true).toString(), equalTo(""));
        assertThat(RangeSet.fromString(",", false).toString(), equalTo(""));

        // Hyphen
        assertThat(RangeSet.fromString("-", true).toString(), equalTo(""));
        assertThat(expectIAE("-", "Unable to parse '-', expected string with a range M-N"), is(true));

        //
        // Multiple numbers
        //

        // Numbers
        assertThat(RangeSet.fromString("1,2", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(RangeSet.fromString("1,2", false).toString(), equalTo("[1,2),[2,3)"));
        assertThat(RangeSet.fromString("1,+2,5", true).toString(), equalTo("[1,2),[2,3),[5,6)"));
        assertThat(RangeSet.fromString("1,+2,5", false).toString(), equalTo("[1,2),[2,3),[5,6)"));
        assertThat(RangeSet.fromString("1,1", true).toString(), equalTo("[1,2),[1,2)"));
        assertThat(RangeSet.fromString("1,1", false).toString(), equalTo("[1,2),[1,2)"));

        // Zero
        assertThat(RangeSet.fromString("0,1,2", true).toString(), equalTo("[0,1),[1,2),[2,3)"));
        assertThat(RangeSet.fromString("0,1,2", false).toString(), equalTo("[0,1),[1,2),[2,3)"));
        assertThat(RangeSet.fromString("1,0,2", true).toString(), equalTo("[1,2),[0,1),[2,3)"));
        assertThat(RangeSet.fromString("1,0,2", false).toString(), equalTo("[1,2),[0,1),[2,3)"));
        assertThat(RangeSet.fromString("1,2,0", true).toString(), equalTo("[1,2),[2,3),[0,1)"));
        assertThat(RangeSet.fromString("1,2,0", false).toString(), equalTo("[1,2),[2,3),[0,1)"));

        // Negative number
        assertThat(RangeSet.fromString("-1,2,3", true).toString(), equalTo("[2,3),[3,4)"));
        assertThat(expectIAE("-1,2,3", "Unable to parse '-1,2,3', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString("1,-2,3", true).toString(), equalTo("[1,2),[3,4)"));
        assertThat(expectIAE("1,-2,3", "Unable to parse '1,-2,3', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString("1,2,-3", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE("1,2,-3", "Unable to parse '1,2,-3', expected string with a range M-N"), is(true));

        // Exceeded int number
        assertThat(RangeSet.fromString("2147483648,2,3", true).toString(), equalTo("[2,3),[3,4)"));
        assertThat(expectIAE("2147483648,1,2", "Unable to parse '2147483648,1,2', expected number"), is(true));
        assertThat(RangeSet.fromString("1,2147483648,3", true).toString(), equalTo("[1,2),[3,4)"));
        assertThat(expectIAE("1,2147483648,2", "Unable to parse '1,2147483648,2', expected number"), is(true));
        assertThat(RangeSet.fromString("1,2,2147483648", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE("1,2,2147483648", "Unable to parse '1,2,2147483648', expected number"), is(true));

        // Invalid number
        assertThat(RangeSet.fromString("1a,2,3", true).toString(), equalTo("[2,3),[3,4)"));
        assertThat(expectIAE("1a,1,2", "Unable to parse '1a,1,2', expected number"), is(true));
        assertThat(RangeSet.fromString("1,2a,3", true).toString(), equalTo("[1,2),[3,4)"));
        assertThat(expectIAE("1,2a,2", "Unable to parse '1,2a,2', expected number"), is(true));
        assertThat(RangeSet.fromString("1,2,3a", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE("1,2,3a", "Unable to parse '1,2,3a', expected number"), is(true));
        assertThat(RangeSet.fromString("aa,2,3", true).toString(), equalTo("[2,3),[3,4)"));
        assertThat(expectIAE("aa,1,2", "Unable to parse 'aa,1,2', expected number"), is(true));
        assertThat(RangeSet.fromString("1,aa,3", true).toString(), equalTo("[1,2),[3,4)"));
        assertThat(expectIAE("1,aa,2", "Unable to parse '1,aa,2', expected number"), is(true));
        assertThat(RangeSet.fromString("1,2,aa", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE("1,2,aa", "Unable to parse '1,2,aa', expected number"), is(true));

        //Empty
        assertThat(RangeSet.fromString(",1,2", true).toString(), equalTo(""));
        assertThat(expectIAE(",1,2", "Unable to parse ',1,2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("1,,2", true).toString(), equalTo(""));
        assertThat(expectIAE("1,,2", "Unable to parse '1,,2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("1,2,", true).toString(), equalTo(""));
        assertThat(expectIAE("1,2,", "Unable to parse '1,2,', expected correct notation M,N or M-N"), is(true));

        // Space
        assertThat(RangeSet.fromString(" ,1,2", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE(" ,1,2", "Unable to parse ' ,1,2', expected number"), is(true));
        assertThat(RangeSet.fromString("1, ,2", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE("1, ,2", "Unable to parse '1, ,2', expected number"), is(true));
        assertThat(RangeSet.fromString("1,2, ", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE("1,2, ", "Unable to parse '1,2, ', expected number"), is(true));

        // Comma
        assertThat(RangeSet.fromString(",,1,2", true).toString(), equalTo(""));
        assertThat(expectIAE(",,1,2", "Unable to parse ',,1,2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("1,,,2", true).toString(), equalTo(""));
        assertThat(expectIAE("1,,,2", "Unable to parse '1,,,2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("1,2,,", true).toString(), equalTo(""));
        assertThat(expectIAE("1,2,,", "Unable to parse '1,2,,', expected correct notation M,N or M-N"), is(true));

        // Hyphen
        assertThat(RangeSet.fromString("-,1,2", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE("-,1,2", "Unable to parse '-,1,2', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString("1,-,2", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE("1,-,2", "Unable to parse '1,-,2', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString("1,2,-", true).toString(), equalTo("[1,2),[2,3)"));
        assertThat(expectIAE("1,2,-", "Unable to parse '1,2,-', expected string with a range M-N"), is(true));

        //
        // Single range
        //

        // Numbers
        assertThat(RangeSet.fromString("1-2", true).toString(), equalTo("[1,3)"));
        assertThat(RangeSet.fromString("1-2", false).toString(), equalTo("[1,3)"));
        assertThat(RangeSet.fromString("+1-+2", true).toString(), equalTo("[1,3)"));
        assertThat(RangeSet.fromString("+1-+2", false).toString(), equalTo("[1,3)"));
        assertThat(RangeSet.fromString("1-1", true).toString(), equalTo("[1,2)"));
        assertThat(RangeSet.fromString("1-1", false).toString(), equalTo("[1,2)"));
        assertThat(RangeSet.fromString("+1-+1", true).toString(), equalTo("[1,2)"));
        assertThat(RangeSet.fromString("+1-+1", false).toString(), equalTo("[1,2)"));
        assertThat(RangeSet.fromString("1-4", true).toString(), equalTo("[1,5)"));
        assertThat(RangeSet.fromString("1-4", false).toString(), equalTo("[1,5)"));
        assertThat(RangeSet.fromString("+1-+4", true).toString(), equalTo("[1,5)"));
        assertThat(RangeSet.fromString("+1-+4", false).toString(), equalTo("[1,5)"));

        //Zero
        assertThat(RangeSet.fromString("0-1", true).toString(), equalTo("[0,2)"));
        assertThat(RangeSet.fromString("0-1", false).toString(), equalTo("[0,2)"));
        assertThat(RangeSet.fromString("+0-+1", true).toString(), equalTo("[0,2)"));
        assertThat(RangeSet.fromString("+0-+1", false).toString(), equalTo("[0,2)"));
        assertThat(RangeSet.fromString("0-2", true).toString(), equalTo("[0,3)"));
        assertThat(RangeSet.fromString("0-2", false).toString(), equalTo("[0,3)"));
        assertThat(RangeSet.fromString("+0-+2", true).toString(), equalTo("[0,3)"));
        assertThat(RangeSet.fromString("+0-+2", false).toString(), equalTo("[0,3)"));
        assertThat(RangeSet.fromString("0--1", true).toString(), equalTo(""));
        assertThat(expectIAE("0--1", "Unable to parse '0--1', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("+0--1", true).toString(), equalTo(""));
        assertThat(expectIAE("+0--1", "Unable to parse '+0--1', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("0--2", true).toString(), equalTo(""));
        assertThat(expectIAE("0--2", "Unable to parse '0--2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("+0--2", true).toString(), equalTo(""));
        assertThat(expectIAE("+0--2", "Unable to parse '+0--2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("1-0", true).toString(), equalTo(""));
        assertThat(expectIAE("1-0", "Unable to parse '1-0', expected string with a range M-N where M<N"), is(true));
        assertThat(RangeSet.fromString("+1-+0", true).toString(), equalTo(""));
        assertThat(expectIAE("+1-+0", "Unable to parse '+1-+0', expected string with a range M-N where M<N"), is(true));
        assertThat(RangeSet.fromString("2-0", true).toString(), equalTo(""));
        assertThat(expectIAE("2-0", "Unable to parse '2-0', expected string with a range M-N where M<N"), is(true));
        assertThat(RangeSet.fromString("+2-+0", true).toString(), equalTo(""));
        assertThat(expectIAE("+2-+0", "Unable to parse '+2-+0', expected string with a range M-N where M<N"), is(true));
        assertThat(RangeSet.fromString("-1-0", true).toString(), equalTo(""));
        assertThat(expectIAE("-1-0", "Unable to parse '-1-0', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("-1-+0", true).toString(), equalTo(""));
        assertThat(expectIAE("-1-+0", "Unable to parse '-1-+0', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("-2-0", true).toString(), equalTo(""));
        assertThat(expectIAE("-2-0", "Unable to parse '-2-0', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("-2-+0", true).toString(), equalTo(""));
        assertThat(expectIAE("-2-+0", "Unable to parse '-2-+0', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("0-0", true).toString(), equalTo("[0,1)"));
        assertThat(RangeSet.fromString("0-0", false).toString(), equalTo("[0,1)"));
        assertThat(RangeSet.fromString("+0-+0", true).toString(), equalTo("[0,1)"));
        assertThat(RangeSet.fromString("+0-+0", false).toString(), equalTo("[0,1)"));

        // Negative number
        assertThat(RangeSet.fromString("-1-1", true).toString(), equalTo(""));
        assertThat(expectIAE("-1-1", "Unable to parse '-1-1', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("-1-+1", true).toString(), equalTo(""));
        assertThat(expectIAE("-1-+1", "Unable to parse '-1-+1', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("-1-2", true).toString(), equalTo(""));
        assertThat(expectIAE("-1-2", "Unable to parse '-1-2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("-1-+2", true).toString(), equalTo(""));
        assertThat(expectIAE("-1-+2", "Unable to parse '-1-+2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("1--1", true).toString(), equalTo(""));
        assertThat(expectIAE("1--1", "Unable to parse '1--1', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("+1--1", true).toString(), equalTo(""));
        assertThat(expectIAE("+1--1", "Unable to parse '+1--1', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("1--2", true).toString(), equalTo(""));
        assertThat(expectIAE("1--2", "Unable to parse '1--2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("+1--2", true).toString(), equalTo(""));
        assertThat(expectIAE("+1--2", "Unable to parse '+1--2', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("-1--1", true).toString(), equalTo(""));
        assertThat(expectIAE("-1--1", "Unable to parse '-1--1', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("-2--1", true).toString(), equalTo(""));
        assertThat(expectIAE("-2--1", "Unable to parse '-2--1', expected correct notation M,N or M-N"), is(true));

        // Exceeded int number
        assertThat(RangeSet.fromString("0-2147483648", true).toString(), equalTo(""));
        assertThat(expectIAE("0-2147483648", "Unable to parse '0-2147483648', expected number"), is(true));
        assertThat(RangeSet.fromString("2147483648-0", true).toString(), equalTo(""));
        assertThat(expectIAE("2147483648-0", "Unable to parse '2147483648-0', expected number"), is(true));
        assertThat(RangeSet.fromString("2147483648-2147483648", true).toString(), equalTo(""));
        assertThat(expectIAE("2147483648-2147483648", "Unable to parse '2147483648-2147483648', expected number"), is(true));

        // Invalid number
        assertThat(RangeSet.fromString("1-2a", true).toString(), equalTo(""));
        assertThat(expectIAE("1-2a", "Unable to parse '1-2a', expected number"), is(true));
        assertThat(RangeSet.fromString("2a-2", true).toString(), equalTo(""));
        assertThat(expectIAE("2a-2", "Unable to parse '2a-2', expected number"), is(true));
        assertThat(RangeSet.fromString("2a-2a", true).toString(), equalTo(""));
        assertThat(expectIAE("2a-2a", "Unable to parse '2a-2a', expected number"), is(true));
        assertThat(RangeSet.fromString("aa-2", true).toString(), equalTo(""));
        assertThat(expectIAE("aa-2", "Unable to parse 'aa-2', expected number"), is(true));
        assertThat(RangeSet.fromString("1-aa", true).toString(), equalTo(""));
        assertThat(expectIAE("1-aa", "Unable to parse '1-aa', expected number"), is(true));
        assertThat(RangeSet.fromString("aa-aa", true).toString(), equalTo(""));
        assertThat(expectIAE("aa-aa", "Unable to parse 'aa-aa', expected number"), is(true));

        // Empty
        assertThat(RangeSet.fromString("-1", true).toString(), equalTo(""));
        assertThat(expectIAE("-1", "Unable to parse '-1', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString("1-", true).toString(), equalTo(""));
        assertThat(expectIAE("1-", "Unable to parse '1-', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString("-", true).toString(), equalTo(""));
        assertThat(expectIAE("-", "Unable to parse '-', expected string with a range M-N"), is(true));

        // Space
        assertThat(RangeSet.fromString(" -1", true).toString(), equalTo(""));
        assertThat(expectIAE(" -1", "Unable to parse ' -1', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString("1- ", true).toString(), equalTo(""));
        assertThat(expectIAE("1- ", "Unable to parse '1- ', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString(" - ", true).toString(), equalTo(""));
        assertThat(expectIAE(" - ", "Unable to parse ' - ', expected string with a range M-N"), is(true));

        // Comma
        assertThat(RangeSet.fromString(",-1", true).toString(), equalTo(""));
        assertThat(expectIAE(",-1", "Unable to parse ',-1', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString("1-,", true).toString(), equalTo(""));
        assertThat(expectIAE("1-,", "Unable to parse '1-,', expected string with a range M-N"), is(true));
        assertThat(RangeSet.fromString(",-,", true).toString(), equalTo(""));
        assertThat(expectIAE(",-,", "Unable to parse ',-,', expected string with a range M-N"), is(true));

        // Hyphen
        assertThat(RangeSet.fromString("--1", true).toString(), equalTo(""));
        assertThat(expectIAE("--1", "Unable to parse '--1', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("1--", true).toString(), equalTo(""));
        assertThat(expectIAE("1--", "Unable to parse '1--', expected correct notation M,N or M-N"), is(true));
        assertThat(RangeSet.fromString("---", true).toString(), equalTo(""));
        assertThat(expectIAE("---", "Unable to parse '---', expected correct notation M,N or M-N"), is(true));

        // Inverse range
        assertThat(RangeSet.fromString("2-1", true).toString(), equalTo(""));
        assertThat(expectIAE("2-1", "Unable to parse '2-1', expected string with a range M-N where M<N"), is(true));
        assertThat(RangeSet.fromString("10-1", true).toString(), equalTo(""));
        assertThat(expectIAE("10-1", "Unable to parse '10-1', expected string with a range M-N where M<N"), is(true));
        assertThat(RangeSet.fromString("-1--2", true).toString(), equalTo(""));
        assertThat(expectIAE("-1--2", "Unable to parse '-1--2', expected correct notation M,N or M-N"), is(true));

        // Invalid range
        assertThat(RangeSet.fromString("1-3-", true).toString(), equalTo(""));
        assertThat(expectIAE("1-3-", "Unable to parse '1-3-', expected correct notation M,N or M-N"), is(true));

        //
        // Multiple ranges
        //
        assertThat(RangeSet.fromString("1-3,3-5", true).toString(), equalTo("[1,4),[3,6)"));
        assertThat(RangeSet.fromString("1-3,4-6", true).toString(), equalTo("[1,4),[4,7)"));
        assertThat(RangeSet.fromString("1-3,5-7", true).toString(), equalTo("[1,4),[5,8)"));
        assertThat(RangeSet.fromString("1-3,2-3", true).toString(), equalTo("[1,4),[2,4)"));
        assertThat(RangeSet.fromString("1-5,2-3", true).toString(), equalTo("[1,6),[2,4)"));
    }

    /**
     * Runs strict (non-skipping) parsing and reports whether it threw an
     * {@link IllegalArgumentException} whose message matches {@code msg}.
     *
     * @param expr  the range expression handed to {@code fromString(expr, false)}.
     * @param msg   expected message fragment; empty means "any empty message".
     * @return true when the expected IAE was observed; fails the test when no
     *         exception (or a non-IAE) was raised.
     */
    private boolean expectIAE(final String expr, final String msg) {
        try {
            RangeSet.fromString(expr, false);
        } catch (Throwable e) {
            if (e instanceof IllegalArgumentException) {
                if (e.getMessage().isEmpty()) {
                    return msg.isEmpty();
                } else {
                    return msg.isEmpty() ? false : e.getMessage().contains(msg);
                }
            }
        }
        // Exception wasn't thrown, or the thrown exception wasn't an IllegalArgumentException.
        fail("Should never be here");
        return false;
    }
}
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.benchmark.trove;

import gnu.trove.map.custom_hash.TObjectIntCustomHashMap;
import gnu.trove.map.hash.THashMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import gnu.trove.strategy.IdentityHashingStrategy;
import jsr166y.ThreadLocalRandom;
import org.elasticsearch.common.RandomStringGenerator;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.trove.StringIdentityHashingStrategy;
import org.elasticsearch.common.unit.SizeValue;

import java.util.HashMap;
import java.util.IdentityHashMap;

/**
 * Micro-benchmark comparing "adjust-or-put counter" workloads across Trove
 * primitive/custom maps and the JDK's HashMap/IdentityHashMap, for both
 * String and int keys. Each candidate map performs PUT_OPERATIONS counter
 * bumps over a small, fixed key set, repeated ITERATIONS times, and the
 * average wall-clock time per iteration is printed.
 */
public class StringMapAdjustOrPutBenchmark {

    public static void main(String[] args) {

        // Benchmark knobs: key-set size, key length, ops per iteration,
        // iteration count, and whether maps are cleared and reused (true)
        // or re-allocated each iteration (false).
        int NUMBER_OF_KEYS = (int) SizeValue.parseSizeValue("20").singles();
        int STRING_SIZE = 5;
        long PUT_OPERATIONS = SizeValue.parseSizeValue("5m").singles();
        long ITERATIONS = 10;
        boolean REUSE = true;


        String[] values = new String[NUMBER_OF_KEYS];
        for (int i = 0; i < values.length; i++) {
            values[i] = RandomStringGenerator.randomAlphabetic(STRING_SIZE);
        }

        StopWatch stopWatch;

        // --- Trove TObjectIntHashMap<String> (primitive int values, no boxing) ---
        stopWatch = new StopWatch().start();
        TObjectIntHashMap<String> map = new TObjectIntHashMap<String>();
        for (long iter = 0; iter < ITERATIONS; iter++) {
            if (REUSE) {
                map.clear();
            } else {
                map = new TObjectIntHashMap<String>();
            }
            for (long i = 0; i < PUT_OPERATIONS; i++) {
                map.adjustOrPutValue(values[(int) (i % NUMBER_OF_KEYS)], 1, 1);
            }
        }
        map.clear();
        map = null;

        stopWatch.stop();
        System.out.println("TObjectIntHashMap: " + stopWatch.totalTime() + ", " + stopWatch.totalTime().millisFrac() / ITERATIONS + "ms");

        // --- Trove custom-hash map with reference-equality on Strings ---
        stopWatch = new StopWatch().start();
        TObjectIntCustomHashMap<String> iMap = new TObjectIntCustomHashMap<String>(new StringIdentityHashingStrategy());
        for (long iter = 0; iter < ITERATIONS; iter++) {
            if (REUSE) {
                iMap.clear();
            } else {
                iMap = new TObjectIntCustomHashMap<String>(new StringIdentityHashingStrategy());
            }
            for (long i = 0; i < PUT_OPERATIONS; i++) {
                iMap.adjustOrPutValue(values[(int) (i % NUMBER_OF_KEYS)], 1, 1);
            }
        }
        stopWatch.stop();
        System.out.println("TObjectIntCustomHashMap(StringIdentity): " + stopWatch.totalTime() + ", " + stopWatch.totalTime().millisFrac() / ITERATIONS + "ms");
        iMap.clear();
        iMap = null;

        // --- Same map but with the generic identity strategy ---
        stopWatch = new StopWatch().start();
        iMap = new TObjectIntCustomHashMap<String>(new IdentityHashingStrategy<String>());
        for (long iter = 0; iter < ITERATIONS; iter++) {
            if (REUSE) {
                iMap.clear();
            } else {
                iMap = new TObjectIntCustomHashMap<String>(new IdentityHashingStrategy<String>());
            }
            for (long i = 0; i < PUT_OPERATIONS; i++) {
                iMap.adjustOrPutValue(values[(int) (i % NUMBER_OF_KEYS)], 1, 1);
            }
        }
        stopWatch.stop();
        System.out.println("TObjectIntCustomHashMap(PureIdentity): " + stopWatch.totalTime() + ", " + stopWatch.totalTime().millisFrac() / ITERATIONS + "ms");
        iMap.clear();
        iMap = null;

        // --- Trove THashMap with a boxed mutable-counter entry ---
        stopWatch = new StopWatch().start();
        THashMap<String, StringEntry> tMap = new THashMap<String, StringEntry>();
        for (long iter = 0; iter < ITERATIONS; iter++) {
            if (REUSE) {
                tMap.clear();
            } else {
                tMap = new THashMap<String, StringEntry>();
            }
            for (long i = 0; i < PUT_OPERATIONS; i++) {
                String key = values[(int) (i % NUMBER_OF_KEYS)];
                StringEntry stringEntry = tMap.get(key);
                if (stringEntry == null) {
                    stringEntry = new StringEntry(key, 1);
                    tMap.put(key, stringEntry);
                } else {
                    stringEntry.counter++;
                }
            }
        }
        tMap.clear();
        tMap = null;

        stopWatch.stop();
        System.out.println("THashMap: " + stopWatch.totalTime() + ", " + stopWatch.totalTime().millisFrac() / ITERATIONS + "ms");

        // --- JDK HashMap baseline ---
        stopWatch = new StopWatch().start();
        HashMap<String, StringEntry> hMap = new HashMap<String, StringEntry>();
        for (long iter = 0; iter < ITERATIONS; iter++) {
            if (REUSE) {
                hMap.clear();
            } else {
                hMap = new HashMap<String, StringEntry>();
            }
            for (long i = 0; i < PUT_OPERATIONS; i++) {
                String key = values[(int) (i % NUMBER_OF_KEYS)];
                StringEntry stringEntry = hMap.get(key);
                if (stringEntry == null) {
                    stringEntry = new StringEntry(key, 1);
                    hMap.put(key, stringEntry);
                } else {
                    stringEntry.counter++;
                }
            }
        }
        hMap.clear();
        hMap = null;

        stopWatch.stop();
        System.out.println("HashMap: " + stopWatch.totalTime() + ", " + stopWatch.totalTime().millisFrac() / ITERATIONS + "ms");

        // --- JDK IdentityHashMap (reference-equality keys) ---
        stopWatch = new StopWatch().start();
        IdentityHashMap<String, StringEntry> ihMap = new IdentityHashMap<String, StringEntry>();
        for (long iter = 0; iter < ITERATIONS; iter++) {
            if (REUSE) {
                ihMap.clear();
            } else {
                // BUG FIX: this branch previously re-created hMap (the plain
                // HashMap, already nulled out above) instead of ihMap, which
                // would both NPE and invalidate this measurement when
                // REUSE == false.
                ihMap = new IdentityHashMap<String, StringEntry>();
            }
            for (long i = 0; i < PUT_OPERATIONS; i++) {
                String key = values[(int) (i % NUMBER_OF_KEYS)];
                StringEntry stringEntry = ihMap.get(key);
                if (stringEntry == null) {
                    stringEntry = new StringEntry(key, 1);
                    ihMap.put(key, stringEntry);
                } else {
                    stringEntry.counter++;
                }
            }
        }
        stopWatch.stop();
        System.out.println("IdentityHashMap: " + stopWatch.totalTime() + ", " + stopWatch.totalTime().millisFrac() / ITERATIONS + "ms");

        ihMap.clear();
        ihMap = null;

        // int-keyed workloads, with random int keys.
        int[] iValues = new int[NUMBER_OF_KEYS];
        // (was bounded by values.length; same size, but bound by the array
        // actually being filled to keep the loop correct by construction)
        for (int i = 0; i < iValues.length; i++) {
            iValues[i] = ThreadLocalRandom.current().nextInt();
        }

        // --- Trove TIntIntHashMap (fully primitive) ---
        stopWatch = new StopWatch().start();
        TIntIntHashMap intMap = new TIntIntHashMap();
        for (long iter = 0; iter < ITERATIONS; iter++) {
            if (REUSE) {
                intMap.clear();
            } else {
                intMap = new TIntIntHashMap();
            }
            for (long i = 0; i < PUT_OPERATIONS; i++) {
                int key = iValues[(int) (i % NUMBER_OF_KEYS)];
                intMap.adjustOrPutValue(key, 1, 1);
            }
        }
        stopWatch.stop();
        System.out.println("TIntIntHashMap: " + stopWatch.totalTime() + ", " + stopWatch.totalTime().millisFrac() / ITERATIONS + "ms");

        intMap.clear();
        intMap = null;

        // --- Trove TIntObjectHashMap with a boxed mutable-counter entry ---
        stopWatch = new StopWatch().start();
        TIntObjectHashMap<IntEntry> tIntMap = new TIntObjectHashMap<IntEntry>();
        for (long iter = 0; iter < ITERATIONS; iter++) {
            if (REUSE) {
                tIntMap.clear();
            } else {
                tIntMap = new TIntObjectHashMap<IntEntry>();
            }
            for (long i = 0; i < PUT_OPERATIONS; i++) {
                int key = iValues[(int) (i % NUMBER_OF_KEYS)];
                IntEntry intEntry = tIntMap.get(key);
                if (intEntry == null) {
                    intEntry = new IntEntry(key, 1);
                    tIntMap.put(key, intEntry);
                } else {
                    intEntry.counter++;
                }
            }
        }
        tIntMap.clear();
        tIntMap = null;

        stopWatch.stop();
        System.out.println("TIntObjectHashMap: " + stopWatch.totalTime() + ", " + stopWatch.totalTime().millisFrac() / ITERATIONS + "ms");
    }


    /** String key with a mutable counter, used by the boxed-value benchmarks. */
    static class StringEntry {
        String key;
        int counter;

        StringEntry(String key, int counter) {
            this.key = key;
            this.counter = counter;
        }
    }

    /** int key with a mutable counter, used by the boxed-value benchmarks. */
    static class IntEntry {
        int key;
        int counter;

        IntEntry(int key, int counter) {
            this.key = key;
            this.counter = counter;
        }
    }
}
/* * Copyright (c) 2004-2022, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.hisp.dhis.user.hibernate; import static com.google.common.base.Preconditions.checkNotNull; import static java.lang.String.format; import static java.time.ZoneId.systemDefault; import static java.util.stream.Collectors.toSet; import java.time.LocalDate; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.CriteriaUpdate; import javax.persistence.criteria.JoinType; import javax.persistence.criteria.Root; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.hibernate.SessionFactory; import org.hibernate.annotations.QueryHints; import org.hibernate.query.Query; import org.hisp.dhis.common.IdentifiableObjectUtils; import org.hisp.dhis.common.hibernate.HibernateIdentifiableObjectStore; import org.hisp.dhis.commons.collection.CollectionUtils; import org.hisp.dhis.commons.util.SqlHelper; import org.hisp.dhis.commons.util.TextUtils; import org.hisp.dhis.organisationunit.OrganisationUnit; import org.hisp.dhis.query.JpaQueryUtils; import org.hisp.dhis.query.Order; import org.hisp.dhis.query.QueryUtils; import org.hisp.dhis.schema.Schema; import org.hisp.dhis.schema.SchemaService; import org.hisp.dhis.security.acl.AclService; import org.hisp.dhis.user.CurrentUserGroupInfo; import org.hisp.dhis.user.CurrentUserService; import org.hisp.dhis.user.User; import org.hisp.dhis.user.UserAccountExpiryInfo; import org.hisp.dhis.user.UserCredentials; import org.hisp.dhis.user.UserInvitationStatus; import org.hisp.dhis.user.UserQueryParams; import org.hisp.dhis.user.UserStore; import 
org.springframework.context.ApplicationEventPublisher;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;

/**
 * Hibernate-backed implementation of {@link UserStore}. Builds a dynamic HQL
 * query for filtered user lookups and counts ({@link #getUserQuery}), and uses
 * Criteria / native queries for maintenance tasks such as disabling inactive
 * accounts.
 *
 * @author Nguyen Hong Duc
 */
@Slf4j
@Repository( "org.hisp.dhis.user.UserStore" )
public class HibernateUserStore
    extends HibernateIdentifiableObjectStore<User>
    implements UserStore
{
    /** Name of the "disabled" credentials property; also reused as the query parameter name. */
    public static final String DISABLED_COLUMN = "disabled";

    /** Resolves the {@link User} schema when converting order strings into typed orders. */
    private final SchemaService schemaService;

    public HibernateUserStore( SessionFactory sessionFactory, JdbcTemplate jdbcTemplate,
        ApplicationEventPublisher publisher, CurrentUserService currentUserService,
        AclService aclService, SchemaService schemaService )
    {
        // NOTE(review): the trailing 'true' flag is passed straight to the superclass
        // constructor; its meaning is defined in HibernateIdentifiableObjectStore — confirm there.
        super( sessionFactory, jdbcTemplate, publisher, User.class, currentUserService, aclService, true );

        checkNotNull( schemaService );

        this.schemaService = schemaService;
    }

    /**
     * Saves the user and invalidates any cached user group info for the
     * user's username, since group/sharing state may have changed.
     */
    @Override
    public void save( User user, boolean clearSharing )
    {
        super.save( user, clearSharing );

        currentUserService.invalidateUserGroupCache( user.getUsername() );
    }

    /**
     * Returns the users matching the given query parameters.
     *
     * @param params the user query parameters.
     * @param orders order strings, may be null (converted against the User schema).
     * @return the matching users.
     */
    @Override
    public List<User> getUsers( UserQueryParams params, @Nullable List<String> orders )
    {
        return extractUserQueryUsers( getUserQuery( params, orders, false ).list() );
    }

    @Override
    public List<User> getUsers( UserQueryParams params )
    {
        return getUsers( params, null );
    }

    @Override
    public List<User> getExpiringUsers( UserQueryParams params )
    {
        return extractUserQueryUsers( getUserQuery( params, null, false ).list() );
    }

    /**
     * Returns expiry info for enabled accounts that have an email address and
     * whose account expiry falls within the next {@code inDays} days.
     */
    @Override
    public List<UserAccountExpiryInfo> getExpiringUserAccounts( int inDays )
    {
        // Start of the day 'inDays' days from now, in the server's default time zone
        Date expiryLookAheadDate = Date.from( LocalDate.now().plusDays( inDays )
            .atStartOfDay( systemDefault() ).toInstant() );
        String hql = "select new org.hisp.dhis.user.UserAccountExpiryInfo(uc.username, u.email, uc.accountExpiry) " +
            "from User u inner join u.userCredentials uc " +
            "where u.email is not null and uc.disabled = false and uc.accountExpiry <= :expiryLookAheadDate";
        return getSession().createQuery( hql, UserAccountExpiryInfo.class )
            .setParameter( "expiryLookAheadDate", expiryLookAheadDate )
            .list();
    }

    @Override
    public int getUserCount( UserQueryParams params )
    {
        Long count = (Long) getUserQuery( params, null, true ).uniqueResult();
        return count != null ? count.intValue() : 0;
    }

    /**
     * Extracts {@link User} instances from a query result list. A row is
     * either a plain {@code User} or, when the select clause also contains
     * order expressions (see {@link #getUserQuery}), an {@code Object[]}
     * whose first element is the user.
     */
    @Nonnull
    private List<User> extractUserQueryUsers( @Nonnull List<?> result )
    {
        if ( result.isEmpty() )
        {
            return Collections.emptyList();
        }

        final List<User> users = new ArrayList<>( result.size() );
        for ( Object o : result )
        {
            if ( o instanceof User )
            {
                users.add( (User) o );
            }
            else if ( o.getClass().isArray() )
            {
                users.add( (User) ((Object[]) o)[0] );
            }
        }
        return users;
    }

    /**
     * Builds the HQL query for the given parameters and binds all parameter
     * values. The where-clause fragments are appended in a fixed order; the
     * parameter-binding section below must stay in sync with the fragments
     * appended above it.
     *
     * @param params the filter parameters.
     * @param orders order strings, applied only when {@code count} is false.
     * @param count when true a {@code select count(distinct u)} query is built
     *        instead of a select query (no fetch joins, no ordering, no paging).
     * @return the ready-to-execute query.
     */
    private Query getUserQuery( UserQueryParams params, List<String> orders, boolean count )
    {
        SqlHelper hlp = new SqlHelper();

        List<Order> convertedOrder = null;
        String hql = null;

        if ( count )
        {
            hql = "select count(distinct u) ";
        }
        else
        {
            Schema userSchema = schemaService.getSchema( User.class );
            convertedOrder = QueryUtils.convertOrderStrings( orders, userSchema );

            // Ordered columns are added to the select list alongside 'distinct u'
            hql = Stream.of( "select distinct u", JpaQueryUtils.createSelectOrderExpression( convertedOrder, "u" ) )
                .filter( Objects::nonNull ).collect( Collectors.joining( "," ) );
            hql += " ";
        }

        hql += "from User u ";

        if ( count )
        {
            hql += "inner join u.userCredentials uc ";
        }
        else
        {
            // 'fetch' loads the credentials eagerly together with the users
            hql += "inner join fetch u.userCredentials uc ";
        }

        if ( params.isPrefetchUserGroups() && !count )
        {
            hql += "left join fetch u.groups g ";
        }
        else
        {
            hql += "left join u.groups g ";
        }

        if ( params.hasOrganisationUnits() )
        {
            hql += "left join u.organisationUnits ou ";

            if ( params.isIncludeOrgUnitChildren() )
            {
                hql += hlp.whereAnd() + " (";

                // One 'like' per unit against the materialized path; the bound
                // pattern ("%/<uid>%", see below) matches the unit and its subtree
                for ( OrganisationUnit ou : params.getOrganisationUnits() )
                {
                    hql += format( "ou.path like :ou%s or ", ou.getUid() );
                }

                hql = TextUtils.removeLastOr( hql ) + ")";
            }
            else
            {
                hql += hlp.whereAnd() + " ou.id in (:ouIds) ";
            }
        }

        if ( params.hasDataViewOrganisationUnits() )
        {
            hql += "left join u.dataViewOrganisationUnits dwou ";

            if ( params.isIncludeOrgUnitChildren() )
            {
                hql += hlp.whereAnd() + " (";

                for ( OrganisationUnit ou : params.getDataViewOrganisationUnits() )
                {
                    hql += format( "dwou.path like :dwOu%s or ", ou.getUid() );
                }

                hql = TextUtils.removeLastOr( hql ) + ")";
            }
            else
            {
                hql += hlp.whereAnd() + " dwou.id in (:dwOuIds) ";
            }
        }

        if ( params.hasTeiSearchOrganisationUnits() )
        {
            hql += "left join u.teiSearchOrganisationUnits tsou ";

            if ( params.isIncludeOrgUnitChildren() )
            {
                hql += hlp.whereAnd() + " (";

                for ( OrganisationUnit ou : params.getTeiSearchOrganisationUnits() )
                {
                    hql += format( "tsou.path like :tsOu%s or ", ou.getUid() );
                }

                hql = TextUtils.removeLastOr( hql ) + ")";
            }
            else
            {
                hql += hlp.whereAnd() + " tsou.id in (:tsOuIds) ";
            }
        }

        if ( params.hasUserGroups() )
        {
            hql += hlp.whereAnd() + " g.id in (:userGroupIds) ";
        }

        if ( params.getDisabled() != null )
        {
            hql += hlp.whereAnd() + " uc.disabled = :disabled ";
        }

        if ( params.isNot2FA() )
        {
            // No stored secret on the credentials row
            hql += hlp.whereAnd() + " uc.secret is null ";
        }

        if ( params.getQuery() != null )
        {
            // Free-text search over full name, email and username
            hql += hlp.whereAnd() + " (" +
                "concat(lower(u.firstName),' ',lower(u.surname)) like :key " +
                "or lower(u.email) like :key " +
                "or lower(uc.username) like :key) ";
        }

        if ( params.getPhoneNumber() != null )
        {
            hql += hlp.whereAnd() + " u.phoneNumber = :phoneNumber ";
        }

        if ( params.isCanManage() && params.getUser() != null )
        {
            // Restrict to members of groups the requesting user manages (':ids' bound below)
            hql += hlp.whereAnd() + " g.id in (:ids) ";
        }

        if ( params.isAuthSubset() && params.getUser() != null )
        {
            // Only users whose authorities are a subset of the requesting user's authorities
            hql += hlp.whereAnd() + " not exists (" +
                "select uc2 from UserCredentials uc2 " +
                "inner join uc2.userAuthorityGroups ag2 " +
                "inner join ag2.authorities a " +
                "where uc2.id = uc.id " +
                "and a not in (:auths) ) ";
        }

        // TODO handle users with no user roles
        if ( params.isDisjointRoles() && params.getUser() != null )
        {
            // Only users sharing no authority groups with the requesting user
            hql += hlp.whereAnd() + " not exists (" +
                "select uc3 from UserCredentials uc3 " +
                "inner join uc3.userAuthorityGroups ag3 " +
                "where uc3.id = uc.id " +
                "and ag3.id in (:roles) ) ";
        }

        if ( params.getLastLogin() != null )
        {
            hql += hlp.whereAnd() + " uc.lastLogin >= :lastLogin ";
        }

        if ( params.getInactiveSince() != null )
        {
            hql += hlp.whereAnd() + " uc.lastLogin < :inactiveSince ";
        }

        if ( params.getPasswordLastUpdated() != null )
        {
            hql += hlp.whereAnd() + " uc.passwordLastUpdated < :passwordLastUpdated ";
        }

        if ( params.isSelfRegistered() )
        {
            hql += hlp.whereAnd() + " uc.selfRegistered = true ";
        }

        if ( UserInvitationStatus.ALL.equals( params.getInvitationStatus() ) )
        {
            hql += hlp.whereAnd() + " uc.invitation = true ";
        }

        if ( UserInvitationStatus.EXPIRED.equals( params.getInvitationStatus() ) )
        {
            // Expired invitation: a restore token exists but is past its expiry
            hql += hlp.whereAnd() + " uc.invitation = true " +
                "and uc.restoreToken is not null " +
                "and uc.restoreExpiry is not null " +
                "and uc.restoreExpiry < current_timestamp() ";
        }

        if ( !count )
        {
            String orderExpression = JpaQueryUtils.createOrderExpression( convertedOrder, "u" );
            hql += "order by " + StringUtils.defaultString( orderExpression, "u.surname, u.firstName" );
        }

        // ---------------------------------------------------------------------
        // Query parameters
        // ---------------------------------------------------------------------

        log.debug( "User query HQL: '{}'", hql );

        Query query = getQuery( hql );

        if ( params.getQuery() != null )
        {
            query.setParameter( "key", "%" + params.getQuery().toLowerCase() + "%" );
        }

        if ( params.getPhoneNumber() != null )
        {
            query.setParameter( "phoneNumber", params.getPhoneNumber() );
        }

        if ( params.isCanManage() && params.getUser() != null )
        {
            Collection<Long> managedGroups = IdentifiableObjectUtils
                .getIdentifiers( params.getUser().getManagedGroups() );

            query.setParameterList( "ids", managedGroups );
        }

        if ( params.getDisabled() != null )
        {
            // DISABLED_COLUMN ("disabled") doubles as the ':disabled' parameter name
            query.setParameter( DISABLED_COLUMN, params.getDisabled() );
        }

        if ( params.isAuthSubset() && params.getUser() != null )
        {
            Set<String> auths = params.getUser().getUserCredentials().getAllAuthorities();

            query.setParameterList( "auths", auths );
        }

        if ( params.isDisjointRoles() && params.getUser() != null )
        {
            Collection<Long> roles = IdentifiableObjectUtils
                .getIdentifiers( params.getUser().getUserCredentials().getUserAuthorityGroups() );

            query.setParameterList( "roles", roles );
        }

        if ( params.getLastLogin() != null )
        {
            query.setParameter( "lastLogin", params.getLastLogin() );
        }

        if ( params.getPasswordLastUpdated() != null )
        {
            query.setParameter( "passwordLastUpdated", params.getPasswordLastUpdated() );
        }

        if ( params.getInactiveSince() != null )
        {
            query.setParameter( "inactiveSince", params.getInactiveSince() );
        }

        if ( params.hasOrganisationUnits() )
        {
            if ( params.isIncludeOrgUnitChildren() )
            {
                for ( OrganisationUnit ou : params.getOrganisationUnits() )
                {
                    // "%/<uid>%" matches any path containing the unit, i.e. the unit and its subtree
                    query.setParameter( format( "ou%s", ou.getUid() ), "%/" + ou.getUid() + "%" );
                }
            }
            else
            {
                Collection<Long> ouIds = IdentifiableObjectUtils.getIdentifiers( params.getOrganisationUnits() );

                query.setParameterList( "ouIds", ouIds );
            }
        }

        if ( params.hasDataViewOrganisationUnits() )
        {
            if ( params.isIncludeOrgUnitChildren() )
            {
                for ( OrganisationUnit ou : params.getDataViewOrganisationUnits() )
                {
                    query.setParameter( format( "dwOu%s", ou.getUid() ), "%/" + ou.getUid() + "%" );
                }
            }
            else
            {
                Collection<Long> ouIds = IdentifiableObjectUtils
                    .getIdentifiers( params.getDataViewOrganisationUnits() );

                query.setParameterList( "dwOuIds", ouIds );
            }
        }

        if ( params.hasTeiSearchOrganisationUnits() )
        {
            if ( params.isIncludeOrgUnitChildren() )
            {
                for ( OrganisationUnit ou : params.getTeiSearchOrganisationUnits() )
                {
                    query.setParameter( format( "tsOu%s", ou.getUid() ), "%/" + ou.getUid() + "%" );
                }
            }
            else
            {
                Collection<Long> ouIds = IdentifiableObjectUtils
                    .getIdentifiers( params.getTeiSearchOrganisationUnits() );

                query.setParameterList( "tsOuIds", ouIds );
            }
        }

        if ( params.hasUserGroups() )
        {
            Collection<Long> userGroupIds = IdentifiableObjectUtils.getIdentifiers( params.getUserGroups() );

            query.setParameterList( "userGroupIds", userGroupIds );
        }

        if ( !count )
        {
            // Paging applies only to select queries
            if ( params.getFirst() != null )
            {
                query.setFirstResult( params.getFirst() );
            }

            if ( params.getMax() != null )
            {
                query.setMaxResults( params.getMax() );
            }
        }

        return query;
    }

    @Override
    public int getUserCount()
    {
        Query<Long> query = getTypedQuery( "select count(*) from User" );
        return query.uniqueResult().intValue();
    }

    @Override
    public User getUser( long id )
    {
        return getSession().get( User.class, id );
    }

    /**
     * Looks up credentials by exact username. Returns null for a null
     * username or when no match exists.
     */
    @Override
    public UserCredentials getUserCredentialsByUsername( String username )
    {
        if ( username == null )
        {
            return null;
        }

        String hql = "from UserCredentials uc where uc.username = :username";

        TypedQuery<UserCredentials> typedQuery = sessionFactory.getCurrentSession().createQuery( hql,
            UserCredentials.class );
        typedQuery.setParameter( "username", username );
        // Mark the query cacheable; username lookups are frequent
        typedQuery.setHint( QueryHints.CACHEABLE, true );

        return QueryUtils.getSingleResult( typedQuery );
    }

    /**
     * Returns the user's UID together with the UIDs of the user's groups in a
     * single query. The left join yields one row per (user, group) pair, with
     * a null group UID when the user has no groups.
     */
    @Override
    public CurrentUserGroupInfo getCurrentUserGroupInfo( long userId )
    {
        CriteriaBuilder builder = getCriteriaBuilder();
        CriteriaQuery<Object[]> query = builder.createQuery( Object[].class );
        Root<User> root = query.from( User.class );
        query.where( builder.equal( root.get( "id" ), userId ) );
        query.select( builder.array( root.get( "uid" ), root.join( "groups", JoinType.LEFT ).get( "uid" ) ) );

        List<Object[]> results = getSession().createQuery( query ).getResultList();

        CurrentUserGroupInfo currentUserGroupInfo = new CurrentUserGroupInfo();

        if ( CollectionUtils.isEmpty( results ) )
        {
            return currentUserGroupInfo;
        }

        for ( Object[] result : results )
        {
            if ( currentUserGroupInfo.getUserUID() == null )
            {
                currentUserGroupInfo.setUserUID( result[0].toString() );
            }

            if ( result[1] != null )
            {
                currentUserGroupInfo.getUserGroupUIDs().add( result[1].toString() );
            }
        }

        return currentUserGroupInfo;
    }

    /**
     * Disables all enabled accounts whose last login is on or before the
     * given instant.
     *
     * @return the number of accounts that were disabled by this call.
     */
    @Override
    public int disableUsersInactiveSince( Date inactiveSince )
    {
        CriteriaBuilder builder = getCriteriaBuilder();
        CriteriaUpdate<UserCredentials> update = builder.createCriteriaUpdate( UserCredentials.class );
        Root<UserCredentials> uc = update.from( UserCredentials.class );
        update.where( builder.and(
            // just so we do not count rows already disabled
            builder.equal( uc.get( DISABLED_COLUMN ), false ),
            builder.lessThanOrEqualTo( uc.get( "lastLogin" ), inactiveSince ) ) );
        update.set( DISABLED_COLUMN, true );
        return getSession().createQuery( update ).executeUpdate();
    }

    /**
     * Returns the email addresses of enabled accounts that have an email
     * address and whose last login falls in {@code [from, to)}.
     */
    @Override
    public Set<String> findNotifiableUsersWithLastLoginBetween( Date from, Date to )
    {
        String hql = "select u.email " +
            "from User u inner join u.userCredentials uc " +
            "where u.email is not null and uc.disabled = false and uc.lastLogin >= :from and uc.lastLogin < :to";
        return getSession().createQuery( hql, String.class )
            .setParameter( "from", from )
            .setParameter( "to", to )
            .stream().collect( toSet() );
    }

    /**
     * Returns "firstname surname" for the user with the given UID using a
     * native query against the userinfo table.
     */
    @Override
    public String getDisplayName( String userUid )
    {
        String sql = "select concat(firstname, ' ', surname) from userinfo where uid =:uid";
        Query<String> query = getSession().createNativeQuery( sql );
        query.setParameter( "uid", userUid );
        return query.getSingleResult();
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.timeseriesinsights.models;

import com.azure.core.annotation.Fluent;
import com.azure.core.annotation.JsonFlatten;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.timeseriesinsights.fluent.models.EnvironmentResourceInner;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import java.time.Duration;
import java.time.OffsetDateTime;
import java.util.List;
import java.util.Map;
import java.util.UUID;

/**
 * An environment is a set of time-series data available for query, and is the top level Azure Time Series Insights
 * resource. Gen1 environments have data retention limits.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "kind")
@JsonTypeName("Gen1")
@JsonFlatten
@Fluent
public class Gen1EnvironmentResource extends EnvironmentResourceInner {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(Gen1EnvironmentResource.class);

    /*
     * ISO8601 timespan specifying the minimum number of days the environment's
     * events will be available for query.
     */
    @JsonProperty(value = "properties.dataRetentionTime", required = true)
    private Duration dataRetentionTime;

    /*
     * The behavior the Time Series Insights service should take when the
     * environment's capacity has been exceeded. If "PauseIngress" is
     * specified, new events will not be read from the event source. If
     * "PurgeOldData" is specified, new events will continue to be read and old
     * events will be deleted from the environment. The default behavior is
     * PurgeOldData.
     */
    @JsonProperty(value = "properties.storageLimitExceededBehavior")
    private StorageLimitExceededBehavior storageLimitExceededBehavior;

    /*
     * The list of event properties which will be used to partition data in the
     * environment. Currently, only a single partition key property is
     * supported.
     */
    @JsonProperty(value = "properties.partitionKeyProperties")
    private List<TimeSeriesIdProperty> partitionKeyProperties;

    /*
     * Provisioning state of the resource.
     */
    @JsonProperty(value = "properties.provisioningState", access = JsonProperty.Access.WRITE_ONLY)
    private ProvisioningState provisioningState;

    /*
     * The time the resource was created.
     */
    @JsonProperty(value = "properties.creationTime", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime creationTime;

    /*
     * An id used to access the environment data, e.g. to query the
     * environment's events or upload reference data for the environment.
     */
    @JsonProperty(value = "properties.dataAccessId", access = JsonProperty.Access.WRITE_ONLY)
    private UUID dataAccessId;

    /*
     * The fully qualified domain name used to access the environment data,
     * e.g. to query the environment's events or upload reference data for the
     * environment.
     */
    @JsonProperty(value = "properties.dataAccessFqdn", access = JsonProperty.Access.WRITE_ONLY)
    private String dataAccessFqdn;

    /*
     * An object that represents the status of the environment, and its
     * internal state in the Time Series Insights service.
     */
    @JsonProperty(value = "properties.status", access = JsonProperty.Access.WRITE_ONLY)
    private EnvironmentStatus status;

    /**
     * Get the dataRetentionTime property: ISO8601 timespan specifying the minimum number of days the environment's
     * events will be available for query.
     *
     * @return the dataRetentionTime value.
     */
    public Duration dataRetentionTime() {
        return this.dataRetentionTime;
    }

    /**
     * Set the dataRetentionTime property: ISO8601 timespan specifying the minimum number of days the environment's
     * events will be available for query.
     *
     * @param dataRetentionTime the dataRetentionTime value to set.
     * @return the Gen1EnvironmentResource object itself.
     */
    public Gen1EnvironmentResource withDataRetentionTime(Duration dataRetentionTime) {
        this.dataRetentionTime = dataRetentionTime;
        return this;
    }

    /**
     * Get the storageLimitExceededBehavior property: The behavior the Time Series Insights service should take when the
     * environment's capacity has been exceeded. If "PauseIngress" is specified, new events will not be read from the
     * event source. If "PurgeOldData" is specified, new events will continue to be read and old events will be deleted
     * from the environment. The default behavior is PurgeOldData.
     *
     * @return the storageLimitExceededBehavior value.
     */
    public StorageLimitExceededBehavior storageLimitExceededBehavior() {
        return this.storageLimitExceededBehavior;
    }

    /**
     * Set the storageLimitExceededBehavior property: The behavior the Time Series Insights service should take when the
     * environment's capacity has been exceeded. If "PauseIngress" is specified, new events will not be read from the
     * event source. If "PurgeOldData" is specified, new events will continue to be read and old events will be deleted
     * from the environment. The default behavior is PurgeOldData.
     *
     * @param storageLimitExceededBehavior the storageLimitExceededBehavior value to set.
     * @return the Gen1EnvironmentResource object itself.
     */
    public Gen1EnvironmentResource withStorageLimitExceededBehavior(
        StorageLimitExceededBehavior storageLimitExceededBehavior) {
        this.storageLimitExceededBehavior = storageLimitExceededBehavior;
        return this;
    }

    /**
     * Get the partitionKeyProperties property: The list of event properties which will be used to partition data in the
     * environment. Currently, only a single partition key property is supported.
     *
     * @return the partitionKeyProperties value.
     */
    public List<TimeSeriesIdProperty> partitionKeyProperties() {
        return this.partitionKeyProperties;
    }

    /**
     * Set the partitionKeyProperties property: The list of event properties which will be used to partition data in the
     * environment. Currently, only a single partition key property is supported.
     *
     * @param partitionKeyProperties the partitionKeyProperties value to set.
     * @return the Gen1EnvironmentResource object itself.
     */
    public Gen1EnvironmentResource withPartitionKeyProperties(List<TimeSeriesIdProperty> partitionKeyProperties) {
        this.partitionKeyProperties = partitionKeyProperties;
        return this;
    }

    /**
     * Get the provisioningState property: Provisioning state of the resource.
     *
     * @return the provisioningState value.
     */
    public ProvisioningState provisioningState() {
        return this.provisioningState;
    }

    /**
     * Get the creationTime property: The time the resource was created.
     *
     * @return the creationTime value.
     */
    public OffsetDateTime creationTime() {
        return this.creationTime;
    }

    /**
     * Get the dataAccessId property: An id used to access the environment data, e.g. to query the environment's events
     * or upload reference data for the environment.
     *
     * @return the dataAccessId value.
     */
    public UUID dataAccessId() {
        return this.dataAccessId;
    }

    /**
     * Get the dataAccessFqdn property: The fully qualified domain name used to access the environment data, e.g. to
     * query the environment's events or upload reference data for the environment.
     *
     * @return the dataAccessFqdn value.
     */
    public String dataAccessFqdn() {
        return this.dataAccessFqdn;
    }

    /**
     * Get the status property: An object that represents the status of the environment, and its internal state in the
     * Time Series Insights service.
     *
     * @return the status value.
     */
    public EnvironmentStatus status() {
        return this.status;
    }

    /** {@inheritDoc} */
    @Override
    public Gen1EnvironmentResource withSku(Sku sku) {
        super.withSku(sku);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public Gen1EnvironmentResource withLocation(String location) {
        super.withLocation(location);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public Gen1EnvironmentResource withTags(Map<String, String> tags) {
        super.withTags(tags);
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    @Override
    public void validate() {
        super.validate();
        if (dataRetentionTime() == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        "Missing required property dataRetentionTime in model Gen1EnvironmentResource"));
        }
        if (partitionKeyProperties() != null) {
            // Method reference instead of 'e -> e.validate()' lambda
            partitionKeyProperties().forEach(TimeSeriesIdProperty::validate);
        }
        if (status() != null) {
            status().validate();
        }
    }
}
package editor;

import hyperGraphs.HLH;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.event.MouseEvent;
import java.awt.geom.AffineTransform;
import java.util.ArrayList;

import javax.swing.JPanel;

import rectangularBoard.Path;
import util.Logger;

/**
 * Panel that shows all floor layouts stacked vertically at a reduced scale,
 * and lets the user draw arrows between developed paths on different floors
 * to create multi-floor relations (left mouse button = HLH.KIND_ACC relation,
 * any other button = HLH.KIND_VIS relation).
 */
public class FloorsEditor extends JPanel {

    /** Vertical gap between consecutive floor previews, in unscaled pixels. */
    private static final int Y_LAYOUT_GAP = 50;

    /** Unscaled height of a single floor layout. */
    private static final int Y_LAYOUT_SIZE = MainWindow.DEFAULT_SIZE_Y;

    /** Scale factor applied when drawing each floor preview. */
    private static final double FLOOR_ZOOM_SCALE = .4;

    /** On-screen height of one floor slot (layout plus gap, scaled). */
    private static final int FLOOR_AREA_SIZE = (int) ((Y_LAYOUT_SIZE + Y_LAYOUT_GAP) * FLOOR_ZOOM_SCALE);

    /** Horizontal offset of the floor stack inside this panel. */
    private static final int X_BASE_TRANSLATION = 150;

    /** Vertical offset of the floor stack inside this panel. */
    private static final int Y_BASE_TRANSLATION = 10;

    // Initial board size (before any zooming)
    private int sizeX = 1000, sizeY = 500;

    private ArrayList<LayoutEditor> layoutEditorsList;

    MainWindow window;

    /** Arrow currently being dragged by the user, or null when none. */
    private Arrow tempArrow;

    /** Finished arrows representing inter-floor relations. */
    private ArrayList<Arrow> arrows = new ArrayList<Arrow>();

    /** Screen coordinates of the pending arrow's start point. */
    int arr1x, arr1y;

    /** Developed path where the pending arrow starts. */
    Path arrowBeg;

    /** True while an arrow has been started but not yet finished. */
    boolean aarrStarted = false;

    public FloorsEditor(ArrayList<LayoutEditor> layoutEditorsList, MainWindow window) {
        this.layoutEditorsList = layoutEditorsList;
        this.window = window;
    }

    /** Replaces the floor list and discards all existing arrows. */
    public void reset(ArrayList<LayoutEditor> layoutEditorsList) {
        this.layoutEditorsList = layoutEditorsList;
        arrows = new ArrayList<Arrow>();
    }

    /**
     * Paints the floor previews (the last list entry ends up at the top of the
     * stack, matching {@link #calculateFloorUnderMouse}) and then the arrows.
     * Custom painting is done in paintComponent rather than paint so Swing
     * keeps double buffering and the normal border/children pipeline.
     */
    @Override
    public void paintComponent(Graphics g) {
        Graphics2D g2D = (Graphics2D) g;
        // Quality rendering hints (the original set KEY_STROKE_CONTROL twice;
        // the duplicate has been removed)
        g2D.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION,
                RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
        g2D.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING,
                RenderingHints.VALUE_COLOR_RENDER_QUALITY);
        g2D.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL,
                RenderingHints.VALUE_STROKE_NORMALIZE);
        g2D.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);
        g2D.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING,
                RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
        g2D.setRenderingHint(RenderingHints.KEY_RENDERING,
                RenderingHints.VALUE_RENDER_QUALITY);

        // White background
        g2D.setColor(Color.white);
        g2D.fillRect(0, 0, this.getWidth(), this.getHeight());

        int layoutsCount = layoutEditorsList.size();
        int yCorrection = Y_LAYOUT_SIZE + Y_LAYOUT_GAP;

        AffineTransform saved = g2D.getTransform();
        g2D.translate(X_BASE_TRANSLATION, Y_BASE_TRANSLATION);
        g2D.scale(FLOOR_ZOOM_SCALE, FLOOR_ZOOM_SCALE);
        // Iterate from the highest index down so it is drawn at the top
        for (int i = layoutsCount - 1; i >= 0; i--) {
            layoutEditorsList.get(i).paintMe(g2D);
            g2D.translate(0, yCorrection);
        }
        g2D.setTransform(saved);

        drawTempArrow(g2D);
        drawArrows(g2D);
    }

    /** Stroke used for relation arrows. */
    final static BasicStroke arrow_stroke = new BasicStroke(1.0f,
            BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND);

    private void drawArrows(Graphics2D g2D) {
        g2D.setColor(Color.BLACK);
        g2D.setStroke(arrow_stroke);
        for (Arrow arr : arrows) {
            g2D.draw(Arrow.createTwoDirArrowShapeNice(arr));
        }
    }

    private void drawTempArrow(Graphics2D g2d) {
        if (tempArrow == null) {
            return;
        }
        g2d.setColor(Color.BLACK);
        g2d.draw(Arrow.createTwoDirArrowShapeNice(tempArrow));
    }

    /**
     * Sizes the panel so all floor previews fit at the preview scale.
     *
     * @param sizeX desired panel width in pixels.
     */
    public void initLayout(int sizeX) {
        this.sizeX = sizeX;
        this.sizeY = (int) ((MainWindow.DEFAULT_SIZE_Y + Y_LAYOUT_GAP)
                * layoutEditorsList.size() * FLOOR_ZOOM_SCALE);
        this.setPreferredSize(new Dimension(sizeX, sizeY));
        this.setSize(sizeX, sizeY);
        repaint();
    }

    public void addArrow(int arr1x, int arr1y, int a2x, int a2y, Path start, Path end, boolean isthick) {
        arrows.add(new Arrow(new Point(arr1x, arr1y), new Point(a2x, a2y), start, end, isthick));
    }

    public void setTemporaryArrow(int arr1x, int arr1y, int a2x, int a2y) {
        tempArrow = new Arrow(new Point(arr1x, arr1y), new Point(a2x, a2y), false);
    }

    public void removeTempArrow() {
        tempArrow = null;
    }

    /** Returns the floor number under the cursor (the top slot has the highest number). */
    private int calculateFloorUnderMouse(MouseEvent e) {
        int base = e.getY() - Y_BASE_TRANSLATION;
        return window.floorCount - 1 - base / FLOOR_AREA_SIZE;
    }

    /** Converts a panel x coordinate into the floor-stack coordinate system. */
    private int calcxFloorTranslation(MouseEvent e) {
        // undo the initial horizontal offset
        return e.getX() - X_BASE_TRANSLATION;
    }

    /** Converts a panel y coordinate into the local coordinates of the given floor. */
    private int calcyFloorTranslation(MouseEvent e, int floorUnderMouse) {
        // undo the initial vertical offset ...
        int ytrans = e.getY() - Y_BASE_TRANSLATION;
        // ... then subtract the slots occupied by the floors above this one
        return ytrans - (window.floorCount - 1 - floorUnderMouse) * FLOOR_AREA_SIZE;
    }

    /**
     * Handles a click in the overview. Clicking a developed path starts an
     * arrow; clicking a second path finishes it and creates the corresponding
     * multi-floor relation. Clicking empty space cancels any pending arrow.
     */
    void floorsEditorMouseClicked(MouseEvent e) {
        // Left button draws a "thick" arrow (KIND_ACC), any other a thin one (KIND_VIS)
        boolean isThick = e.getButton() == MouseEvent.BUTTON1;

        int floorUnderMouse = calculateFloorUnderMouse(e);
        LayoutEditor editorUnderMouse = window.getFloor(floorUnderMouse);

        // Temporarily switch the editor to the preview zoom for hit testing
        double savedZoom = editorUnderMouse.getZoomedTo();
        editorUnderMouse.setZoomedTo(FLOOR_ZOOM_SCALE);

        int xtrans = calcxFloorTranslation(e);
        int ytrans = calcyFloorTranslation(e, floorUnderMouse);

        switch (editorUnderMouse.mode) {
        case OUTLINE_FINISHED:
            if (editorUnderMouse.selectDevelopedPath(xtrans, ytrans)) {
                // A path was hit: select it
                window.setSelectedAreaInfo(editorUnderMouse.getDevelopedPath());
                if (!aarrStarted) {
                    // First click: remember the arrow start
                    arr1x = e.getX();
                    arr1y = e.getY();
                    aarrStarted = true;
                    arrowBeg = editorUnderMouse.getDevelopedPath();
                    this.setTemporaryArrow(arr1x, arr1y, e.getX(), e.getY());
                } else {
                    // Second click: finish the arrow
                    aarrStarted = false;
                    this.removeTempArrow();
                    Path arrowEnd = editorUnderMouse.getDevelopedPath();
                    this.addArrow(arr1x, arr1y, e.getX(), e.getY(), arrowBeg, arrowEnd, isThick);

                    // Create the relation between the two floors; the path on
                    // the higher floor is always passed first
                    String relKind = isThick ? HLH.KIND_ACC : HLH.KIND_VIS;
                    if (arrowBeg.getFloorNr() > arrowEnd.getFloorNr()) {
                        window.controller.createMultiFloorRealtion(arrowBeg, arrowEnd, relKind);
                    } else {
                        window.controller.createMultiFloorRealtion(arrowEnd, arrowBeg, relKind);
                    }
                    window.clearDevelopedPathSelection();
                }
            } else {
                // Nothing hit: clear selection and abandon any pending arrow
                window.setSelectedAreaInfo(null);
                window.clearDevelopedPathSelection();
                aarrStarted = false;
                this.removeTempArrow();
            }
            break;
        default:
            break;
        }

        editorUnderMouse.setZoomedTo(savedZoom);
        repaint();
    }

    /** Tracks the mouse: updates the pending arrow and highlights the grid/path under the cursor. */
    synchronized void floorsEditorMouseMoved(MouseEvent e) {
        if (aarrStarted) {
            this.setTemporaryArrow(arr1x, arr1y, e.getX(), e.getY());
        }

        int floorUnderMouse = calculateFloorUnderMouse(e);
        LayoutEditor editorUnderMouse = window.getFloor(floorUnderMouse);

        double savedZoom = editorUnderMouse.getZoomedTo();
        editorUnderMouse.setZoomedTo(FLOOR_ZOOM_SCALE);

        int xtrans = calcxFloorTranslation(e);
        int ytrans = calcyFloorTranslation(e, floorUnderMouse);

        switch (editorUnderMouse.mode) {
        case OUTLINE_FINISHED:
        case AREA_SELECTED:
            // Both modes marked the grid and highlighted the path identically;
            // merged via fall-through
            editorUnderMouse.markGrid(xtrans, ytrans);
            editorUnderMouse.highlightPath(xtrans, ytrans);
            break;
        default:
            editorUnderMouse.markGrid(xtrans, ytrans);
            break;
        }

        editorUnderMouse.setZoomedTo(savedZoom);
        repaint();
    }
}
/**
 */
package CIM.IEC61970.Meas.impl;

import CIM.IEC61970.Meas.MeasPackage;
import CIM.IEC61970.Meas.StringMeasurement;
import CIM.IEC61970.Meas.StringMeasurementValue;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

// NOTE(review): EMF-generated class (all members carry @generated). Do not
// hand-edit the logic — regenerate from the model instead; hand changes must be
// marked "@generated NOT" or the generator will overwrite them.
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>String Measurement Value</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link CIM.IEC61970.Meas.impl.StringMeasurementValueImpl#getValue <em>Value</em>}</li>
 *   <li>{@link CIM.IEC61970.Meas.impl.StringMeasurementValueImpl#getStringMeasurement <em>String Measurement</em>}</li>
 * </ul>
 *
 * @generated
 */
public class StringMeasurementValueImpl extends MeasurementValueImpl implements StringMeasurementValue {
	/**
	 * The default value of the '{@link #getValue() <em>Value</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue()
	 * @generated
	 * @ordered
	 */
	protected static final String VALUE_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getValue() <em>Value</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getValue()
	 * @generated
	 * @ordered
	 */
	protected String value = VALUE_EDEFAULT;

	/**
	 * The cached value of the '{@link #getStringMeasurement() <em>String Measurement</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStringMeasurement()
	 * @generated
	 * @ordered
	 */
	protected StringMeasurement stringMeasurement;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected StringMeasurementValueImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return MeasPackage.Literals.STRING_MEASUREMENT_VALUE;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getValue() {
		return value;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setValue(String newValue) {
		String oldValue = value;
		value = newValue;
		// notify observers only when someone is listening (standard EMF pattern)
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MeasPackage.STRING_MEASUREMENT_VALUE__VALUE, oldValue, value));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public StringMeasurement getStringMeasurement() {
		// resolve a cross-document proxy lazily on first access
		if (stringMeasurement != null && stringMeasurement.eIsProxy()) {
			InternalEObject oldStringMeasurement = (InternalEObject)stringMeasurement;
			stringMeasurement = (StringMeasurement)eResolveProxy(oldStringMeasurement);
			if (stringMeasurement != oldStringMeasurement) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, MeasPackage.STRING_MEASUREMENT_VALUE__STRING_MEASUREMENT, oldStringMeasurement, stringMeasurement));
			}
		}
		return stringMeasurement;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public StringMeasurement basicGetStringMeasurement() {
		return stringMeasurement;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetStringMeasurement(StringMeasurement newStringMeasurement, NotificationChain msgs) {
		StringMeasurement oldStringMeasurement = stringMeasurement;
		stringMeasurement = newStringMeasurement;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MeasPackage.STRING_MEASUREMENT_VALUE__STRING_MEASUREMENT, oldStringMeasurement, newStringMeasurement);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStringMeasurement(StringMeasurement newStringMeasurement) {
		// maintain the bidirectional reference: detach from the old measurement,
		// attach to the new one, then dispatch the accumulated notifications
		if (newStringMeasurement != stringMeasurement) {
			NotificationChain msgs = null;
			if (stringMeasurement != null)
				msgs = ((InternalEObject)stringMeasurement).eInverseRemove(this, MeasPackage.STRING_MEASUREMENT__STRING_MEASUREMENT_VALUES, StringMeasurement.class, msgs);
			if (newStringMeasurement != null)
				msgs = ((InternalEObject)newStringMeasurement).eInverseAdd(this, MeasPackage.STRING_MEASUREMENT__STRING_MEASUREMENT_VALUES, StringMeasurement.class, msgs);
			msgs = basicSetStringMeasurement(newStringMeasurement, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MeasPackage.STRING_MEASUREMENT_VALUE__STRING_MEASUREMENT, newStringMeasurement, newStringMeasurement));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case MeasPackage.STRING_MEASUREMENT_VALUE__STRING_MEASUREMENT:
				if (stringMeasurement != null)
					msgs = ((InternalEObject)stringMeasurement).eInverseRemove(this, MeasPackage.STRING_MEASUREMENT__STRING_MEASUREMENT_VALUES, StringMeasurement.class, msgs);
				return basicSetStringMeasurement((StringMeasurement)otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case MeasPackage.STRING_MEASUREMENT_VALUE__STRING_MEASUREMENT:
				return basicSetStringMeasurement(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case MeasPackage.STRING_MEASUREMENT_VALUE__VALUE:
				return getValue();
			case MeasPackage.STRING_MEASUREMENT_VALUE__STRING_MEASUREMENT:
				if (resolve) return getStringMeasurement();
				return basicGetStringMeasurement();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case MeasPackage.STRING_MEASUREMENT_VALUE__VALUE:
				setValue((String)newValue);
				return;
			case MeasPackage.STRING_MEASUREMENT_VALUE__STRING_MEASUREMENT:
				setStringMeasurement((StringMeasurement)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case MeasPackage.STRING_MEASUREMENT_VALUE__VALUE:
				setValue(VALUE_EDEFAULT);
				return;
			case MeasPackage.STRING_MEASUREMENT_VALUE__STRING_MEASUREMENT:
				setStringMeasurement((StringMeasurement)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case MeasPackage.STRING_MEASUREMENT_VALUE__VALUE:
				return VALUE_EDEFAULT == null ? value != null : !VALUE_EDEFAULT.equals(value);
			case MeasPackage.STRING_MEASUREMENT_VALUE__STRING_MEASUREMENT:
				return stringMeasurement != null;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (value: ");
		result.append(value);
		result.append(')');
		return result.toString();
	}

} //StringMeasurementValueImpl
package test.deployer; import static aQute.lib.io.IO.collect; import static aQute.lib.io.IO.copy; import static aQute.lib.io.IO.delete; import static aQute.lib.io.IO.getFile; import static aQute.lib.io.IO.stream; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.security.MessageDigest; import java.util.Arrays; import java.util.Formatter; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.SortedSet; import org.mockito.Mockito; import aQute.bnd.service.RepositoryPlugin; import aQute.bnd.service.RepositoryPlugin.DownloadListener; import aQute.bnd.service.RepositoryPlugin.PutOptions; import aQute.bnd.service.RepositoryPlugin.PutResult; import aQute.bnd.service.repository.SearchableRepository.ResourceDescriptor; import aQute.bnd.version.Version; import aQute.lib.deployer.FileRepo; import aQute.lib.io.IO; import aQute.libg.cryptography.SHA1; import aQute.libg.cryptography.SHA256; import aQute.libg.map.MAP; import junit.framework.TestCase; @SuppressWarnings("resource") public class FileRepoTest extends TestCase { private FileRepo testRepo; private FileRepo nonExistentRepo; private FileRepo indexedRepo; private File tmp; private String hashToString(byte[] hash) { Formatter formatter = new Formatter(); for (byte b : hash) { formatter.format("%02x", b); } return formatter.toString(); } private byte[] calculateHash(MessageDigest algorithm, File file) throws Exception { algorithm.reset(); copy(file, algorithm); return algorithm.digest(); } @Override protected void setUp() throws Exception { File testRepoDir = IO.getFile("src/test/repo"); assertTrue(testRepoDir.isDirectory()); testRepo = createRepo(testRepoDir); File nonExistentDir = IO.getFile("invalidrepo"); nonExistentDir.mkdir(); nonExistentDir.setReadOnly(); nonExistentRepo = createRepo(nonExistentDir); tmp = IO.getFile("tmp" + getName()); tmp.mkdir(); indexedRepo = createRepo(tmp, MAP.$("index", "true")); } 
@Override protected void tearDown() throws Exception { File nonExistentDir = IO.getFile("invalidrepo"); delete(nonExistentDir); IO.delete(tmp); } private FileRepo createRepo(File root) { return createRepo(root, new HashMap<String,String>()); } private FileRepo createRepo(File root, Map<String,String> props) { FileRepo repo = new FileRepo(); props.put("location", root.getAbsolutePath()); repo.setProperties(props); return repo; } /** * Test a repo with an index */ public void testIndex() throws Exception { // // Check if the index property works // by verifying the diff between the // testRepo and the indexed Repo // assertNull(testRepo.getResources()); assertNotNull(indexedRepo.getResources()); // // Check that we can actually put a resource // PutResult put = indexedRepo.put(IO.getFile("jar/osgi.jar").toURI().toURL().openStream(), null); assertNotNull(put); // Can we get it? ResourceDescriptor desc = indexedRepo.getDescriptor("osgi", new Version("4.0")); assertNotNull(desc); // Got the same file? 
assertTrue(Arrays.equals(put.digest, desc.id)); // // Check if the description was copied // assertEquals("OSGi Service Platform Release 4 Interfaces and Classes for use in compiling bundles.", desc.description); // // We must be able to access by its sha1 // ResourceDescriptor resource = indexedRepo.getResource(put.digest); assertTrue(Arrays.equals(resource.id, desc.id)); // // Check if we now have a set of resources // SortedSet<ResourceDescriptor> resources = indexedRepo.getResources(); assertEquals(1, resources.size()); ResourceDescriptor rd = resources.iterator().next(); assertTrue(Arrays.equals(rd.id, put.digest)); // // Check if the bsn brings us back // File file = indexedRepo.get(desc.bsn, desc.version, null); assertNotNull(file); assertTrue(Arrays.equals(put.digest, SHA1.digest(file).digest())); byte[] digest = SHA256.digest(file).digest(); assertTrue(Arrays.equals(rd.sha256, digest)); // // Delete and see if it is really gone // indexedRepo.delete(desc.bsn, desc.version); resources = indexedRepo.getResources(); assertEquals(0, resources.size()); file = indexedRepo.get(desc.bsn, desc.version, null); assertNull(file); resource = indexedRepo.getResource(put.digest); assertNull(resource); } public void testListBSNs() throws Exception { List<String> list = testRepo.list(null); assertNotNull(list); assertEquals(4, list.size()); assertTrue(list.contains("ee.minimum")); assertTrue(list.contains("org.osgi.impl.service.cm")); assertTrue(list.contains("org.osgi.impl.service.io")); assertTrue(list.contains("osgi")); } public void testListNonExistentRepo() throws Exception { // Listing should succeed and return non-null empty list List<String> list = nonExistentRepo.list(null); assertNotNull(list); assertEquals(0, list.size()); } public void testBundleNotModifiedOnPut() throws Exception { MessageDigest sha1 = MessageDigest.getInstance("SHA-1"); File dstBundle = null; try { File srcBundle = IO.getFile("testresources/test.jar"); byte[] srcSha = calculateHash(sha1, 
srcBundle); PutOptions options = new RepositoryPlugin.PutOptions(); options.digest = srcSha; PutResult r = testRepo.put(new BufferedInputStream(new FileInputStream(srcBundle)), options); dstBundle = new File(r.artifact); assertEquals(hashToString(srcSha), hashToString(r.digest)); assertTrue(MessageDigest.isEqual(srcSha, r.digest)); } finally { if (dstBundle != null) { delete(dstBundle.getParentFile()); } } } public void testDownloadListenerCallback() throws Exception { try { FileRepo repo = new FileRepo("tmp", tmp, true); File srcBundle = IO.getFile("testresources/test.jar"); PutResult r = repo.put(IO.stream(IO.getFile("testresources/test.jar")), null); assertNotNull(r); assertNotNull(r.artifact); File f = new File(r.artifact); // file repo, so should match SHA1 sha1 = SHA1.digest(srcBundle); sha1.equals(SHA1.digest(f)); DownloadListener mock = Mockito.mock(DownloadListener.class); f = repo.get("test", new Version("0"), null, mock); Mockito.verify(mock).success(f); Mockito.verifyNoMoreInteractions(mock); Mockito.reset(mock); f = repo.get("XXXXXXXXXXXXXXXXX", new Version("0"), null, mock); assertNull(f); Mockito.verifyZeroInteractions(mock); } finally { IO.delete(tmp); } } public void testDeployToNonexistentRepoFails() throws Exception { if (System.getProperty("os.name").toLowerCase().indexOf("win") >= 0) { // File#setReadonly() is broken on windows return; } try { nonExistentRepo.put(new BufferedInputStream(new FileInputStream("testresources/test.jar")), new RepositoryPlugin.PutOptions()); fail("Should have thrown exception"); } catch (Exception e) { // OK, you cannot check for exception messages or exception type } } public void testCommands() throws Exception { FileRepo repo = new FileRepo(); File root = tmp; delete(root); try { Map<String,String> props = new HashMap<String,String>(); props.put(FileRepo.LOCATION, root.getAbsolutePath()); props.put(FileRepo.CMD_INIT, "echo init $0 $1 $2 $3 >>report"); props.put(FileRepo.CMD_OPEN, "echo open $0 $1 $2 $3 >>report"); 
props.put(FileRepo.CMD_BEFORE_GET, "echo beforeGet $0 $1 $2 $3 >>report"); props.put(FileRepo.CMD_BEFORE_PUT, "echo beforePut $0 $1 $2 $3 >>report"); props.put(FileRepo.CMD_AFTER_PUT, "echo afterPut $0 $1 $2 $3 >>report"); props.put(FileRepo.CMD_ABORT_PUT, "echo abortPut $0 $1 $2 $3 >>report"); props.put(FileRepo.CMD_REFRESH, "echo refresh $0 $1 $2 $3 >>report"); props.put(FileRepo.CMD_CLOSE, "echo close $0 $1 $2 $3 >>report"); props.put(FileRepo.CMD_PATH, "/xxx,$@,/yyy"); props.put(FileRepo.TRACE, true + ""); repo.setProperties(props); repo.refresh(); { InputStream in = stream(getFile("jar/osgi.jar")); try { repo.put(in, null); } finally { in.close(); } } { InputStream in = stream("not a valid zip"); try { repo.put(in, null); fail("expected failure"); } catch (Exception e) { // ignore } finally { in.close(); } } repo.close(); String s = collect(new File(root, "report")); System.out.println(s); s = s.replaceAll("\\\\", "/"); s = s.replaceAll(root.getAbsolutePath().replaceAll("\\\\", "/"), "@"); String parts[] = s.split("\r?\n"); assertEquals(8, parts.length); assertEquals(parts[0], "init @"); assertEquals(parts[1], "open @"); assertEquals(parts[2], "refresh @"); assertTrue(parts[3].matches("beforePut @ @/.*")); assertEquals(parts[4], "afterPut @ @/osgi/osgi-4.0.0.jar D37A1C9D5A9D3774F057B5452B7E47B6D1BB12D0"); assertTrue(parts[5].matches("beforePut @ @/.*")); assertTrue(parts[6].matches("abortPut @ @/.*")); assertEquals(parts[7], "close @"); } finally { delete(root); } } }
/*
 * Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
 */
package com.intellij.openapi.roots.ui.configuration.projectRoot;

import com.intellij.facet.Facet;
import com.intellij.ide.CommonActionsManager;
import com.intellij.ide.TreeExpander;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ui.configuration.projectRoot.daemon.ProjectStructureDaemonAnalyzerListener;
import com.intellij.openapi.roots.ui.configuration.projectRoot.daemon.ProjectStructureElement;
import com.intellij.openapi.ui.MasterDetailsComponent;
import com.intellij.openapi.ui.MasterDetailsState;
import com.intellij.openapi.ui.MasterDetailsStateService;
import com.intellij.openapi.ui.NamedConfigurable;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.ui.TreeSpeedSearch;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.navigation.Place;
import com.intellij.util.IconUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.LinkedMultiMap;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.util.*;
import java.util.List;

/**
 * Base class for the master-detail pages of the Project Structure dialog
 * (modules, libraries, facets, ...). Subclasses supply the tree content via
 * {@link #loadTree()} and the add/remove behavior via {@link #createAddAction()}
 * and {@link #getRemoveHandlers()}.
 */
public abstract class BaseStructureConfigurable extends MasterDetailsComponent
  implements SearchableConfigurable, Disposable, Place.Navigator {

  protected StructureConfigurableContext myContext;

  protected final Project myProject;

  // starts true; reset() flips it to false when the UI is (re)created
  protected boolean myUiDisposed = true;

  private boolean myWasTreeInitialized;

  protected boolean myAutoScrollEnabled = true;

  protected BaseStructureConfigurable(Project project, MasterDetailsState state) {
    super(state);
    myProject = project;
  }

  // NOTE(review): this overload does not pass a MasterDetailsState to super —
  // presumably the superclass default state is intended; confirm.
  protected BaseStructureConfigurable(@NotNull Project project) {
    myProject = project;
  }

  /**
   * Installs the shared context and repaints the tree whenever the daemon
   * analyzer reports changed problems for any project-structure element.
   */
  public void init(StructureConfigurableContext context) {
    myContext = context;
    myContext.getDaemonAnalyzer().addListener(new ProjectStructureDaemonAnalyzerListener() {
      @Override
      public void problemsChanged(@NotNull ProjectStructureElement element) {
        if (!myTree.isShowing()) return;

        myTree.revalidate();
        myTree.repaint();
      }
    });
  }

  @Override
  protected MasterDetailsStateService getStateService() {
    return MasterDetailsStateService.getInstance(myProject);
  }

  /**
   * Navigates to a node identified by either its editable object or its name.
   * Auto-scroll is suspended until the async selection completes so the tree
   * does not fight the programmatic selection.
   */
  @Override
  public ActionCallback navigateTo(@Nullable final Place place, final boolean requestFocus) {
    if (place == null) return ActionCallback.DONE;

    final Object object = place.getPath(TREE_OBJECT);
    final String byName = (String)place.getPath(TREE_NAME);

    if (object == null && byName == null) return ActionCallback.DONE;

    final MyNode node = object == null ? null : findNodeByObject(myRoot, object);
    final MyNode nodeByName = byName == null ? null : findNodeByName(myRoot, byName);

    if (node == null && nodeByName == null) return ActionCallback.DONE;

    final NamedConfigurable config;
    if (node != null) {
      config = node.getConfigurable();
    }
    else {
      config = nodeByName.getConfigurable();
    }

    // re-enable auto-scroll only once navigation has fully completed
    final ActionCallback result = new ActionCallback().doWhenDone(() -> myAutoScrollEnabled = true);

    myAutoScrollEnabled = false;
    myAutoScrollHandler.cancelAllRequests();
    final MyNode nodeToSelect = node != null ? node : nodeByName;
    selectNodeInTree(nodeToSelect, requestFocus).doWhenDone(() -> {
      setSelectedNode(nodeToSelect);
      Place.goFurther(config, place, requestFocus).notifyWhenDone(result);
    });

    return result;
  }

  /** Records the currently selected configurable into {@code place} for later navigation. */
  @Override
  public void queryPlace(@NotNull final Place place) {
    if (myCurrentConfigurable != null) {
      place.putPath(TREE_OBJECT, myCurrentConfigurable.getEditableObject());
      Place.queryFurther(myCurrentConfigurable, place);
    }
  }

  /** Lazily initializes the tree: speed search, tooltips and the problem-aware renderer. */
  @Override
  protected void initTree() {
    if (myWasTreeInitialized) return;
    myWasTreeInitialized = true;

    super.initTree();
    new TreeSpeedSearch(myTree, treePath -> getTextForSpeedSearch((MyNode)treePath.getLastPathComponent()), true);
    ToolTipManager.sharedInstance().registerComponent(myTree);
    myTree.setCellRenderer(new ProjectStructureElementRenderer(myContext));
  }

  @NotNull
  protected String getTextForSpeedSearch(MyNode node) {
    return node.getDisplayName();
  }

  /** Tears the UI down once; guarded so repeated disposal is a no-op. */
  @Override
  public void disposeUIResources() {
    if (myUiDisposed) return;

    super.disposeUIResources();

    myUiDisposed = true;

    myAutoScrollHandler.cancelAllRequests();

    myContext.getDaemonAnalyzer().clear();

    Disposer.dispose(this);
  }

  /** Hook for subclasses to veto applying; default does nothing. */
  public void checkCanApply() throws ConfigurationException {
  }

  /** Appends expand-all / collapse-all toolbar actions for the tree. */
  protected void addCollapseExpandActions(final List<? super AnAction> result) {
    final TreeExpander expander = new TreeExpander() {
      @Override
      public void expandAll() {
        TreeUtil.expandAll(myTree);
      }

      @Override
      public boolean canExpand() {
        return true;
      }

      @Override
      public void collapseAll() {
        TreeUtil.collapseAll(myTree, 0);
      }

      @Override
      public boolean canCollapse() {
        return true;
      }
    };
    final CommonActionsManager actionsManager = CommonActionsManager.getInstance();
    result.add(actionsManager.createExpandAllAction(expander, myTree));
    result.add(actionsManager.createCollapseAllAction(expander, myTree));
  }

  /**
   * Returns the project-structure element of the selected node, or null when
   * nothing suitable is selected.
   */
  @Nullable
  public ProjectStructureElement getSelectedElement() {
    final TreePath selectionPath = myTree.getSelectionPath();
    if (selectionPath != null && selectionPath.getLastPathComponent() instanceof MyNode) {
      MyNode node = (MyNode)selectionPath.getLastPathComponent();
      final NamedConfigurable configurable = node.getConfigurable();
      if (configurable instanceof ProjectStructureElementConfigurable) {
        return ((ProjectStructureElementConfigurable)configurable).getProjectStructureElement();
      }
    }
    return null;
  }

  /** "Find Usages" popup action for the selected tree element. */
  private class MyFindUsagesAction extends FindUsagesInProjectStructureActionBase {
    MyFindUsagesAction(JComponent parentComponent) {
      super(parentComponent, myProject);
    }

    @Override
    protected boolean isEnabled() {
      final TreePath selectionPath = myTree.getSelectionPath();
      if (selectionPath != null) {
        final MyNode node = (MyNode)selectionPath.getLastPathComponent();
        // bold nodes are group headers, not real elements
        return !node.isDisplayInBold();
      }
      else {
        return false;
      }
    }

    @Override
    protected StructureConfigurableContext getContext() {
      return myContext;
    }

    @Override
    protected ProjectStructureElement getSelectedElement() {
      return BaseStructureConfigurable.this.getSelectedElement();
    }

    @Override
    protected RelativePoint getPointToShowResults() {
      // show the popup just to the right of the selected row
      final int selectedRow = myTree.getSelectionRows()[0];
      final Rectangle rowBounds = myTree.getRowBounds(selectedRow);
      final Point location = rowBounds.getLocation();
      location.x += rowBounds.width;
      return new RelativePoint(myTree, location);
    }
  }

  /** (Re)creates the tree content when the dialog is shown. */
  @Override
  public void reset() {
    myUiDisposed = false;

    if (!myWasTreeInitialized) {
      initTree();
      myTree.setShowsRootHandles(false);
      loadTreeNodes();
    }
    else {
      reloadTreeNodes();
    }

    super.reset();
  }

  private void loadTreeNodes() {
    loadTree();
    // queue every element for background problem analysis
    for (ProjectStructureElement element : getProjectStructureElements()) {
      myContext.getDaemonAnalyzer().queueUpdate(element);
    }
  }

  protected final void reloadTreeNodes() {
    super.disposeUIResources();
    myTree.setShowsRootHandles(false);
    loadTreeNodes();
  }

  @NotNull
  protected Collection<? extends ProjectStructureElement> getProjectStructureElements() {
    return Collections.emptyList();
  }

  /** Fills the tree with this page's nodes; called from {@link #reset()}. */
  protected abstract void loadTree();

  /** Builds the toolbar/popup actions: add, remove, copy, and (popup only) find usages. */
  @Override
  @NotNull
  protected ArrayList<AnAction> createActions(final boolean fromPopup) {
    final ArrayList<AnAction> result = new ArrayList<>();
    AbstractAddGroup addAction = createAddAction();
    if (addAction != null) {
      result.add(addAction);
    }
    result.add(new MyRemoveAction());

    final List<? extends AnAction> copyActions = createCopyActions(fromPopup);
    result.addAll(copyActions);
    result.add(Separator.getInstance());

    if (fromPopup) {
      result.add(new MyFindUsagesAction(myTree));
    }

    return result;
  }

  @NotNull
  protected List<? extends AnAction> createCopyActions(boolean fromPopup) {
    return Collections.emptyList();
  }

  public void onStructureUnselected() {
  }

  public void onStructureSelected() {
  }

  /** Returns the "Add" action group for this page, or null when adding is unsupported. */
  @Nullable
  protected abstract AbstractAddGroup createAddAction();

  protected List<? extends RemoveConfigurableHandler<?>> getRemoveHandlers() {
    return Collections.emptyList();
  }

  /** Groups the given nodes by the remove handler able to delete their configurables. */
  @NotNull
  private MultiMap<RemoveConfigurableHandler, MyNode> groupNodes(List<? extends MyNode> nodes) {
    List<? extends RemoveConfigurableHandler<?>> handlers = getRemoveHandlers();
    MultiMap<RemoveConfigurableHandler, MyNode> grouped = new LinkedMultiMap<>();
    for (MyNode node : nodes) {
      final NamedConfigurable<?> configurable = node.getConfigurable();
      if (configurable == null) continue;
      RemoveConfigurableHandler handler = findHandler(handlers, configurable.getClass());
      if (handler == null) continue;

      grouped.putValue(handler, node);
    }
    return grouped;
  }

  /** Finds the first handler whose configurable class accepts {@code configurableClass}. */
  private static RemoveConfigurableHandler<?> findHandler(List<? extends RemoveConfigurableHandler<?>> handlers,
                                                          Class<? extends NamedConfigurable> configurableClass) {
    for (RemoveConfigurableHandler<?> handler : handlers) {
      if (handler.getConfigurableClass().isAssignableFrom(configurableClass)) {
        return handler;
      }
    }
    return null;
  }

  /**
   * Delete action: enabled only when every selected node belongs to a handler
   * that allows removal; removes the model objects and then the tree nodes.
   */
  protected class MyRemoveAction extends MyDeleteAction {
    public MyRemoveAction() {
      //noinspection Convert2Lambda
      super(new Condition<Object[]>() {
        @Override
        public boolean value(final Object[] objects) {
          List<MyNode> nodes = new ArrayList<>();
          for (Object object : objects) {
            if (!(object instanceof MyNode)) return false;
            nodes.add((MyNode)object);
          }
          MultiMap<RemoveConfigurableHandler, MyNode> map = groupNodes(nodes);
          for (Map.Entry<RemoveConfigurableHandler, Collection<MyNode>> entry : map.entrySet()) {
            //noinspection unchecked
            if (!entry.getKey().canBeRemoved(getEditableObjects(entry.getValue()))) {
              return false;
            }
          }
          return true;
        }
      });
    }

    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
      final TreePath[] paths = myTree.getSelectionPaths();
      if (paths == null) return;

      List<MyNode> removedNodes = removeFromModel(paths);
      removeNodes(removedNodes);
    }

    private List<MyNode> removeFromModel(final TreePath[] paths) {
      List<MyNode> nodes = ContainerUtil.mapNotNull(paths, path -> {
        Object node = path.getLastPathComponent();
        return node instanceof MyNode ? (MyNode)node : null;
      });
      MultiMap<RemoveConfigurableHandler, MyNode> grouped = groupNodes(nodes);

      List<MyNode> removedNodes = new ArrayList<>();
      for (Map.Entry<RemoveConfigurableHandler, Collection<MyNode>> entry : grouped.entrySet()) {
        //noinspection unchecked
        boolean removed = entry.getKey().remove(getEditableObjects(entry.getValue()));
        if (removed) {
          removedNodes.addAll(entry.getValue());
        }
      }
      return removedNodes;
    }
  }

  private static List<?> getEditableObjects(Collection<? extends MyNode> value) {
    List<Object> objects = new ArrayList<>();
    for (MyNode node : value) {
      objects.add(node.getConfigurable().getEditableObject());
    }
    return objects;
  }

  /** Removes the tree nodes corresponding to the given facets (if present). */
  protected void removeFacetNodes(@NotNull List<? extends Facet> facets) {
    for (Facet facet : facets) {
      MyNode node = findNodeByObject(myRoot, facet);
      if (node != null) {
        removePaths(TreeUtil.getPathFromRoot(node));
      }
    }
  }

  /**
   * Base class for the "Add" popup group; reuses the "NewElement" keyboard
   * shortcut from the active keymap.
   */
  protected abstract static class AbstractAddGroup extends ActionGroup implements ActionGroupWithPreselection {

    protected AbstractAddGroup(String text, Icon icon) {
      super(text, true);

      final Presentation presentation = getTemplatePresentation();
      presentation.setIcon(icon);

      final Keymap active = KeymapManager.getInstance().getActiveKeymap();
      if (active != null) {
        final Shortcut[] shortcuts = active.getShortcuts("NewElement");
        setShortcutSet(new CustomShortcutSet(shortcuts));
      }
    }

    public AbstractAddGroup(String text) {
      this(text, IconUtil.getAddIcon());
    }

    @Override
    public ActionGroup getActionGroup() {
      return this;
    }
  }
}
package coho.debug; import coho.common.number.*; import coho.lp.solver.*; public class STAT { /***************************************** * functions for debug non-generic bugs * @author chaoyan * ******************************************/ // public static int[] counters = new int[20]; // public static double[] timers = new double[20]; public static final boolean stat =false; public static void println(String msg) { DEBUG.println(msg,"STAT"); } /* * stastic */ // public static int lpCounter = 0; public static int[] lpResult = new int[12]; // public static int lpPivot = 0; public static int[] path = new int[5]; public static int basisCounter=0; public static int feasibleBasisCounter=0; // public static int illCondCounter=0; public static int[] condNumber = new int[12]; public static int[] interval = new int[5]; public static int[] findBasis = new int[2]; // public static int linearCounter=0; // public static int[] condCounter = new int[10]; // public static int[] interCounter = new int[10]; // public static void outStat(){ println("%-----------------------------------------%"); println("Stat data"); int lpCounter=0; for(int i=0;i<lpResult.length;i++) lpCounter+=lpResult[i]; println("There are "+lpCounter+" LP solved Totally."); println("%-----------------------------------------%"); println("Stat initial feasible basis method"); println("There are "+findBasis[0]+"("+findBasis[0]*100.0/lpCounter+" percent)"+" LP use BigM method to find basis."); println("There are "+findBasis[1]+"("+findBasis[1]*100.0/lpCounter+" percent)"+" LP have easy basis."); println("%-----------------------------------------%"); println("Stat interval of result"); println("There are "+lpResult[0]+"("+lpResult[0]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-7"); println("There are "+lpResult[1]+"("+lpResult[1]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-8"); println("There are "+lpResult[2]+"("+lpResult[2]*100.0/lpCounter+" 
percent)"+" results with relative interval greater than 1e-9"); println("There are "+lpResult[3]+"("+lpResult[3]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-10"); println("There are "+lpResult[4]+"("+lpResult[4]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-11"); println("There are "+lpResult[5]+"("+lpResult[5]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-12"); println("There are "+lpResult[6]+"("+lpResult[6]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-13"); println("There are "+lpResult[7]+"("+lpResult[7]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-14"); println("There are "+lpResult[8]+"("+lpResult[8]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-15"); println("There are "+lpResult[9]+"("+lpResult[9]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-16"); println("There are "+lpResult[10]+"("+lpResult[10]*100.0/lpCounter+" percent)"+" results with relative interval greater than 1e-17"); println("There are "+lpResult[11]+"("+lpResult[11]*100.0/lpCounter+" percent)"+" results with relative interval less(equal) than 1e-17"); println("%-----------------------------------------%"); int lpPivot=0; for(int i=0;i<path.length;i++) lpPivot+=path[i]; println("Stat pivot"); println("There are "+lpPivot+" pivots totally. 
About "+(lpPivot+0.0)/lpCounter+" pivots per LP"); println("There are "+path[1]+"("+path[1]*100.0/lpPivot+" percent)"+" pivots that has an unique branches."); println("There are "+path[2]+"("+path[2]*100.0/lpPivot+" percent)"+" pivots that has 2 branches."); println("There are "+path[3]+"("+path[3]*100.0/lpPivot+" percent)"+" pivots that has 3 branches."); println("There are "+path[4]+"("+path[4]*100.0/lpPivot+" percent)"+" pivots that has 4 branches."); println("There are "+path[0]+"("+path[0]*100.0/lpPivot+" percent)"+" pivots that has more than 4 branches."); println("%-----------------------------------------%"); println("Stat basis"); println("There are "+basisCounter+" bases visited Totally. About "+(basisCounter+0.0)/lpCounter+" baes per LP"); println("With "+feasibleBasisCounter+"("+feasibleBasisCounter*100.0/basisCounter+" percent)"+" clearly feasible basis"); println("%-----------------------------------------%"); int cn=0, inter=0; for(int i=0;i<condNumber.length;i++) cn+=condNumber[i]; for(int i=0;i<interval.length;i++) inter+=interval[i]; int illCondCounter=cn+inter; println("Stat ill-condition exception"); println("There are "+illCondCounter+" Exceptions Totally. 
About "+(illCondCounter+0.0)/lpCounter+" ill conditions per LP"); println("Stat condition number exception"); println("There are "+cn+"("+cn*100.0/illCondCounter+" percent)"+" Exceptions caught by condition number estination"); println("There are "+condNumber[0]+"("+condNumber[0]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e6(greater than 1e5)"); println("There are "+condNumber[1]+"("+condNumber[1]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e7"); println("There are "+condNumber[2]+"("+condNumber[2]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e8"); println("There are "+condNumber[3]+"("+condNumber[3]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e9"); println("There are "+condNumber[4]+"("+condNumber[4]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e10"); println("There are "+condNumber[5]+"("+condNumber[5]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e11"); println("There are "+condNumber[6]+"("+condNumber[6]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e12"); println("There are "+condNumber[7]+"("+condNumber[7]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e13"); println("There are "+condNumber[8]+"("+condNumber[8]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e14"); println("There are "+condNumber[9]+"("+condNumber[9]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e15"); println("There are "+condNumber[10]+"("+condNumber[10]*100.0/cn+" percent)"+" exception that the estimated condition number is less than 1e16"); println("There are "+condNumber[11]+"("+condNumber[11]*100.0/cn+" percent)"+" exception that the estimated condition number is greater than 1e16"); println("Stat large interval 
exception"); println("There are "+inter+"("+inter*100.0/illCondCounter+" percent)"+" Exceptions caught by large interval of solution for linear system"); println("There are "+interval[0]+"("+interval[0]*100.0/inter+" percent)"+" exception that the interval is less than 1e-2"); println("There are "+interval[1]+"("+interval[1]*100.0/inter+" percent)"+" exception that the interval is less than 1e-1"); println("There are "+interval[2]+"("+interval[2]*100.0/inter+" percent)"+" exception that the interval is less than 1e-0"); println("There are "+interval[3]+"("+interval[3]*100.0/inter+" percent)"+" exception that the interval is less than 1e1"); println("There are "+interval[4]+"("+interval[4]*100.0/inter+" percent)"+" exception that the interval is greater than 1e1"); println("%-----------------------------------------%"); println("Stat linear system solver"); println("There are "+lsCounter+" linear system solved"); println("There are "+lsHybridCounter+" hybrid method called"); // //analysis interval and condition number for all case. 
// int solved = 0; // for(int i=0;i<condCounter.length;i++) // linearCounter+=condCounter[i]; // for(int i=0;i<interCounter.length;i++) // solved += interCounter[i]; // println("There are "+linearCounter+" linear systems to solve"); // println("There are "+solved+"("+solved*100.0/linearCounter+" percent)"+" linear systems solved"); // println("%-----------------------------------------\n%"); // println("There are "+condCounter[0]+"("+condCounter[0]*100.0/linearCounter+" percent)"+" linear systems with condition number less than 1e0"); // println("There are "+condCounter[1]+"("+condCounter[1]*100.0/linearCounter+" percent)"+" linear systems with condition number less than 1e1"); // println("There are "+condCounter[2]+"("+condCounter[2]*100.0/linearCounter+" percent)"+" linear systems with condition number less than 1e2"); // println("There are "+condCounter[3]+"("+condCounter[3]*100.0/linearCounter+" percent)"+" linear systems with condition number less than 1e3"); // println("There are "+condCounter[4]+"("+condCounter[4]*100.0/linearCounter+" percent)"+" linear systems with condition number less than 1e4"); // println("There are "+condCounter[5]+"("+condCounter[5]*100.0/linearCounter+" percent)"+" linear systems with condition number less than 1e5"); // println("There are "+condCounter[6]+"("+condCounter[6]*100.0/linearCounter+" percent)"+" linear systems with condition number less than 1e6"); // println("There are "+condCounter[7]+"("+condCounter[7]*100.0/linearCounter+" percent)"+" linear systems with condition number less than 1e7"); // println("There are "+condCounter[8]+"("+condCounter[8]*100.0/linearCounter+" percent)"+" linear systems with condition number less than 1e8"); // println("There are "+condCounter[9]+"("+condCounter[9]*100.0/linearCounter+" percent)"+" linear systems with condition number greater than 1e8"); // println("%-----------------------------------------\n%"); // println("There are "+interCounter[0]+"("+interCounter[0]*100.0/solved+" 
percent)"+" solved linear systems with interval less than 1e-15"); // println("There are "+interCounter[1]+"("+interCounter[1]*100.0/solved+" percent)"+" solved linear systems with interval less than 1e-14"); // println("There are "+interCounter[2]+"("+interCounter[2]*100.0/solved+" percent)"+" solved linear systems with interval less than 1e-13"); // println("There are "+interCounter[3]+"("+interCounter[3]*100.0/solved+" percent)"+" solved linear systems with interval less than 1e-12"); // println("There are "+interCounter[4]+"("+interCounter[4]*100.0/solved+" percent)"+" solved linear systems with interval less than 1e-11"); // println("There are "+interCounter[5]+"("+interCounter[5]*100.0/solved+" percent)"+" solved linear systems with interval less than 1e-10"); // println("There are "+interCounter[6]+"("+interCounter[6]*100.0/solved+" percent)"+" solved linear systems with interval less than 1e-9"); // println("There are "+interCounter[7]+"("+interCounter[7]*100.0/solved+" percent)"+" solved linear systems with interval less than 1e-8"); // println("There are "+interCounter[8]+"("+interCounter[8]*100.0/solved+" percent)"+" solved linear systems with interval less than 1e-7"); // println("There are "+interCounter[9]+"("+interCounter[9]*100.0/solved+" percent)"+" solved linear systems with interval greater than 1e-7"); } public static int stepLP = 0; public static int stepException=0; public static void outStepStat(int step){ if(step<0){ outStat(); return; } println("%-----------------------------------------%"); println("Step: "+step); int lpCounter=0; for(int i=0;i<lpResult.length;i++) lpCounter+=lpResult[i]; int lps = lpCounter-stepLP; println("LP: "+lps); stepLP = lpCounter; int cn=0, inter=0; for(int i=0;i<condNumber.length;i++) cn+=condNumber[i]; for(int i=0;i<interval.length;i++) inter+=interval[i]; int illCondCounter=cn+inter; int es = illCondCounter-stepException; println("Exception: "+es+" Percent:"+(es+0.0)/lps); stepException = illCondCounter; 
println("%-----------------------------------------%"); } public static void statLP(CohoSolverResult r,boolean findBais){ findBasis[findBais?1:0]++; DoubleInterval i = (DoubleInterval)r.optCost(); double order = Math.abs((i.hi().doubleValue()-i.lo().doubleValue())/i.x().doubleValue());//stupid! if(order>1e-7){ lpResult[0]++; }else if(order>1e-8){ lpResult[1]++; }else if(order>1e-9){ lpResult[2]++; }else if(order>1e-10){ lpResult[3]++; }else if(order>1e-11){ lpResult[4]++; }else if(order>1e-12){ lpResult[5]++; }else if(order>1e-13){ lpResult[6]++; }else if(order>1e-14){ lpResult[7]++; }else if(order>1e-15){ lpResult[8]++; }else if(order>1e-16){ lpResult[9]++; }else if(order>1e-17){ lpResult[10]++; }else{ lpResult[11]++; } } public static void statLPPath(int n){ path[n%5]++; } // public static void statBasis(LPBasis basis){ // basisCounter++; // if(basis.status()==LPbasis.fLPBasis) feasibleBasisCounter++; // } public static void statIllCond(int pos, double n){ switch(pos){ case 0: //cn if(n<1e6) condNumber[0]++; else if(n<1e7) condNumber[1]++; else if(n<1e8) condNumber[2]++; else if(n<1e9) condNumber[3]++; else if(n<1e10) condNumber[4]++; else if(n<1e11) condNumber[5]++; else if(n<1e12) condNumber[6]++; else if(n<1e13) condNumber[7]++; else if(n<1e14) condNumber[8]++; else if(n<1e15) condNumber[9]++; else if(n<1e16) condNumber[10]++; else condNumber[11]++; break; default://interval if(n<1e-2) interval[0]++; else if(n<1e-1) interval[1]++; else if(n<1) interval[2]++; else if(n<1e1) interval[3]++; else interval[4]++; } } public static int lsCounter = 0; public static int lsHybridCounter=0; public static void statLS(int counter){ lsCounter++; lsHybridCounter+=counter; } // public static void statLinearSystem(int pos, double n){ // switch(pos){ // case 0: //cn // if(n<1e0) // condCounter[0]++; // else if(n<1e1) // condCounter[1]++; // else if(n<1e2) // condCounter[2]++; // else if(n<1e3) // condCounter[3]++; // else if(n<1e4) // condCounter[4]++; // else if(n<1e5) // 
condCounter[5]++; // else if(n<1e6) // condCounter[6]++; // else if(n<1e7) // condCounter[7]++; // else if(n<1e8) // condCounter[8]++; // else // condCounter[9]++; // break; // default: // if(n<1e-15) // interCounter[0]++; // else if(n<1e-14) // interCounter[1]++; // else if(n<1e-13) // interCounter[2]++; // else if(n<1e-12) // interCounter[3]++; // else if(n<1e-11) // interCounter[4]++; // else if(n<1e-10) // interCounter[5]++; // else if(n<1e-9) // interCounter[6]++; // else if(n<1e-8) // interCounter[7]++; // else if(n<1e-7) // interCounter[8]++; // else // interCounter[9]++; // } // } // /** // * To debug ill-condition // */ // public static void compException(CohoSolverResult orig, CohoSolverResult except, double cn ){ // println("The condition number is "+cn,"CMP"); // if(orig==null) // println("The LP solver failed to solve the lp if not throw an exception","CMP"); // else{ // println("The result if not throw an exception\n"+orig.toString(),"CMP"); // println("The interval is "+(orig.optCost().hi()-orig.optCost().lo()),"CMP"); // } // println("","CMP"); // println("The result if throw an exception\n"+except.toString(),"CMP"); // println("The interval is "+(except.optCost().hi()-except.optCost().lo()),"CMP"); // println("------------------------------------\n","CMP"); // } // public static void debugIllCond(CohoMatrix A, DoubleIntervalMatrix b, DoubleIntervalMatrix c, // IntegerMatrix basis, double n, int rmVar) { // println("\n-----------Display an exception------------", "ILL"); // println(A.transpose().x().toString(),"ILL"); // println(c.x().toString(),"ILL"); // println(b.x().toString(),"ILL"); // println(basis.toString(),"ILL"); // println(""+rmVar,"ILL"); // println("With conditon number or relative interval:"+n,"ILL"); // println("------------------------------------\n","ILL"); // } // public static void debugIllCond(CohoMatrix A, DoubleIntervalMatrix b, DoubleIntervalMatrix c, // IntegerMatrix basis, double n) { // println("\n-----------Display an 
exception------------", "ILL"); // println(A.transpose().x().toString(),"ILL"); // println(c.x().toString(),"ILL"); // println(b.x().toString(),"ILL"); // println(basis.toString(),"ILL"); // println("With conditon number or relative interval:"+n,"ILL"); // println("------------------------------------\n","ILL"); // } }
/* * Copyright (c) 2008-2016 Haulmont. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.haulmont.cuba.security.app; import com.haulmont.bali.util.Preconditions; import com.haulmont.chile.core.datatypes.Datatype; import com.haulmont.chile.core.model.*; import com.haulmont.cuba.core.*; import com.haulmont.cuba.core.app.ServerConfig; import com.haulmont.cuba.core.app.dynamicattributes.DynamicAttributes; import com.haulmont.cuba.core.app.dynamicattributes.DynamicAttributesTools; import com.haulmont.cuba.core.app.dynamicattributes.DynamicAttributesUtils; import com.haulmont.cuba.core.entity.*; import com.haulmont.cuba.core.global.*; import com.haulmont.cuba.core.sys.AppContext; import com.haulmont.cuba.core.sys.AuditInfoProvider; import com.haulmont.cuba.core.sys.EntityManagerContext; import com.haulmont.cuba.security.entity.*; import org.apache.commons.lang3.BooleanUtils; import org.eclipse.persistence.descriptors.changetracking.ChangeTracker; import org.eclipse.persistence.internal.descriptors.changetracking.AttributeChangeListener; import org.eclipse.persistence.internal.sessions.ObjectChangeSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import org.springframework.transaction.support.TransactionSynchronizationAdapter; import org.springframework.transaction.support.TransactionSynchronizationManager; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import javax.inject.Inject; 
import java.beans.PropertyChangeListener; import java.io.IOException; import java.io.StringWriter; import java.util.*; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.stream.Collectors; import java.util.stream.Stream; @Component(EntityLogAPI.NAME) public class EntityLog implements EntityLogAPI { private static final Logger log = LoggerFactory.getLogger(EntityLog.class); @Inject protected TimeSource timeSource; @Inject protected Persistence persistence; @Inject protected Metadata metadata; @Inject protected MetadataTools metadataTools; @Inject protected AuditInfoProvider auditInfoProvider; @Inject protected ReferenceToEntitySupport referenceToEntitySupport; @Inject protected DynamicAttributes dynamicAttributes; @Inject protected DynamicAttributesTools dynamicAttributesTools; @Inject protected DataManager dataManager; @Inject protected ServerConfig serverConfig; protected volatile boolean loaded; protected EntityLogConfig config; @GuardedBy("lock") protected Map<String, Set<String>> entitiesManual; @GuardedBy("lock") protected Map<String, Set<String>> entitiesAuto; protected ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); protected ThreadLocal<Boolean> entityLogSwitchedOn = new ThreadLocal<>(); @Inject public EntityLog(Configuration configuration) { config = configuration.getConfig(EntityLogConfig.class); } @Override public void processLoggingForCurrentThread(boolean enabled) { entityLogSwitchedOn.set(enabled); } @Override public boolean isLoggingForCurrentThread() { return !Boolean.FALSE.equals(entityLogSwitchedOn.get()); } @Override public void flush() { EntityManagerContext context = persistence.getEntityManagerContext(); List<EntityLogItem> items = context.getAttribute(EntityLog.class.getName()); if (items == null || items.isEmpty()) return; for (EntityLogItem item : items) { List<EntityLogItem> sameEntityList = items.stream() .filter(entityLogItem -> entityLogItem.getDbGeneratedIdEntity() != null ? 
entityLogItem.getDbGeneratedIdEntity().equals(item.getDbGeneratedIdEntity()) : entityLogItem.getObjectEntityId().equals(item.getObjectEntityId())) .collect(Collectors.toList()); EntityLogItem itemToSave = sameEntityList.get(0); computeChanges(itemToSave, sameEntityList); saveItem(itemToSave); } } protected void computeChanges(EntityLogItem itemToSave, List<EntityLogItem> sameEntityList) { Set<String> allAttributes = sameEntityList.stream() .flatMap(entityLogItem -> entityLogItem.getAttributes().stream().map(EntityLogAttr::getName)) .collect(Collectors.toSet()); for (String attributeName : allAttributes) { // old value from the first item sameEntityList.get(0).getAttributes().stream() .filter(entityLogAttr -> entityLogAttr.getName().equals(attributeName)) .findFirst() .ifPresent(entityLogAttr -> setAttributeOldValue(entityLogAttr, itemToSave)); // new value from the last item sameEntityList.get(sameEntityList.size() - 1).getAttributes().stream() .filter(entityLogAttr -> entityLogAttr.getName().equals(attributeName)) .findFirst() .ifPresent(entityLogAttr -> setAttributeNewValue(entityLogAttr, itemToSave)); } Properties properties = new Properties(); for (EntityLogAttr attr : itemToSave.getAttributes()) { properties.setProperty(attr.getName(), attr.getValue()); if (attr.getValueId() != null) { properties.setProperty(attr.getName() + EntityLogAttr.VALUE_ID_SUFFIX, attr.getValueId()); } if (attr.getOldValue() != null) { properties.setProperty(attr.getName() + EntityLogAttr.OLD_VALUE_SUFFIX, attr.getOldValue()); } if (attr.getOldValueId() != null) { properties.setProperty(attr.getName() + EntityLogAttr.OLD_VALUE_ID_SUFFIX, attr.getOldValueId()); } if (attr.getMessagesPack() != null) { properties.setProperty(attr.getName() + EntityLogAttr.MP_SUFFIX, attr.getMessagesPack()); } } if (itemToSave.getType() == EntityLogItem.Type.MODIFY) { sameEntityList.stream() .filter(entityLogItem -> entityLogItem.getType() == EntityLogItem.Type.CREATE) .findFirst() .ifPresent(entityLogItem 
-> itemToSave.setType(EntityLogItem.Type.CREATE)); } itemToSave.setChanges(getChanges(properties)); } protected void setAttributeOldValue(EntityLogAttr entityLogAttr, EntityLogItem itemToSave) { EntityLogAttr attr = getAttrToSave(entityLogAttr, itemToSave); attr.setOldValue(entityLogAttr.getOldValue()); attr.setOldValueId(entityLogAttr.getOldValueId()); } protected void setAttributeNewValue(EntityLogAttr entityLogAttr, EntityLogItem itemToSave) { EntityLogAttr attr = getAttrToSave(entityLogAttr, itemToSave); attr.setValue(entityLogAttr.getValue()); attr.setValueId(entityLogAttr.getValueId()); } protected EntityLogAttr getAttrToSave(EntityLogAttr entityLogAttr, EntityLogItem itemToSave) { EntityLogAttr attr = itemToSave.getAttributes().stream() .filter(a -> a.getName().equals(entityLogAttr.getName())) .findFirst() .orElse(null); if (attr == null) { attr = metadata.create(EntityLogAttr.class); attr.setName(entityLogAttr.getName()); itemToSave.getAttributes().add(attr); } return attr; } protected void saveItem(EntityLogItem item) { String storeName = metadataTools.getStoreName(metadata.getClassNN(item.getEntity())); if (item.getDbGeneratedIdEntity() == null) { if (Stores.isMain(storeName)) { EntityManager em = persistence.getEntityManager(); em.persist(item); } else { // Create a new transaction in main DB if we are saving an entity from additional data store try (Transaction tx = persistence.createTransaction()) { EntityManager em = persistence.getEntityManager(); em.persist(item); tx.commit(); } } } else { TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() { @Override public void afterCommit() { Number id = item.getDbGeneratedIdEntity().getId().getNN(); item.setObjectEntityId(id); try (Transaction tx = persistence.createTransaction()) { EntityManager em = persistence.getEntityManager(); em.persist(item); tx.commit(); } } }); } } @Override public synchronized boolean isEnabled() { return config.getEnabled() && 
isLoggingForCurrentThread(); } @Override public synchronized void setEnabled(boolean enabled) { if (enabled != config.getEnabled()) { config.setEnabled(enabled); } } @Override public void invalidateCache() { lock.writeLock().lock(); try { log.debug("Invalidating cache"); entitiesManual = null; entitiesAuto = null; loaded = false; } finally { lock.writeLock().unlock(); } } protected Set<String> getLoggedAttributes(String entity, boolean auto) { lock.readLock().lock(); try { if (!loaded) { // upgrade lock lock.readLock().unlock(); lock.writeLock().lock(); try { if (!loaded) { // recheck because we unlocked for a while loadEntities(); loaded = true; } } finally { // downgrade lock lock.writeLock().unlock(); lock.readLock().lock(); } } Set<String> attributes; if (auto) attributes = entitiesAuto.get(entity); else attributes = entitiesManual.get(entity); return attributes == null ? null : Collections.unmodifiableSet(attributes); } finally { lock.readLock().unlock(); } } protected void loadEntities() { log.debug("Loading entities"); entitiesManual = new HashMap<>(); entitiesAuto = new HashMap<>(); Transaction tx = persistence.createTransaction(); try { EntityManager em = persistence.getEntityManager(); TypedQuery<LoggedEntity> q = em.createQuery( "select e from sec$LoggedEntity e where e.auto = true or e.manual = true", LoggedEntity.class); List<LoggedEntity> list = q.getResultList(); for (LoggedEntity loggedEntity : list) { if (loggedEntity.getName() == null) { throw new IllegalStateException("Unable to initialize EntityLog: empty LoggedEntity.name"); } Set<String> attributes = new HashSet<>(); for (LoggedAttribute loggedAttribute : loggedEntity.getAttributes()) { if (loggedAttribute.getName() == null) { throw new IllegalStateException("Unable to initialize EntityLog: empty LoggedAttribute.name"); } attributes.add(loggedAttribute.getName()); } if (BooleanUtils.isTrue(loggedEntity.getAuto())) entitiesAuto.put(loggedEntity.getName(), attributes); if 
(BooleanUtils.isTrue(loggedEntity.getManual())) entitiesManual.put(loggedEntity.getName(), attributes); } tx.commit(); } finally { tx.end(); } log.debug("Loaded: entitiesAuto={}, entitiesManual={}", entitiesAuto.size(), entitiesManual.size()); } protected String getEntityName(Entity entity) { MetaClass metaClass; if (entity instanceof CategoryAttributeValue) { CategoryAttribute categoryAttribute = ((CategoryAttributeValue) entity).getCategoryAttribute(); Preconditions.checkNotNullArgument(categoryAttribute, "Category attribute is null"); metaClass = metadata.getClassNN(categoryAttribute.getCategoryEntityType()); } else { metaClass = metadata.getSession().getClassNN(entity.getClass()); } return metadata.getExtendedEntities().getOriginalOrThisMetaClass(metaClass).getName(); } protected boolean doNotRegister(Entity entity) { if (entity == null) { return true; } if (entity instanceof EntityLogItem) { return true; } if (metadata.getTools().hasCompositePrimaryKey(entity.getMetaClass()) && !(entity instanceof HasUuid)) { return true; } return !isEnabled(); } @Override public void registerCreate(Entity entity) { if (entity == null) return; registerCreate(entity, false); } @Override public void registerCreate(Entity entity, boolean auto) { try { if (doNotRegister(entity)) return; String masterEntityName = getEntityName(entity); boolean isCategoryAttributeValue = entity instanceof CategoryAttributeValue; Set<String> attributes = getLoggedAttributes(masterEntityName, auto); if (attributes != null && attributes.contains("*")) { attributes = getAllAttributes(entity); } if (attributes == null) { return; } MetaClass metaClass = metadata.getClassNN(masterEntityName); attributes = filterRemovedAttributes(metaClass, attributes); if (isCategoryAttributeValue) { internalRegisterModifyAttributeValue((CategoryAttributeValue) entity, null, attributes); } else { String storeName = metadata.getTools().getStoreName(metaClass); if (Stores.isMain(storeName)) { internalRegisterCreate(entity, 
masterEntityName, attributes); } else { // Create a new transaction in main DB if we are saving an entity from additional data store try (Transaction tx = persistence.createTransaction()) { internalRegisterCreate(entity, masterEntityName, attributes); tx.commit(); } } } } catch (Exception e) { logError(entity, e); } } protected Set<String> filterRemovedAttributes(MetaClass metaClass, Set<String> attributes) { // filter attributes that do not exists in entity anymore return attributes.stream() .filter(attributeName -> { if (DynamicAttributesUtils.isDynamicAttribute(attributeName)) { return dynamicAttributesTools.getMetaPropertyPath(metaClass, attributeName) != null; } else { return metaClass.getPropertyPath(attributeName) != null; } }) .collect(Collectors.toSet()); } protected void internalRegisterCreate(Entity entity, String entityName, Set<String> attributes) throws IOException { Date ts = timeSource.currentTimestamp(); EntityManager em = persistence.getEntityManager(); EntityLogItem item = metadata.create(EntityLogItem.class); item.setEventTs(ts); item.setUser(findUser(em)); item.setType(EntityLogItem.Type.CREATE); item.setEntity(entityName); if (entity instanceof BaseDbGeneratedIdEntity) { item.setDbGeneratedIdEntity((BaseDbGeneratedIdEntity) entity); } else { item.setObjectEntityId(referenceToEntitySupport.getReferenceId(entity)); } item.setAttributes(createLogAttributes(entity, attributes, null)); enqueueItem(item); } protected void internalRegisterModifyAttributeValue(CategoryAttributeValue entity, @Nullable EntityAttributeChanges changes, Set<String> attributes) { String propertyName = DynamicAttributesUtils.encodeAttributeCode(entity.getCode()); if (!attributes.contains(propertyName)) { return; } Date ts = timeSource.currentTimestamp(); EntityManager em = persistence.getEntityManager(); Set<String> dirty = changes == null ? 
// NOTE(review): this excerpt starts inside a method whose header is above this chunk
// (the tail of internalRegisterModifyAttributeValue for CategoryAttributeValue entities).
// The expression below is the right-hand half of a ternary that picks the source of
// dirty-field names: managed-entity dirty fields vs. an explicit changes object.
                persistence.getTools().getDirtyFields(entity) : changes.getOwnAttributes();
        // A dynamic-attribute row is being soft-deleted when deleteTs became dirty and the
        // entity reports itself deleted.
        boolean registerDeleteOp = dirty.contains("deleteTs") && entity.isDeleted();
        // Only the *Value columns (stringValue, intValue, dateValue, ...) carry the actual
        // attribute payload; other dirty fields are bookkeeping and are not logged.
        boolean hasChanges = dirty.stream().anyMatch(s -> s.endsWith("Value"));
        if (hasChanges) {
            EntityLogItem item = metadata.create(EntityLogItem.class);
            item.setEventTs(ts);
            item.setUser(findUser(em));
            item.setType(EntityLogItem.Type.MODIFY);
            item.setEntity(getEntityName(entity));
            item.setObjectEntityId(entity.getObjectEntityId());
            item.setAttributes(createDynamicLogAttribute(entity, changes, registerDeleteOp));
            enqueueItem(item);
        }
    }

    /**
     * Resolves the {@code User} to attribute log records to: the current session user when
     * the application is fully started, otherwise the JMX login configured in
     * {@code cuba.jmxUserLogin} (e.g. during startup/JMX-triggered operations).
     *
     * @throws RuntimeException if the configured JMX user does not exist
     */
    protected User findUser(EntityManager em) {
        UUID currentUserId = auditInfoProvider.getCurrentUserId();
        if (AppContext.isStarted() && currentUserId != null)
            // getReference avoids loading the user row; only the FK is needed.
            return em.getReference(User.class, currentUserId);
        else {
            String login = serverConfig.getJmxUserLogin();
            TypedQuery<User> query = em.createQuery("select u from sec$User u where u.loginLowerCase = ?1", User.class);
            query.setParameter(1, login);
            User user = query.getFirstResult();
            if (user != null)
                return user;
            else
                throw new RuntimeException("The user '" + login + "' specified in cuba.jmxUserLogin does not exist");
        }
    }

    /**
     * Buffers a log item in the current EntityManager context; items are presumably flushed
     * to the database later in the transaction lifecycle (flush point not visible here —
     * TODO confirm against the enclosing class).
     */
    protected void enqueueItem(EntityLogItem item) {
        EntityManagerContext context = persistence.getEntityManagerContext();
        List<EntityLogItem> items = context.getAttribute(EntityLog.class.getName());
        if (items == null) {
            items = new ArrayList<>();
            context.setAttribute(EntityLog.class.getName(), items);
        }
        items.add(item);
    }

    @Override
    public void registerModify(Entity entity) {
        registerModify(entity, false);
    }

    @Override
    public void registerModify(Entity entity, boolean auto) {
        registerModify(entity, auto, null);
    }

    /**
     * Registers a MODIFY (or RESTORE) log record for the given entity.
     *
     * @param entity  changed entity
     * @param auto    whether the call originates from automatic change tracking
     * @param changes explicit attribute changes; if null, dirty state is taken from the
     *                persistence layer
     */
    @Override
    public void registerModify(Entity entity, boolean auto, @Nullable EntityAttributeChanges changes) {
        try {
            if (doNotRegister(entity))
                return;

            String masterEntityName = getEntityName(entity);
            // Dynamic attribute values are logged against their master entity, not as
            // CategoryAttributeValue rows.
            boolean isCategoryAttributeValue = entity instanceof CategoryAttributeValue;
            Set<String> attributes = getLoggedAttributes(masterEntityName, auto);
            if (attributes != null && attributes.contains("*")) {
                // Wildcard configuration: expand to every loggable attribute.
                attributes = getAllAttributes(entity);
            }
            if (attributes == null) {
                // Entity is not configured for logging.
                return;
            }

            MetaClass metaClass = metadata.getClassNN(masterEntityName);
            attributes = filterRemovedAttributes(metaClass, attributes);

            if (isCategoryAttributeValue) {
                internalRegisterModifyAttributeValue((CategoryAttributeValue) entity, changes, attributes);
            } else {
                String storeName = metadataTools.getStoreName(metaClass);
                if (Stores.isMain(storeName)) {
                    internalRegisterModify(entity, changes, metaClass, storeName, attributes);
                } else {
                    // Create a new transaction in main DB if we are saving an entity from
                    // an additional data store: the log table lives in the main store.
                    try (Transaction tx = persistence.createTransaction()) {
                        internalRegisterModify(entity, changes, metaClass, storeName, attributes);
                        tx.commit();
                    }
                }
            }
        } catch (Exception e) {
            // Logging must never break the business transaction — report and continue.
            logError(entity, e);
        }
    }

    /**
     * Builds and enqueues a MODIFY/RESTORE log item for a regular (non-dynamic-attribute)
     * entity. RESTORE is detected when a soft-deleted entity's deleteTs is dirty but the
     * entity is no longer deleted.
     */
    protected void internalRegisterModify(Entity entity, @Nullable EntityAttributeChanges changes, MetaClass metaClass,
                                          String storeName, Set<String> attributes) {
        Date ts = timeSource.currentTimestamp();
        EntityManager em = persistence.getEntityManager();

        Set<String> dirty = calculateDirtyFields(entity, changes);
        Set<EntityLogAttr> entityLogAttrs;
        EntityLogItem.Type type;
        if (entity instanceof SoftDelete && dirty.contains("deleteTs") && !((SoftDelete) entity).isDeleted()) {
            // Undelete: log all configured attributes, not only dirty ones.
            type = EntityLogItem.Type.RESTORE;
            entityLogAttrs = createLogAttributes(entity, attributes, changes);
        } else {
            type = EntityLogItem.Type.MODIFY;
            Set<String> dirtyAttributes = new HashSet<>();
            for (String attributePath : attributes) {
                MetaPropertyPath propertyPath = metaClass.getPropertyPath(attributePath);
                Preconditions.checkNotNullArgument(propertyPath, "Property path %s isn't exists for type %s", attributePath, metaClass.getName());
                if (dirty.contains(attributePath)) {
                    dirtyAttributes.add(attributePath);
                } else if (!Stores.getAdditional().isEmpty()) {
                    // Cross-data-store references are persisted via a surrogate id column;
                    // treat the attribute as dirty when that id column changed.
                    String idAttributePath = getIdAttributePath(propertyPath, storeName);
                    if (idAttributePath != null && dirty.contains(idAttributePath)) {
                        dirtyAttributes.add(attributePath);
                    }
                }
            }
            entityLogAttrs = createLogAttributes(entity, dirtyAttributes, changes);
        }
        // A RESTORE is logged even if no attribute values changed.
        if (!entityLogAttrs.isEmpty() || type == EntityLogItem.Type.RESTORE) {
            EntityLogItem item = metadata.create(EntityLogItem.class);
            item.setEventTs(ts);
            item.setUser(findUser(em));
            item.setType(type);
            // Log against the original (non-extended) entity name.
            item.setEntity(metadata.getExtendedEntities().getOriginalOrThisMetaClass(metaClass).getName());
            item.setObjectEntityId(referenceToEntitySupport.getReferenceId(entity));
            item.setAttributes(entityLogAttrs);
            enqueueItem(item);
        }
    }

    /**
     * Converts the given attribute names into {@code EntityLogAttr} records with stringified
     * new/old values. Dynamic attributes are skipped here (handled by
     * {@link #createDynamicLogAttribute}).
     */
    protected Set<EntityLogAttr> createLogAttributes(Entity entity, Set<String> attributes,
                                                     @Nullable EntityAttributeChanges changes) {
        Set<EntityLogAttr> result = new HashSet<>();
        for (String name : attributes) {
            if (DynamicAttributesUtils.isDynamicAttribute(name)) {
                continue;
            }
            EntityLogAttr attr = metadata.create(EntityLogAttr.class);
            attr.setName(name);

            String value = stringify(entity.getValueEx(name), entity.getMetaClass().getProperty(name));
            attr.setValue(value);

            // NOTE(review): valueId is derived from the stringified value, not the raw
            // attribute value — so it is only non-null when the String itself is an entity,
            // which getValueId never matches; looks like it should receive the raw value.
            // TODO confirm against upstream before changing.
            Object valueId = getValueId(value);
            if (valueId != null)
                attr.setValueId(valueId.toString());

            if (changes != null) {
                Object oldValue = changes.getOldValueEx(name);
                attr.setOldValue(stringify(oldValue, entity.getMetaClass().getProperty(name)));
                Object oldValueId = getValueId(oldValue);
                if (oldValueId != null) {
                    attr.setOldValueId(oldValueId.toString());
                }
            }

            if (entity.getMetaClass().getProperty(name) != null) {
                // skip embedded properties (they have no MetaProperty of their own)
                MessageTools messageTools = AppBeans.get(MessageTools.NAME);
                String mp = messageTools.inferMessagePack(name, entity);
                if (mp != null)
                    attr.setMessagesPack(mp);
            }
            result.add(attr);
        }
        return result;
    }

    /**
     * Builds the single log attribute for a dynamic-attribute value change. The old value is
     * looked up either in {@code changes} or, for delete operations, in the persistence
     * layer's pre-change snapshot.
     */
    protected Set<EntityLogAttr> createDynamicLogAttribute(CategoryAttributeValue entity,
                                                           @Nullable EntityAttributeChanges changes, boolean registerDeleteOp) {
        Set<EntityLogAttr> result = new HashSet<>();
        EntityLogAttr attr = metadata.create(EntityLogAttr.class);
        attr.setName(DynamicAttributesUtils.encodeAttributeCode(entity.getCode()));

        MetaProperty valueMetaProperty = entity.getMetaClass().getProperty(getCategoryAttributeValueName(entity));

        Object value = entity.getValue();
        attr.setValue(stringify(value, valueMetaProperty));

        Object valueId = getValueId(value);
        if (valueId != null)
            attr.setValueId(valueId.toString());

        if (changes != null || registerDeleteOp) {
            Object oldValue = getOldCategoryAttributeValue(entity, changes);
            attr.setOldValue(stringify(oldValue, valueMetaProperty));
            Object oldValueId = getValueId(oldValue);
            if (oldValueId != null) {
                attr.setOldValueId(oldValueId.toString());
            }
        }
        result.add(attr);
        return result;
    }

    /**
     * Serializes a Properties object to its store() representation with the timestamp
     * comment line stripped, producing a stable "key=value" change string.
     */
    protected String getChanges(Properties properties) {
        try {
            StringWriter writer = new StringWriter();
            properties.store(writer, null);
            String changes = writer.toString();
            if (changes.startsWith("#"))
                changes = changes.substring(changes.indexOf("\n") + 1); // cut off comments line
            return changes;
        } catch (IOException e) {
            throw new RuntimeException("Error writing entity log attributes", e);
        }
    }

    @Override
    public void registerDelete(Entity entity) {
        registerDelete(entity, false);
    }

    /**
     * Registers a DELETE log record. Mirrors {@link #registerModify(Entity, boolean,
     * EntityAttributeChanges)}: wildcard expansion, removed-attribute filtering, and a
     * separate main-store transaction for entities from additional data stores.
     */
    @Override
    public void registerDelete(Entity entity, boolean auto) {
        try {
            if (doNotRegister(entity))
                return;

            String masterEntityName = getEntityName(entity);
            boolean isCategoryAttributeValue = entity instanceof CategoryAttributeValue;
            Set<String> attributes = getLoggedAttributes(masterEntityName, auto);
            if (attributes != null && attributes.contains("*")) {
                attributes = getAllAttributes(entity);
            }
            if (attributes == null) {
                return;
            }

            MetaClass metaClass = metadata.getClassNN(masterEntityName);
            attributes = filterRemovedAttributes(metaClass, attributes);

            if (isCategoryAttributeValue) {
                // Deleting a dynamic attribute value is logged as a MODIFY of the master entity.
                internalRegisterModifyAttributeValue((CategoryAttributeValue) entity, null, attributes);
            } else {
                String storeName = metadata.getTools().getStoreName(metaClass);
                if (Stores.isMain(storeName)) {
                    internalRegisterDelete(entity, masterEntityName, attributes);
                } else {
                    // Create a new transaction in main DB if we are saving an entity from
                    // an additional data store
                    try (Transaction tx = persistence.createTransaction()) {
                        internalRegisterDelete(entity, masterEntityName, attributes);
                        tx.commit();
                    }
                }
            }
        } catch (Exception e) {
            logError(entity, e);
        }
    }

    /** Builds and enqueues a DELETE log item snapshotting the given attributes. */
    protected void internalRegisterDelete(Entity entity, String entityName, Set<String> attributes) throws IOException {
        Date ts = timeSource.currentTimestamp();
        EntityManager em = persistence.getEntityManager();

        EntityLogItem item = metadata.create(EntityLogItem.class);
        item.setEventTs(ts);
        item.setUser(findUser(em));
        item.setType(EntityLogItem.Type.DELETE);
        item.setEntity(entityName);
        item.setObjectEntityId(referenceToEntitySupport.getReferenceId(entity));
        item.setAttributes(createLogAttributes(entity, attributes, null));
        enqueueItem(item);
    }

    /**
     * Collects every loggable attribute of the entity: all non-collection meta properties
     * plus non-collection dynamic attributes registered for the meta class. Returns null
     * for a null entity.
     */
    protected Set<String> getAllAttributes(Entity entity) {
        if (entity == null) {
            return null;
        }
        Set<String> attributes = new HashSet<>();
        MetaClass metaClass = metadata.getClassNN(entity.getClass());
        for (MetaProperty metaProperty : metaClass.getProperties()) {
            Range range = metaProperty.getRange();
            // to-many references are not logged attribute-by-attribute
            if (range.isClass() && range.getCardinality().isMany()) {
                continue;
            }
            attributes.add(metaProperty.getName());
        }
        Collection<CategoryAttribute> categoryAttributes = dynamicAttributes.getAttributesForMetaClass(metaClass);
        if (categoryAttributes != null) {
            for (CategoryAttribute categoryAttribute : categoryAttributes) {
                if (BooleanUtils.isNotTrue(categoryAttribute.getIsCollection())) {
                    attributes.add(
                            DynamicAttributesUtils.getMetaPropertyPath(metaClass, categoryAttribute).getMetaProperty().getName());
                }
            }
        }
        return attributes;
    }

    /**
     * Returns the reference id for persistent entity values; null for embeddables and
     * anything that is not a BaseGenericIdEntity.
     */
    protected Object getValueId(Object value) {
        if (value instanceof EmbeddableEntity) {
            return null;
        } else if (value instanceof BaseGenericIdEntity) {
            return referenceToEntitySupport.getReferenceId((Entity) value);
        } else {
            return null;
        }
    }

    /**
     * Converts an attribute value to its logged string form: instance name for entities,
     * datatype-formatted for dates, a recursive "[a,b,c]" rendering for iterables, and
     * {@code String.valueOf} otherwise. Null becomes the empty string.
     */
    protected String stringify(Object value, MetaProperty metaProperty) {
        if (value == null)
            return "";
        else if (value instanceof Instance) {
            return metadataTools.getInstanceName((Instance) value);
        } else if (value instanceof Date) {
            Datatype datatype = metaProperty.getRange().asDatatype();
            return datatype.format(value);
        } else if (value instanceof Iterable) {
            StringBuilder sb = new StringBuilder();
            sb.append("[");
            for (Object obj : (Iterable) value) {
                sb.append(stringify(obj, metaProperty)).append(",");
            }
            if (sb.length() > 1)
                sb.deleteCharAt(sb.length() - 1); // drop trailing comma
            sb.append("]");
            return sb.toString();
        } else {
            return String.valueOf(value);
        }
    }

    /**
     * Reads the previous value of a dynamic attribute from either the supplied change set
     * or the persistence layer's old-value snapshot, using the typed column that matches
     * the attribute's data type. Returns null for unmapped data types (e.g. ENTITY).
     */
    protected Object getOldCategoryAttributeValue(CategoryAttributeValue attributeValue, EntityAttributeChanges changes) {
        CategoryAttribute categoryAttribute = attributeValue.getCategoryAttribute();
        PersistenceTools persistenceTools = persistence.getTools();
        String fieldName = null;
        switch (categoryAttribute.getDataType()) {
            case DATE:
                fieldName = "dateValue";
                break;
            case ENUMERATION:
            case STRING:
                fieldName = "stringValue";
                break;
            case INTEGER:
                fieldName = "intValue";
                break;
            case DOUBLE:
                fieldName = "doubleValue";
                break;
            case BOOLEAN:
                fieldName = "booleanValue";
                break;
        }
        if (fieldName != null) {
            return changes != null ?
                    changes.getOldValue(fieldName) :
                    persistenceTools.getOldValue(attributeValue, fieldName);
        }
        return null;
    }

    /**
     * Determines the set of dirty attribute names: from the explicit change set when given,
     * otherwise from EclipseLink's attribute change listener attached to the managed entity.
     * Returns an empty set for detached or non-tracking entities.
     */
    protected Set<String> calculateDirtyFields(Entity entity, EntityAttributeChanges changes) {
        if (changes == null) {
            if (!(entity instanceof ChangeTracker) || !PersistenceHelper.isManaged(entity))
                return Collections.emptySet();
            PropertyChangeListener propertyChangeListener = ((ChangeTracker) entity)._persistence_getPropertyChangeListener();
            if (propertyChangeListener == null)
                throw new IllegalStateException("Entity '" + entity + "' is a ChangeTracker but has no PropertyChangeListener");
            changes = new EntityAttributeChanges();
            ObjectChangeSet objectChanges = ((AttributeChangeListener) propertyChangeListener).getObjectChangeSet();
            if (objectChanges != null) {
                changes.addChanges(objectChanges);
            }
        }
        return changes.getAttributes();
    }

    /**
     * For a cross-data-store reference, rewrites the last segment of the property path to
     * the surrogate id attribute that is actually persisted in this store; null when the
     * property is not a cross-store reference.
     */
    protected String getIdAttributePath(MetaPropertyPath propertyPath, String storeName) {
        String idAttribute = metadataTools.getCrossDataStoreReferenceIdProperty(storeName, propertyPath.getMetaProperty());
        if (idAttribute != null) {
            List<String> parts = Stream.of(propertyPath.getMetaProperties())
                    .map(MetaProperty::getName)
                    .collect(Collectors.toList());
            parts.set(parts.size() - 1, idAttribute);
            return String.join(".", parts);
        }
        return null;
    }

    /**
     * Maps a dynamic attribute's data type to the CategoryAttributeValue column holding its
     * value; null for unmapped types.
     */
    protected String getCategoryAttributeValueName(CategoryAttributeValue attributeValue) {
        CategoryAttribute categoryAttribute = attributeValue.getCategoryAttribute();
        String fieldName = null;
        switch (categoryAttribute.getDataType()) {
            case DATE:
                fieldName = "dateValue";
                break;
            case ENUMERATION:
            case STRING:
                fieldName = "stringValue";
                break;
            case INTEGER:
                fieldName = "intValue";
                break;
            case DOUBLE:
                fieldName = "doubleValue";
                break;
            case BOOLEAN:
                fieldName = "booleanValue";
                break;
        }
        return fieldName;
    }

    /** Last-resort handler: entity logging failures are reported but never propagated. */
    protected void logError(Entity entity, Exception e) {
        log.warn("Unable to log entity {}, id={}", entity, entity.getId(), e);
    }
}
package de.danoeh.antennapod.fragment; import android.annotation.SuppressLint; import android.app.Activity; import android.content.ActivityNotFoundException; import android.content.ClipData; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.support.v4.app.Fragment; import android.util.Log; import android.view.ContextMenu; import android.view.ContextMenu.ContextMenuInfo; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.webkit.WebSettings.LayoutAlgorithm; import android.webkit.WebView; import android.webkit.WebViewClient; import android.widget.Toast; import de.danoeh.antennapod.R; import de.danoeh.antennapod.core.feed.FeedItem; import de.danoeh.antennapod.core.preferences.UserPreferences; import de.danoeh.antennapod.core.storage.DBReader; import de.danoeh.antennapod.core.util.Converter; import de.danoeh.antennapod.core.util.IntentUtils; import de.danoeh.antennapod.core.util.ShareUtils; import de.danoeh.antennapod.core.util.ShownotesProvider; import de.danoeh.antennapod.core.util.playback.Playable; import de.danoeh.antennapod.core.util.playback.PlaybackController; import de.danoeh.antennapod.core.util.playback.Timeline; /** * Displays the description of a Playable object in a Webview. 
 */
public class ItemDescriptionFragment extends Fragment {

    private static final String TAG = "ItemDescriptionFragment";

    // Shared-preference file and keys used to persist the scroll position between views.
    private static final String PREF = "ItemDescriptionFragmentPrefs";
    private static final String PREF_SCROLL_Y = "prefScrollY";
    private static final String PREF_PLAYABLE_ID = "prefPlayableId";

    // Fragment argument keys; exactly one of ARG_PLAYABLE / ARG_FEEDITEM_ID is expected.
    private static final String ARG_PLAYABLE = "arg.playable";
    private static final String ARG_FEEDITEM_ID = "arg.feeditem";
    private static final String ARG_SAVE_STATE = "arg.saveState";
    private static final String ARG_HIGHLIGHT_TIMECODES = "arg.highlightTimecodes";

    private WebView webvDescription;

    // Source of the shownotes HTML; either the Playable itself or a loaded FeedItem.
    private ShownotesProvider shownotesProvider;
    private Playable media;

    // Background task that renders the shownotes; cancelled in onDetach/onDestroy.
    private AsyncTask<Void, Void, Void> webViewLoader;

    /**
     * URL that was selected via long-press.
     */
    private String selectedURL;

    /**
     * True if Fragment should save its state (e.g. scrolling position) in a
     * shared preference.
     */
    private boolean saveState;

    /**
     * True if Fragment should highlight timecodes (e.g. time codes in the HH:MM:SS format).
     */
    private boolean highlightTimecodes;

    /** Creates a fragment that renders the shownotes of a Playable directly. */
    public static ItemDescriptionFragment newInstance(Playable media, boolean saveState,
                                                      boolean highlightTimecodes) {
        ItemDescriptionFragment f = new ItemDescriptionFragment();
        Bundle args = new Bundle();
        args.putParcelable(ARG_PLAYABLE, media);
        args.putBoolean(ARG_SAVE_STATE, saveState);
        args.putBoolean(ARG_HIGHLIGHT_TIMECODES, highlightTimecodes);
        f.setArguments(args);
        return f;
    }

    /** Creates a fragment that loads the FeedItem by id in the background first. */
    public static ItemDescriptionFragment newInstance(FeedItem item, boolean saveState,
                                                      boolean highlightTimecodes) {
        ItemDescriptionFragment f = new ItemDescriptionFragment();
        Bundle args = new Bundle();
        args.putLong(ARG_FEEDITEM_ID, item.getId());
        args.putBoolean(ARG_SAVE_STATE, saveState);
        args.putBoolean(ARG_HIGHLIGHT_TIMECODES, highlightTimecodes);
        f.setArguments(args);
        return f;
    }

    @SuppressLint("NewApi")
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        Log.d(TAG, "Creating view");
        // The WebView IS the fragment's view; it is created programmatically.
        webvDescription = new WebView(getActivity());
        if (UserPreferences.getTheme() == R.style.Theme_AntennaPod_Dark) {
            // Hardware layers render dark-themed WebViews incorrectly on HC/ICS-MR1.
            if (Build.VERSION.SDK_INT >= 11
                    && Build.VERSION.SDK_INT <= Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1) {
                webvDescription.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
            }
            webvDescription.setBackgroundColor(getResources().getColor(
                    R.color.black));
        }
        webvDescription.getSettings().setUseWideViewPort(false);
        webvDescription.getSettings().setLayoutAlgorithm(
                LayoutAlgorithm.NARROW_COLUMNS);
        webvDescription.getSettings().setLoadWithOverviewMode(true);
        webvDescription.setOnLongClickListener(webViewLongClickListener);
        webvDescription.setWebViewClient(new WebViewClient() {

            @Override
            public boolean shouldOverrideUrlLoading(WebView view, String url) {
                // Timecode links seek the player; everything else opens externally.
                if (Timeline.isTimecodeLink(url)) {
                    onTimecodeLinkSelected(url);
                } else {
                    Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
                    try {
                        startActivity(intent);
                    } catch (ActivityNotFoundException e) {
                        // No handler for this URL scheme; swallow and stay in-app.
                        e.printStackTrace();
                        return true;
                    }
                }
                // Always consume the navigation; the WebView never follows links itself.
                return true;
            }

            @Override
            public void onPageFinished(WebView view, String url) {
                super.onPageFinished(view, url);
                Log.d(TAG, "Page finished");
                // Restoring the scroll position might not always work
                view.postDelayed(new Runnable() {

                    @Override
                    public void run() {
                        restoreFromPreference();
                    }
                }, 50);
            }

        });

        registerForContextMenu(webvDescription);
        return webvDescription;
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        Log.d(TAG, "Fragment attached");
    }

    @Override
    public void onDetach() {
        super.onDetach();
        Log.d(TAG, "Fragment detached");
        if (webViewLoader != null) {
            webViewLoader.cancel(true);
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        Log.d(TAG, "Fragment destroyed");
        if (webViewLoader != null) {
            webViewLoader.cancel(true);
        }
        if (webvDescription != null) {
            // Tear the WebView down explicitly to release its native resources.
            webvDescription.removeAllViews();
            webvDescription.destroy();
        }
    }

    @SuppressLint("NewApi")
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Log.d(TAG, "Creating fragment");
        Bundle args = getArguments();
        saveState = args.getBoolean(ARG_SAVE_STATE, false);
        highlightTimecodes = args.getBoolean(ARG_HIGHLIGHT_TIMECODES, false);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        Bundle args = getArguments();
        if (args.containsKey(ARG_PLAYABLE)) {
            // Playable supplied directly: it doubles as the shownotes provider.
            media = args.getParcelable(ARG_PLAYABLE);
            shownotesProvider = media;
            startLoader();
        } else if (args.containsKey(ARG_FEEDITEM_ID)) {
            // FeedItem must be loaded from the database off the UI thread first.
            AsyncTask<Void, Void, FeedItem> itemLoadTask = new AsyncTask<Void, Void, FeedItem>() {
                @Override
                protected FeedItem doInBackground(Void... voids) {
                    return DBReader.getFeedItem(getActivity(), getArguments().getLong(ARG_FEEDITEM_ID));
                }

                @Override
                protected void onPostExecute(FeedItem feedItem) {
                    super.onPostExecute(feedItem);
                    shownotesProvider = feedItem;
                    startLoader();
                }
            };
            if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.GINGERBREAD_MR1) {
                itemLoadTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
            } else {
                itemLoadTask.execute();
            }
        }
    }

    @Override
    public void onResume() {
        super.onResume();
    }

    @SuppressLint("NewApi")
    private void startLoader() {
        webViewLoader = createLoader();
        // Pre-HC AsyncTasks run serially; use the thread pool where available.
        if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.GINGERBREAD_MR1) {
            webViewLoader.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
        } else {
            webViewLoader.execute();
        }
    }

    // Captures the long-pressed link so the context menu can act on it.
    private View.OnLongClickListener webViewLongClickListener = new View.OnLongClickListener() {

        @Override
        public boolean onLongClick(View v) {
            WebView.HitTestResult r = webvDescription.getHitTestResult();
            if (r != null
                    && r.getType() == WebView.HitTestResult.SRC_ANCHOR_TYPE) {
                Log.d(TAG, "Link of webview was long-pressed. Extra: "
                        + r.getExtra());
                selectedURL = r.getExtra();
                webvDescription.showContextMenu();
                return true;
            }
            selectedURL = null;
            return false;
        }
    };

    @SuppressWarnings("deprecation")
    @SuppressLint("NewApi")
    @Override
    public boolean onContextItemSelected(MenuItem item) {
        boolean handled = selectedURL != null;
        if (selectedURL != null) {
            switch (item.getItemId()) {
                case R.id.open_in_browser_item:
                    Uri uri = Uri.parse(selectedURL);
                    final Intent intent = new Intent(Intent.ACTION_VIEW, uri);
                    if(IntentUtils.isCallable(getActivity(), intent)) {
                        getActivity().startActivity(intent);
                    }
                    break;
                case R.id.share_url_item:
                    ShareUtils.shareLink(getActivity(), selectedURL);
                    break;
                case R.id.copy_url_item:
                    // Two clipboard APIs: android.content on HC+, android.text before.
                    if (android.os.Build.VERSION.SDK_INT >= 11) {
                        ClipData clipData = ClipData.newPlainText(selectedURL,
                                selectedURL);
                        android.content.ClipboardManager cm = (android.content.ClipboardManager) getActivity()
                                .getSystemService(Context.CLIPBOARD_SERVICE);
                        cm.setPrimaryClip(clipData);
                    } else {
                        android.text.ClipboardManager cm = (android.text.ClipboardManager) getActivity()
                                .getSystemService(Context.CLIPBOARD_SERVICE);
                        cm.setText(selectedURL);
                    }
                    Toast t = Toast.makeText(getActivity(),
                            R.string.copied_url_msg, Toast.LENGTH_SHORT);
                    t.show();
                    break;
                case R.id.go_to_position_item:
                    if (Timeline.isTimecodeLink(selectedURL)) {
                        onTimecodeLinkSelected(selectedURL);
                    } else {
                        Log.e(TAG, "Selected go_to_position_item, but URL was no timecode link: " + selectedURL);
                    }
                    break;
                default:
                    handled = false;
                    break;

            }
            // One-shot: the selection is consumed by the menu action.
            selectedURL = null;
        }
        return handled;
    }

    @Override
    public void onCreateContextMenu(ContextMenu menu, View v,
                                    ContextMenuInfo menuInfo) {
        if (selectedURL != null) {
            super.onCreateContextMenu(menu, v, menuInfo);
            if (Timeline.isTimecodeLink(selectedURL)) {
                // Timecode links get a single "go to position" entry titled with the time.
                menu.add(Menu.NONE, R.id.go_to_position_item, Menu.NONE,
                        R.string.go_to_position_label);
                menu.setHeaderTitle(Converter.getDurationStringLong(Timeline.getTimecodeLinkTime(selectedURL)));
            } else {
                Uri uri = Uri.parse(selectedURL);
                final Intent intent = new Intent(Intent.ACTION_VIEW, uri);
                // Only offer "open in browser" when some activity can handle the URL.
                if(IntentUtils.isCallable(getActivity(), intent)) {
                    menu.add(Menu.NONE, R.id.open_in_browser_item, Menu.NONE,
                            R.string.open_in_browser_label);
                }
                menu.add(Menu.NONE, R.id.copy_url_item, Menu.NONE,
                        R.string.copy_url_label);
                menu.add(Menu.NONE, R.id.share_url_item, Menu.NONE,
                        R.string.share_url_label);
                menu.setHeaderTitle(selectedURL);
            }
        }
    }

    /**
     * Builds the background task that renders the shownotes HTML off the UI thread and
     * loads the result into the WebView.
     */
    private AsyncTask<Void, Void, Void> createLoader() {
        return new AsyncTask<Void, Void, Void>() {
            @Override
            protected void onCancelled() {
                super.onCancelled();
                webViewLoader = null;
            }

            // Rendered shownotes HTML, produced in doInBackground.
            String data;

            @Override
            protected void onPostExecute(Void result) {
                super.onPostExecute(result);
                // "about:blank" history URL keeps the WebView's back stack clean.
                webvDescription.loadDataWithBaseURL(null, data, "text/html",
                        "utf-8", "about:blank");
                Log.d(TAG, "Webview loaded");
                webViewLoader = null;
            }

            @Override
            protected void onPreExecute() {
                super.onPreExecute();
            }

            @Override
            protected Void doInBackground(Void... params) {
                Log.d(TAG, "Loading Webview");
                try {
                    Activity activity = getActivity();
                    if (activity != null) {
                        Timeline timeline = new Timeline(activity, shownotesProvider);
                        data = timeline.processShownotes(highlightTimecodes);
                    } else {
                        // Fragment got detached while we were queued; bail out.
                        cancel(true);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            }

        };
    }

    @Override
    public void onPause() {
        super.onPause();
        savePreference();
    }

    /**
     * Persists the current scroll position keyed by the media identifier, or clears the
     * stored values when no media/WebView is available.
     */
    private void savePreference() {
        if (saveState) {
            Log.d(TAG, "Saving preferences");
            SharedPreferences prefs = getActivity().getSharedPreferences(PREF,
                    Activity.MODE_PRIVATE);
            SharedPreferences.Editor editor = prefs.edit();
            if (media != null && webvDescription != null) {
                Log.d(TAG, "Saving scroll position: "
                        + webvDescription.getScrollY());
                editor.putInt(PREF_SCROLL_Y, webvDescription.getScrollY());
                editor.putString(PREF_PLAYABLE_ID, media.getIdentifier()
                        .toString());
            } else {
                Log.d(TAG, "savePreferences was called while media or webview was null");
                editor.putInt(PREF_SCROLL_Y, -1);
                editor.putString(PREF_PLAYABLE_ID, "");
            }
            editor.commit();
        }
    }

    /**
     * Restores the saved scroll position if it belongs to the currently displayed media.
     *
     * @return true if a position was restored
     */
    private boolean restoreFromPreference() {
        if (saveState) {
            Log.d(TAG, "Restoring from preferences");
            Activity activity = getActivity();
            if (activity != null) {
                SharedPreferences prefs = activity.getSharedPreferences(
                        PREF, Activity.MODE_PRIVATE);
                String id = prefs.getString(PREF_PLAYABLE_ID, "");
                int scrollY = prefs.getInt(PREF_SCROLL_Y, -1);
                // Only restore when the stored position was saved for this same media.
                if (scrollY != -1 && media != null
                        && id.equals(media.getIdentifier().toString())
                        && webvDescription != null) {
                    Log.d(TAG, "Restored scroll Position: " + scrollY);
                    webvDescription.scrollTo(webvDescription.getScrollX(),
                            scrollY);
                    return true;
                }
            }
        }
        return false;
    }

    /** Seeks the host activity's PlaybackController to the time encoded in the link. */
    private void onTimecodeLinkSelected(String link) {
        int time = Timeline.getTimecodeLinkTime(link);
        if (getActivity() != null && getActivity() instanceof ItemDescriptionFragmentCallback) {
            PlaybackController pc = ((ItemDescriptionFragmentCallback) getActivity()).getPlaybackController();
            if (pc != null) {
                pc.seekTo(time);
            }
        }
    }

    /** Implemented by host activities that can provide playback control for seeking. */
    public interface ItemDescriptionFragmentCallback {
        public PlaybackController getPlaybackController();
    }

}
package com.callfire.api11.client.integration;

import com.callfire.api11.client.api.broadcasts.model.BroadcastCommand;
import com.callfire.api11.client.api.broadcasts.model.BroadcastStats;
import com.callfire.api11.client.api.broadcasts.model.BroadcastStatus;
import com.callfire.api11.client.api.broadcasts.model.request.ControlBroadcastRequest;
import com.callfire.api11.client.api.ccc.model.*;
import com.callfire.api11.client.api.ccc.model.request.*;
import com.callfire.api11.client.api.common.model.Result;
import com.callfire.api11.client.api.common.model.RetryConfig;
import com.callfire.api11.client.api.common.model.RetryPhoneType;
import org.apache.commons.lang3.time.DateUtils;
import org.junit.Ignore;
import org.junit.Test;

import java.util.Arrays;
import java.util.List;

import static com.callfire.api11.client.ClientConstants.TIME_FORMAT_PATTERN;
import static java.util.Arrays.asList;
import static org.junit.Assert.*;

/**
 * Integration tests for the Call Center Campaign (CCC) API.
 * <p>
 * Excluded from regular builds ({@code @Ignore}) because they run against a live CallFire
 * account and rely on hard-coded resource ids (agent ids, agent group ids, campaign ids).
 */
@Ignore
public class CccsIntegrationTest extends AbstractIntegrationTest {

    /** Creates, reads, updates and deletes a CCC broadcast, verifying the server round-trip. */
    @Test
    public void testCccBroadcastCrudOperations() throws Exception {
        CccBroadcast broadcast = new CccBroadcast();
        broadcast.setName("Test CCC Broadcast");
        broadcast.setFromNumber("12132212384");
        Question q = new Question();
        q.setLabel("testQuestion");
        q.setResponseType(QuestionResponseType.STRING);
        q.setChoices(Arrays.asList("TEST1", "TEST2"));
        broadcast.setQuestions(Arrays.asList(q));
        TransferNumber t = new TransferNumber();
        t.setNumber("12132212384");
        t.setName("Transfer name");
        t.setAllowAssistedTransfer(true);
        broadcast.setTransferNumbers(Arrays.asList(t));
        broadcast.setScript("test script");
        broadcast.setScrubLevel(2);
        broadcast.setBeginTime(DateUtils.parseDate("10:10:10", TIME_FORMAT_PATTERN));
        broadcast.setEndTime(DateUtils.parseDate("15:15:15", TIME_FORMAT_PATTERN));
        broadcast.setSmartDropSoundId(1L);
        broadcast.setAgentGroupId(149740003L);
        broadcast.setAllowAnyTransfer(true);
        broadcast.setRecorded(true);
        broadcast.setMultilineDialingRatio(2);
        broadcast.setMultilineDialingEnabled(true);
        broadcast.setRetryConfig(new RetryConfig(2, 1, asList(Result.BUSY, Result.NO_ANS),
                asList(RetryPhoneType.MOBILE_PHONE, RetryPhoneType.HOME_PHONE)));
        broadcast.setTransferCallerId("12132212384");

        Long createdBroadcastId = client.cccsApi().create(broadcast);
        assertNotNull(createdBroadcastId);
        System.out.println(createdBroadcastId);

        CccBroadcast savedBroadcast = client.cccsApi().get(createdBroadcastId);
        // FIX: assert on the entity fetched from the server (savedBroadcast), not on the
        // local request object — the original assertions could never fail.
        assertEquals("Test CCC Broadcast", savedBroadcast.getName());
        assertEquals("12132212384", savedBroadcast.getFromNumber());
        assertEquals(1, savedBroadcast.getQuestions().size());
        assertEquals(1, savedBroadcast.getTransferNumbers().size());

        broadcast.setId(createdBroadcastId);
        broadcast.setName(broadcast.getName() + " updated");
        broadcast.setRetryConfig(null);
        client.cccsApi().update(broadcast);

        savedBroadcast = client.cccsApi().get(createdBroadcastId);
        assertEquals("Test CCC Broadcast updated", savedBroadcast.getName());
        // TODO uncomment once fixed
        //assertNull(savedBroadcast.getRetryConfig());

        client.cccsApi().delete(createdBroadcastId);
    }

    /** Queries broadcasts by name with paging and verifies the created one is returned. */
    @Test
    public void testQueryCccBroadcasts() throws Exception {
        CccBroadcast broadcast = new CccBroadcast();
        broadcast.setName("Test CCC Broadcast");
        broadcast.setFromNumber("12132212384");
        Question q = new Question();
        q.setLabel("testQuestion");
        q.setResponseType(QuestionResponseType.STRING);
        broadcast.setQuestions(Arrays.asList(q));
        broadcast.setAgentGroupId(149740003L);
        broadcast.setAllowAnyTransfer(true);
        broadcast.setRecorded(true);
        broadcast.setMultilineDialingRatio(2);
        broadcast.setMultilineDialingEnabled(true);
        Long createdBroadcastId = client.cccsApi().create(broadcast);

        QueryCccBroadcastsRequest request = QueryCccBroadcastsRequest.create()
                .name("Test CCC Broadcast")
                .running(false)
                .maxResults(1)
                .firstResult(0)
                .build();
        List<CccBroadcast> broadcasts = client.cccsApi().query(request);

        // JUnit convention: expected value first, actual second.
        assertEquals(1, broadcasts.size());
        assertEquals("Test CCC Broadcast", broadcasts.get(0).getName());
        assertNotNull(broadcasts.get(0).getId());

        client.cccsApi().delete(createdBroadcastId);
    }

    /** Archives a broadcast via the control endpoint and fetches its stats. */
    @Test
    public void testControlAndGetStats() throws Exception {
        CccBroadcast broadcast = new CccBroadcast();
        broadcast.setName("Test CCC Broadcast");
        broadcast.setFromNumber("12132212384");
        Question q = new Question();
        q.setLabel("testQuestion");
        q.setResponseType(QuestionResponseType.STRING);
        broadcast.setQuestions(Arrays.asList(q));
        broadcast.setAgentGroupId(149740003L);
        broadcast.setAllowAnyTransfer(true);
        broadcast.setRecorded(true);
        broadcast.setMultilineDialingRatio(2);
        broadcast.setMultilineDialingEnabled(true);
        Long createdBroadcastId = client.cccsApi().create(broadcast);

        ControlBroadcastRequest archiveRequest = ControlBroadcastRequest.create()
                .command(BroadcastCommand.ARCHIVE)
                .id(createdBroadcastId)
                .build();
        client.cccsApi().control(archiveRequest);

        CccBroadcast savedBroadcast = client.cccsApi().get(createdBroadcastId);
        assertEquals(BroadcastStatus.ARCHIVED, savedBroadcast.getStatus());

        BroadcastStats stats = client.cccsApi().getStats(createdBroadcastId);
        assertNotNull(stats);

        client.cccsApi().delete(createdBroadcastId);
    }

    /** Verifies that questions and transfer numbers can be removed from a broadcast. */
    @Test
    public void testDeleteQuestionsAndTransferNumbers() throws Exception {
        CccBroadcast broadcast = new CccBroadcast();
        broadcast.setName("Test CCC Broadcast");
        broadcast.setFromNumber("12132212384");
        Question q = new Question();
        q.setLabel("testQuestion");
        q.setResponseType(QuestionResponseType.STRING);
        broadcast.setQuestions(Arrays.asList(q));
        TransferNumber tn = new TransferNumber();
        tn.setName("testNumber");
        tn.setNumber("12132212384");
        broadcast.setTransferNumbers(Arrays.asList(tn));
        Long createdBroadcastId = client.cccsApi().create(broadcast);

        client.cccsApi().deleteQuestions(createdBroadcastId);
        client.cccsApi().deleteTransferNumbers(createdBroadcastId);

        CccBroadcast savedBroadcast = client.cccsApi().get(createdBroadcastId);
        assertNull(savedBroadcast.getQuestions());
        assertNull(savedBroadcast.getTransferNumbers());

        client.cccsApi().delete(createdBroadcastId);
    }

    /** Adds agents to a campaign by id and by email, then removes them again. */
    @Test
    public void testAddRemoveAndGetCampaignAgents() throws Exception {
        CccBroadcast broadcast = new CccBroadcast();
        broadcast.setName("Test CCC Broadcast");
        broadcast.setFromNumber("12132212384");
        Long createdBroadcastId = client.cccsApi().create(broadcast);

        AddAgentsRequest request = AddAgentsRequest.create()
                .broadcastId(createdBroadcastId)
                .agentIds(Arrays.asList(289020003L))
                .build();
        client.cccsApi().addCampaignAgents(request);

        AddAgentsRequest request2 = AddAgentsRequest.create()
                .broadcastId(createdBroadcastId)
                .agentEmails(Arrays.asList("vmalinovskiy+agent@callfire.com"))
                .build();
        client.cccsApi().addCampaignAgents(request2);

        // NOTE(review): two add calls but size()==1 is expected — presumably the id and the
        // email refer to the same agent account; confirm before changing.
        List<Agent> agents = client.cccsApi().queryCampaignAgents(createdBroadcastId);
        assertEquals(1, agents.size());

        client.cccsApi().removeAgentFromCampaign(createdBroadcastId, agents.get(0).getId());

        agents = client.cccsApi().queryCampaignAgents(createdBroadcastId);
        assertEquals(0, agents.size());

        client.cccsApi().delete(createdBroadcastId);
    }

    /** Queries agents by email/group/campaign and fetches a single agent by id. */
    @Test
    public void testGetAgents() throws Exception {
        QueryAgentsRequest request = QueryAgentsRequest.create()
                .agentEmail("vmalinovskiy+agent@callfire.com")
                .agentGroupId(149688003L)
                .campaignId(9901121003L)
                .build();
        List<Agent> agents = client.cccsApi().queryAgents(request);
        assertTrue(agents.size() > 0);

        Agent agent = client.cccsApi().getAgent(agents.get(0).getId());
        assertNotNull(agent);
        assertEquals("vmalinovskiy+agent@callfire.com", agent.getEmail());
    }

    /** CRUD cycle for agent groups: create, get, update, query, remove. */
    @Test
    public void testAgentGroupsCRUDOperations() throws Exception {
        /*CccBroadcast broadcast = new CccBroadcast();
        broadcast.setName("Test CCC Broadcast");
        broadcast.setFromNumber("12132212384");
        Long createdBroadcastId = client.cccsApi().create(broadcast);*/

        AgentGroup group = new AgentGroup();
        group.setAgentIds(Arrays.asList(289020003L, 386074003L));
        group.setName("test agent group");
        Long createdAgentGroupId = client.cccsApi().createAgentGroup(group);

        AgentGroup createdGroup = client.cccsApi().getAgentGroup(createdAgentGroupId);
        assertNotNull(createdGroup);
        assertEquals("test agent group", createdGroup.getName());

        createdGroup.setName(group.getName() + " updated");
        createdGroup.setAgentIds(null);
        //createdGroup.setCampaignIds(Arrays.asList(createdBroadcastId));
        client.cccsApi().updateAgentGroup(createdGroup);

        group = client.cccsApi().getAgentGroup(createdAgentGroupId);
        assertEquals("test agent group updated", group.getName());
        // TODO uncomment once fixed
        //assertEquals(group.getCampaignIds().size(), 1);
        assertEquals(2, group.getAgentEmails().size());

        QueryAgentGroupsRequest queryRequest = QueryAgentGroupsRequest.create()
                .agentEmail("vmalinovskiy+agent@callfire.com")
                .build();
        List<AgentGroup> groups = client.cccsApi().queryAgentGroups(queryRequest);
        assertTrue(groups.size() > 0);

        client.cccsApi().removeAgentGroup(createdAgentGroupId);
        //client.cccsApi().delete(createdBroadcastId);
    }

    /** Attaches an agent group to a campaign and detaches it again. */
    @Test
    public void testCampaignAgentGroups() throws Exception {
        CccBroadcast broadcast = new CccBroadcast();
        broadcast.setName("Test CCC Broadcast");
        broadcast.setFromNumber("12132212384");
        Long createdBroadcastId = client.cccsApi().create(broadcast);

        AgentGroup group = new AgentGroup();
        group.setAgentIds(Arrays.asList(289020003L));
        group.setName("test agent group");
        Long createdAgentGroupId = client.cccsApi().createAgentGroup(group);

        AddAgentGroupsRequest addRequest = AddAgentGroupsRequest.create()
                .agentGroupIds(Arrays.asList(createdAgentGroupId))
                .campaignId(createdBroadcastId)
                .build();
        client.cccsApi().addCampaignAgentGroups(addRequest);

        List<AgentGroup> agentGroups = client.cccsApi().queryCampaignAgentGroups(createdBroadcastId);
        assertEquals(1, agentGroups.size());

        client.cccsApi().removeAgentGroupFromCampaign(createdBroadcastId, createdAgentGroupId);

        agentGroups = client.cccsApi().queryCampaignAgentGroups(createdBroadcastId);
        assertEquals(0, agentGroups.size());

        client.cccsApi().removeAgentGroup(createdAgentGroupId);
        client.cccsApi().delete(createdBroadcastId);
    }

    /** Queries agent sessions by email/campaign and fetches one session by id. */
    @Test
    public void testAgentSessions() throws Exception {
        QueryAgentSessionsRequest request = QueryAgentSessionsRequest.create()
                .agentEmail("vmalinovskiy+agent@callfire.com")
                .campaignId(9901983003L)
                .build();
        List<AgentSession> sessions = client.cccsApi().queryAgentSessions(request);
        assertTrue(sessions.size() > 0);

        AgentSession session = client.cccsApi().getAgentSession(sessions.get(0).getId());
        assertNotNull(session);
        assertEquals(Long.valueOf(9901983003L), session.getCampaignId());
    }

    /** Sends an agent invite for a campaign and retrieves the invite URI. */
    @Test
    public void testCampaignAgentInvites() throws Exception {
        CccBroadcast broadcast = new CccBroadcast();
        broadcast.setName("Test CCC Broadcast");
        broadcast.setFromNumber("12132212384");
        Long createdBroadcastId = client.cccsApi().create(broadcast);

        AgentGroup group = new AgentGroup();
        group.setAgentIds(Arrays.asList(289020003L));
        group.setName("test agent group for invite");
        Long createdAgentGroupId = client.cccsApi().createAgentGroup(group);

        SendAgentInviteRequest request = SendAgentInviteRequest.create()
                .campaignId(createdBroadcastId)
                .agentGroupName("test agent group for invite")
                .agentEmails(Arrays.asList("vmalinovskiy+agent@callfire.com"))
                .build();
        client.cccsApi().sendCampaignAgentInvite(request);

        AgentInvite agentInvite = client.cccsApi().getCampaignAgentInviteUri(createdBroadcastId,
                "vmalinovskiy+agent@callfire.com");
        assertNotNull(agentInvite);
        assertEquals(createdBroadcastId, agentInvite.getCampaignId());

        client.cccsApi().removeAgentGroup(createdAgentGroupId);
        client.cccsApi().delete(createdBroadcastId);
    }
}
// Openbravo POS is a point of sales application designed for touch screens.
// Copyright (C) 2007-2009 Openbravo, S.L.
// http://www.openbravo.com/product/pos
//
// This file is part of Openbravo POS.
//
// Openbravo POS is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Openbravo POS is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Openbravo POS. If not, see <http://www.gnu.org/licenses/>.

package com.openbravo.format;

import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

import com.openbravo.basic.BasicException;

/**
 * Value formatters that convert between typed values and their on-screen text
 * representation.  Each nested subclass handles one data type; the public
 * singletons ({@link #INT}, {@link #CURRENCY}, ...) are the entry points.
 * The locale-default patterns can be replaced at startup via the
 * {@code setXxxPattern} methods.
 *
 * <p>NOTE(review): the shared static {@link NumberFormat}/{@link DateFormat}
 * instances are not thread-safe; this class appears to assume single-threaded
 * (Swing EDT) use — confirm before calling from background threads.
 */
public abstract class Formats {

    public final static Formats NULL = new FormatsNULL();
    public final static Formats LONG = new FormatsLONG();
    public final static Formats INT = new FormatsINT();
    public final static Formats STRING = new FormatsSTRING();
    public final static Formats DOUBLE = new FormatsDOUBLE();
    public final static Formats CURRENCY = new FormatsCURRENCY();
    public final static Formats PERCENT = new FormatsPERCENT();
    public final static Formats BOOLEAN = new FormatsBOOLEAN();
    public final static Formats TIMESTAMP = new FormatsTIMESTAMP();
    public final static Formats DATE = new FormatsDATE();
    public final static Formats TIME = new FormatsTIME();
    public final static Formats BYTEA = new FormatsBYTEA();

    // Mutable, process-wide format configuration (see the pattern setters below).
    private static NumberFormat m_integerformat = NumberFormat.getIntegerInstance();
    private static NumberFormat m_doubleformat = NumberFormat.getNumberInstance();
    private static NumberFormat m_currencyformat = NumberFormat.getCurrencyInstance();
    private static NumberFormat m_percentformat = new DecimalFormat("#,##0.##%");
    private static DateFormat m_dateformat = DateFormat.getDateInstance();
    private static DateFormat m_timeformat = DateFormat.getTimeInstance();
    private static DateFormat m_datetimeformat = DateFormat.getDateTimeInstance();

    /** Creates a new instance of Formats */
    protected Formats() {
    }

    /** Returns the number of fraction digits of the active currency format. */
    public static int getCurrencyDecimals() {
        return m_currencyformat.getMaximumFractionDigits();
    }

    /**
     * Formats a value for display.
     *
     * @param value the value to format; {@code null} or {@code ""} render as ""
     * @return the formatted text, never {@code null}
     */
    public String formatValue(Object value) {
        if (value == null || "".equals(value)) {
            return "";
        } else {
            return formatValueInt(value);
        }
    }

    /**
     * Parses user input into a typed value.
     *
     * @param value    the text to parse; {@code null} or {@code ""} yield defvalue
     * @param defvalue value returned for empty input
     * @throws BasicException wrapping any {@link ParseException} from the subclass
     */
    public Object parseValue(String value, Object defvalue) throws BasicException {
        if (value == null || "".equals(value)) {
            return defvalue;
        } else {
            try {
                return parseValueInt(value);
            } catch (ParseException e) {
                throw new BasicException(e.getMessage(), e);
            }
        }
    }

    /** Same as {@link #parseValue(String, Object)} with a {@code null} default. */
    public Object parseValue(String value) throws BasicException {
        return parseValue(value, null);
    }

    // For all pattern setters: a null or empty pattern restores the locale default.

    public static void setIntegerPattern(String pattern) {
        if (pattern == null || pattern.equals("")) {
            m_integerformat = NumberFormat.getIntegerInstance();
        } else {
            m_integerformat = new DecimalFormat(pattern);
        }
    }

    public static void setDoublePattern(String pattern) {
        if (pattern == null || pattern.equals("")) {
            m_doubleformat = NumberFormat.getNumberInstance();
        } else {
            m_doubleformat = new DecimalFormat(pattern);
        }
    }

    public static void setCurrencyPattern(String pattern) {
        if (pattern == null || pattern.equals("")) {
            m_currencyformat = NumberFormat.getCurrencyInstance();
        } else {
            m_currencyformat = new DecimalFormat(pattern);
        }
    }

    public static void setPercentPattern(String pattern) {
        if (pattern == null || pattern.equals("")) {
            m_percentformat = new DecimalFormat("#,##0.##%");
        } else {
            m_percentformat = new DecimalFormat(pattern);
        }
    }

    public static void setDatePattern(String pattern) {
        if (pattern == null || pattern.equals("")) {
            m_dateformat = DateFormat.getDateInstance();
        } else {
            m_dateformat = new SimpleDateFormat(pattern);
        }
    }

    public static void setTimePattern(String pattern) {
        if (pattern == null || pattern.equals("")) {
            m_timeformat = DateFormat.getTimeInstance();
        } else {
            m_timeformat = new SimpleDateFormat(pattern);
        }
    }

    public static void setDateTimePattern(String pattern) {
        if (pattern == null || pattern.equals("")) {
            m_datetimeformat = DateFormat.getDateTimeInstance();
        } else {
            m_datetimeformat = new SimpleDateFormat(pattern);
        }
    }

    /** Formats a non-null, non-empty value (empty handling done by formatValue). */
    protected abstract String formatValueInt(Object value);

    /** Parses non-empty text (empty handling done by parseValue). */
    protected abstract Object parseValueInt(String value) throws ParseException;

    /** SwingConstants alignment hint for rendering values of this type. */
    public abstract int getAlignment();

    private static final class FormatsNULL extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return null;
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            return null;
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.LEFT;
        }
    }

    private static final class FormatsINT extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return m_integerformat.format(((Number) value).longValue());
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            // Integer.valueOf instead of the deprecated new Integer(int) constructor.
            return Integer.valueOf(m_integerformat.parse(value).intValue());
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.RIGHT;
        }
    }

    private static final class FormatsLONG extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            try {
                return m_integerformat.format(((Number) value).longValue());
            } catch (Exception e) {
                // Preserve the original exception as the cause instead of printing
                // the stack trace and throwing a bare IllegalArgumentException.
                throw new IllegalArgumentException("Cannot format value as long: " + value, e);
            }
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            return Long.valueOf(m_integerformat.parse(value).longValue());
        }
        @Override
        public int getAlignment() {
            // NOTE(review): CENTER while the other numeric formats use RIGHT —
            // kept as-is; confirm whether this asymmetry is intentional.
            return javax.swing.SwingConstants.CENTER;
        }
    }

    private static final class FormatsSTRING extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return (String) value;
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            return value;
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.LEFT;
        }
    }

    private static final class FormatsDOUBLE extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return m_doubleformat.format(DoubleUtils.fixDecimals((Number) value)); // quickfix for 3838
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            return Double.valueOf(m_doubleformat.parse(value).doubleValue());
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.RIGHT;
        }
    }

    private static final class FormatsPERCENT extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return m_percentformat.format(DoubleUtils.fixDecimals((Number) value)); // quickfix for 3838
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            try {
                return Double.valueOf(m_percentformat.parse(value).doubleValue());
            } catch (ParseException e) {
                // Second chance: accept a plain number and treat it as a percentage.
                return Double.valueOf(m_doubleformat.parse(value).doubleValue() / 100);
            }
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.RIGHT;
        }
    }

    private static final class FormatsCURRENCY extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return m_currencyformat.format(DoubleUtils.fixDecimals((Number) value)); // quickfix for 3838
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            try {
                return Double.valueOf(m_currencyformat.parse(value).doubleValue());
            } catch (ParseException e) {
                // Second chance: accept a plain number without the currency symbol.
                return Double.valueOf(m_doubleformat.parse(value).doubleValue());
            }
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.RIGHT;
        }
    }

    private static final class FormatsBOOLEAN extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return ((Boolean) value).toString();
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            return Boolean.valueOf(value);
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.CENTER;
        }
    }

    private static final class FormatsTIMESTAMP extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return m_datetimeformat.format((Date) value);
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            try {
                return m_datetimeformat.parse(value);
            } catch (ParseException e) {
                // Second chance: accept a date-only value.
                return m_dateformat.parse(value);
            }
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.CENTER;
        }
    }

    private static final class FormatsDATE extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return m_dateformat.format((Date) value);
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            return m_dateformat.parse(value);
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.CENTER;
        }
    }

    private static final class FormatsTIME extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            return m_timeformat.format((Date) value);
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            return m_timeformat.parse(value);
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.CENTER;
        }
    }

    private static final class FormatsBYTEA extends Formats {
        @Override
        protected String formatValueInt(Object value) {
            // UTF-8 is always available, so the Charset overload removes the
            // impossible UnsupportedEncodingException handling entirely.
            return new String((byte[]) value, StandardCharsets.UTF_8);
        }
        @Override
        protected Object parseValueInt(String value) throws ParseException {
            return value.getBytes(StandardCharsets.UTF_8);
        }
        @Override
        public int getAlignment() {
            return javax.swing.SwingConstants.LEADING;
        }
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver13;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/*
 * NOTE(review): LoxiGen-generated immutable OXM entry for the masked
 * CONN_TRACKING_TP_DST match field, OpenFlow 1.3 wire encoding
 * (typeLen 0x1fb04: 2-byte value + 2-byte mask).  Generated code — comments
 * only added; any behavioral change belongs in the LoxiGen templates.
 */
class OFOxmConnTrackingTpDstMaskedVer13 implements OFOxmConnTrackingTpDstMasked {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmConnTrackingTpDstMaskedVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    final static int LENGTH = 8;

        private final static TransportPort DEFAULT_VALUE = TransportPort.NONE;
        private final static TransportPort DEFAULT_VALUE_MASK = TransportPort.NONE;

    // OF message fields
    private final TransportPort value;
    private final TransportPort mask;
//
    // Immutable default instance
    final static OFOxmConnTrackingTpDstMaskedVer13 DEFAULT = new OFOxmConnTrackingTpDstMaskedVer13(
        DEFAULT_VALUE, DEFAULT_VALUE_MASK
    );

    // package private constructor - used by readers, builders, and factory
    OFOxmConnTrackingTpDstMaskedVer13(TransportPort value, TransportPort mask) {
        if(value == null) {
            throw new NullPointerException("OFOxmConnTrackingTpDstMaskedVer13: property value cannot be null");
        }
        if(mask == null) {
            throw new NullPointerException("OFOxmConnTrackingTpDstMaskedVer13: property mask cannot be null");
        }
        this.value = value;
        this.mask = mask;
    }

    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        return 0x1fb04L;
    }

    @Override
    public TransportPort getValue() {
        return value;
    }

    @Override
    public TransportPort getMask() {
        return mask;
    }

    @Override
    public MatchField<TransportPort> getMatchField() {
        return MatchField.CONN_TRACKING_TP_DST;
    }

    @Override
    public boolean isMasked() {
        return true;
    }

    // Canonicalization: a full mask degenerates to the unmasked OXM; an empty
    // (NO_MASK... see below) comparison decides which representation to return.
    public OFOxm<TransportPort> getCanonical() {
        if (TransportPort.NO_MASK.equals(mask)) {
            return new OFOxmConnTrackingTpDstVer13(value);
        } else if(TransportPort.FULL_MASK.equals(mask)) {
            return null;
        } else {
            return this;
        }
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    public OFOxmConnTrackingTpDstMasked.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that inherits unset properties from an existing instance.
    static class BuilderWithParent implements OFOxmConnTrackingTpDstMasked.Builder {
        final OFOxmConnTrackingTpDstMaskedVer13 parentMessage;

        // OF message fields
        private boolean valueSet;
        private TransportPort value;
        private boolean maskSet;
        private TransportPort mask;

        BuilderWithParent(OFOxmConnTrackingTpDstMaskedVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

    @Override
    public long getTypeLen() {
        return 0x1fb04L;
    }

    @Override
    public TransportPort getValue() {
        return value;
    }

    @Override
    public OFOxmConnTrackingTpDstMasked.Builder setValue(TransportPort value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public TransportPort getMask() {
        return mask;
    }

    @Override
    public OFOxmConnTrackingTpDstMasked.Builder setMask(TransportPort mask) {
        this.mask = mask;
        this.maskSet = true;
        return this;
    }

    @Override
    public MatchField<TransportPort> getMatchField() {
        return MatchField.CONN_TRACKING_TP_DST;
    }

    @Override
    public boolean isMasked() {
        return true;
    }

    @Override
    public OFOxm<TransportPort> getCanonical()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property canonical not supported in version 1.3");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

        @Override
        public OFOxmConnTrackingTpDstMasked build() {
                TransportPort value = this.valueSet ? this.value : parentMessage.value;
                if(value == null)
                    throw new NullPointerException("Property value must not be null");
                TransportPort mask = this.maskSet ? this.mask : parentMessage.mask;
                if(mask == null)
                    throw new NullPointerException("Property mask must not be null");
                //
                return new OFOxmConnTrackingTpDstMaskedVer13(
                    value,
                    mask
                );
        }
    }

    // Stand-alone builder: unset properties fall back to the defaults.
    static class Builder implements OFOxmConnTrackingTpDstMasked.Builder {
        // OF message fields
        private boolean valueSet;
        private TransportPort value;
        private boolean maskSet;
        private TransportPort mask;

    @Override
    public long getTypeLen() {
        return 0x1fb04L;
    }

    @Override
    public TransportPort getValue() {
        return value;
    }

    @Override
    public OFOxmConnTrackingTpDstMasked.Builder setValue(TransportPort value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public TransportPort getMask() {
        return mask;
    }

    @Override
    public OFOxmConnTrackingTpDstMasked.Builder setMask(TransportPort mask) {
        this.mask = mask;
        this.maskSet = true;
        return this;
    }

    @Override
    public MatchField<TransportPort> getMatchField() {
        return MatchField.CONN_TRACKING_TP_DST;
    }

    @Override
    public boolean isMasked() {
        return true;
    }

    @Override
    public OFOxm<TransportPort> getCanonical()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property canonical not supported in version 1.3");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

//
        @Override
        public OFOxmConnTrackingTpDstMasked build() {
            TransportPort value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            TransportPort mask = this.maskSet ? this.mask : DEFAULT_VALUE_MASK;
            if(mask == null)
                throw new NullPointerException("Property mask must not be null");
            return new OFOxmConnTrackingTpDstMaskedVer13(
                    value,
                    mask
                );
        }
    }

    final static Reader READER = new Reader();
    // Deserializes the fixed 8-byte wire form: 4-byte typeLen, value, mask.
    static class Reader implements OFMessageReader<OFOxmConnTrackingTpDstMasked> {
        @Override
        public OFOxmConnTrackingTpDstMasked readFrom(ByteBuf bb) throws OFParseError {
            // fixed value property typeLen == 0x1fb04L
            int typeLen = bb.readInt();
            if(typeLen != 0x1fb04)
                throw new OFParseError("Wrong typeLen: Expected=0x1fb04L(0x1fb04L), got="+typeLen);
            TransportPort value = TransportPort.read2Bytes(bb);
            TransportPort mask = TransportPort.read2Bytes(bb);

            OFOxmConnTrackingTpDstMaskedVer13 oxmConnTrackingTpDstMaskedVer13 = new OFOxmConnTrackingTpDstMaskedVer13(
                    value,
                      mask
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmConnTrackingTpDstMaskedVer13);
            return oxmConnTrackingTpDstMaskedVer13;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmConnTrackingTpDstMaskedVer13Funnel FUNNEL = new OFOxmConnTrackingTpDstMaskedVer13Funnel();
    // Guava Funnel used for hashing this OXM into a PrimitiveSink.
    static class OFOxmConnTrackingTpDstMaskedVer13Funnel implements Funnel<OFOxmConnTrackingTpDstMaskedVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFOxmConnTrackingTpDstMaskedVer13 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x1fb04L
            sink.putInt(0x1fb04);
            message.value.putTo(sink);
            message.mask.putTo(sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    // Serializes the fixed 8-byte wire form (mirror of Reader above).
    static class Writer implements OFMessageWriter<OFOxmConnTrackingTpDstMaskedVer13> {
        @Override
        public void write(ByteBuf bb, OFOxmConnTrackingTpDstMaskedVer13 message) {
            // fixed value property typeLen = 0x1fb04L
            bb.writeInt(0x1fb04);
            message.value.write2Bytes(bb);
            message.mask.write2Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmConnTrackingTpDstMaskedVer13(");
        b.append("value=").append(value);
        b.append(", ");
        b.append("mask=").append(mask);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmConnTrackingTpDstMaskedVer13 other = (OFOxmConnTrackingTpDstMaskedVer13) obj;

        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        if (mask == null) {
            if (other.mask != null)
                return false;
        } else if (!mask.equals(other.mask))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + ((value == null) ? 0 : value.hashCode());
        result = prime * result + ((mask == null) ? 0 : mask.hashCode());
        return result;
    }
}
/**
 * Copyright (C) 2012-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ninja.session;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import ninja.Context;
import ninja.Cookie;
import ninja.Result;
import ninja.utils.Crypto;
import ninja.utils.NinjaConstant;
import ninja.utils.NinjaProperties;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

/*
 * NOTE(review): unit tests for the session-cookie implementation, driven
 * entirely through a mocked NinjaProperties / Context / Result.  Code is
 * unchanged; comments only added.
 */
@RunWith(MockitoJUnitRunner.class)
public class SessionCookieTest {

    @Mock
    private Context context;

    @Mock
    private Result result;

    @Captor
    private ArgumentCaptor<Cookie> cookieCaptor;

    private Crypto crypto;

    @Mock
    NinjaProperties ninjaProperties;

    // Baseline property stubbing shared by every test: 10000s expiry, send only
    // when changed, HTTPS-only and HttpOnly both on, fixed secret and prefix.
    @Before
    public void setUp() {

        when(
                ninjaProperties
                        .getInteger(NinjaConstant.sessionExpireTimeInSeconds))
                .thenReturn(10000);
        when(
                ninjaProperties.getBooleanWithDefault(
                        NinjaConstant.sessionSendOnlyIfChanged, true))
                .thenReturn(true);
        when(
                ninjaProperties.getBooleanWithDefault(
                        NinjaConstant.sessionTransferredOverHttpsOnly, true))
                .thenReturn(true);
        when(
                ninjaProperties.getBooleanWithDefault(
                        NinjaConstant.sessionHttpOnly, true)).thenReturn(true);

        when(ninjaProperties.getOrDie(NinjaConstant.applicationSecret))
                .thenReturn("secret");

        when(ninjaProperties.getOrDie(NinjaConstant.applicationCookiePrefix))
                .thenReturn("NINJA");

        crypto = new Crypto(ninjaProperties);

    }

    @Test
    public void testSessionDoesNotGetWrittenToResponseWhenEmptyAndOnlySentWhenChanged() {

        Session sessionCookie = new SessionImpl(crypto, ninjaProperties);

        sessionCookie.init(context);

        // put nothing => empty session will not be sent as we send only changed
        // stuff...
        sessionCookie.save(context, result);

        // no cookie should be set as the flash scope is empty...:
        verify(result, never()).addCookie(Matchers.any(Cookie.class));
    }

    @Test
    public void testSessionCookieSettingWorks() throws Exception {

        Session sessionCookie = new SessionImpl(crypto, ninjaProperties);

        sessionCookie.init(context);

        sessionCookie.put("hello", "session!");

        // put nothing => intentionally to check if no session cookie will be
        // saved
        sessionCookie.save(context, result);

        // a cookie will be set
        verify(result).addCookie(cookieCaptor.capture());

        // verify some stuff on the set cookie
        assertEquals("NINJA_SESSION", cookieCaptor.getValue().getName());

        // assert some stuff...
        // Make sure that sign is valid:
        // Cookie value layout: "<hmac>-<payload>"; recompute the HMAC over the
        // payload and compare it against the prefix.
        String cookieString = cookieCaptor.getValue().getValue();

        String cookieFromSign = cookieString.substring(cookieString
                .indexOf("-") + 1);

        String computedSign = crypto.signHmacSha1(cookieFromSign);

        assertEquals(computedSign,
                cookieString.substring(0, cookieString.indexOf("-")));

        // Make sure that cookie contains timestamp
        assertTrue(cookieString.contains("___TS"));

    }

    @Test
    public void testHttpsOnlyWorks() throws Exception {

        Session sessionCookie = new SessionImpl(crypto, ninjaProperties);

        sessionCookie.init(context);

        sessionCookie.put("hello", "session!");

        // put nothing => intentionally to check if no session cookie will be
        // saved
        sessionCookie.save(context, result);

        // a cookie will be set
        verify(result).addCookie(cookieCaptor.capture());

        // verify some stuff on the set cookie
        assertEquals(true, cookieCaptor.getValue().isSecure());
    }

    @Test
    public void testNoHttpsOnlyWorks() throws Exception {
        // setup this testmethod
        // Override the baseline stubbing: https-only off for this test.
        when(
                ninjaProperties.getBooleanWithDefault(
                        NinjaConstant.sessionTransferredOverHttpsOnly, true))
                .thenReturn(false);

        Session sessionCookie = new SessionImpl(crypto, ninjaProperties);

        sessionCookie.init(context);

        sessionCookie.put("hello", "session!");

        // put nothing => intentionally to check if no session cookie will be
        // saved
        sessionCookie.save(context, result);

        // a cookie will be set
        verify(result).addCookie(cookieCaptor.capture());

        // verify some stuff on the set cookie
        assertEquals(false, cookieCaptor.getValue().isSecure());
    }

    @Test
    public void testHttpOnlyWorks() throws Exception {

        Session sessionCookie = new SessionImpl(crypto, ninjaProperties);

        sessionCookie.init(context);

        sessionCookie.put("hello", "session!");

        // put nothing => intentionally to check if no session cookie will be
        // saved
        sessionCookie.save(context, result);

        // a cookie will be set
        verify(result).addCookie(cookieCaptor.capture());

        // verify some stuff on the set cookie
        assertEquals(true, cookieCaptor.getValue().isHttpOnly());
    }

    @Test
    public void testNoHttpOnlyWorks() throws Exception {
        // setup this testmethod
        // Override the baseline stubbing: HttpOnly off for this test.
        when(
                ninjaProperties.getBooleanWithDefault(
                        NinjaConstant.sessionHttpOnly, true)).thenReturn(false);

        Session sessionCookie = new SessionImpl(crypto, ninjaProperties);

        sessionCookie.init(context);

        sessionCookie.put("hello", "session!");

        // put nothing => intentionally to check if no session cookie will be
        // saved
        sessionCookie.save(context, result);

        // a cookie will be set
        verify(result).addCookie(cookieCaptor.capture());

        // verify some stuff on the set cookie
        assertEquals(false, cookieCaptor.getValue().isHttpOnly());
    }

    // Round trip: values saved into a cookie must be readable by a fresh
    // Session initialized from that same cookie (simulated second request).
    @Test
    public void testThatCookieSavingAndInitingWorks() {

        Session sessionCookie = new SessionImpl(crypto, ninjaProperties);

        sessionCookie.init(context);

        sessionCookie.put("key1", "value1");
        sessionCookie.put("key2", "value2");
        sessionCookie.put("key3", "value3");

        // put nothing => intentionally to check if no session cookie will be
        // saved
        sessionCookie.save(context, result);

        // a cookie will be set
        verify(result).addCookie(cookieCaptor.capture());

        // now we simulate a new request => the session storage will generate a
        // new cookie:
        Cookie newSessionCookie = Cookie.builder(
                cookieCaptor.getValue().getName(),
                cookieCaptor.getValue().getValue()).build();

        // that will be returned by the httprequest...
        when(context.getCookie(cookieCaptor.getValue().getName())).thenReturn(
                newSessionCookie);

        // init new session from that cookie:
        Session sessionCookie2 = new SessionImpl(crypto, ninjaProperties);

        sessionCookie2.init(context);

        assertEquals("value1", sessionCookie2.get("key1"));
        assertEquals("value2", sessionCookie2.get("key2"));
        assertEquals("value3", sessionCookie2.get("key3"));

    }

    @Test
    public void testThatCorrectMethodOfNinjaPropertiesIsUsedSoThatStuffBreaksWhenPropertyIsAbsent() {

        // we did not set the cookie prefix
        when(ninjaProperties.getOrDie(NinjaConstant.applicationCookiePrefix))
                .thenReturn(null);

        // stuff must break => ...
        // The local variable is intentionally unused: constructing SessionImpl is
        // what triggers the getOrDie lookup being verified below.
        Session sessionCookie = new SessionImpl(crypto, ninjaProperties);

        verify(ninjaProperties).getOrDie(NinjaConstant.applicationCookiePrefix);
    }

    @Test
    public void testSessionCookieDelete() {
        Session sessionCookie = new SessionImpl(crypto, ninjaProperties);
        sessionCookie.init(context);
        final String key = "mykey";
        final String value = "myvalue";
        sessionCookie.put(key, value);

        // value should have been set:
        assertEquals(value, sessionCookie.get(key));

        // value should be returned when removing:
        assertEquals(value, sessionCookie.remove(key));

        // after removing, value should not be there anymore:
        assertNull(sessionCookie.get(key));
    }

    @Test
    public void testGetAuthenticityTokenWorks() {

        Session sessionCookie = new SessionImpl(
                crypto,
                ninjaProperties);

        sessionCookie.init(context);

        String authenticityToken = sessionCookie.getAuthenticityToken();

        sessionCookie.save(context, result);

        // a cookie will be set
        verify(result).addCookie(cookieCaptor.capture());

        String cookieValue = cookieCaptor.getValue().getValue();

        //verify that the authenticity token is set
        assertTrue(cookieValue.contains("___AT=" + authenticityToken));
        // also make sure the timestamp is there:
        assertTrue(cookieValue.contains("___TS="));

    }

    @Test
    public void testGetIdTokenWorks() {

        Session sessionCookie = new SessionImpl(
                crypto,
                ninjaProperties);

        sessionCookie.init(context);

        String idToken = sessionCookie.getId();

        sessionCookie.save(context, result);

        // a cookie will be set
        verify(result).addCookie(cookieCaptor.capture());

        String cookieValue = cookieCaptor.getValue().getValue();

        //verify that the id token is set:
        assertTrue(cookieValue.contains("___ID=" + idToken));
        // also make sure the timestamp is there:
        assertTrue(cookieValue.contains("___TS="));

    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.index.old.mk.simple;

import org.apache.jackrabbit.mk.api.MicroKernel;
import org.apache.jackrabbit.mk.api.MicroKernelException;
import org.apache.jackrabbit.mk.blobs.AbstractBlobStore;
import org.apache.jackrabbit.mk.blobs.FileBlobStore;
import org.apache.jackrabbit.mk.blobs.MemoryBlobStore;
import org.apache.jackrabbit.mk.json.JsopReader;
import org.apache.jackrabbit.mk.json.JsopStream;
import org.apache.jackrabbit.mk.json.JsopTokenizer;
import org.apache.jackrabbit.mk.json.JsopWriter;
import org.apache.jackrabbit.mk.server.Server;
import org.apache.jackrabbit.mk.util.Cache;
import org.apache.jackrabbit.mk.util.CommitGate;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.index.old.mk.ExceptionFactory;
import org.apache.jackrabbit.oak.plugins.index.old.mk.wrapper.MicroKernelWrapperBase;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;

/*
Node structure:

/head/rev = 100
/head/nanos = nanos
/head/commit/diff = [+ "/test"{}]
/head/commit/msg = "hello ..."
/head/config/ (optional)
/head/data/
/99/head/nanos = nanos
/99/98/head
/99/98/97/head
/99/90/head
/99/90/89/head

*/

/**
 * A simple MicroKernel implementation.
 */
public class SimpleKernelImpl extends MicroKernelWrapperBase implements MicroKernel {

    // Revision-history skip-list offset (see the /99/98/... layout above).
    private static final int REV_SKIP_OFFSET = 20;

    private final String name;

    private final AbstractBlobStore ds;
    private final AscendingClock clock = new AscendingClock(System.currentTimeMillis());
    private final CommitGate gate = new CommitGate();
    private final Cache<Long, Revision> revisionCache = Cache.newInstance(null, 1024 * 1024);

    private volatile long headRevId;
    private volatile String headRevision;
    private NodeMap nodeMap;
    private Server server;
    private boolean disposed;

    /*
     * Constructor.  The name encodes the storage backend:
     *   "server:<rest>" additionally starts an HTTP server for this kernel;
     *   "fs:<dir>"      uses a file-based blob store and on-disk node map;
     *   anything else   uses an in-memory blob store.
     * NOTE(review): this.name is assigned BEFORE the "server:" prefix is
     * stripped, so the field keeps the full original name — confirm intended.
     */
    public SimpleKernelImpl(String name) {
        this.name = name;
        boolean startServer = false;
        if (name.startsWith("server:")) {
            startServer = true;
            name = name.substring("server:".length());
        }
        nodeMap = new NodeMap();
        if (name.startsWith("fs:")) {
            String dir = name.substring("fs:".length());
            try {
                ds = new FileBlobStore(dir);
            } catch (IOException e) {
                throw ExceptionFactory.convert(e);
            }
            nodeMap = new NodeMapInDb(dir);
        } else {
            ds = new MemoryBlobStore();
        }
        if (nodeMap.getRootId() == null) {
            // Fresh store: bootstrap revision 0 with an empty /head/data tree.
            NodeImpl head = new NodeImpl(nodeMap, 0);
            Revision revNode = new Revision(0, 0, "", "");
            head = revNode.store(head, new NodeImpl(nodeMap, 0));
            head.addChildNode("data", new NodeImpl(nodeMap, 0));
            NodeImpl root = new NodeImpl(nodeMap, 0);
            root.addChildNode("head", head);
            nodeMap.commit(root);
        } else {
            // Existing store: recover the head revision id and settings.
            NodeImpl head = getRoot().getNode("head");
            String rev = head.getProperty("rev");
            headRevId = Revision.parseId(JsopTokenizer.decodeQuoted(rev));
            applyConfig(head);
        }
        headRevision = Revision.formatId(headRevId);
        if (startServer) {
            server = new Server(this);
            try {
                server.start();
            } catch (IOException e) {
                throw ExceptionFactory.convert(e);
            }
        }
    }

    // Copies every property of /head/config (if present) into the node map's
    // settings.
    private void applyConfig(NodeImpl head) {
        // /head/config doesn't always exist
        if (head.exists("config")) {
            NodeImpl config = head.getNode("config");
            for (int i = 0, size = config.getPropertyCount(); i < size; i++) {
                nodeMap.setSetting(config.getProperty(i), config.getPropertyValue(i));
            }
        }
    }

    @Override
    public synchronized String commitStream(String rootPath, JsopReader jsonDiff, String revisionId, String message) {
        revisionId = revisionId == null ? headRevision : revisionId;
        // TODO message should be json
        // TODO read / write version
        // TODO getJournal and getRevision don't have a path,
        // which means we can't implement access rights using path prefixes
        try {
            return doCommit(rootPath, jsonDiff, revisionId, message);
        } catch (Exception e) {
            throw ExceptionFactory.convert(e);
        }
    }

    /*
     * Applies a JSOP diff stream against the given base revision, producing a
     * new revision.  Handles '+' (add node/property), '-' (remove),
     * '^' (set property, including :root/head/config settings changes) and
     * '>' (move/rename with first/last/before/after positioning).
     * NOTE(review): this method continues beyond the end of this chunk;
     * the tail below is intentionally incomplete here.
     */
    private String doCommit(String rootPath, JsopReader t, String revisionId, String message) {
        long oldRevision = headRevId, rev = headRevId + 1;
        NodeImpl root = nodeMap.getRootId().getNode(nodeMap);
        NodeImpl head = root.getNode("head"), oldHead = head;
        NodeImpl data = head.getNode("data");
        JsopWriter diff = new JsopStream();
        while (true) {
            int r = t.read();
            if (r == JsopReader.END) {
                break;
            }
            String path = PathUtils.concat(rootPath, t.readString());
            String from = PathUtils.relativize("/", path);
            switch (r) {
            case '+':
                t.read(':');
                diff.tag('+').key(path);
                if (t.matches('{')) {
                    // Adding a whole node (possibly a subtree).
                    NodeImpl n = NodeImpl.parse(nodeMap, t, rev);
                    data = data.cloneAndAddChildNode(from, false, null, n, rev);
                    n.append(diff, Integer.MAX_VALUE, 0, Integer.MAX_VALUE, false);
                } else {
                    // Adding a single property; must not already exist.
                    String value = t.readRawValue().trim();
                    String nodeName = PathUtils.getParentPath(from);
                    String propertyName = PathUtils.getName(from);
                    if (data.getNode(nodeName).hasProperty(propertyName)) {
                        throw ExceptionFactory.get("Property already exists: " + propertyName);
                    }
                    data = data.cloneAndSetProperty(from, value, rev);
                    diff.encodedValue(value);
                }
                diff.newline();
                break;
            case '-':
                diff.tag('-').value(path).newline();
                if (data.exists(from) || !getRevisionDataRoot(revisionId).exists(from)) {
                    // this will fail if the node didn't exist
                    data = data.cloneAndRemoveChildNode(from, rev);
                }
                break;
            case '^':
                t.read(':');
                // Property set; a path under :root/head/config/ is a settings
                // change applied to /head/config rather than to the data tree.
                boolean isConfigChange = from.startsWith(":root/head/config/");
                String value;
                if (t.matches(JsopReader.NULL)) {
                    value = null;
                    diff.tag('^').key(path).value(null);
                } else {
                    value = t.readRawValue().trim();
                    String nodeName = PathUtils.getParentPath(from);
                    String propertyName = PathUtils.getName(from);
                    if (isConfigChange || data.getNode(nodeName).hasProperty(propertyName)) {
                        diff.tag('^');
                    } else {
                        // Setting a property that doesn't exist yet is recorded
                        // as an add in the journal diff.
                        diff.tag('+');
                    }
                    diff.key(path).encodedValue(value);
                }
                if (isConfigChange) {
                    String p = PathUtils.relativize(":root/head", from);
                    if (!head.exists("config")) {
                        head = head.setChild("config", new NodeImpl(nodeMap, rev), rev);
                    }
                    head = head.cloneAndSetProperty(p, value, rev);
                    applyConfig(head);
                } else {
                    data = data.cloneAndSetProperty(from, value, rev);
                }
                diff.newline();
                break;
            case '>': {
                t.read(':');
                diff.tag('>').key(path);
                String name = PathUtils.getName(from);
                String position, target, to;
                boolean rename;
                if (t.matches('{')) {
                    // Object form: { "position": "target" } (before/after/...).
                    rename = false;
                    position = t.readString();
                    t.read(':');
                    target = t.readString();
                    t.read('}');
                    diff.object().key(position);
                    if (!PathUtils.isAbsolute(target)) {
                        target = PathUtils.concat(rootPath, target);
                    }
                    diff.value(target).endObject();
                } else {
                    // String form: plain rename/move to a target path.
                    rename = true;
                    position = null;
                    target = t.readString();
                    if (!PathUtils.isAbsolute(target)) {
                        target = PathUtils.concat(rootPath, target);
                    }
                    diff.value(target);
                }
                diff.newline();
                // Normalize the positional forms to a concrete target path.
                boolean before = false;
                if ("last".equals(position)) {
                    target = PathUtils.concat(target, name);
                    position = null;
                } else if ("first".equals(position)) {
                    target = PathUtils.concat(target, name);
                    position = null;
                    before = true;
                } else if ("before".equals(position)) {
                    position = PathUtils.getName(target);
                    target = PathUtils.getParentPath(target);
                    target = PathUtils.concat(target, name);
                    before = true;
                } else if ("after".equals(position)) {
                    position = PathUtils.getName(target);
                    target = PathUtils.getParentPath(target);
                    target = PathUtils.concat(target, name);
                } else if (position == null) {
                    // move
} else { throw ExceptionFactory.get("position: " + position); } to = PathUtils.relativize("/", target); boolean inPlaceRename = false; if (rename) { if (PathUtils.getParentPath(from).equals(PathUtils.getParentPath(to))) { inPlaceRename = true; position = PathUtils.getName(from); } } NodeImpl node = data.getNode(from); if (!inPlaceRename) { data = data.cloneAndRemoveChildNode(from, rev); } data = data.cloneAndAddChildNode(to, before, position, node, rev); if (inPlaceRename) { data = data.cloneAndRemoveChildNode(from, rev); } break; } case '*': { // TODO possibly support target position notation t.read(':'); String target = t.readString(); if (!PathUtils.isAbsolute(target)) { target = PathUtils.concat(rootPath, target); } diff.tag('*').key(path).value(target); String to = PathUtils.relativize("/", target); NodeImpl node = data.getNode(from); JsopStream json = new JsopStream(); node.append(json, Integer.MAX_VALUE, 0, Integer.MAX_VALUE, false); json.read('{'); NodeImpl n2 = NodeImpl.parse(nodeMap, json, rev); data = data.cloneAndAddChildNode(to, false, null, n2, rev); break; } default: throw ExceptionFactory.get("token: " + (char) t.getTokenType()); } } head = head.setChild("data", data, rev); Revision revNode = new Revision(rev, clock.nanoTime(), diff.toString(), message); revisionCache.put(rev, revNode); head = revNode.store(head, new NodeImpl(nodeMap, rev)); root = root.setChild("head", head, rev); String old = Revision.formatId(oldRevision); NodeImpl oldRev = new NodeImpl(nodeMap, rev); oldRev.addChildNode("head", oldHead); String lastRev = Revision.formatId(oldRevision - 1); if (root.exists(lastRev)) { NodeImpl lastRevNode = root.getNode(lastRev); root = root.cloneAndRemoveChildNode(lastRev, rev); oldRev.setChild(lastRev, lastRevNode, rev); if (oldRevision % REV_SKIP_OFFSET == 0) { long skip = oldRevision - REV_SKIP_OFFSET; NodeImpl n = getRevisionNode(getRoot(), skip, skip); if (n != null) { oldRev.setChild(Revision.formatId(skip), n, rev); // TODO remove old 
link to reduce descendant count } } } root = root.setChild(old, oldRev, rev); nodeMap.commit(root); headRevId = rev; headRevision = Revision.formatId(rev); gate.commit(headRevision); return headRevision; } private NodeImpl getRoot() { return nodeMap.getRootId().getNode(nodeMap); } @Override public String getHeadRevision() { return headRevision; } @Override public JsopReader getRevisionsStream(long since, int maxEntries, String path) { NodeImpl node = getRoot(); long sinceNanos = since * 1000000; ArrayList<Revision> revisions = new ArrayList<Revision>(); Revision r = Revision.get(node.getNode("head")); if (sinceNanos < r.getNanos() && maxEntries > 0) { revisions.add(r); while (revisions.size() < maxEntries) { String newest = null; for (int i = 0;; i++) { String next = node.getChildNodeName(i); if (next == null) { break; } else if (!next.equals("head") && !next.equals("config")) { if (newest == null || next.compareTo(newest) >= 0) { newest = next; } } } if (newest != null) { r = Revision.get(node); if (r == null) { break; } revisions.add(r); } } } JsopStream buff = new JsopStream().array(); Collections.sort(revisions); for (Revision rev : revisions) { if (rev.getNanos() > sinceNanos) { buff.encodedValue(rev.toString()); } } return buff.endArray(); } @Override public String waitForCommit(String oldHeadRevisionId, long maxWaitMillis) throws InterruptedException { return gate.waitForCommit(oldHeadRevisionId, maxWaitMillis); } @Override public JsopReader getJournalStream(String fromRevisionId, String toRevisionId, String path) { fromRevisionId = fromRevisionId == null ? headRevision : fromRevisionId; toRevisionId = toRevisionId == null ? 
headRevision : toRevisionId; long fromRev = Revision.parseId(fromRevisionId); long toRev = Revision.parseId(toRevisionId); NodeImpl node = getRoot(); ArrayList<Revision> revisions = new ArrayList<Revision>(); Revision r = Revision.get(node.getNode("head")); if (r.getId() >= fromRev && r.getId() <= toRev) { revisions.add(r); } if (r.getId() > fromRev) { node = getRevisionNode(node, fromRev, toRev); while (node != null) { r = Revision.get(node.getNode("head")); if (r.getId() >= fromRev && r.getId() <= toRev) { r = revisionCache.replace(r.getId(), r); revisions.add(r); } String next = Revision.formatId(r.getId() - 1); if (!node.exists(next)) { break; } node = node.getNode(next); } } JsopStream buff = new JsopStream().array().newline(); for (int i = revisions.size() - 1; i >= 0; i--) { Revision rev = revisions.get(i); if (rev.getId() >= fromRev && rev.getId() <= toRev) { rev.appendJournal(buff); } } return buff.endArray(); } private static NodeImpl getRevisionNode(NodeImpl node, long fromRev, long toRev) { while (true) { long next = -1; String nextRev = null; for (int i = 0;; i++) { String n = node.getChildNodeName(i); if (n == null) { break; } if ("head".equals(n)) { continue; } long rev = Revision.parseId(n); if (next == -1 || (rev >= toRev && (rev < next || next < toRev))) { next = rev; nextRev = n; } } if (next == -1 || fromRev > next) { return null; } else { node = node.getNode(nextRev); if (next <= toRev) { return node; } } } } @Override public JsopReader diffStream(String fromRevisionId, String toRevisionId, String path, int depth) { fromRevisionId = fromRevisionId == null ? headRevision : fromRevisionId; toRevisionId = toRevisionId == null ? headRevision : toRevisionId; // TODO implement if required return new JsopStream(); } /** * Get the nodes. 
The following prefixes are supported: * <ul><li>:root - get the root node (including all old revisions) * </li><li>:info - get internal info such as the node count * </li></ul> * * @param path the path * @param revisionId the revision * @return the json string */ @Override public JsopReader getNodesStream(String path, String revisionId, int depth, long offset, int count, String filter) { revisionId = revisionId == null ? headRevision : revisionId; // TODO offset > 0 should mean the properties are not included if (count < 0) { count = nodeMap.getMaxMemoryChildren(); } if (!PathUtils.isAbsolute(path)) { throw ExceptionFactory.get("Not an absolute path: " + path); } NodeImpl n; if (path.startsWith("/:")) { if (path.startsWith("/:root")) { n = getRoot().getNode(PathUtils.relativize("/:root", path)); } else if (path.startsWith("/:info")) { n = nodeMap.getInfo(PathUtils.relativize("/:info", path)); } else { n = getRevisionDataRoot(revisionId).getNode(path.substring(1)); } } else { n = getRevisionDataRoot(revisionId).getNode(path.substring(1)); } if (n == null) { return null; } JsopStream json = new JsopStream(); n.append(json, depth, offset, count, true); return json; } private NodeImpl getRevisionDataRoot(String revisionId) { NodeImpl rev = getRevisionIfExists(revisionId); if (rev == null) { throw ExceptionFactory.get("Revision not found: " + revisionId); } rev = rev.getNode("data"); return rev; } private NodeImpl getRevisionIfExists(String revisionId) { NodeImpl node = getRoot(); NodeImpl head = node.getNode("head"); String headRev; headRev = head.getProperty("rev"); headRev = headRev == null ? 
null : JsopTokenizer.decodeQuoted(headRev); // we can't rely on headRevId, as it's a volatile field if (revisionId.equals(headRev)) { return head; } else { long rev = Revision.parseId(revisionId); NodeImpl rnode = getRevisionNode(node, rev, rev); if (rnode != null) { return rnode.getNode("head"); } else { return null; } } } @Override public boolean nodeExists(String path, String revisionId) { revisionId = revisionId == null ? headRevision : revisionId; if (!PathUtils.isAbsolute(path)) { throw ExceptionFactory.get("Not an absolute path: " + path); } if (PathUtils.denotesRoot(path)) { return true; } else if (path.equals("/:info")) { return true; } return getRevisionDataRoot(revisionId).exists(path.substring(1)); } @Override public long getChildNodeCount(String path, String revisionId) { revisionId = revisionId == null ? headRevision : revisionId; if (!PathUtils.isAbsolute(path)) { throw ExceptionFactory.get("Not an absolute path: " + path); } return getRevisionDataRoot(revisionId).getNode(path).getChildNodeCount(); } @Override public long getLength(String blobId) { try { return ds.getBlobLength(blobId); } catch (Exception e) { throw ExceptionFactory.convert(e); } } @Override public int read(String blobId, long pos, byte[] buff, int off, int length) { try { return ds.readBlob(blobId, pos, buff, off, length); } catch (Exception e) { throw ExceptionFactory.convert(e); } } @Override public String write(InputStream in) { try { return ds.writeBlob(in); } catch (Exception e) { throw ExceptionFactory.convert(e); } } public synchronized void dispose() { if (!disposed) { disposed = true; gate.commit("end"); nodeMap.close(); if (server != null) { server.stop(); server = null; } } } @Override public String toString() { return "simple:" + name; } @Override public String branch(String trunkRevisionId) throws MicroKernelException { trunkRevisionId = trunkRevisionId == null ? 
headRevision : trunkRevisionId; // TODO OAK-45 support throw new UnsupportedOperationException(); } @Override public String merge(String branchRevisionId, String message) throws MicroKernelException { // TODO OAK-45 support throw new UnsupportedOperationException(); } }
package org.grobid.core.data;

import org.grobid.core.utilities.TextUtilities;
import org.grobid.core.lexicon.Lexicon;

import java.util.ArrayList;
import java.util.List;

/**
 * Class for representing and exchanging affiliation information.
 */
public class Affiliation {

    private String acronym = null;
    private String name = null;
    private String url = null;
    private List<String> institutions = null; // for additional institutions
    private List<String> departments = null; // for additional departments
    private List<String> laboratories = null; // for additional laboratories

    private String country = null;
    private String postCode = null;
    private String postBox = null;
    private String region = null;
    private String settlement = null;
    private String addrLine = null;
    private String marker = null;

    private String addressString = null; // unspecified address field
    private String affiliationString = null; // unspecified affiliation field
    private String rawAffiliationString = null; // raw affiliation text (excluding marker)

    private boolean failAffiliation = true; // tag for unresolved affiliation attachment

    // an identifier for the affiliation independent from the marker, present in the TEI result
    private String key = null;

    public Affiliation() {
    }

    /**
     * Copy constructor.
     * <p>
     * The list fields are defensively copied (the previous version aliased
     * the source lists, so mutating the copy also mutated the original),
     * and the {@code failAffiliation} flag is now carried over as well.
     * NOTE(review): {@code key} is deliberately NOT copied, matching the
     * previous behavior — keys are assumed to be reassigned per document;
     * confirm against callers before changing this.
     */
    public Affiliation(org.grobid.core.data.Affiliation aff) {
        acronym = aff.getAcronym();
        name = aff.getName();
        url = aff.getURL();
        addressString = aff.getAddressString();
        country = aff.getCountry();
        marker = aff.getMarker();
        // defensive copies: do not share mutable lists with the source object
        departments = aff.getDepartments() == null ? null : new ArrayList<String>(aff.getDepartments());
        institutions = aff.getInstitutions() == null ? null : new ArrayList<String>(aff.getInstitutions());
        laboratories = aff.getLaboratories() == null ? null : new ArrayList<String>(aff.getLaboratories());
        postCode = aff.getPostCode();
        postBox = aff.getPostBox();
        region = aff.getRegion();
        settlement = aff.getSettlement();
        addrLine = aff.getAddrLine();
        affiliationString = aff.getAffiliationString();
        rawAffiliationString = aff.getRawAffiliationString();
        failAffiliation = aff.getFailAffiliation();
    }

    public String getAcronym() { return acronym; }

    public String getName() { return name; }

    public String getURL() { return url; }

    public String getAddressString() { return addressString; }

    public String getCountry() { return country; }

    public String getMarker() { return marker; }

    public String getPostCode() { return postCode; }

    public String getPostBox() { return postBox; }

    public String getRegion() { return region; }

    public String getSettlement() { return settlement; }

    public String getAddrLine() { return addrLine; }

    public String getAffiliationString() { return affiliationString; }

    public String getRawAffiliationString() { return rawAffiliationString; }

    public List<String> getInstitutions() { return institutions; }

    public List<String> getLaboratories() { return laboratories; }

    public List<String> getDepartments() { return departments; }

    public String getKey() { return key; }

    public void setAcronym(String s) { acronym = s; }

    public void setName(String s) { name = s; }

    public void setURL(String s) { url = s; }

    public void setAddressString(String s) { addressString = s; }

    public void setCountry(String s) { country = s; }

    public void setMarker(String s) { marker = s; }

    public void setPostCode(String s) { postCode = s; }

    public void setPostBox(String s) { postBox = s; }

    public void setRegion(String s) { region = s; }

    public void setSettlement(String s) { settlement = s; }

    public void setAddrLine(String s) { addrLine = s; }

    public void setAffiliationString(String s) { affiliationString = s; }

    public void setRawAffiliationString(String s) { rawAffiliationString = s; }

    public void setInstitutions(List<String> affs) { institutions = affs; }

    /** Append a cleaned institution name, creating the list on first use. */
    public void addInstitution(String aff) {
        if (institutions == null)
            institutions = new ArrayList<String>();
        institutions.add(TextUtilities.cleanField(aff, true));
    }

    public void setDepartments(List<String> affs) { departments = affs; }

    /** Append a cleaned department name, creating the list on first use. */
    public void addDepartment(String aff) {
        if (departments == null)
            departments = new ArrayList<String>();
        departments.add(TextUtilities.cleanField(aff, true));
    }

    public void setLaboratories(List<String> affs) { laboratories = affs; }

    /** Append a cleaned laboratory name, creating the list on first use. */
    public void addLaboratory(String aff) {
        if (laboratories == null)
            laboratories = new ArrayList<String>();
        laboratories.add(TextUtilities.cleanField(aff, true));
    }

    /**
     * Concatenate extra text onto the first institution (or start the list).
     * NOTE(review): when the list already exists the extension is appended
     * raw (uncleaned) — kept as-is for behavioral compatibility.
     */
    public void extendFirstInstitution(String theExtend) {
        if (institutions == null) {
            institutions = new ArrayList<String>();
            institutions.add(TextUtilities.cleanField(theExtend, true));
        } else {
            String first = institutions.get(0);
            first = first + theExtend;
            institutions.set(0, first);
        }
    }

    /** Concatenate extra text onto the last institution (or start the list). */
    public void extendLastInstitution(String theExtend) {
        if (institutions == null) {
            institutions = new ArrayList<String>();
            institutions.add(TextUtilities.cleanField(theExtend, true));
        } else {
            String first = institutions.get(institutions.size() - 1);
            first = first + theExtend;
            institutions.set(institutions.size() - 1, first);
        }
    }

    /** Concatenate extra text onto the first department (or start the list). */
    public void extendFirstDepartment(String theExtend) {
        if (departments == null) {
            departments = new ArrayList<String>();
            departments.add(TextUtilities.cleanField(theExtend, true));
        } else {
            String first = departments.get(0);
            first = first + theExtend;
            departments.set(0, first);
        }
    }

    /** Concatenate extra text onto the last department (or start the list). */
    public void extendLastDepartment(String theExtend) {
        if (departments == null) {
            departments = new ArrayList<String>();
            departments.add(TextUtilities.cleanField(theExtend, true));
        } else {
            String first = departments.get(departments.size() - 1);
            first = first + theExtend;
            departments.set(departments.size() - 1, first);
        }
    }

    /** Concatenate extra text onto the first laboratory (or start the list). */
    public void extendFirstLaboratory(String theExtend) {
        if (laboratories == null) {
            laboratories = new ArrayList<String>();
            laboratories.add(TextUtilities.cleanField(theExtend, true));
        } else {
            String first = laboratories.get(0);
            first = first + theExtend;
            laboratories.set(0, first);
        }
    }

    /** Concatenate extra text onto the last laboratory (or start the list). */
    public void extendLastLaboratory(String theExtend) {
        if (laboratories == null) {
            laboratories = new ArrayList<String>();
            laboratories.add(TextUtilities.cleanField(theExtend, true));
        } else {
            String first = laboratories.get(laboratories.size() - 1);
            first = first + theExtend;
            laboratories.set(laboratories.size() - 1, first);
        }
    }

    /**
     * @return true if at least one structural or address field is set.
     * (Uses short-circuit {@code &&} — the previous non-short-circuit
     * {@code &} evaluated identically here but was unidiomatic.)
     */
    public boolean notNull() {
        return !((departments == null)
                && (institutions == null)
                && (laboratories == null)
                && (country == null)
                && (postCode == null)
                && (postBox == null)
                && (region == null)
                && (settlement == null)
                && (addrLine == null)
                && (affiliationString == null)
                && (addressString == null));
    }

    public void setFailAffiliation(boolean b) { failAffiliation = b; }

    public boolean getFailAffiliation() { return failAffiliation; }

    public void setKey(String key) { this.key = key; }

    /**
     * Normalize all fields: clean whitespace/markup and drop values that are
     * too short to be meaningful.
     * NOTE(review): institutions use a {@code > 1} length threshold while
     * departments/laboratories use {@code > 2} — preserved as-is; confirm
     * whether the asymmetry is intentional.
     */
    public void clean() {
        if (departments != null) {
            List<String> newDepartments = new ArrayList<String>();
            for (String department : departments) {
                String dep = TextUtilities.cleanField(department, true);
                if (dep.length() > 2) {
                    newDepartments.add(dep);
                }
            }
            departments = newDepartments;
        }

        if (institutions != null) {
            List<String> newInstitutions = new ArrayList<String>();
            for (String institution : institutions) {
                String inst = TextUtilities.cleanField(institution, true);
                if (inst.length() > 1) {
                    newInstitutions.add(inst);
                }
            }
            institutions = newInstitutions;
        }

        if (laboratories != null) {
            List<String> newLaboratories = new ArrayList<String>();
            for (String laboratorie : laboratories) {
                String inst = TextUtilities.cleanField(laboratorie, true);
                if (inst.length() > 2) {
                    newLaboratories.add(inst);
                }
            }
            laboratories = newLaboratories;
        }

        if (country != null) {
            country = TextUtilities.cleanField(country, true);
            if (country.endsWith(")")) {
                // for some reason the ) at the end of this field is not removed
                country = country.substring(0, country.length() - 1);
            }
            if (country.length() < 2)
                country = null;
        }
        if (postCode != null) {
            postCode = TextUtilities.cleanField(postCode, true);
            if (postCode.length() < 2)
                postCode = null;
        }
        if (postBox != null) {
            postBox = TextUtilities.cleanField(postBox, true);
            if (postBox.length() < 2)
                postBox = null;
        }
        if (region != null) {
            region = TextUtilities.cleanField(region, true);
            if (region.length() < 2)
                region = null;
        }
        if (settlement != null) {
            settlement = TextUtilities.cleanField(settlement, true);
            if (settlement.length() < 2)
                settlement = null;
        }
        if (addrLine != null) {
            addrLine = TextUtilities.cleanField(addrLine, true);
            if (addrLine.length() < 2)
                addrLine = null;
        }
        if (addressString != null) {
            addressString = TextUtilities.cleanField(addressString, true);
            if (addressString.length() < 2)
                addressString = null;
        }
        if (affiliationString != null) {
            affiliationString = TextUtilities.cleanField(affiliationString, true);
            if (affiliationString.length() < 2)
                affiliationString = null;
        }
        if (marker != null) {
            marker = TextUtilities.cleanField(marker, true);
            marker = marker.replace(" ", "");
        }
    }

    /**
     * Return the number of overall structure members (address included)
     */
    public int nbStructures() {
        int nbStruct = 0;
        if (departments != null) {
            nbStruct += departments.size();
        }
        if (institutions != null) {
            nbStruct += institutions.size();
        }
        if (laboratories != null) {
            nbStruct += laboratories.size();
        }
        if (country != null) {
            nbStruct++;
        }
        if (postCode != null) {
            nbStruct++;
        }
        if (postBox != null) {
            nbStruct++;
        }
        if (region != null) {
            nbStruct++;
        }
        if (settlement != null) {
            nbStruct++;
        }
        if (addrLine != null) {
            nbStruct++;
        }
        if (marker != null) {
            nbStruct++;
        }
        return nbStruct;
    }

    /**
     * Serialize this affiliation as a TEI {@code <affiliation>} element,
     * or return null when no field is set.
     * (The address guard now uses short-circuit {@code ||}; the previous
     * non-short-circuit {@code |} evaluated identically for these
     * side-effect-free null checks.)
     */
    @Deprecated
    public String toTEI() {
        StringBuilder tei = new StringBuilder();
        if (!notNull()) {
            return null;
        } else {
            tei.append("<affiliation");
            if (key != null)
                tei.append(" key=\"").append(key).append("\"");
            tei.append(">");
            if (departments != null) {
                if (departments.size() == 1) {
                    tei.append("<orgName type=\"department\">").append(TextUtilities.HTMLEncode(departments.get(0))).append("</orgName>");
                } else {
                    int q = 1;
                    for (String depa : departments) {
                        tei.append("<orgName type=\"department\" key=\"dep").append(q).append("\">").append(TextUtilities.HTMLEncode(depa)).append("</orgName>");
                        q++;
                    }
                }
            }
            if (laboratories != null) {
                if (laboratories.size() == 1) {
                    tei.append("<orgName type=\"laboratory\">").append(TextUtilities.HTMLEncode(laboratories.get(0))).append("</orgName>");
                } else {
                    int q = 1;
                    for (String labo : laboratories) {
                        tei.append("<orgName type=\"laboratory\" key=\"lab").append(q).append("\">").append(TextUtilities.HTMLEncode(labo)).append("</orgName>");
                        q++;
                    }
                }
            }
            if (institutions != null) {
                if (institutions.size() == 1) {
                    tei.append("<orgName type=\"institution\">").append(TextUtilities.HTMLEncode(institutions.get(0))).append("</orgName>");
                } else {
                    int q = 1;
                    for (String inst : institutions) {
                        tei.append("<orgName type=\"institution\" key=\"instit").append(q).append("\">").append(TextUtilities.HTMLEncode(inst)).append("</orgName>");
                        q++;
                    }
                }
            }
            if ((getAddressString() != null) || (getAddrLine() != null) || (getPostBox() != null)
                    || (getPostCode() != null) || (getSettlement() != null) || (getRegion() != null)
                    || (getCountry() != null)) {
                tei.append("<address>");
                if (getAddressString() != null) {
                    tei.append("<addrLine>").append(TextUtilities.HTMLEncode(getAddressString())).append("</addrLine>");
                }
                if (getAddrLine() != null) {
                    tei.append("<addrLine>").append(TextUtilities.HTMLEncode(getAddrLine())).append("</addrLine>");
                }
                if (getPostBox() != null) {
                    tei.append("<postBox>").append(TextUtilities.HTMLEncode(getPostBox())).append("</postBox>");
                }
                if (getPostCode() != null) {
                    tei.append("<postCode>").append(TextUtilities.HTMLEncode(getPostCode())).append("</postCode>");
                }
                if (getSettlement() != null) {
                    tei.append("<settlement>").append(TextUtilities.HTMLEncode(getSettlement())).append("</settlement>");
                }
                if (getRegion() != null) {
                    tei.append("<region>").append(TextUtilities.HTMLEncode(getRegion())).append("</region>");
                }
                if (getCountry() != null) {
                    // attach the ISO country code as the key attribute when known
                    Lexicon lexicon = Lexicon.getInstance();
                    String code = lexicon.getCountryCode(getCountry());
                    tei.append("<country");
                    if (code != null)
                        tei.append(" key=\"").append(code).append("\"");
                    tei.append(">").append(TextUtilities.HTMLEncode(getCountry())).append("</country>");
                }
                tei.append("</address>");
            }
            tei.append("</affiliation>");
        }
        return tei.toString();
    }

    /**
     * Serialize an affiliation as an indented TEI {@code <affiliation>}
     * element; nbTag is the base indentation depth in tabs.
     * (Local buffer switched from the legacy synchronized StringBuffer to
     * StringBuilder — identical output.)
     */
    public static String toTEI(Affiliation aff, int nbTag) {
        StringBuilder tei = new StringBuilder();
        TextUtilities.appendN(tei, '\t', nbTag + 1);
        tei.append("<affiliation");
        if (aff.getKey() != null)
            tei.append(" key=\"").append(aff.getKey()).append("\"");
        tei.append(">\n");
        if (aff.getDepartments() != null) {
            if (aff.getDepartments().size() == 1) {
                TextUtilities.appendN(tei, '\t', nbTag + 2);
                tei.append("<orgName type=\"department\">" +
                        TextUtilities.HTMLEncode(aff.getDepartments().get(0)) + "</orgName>\n");
            } else {
                int q = 1;
                for (String depa : aff.getDepartments()) {
                    TextUtilities.appendN(tei, '\t', nbTag + 2);
                    tei.append("<orgName type=\"department\" key=\"dep" + q + "\">" +
                            TextUtilities.HTMLEncode(depa) + "</orgName>\n");
                    q++;
                }
            }
        }
        if (aff.getLaboratories() != null) {
            if (aff.getLaboratories().size() == 1) {
                TextUtilities.appendN(tei, '\t', nbTag + 2);
                tei.append("<orgName type=\"laboratory\">" +
                        TextUtilities.HTMLEncode(aff.getLaboratories().get(0)) + "</orgName>\n");
            } else {
                int q = 1;
                for (String labo : aff.getLaboratories()) {
                    TextUtilities.appendN(tei, '\t', nbTag + 2);
                    tei.append("<orgName type=\"laboratory\" key=\"lab" + q + "\">" +
                            TextUtilities.HTMLEncode(labo) + "</orgName>\n");
                    q++;
                }
            }
        }
        if (aff.getInstitutions() != null) {
            if (aff.getInstitutions().size() == 1) {
                TextUtilities.appendN(tei, '\t', nbTag + 2);
                tei.append("<orgName type=\"institution\">" +
                        TextUtilities.HTMLEncode(aff.getInstitutions().get(0)) + "</orgName>\n");
            } else {
                int q = 1;
                for (String inst : aff.getInstitutions()) {
                    TextUtilities.appendN(tei, '\t', nbTag + 2);
                    tei.append("<orgName type=\"institution\" key=\"instit" + q + "\">" +
                            TextUtilities.HTMLEncode(inst) + "</orgName>\n");
                    q++;
                }
            }
        }
        if ((aff.getAddressString() != null) ||
                (aff.getAddrLine() != null) ||
                (aff.getPostBox() != null) ||
                (aff.getPostCode() != null) ||
                (aff.getSettlement() != null) ||
                (aff.getRegion() != null) ||
                (aff.getCountry() != null)) {
            TextUtilities.appendN(tei, '\t', nbTag + 2);
            tei.append("<address>\n");
            if (aff.getAddressString() != null) {
                TextUtilities.appendN(tei, '\t', nbTag + 3);
                tei.append("<addrLine>" + TextUtilities.HTMLEncode(aff.getAddressString()) + "</addrLine>\n");
            }
            if (aff.getAddrLine() != null) {
                TextUtilities.appendN(tei, '\t', nbTag + 3);
                tei.append("<addrLine>" + TextUtilities.HTMLEncode(aff.getAddrLine()) + "</addrLine>\n");
            }
            if (aff.getPostBox() != null) {
                TextUtilities.appendN(tei, '\t', nbTag + 3);
                tei.append("<postBox>" + TextUtilities.HTMLEncode(aff.getPostBox()) + "</postBox>\n");
            }
            if (aff.getPostCode() != null) {
                TextUtilities.appendN(tei, '\t', nbTag + 3);
                tei.append("<postCode>" + TextUtilities.HTMLEncode(aff.getPostCode()) + "</postCode>\n");
            }
            if (aff.getSettlement() != null) {
                TextUtilities.appendN(tei, '\t', nbTag + 3);
                tei.append("<settlement>" + TextUtilities.HTMLEncode(aff.getSettlement()) + "</settlement>\n");
            }
            if (aff.getRegion() != null) {
                TextUtilities.appendN(tei, '\t', nbTag + 3);
                tei.append("<region>" + TextUtilities.HTMLEncode(aff.getRegion()) + "</region>\n");
            }
            if (aff.getCountry() != null) {
                String code = Lexicon.getInstance().getCountryCode(aff.getCountry());
                TextUtilities.appendN(tei, '\t', nbTag + 3);
                tei.append("<country");
                if (code != null)
                    tei.append(" key=\"" + code + "\"");
                tei.append(">" + TextUtilities.HTMLEncode(aff.getCountry()) + "</country>\n");
            }
            TextUtilities.appendN(tei, '\t', nbTag + 2);
            tei.append("</address>\n");
        }
        TextUtilities.appendN(tei, '\t', nbTag + 1);
        tei.append("</affiliation>\n");
        return tei.toString();
    }

    @Override
    public String toString() {
        return "Affiliation{" +
                "name='" + name + '\'' +
                ", url='" + url + '\'' +
                ", key='" + key + '\'' +
                ", institutions=" + institutions +
                ", departments=" + departments +
                ", laboratories=" + laboratories +
                ", country='" + country + '\'' +
                ", postCode='" + postCode + '\'' +
                ", postBox='" + postBox + '\'' +
                ", region='" + region + '\'' +
                ", settlement='" + settlement + '\'' +
                ", addrLine='" + addrLine + '\'' +
                ", marker='" + marker + '\'' +
                ", addressString='" + addressString + '\'' +
                ", affiliationString='" + affiliationString + '\'' +
                ", rawAffiliationString='" + rawAffiliationString + '\'' +
                ", failAffiliation=" + failAffiliation +
                '}';
    }
}
/*
 * Copyright 2013 MovingBlocks
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terasology.persistence.typeHandling.protobuf;

import com.google.protobuf.ByteString;
import gnu.trove.iterator.TDoubleIterator;
import gnu.trove.iterator.TFloatIterator;
import gnu.trove.iterator.TIntIterator;
import gnu.trove.iterator.TLongIterator;
import org.terasology.persistence.typeHandling.PersistedData;
import org.terasology.persistence.typeHandling.SerializationContext;
import org.terasology.persistence.typeHandling.TypeHandler;
import org.terasology.persistence.typeHandling.TypeSerializationLibrary;
import org.terasology.protobuf.EntityData;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;

/**
 * SerializationContext that encodes values into protobuf {@link EntityData.Value}
 * messages wrapped as {@link ProtobufPersistedData}.
 *
 * @author Immortius
 */
public class ProtobufSerializationContext implements SerializationContext {

    private TypeSerializationLibrary library;

    public ProtobufSerializationContext(TypeSerializationLibrary library) {
        this.library = library;
    }

    /** Finalizes a value builder and wraps the resulting message. */
    private static PersistedData wrap(EntityData.Value.Builder builder) {
        return new ProtobufPersistedData(builder.build());
    }

    @Override
    public PersistedData create(String value) {
        // a single string is just the one-element string list
        return createStrings(Arrays.asList(value));
    }

    @Override
    public PersistedData create(String... values) {
        return createStrings(Arrays.asList(values));
    }

    @Override
    public PersistedData createStrings(Iterable<String> value) {
        return wrap(EntityData.Value.newBuilder().addAllString(value));
    }

    @Override
    public PersistedData create(float value) {
        return wrap(EntityData.Value.newBuilder().addFloat(value));
    }

    @Override
    public PersistedData create(float... values) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        for (float item : values) {
            out.addFloat(item);
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(TFloatIterator value) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        while (value.hasNext()) {
            out.addFloat(value.next());
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(int value) {
        return wrap(EntityData.Value.newBuilder().addInteger(value));
    }

    @Override
    public PersistedData create(int... values) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        for (int item : values) {
            out.addInteger(item);
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(TIntIterator value) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        while (value.hasNext()) {
            out.addInteger(value.next());
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(long value) {
        return wrap(EntityData.Value.newBuilder().addLong(value));
    }

    @Override
    public PersistedData create(long... values) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        for (long item : values) {
            out.addLong(item);
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(TLongIterator value) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        while (value.hasNext()) {
            out.addLong(value.next());
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(boolean value) {
        return wrap(EntityData.Value.newBuilder().addBoolean(value));
    }

    @Override
    public PersistedData create(boolean... values) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        for (boolean item : values) {
            out.addBoolean(item);
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(double value) {
        return wrap(EntityData.Value.newBuilder().addDouble(value));
    }

    @Override
    public PersistedData create(double... values) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        for (double item : values) {
            out.addDouble(item);
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(TDoubleIterator value) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        while (value.hasNext()) {
            out.addDouble(value.next());
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(byte[] value) {
        return wrap(EntityData.Value.newBuilder().setBytes(ByteString.copyFrom(value)));
    }

    @Override
    public PersistedData create(ByteBuffer value) {
        return wrap(EntityData.Value.newBuilder().setBytes(ByteString.copyFrom(value)));
    }

    @Override
    public PersistedData create(PersistedData... data) {
        return create(Arrays.asList(data));
    }

    @Override
    public PersistedData create(Iterable<PersistedData> data) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        // unwrap each element back to its protobuf value
        for (PersistedData element : data) {
            out.addValue(((ProtobufPersistedData) element).getValue());
        }
        return wrap(out);
    }

    @Override
    public PersistedData create(Map<String, PersistedData> data) {
        EntityData.Value.Builder out = EntityData.Value.newBuilder();
        for (Map.Entry<String, PersistedData> entry : data.entrySet()) {
            out.addNameValue(EntityData.NameValue.newBuilder()
                    .setName(entry.getKey())
                    .setValue(((ProtobufPersistedData) entry.getValue()).getValue()).build());
        }
        return wrap(out);
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> PersistedData create(T data, Class<? extends T> type) {
        // the library registers the handler under the same type, so the cast is safe
        TypeHandler<T> typeHandler = (TypeHandler<T>) library.getHandlerFor(type);
        return typeHandler.serialize(data, this);
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> PersistedData create(Collection<T> data, Class<T> type) {
        TypeHandler<T> typeHandler = (TypeHandler<T>) library.getHandlerFor(type);
        return typeHandler.serializeCollection(data, this);
    }

    @Override
    public PersistedData createNull() {
        // an empty Value message represents null
        return wrap(EntityData.Value.newBuilder());
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gemstone.gemfire.management.internal.cli.functions;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

import org.apache.logging.log4j.Logger;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.lib.legacy.ClassImposteriser;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheClosedException;
import com.gemstone.gemfire.cache.execute.FunctionContext;
import com.gemstone.gemfire.cache.execute.ResultSender;
import com.gemstone.gemfire.cache.hdfs.HDFSStore;
import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreConfigHolder;
import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.management.internal.cli.commands.HDFSStoreCommandsJUnitTest;
import com.gemstone.gemfire.management.internal.configuration.domain.XmlEntity;
import com.gemstone.gemfire.test.junit.categories.HoplogTest;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;

/**
 * The CreateHDFSStoreFunctionJUnitTest test suite class tests the contract and
 * functionality of the CreateHDFSStoreFunction class.
 *
 * @author Namrata Thanvi
 * @see com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl
 * @see com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreConfigHolder
 * @see com.gemstone.gemfire.management.internal.cli.functions.CreateHDFSStoreFunction
 * @see org.jmock.Expectations
 * @see org.jmock.Mockery
 * @see org.junit.Assert
 * @see org.junit.Test
 */
@SuppressWarnings( { "unused" })
@Category({IntegrationTest.class, HoplogTest.class})
public class CreateHDFSStoreFunctionJUnitTest {

    private static final Logger logger = LogService.getLogger();

    // jMock context; recreated per test, verified in tearDown.
    private Mockery mockContext;

    private static Properties props = new Properties();

    @Before
    public void setup() {
        // ClassImposteriser allows mocking concrete classes (GemFireCacheImpl, HDFSStoreImpl).
        mockContext = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
            }
        };
    }

    @After
    public void tearDown() {
        mockContext.assertIsSatisfied();
        mockContext = null;
    }

    /** Happy path: the function reports the member name and a "Success" message. */
    @Test
    public void testExecute() throws Throwable {
        final GemFireCacheImpl mockCache = mockContext.mock(GemFireCacheImpl.class, "Cache");
        final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
        final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "FunctionContext");
        final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");

        final String memberId = "mockMemberId";
        final String memberName = "mockMemberName";

        final TestResultSender testResultSender = new TestResultSender();

        final HDFSStoreImpl mockHdfsStore = createMockHDFSStoreImpl(mockContext, "hdfsStoreName",
                "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 20, 20, null, false, 0,
                1024, false, false, true, 20, 20, 10, 100);

        final HDFSStoreConfigHolder mockHdfsStoreConfigHolder = HDFSStoreCommandsJUnitTest.createMockHDFSStoreConfigHolder(
                mockContext, "hdfsStoreName", "hdfs://localhost:9000", "testDir", 1024, 20, .25f,
                null, 40, 40, null, false, 0, 2048, true, true, true, 40, 40, 40, 800);

        final CreateHDFSStoreFunction function = new TestCreateHDFSStoreFunction(mockCache, mockMember, xmlEntity, mockHdfsStore);

        mockContext.checking(new Expectations() {
            {
                oneOf(mockMember).getId();
                will(returnValue(memberId));
                exactly(2).of(mockMember).getName();
                will(returnValue(memberName));
                oneOf(mockFunctionContext).getArguments();
                will(returnValue(mockHdfsStoreConfigHolder));
                oneOf(mockFunctionContext).getResultSender();
                will(returnValue(testResultSender));
            }
        });

        function.execute(mockFunctionContext);

        final List<?> results = testResultSender.getResults();

        assertNotNull(results);
        assertEquals(1, results.size());

        final CliFunctionResult result = (CliFunctionResult) results.get(0);
        assertEquals(memberName, result.getMemberIdOrName());
        assertEquals("Success", result.getMessage());
    }

    /** A closed cache (CacheClosedException) yields an empty member name and a null message. */
    @Test
    public void testExecuteOnMemberWithNoCache() throws Throwable {
        final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "MockFunctionContext");
        final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
        final GemFireCacheImpl mockCache = mockContext.mock(GemFireCacheImpl.class, "Cache");
        final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");

        final String memberId = "mockMemberId";
        final String memberName = "mockMemberName";

        final TestResultSender testResultSender = new TestResultSender();

        final HDFSStoreImpl mockHdfsStore = createMockHDFSStoreImpl(mockContext, "hdfsStoreName",
                "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 20, 20, null, false, 0,
                1024, false, false, true, 20, 20, 10, 100);

        final HDFSStoreConfigHolder mockHdfsStoreConfigHolder = HDFSStoreCommandsJUnitTest.createMockHDFSStoreConfigHolder(mockContext,
                "hdfsStoreName", "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 40, 40,
                null, false, 0, 2048, true, true, true, 40, 40, 40, 800);

        // Override getCache() to simulate a member whose cache is already closed.
        final CreateHDFSStoreFunction function = new TestCreateHDFSStoreFunction(mockCache, mockMember, xmlEntity, mockHdfsStore) {
            @Override
            protected Cache getCache() {
                throw new CacheClosedException("Expected");
            }
        };

        mockContext.checking(new Expectations() {
            {
                oneOf(mockFunctionContext).getResultSender();
                will(returnValue(testResultSender));
            }
        });

        function.execute(mockFunctionContext);

        final List<?> results = testResultSender.getResults();

        assertNotNull(results);
        assertEquals(1, results.size());

        final CliFunctionResult result = (CliFunctionResult) results.get(0);
        assertEquals("", result.getMemberIdOrName());
        assertNull(result.getMessage());
    }

    /** An unexpected RuntimeException is captured in the result's throwable. */
    @Test
    public void testExecuteHandleRuntimeException() throws Throwable {
        final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "MockFunctionContext");
        final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
        final GemFireCacheImpl mockCache = mockContext.mock(GemFireCacheImpl.class, "Cache");
        final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");

        final String memberId = "mockMemberId";
        final String memberName = "mockMemberName";

        final TestResultSender testResultSender = new TestResultSender();

        final HDFSStoreImpl mockHdfsStore = createMockHDFSStoreImpl(mockContext, "hdfsStoreName",
                "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 20, 20, null, false, 0,
                1024, false, false, true, 20, 20, 10, 100);

        final HDFSStoreConfigHolder mockHdfsStoreConfigHolder = HDFSStoreCommandsJUnitTest.createMockHDFSStoreConfigHolder(
                mockContext, "hdfsStoreName", "hdfs://localhost:9000", "testDir", 1024, 20, .25f,
                null, 40, 40, null, false, 0, 2048, true, true, true, 40, 40, 40, 800);

        // Override getCache() to simulate an arbitrary runtime failure.
        final CreateHDFSStoreFunction function = new TestCreateHDFSStoreFunction(mockCache, mockMember, xmlEntity, mockHdfsStore) {
            @Override
            protected Cache getCache() {
                throw new RuntimeException("expected");
            }
        };

        mockContext.checking(new Expectations() {
            {
                oneOf(mockFunctionContext).getResultSender();
                will(returnValue(testResultSender));
            }
        });

        function.execute(mockFunctionContext);

        final List<?> results = testResultSender.getResults();

        assertNotNull(results);
        assertEquals(1, results.size());

        final CliFunctionResult result = (CliFunctionResult) results.get(0);
        assertEquals("", result.getMemberIdOrName());
        assertEquals("expected", result.getThrowable().getMessage());
    }

    /**
     * Builds a mocked HDFSStoreImpl whose getters return the supplied configuration values.
     * Delegates the expectation wiring to HDFSStoreCommandsJUnitTest.createMockStore.
     */
    public static HDFSStoreImpl createMockHDFSStoreImpl(Mockery mockContext, final String storeName,
            final String namenode, final String homeDir, final int maxFileSize, final int fileRolloverInterval,
            final float blockCachesize, final String clientConfigFile, final int batchSize,
            final int batchInterval, final String diskStoreName, final boolean syncDiskwrite,
            final int dispatcherThreads, final int maxMemory, final boolean bufferPersistent,
            final boolean minorCompact, final boolean majorCompact, final int majorCompactionInterval,
            final int majorCompactionThreads, final int minorCompactionThreads, final int purgeInterval) {

        HDFSStoreImpl mockHdfsStore = mockContext.mock(HDFSStoreImpl.class, "HDFSStoreImpl");

        HDFSStoreCommandsJUnitTest.createMockStore(mockContext, mockHdfsStore, storeName,
                namenode, homeDir, maxFileSize, fileRolloverInterval, minorCompact,
                minorCompactionThreads, majorCompact, majorCompactionThreads, majorCompactionInterval,
                purgeInterval, blockCachesize, clientConfigFile, batchSize, batchInterval,
                diskStoreName, syncDiskwrite, dispatcherThreads, maxMemory, bufferPersistent);

        return mockHdfsStore;
    }

    /**
     * Test double for CreateHDFSStoreFunction that replaces its environment lookups
     * (cache, member, XML entity, store creation) with the supplied canned objects.
     */
    protected static class TestCreateHDFSStoreFunction extends CreateHDFSStoreFunction {

        private static final long serialVersionUID = 1L;

        private final Cache cache;
        private final DistributedMember member;
        private final XmlEntity xml;
        private final HDFSStoreImpl hdfsStore;

        public TestCreateHDFSStoreFunction(Cache cache, DistributedMember member, XmlEntity xml, HDFSStoreImpl hdfsStore) {
            this.cache = cache;
            this.member = member;
            this.xml = xml;
            this.hdfsStore = hdfsStore;
        }

        @Override
        protected Cache getCache() {
            return this.cache;
        }

        @Override
        protected DistributedMember getDistributedMember(Cache cache) {
            return member;
        }

        @Override
        protected XmlEntity getXMLEntity(String storeName) {
            return xml;
        }

        @Override
        protected HDFSStoreImpl createHdfsStore(Cache cache, HDFSStoreConfigHolder configHolder) {
            return hdfsStore;
        }
    }

    /**
     * In-memory ResultSender that records results for later inspection; getResults()
     * rethrows any exception delivered via sendException.
     */
    protected static class TestResultSender implements ResultSender {

        private final List<Object> results = new LinkedList<Object>();

        private Throwable t;

        protected List<Object> getResults() throws Throwable {
            if (t != null) {
                throw t;
            }
            return Collections.unmodifiableList(results);
        }

        public void lastResult(final Object lastResult) {
            results.add(lastResult);
        }

        public void sendResult(final Object oneResult) {
            results.add(oneResult);
        }

        public void sendException(final Throwable t) {
            this.t = t;
        }
    }
}
/*
 * Copyright 2012-2015 Aerospike, Inc.
 *
 * Portions may be licensed to Aerospike, Inc. under one or more contributor
 * license agreements WHICH ARE COMPATIBLE WITH THE APACHE LICENSE, VERSION 2.0.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.aerospike.examples;

import java.io.IOException;
import java.net.ConnectException;

import com.aerospike.client.AerospikeException;
import com.aerospike.client.Bin;
import com.aerospike.client.Key;
import com.aerospike.client.Record;
import com.aerospike.client.async.AsyncClient;
import com.aerospike.client.listener.RecordListener;
import com.aerospike.client.listener.WriteListener;
import com.aerospike.client.policy.Policy;
import com.aerospike.client.policy.WritePolicy;

/**
 * Demonstrates asynchronous put followed by get, first with inline anonymous
 * listeners and then with reusable listener classes that retry on socket errors.
 * Completion is signaled through a wait/notify handshake on this instance.
 */
public class AsyncPutGet extends AsyncExample {
    // Guarded by this instance's monitor; set by notifyCompleted().
    private boolean completed;

    public AsyncPutGet(Console console) {
        super(console);
    }

    /**
     * Asynchronously write and read a bin using alternate methods.
     */
    @Override
    public void runExample(AsyncClient client, Parameters params) throws Exception {
        Key key = new Key(params.namespace, params.set, "putgetkey");
        Bin bin = new Bin(params.getBinName("putgetbin"), "value");

        runPutGetInline(client, params, key, bin);
        waitTillComplete();
        completed = false;

        runPutGetWithRetry(client, params, key, bin);
        waitTillComplete();
    }

    // Inline asynchronous put/get calls.
    private void runPutGetInline(final AsyncClient client, final Parameters params, final Key key, final Bin bin) throws AerospikeException {
        console.info("Put: namespace=%s set=%s key=%s value=%s",
            key.namespace, key.setName, key.userKey, bin.value);

        client.put(params.writePolicy, new WriteListener() {
            public void onSuccess(final Key key) {
                try {
                    // Write succeeded.  Now call read.
                    console.info("Get: namespace=%s set=%s key=%s",
                        key.namespace, key.setName, key.userKey);

                    client.get(params.policy, new RecordListener() {
                        public void onSuccess(final Key key, final Record record) {
                            validateBin(key, bin, record);
                            notifyCompleted();
                        }

                        public void onFailure(AerospikeException e) {
                            console.error("Failed to get: namespace=%s set=%s key=%s exception=%s",
                                key.namespace, key.setName, key.userKey, e.getMessage());
                            notifyCompleted();
                        }
                    }, key);
                }
                catch (Exception e) {
                    console.error("Failed to get: namespace=%s set=%s key=%s exception=%s",
                        key.namespace, key.setName, key.userKey, e.getMessage());
                    // BUG FIX: without this, a synchronous get() failure left
                    // runExample() blocked forever in waitTillComplete().
                    notifyCompleted();
                }
            }

            public void onFailure(AerospikeException e) {
                console.error("Failed to put: namespace=%s set=%s key=%s exception=%s",
                    key.namespace, key.setName, key.userKey, e.getMessage());
                notifyCompleted();
            }
        }, key, bin);
    }

    // Asynchronous put/get calls with retry.
    private void runPutGetWithRetry(AsyncClient client, Parameters params, Key key, Bin bin) throws Exception {
        console.info("Put: namespace=%s set=%s key=%s value=%s",
            key.namespace, key.setName, key.userKey, bin.value);

        client.put(params.writePolicy, new WriteHandler(client, params.writePolicy, key, bin), key, bin);
    }

    /** Write listener that chains into a read on success and retries on socket errors. */
    private class WriteHandler implements WriteListener {
        private final AsyncClient client;
        private final WritePolicy policy;
        private final Key key;
        private final Bin bin;
        private int failCount = 0;

        public WriteHandler(AsyncClient client, WritePolicy policy, Key key, Bin bin) {
            this.client = client;
            this.policy = policy;
            this.key = key;
            this.bin = bin;
        }

        // Write success callback.
        public void onSuccess(Key key) {
            try {
                // Write succeeded.  Now call read.
                console.info("Get: namespace=%s set=%s key=%s", key.namespace, key.setName, key.userKey);
                client.get(policy, new ReadHandler(client, policy, key, bin), key);
            }
            catch (Exception e) {
                console.error("Failed to get: namespace=%s set=%s key=%s exception=%s",
                    key.namespace, key.setName, key.userKey, e.getMessage());
                // BUG FIX: signal completion so the example does not hang when
                // the get() call itself throws.
                notifyCompleted();
            }
        }

        // Error callback.
        public void onFailure(AerospikeException e) {
            // Retry up to 2 more times.
            if (++failCount <= 2) {
                Throwable t = e.getCause();

                // Check for common socket errors.
                if (t != null && (t instanceof ConnectException || t instanceof IOException)) {
                    console.info("Retrying put: " + key.userKey);
                    try {
                        client.put(policy, this, key, bin);
                        return;
                    }
                    catch (Exception ex) {
                        // Fall through to error case.
                    }
                }
            }
            console.error("Put failed: namespace=%s set=%s key=%s exception=%s",
                key.namespace, key.setName, key.userKey, e.getMessage());
            notifyCompleted();
        }
    }

    /** Read listener that validates the bin on success and retries on socket errors. */
    private class ReadHandler implements RecordListener {
        private final AsyncClient client;
        private final Policy policy;
        private final Key key;
        private final Bin bin;
        private int failCount = 0;

        public ReadHandler(AsyncClient client, Policy policy, Key key, Bin bin) {
            this.client = client;
            this.policy = policy;
            this.key = key;
            this.bin = bin;
        }

        // Read success callback.
        public void onSuccess(Key key, Record record) {
            // Verify received bin value is what was written.
            validateBin(key, bin, record);
            notifyCompleted();
        }

        // Error callback.
        public void onFailure(AerospikeException e) {
            // Retry up to 2 more times.
            if (++failCount <= 2) {
                Throwable t = e.getCause();

                // Check for common socket errors.
                if (t != null && (t instanceof ConnectException || t instanceof IOException)) {
                    console.info("Retrying get: " + key.userKey);
                    try {
                        client.get(policy, this, key);
                        return;
                    }
                    catch (Exception ex) {
                        // Fall through to error case.
                    }
                }
            }
            console.error("Get failed: namespace=%s set=%s key=%s exception=%s",
                key.namespace, key.setName, key.userKey, e.getMessage());
            notifyCompleted();
        }
    }

    /** Compares the record's bin value with what was written and logs match/mismatch. */
    private void validateBin(Key key, Bin bin, Record record) {
        Object received = (record == null) ? null : record.getValue(bin.name);
        String expected = bin.value.toString();

        if (received != null && received.equals(expected)) {
            console.info("Bin matched: namespace=%s set=%s key=%s bin=%s value=%s",
                key.namespace, key.setName, key.userKey, bin.name, received);
        }
        else {
            console.error("Put/Get mismatch: Expected %s. Received %s.", expected, received);
        }
    }

    /** Blocks the example thread until a listener calls notifyCompleted(). */
    private synchronized void waitTillComplete() {
        while (!completed) {
            try {
                super.wait();
            }
            catch (InterruptedException ie) {
            }
        }
    }

    /** Wakes the thread blocked in waitTillComplete(). */
    private synchronized void notifyCompleted() {
        completed = true;
        super.notify();
    }
}
/*
 * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.identity.application.mgt;

import org.apache.axiom.om.OMElement;
import org.apache.axis2.AxisFault;
import org.apache.axis2.description.AxisService;
import org.apache.axis2.description.Parameter;
import org.apache.axis2.engine.AxisConfiguration;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.rahas.impl.SAMLTokenIssuerConfig;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.context.RegistryType;
import org.wso2.carbon.core.RegistryResources;
import org.wso2.carbon.directory.server.manager.DirectoryServerManager;
import org.wso2.carbon.identity.application.common.IdentityApplicationManagementException;
import org.wso2.carbon.identity.application.common.model.ApplicationBasicInfo;
import org.wso2.carbon.identity.application.common.model.ApplicationPermission;
import org.wso2.carbon.identity.application.common.model.AuthenticationStep;
import org.wso2.carbon.identity.application.common.model.IdentityProvider;
import org.wso2.carbon.identity.application.common.model.InboundAuthenticationRequestConfig;
import org.wso2.carbon.identity.application.common.model.LocalAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.PermissionsAndRoleConfig;
import org.wso2.carbon.identity.application.common.model.RequestPathAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.ServiceProvider;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationConstants;
import org.wso2.carbon.identity.application.mgt.cache.IdentityServiceProviderCache;
import org.wso2.carbon.identity.application.mgt.cache.IdentityServiceProviderCacheEntry;
import org.wso2.carbon.identity.application.mgt.cache.IdentityServiceProviderCacheKey;
import org.wso2.carbon.identity.application.mgt.dao.ApplicationDAO;
import org.wso2.carbon.identity.application.mgt.dao.IdentityProviderDAO;
import org.wso2.carbon.identity.application.mgt.dao.OAuthApplicationDAO;
import org.wso2.carbon.identity.application.mgt.dao.SAMLApplicationDAO;
import org.wso2.carbon.identity.application.mgt.dao.impl.FileBasedApplicationDAO;
import org.wso2.carbon.identity.application.mgt.internal.ApplicationManagementServiceComponent;
import org.wso2.carbon.identity.application.mgt.internal.ApplicationManagementServiceComponentHolder;
import org.wso2.carbon.identity.application.mgt.internal.ApplicationMgtListenerServiceComponent;
import org.wso2.carbon.identity.application.mgt.listener.ApplicationMgtListener;
import org.wso2.carbon.registry.api.RegistryException;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.security.SecurityConfigException;
import org.wso2.carbon.security.config.SecurityServiceAdmin;
import org.wso2.carbon.user.api.ClaimMapping;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.utils.ServerConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Application management service implementation. Can be used as an OSGi
 * service and is reused by the admin service.
 */
public class ApplicationManagementServiceImpl extends ApplicationManagementService {

    private static Log log = LogFactory.getLog(ApplicationManagementServiceImpl.class);
    // volatile is required for the double-checked locking in getInstance().
    private static volatile ApplicationManagementServiceImpl appMgtService;

    /**
     * Private constructor to prevent instantiation from outside (singleton).
     */
    private ApplicationManagementServiceImpl() {

    }

    /**
     * Get the ApplicationManagementServiceImpl singleton instance
     * (lazily created, double-checked locking).
     *
     * @return ApplicationManagementServiceImpl
     */
    public static ApplicationManagementServiceImpl getInstance() {
        if (appMgtService == null) {
            synchronized (ApplicationManagementServiceImpl.class) {
                if (appMgtService == null) {
                    appMgtService = new ApplicationManagementServiceImpl();
                }
            }
        }
        return appMgtService;
    }

    /**
     * Creates a service provider: notifies listeners, creates the app role and
     * permissions, then persists via the DAO. On any failure the role and
     * permissions are rolled back (best effort) before rethrowing.
     */
    @Override
    public int createApplication(ServiceProvider serviceProvider, String tenantDomain, String userName)
            throws IdentityApplicationManagementException {
        try {
            startTenantFlow(tenantDomain, userName);

            // invoking the listeners
            List<ApplicationMgtListener> listeners = ApplicationMgtListenerServiceComponent.getListners();
            for (ApplicationMgtListener listener : listeners) {
                listener.createApplication(serviceProvider);
            }

            // first we need to create a role with the application name.
            // only the users in this role will be able to edit/update the
            // application.
            ApplicationMgtUtil.createAppRole(serviceProvider.getApplicationName());
            ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO();
            ApplicationMgtUtil.storePermission(serviceProvider.getApplicationName(),
                    serviceProvider.getPermissionAndRoleConfig());
            return appDAO.createApplication(serviceProvider, tenantDomain);
        } catch (Exception e) {
            // best-effort rollback of the role/permissions created above
            try {
                ApplicationMgtUtil.deleteAppRole(serviceProvider.getApplicationName());
                ApplicationMgtUtil.deletePermissions(serviceProvider.getApplicationName());
            } catch (Exception ignored) {
                if (log.isDebugEnabled()) {
                    log.debug("Ignored the exception occurred while trying to delete the role : ", e);
                }
            }
            String error = "Error occurred while creating the application : " + serviceProvider.getApplicationName();
            log.error(error, e);
            throw new IdentityApplicationManagementException(error, e);
        } finally {
            endTenantFlow();
        }
    }

    /**
     * Retrieves a service provider from the database DAO only (file-based SPs
     * are intentionally excluded) and attaches its registry permissions.
     */
    @Override
    public ServiceProvider getApplicationExcludingFileBasedSPs(String applicationName, String tenantDomain)
            throws IdentityApplicationManagementException {
        try {
            startTenantFlow(tenantDomain);
            ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO();
            ServiceProvider serviceProvider = appDAO.getApplication(applicationName, tenantDomain);
            loadApplicationPermissions(applicationName, serviceProvider);
            return serviceProvider;
        } catch (Exception e) {
            String error = "Error occurred while retrieving the application, " + applicationName;
            log.error(error, e);
            throw new IdentityApplicationManagementException(error, e);
        } finally {
            endTenantFlow();
        }
    }

    /** Returns basic info for all applications of the given tenant. */
    @Override
    public ApplicationBasicInfo[] getAllApplicationBasicInfo(String tenantDomain, String userName)
            throws IdentityApplicationManagementException {
        try {
            startTenantFlow(tenantDomain, userName);
            ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO();
            return appDAO.getAllApplicationBasicInfo();
        } catch (Exception e) {
            String error = "Error occurred while retrieving the all applications";
            log.error(error, e);
            throw new IdentityApplicationManagementException(error, e);
        } finally {
            endTenantFlow();
        }
    }

    /**
     * Updates a service provider: evicts the cached entry (in the super-tenant
     * flow), notifies listeners, checks authorization, persists the update and
     * synchronizes the registry permission tree with any rename.
     */
    @Override
    public void updateApplication(ServiceProvider serviceProvider, String tenantDomain, String userName)
            throws IdentityApplicationManagementException {
        try {
            // the cache is maintained in the super-tenant registry space,
            // so the eviction happens inside a separate tenant flow
            try {
                startTenantFlow(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME);
                IdentityServiceProviderCacheKey cacheKey = new IdentityServiceProviderCacheKey(
                        tenantDomain, serviceProvider.getApplicationName());
                IdentityServiceProviderCache.getInstance().clearCacheEntry(cacheKey);
            } finally {
                endTenantFlow();
                startTenantFlow(tenantDomain, userName);
            }

            // invoking the listeners
            List<ApplicationMgtListener> listeners = ApplicationMgtListenerServiceComponent.getListners();
            for (ApplicationMgtListener listener : listeners) {
                listener.updateApplication(serviceProvider);
            }

            // check whether the user is authorized to update the application.
            if (!ApplicationConstants.LOCAL_SP.equals(serviceProvider.getApplicationName()) &&
                    !ApplicationMgtUtil.isUserAuthorized(serviceProvider.getApplicationName(),
                            serviceProvider.getApplicationID())) {
                log.warn("Illegal Access! User " + CarbonContext.getThreadLocalCarbonContext().getUsername() +
                        " does not have access to the application " + serviceProvider.getApplicationName());
                throw new IdentityApplicationManagementException("User not authorized");
            }

            ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO();
            // the stored name may differ from the incoming one when the update renames the app
            String storedAppName = appDAO.getApplicationName(serviceProvider.getApplicationID());
            appDAO.updateApplication(serviceProvider);

            ApplicationPermission[] permissions = serviceProvider.getPermissionAndRoleConfig().getPermissions();
            String applicationNode = ApplicationMgtUtil.getApplicationPermissionPath() + RegistryConstants
                    .PATH_SEPARATOR + storedAppName;
            org.wso2.carbon.registry.api.Registry tenantGovReg = CarbonContext.getThreadLocalCarbonContext()
                    .getRegistry(RegistryType.USER_GOVERNANCE);

            boolean exist = tenantGovReg.resourceExists(applicationNode);
            if (exist && !storedAppName.equals(serviceProvider.getApplicationName())) {
                // keep the permission path in sync with the renamed application
                ApplicationMgtUtil.renameAppPermissionPathNode(storedAppName, serviceProvider.getApplicationName());
            }

            if (ArrayUtils.isNotEmpty(permissions)) {
                ApplicationMgtUtil.updatePermissions(serviceProvider.getApplicationName(), permissions);
            }
        } catch (Exception e) {
            String error = "Error occurred while updating the application";
            log.error(error, e);
            throw new IdentityApplicationManagementException(error, e);
        } finally {
            endTenantFlow();
        }
    }

    /**
     * Starts a Carbon tenant flow for the given tenant domain (no user set).
     * Must be paired with endTenantFlow().
     */
    private void startTenantFlow(String tenantDomain) throws IdentityApplicationManagementException {
        int tenantId;
        try {
            tenantId = ApplicationManagementServiceComponentHolder.getInstance().getRealmService()
                    .getTenantManager().getTenantId(tenantDomain);
        } catch (UserStoreException e) {
            throw new IdentityApplicationManagementException("Error when setting tenant domain. ", e);
        }
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain);
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(tenantId);
    }

    /**
     * Starts a Carbon tenant flow for the given tenant domain and also sets the
     * acting username on the thread-local context. Must be paired with endTenantFlow().
     */
    private void startTenantFlow(String tenantDomain, String userName)
            throws IdentityApplicationManagementException {
        int tenantId;
        try {
            tenantId = ApplicationManagementServiceComponentHolder.getInstance().getRealmService()
                    .getTenantManager().getTenantId(tenantDomain);
        } catch (UserStoreException e) {
            throw new IdentityApplicationManagementException("Error when setting tenant domain. ", e);
        }
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain);
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(tenantId);
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(userName);
    }

    /** Ends the thread-local tenant flow started by startTenantFlow(). */
    private void endTenantFlow() {
        PrivilegedCarbonContext.endTenantFlow();
    }

    /**
     * Deletes a service provider: notifies listeners, checks authorization,
     * removes the application, its role and permissions, and then tears down any
     * inbound-auth artifacts (SAML SSO, OAuth, Kerberos, WS-Trust trusted service).
     */
    @Override
    public void deleteApplication(String applicationName, String tenantDomain, String userName)
            throws IdentityApplicationManagementException {
        try {
            startTenantFlow(tenantDomain, userName);

            // invoking the listeners
            List<ApplicationMgtListener> listeners = ApplicationMgtListenerServiceComponent.getListners();
            for (ApplicationMgtListener listener : listeners) {
                listener.deleteApplication(applicationName);
            }

            if (!ApplicationMgtUtil.isUserAuthorized(applicationName)) {
                log.warn("Illegal Access! User " + CarbonContext.getThreadLocalCarbonContext().getUsername() +
                        " does not have access to the application " + applicationName);
                throw new IdentityApplicationManagementException("User not authorized");
            }

            ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO();
            // fetch before delete so the inbound-auth configs are still available below
            ServiceProvider serviceProvider = appDAO.getApplication(applicationName, tenantDomain);
            appDAO.deleteApplication(applicationName);

            ApplicationMgtUtil.deleteAppRole(applicationName);
            ApplicationMgtUtil.deletePermissions(applicationName);

            if (serviceProvider != null &&
                    serviceProvider.getInboundAuthenticationConfig() != null &&
                    serviceProvider.getInboundAuthenticationConfig().getInboundAuthenticationRequestConfigs() != null) {

                InboundAuthenticationRequestConfig[] configs = serviceProvider.getInboundAuthenticationConfig()
                        .getInboundAuthenticationRequestConfigs();

                for (InboundAuthenticationRequestConfig config : configs) {

                    if (IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.
                            equalsIgnoreCase(config.getInboundAuthType()) && config.getInboundAuthKey() != null) {
                        SAMLApplicationDAO samlDAO = ApplicationMgtSystemConfig.getInstance().getSAMLClientDAO();
                        samlDAO.removeServiceProviderConfiguration(config.getInboundAuthKey());

                    } else if (IdentityApplicationConstants.OAuth2.NAME.equalsIgnoreCase(config.getInboundAuthType())
                            && config.getInboundAuthKey() != null) {
                        OAuthApplicationDAO oathDAO = ApplicationMgtSystemConfig.getInstance().getOAuthOIDCClientDAO();
                        oathDAO.removeOAuthApplication(config.getInboundAuthKey());

                    } else if ("kerberos".equalsIgnoreCase(config.getInboundAuthType())
                            && config.getInboundAuthKey() != null) {
                        DirectoryServerManager directoryServerManager = new DirectoryServerManager();
                        directoryServerManager.removeServer(config.getInboundAuthKey());

                    } else if (IdentityApplicationConstants.Authenticator.WSTrust.NAME.equalsIgnoreCase(
                            config.getInboundAuthType()) && config.getInboundAuthKey() != null) {
                        try {
                            // remove the SP from the STS trusted-services configuration
                            AxisService stsService = getAxisConfig().getService(ServerConstants.STS_NAME);
                            Parameter origParam =
                                    stsService.getParameter(SAMLTokenIssuerConfig.SAML_ISSUER_CONFIG.getLocalPart());
                            if (origParam != null) {
                                OMElement samlConfigElem = origParam.getParameterElement()
                                        .getFirstChildWithName(SAMLTokenIssuerConfig.SAML_ISSUER_CONFIG);
                                SAMLTokenIssuerConfig samlConfig = new SAMLTokenIssuerConfig(samlConfigElem);
                                samlConfig.getTrustedServices().remove(config.getInboundAuthKey());
                                setSTSParameter(samlConfig);
                                removeTrustedService(ServerConstants.STS_NAME, ServerConstants.STS_NAME,
                                        config.getInboundAuthKey());
                            } else {
                                throw new IdentityApplicationManagementException(
                                        "missing parameter : " + SAMLTokenIssuerConfig.SAML_ISSUER_CONFIG.getLocalPart());
                            }
                        } catch (Exception e) {
                            String error = "Error while removing a trusted service";
                            log.error(error, e);
                            throw new IdentityApplicationManagementException(error, e);
                        }
                    }
                }
            }
        } catch (Exception e) {
            String error = "Error occurred while deleting the application";
            log.error(error, e);
            throw new IdentityApplicationManagementException(error, e);
        } finally {
            endTenantFlow();
        }
    }

    /** Looks up a single federated identity provider by name within the tenant. */
    @Override
    public IdentityProvider getIdentityProvider(String federatedIdPName, String tenantDomain)
            throws IdentityApplicationManagementException {
        try {
            startTenantFlow(tenantDomain);
            IdentityProviderDAO idpdao = ApplicationMgtSystemConfig.getInstance().getIdentityProviderDAO();
            return idpdao.getIdentityProvider(federatedIdPName);
        } catch (Exception e) {
            String error = "Error occurred while retrieving Identity Provider";
            log.error(error, e);
            throw new IdentityApplicationManagementException(error, e);
        } finally {
            endTenantFlow();
        }
    }

    /** Returns all federated identity providers for the tenant. */
    @Override
    public IdentityProvider[] getAllIdentityProviders(String tenantDomain)
            throws IdentityApplicationManagementException {
        try {
            startTenantFlow(tenantDomain);
            IdentityProviderDAO idpdao = ApplicationMgtSystemConfig.getInstance().getIdentityProviderDAO();
            List<IdentityProvider> fedIdpList = idpdao.getAllIdentityProviders();
            if (fedIdpList != null) {
return fedIdpList.toArray(new IdentityProvider[fedIdpList.size()]); } return new IdentityProvider[0]; } catch (Exception e) { String error = "Error occurred while retrieving all Identity Providers"; log.error(error, e); throw new IdentityApplicationManagementException(error, e); } finally { endTenantFlow(); } } @Override public LocalAuthenticatorConfig[] getAllLocalAuthenticators(String tenantDomain) throws IdentityApplicationManagementException { try { startTenantFlow(tenantDomain); IdentityProviderDAO idpdao = ApplicationMgtSystemConfig.getInstance().getIdentityProviderDAO(); List<LocalAuthenticatorConfig> localAuthenticators = idpdao.getAllLocalAuthenticators(); if (localAuthenticators != null) { return localAuthenticators.toArray(new LocalAuthenticatorConfig[localAuthenticators.size()]); } return new LocalAuthenticatorConfig[0]; } catch (Exception e) { String error = "Error occurred while retrieving all Local Authenticators"; log.error(error, e); throw new IdentityApplicationManagementException(error, e); } finally { endTenantFlow(); } } @Override public RequestPathAuthenticatorConfig[] getAllRequestPathAuthenticators(String tenantDomain) throws IdentityApplicationManagementException { try { startTenantFlow(tenantDomain); IdentityProviderDAO idpdao = ApplicationMgtSystemConfig.getInstance().getIdentityProviderDAO(); List<RequestPathAuthenticatorConfig> reqPathAuthenticators = idpdao.getAllRequestPathAuthenticators(); if (reqPathAuthenticators != null) { return reqPathAuthenticators.toArray(new RequestPathAuthenticatorConfig[reqPathAuthenticators.size()]); } return new RequestPathAuthenticatorConfig[0]; } catch (Exception e) { String error = "Error occurred while retrieving all Request Path Authenticators"; log.error(error, e); throw new IdentityApplicationManagementException(error, e); } finally { endTenantFlow(); } } @Override public String[] getAllLocalClaimUris(String tenantDomain) throws IdentityApplicationManagementException { try { 
startTenantFlow(tenantDomain); String claimDialect = ApplicationMgtSystemConfig.getInstance().getClaimDialect(); ClaimMapping[] claimMappings = CarbonContext.getThreadLocalCarbonContext().getUserRealm().getClaimManager() .getAllClaimMappings(claimDialect); List<String> claimUris = new ArrayList<>(); for (ClaimMapping claimMap : claimMappings) { claimUris.add(claimMap.getClaim().getClaimUri()); } return claimUris.toArray(new String[claimUris.size()]); } catch (Exception e) { String error = "Error while reading system claims"; log.error(error, e); throw new IdentityApplicationManagementException(error, e); } finally { endTenantFlow(); } } @Override public String getServiceProviderNameByClientIdExcludingFileBasedSPs(String clientId, String type, String tenantDomain) throws IdentityApplicationManagementException { try { ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO(); return appDAO.getServiceProviderNameByClientId(clientId, type, tenantDomain); } catch (Exception e) { String error = "Error occurred while retrieving the service provider for client id : " + clientId; log.error(error, e); throw new IdentityApplicationManagementException(error, e); } } /** * [sp-claim-uri,local-idp-claim-uri] * * @param serviceProviderName * @param tenantDomain * @return * @throws IdentityApplicationManagementException */ @Override public Map<String, String> getServiceProviderToLocalIdPClaimMapping(String serviceProviderName, String tenantDomain) throws IdentityApplicationManagementException { ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO(); Map<String, String> claimMap = appDAO.getServiceProviderToLocalIdPClaimMapping( serviceProviderName, tenantDomain); if (claimMap == null || claimMap.isEmpty() && ApplicationManagementServiceComponent.getFileBasedSPs().containsKey( serviceProviderName)) { return new FileBasedApplicationDAO().getServiceProviderToLocalIdPClaimMapping( serviceProviderName, tenantDomain); } return 
claimMap; } /** * [local-idp-claim-uri,sp-claim-uri] * * @param serviceProviderName * @param tenantDomain * @return * @throws IdentityApplicationManagementException */ @Override public Map<String, String> getLocalIdPToServiceProviderClaimMapping(String serviceProviderName, String tenantDomain) throws IdentityApplicationManagementException { ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO(); Map<String, String> claimMap = appDAO.getLocalIdPToServiceProviderClaimMapping( serviceProviderName, tenantDomain); if (claimMap == null || claimMap.isEmpty() && ApplicationManagementServiceComponent.getFileBasedSPs().containsKey( serviceProviderName)) { return new FileBasedApplicationDAO().getLocalIdPToServiceProviderClaimMapping( serviceProviderName, tenantDomain); } return claimMap; } /** * Returns back the requested set of claims by the provided service provider in local idp claim * dialect. * * @param serviceProviderName * @param tenantDomain * @return * @throws IdentityApplicationManagementException */ @Override public List<String> getAllRequestedClaimsByServiceProvider(String serviceProviderName, String tenantDomain) throws IdentityApplicationManagementException { ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO(); List<String> reqClaims = appDAO.getAllRequestedClaimsByServiceProvider(serviceProviderName, tenantDomain); if (reqClaims == null || reqClaims.isEmpty() && ApplicationManagementServiceComponent.getFileBasedSPs().containsKey( serviceProviderName)) { return new FileBasedApplicationDAO().getAllRequestedClaimsByServiceProvider( serviceProviderName, tenantDomain); } return reqClaims; } /** * @param clientId * @param clientType * @param tenantDomain * @return * @throws IdentityApplicationManagementException */ @Override public String getServiceProviderNameByClientId(String clientId, String clientType, String tenantDomain) throws IdentityApplicationManagementException { String name; ApplicationDAO 
appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO(); name = appDAO.getServiceProviderNameByClientId(clientId, clientType, tenantDomain); if (name == null) { name = new FileBasedApplicationDAO().getServiceProviderNameByClientId(clientId, clientType, tenantDomain); } if (name == null) { ServiceProvider defaultSP = ApplicationManagementServiceComponent.getFileBasedSPs() .get(IdentityApplicationConstants.DEFAULT_SP_CONFIG); name = defaultSP.getApplicationName(); } return name; } /** * @param serviceProviderName * @param tenantDomain * @return * @throws IdentityApplicationManagementException */ @Override public ServiceProvider getServiceProvider(String serviceProviderName, String tenantDomain) throws IdentityApplicationManagementException { startTenantFlow(tenantDomain); ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO(); ServiceProvider serviceProvider = appDAO.getApplication(serviceProviderName, tenantDomain); if (serviceProvider != null) { loadApplicationPermissions(serviceProviderName, serviceProvider); } if (serviceProvider == null && ApplicationManagementServiceComponent.getFileBasedSPs().containsKey( serviceProviderName)) { serviceProvider = ApplicationManagementServiceComponent.getFileBasedSPs().get( serviceProviderName); } endTenantFlow(); return serviceProvider; } /** * @param clientId * @param clientType * @param tenantDomain * @return * @throws IdentityApplicationManagementException */ @Override public ServiceProvider getServiceProviderByClientId(String clientId, String clientType, String tenantDomain) throws IdentityApplicationManagementException { // client id can contain the @ to identify the tenant domain. 
if (clientId != null && clientId.contains("@")) { clientId = clientId.split("@")[0]; } String serviceProviderName; ServiceProvider serviceProvider = null; serviceProviderName = getServiceProviderNameByClientId(clientId, clientType, tenantDomain); try { startTenantFlow(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); IdentityServiceProviderCacheKey cacheKey = new IdentityServiceProviderCacheKey( tenantDomain, serviceProviderName); IdentityServiceProviderCacheEntry entry = ((IdentityServiceProviderCacheEntry) IdentityServiceProviderCache .getInstance().getValueFromCache(cacheKey)); if (entry != null) { return entry.getServiceProvider(); } } finally { endTenantFlow(); startTenantFlow(tenantDomain); } if (serviceProviderName != null) { ApplicationDAO appDAO = ApplicationMgtSystemConfig.getInstance().getApplicationDAO(); serviceProvider = appDAO.getApplication(serviceProviderName, tenantDomain); if (serviceProvider != null) { // if "Authentication Type" is "Default" we must get the steps from the default SP AuthenticationStep[] authenticationSteps = serviceProvider .getLocalAndOutBoundAuthenticationConfig().getAuthenticationSteps(); loadApplicationPermissions(serviceProviderName, serviceProvider); if (authenticationSteps == null || authenticationSteps.length == 0) { ServiceProvider defaultSP = ApplicationManagementServiceComponent .getFileBasedSPs().get(IdentityApplicationConstants.DEFAULT_SP_CONFIG); authenticationSteps = defaultSP.getLocalAndOutBoundAuthenticationConfig() .getAuthenticationSteps(); serviceProvider.getLocalAndOutBoundAuthenticationConfig() .setAuthenticationSteps(authenticationSteps); } } } if (serviceProvider == null && serviceProviderName != null && ApplicationManagementServiceComponent.getFileBasedSPs().containsKey( serviceProviderName)) { serviceProvider = ApplicationManagementServiceComponent.getFileBasedSPs().get( serviceProviderName); } endTenantFlow(); try { startTenantFlow(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME); 
IdentityServiceProviderCacheKey cacheKey = new IdentityServiceProviderCacheKey( tenantDomain, serviceProviderName); IdentityServiceProviderCacheEntry entry = new IdentityServiceProviderCacheEntry(); entry.setServiceProvider(serviceProvider); IdentityServiceProviderCache.getInstance().addToCache(cacheKey, entry); } finally { endTenantFlow(); } return serviceProvider; } private void loadApplicationPermissions(String serviceProviderName, ServiceProvider serviceProvider) throws IdentityApplicationManagementException { List<ApplicationPermission> permissionList = ApplicationMgtUtil.loadPermissions(serviceProviderName); if (permissionList != null) { PermissionsAndRoleConfig permissionAndRoleConfig; if (serviceProvider.getPermissionAndRoleConfig() == null) { permissionAndRoleConfig = new PermissionsAndRoleConfig(); } else { permissionAndRoleConfig = serviceProvider.getPermissionAndRoleConfig(); } permissionAndRoleConfig.setPermissions(permissionList.toArray( new ApplicationPermission[permissionList.size()])); serviceProvider.setPermissionAndRoleConfig(permissionAndRoleConfig); } } /** * Set STS parameters * * @param samlConfig SAML config * @throws org.apache.axis2.AxisFault * @throws org.wso2.carbon.registry.api.RegistryException */ private void setSTSParameter(SAMLTokenIssuerConfig samlConfig) throws AxisFault, RegistryException { new SecurityServiceAdmin(getAxisConfig(), getConfigSystemRegistry()). 
setServiceParameterElement(ServerConstants.STS_NAME, samlConfig.getParameter()); } /** * Remove trusted service * * @param groupName Group name * @param serviceName Service name * @param trustedService Trusted service name * @throws org.wso2.carbon.security.SecurityConfigException */ private void removeTrustedService(String groupName, String serviceName, String trustedService) throws SecurityConfigException { Registry registry; String resourcePath; Resource resource; try { resourcePath = RegistryResources.SERVICE_GROUPS + groupName + RegistryResources.SERVICES + serviceName + "/trustedServices"; registry = getConfigSystemRegistry(); if (registry != null) { if (registry.resourceExists(resourcePath)) { resource = registry.get(resourcePath); if (resource.getProperty(trustedService) != null) { resource.removeProperty(trustedService); } registry.put(resourcePath, resource); } } } catch (Exception e) { String error = "Error occurred while removing trusted service for STS"; log.error(error, e); throw new SecurityConfigException(error, e); } } /** * Get axis config * * @return axis configuration */ private AxisConfiguration getAxisConfig() { return ApplicationManagementServiceComponentHolder.getInstance().getConfigContextService() .getServerConfigContext().getAxisConfiguration(); } /** * Get config system registry * * @return config system registry * @throws org.wso2.carbon.registry.api.RegistryException */ private Registry getConfigSystemRegistry() throws RegistryException { return (Registry) ApplicationManagementServiceComponentHolder.getInstance().getRegistryService() .getConfigSystemRegistry(); } }
/*L * Copyright Georgetown University, Washington University. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cab2b/LICENSE.txt for details. */ package edu.wustl.cab2b.server.multimodelcategory; import static edu.wustl.cab2b.common.util.Constants.MMC_ENTITY_GROUP_NAME; import static edu.wustl.cab2b.common.util.Constants.MULTIMODELCATEGORY; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.log4j.Logger; import edu.common.dynamicextensions.domain.DomainObjectFactory; import edu.common.dynamicextensions.domaininterface.AttributeInterface; import edu.common.dynamicextensions.domaininterface.EntityGroupInterface; import edu.common.dynamicextensions.domaininterface.EntityInterface; import edu.common.dynamicextensions.entitymanager.EntityManager; import edu.common.dynamicextensions.exception.DynamicExtensionsApplicationException; import edu.common.dynamicextensions.exception.DynamicExtensionsSystemException; import edu.wustl.cab2b.common.errorcodes.ErrorCodeConstants; import edu.wustl.cab2b.common.exception.RuntimeException; import edu.wustl.cab2b.common.multimodelcategory.MultiModelAttribute; import edu.wustl.cab2b.common.multimodelcategory.MultiModelAttributeImpl; import edu.wustl.cab2b.common.multimodelcategory.MultiModelCategory; import edu.wustl.cab2b.common.multimodelcategory.MultiModelCategoryImpl; import edu.wustl.cab2b.common.multimodelcategory.bean.MultiModelAttributeBean; import edu.wustl.cab2b.common.multimodelcategory.bean.MultiModelCategoryBean; import edu.wustl.cab2b.server.cache.EntityCache; import edu.wustl.cab2b.server.category.CategoryOperations; import edu.wustl.cab2b.server.category.InputCategorialAttribute; import edu.wustl.cab2b.server.category.InputCategorialClass; import 
edu.wustl.cab2b.server.category.InputCategory; import edu.wustl.cab2b.server.category.PersistCategory; import edu.wustl.cab2b.server.path.PathFinder; import edu.wustl.cab2b.server.util.DynamicExtensionUtility; import edu.wustl.common.querysuite.metadata.category.CategorialAttribute; import edu.wustl.common.querysuite.metadata.category.CategorialClass; import edu.wustl.common.querysuite.metadata.category.Category; import edu.wustl.common.querysuite.metadata.path.IPath; import edu.wustl.common.util.dbManager.DBUtil; /** * This class takes in the MultiModelCategoryBean and generates the MultiModelCategogry out of it and finally * persists it into database. MultiModelCategoryBean is expected to get populated from UI where user/admin selects * or sets the appropriate values. * * @author chetan_patil * @author rajesh_vyas * */ public class PersistMultiModelCategory { private static final Logger logger = edu.wustl.common.util.logger.Logger.getLogger(PersistMultiModelCategory.class); private MultiModelCategoryBean mmcBean; private Set<EntityInterface> entities; private Map<EntityInterface, Collection<IPath>> sourceEntityToPaths; private Map<EntityInterface, Collection<InputCategorialAttribute>> entityInputCatagorialAttributeMap; private Map<Long, CategorialAttribute> attributeIDToCatAttr; private Map<MultiModelAttribute, Collection<Long>> multiModelAttributeToSelectedAttributeIds; private Set<MultiModelAttribute> multiModelAttributes; /** * Default constructor */ public PersistMultiModelCategory() { attributeIDToCatAttr = new HashMap<Long, CategorialAttribute>(); sourceEntityToPaths = new HashMap<EntityInterface, Collection<IPath>>(); entityInputCatagorialAttributeMap = new HashMap<EntityInterface, Collection<InputCategorialAttribute>>(); multiModelAttributes = new HashSet<MultiModelAttribute>(); multiModelAttributeToSelectedAttributeIds = new HashMap<MultiModelAttribute, Collection<Long>>(); } /** * This method assumes that MultiModelCategoryBean is well populated, so 
this class can create InputCategory * objects. * * @param mmcBean * @return */ public MultiModelCategory persistMMC(MultiModelCategoryBean mmcBean) { this.mmcBean = mmcBean; MultiModelCategory multiModelCategory = initialize(); PersistCategory persistCategory = new PersistCategory(); CategoryOperations categoryOperations = new CategoryOperations(); Collection<InputCategory> inputCategories = createInputCategories(); Collection<Category> categories = new ArrayList<Category>(inputCategories.size()); for (InputCategory inCategory : inputCategories) { logger.debug("Saving category: " + inCategory); Category category = persistCategory.persistCategory(inCategory, null, true); category.setSystemGenerated(Boolean.TRUE); mapAIToCatAttr(category.getRootClass()); categoryOperations.saveCategory(category); categories.add(category); logger.debug("Saved category id: " + category.getDeEntityId()); } addCategorialAttrsToMMA(); multiModelCategory.setCategories(categories); multiModelCategory.setMultiModelAttributes(multiModelAttributes); return multiModelCategory; } /** * This method adds categorial attributes form category saved into MultiModelAttributes. Basically, it maps the * attributes of categories saved to MultiModelAttribute. */ private void addCategorialAttrsToMMA() { for (MultiModelAttribute multiModelAttr : multiModelAttributes) { for (Long attribute : multiModelAttributeToSelectedAttributeIds.get(multiModelAttr)) { multiModelAttr.addCategorialAttribute(attributeIDToCatAttr.get(attribute)); } } } /** * This method is recursively called so that all children are processed. 
It generates map of * AttributeInterface->CategorialAttribute, so mmaToSelectedAI can be used, to derive MMA->CategorialAttribute * * @param catClass */ private void mapAIToCatAttr(CategorialClass catClass) { for (CategorialAttribute catAttr : catClass.getCategorialAttributeCollection()) { Long ai = catAttr.getDeSourceClassAttributeId(); attributeIDToCatAttr.put(ai, catAttr); } //recursive call, so children CA's also mapped. for (CategorialClass childCatClass : catClass.getChildren()) { mapAIToCatAttr(childCatClass); } } /** * This method initializes and generates MultiModelCategory * * @return */ private MultiModelCategory initialize() { MultiModelCategory multiModelCategory = createMultiModelCategory(); entities = new HashSet<EntityInterface>(entityInputCatagorialAttributeMap.keySet()); Set<EntityInterface> targetEntities = initEntityPathMapAndGetTargetEntities(); //After removing all targets, 'entities' will only contain root categories entities.removeAll(targetEntities); return multiModelCategory; } /** * This method creates MultiModelCategory * * @return */ private MultiModelCategory createMultiModelCategory() { EntityInterface mmcEntity = initMMCEntity(); MultiModelCategory multiModelCategory = new MultiModelCategoryImpl(); multiModelCategory.setApplicationGroup(mmcBean.getApplicationGroup()); multiModelCategory.setEntity(mmcEntity); return multiModelCategory; } /** * This method creates DynamicExtension's entity for MultiModelCategory * * @return */ private EntityInterface initMMCEntity() { EntityInterface mmcEntity = DomainObjectFactory.getInstance().createEntity(); mmcEntity.setName(mmcBean.getName()); mmcEntity.setDescription(mmcBean.getDescription()); mmcEntity.setCreatedDate(new Date()); mmcEntity.addEntityGroupInterface(getMMCEntityGroup()); DynamicExtensionUtility.addTaggedValue(mmcEntity, MULTIMODELCATEGORY, MULTIMODELCATEGORY); initAttributes(mmcEntity); mmcEntity = DynamicExtensionUtility.persistEntity(mmcEntity); 
EntityCache.getInstance().addEntityToCache(mmcEntity); return mmcEntity; } /** * This method initializes the attributes into MultiModelCategory's DE entity. * * @param mmcEntity */ private void initAttributes(EntityInterface mmcEntity) { assert mmcBean != null; assert mmcBean.getMultiModelAttributes() != null; Collection<MultiModelAttributeBean> mmaBeans = mmcBean.getMultiModelAttributes(); for (MultiModelAttributeBean mmaBean : mmaBeans) { AttributeInterface deAttribute = MmcAttributeGenerator.getAttribute(mmaBean); deAttribute.setName(mmaBean.getName()); deAttribute.setDescription(mmaBean.getDescription()); mmcEntity.addAttribute(deAttribute); MultiModelAttribute multiModelAttribute = new MultiModelAttributeImpl(); multiModelAttribute.setVisible(mmaBean.isVisible()); multiModelAttribute.setAttribute(deAttribute); Collection<AttributeInterface> selectedMMCAttributes = mmaBean.getSelectedAttributes(); Collection<Long> selectedAttributesIds = new ArrayList<Long>(selectedMMCAttributes.size()); for (AttributeInterface selectedAttribute : selectedMMCAttributes) { selectedAttributesIds.add(selectedAttribute.getId()); InputCategorialAttribute inputCatagorialAttribute = new InputCategorialAttribute(); StringBuffer displayName = new StringBuffer(mmaBean.getName()).append('_').append(selectedAttribute.getName()); inputCatagorialAttribute.setDisplayName(displayName.toString()); inputCatagorialAttribute.setDynamicExtAttribute(selectedAttribute); EntityInterface entity = selectedAttribute.getEntity(); Collection<InputCategorialAttribute> inputCatagorialAttributes = entityInputCatagorialAttributeMap.get(entity); if (inputCatagorialAttributes == null) { inputCatagorialAttributes = new ArrayList<InputCategorialAttribute>(); entityInputCatagorialAttributeMap.put(entity, inputCatagorialAttributes); } inputCatagorialAttributes.add(inputCatagorialAttribute); } multiModelAttributeToSelectedAttributeIds.put(multiModelAttribute, selectedAttributesIds); 
multiModelAttributes.add(multiModelAttribute); } } /** * This method retrieves the MMC_Entity_Group if exists otherwise creates a new one. * * @return */ private EntityGroupInterface getMMCEntityGroup() { EntityGroupInterface entityGroup = null; try { entityGroup = EntityManager.getInstance().getEntityGroupByName(MMC_ENTITY_GROUP_NAME); } catch (DynamicExtensionsSystemException e) { throw new RuntimeException( "Got System exception from Dynamic Extension while fetching category entity group", e, ErrorCodeConstants.DB_0001); } catch (DynamicExtensionsApplicationException e) { throw new RuntimeException( "Got System exception from Dynamic Extension while fetching category entity group", e, ErrorCodeConstants.DB_0001); } if (entityGroup == null) { entityGroup = DynamicExtensionUtility.createEntityGroup(); entityGroup.setShortName(MMC_ENTITY_GROUP_NAME); entityGroup.setName(MMC_ENTITY_GROUP_NAME); entityGroup.setLongName(MMC_ENTITY_GROUP_NAME); entityGroup.setDescription(MMC_ENTITY_GROUP_NAME); DynamicExtensionUtility.markMetadataEntityGroup(entityGroup); entityGroup = DynamicExtensionUtility.persistEntityGroup(entityGroup); } return entityGroup; } /** * This method generates input categories which are needed for saving actual categories * * @return */ private Collection<InputCategory> createInputCategories() { assert entities != null; Collection<InputCategory> inputCategories = new ArrayList<InputCategory>(entities.size()); for (EntityInterface rootEntity : entities) { //Create InputCategorialClass InputCategorialClass rootClass = new InputCategorialClass(); rootClass.setAttributeList(new ArrayList<InputCategorialAttribute>(entityInputCatagorialAttributeMap .get(rootEntity))); rootClass.setChildren(processOutPaths(rootEntity)); rootClass.setPathFromParent(-1); //Create InputCategory InputCategory inputCategory = new InputCategory(); StringBuffer name = new StringBuffer(mmcBean.getName()).append('_').append(rootEntity.getName()); inputCategory.setName(name.toString()); 
StringBuffer description = new StringBuffer(mmcBean.getDescription()).append('_').append(rootEntity.getDescription()); inputCategory.setDescription(description.toString()); inputCategory.setRootCategorialClass(rootClass); inputCategory.setSubCategories(new ArrayList<InputCategory>()); inputCategories.add(inputCategory); } return inputCategories; } /** * This method recursively instantiate complete hierarchy for CategorialClass. */ private InputCategorialClass createInputCategorialClass(IPath outPath) { EntityInterface targetEntity = outPath.getTargetEntity(); InputCategorialClass inputCategorialClass = new InputCategorialClass(); inputCategorialClass.setPathFromParent(outPath.getPathId()); Collection<InputCategorialAttribute> catAttributes = entityInputCatagorialAttributeMap.get(targetEntity); ArrayList<InputCategorialAttribute> inputCatAttrs = new ArrayList<InputCategorialAttribute>(catAttributes); inputCategorialClass.setAttributeList(inputCatAttrs); inputCategorialClass.setChildren(processOutPaths(targetEntity)); return inputCategorialClass; } /** * This method processes out going paths * * @param sourceEntity * @return */ private List<InputCategorialClass> processOutPaths(EntityInterface sourceEntity) { assert sourceEntityToPaths != null; List<InputCategorialClass> childrenICCs = new ArrayList<InputCategorialClass>(); Collection<IPath> outPaths = sourceEntityToPaths.get(sourceEntity); if (outPaths != null) { for (IPath path : outPaths) { InputCategorialClass iccChild = createInputCategorialClass(path); childrenICCs.add(iccChild); } } return childrenICCs; } /** * This method initializes Entity->Paths map and returns the set of target entities. 
* * @return */ private Set<EntityInterface> initEntityPathMapAndGetTargetEntities() { assert entities != null; Set<EntityInterface> targetEntities = new HashSet<EntityInterface>(); Collection<IPath> paths = mmcBean.getPaths(); for (IPath path : paths) { EntityInterface source = path.getSourceEntity(); EntityInterface target = path.getTargetEntity(); targetEntities.add(target); Collection<IPath> associationPaths = sourceEntityToPaths.get(source); if (associationPaths == null) { associationPaths = new ArrayList<IPath>(); sourceEntityToPaths.put(source, associationPaths); } associationPaths.add(path); } return targetEntities; } /** * This method takes the path where where all the multi-model category XML files are stored and convert them to * MultiModelCategory objects to saves them into database. * * @param args * @throws IOException */ public static void main(String[] args) throws IOException { if (args == null || args.length == 0) { throw new java.lang.RuntimeException( "Please specify the path to directory where multi model category XMLs are stored."); } else if (!(new File(args[0])).isDirectory()) { throw new IllegalArgumentException("The directory specified in invalid. 
Please correct the path."); } PathFinder.getInstance(DBUtil.getConnection()); MultiModelCategoryOperations operations = new MultiModelCategoryOperations(); //save in database File mmcDir = new File(args[0]); File[] mmcFiles = mmcDir.listFiles(); MultiModelCategoryXmlParser parser = new MultiModelCategoryXmlParser(); for (File mmcFile : mmcFiles) { try { MultiModelCategoryBean mmcBean = parser.getMultiModelCategory(mmcFile); MultiModelCategory mmCategory = new PersistMultiModelCategory().persistMMC(mmcBean); operations.saveMultiModelCategory(mmCategory); } catch (Exception e) { System.out.println("Cannot parse and load " + mmcFile.getAbsolutePath() + "\nRoot cause: " + e.getMessage()); } } //delete from database //MultiModelCategory mmCategory = operations.getMultiModelCategoryById(1L); //System.out.println(mmCategory.getEntity().getName()); //operations.deleteMultiModelCategory(mmCategory); } }
package org.osmdroid.google.wrapper.v2;

import android.graphics.Canvas;
import android.graphics.drawable.BitmapDrawable;
import android.view.MotionEvent;

import org.osmdroid.api.IGeoPoint;
import org.osmdroid.api.IMap;
import org.osmdroid.api.IPosition;
import org.osmdroid.api.IProjection;
import org.osmdroid.api.Marker;
import org.osmdroid.api.OnCameraChangeListener;
import org.osmdroid.api.Polyline;
import org.osmdroid.util.GeoPoint;
import org.osmdroid.views.MapView;
import org.osmdroid.views.overlay.ItemizedIconOverlay;
import org.osmdroid.views.overlay.ItemizedOverlayWithFocus;
import org.osmdroid.views.overlay.Overlay;
import org.osmdroid.views.overlay.OverlayItem;
import org.osmdroid.views.overlay.PathOverlay;
import org.osmdroid.views.overlay.mylocation.MyLocationNewOverlay;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Random;

/**
 * Adapts an osmdroid {@link MapView} to the generic {@link IMap} interface.
 * Markers and polylines are created lazily on first use.
 */
@Deprecated
class OsmdroidMapWrapper implements IMap {
    private final MapView mMapView;
    private MyLocationNewOverlay mMyLocationOverlay;
    private ItemizedOverlayWithFocus<OverlayItem> mItemizedOverlay;
    /** Polyline overlays, keyed by the random id handed back from {@link #addPolyline}. */
    private HashMap<Integer, PathOverlay> mPolylines;
    private OnCameraChangeListener mOnCameraChangeListener;
    private static final Random random = new Random();

    OsmdroidMapWrapper(final MapView aMapView) {
        mMapView = aMapView;
        // Invisible overlay whose only job is to detect the end of a touch
        // gesture so we can fire the camera-change callback.
        mMapView.getOverlays().add(new Overlay(mMapView.getContext()) {
            @Override
            public void draw(final Canvas c, final MapView osmv, final boolean shadow) {
                // nothing to draw
            }

            @Override
            public boolean onTouchEvent(final MotionEvent aMotionEvent, final MapView aMapView) {
                if (aMotionEvent.getAction() == MotionEvent.ACTION_UP) {
                    onCameraChange();
                }
                return super.onTouchEvent(aMotionEvent, aMapView);
            }
        });
    }

    @Override
    public float getZoomLevel() {
        return (float) mMapView.getZoomLevelDouble();
    }

    @Override
    public void setZoom(final float aZoomLevel) {
        mMapView.getController().setZoom(aZoomLevel);
    }

    @Override
    public IGeoPoint getCenter() {
        return mMapView.getMapCenter();
    }

    @Override
    public void setCenter(final double aLatitude, final double aLongitude) {
        mMapView.getController().setCenter(new GeoPoint(aLatitude, aLongitude));
        onCameraChange();
    }

    @Override
    public float getBearing() {
        // osmdroid orientation is counter-clockwise; IMap bearing is clockwise.
        return -mMapView.getMapOrientation();
    }

    @Override
    public void setBearing(final float aBearing) {
        mMapView.setMapOrientation(-aBearing);
    }

    @Override
    public void setPosition(final IPosition aPosition) {
        if (aPosition.hasBearing()) {
            setBearing(aPosition.getBearing());
        }
        if (aPosition.hasZoomLevel()) {
            setZoom(aPosition.getZoomLevel());
        }
        setCenter(aPosition.getLatitude(), aPosition.getLongitude());
    }

    @Override
    public boolean zoomIn() {
        return mMapView.getController().zoomIn();
    }

    @Override
    public boolean zoomOut() {
        return mMapView.getController().zoomOut();
    }

    @Override
    public void setMyLocationEnabled(final boolean aEnabled) {
        if (aEnabled) {
            if (mMyLocationOverlay == null) {
                mMyLocationOverlay = new MyLocationNewOverlay(mMapView);
                mMapView.getOverlays().add(mMyLocationOverlay);
            }
            mMyLocationOverlay.enableMyLocation();
        }
        if (!aEnabled && mMyLocationOverlay != null) {
            mMyLocationOverlay.disableMyLocation();
        }
    }

    @Override
    public boolean isMyLocationEnabled() {
        return mMyLocationOverlay != null && mMyLocationOverlay.isMyLocationEnabled();
    }

    @Override
    public IProjection getProjection() {
        return mMapView.getProjection();
    }

    @Override
    public void addMarker(final Marker aMarker) {
        if (mItemizedOverlay == null) {
            // XXX this is a bit cumbersome. Maybe we should just do a simple
            // ItemizedIconOverlay with null listener
            mItemizedOverlay = new ItemizedOverlayWithFocus<OverlayItem>(new ArrayList<OverlayItem>(),
                    new ItemizedIconOverlay.OnItemGestureListener<OverlayItem>() {
                        @Override
                        public boolean onItemSingleTapUp(final int index, final OverlayItem item) {
                            return false;
                        }

                        @Override
                        public boolean onItemLongPress(final int index, final OverlayItem item) {
                            return false;
                        }
                    }, this.mMapView.getContext());
            mItemizedOverlay.setFocusItemsOnTap(true);
            mMapView.getOverlays().add(mItemizedOverlay);
        }
        final OverlayItem item = new OverlayItem(aMarker.title, aMarker.snippet,
                new GeoPoint(aMarker.latitude, aMarker.longitude));
        if (aMarker.bitmap != null) {
            item.setMarker(new BitmapDrawable(mMapView.getResources(), aMarker.bitmap));
        } else {
            if (aMarker.icon != 0) {
                item.setMarker(mMapView.getResources().getDrawable(aMarker.icon));
            }
        }
        if (aMarker.anchor == Marker.Anchor.CENTER) {
            item.setMarkerHotspot(OverlayItem.HotspotPlace.CENTER);
        }
        mItemizedOverlay.addItem(item);
    }

    @Override
    public int addPolyline(final Polyline aPolyline) {
        if (mPolylines == null) {
            mPolylines = new HashMap<Integer, PathOverlay>();
        }
        final PathOverlay overlay = new PathOverlay(aPolyline.color, aPolyline.width,
                mMapView.getContext());
        overlay.addPoints(aPolyline.points);
        mMapView.getOverlays().add(0, overlay); // add polyline overlay below markers, etc
        final int id = random.nextInt();
        mPolylines.put(id, overlay);
        return id;
    }

    @Override
    public void addPointsToPolyline(final int id, final IGeoPoint... aPoints) {
        getPolyline(id).addPoints(aPoints);
    }

    @Override
    public void clearPolyline(final int id) {
        final PathOverlay polyline = getPolyline(id);
        mMapView.getOverlays().remove(polyline);
        mPolylines.remove(id);
    }

    /**
     * Looks up a polyline overlay by id.
     *
     * @throws IllegalArgumentException if no polyline with that id exists
     */
    private PathOverlay getPolyline(final int id) {
        if (mPolylines == null) {
            throw new IllegalArgumentException("No such id");
        }
        final PathOverlay polyline = mPolylines.get(id);
        if (polyline == null) {
            throw new IllegalArgumentException("No such id");
        }
        return polyline;
    }

    @Override
    public void clear() {
        if (mItemizedOverlay != null) {
            mItemizedOverlay.removeAllItems();
        }
        if (mPolylines != null) {
            // BUGFIX: the previous code called mPolylines.remove(polyline) with a
            // *value* on a map keyed by Integer; that always returned null, so
            // getOverlays().remove(null) never detached any polyline from the map.
            for (final PathOverlay polyline : mPolylines.values()) {
                mMapView.getOverlays().remove(polyline);
            }
            mPolylines.clear();
        }
    }

    @Override
    public void setOnCameraChangeListener(final OnCameraChangeListener aListener) {
        mOnCameraChangeListener = aListener;
    }

    private void onCameraChange() {
        if (mOnCameraChangeListener != null) {
            mOnCameraChangeListener.onCameraChange(null); // TODO set the parameter
        }
    }
}
/**
 * Copyright 2013 isandlaTech
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.cohorte.remote.multicast.utils;

import java.io.IOException;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
import java.net.ProtocolFamily;
import java.net.SocketAddress;
import java.net.SocketException;
import java.net.StandardProtocolFamily;
import java.net.StandardSocketOptions;
import java.nio.ByteBuffer;
import java.nio.channels.DatagramChannel;
import java.nio.channels.MembershipKey;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;

import org.osgi.service.log.LogService;

/**
 * A multicast receiver
 *
 * @author Thomas Calmant
 *
 * @see <a
 *      href="http://atastypixel.com/blog/the-making-of-talkie-multi-interface-broadcasting-and-multicast/">The
 *      Making of Talkie: Multi-interface broadcasting and multicast</a>
 */
public class MulticastHandler {

    /** Reception buffer size */
    private static final int BUFFER_SIZE = 1500;

    /** The bundle name */
    private static final String BUNDLE_NAME = "org.cohorte.remote.multicast";

    /** The multicast group */
    private final InetAddress pAddress;

    /** Channel */
    private DatagramChannel pChannel;

    /** The listeners invocation thread "pool" */
    private ExecutorService pExecutor;

    /** Joined group on different interfaces */
    private final List<MembershipKey> pJoinedGroups = new LinkedList<>();

    /** The packet listener */
    private final IPacketListener pListener;

    /** An associated log service */
    private LogService pLogger;

    /** The multicast port */
    private final int pPort;

    /** The listening thread */
    private Thread pThread;

    /** The thread loop control */
    private boolean pThreadRun;

    /**
     * Sets up the receiver
     *
     * @param aListener
     *            The multicast packets listener
     * @param aAddress
     *            A multicast group address
     * @param aPort
     *            A socket port
     */
    public MulticastHandler(final IPacketListener aListener,
            final InetAddress aAddress, final int aPort) {

        pListener = aListener;
        pAddress = aAddress;
        pPort = aPort;
    }

    /**
     * Leaves the group and closes the multicast socket.
     *
     * @throws IOException
     *             Error reading the address or leaving the group
     */
    private void closeMulticast() throws IOException {

        if (pChannel == null) {
            // Nothing to do
            return;
        }

        try {
            // Leave the group, on all interfaces
            for (final MembershipKey key : pJoinedGroups) {
                key.drop();
            }

        } finally {
            // Forget the dropped memberships so a later start() doesn't keep
            // stale keys around, then close the socket
            pJoinedGroups.clear();
            pChannel.close();
        }
    }

    /**
     * Retrieves all network interfaces that supports multicast
     *
     * @return All network interfaces that supports multicast
     * @throws SocketException
     *             An error occurred retrieving network interfaces
     */
    private NetworkInterface[] getMulticastInterfaces() throws SocketException {

        final List<NetworkInterface> multicastItfs = new ArrayList<NetworkInterface>();

        // Loop over all interfaces
        final Enumeration<NetworkInterface> itfEnum = NetworkInterface
                .getNetworkInterfaces();
        while (itfEnum.hasMoreElements()) {
            final NetworkInterface itf = itfEnum.nextElement();
            try {
                if (itf.supportsMulticast()) {
                    // Multicast is supported
                    multicastItfs.add(itf);
                }

            } catch (final SocketException ex) {
                // A single broken interface must not hide the others
                log(LogService.LOG_WARNING,
                        "Error testing if an interface supports Multicast", ex);
            }
        }

        return multicastItfs
                .toArray(new NetworkInterface[multicastItfs.size()]);
    }

    /**
     * Logs an entry if a logger is present, else prints on the error output
     *
     * @param aLevel
     *            Log level
     * @param aMessage
     *            Log entry
     */
    private void log(final int aLevel, final String aMessage) {

        log(aLevel, aMessage, null);
    }

    /**
     * Logs an entry if a logger is present, else prints on the error output
     *
     * @param aLevel
     *            Log level
     * @param aMessage
     *            Log entry
     * @param aThrowable
     *            Associated error
     */
    private void log(final int aLevel, final String aMessage,
            final Throwable aThrowable) {

        if (pLogger != null) {
            // Use the log service
            pLogger.log(aLevel, aMessage, aThrowable);

        } else {
            // Use a Java logger
            // Convert the OSGi level to a java.util.logging one
            Level level;
            switch (aLevel) {
            case LogService.LOG_ERROR:
                level = Level.SEVERE;
                break;

            case LogService.LOG_WARNING:
                level = Level.WARNING;
                break;

            case LogService.LOG_INFO:
                level = Level.INFO;
                break;

            case LogService.LOG_DEBUG:
            default:
                level = Level.FINE;
                break;
            }

            // Log a record
            final LogRecord record = new LogRecord(level, aMessage);
            record.setThrown(aThrowable);
            Logger.getLogger(BUNDLE_NAME).log(record);
        }
    }

    /**
     * Waits for packets on the multicast socket
     */
    private void receivePackets() {

        while (pThreadRun) {
            // Set up the buffer
            final ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE);

            try {
                // Reset the buffer
                buffer.clear();

                // Wait for a packet (blocking)
                final SocketAddress sender = pChannel.receive(buffer);
                if (!(sender instanceof InetSocketAddress)) {
                    // Unhandled kind of address, try next time
                    log(LogService.LOG_WARNING,
                            "Unhandled kind of socket address: "
                                    + sender.getClass().getName());
                    continue;
                }

                // Get the sender address
                final InetSocketAddress senderAddress = (InetSocketAddress) sender;

                // Extract the content of the packet
                final byte[] content = new byte[buffer.position()];

                // Reset the position, else we would read past the content
                // that has just been written into the buffer
                buffer.position(0);
                buffer.get(content);

                // Call the listener in a separate thread
                if (pListener != null) {
                    pExecutor.submit(new Runnable() {

                        @Override
                        public void run() {

                            try {
                                pListener.handlePacket(senderAddress, content);

                            } catch (final Exception e) {
                                // Let the listener handle its own exception
                                pListener.handleError(e);
                            }
                        }
                    });
                }

            } catch (final Exception ex) {
                // Call the listener only if the thread is still running...
                if (pThreadRun && pListener != null
                        && !pListener.handleError(ex)) {
                    // Listener told us to stop
                    break;
                }
            }
        }
    }

    /**
     * Sends the given packet to the multicast group
     *
     * @param aData
     *            Data to be sent
     * @throws IOException
     *             Error sending the packet
     */
    public void send(final byte[] aData) throws IOException {

        send(aData, pAddress, pPort);
    }

    /**
     * Sends the given packet to the given address
     *
     * @param aData
     *            Data to be sent
     * @param aAddress
     *            Target address
     * @param aPort
     *            Target port
     * @throws IOException
     *             Error sending the packet
     */
    public void send(final byte[] aData, final InetAddress aAddress,
            final int aPort) throws IOException {

        // Send the datagram
        pChannel.send(ByteBuffer.wrap(aData), new InetSocketAddress(aAddress,
                aPort));
    }

    /**
     * Sets the log service to use
     *
     * @param aLogService
     *            A log service instance, or null
     */
    public void setLogger(final LogService aLogService) {

        pLogger = aLogService;
    }

    /**
     * Sets up the multicast channel
     *
     * @throws IOException
     *             Something wrong occurred (bad address, bad port, ...)
     */
    private void setupMulticast() throws IOException {

        // Compute the address family
        final ProtocolFamily family;
        if (pAddress instanceof Inet4Address) {
            // IPv4
            family = StandardProtocolFamily.INET;

        } else if (pAddress instanceof Inet6Address) {
            // IPv6
            family = StandardProtocolFamily.INET6;

        } else {
            // Unknown
            throw new SocketException("Unknown multicast group family");
        }

        // Create the UDP channel
        pChannel = DatagramChannel.open(family);
        pChannel.setOption(StandardSocketOptions.SO_REUSEADDR, true);
        pChannel.bind(new InetSocketAddress(pPort));

        try {
            // Join the group on all interfaces
            for (final NetworkInterface itf : getMulticastInterfaces()) {
                pJoinedGroups.add(pChannel.join(pAddress, itf));
            }

        } catch (final SocketException ex) {
            // Be nice...
            pChannel.close();
            throw ex;
        }
    }

    /**
     * Creates the multicast socket and starts the listening thread
     *
     * @return True if the socket was created, false if it was already opened
     * @throws IOException
     *             Error creating the socket
     */
    public boolean start() throws IOException {

        if (pChannel != null) {
            return false;
        }

        setupMulticast();
        startThread();
        return true;
    }

    /**
     * Starts the listening thread
     */
    private void startThread() {

        if (pThread != null) {
            stopThread();
        }

        // Prepare the thread object
        pThread = new Thread(new Runnable() {

            @Override
            public void run() {

                receivePackets();
            }
        });

        // Start the listener notifier
        pExecutor = Executors.newFixedThreadPool(1);

        // Start the thread
        pThreadRun = true;
        pThread.start();
    }

    /**
     * Stops the multicast receiver
     *
     * @throws IOException
     *             Error closing the socket
     */
    public void stop() throws IOException {

        // Stop the thread
        stopThread();

        // Stop the notifying thread (guard against stop() before start(),
        // or a double stop(): pExecutor is only created in startThread())
        if (pExecutor != null) {
            pExecutor.shutdownNow();
            pExecutor = null;
        }

        // Close the socket
        closeMulticast();
        pChannel = null;
    }

    /**
     * Stops the listening thread
     */
    private void stopThread() {

        pThreadRun = false;
        if (pThread == null) {
            return;
        }

        pThread.interrupt();

        // Wait for it a little
        try {
            pThread.join(500);

        } catch (final InterruptedException e) {
            // Ignore
        }

        // Delete the reference
        pThread = null;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter.Predicate;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.replication.regionserver.TestRegionReplicaReplication;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests failover of secondary region replicas.
 */
@Category(LargeTests.class)
public class TestRegionReplicaFailover {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestRegionReplicaFailover.class);

  // BUGFIX: the logger was created with TestRegionReplicaReplication.class
  // (copy-paste), attributing every log line of this test to the wrong class.
  private static final Logger LOG = LoggerFactory.getLogger(TestRegionReplicaFailover.class);

  private static final HBaseTestingUtil HTU = new HBaseTestingUtil();

  private static final int NB_SERVERS = 3;

  protected final byte[][] families =
    new byte[][] { HBaseTestingUtil.fam1, HBaseTestingUtil.fam2, HBaseTestingUtil.fam3 };
  protected final byte[] fam = HBaseTestingUtil.fam1;
  protected final byte[] qual1 = Bytes.toBytes("qual1");
  protected final byte[] value1 = Bytes.toBytes("value1");
  protected final byte[] row = Bytes.toBytes("rowA");
  protected final byte[] row2 = Bytes.toBytes("rowB");

  @Rule
  public TestName name = new TestName();

  private TableDescriptor htd;

  @Before
  public void before() throws Exception {
    Configuration conf = HTU.getConfiguration();
    // Up the handlers; this test needs more than usual.
    conf.setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10);
    conf.setBoolean(ServerRegionReplicaUtil.REGION_REPLICA_REPLICATION_CONF_KEY, true);
    conf.setBoolean(ServerRegionReplicaUtil.REGION_REPLICA_WAIT_FOR_PRIMARY_FLUSH_CONF_KEY, true);
    conf.setInt("replication.stats.thread.period.seconds", 5);
    conf.setBoolean("hbase.tests.use.shortcircuit.reads", false);

    HTU.startMiniCluster(NB_SERVERS);
    htd = HTU.createModifyableTableDescriptor(
      TableName.valueOf(name.getMethodName().substring(0, name.getMethodName().length() - 3)),
      ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
      ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED).setRegionReplication(3).build();
    HTU.getAdmin().createTable(htd);
  }

  @After
  public void after() throws Exception {
    HTU.deleteTableIfAny(htd.getTableName());
    HTU.shutdownMiniCluster();
  }

  /**
   * Tests the case where a newly created table with region replicas and no data, the secondary
   * region replicas are available to read immediately.
   */
  @Test
  public void testSecondaryRegionWithEmptyRegion() throws IOException {
    // Create a new table with region replication, don't put any data. Test that the secondary
    // region replica is available to read.
    try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
      Table table = connection.getTable(htd.getTableName())) {

      Get get = new Get(row);
      get.setConsistency(Consistency.TIMELINE);
      get.setReplicaId(1);
      table.get(get); // this should not block
    }
  }

  /**
   * Tests the case where if there is some data in the primary region, reopening the region replicas
   * (enable/disable table, etc) makes the region replicas readable.
   */
  @Test
  public void testSecondaryRegionWithNonEmptyRegion() throws IOException {
    // Create a new table with region replication and load some data
    // than disable and enable the table again and verify the data from secondary
    try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
      Table table = connection.getTable(htd.getTableName())) {

      HTU.loadNumericRows(table, fam, 0, 1000);

      HTU.getAdmin().disableTable(htd.getTableName());
      HTU.getAdmin().enableTable(htd.getTableName());

      HTU.verifyNumericRows(table, fam, 0, 1000, 1);
    }
  }

  /**
   * Tests the case where killing a primary region with unflushed data recovers
   */
  @Test
  public void testPrimaryRegionKill() throws Exception {
    try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
      Table table = connection.getTable(htd.getTableName())) {

      HTU.loadNumericRows(table, fam, 0, 1000);

      // wal replication is async, we have to wait until the replication catches up, or we timeout
      verifyNumericRowsWithTimeout(table, fam, 0, 1000, 1, 30000);
      verifyNumericRowsWithTimeout(table, fam, 0, 1000, 2, 30000);

      // we should not have flushed files now, but data in memstores of primary and secondary
      // kill the primary region replica now, and ensure that when it comes back up, we can still
      // read from it the same data from primary and secondaries
      boolean aborted = false;
      for (RegionServerThread rs : HTU.getMiniHBaseCluster().getRegionServerThreads()) {
        for (Region r : rs.getRegionServer().getRegions(htd.getTableName())) {
          if (r.getRegionInfo().getReplicaId() == 0) {
            LOG.info("Aborting region server hosting primary region replica");
            rs.getRegionServer().abort("for test");
            aborted = true;
            break;
          }
        }
      }
      assertTrue(aborted);

      // wal replication is async, we have to wait until the replication catches up, or we timeout
      verifyNumericRowsWithTimeout(table, fam, 0, 1000, 0, 30000);
      verifyNumericRowsWithTimeout(table, fam, 0, 1000, 1, 30000);
      verifyNumericRowsWithTimeout(table, fam, 0, 1000, 2, 30000);
    }

    // restart the region server
    HTU.getMiniHBaseCluster().startRegionServer();
  }

  /** wal replication is async, we have to wait until the replication catches up, or we timeout */
  private void verifyNumericRowsWithTimeout(final Table table, final byte[] f, final int startRow,
    final int endRow, final int replicaId, final long timeout) throws Exception {
    try {
      HTU.waitFor(timeout, new Predicate<Exception>() {
        @Override
        public boolean evaluate() throws Exception {
          try {
            HTU.verifyNumericRows(table, f, startRow, endRow, replicaId);
            return true;
          } catch (AssertionError ae) {
            return false;
          }
        }
      });
    } catch (Throwable t) {
      // ignore this, but redo the verify do get the actual exception
      HTU.verifyNumericRows(table, f, startRow, endRow, replicaId);
    }
  }

  /**
   * Tests the case where killing a secondary region with unflushed data recovers, and the replica
   * becomes available to read again shortly.
   */
  @Test
  public void testSecondaryRegionKill() throws Exception {
    try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
      Table table = connection.getTable(htd.getTableName())) {
      HTU.loadNumericRows(table, fam, 0, 1000);

      // wait for some time to ensure that async wal replication does it's magic
      verifyNumericRowsWithTimeout(table, fam, 0, 1000, 1, 30000);
      verifyNumericRowsWithTimeout(table, fam, 0, 1000, 2, 30000);

      // we should not have flushed files now, but data in memstores of primary and secondary
      // kill the secondary region replica now, and ensure that when it comes back up, we can still
      // read from it the same data
      boolean aborted = false;
      for (RegionServerThread rs : HTU.getMiniHBaseCluster().getRegionServerThreads()) {
        for (Region r : rs.getRegionServer().getRegions(htd.getTableName())) {
          if (r.getRegionInfo().getReplicaId() == 1) {
            LOG.info("Aborting region server hosting secondary region replica");
            rs.getRegionServer().abort("for test");
            aborted = true;
            break;
          }
        }
      }
      assertTrue(aborted);

      // It takes extra time for replica region is ready for read as during
      // region open process, it needs to ask primary region to do a flush and replica region
      // can open newly flushed hfiles to avoid data out-of-sync.
      verifyNumericRowsWithTimeout(table, fam, 0, 1000, 1, 30000);
      HTU.verifyNumericRows(table, fam, 0, 1000, 2);
    }

    // restart the region server
    HTU.getMiniHBaseCluster().startRegionServer();
  }

  /**
   * Tests the case where there are 3 region replicas and the primary is continuously accepting
   * new writes while one of the secondaries is killed. Verification is done for both of the
   * secondary replicas.
   */
  @Test
  public void testSecondaryRegionKillWhilePrimaryIsAcceptingWrites() throws Exception {
    try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
      Table table = connection.getTable(htd.getTableName());
      Admin admin = connection.getAdmin()) {
      // start a thread to do the loading of primary
      HTU.loadNumericRows(table, fam, 0, 1000); // start with some base
      admin.flush(table.getName());
      HTU.loadNumericRows(table, fam, 1000, 2000);

      final AtomicReference<Throwable> ex = new AtomicReference<>(null);
      final AtomicBoolean done = new AtomicBoolean(false);
      final AtomicInteger key = new AtomicInteger(2000);

      Thread loader = new Thread() {
        @Override
        public void run() {
          while (!done.get()) {
            try {
              HTU.loadNumericRows(table, fam, key.get(), key.get() + 1000);
              key.addAndGet(1000);
            } catch (Throwable e) {
              ex.compareAndSet(null, e);
            }
          }
        }
      };
      loader.start();

      Thread aborter = new Thread() {
        @Override
        public void run() {
          try {
            boolean aborted = false;
            for (RegionServerThread rs : HTU.getMiniHBaseCluster().getRegionServerThreads()) {
              for (Region r : rs.getRegionServer().getRegions(htd.getTableName())) {
                if (r.getRegionInfo().getReplicaId() == 1) {
                  LOG.info("Aborting region server hosting secondary region replica");
                  rs.getRegionServer().abort("for test");
                  aborted = true;
                }
              }
            }
            assertTrue(aborted);
          } catch (Throwable e) {
            ex.compareAndSet(null, e);
          }
        }
      };

      aborter.start();
      aborter.join();
      done.set(true);
      loader.join();

      assertNull(ex.get());

      assertTrue(key.get() > 1000); // assert that the test is working as designed
      LOG.info("Loaded up to key :" + key.get());
      verifyNumericRowsWithTimeout(table, fam, 0, key.get(), 0, 30000);
      verifyNumericRowsWithTimeout(table, fam, 0, key.get(), 1, 30000);
      verifyNumericRowsWithTimeout(table, fam, 0, key.get(), 2, 30000);
    }

    // restart the region server
    HTU.getMiniHBaseCluster().startRegionServer();
  }

  /**
   * Tests the case where we are creating a table with a lot of regions and replicas. Opening region
   * replicas should not block handlers on RS indefinitely.
   */
  @Test
  public void testLotsOfRegionReplicas() throws IOException {
    int numRegions = NB_SERVERS * 20;
    int regionReplication = 10;
    String tableName = htd.getTableName().getNameAsString() + "2";
    htd = HTU
      .createModifyableTableDescriptor(TableName.valueOf(tableName),
        ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
        ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED)
      .setRegionReplication(regionReplication).build();

    // dont care about splits themselves too much
    byte[] startKey = Bytes.toBytes("aaa");
    byte[] endKey = Bytes.toBytes("zzz");
    byte[][] splits = HTU.getRegionSplitStartKeys(startKey, endKey, numRegions);
    HTU.getAdmin().createTable(htd, startKey, endKey, numRegions);

    try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
      Table table = connection.getTable(htd.getTableName())) {

      for (int i = 1; i < splits.length; i++) {
        for (int j = 0; j < regionReplication; j++) {
          Get get = new Get(splits[i]);
          get.setConsistency(Consistency.TIMELINE);
          get.setReplicaId(j);
          table.get(get); // this should not block. Regions should be coming online
        }
      }
    }

    HTU.deleteTableIfAny(TableName.valueOf(tableName));
  }
}
/*
 * Copyright 2013-2014 S. Webber
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.projog.core.term;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.projog.TestUtils.assertStrictEquality;
import static org.projog.TermFactory.atom;
import static org.projog.TermFactory.decimalFraction;
import static org.projog.TermFactory.integerNumber;
import static org.projog.TermFactory.list;
import static org.projog.TermFactory.structure;
import static org.projog.TermFactory.variable;

import java.util.HashMap;
import java.util.Map;

import org.junit.Test;

/**
 * Test implementations of {@link Term}
 * <p>
 * As so much of the tests are about interactions between different classes of Terms it was decided to have a generic
 * TermTest class to test generic behaviour and have only specific behaviour tested in separate test classes specific to
 * a particular Term implementation.
 *
 * @see AtomTest
 * @see DecimalFractionTest
 * @see EmptyListTest
 * @see IntegerNumberTest
 * @see ListTest
 * @see StructureTest
 * @see VariableTest
 */
public class TermTest {
   private static final Term[] IMMUTABLE_TERMS = {
               atom("a"),
               atom("b"),
               atom("c"),
               atom("A"),
               atom("B"),
               atom("C"),
               atom("abc"),
               atom("ABC"),
               atom("AbC"),
               atom("0"),
               atom("1"),
               atom("-1"),
               atom("[]"),
               integerNumber(0),
               integerNumber(1),
               integerNumber(-1),
               integerNumber(Integer.MIN_VALUE),
               integerNumber(Integer.MAX_VALUE),
               decimalFraction(0),
               decimalFraction(1),
               decimalFraction(-1),
               decimalFraction(0.0001),
               decimalFraction(-0.0001),
               decimalFraction(Double.MIN_VALUE),
               decimalFraction(Double.MAX_VALUE),
               structure("abc", atom()),
               structure("abc", atom(), atom()),
               structure("ABC", atom()),
               structure("ABC", atom(), atom()),
               structure("1", atom()),
               structure("1", atom(), atom()),
               list(atom(), atom()),
               list(atom(), atom(), atom()),
               list(atom("a"), integerNumber(1), decimalFraction(1), structure("abc", atom())),
               EmptyList.EMPTY_LIST};

   /** check both unify and strictEquality methods against various immutable Terms */
   @Test
   public void testUnifyAndStrictEquality() {
      for (Term t1 : IMMUTABLE_TERMS) {
         for (Term t2 : IMMUTABLE_TERMS) {
            // an immutable term unifies with, and is strictly equal to, only itself
            assertUnify(t1, t2, t1 == t2);
            assertStrictEquality(t1, t2, t1 == t2);
         }
      }
   }

   /** check calling copy() on an immutable Term returns the Term */
   @Test
   public void testCopy() {
      for (Term t1 : IMMUTABLE_TERMS) {
         Map<Variable, Variable> sharedVariables = new HashMap<>();
         Term t2 = t1.copy(sharedVariables);
         assertSame(t1, t2);
         assertTrue(sharedVariables.isEmpty());
      }
   }

   /** check calling getValue() on an immutable Term returns the Term */
   @Test
   public void testGetValue() {
      for (Term t1 : IMMUTABLE_TERMS) {
         Term t2 = t1.getTerm();
         assertSame(t1, t2);
      }
   }

   @Test
   public void testIsImmutable() {
      for (Term element : IMMUTABLE_TERMS) {
         assertTrue(element.isImmutable());
      }
   }

   /** check calling backtrack() has no effect on an immutable Term */
   @Test
   public void testBacktrack() {
      for (Term t : IMMUTABLE_TERMS) {
         // keep track of the Term's current properties
         TermType originalType = t.getType();
         int originalNumberOfArguments = t.getNumberOfArguments();
         String originalToString = t.toString();

         // perform the backtrack()
         t.backtrack();

         // check properties are the same as prior to the backtrack()
         assertSame(originalType, t.getType());
         // BUGFIX: was assertSame, which boxes both ints and relies on the
         // Integer cache (-128..127) for reference identity; assertEquals
         // compares the values correctly for any int.
         assertEquals(originalNumberOfArguments, t.getNumberOfArguments());
         assertEquals(originalToString, t.toString());
      }
   }

   @Test
   public void testUnifyAndStrictEqualityWithVariable() {
      for (Term t : IMMUTABLE_TERMS) {
         Variable v = variable("X");

         // check equal
         assertStrictEquality(t, v, false);

         // check can unify (with unify called on t with v passed as a parameter)
         assertTrue(t.unify(v));

         // check equal after unification
         assertVariableIsUnifiedToTerm(v, t);

         // backtrack
         v.backtrack();

         // check backtrack undid result of unification
         assertSame(TermType.VARIABLE, v.getType());
         assertStrictEquality(t, v, false);

         // check can unify again (but this time with unify called on v with t passed as a parameter)
         assertTrue(t.unify(v));

         // check equal after unification
         assertVariableIsUnifiedToTerm(v, t);

         // backtrack
         v.backtrack();

         // check backtrack undid result of unification
         assertSame(TermType.VARIABLE, v.getType());
         assertStrictEquality(t, v, false);

         // unify v to something else
         v.unify(atom("some atom"));

         // check v and t can no longer be unified
         assertUnify(t, v, false);
      }
   }

   /** test an anonymous {@link Variable} unifies with everything and is strictly equal to nothing */
   @Test
   public void testUnifyAndStrictEqualityWithAnonymousVariable() {
      for (Term t : IMMUTABLE_TERMS) {
         assertUnify(t, new Variable(), true);
         assertStrictEquality(t, new Variable(), false);
      }
   }

   private void assertVariableIsUnifiedToTerm(Variable v, Term t) {
      assertStrictEquality(t, v, true);
      assertFalse(v.equals(t));
      assertFalse(t.equals(v));
      assertEquals(t.toString(), v.toString());
      assertSame(t.getType(), v.getType());
      assertSame(t, v.getTerm());
      assertSame(t, v.copy(null));
      assertUnify(t, v, true);
   }

   /** asserts that unification succeeds (or fails) symmetrically in both directions */
   private void assertUnify(Term t1, Term t2, boolean expected) {
      assertEquals(expected, t1.unify(t2));
      assertEquals(expected, t2.unify(t1));
   }
}
package tests.wurstscript.tests; import de.peeeq.wurstio.languageserver.BufferManager; import de.peeeq.wurstio.languageserver.ModelManager; import de.peeeq.wurstio.languageserver.ModelManagerImpl; import de.peeeq.wurstio.languageserver.WFile; import de.peeeq.wurstio.languageserver.requests.GetCompletions; import org.eclipse.lsp4j.*; import org.testng.annotations.Test; import java.io.File; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; /** * tests the autocomplete functionality. * <p> * the position of the cursor is denoted by a bar "|" in the test cases */ public class AutoCompleteTests extends WurstLanguageServerTest { @Test public void simpleExample1() { CompletionTestData testData = input( "package test", " function int.foo()", " function int.bar()", " init", " int x = 5", " x.|", "endpackage" ); testCompletions(testData, "bar", "foo"); } @Test public void simpleExample2() { CompletionTestData testData = input( "package test", " function int.foo()", " function int.bar()", " init", " int x = 5", " x.f|", "endpackage" ); testCompletions(testData, "foo"); } @Test public void simpleExample3() { CompletionTestData testData = input( "package test", " function int.foo(int a, bool b)", " init", " int x = 5", " x.f|", "endpackage" ); testCompletions(testData, "foo"); } @Test public void testWithParentheses() { CompletionTestData testData = input( "package test", "init", " CreateG|()", "" ); CompletionList completions = calculateCompletions(testData); assertEquals(1, completions.getItems().size()); CompletionItem c = completions.getItems().get(0); assertEquals("CreateGroup", c.getInsertText()); } @Test public void testWithParentheses2() { CompletionTestData testData = input( "package test", "init", " CreateU|(x,y,z)", "" ); CompletionList completions = calculateCompletions(testData); 
assertFalse(completions.getItems().isEmpty()); CompletionItem comp = completions.getItems().stream() .filter(c -> c.getLabel().equals("CreateUnit")) .findFirst() .get(); assertEquals(comp.getInsertText(), "CreateUnit"); } @Test public void testWithoutParentheses() { CompletionTestData testData = input( "package test", " init", " CreateG|", "endpackage" ); CompletionList completions = calculateCompletions(testData); assertEquals(1, completions.getItems().size()); CompletionItem c = completions.getItems().get(0); assertEquals("CreateGroup()", c.getInsertText()); } @Test public void testWithoutParentheses2() { CompletionTestData testData = input( "package test", " init", " CreateU|", "endpackage" ); CompletionList completions = calculateCompletions(testData); assertFalse(completions.getItems().isEmpty()); CompletionItem comp = completions.getItems().stream() .filter(c -> c.getLabel().equals("CreateUnit")) .findFirst() .get(); assertEquals(comp.getInsertText(), "CreateUnit(${1:id}, ${2:unitid}, ${3:x}, ${4:y}, ${5:face})"); } @Test public void overload1() { CompletionTestData testData = input( "package test", " function int.foo()", " function int.foo(int x)", " init", " int x = 5", " x.f|", "endpackage" ); testCompletions(testData, "foo", "foo"); } @Test public void onlyFromClasses() { CompletionTestData testData = input( "package test", " function fuu()", " function int.foo()", " init", " int x = 5", " x.f|", "endpackage" ); testCompletions(testData, "foo"); } @Test public void inForLoop() { CompletionTestData testData = input( "package test", " function int.foo()", " function faaa()", " function int.bar()", " init", " int x = 5", " for i in x.f|", "endpackage" ); testCompletions(testData, "foo"); } @Test public void ratings_returnType1() { CompletionTestData testData = input( "package test", " function int.foo() returns int", " function int.fuu() returns bool", " init", " int x = 5", " int y = x.f|", "endpackage" ); testCompletions(testData, "foo", "fuu"); } @Test 
    /** Mirror of ratings_returnType1: a bool target ranks the bool-returning function first. */
    public void ratings_returnType2() {
        CompletionTestData testData = input(
                "package test",
                " function int.foo() returns int",
                " function int.fuu() returns bool",
                " init",
                " int x = 5",
                " bool y = x.f|",
                "endpackage"
        );
        testCompletions(testData, "fuu", "foo");
    }

    /** Completion at the very end of a file without a trailing newline. */
    @Test
    public void completionAtEndOfFileWithNewline() { // see #584
        CompletionTestData testData = input(false,
                "package test",
                "function int.foo() returns int",
                "function int.fuu() returns bool",
                "init",
                " int x = 5",
                "",
                " x.f|"
        );
        testCompletions(testData, "foo", "fuu");
    }

    /** Same as above but with a trailing newline present. */
    @Test
    public void completionAtEndOfFileWithNewline2() {
        CompletionTestData testData = input(true,
                "package test",
                "function int.foo() returns int",
                "function int.fuu() returns bool",
                "init",
                " int x = 5",
                "",
                " x.f|"
        );
        testCompletions(testData, "foo", "fuu");
    }

    /** Completion on a cascade-style line that starts with a bare dot. */
    @Test
    public void testAfterDot() {
        CompletionTestData testData = input(true,
                "package test",
                "class A",
                "function A.foo() returns int",
                "function A.fuu() returns bool",
                "function test()",
                " new A()",
                " .|",
                "",
                "function a()"
        );
        testCompletions(testData, "foo", "fuu");
    }

    /** Completion inside a constructor call that also takes a closure argument with an overloaded operator. */
    @Test
    public void closuresWithOperatorOverloading() {
        CompletionTestData testData = input(
                "package test",
                " function int.foo()",
                " function int.bar()",
                " class C",
                " construct(G g, int y)",
                " construct(F f, int y)",
                " interface F",
                " function apply(int i) returns int",
                " interface G",
                " function apply() returns int",
                " init",
                " int x = 5",
                " new C(i -> i + 1, x.|)",
                "endpackage"
        );
        testCompletions(testData, "bar", "foo");
    }

    /** Private methods are hidden from completion outside the declaring class. */
    @Test
    public void testPrivateMethod() {
        CompletionTestData testData = input(true,
                "package test",
                "class A",
                " function foo() returns int",
                " return 1",
                " private function fuu() returns bool",
                " return true",
                "function test()",
                " let a = new A()",
                " a.f|"
        );
        testCompletions(testData, "foo");
    }

    /** Private methods are visible when completing from within the declaring class. */
    @Test
    public void testPrivateMethod2() {
        CompletionTestData testData = input(true,
                "package test",
                "class A",
                " function foo() returns int",
                " return 1",
                " private function fuu() returns bool",
                " return true",
                " static function test()",
                " let a = new A()",
                " a.f|"
        );
        testCompletions(testData, "foo", "fuu");
    }

    /** Protected methods are visible from a subclass in another package. */
    @Test
    public void testProtectedMethod() {
        CompletionTestData testData = input(true,
                "package test",
                "class A",
                " function foo() returns int",
                " return 1",
                " protected function fuu() returns bool",
                " return true",
                "package test2",
                "import test",
                "class B extends A",
                " static function test()",
                " let a = new A()",
                " a.f|"
        );
        testCompletions(testData, "foo", "fuu");
    }

    /** Protected methods are hidden from an unrelated class in another package. */
    @Test
    public void testProtectedMethod2() {
        CompletionTestData testData = input(true,
                "package test",
                "class A",
                " function foo() returns int",
                " return 1",
                " protected function fuu() returns bool",
                " return true",
                "package test2",
                "import test",
                "class B",
                " static function test()",
                " let a = new A()",
                " a.f|"
        );
        testCompletions(testData, "foo");
    }

    /** Fields of a static nested class instance are completed. */
    @Test
    public void testNestedClass() { // see https://github.com/wurstscript/WurstScript/issues/753
        CompletionTestData testData = input(true,
                "package test",
                "public class SchoolSpell",
                " static let qinyun = new QINYUN()",
                " static class QINYUN",
                " let leiyunjianqi = '0000'",
                " let xuanbinjinqi = '0000'",
                " let zhanguishen = '0000'",
                " let shenjianyulei = '0000'",
                "init",
                " let a = SchoolSpell.qinyun.|",
                " let x = 42"
        );
        testCompletions(testData, "leiyunjianqi", "shenjianyulei", "xuanbinjinqi", "zhanguishen");
    }

    /** Deprecated functions are still offered, but ranked after non-deprecated ones. */
    @Test
    public void testDeprecated() {
        CompletionTestData testData = input(true,
                "package test",
                "@deprecated function getIndexedUnit() returns unit",
                " return null",
                "function getIndexingUnit() returns unit",
                " return null",
                "",
                "init",
                " unit u = getInd|"
        );
        testCompletions(testData, "getIndexingUnit", "getIndexedUnit");
    }

    /** Static members (nested classes and fields) are completed after the class name. */
    @Test
    public void testInnerClasses() {
        CompletionTestData testData = input(true,
                "package test",
                "class A",
                " static class Blue",
                " static class Boris",
                " static int Banana = 42",
                "init",
                " let u = A.|"
        );
        testCompletions(testData, "Banana", "Blue", "Boris");
    }

    private void testCompletions(CompletionTestData testData, String...
expectedCompletions) { testCompletions(testData, Arrays.asList(expectedCompletions)); } private void testCompletions(CompletionTestData testData, List<String> expectedCompletions) { CompletionList result = calculateCompletions(testData); List<String> completionLabels = result.getItems().stream() .sorted(Comparator.comparing(i -> i.getSortText())) .map(completion -> completion.getLabel()) .collect(Collectors.toList()); assertEquals(completionLabels, expectedCompletions, "completionLabels = " + completionLabels); } private CompletionList calculateCompletions(CompletionTestData testData) { BufferManager bufferManager = new BufferManager(); File projectPath = new File("./test-output").getAbsoluteFile(); ModelManager modelManager = new ModelManagerImpl(projectPath, bufferManager); String uri = projectPath.toURI().toString() + "/wurst/test.wurst"; bufferManager.updateFile(WFile.create(uri), testData.buffer); TextDocumentIdentifier textDocument = new TextDocumentIdentifier(uri); Position pos = new Position(testData.line, testData.column); CompletionParams position = new CompletionParams(textDocument, pos); GetCompletions getCompletions = new GetCompletions(position, bufferManager); //new GetCompletions(1, "test", testData.buffer, testData.line, testData.column); return getCompletions.execute(modelManager); } }
/* * Copyright 2014-2017 Real Logic Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron; import io.aeron.exceptions.*; import org.agrona.*; import org.agrona.collections.*; import org.agrona.concurrent.*; import org.agrona.concurrent.status.UnsafeBufferPosition; import java.nio.channels.FileChannel; import java.util.ArrayList; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.*; import static io.aeron.Aeron.IDLE_SLEEP_NS; import static io.aeron.Aeron.sleep; import static java.util.concurrent.TimeUnit.MILLISECONDS; /** * Client conductor takes responses and notifications from Media Driver and acts on them in addition to forwarding * commands from the various Client APIs to the Media Driver. 
*/ class ClientConductor implements Agent, DriverListener { private static final long NO_CORRELATION_ID = -1; private static final long RESOURCE_TIMEOUT_NS = TimeUnit.SECONDS.toNanos(1); private static final long RESOURCE_LINGER_NS = TimeUnit.SECONDS.toNanos(3); private final long keepAliveIntervalNs; private final long driverTimeoutMs; private final long driverTimeoutNs; private final long interServiceTimeoutNs; private final long publicationConnectionTimeoutMs; private long timeOfLastKeepaliveNs; private long timeOfLastCheckResourcesNs; private long timeOfLastWorkNs; private boolean isDriverActive = true; private volatile boolean isClosed; private final Lock clientLock; private final EpochClock epochClock; private final FileChannel.MapMode imageMapMode; private final NanoClock nanoClock; private final DriverListenerAdapter driverListener; private final LogBuffersFactory logBuffersFactory; private final ActivePublications activePublications = new ActivePublications(); private final Long2ObjectHashMap<ExclusivePublication> activeExclusivePublications = new Long2ObjectHashMap<>(); private final ActiveSubscriptions activeSubscriptions = new ActiveSubscriptions(); private final ArrayList<ManagedResource> lingeringResources = new ArrayList<>(); private final UnavailableImageHandler defaultUnavailableImageHandler; private final AvailableImageHandler defaultAvailableImageHandler; private final UnsafeBuffer counterValuesBuffer; private final DriverProxy driverProxy; private final ErrorHandler errorHandler; private final AgentInvoker driverAgentInvoker; private RegistrationException driverException; ClientConductor(final Aeron.Context ctx) { clientLock = ctx.clientLock(); epochClock = ctx.epochClock(); nanoClock = ctx.nanoClock(); errorHandler = ctx.errorHandler(); counterValuesBuffer = ctx.countersValuesBuffer(); driverProxy = ctx.driverProxy(); logBuffersFactory = ctx.logBuffersFactory(); imageMapMode = ctx.imageMapMode(); keepAliveIntervalNs = ctx.keepAliveInterval(); 
driverTimeoutMs = ctx.driverTimeoutMs(); driverTimeoutNs = MILLISECONDS.toNanos(driverTimeoutMs); interServiceTimeoutNs = ctx.interServiceTimeout(); publicationConnectionTimeoutMs = ctx.publicationConnectionTimeout(); defaultAvailableImageHandler = ctx.availableImageHandler(); defaultUnavailableImageHandler = ctx.unavailableImageHandler(); driverListener = new DriverListenerAdapter(ctx.toClientBuffer(), this); driverAgentInvoker = ctx.driverAgentInvoker(); final long nowNs = nanoClock.nanoTime(); timeOfLastKeepaliveNs = nowNs; timeOfLastCheckResourcesNs = nowNs; timeOfLastWorkNs = nowNs; } public void onClose() { if (!isClosed) { isClosed = true; for (final ExclusivePublication publication : activeExclusivePublications.values()) { publication.forceClose(); } activeExclusivePublications.clear(); activePublications.close(); activeSubscriptions.close(); Thread.yield(); for (int i = 0, size = lingeringResources.size(); i < size; i++) { lingeringResources.get(i).delete(); } lingeringResources.clear(); } } public int doWork() { int workCount = 0; if (clientLock.tryLock()) { try { if (!isClosed) { workCount = doWork(NO_CORRELATION_ID, null); } } finally { clientLock.unlock(); } } return workCount; } public String roleName() { return "aeron-client-conductor"; } boolean isClosed() { return isClosed; } Lock clientLock() { return clientLock; } void handleError(final Throwable ex) { errorHandler.onError(ex); } Publication addPublication(final String channel, final int streamId) { verifyActive(); Publication publication = activePublications.get(channel, streamId); if (null == publication) { awaitResponse(driverProxy.addPublication(channel, streamId), channel); publication = activePublications.get(channel, streamId); } publication.incRef(); return publication; } ExclusivePublication addExclusivePublication(final String channel, final int streamId) { verifyActive(); final long registrationId = driverProxy.addExclusivePublication(channel, streamId); awaitResponse(registrationId, 
channel); return activeExclusivePublications.get(registrationId); } void releasePublication(final Publication publication) { if (isClosed) { throw new IllegalStateException("Aeron client is closed"); } if (publication == activePublications.remove(publication.channel(), publication.streamId())) { lingerResource(publication.managedResource()); awaitResponse(driverProxy.removePublication(publication.registrationId()), null); } } void releasePublication(final ExclusivePublication publication) { if (isClosed) { throw new IllegalStateException("Aeron client is closed"); } if (publication == activeExclusivePublications.remove(publication.registrationId())) { lingerResource(publication.managedResource()); awaitResponse(driverProxy.removePublication(publication.registrationId()), null); } } void asyncReleasePublication(final long registrationId) { driverProxy.removePublication(registrationId); } Subscription addSubscription(final String channel, final int streamId) { verifyActive(); final long correlationId = driverProxy.addSubscription(channel, streamId); final Subscription subscription = new Subscription( this, channel, streamId, correlationId, defaultAvailableImageHandler, defaultUnavailableImageHandler); activeSubscriptions.add(subscription); awaitResponse(correlationId, channel); return subscription; } Subscription addSubscription( final String channel, final int streamId, final AvailableImageHandler availableImageHandler, final UnavailableImageHandler unavailableImageHandler) { verifyActive(); final long correlationId = driverProxy.addSubscription(channel, streamId); final Subscription subscription = new Subscription( this, channel, streamId, correlationId, availableImageHandler, unavailableImageHandler); activeSubscriptions.add(subscription); awaitResponse(correlationId, channel); return subscription; } void releaseSubscription(final Subscription subscription) { if (isClosed) { throw new IllegalStateException("Aeron client is closed"); } 
awaitResponse(driverProxy.removeSubscription(subscription.registrationId()), null); activeSubscriptions.remove(subscription); } void asyncReleaseSubscription(final Subscription subscription) { driverProxy.removeSubscription(subscription.registrationId()); } void addDestination(final long registrationId, final String endpointChannel) { verifyActive(); awaitResponse(driverProxy.addDestination(registrationId, endpointChannel), null); } void removeDestination(final long registrationId, final String endpointChannel) { verifyActive(); awaitResponse(driverProxy.removeDestination(registrationId, endpointChannel), null); } public void onError(final long correlationId, final ErrorCode errorCode, final String message) { driverException = new RegistrationException(errorCode, message); } public void onNewPublication( final long correlationId, final long registrationId, final int streamId, final int sessionId, final int publicationLimitId, final String channel, final String logFileName) { final Publication publication = new Publication( this, channel, streamId, sessionId, new UnsafeBufferPosition(counterValuesBuffer, publicationLimitId), logBuffersFactory.map(logFileName, FileChannel.MapMode.READ_WRITE), registrationId, correlationId); activePublications.put(channel, streamId, publication); } public void onNewExclusivePublication( final long correlationId, final long registrationId, final int streamId, final int sessionId, final int publicationLimitId, final String channel, final String logFileName) { final ExclusivePublication publication = new ExclusivePublication( this, channel, streamId, sessionId, new UnsafeBufferPosition(counterValuesBuffer, publicationLimitId), logBuffersFactory.map(logFileName, FileChannel.MapMode.READ_WRITE), registrationId, correlationId); activeExclusivePublications.put(correlationId, publication); } public void onAvailableImage( final long correlationId, final int streamId, final int sessionId, final Long2LongHashMap subscriberPositionMap, final 
String logFileName, final String sourceIdentity) { activeSubscriptions.forEach( streamId, (subscription) -> { if (!subscription.hasImage(correlationId)) { final long positionId = subscriberPositionMap.get(subscription.registrationId()); if (DriverListenerAdapter.MISSING_REGISTRATION_ID != positionId) { final Image image = new Image( subscription, sessionId, new UnsafeBufferPosition(counterValuesBuffer, (int)positionId), logBuffersFactory.map(logFileName, imageMapMode), errorHandler, sourceIdentity, correlationId); try { final AvailableImageHandler handler = subscription.availableImageHandler(); if (null != handler) { handler.onAvailableImage(image); } } catch (final Throwable ex) { errorHandler.onError(ex); } subscription.addImage(image); } } }); } public void onUnavailableImage(final long correlationId, final int streamId) { activeSubscriptions.forEach( streamId, (subscription) -> { final Image image = subscription.removeImage(correlationId); if (null != image) { try { final UnavailableImageHandler handler = subscription.unavailableImageHandler(); if (null != handler) { handler.onUnavailableImage(image); } } catch (final Throwable ex) { errorHandler.onError(ex); } } }); } DriverListenerAdapter driverListenerAdapter() { return driverListener; } void lingerResource(final ManagedResource managedResource) { managedResource.timeOfLastStateChange(nanoClock.nanoTime()); lingeringResources.add(managedResource); } boolean isPublicationConnected(final long timeOfLastStatusMessageMs) { return epochClock.time() <= (timeOfLastStatusMessageMs + publicationConnectionTimeoutMs); } private int doWork(final long correlationId, final String expectedChannel) { int workCount = 0; try { workCount += onCheckTimeouts(); workCount += driverListener.pollMessage(correlationId, expectedChannel); } catch (final Throwable throwable) { errorHandler.onError(throwable); if (correlationId != NO_CORRELATION_ID) { // has been called from a user thread and not the conductor duty cycle. 
                throw throwable;
            }
        }

        return workCount;
    }

    /**
     * Busy-waits (sleeping or invoking the embedded driver agent) until the driver acknowledges
     * the given correlation id, rethrowing any error the driver reported for it.
     *
     * @throws RegistrationException if the driver responded with an error for this request
     * @throws DriverTimeoutException if no response arrives within driverTimeoutNs
     */
    private void awaitResponse(final long correlationId, final String expectedChannel)
    {
        driverException = null;
        final long deadlineNs = nanoClock.nanoTime() + driverTimeoutNs;

        do
        {
            if (null == driverAgentInvoker)
            {
                sleep(1);
            }
            else
            {
                // embedded driver: drive its agent directly instead of sleeping
                driverAgentInvoker.invoke();
            }

            doWork(correlationId, expectedChannel);

            if (driverListener.lastReceivedCorrelationId() == correlationId)
            {
                if (null != driverException)
                {
                    throw driverException;
                }

                return;
            }
        }
        while (nanoClock.nanoTime() < deadlineNs);

        throw new DriverTimeoutException("No response from driver within timeout");
    }

    /** Guards client API entry points against use after close or after driver timeout. */
    private void verifyActive()
    {
        if (isClosed)
        {
            throw new IllegalStateException("Aeron client is closed");
        }

        if (!isDriverActive)
        {
            throw new DriverTimeoutException("MediaDriver is inactive");
        }
    }

    /**
     * Periodic housekeeping: inter-service timeout check, client keepalive + driver heartbeat
     * check, and deletion of lingering resources whose linger period has expired.
     */
    private int onCheckTimeouts()
    {
        int workCount = 0;
        final long nowNs = nanoClock.nanoTime();

        // rate-limit: nothing to do until at least one idle period has passed
        if (nowNs < (timeOfLastWorkNs + IDLE_SLEEP_NS))
        {
            return workCount;
        }

        if (nowNs > (timeOfLastWorkNs + interServiceTimeoutNs))
        {
            onClose();

            throw new ConductorServiceTimeoutException(
                "Timeout between service calls over " + interServiceTimeoutNs + "ns");
        }

        timeOfLastWorkNs = nowNs;

        if (nowNs > (timeOfLastKeepaliveNs + keepAliveIntervalNs))
        {
            driverProxy.sendClientKeepalive();
            checkDriverHeartbeat();

            timeOfLastKeepaliveNs = nowNs;
            workCount++;
        }

        if (nowNs > (timeOfLastCheckResourcesNs + RESOURCE_TIMEOUT_NS))
        {
            final ArrayList<ManagedResource> lingeringResources = this.lingeringResources;
            // iterate backwards so fastUnorderedRemove's swap-from-end does not skip elements
            for (int lastIndex = lingeringResources.size() - 1, i = lastIndex; i >= 0; i--)
            {
                final ManagedResource resource = lingeringResources.get(i);

                if (nowNs > (resource.timeOfLastStateChange() + RESOURCE_LINGER_NS))
                {
                    ArrayListUtil.fastUnorderedRemove(lingeringResources, i, lastIndex);
                    lastIndex--;
                    resource.delete();
                }
            }

            timeOfLastCheckResourcesNs = nowNs;
            workCount++;
        }

        return workCount;
    }

    /**
     * Marks the driver inactive (and closes the client) if its last keepalive is older than the
     * driver timeout; the timeout error is reported even if onClose() throws.
     */
    private void checkDriverHeartbeat()
    {
        final long deadlineMs = driverProxy.timeOfLastDriverKeepaliveMs() + driverTimeoutMs;
        if (isDriverActive && (epochClock.time() > deadlineMs))
        {
            isDriverActive = false;

            try
            {
                onClose();
            }
            finally
            {
                errorHandler.onError(new DriverTimeoutException(
                    "MediaDriver has been inactive for over " + driverTimeoutMs + "ms"));
            }
        }
    }
}
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.am.integration.tests.api.lifecycle; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import org.wso2.am.integration.test.utils.APIManagerIntegrationTestException; import org.wso2.am.integration.test.utils.base.APIMIntegrationConstants; import org.wso2.am.integration.test.utils.bean.APICreationRequestBean; import org.wso2.am.integration.test.utils.clients.APIPublisherRestClient; import org.wso2.am.integration.test.utils.clients.APIStoreRestClient; import org.wso2.am.integration.test.utils.generic.APIMTestCaseUtils; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.automation.test.utils.http.client.HttpResponse; import org.wso2.carbon.integration.common.admin.client.UserManagementClient; import javax.xml.xpath.XPathExpressionException; import java.net.MalformedURLException; import java.net.URL; import java.util.List; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; /** * Create a API with Role visibility and check the visibility in Publisher Store. 
*/ public class APIVisibilityByRoleTestCase extends APIManagerLifecycleBaseTest { private final String API_NAME_ADMIN_VISIBILITY = "APIVisibilityByRoleTest"; private final String API_NAME_SUBSCRIBER_VISIBILITY = "APIVisibilityByRole"; private final String API_CONTEXT1 = "testAPI1"; private final String API_CONTEXT2 = "testAPI2"; private final String API_TAGS = "testTag1, testTag2, testTag3"; private final String API_DESCRIPTION = "This is test API create by API manager integration test"; private final String API_VERSION_1_0_0 = "1.0.0"; private final String CARBON_SUPER_TENANT2_KEY = "userKey2"; private final String TENANT_DOMAIN_KEY = "wso2.com"; private final String TENANT_DOMAIN_ADMIN_KEY = "admin"; private final String USER_KEY_USER2 = "userKey1"; private final String OTHER_DOMAIN_TENANT_USER_KEY = "user1"; private final String CARBON_SUPER_SUBSCRIBER_USERNAME = "subscriberUser1"; private final char[] CARBON_SUPER_SUBSCRIBER_PASSWORD = "password@123".toCharArray(); private final String TENANT_SUBSCRIBER_USERNAME = "subscriberUser2"; private final char[] TENANT_SUBSCRIBER_PASSWORD = "password@123".toCharArray(); private final String INTERNAL_ROLE_SUBSCRIBER = "Internal/subscriber"; private final String ROLE_SUBSCRIBER = "subscriber"; private final String API_END_POINT_POSTFIX_URL = "jaxrs_basic/services/customers/customerservice/"; private String apiEndPointUrl; private String providerName; private APIPublisherRestClient apiPublisherClientCarbonSuperUser1; private APIStoreRestClient apiStoreClientCarbonSuperUser1; private APIPublisherRestClient apiPublisherClientCarbonSuperAdmin; private APIStoreRestClient apiStoreClientCarbonSuperAdmin; private APIIdentifier apiIdentifierAdminVisibility; private APIIdentifier apiIdentifierSubscriberVisibility; private APIStoreRestClient apiStoreClientCarbonSuperUser2; private APIPublisherRestClient apiPublisherClientCarbonSuperUser2; private APIStoreRestClient apiStoreClientAnotherUserOtherDomain; private APIPublisherRestClient 
apiPublisherClientAnotherUserOtherDomain; private APIStoreRestClient apiStoreClientAdminOtherDomain; private APIPublisherRestClient apiPublisherClientAdminOtherDomain; private UserManagementClient userManagementClient1; private UserManagementClient userManagementClient2; private APIStoreRestClient apiStoreClientSubscriberUserSameDomain; private APIStoreRestClient apiStoreClientSubscriberUserOtherDomain; private String apiCreatorStoreDomain; private String storeURLHttp; private String otherDomain; @BeforeClass(alwaysRun = true) public void initialize() throws Exception { //Creating CarbonSuper context super.init(); apiEndPointUrl = getGatewayURLHttp() + API_END_POINT_POSTFIX_URL; String publisherURLHttp = getPublisherURLHttp(); storeURLHttp = getStoreURLHttp(); //Login to API Publisher and Store with CarbonSuper admin apiPublisherClientCarbonSuperAdmin = new APIPublisherRestClient(publisherURLHttp); apiStoreClientCarbonSuperAdmin = new APIStoreRestClient(storeURLHttp); apiPublisherClientCarbonSuperAdmin.login(user.getUserName(), user.getPassword()); apiStoreClientCarbonSuperAdmin.login(user.getUserName(), user.getPassword()); //Login to API Publisher adn Store with CarbonSuper normal user1 apiPublisherClientCarbonSuperUser1 = new APIPublisherRestClient(publisherURLHttp); apiStoreClientCarbonSuperUser1 = new APIStoreRestClient(storeURLHttp); providerName = publisherContext.getContextTenant().getTenantUser(USER_KEY_USER2).getUserName(); apiPublisherClientCarbonSuperUser1.login( publisherContext.getContextTenant().getTenantUser(USER_KEY_USER2).getUserName(), publisherContext.getContextTenant().getTenantUser(USER_KEY_USER2).getPassword()); apiStoreClientCarbonSuperUser1.login( storeContext.getContextTenant().getTenantUser(USER_KEY_USER2).getUserName(), storeContext.getContextTenant().getTenantUser(USER_KEY_USER2).getPassword()); //Login to API Publisher adn Store with CarbonSuper normal user2 apiCreatorStoreDomain = storeContext.getContextTenant().getDomain(); 
// ---- Tail of the environment set-up method (opens before this chunk). ----
// Second carbon.super user: log in to both Store and Publisher.
apiStoreClientCarbonSuperUser2 = new APIStoreRestClient(storeURLHttp);
apiPublisherClientCarbonSuperUser2 = new APIPublisherRestClient(publisherURLHttp);
apiStoreClientCarbonSuperUser2.login(
        storeContext.getContextTenant().getTenantUser(CARBON_SUPER_TENANT2_KEY).getUserName(),
        storeContext.getContextTenant().getTenantUser(CARBON_SUPER_TENANT2_KEY).getPassword());
apiPublisherClientCarbonSuperUser2.login(
        publisherContext.getContextTenant().getTenantUser(CARBON_SUPER_TENANT2_KEY).getUserName(),
        publisherContext.getContextTenant().getTenantUser(CARBON_SUPER_TENANT2_KEY).getPassword());
// create new user in CarbonSuper with only subscriber role and login to the Store
userManagementClient1 = new UserManagementClient(keyManagerContext.getContextUrls().getBackEndUrl(),
        createSession(keyManagerContext));
if (userManagementClient1.userNameExists(INTERNAL_ROLE_SUBSCRIBER, CARBON_SUPER_SUBSCRIBER_USERNAME)) {
    // Remove any leftover user from a previous run before re-creating it.
    userManagementClient1.deleteUser(CARBON_SUPER_SUBSCRIBER_USERNAME);
}
userManagementClient1.addUser(CARBON_SUPER_SUBSCRIBER_USERNAME, String.valueOf(CARBON_SUPER_SUBSCRIBER_PASSWORD),
        new String[]{INTERNAL_ROLE_SUBSCRIBER}, null);
apiStoreClientSubscriberUserSameDomain = new APIStoreRestClient(storeURLHttp);
apiStoreClientSubscriberUserSameDomain.login(
        CARBON_SUPER_SUBSCRIBER_USERNAME, String.valueOf(CARBON_SUPER_SUBSCRIBER_PASSWORD));
//Creating Tenant contexts
init(TENANT_DOMAIN_KEY, TENANT_DOMAIN_ADMIN_KEY);
otherDomain = storeContext.getContextTenant().getDomain();
//Login to the API Publisher and Store as Tenant user
apiStoreClientAnotherUserOtherDomain = new APIStoreRestClient(storeURLHttp);
apiPublisherClientAnotherUserOtherDomain = new APIPublisherRestClient(publisherURLHttp);
// NOTE(review): both logins below pass getUserName() as the PASSWORD argument, while every
// other login in this set-up uses getPassword(). This only works if the tenant user's
// password equals the user name -- confirm this is intentional.
apiStoreClientAnotherUserOtherDomain.login(
        storeContext.getContextTenant().getTenantUser(OTHER_DOMAIN_TENANT_USER_KEY).getUserName(),
        storeContext.getContextTenant().getTenantUser(OTHER_DOMAIN_TENANT_USER_KEY).getUserName());
apiPublisherClientAnotherUserOtherDomain.login(
        publisherContext.getContextTenant().getTenantUser(OTHER_DOMAIN_TENANT_USER_KEY).getUserName(),
        publisherContext.getContextTenant().getTenantUser(OTHER_DOMAIN_TENANT_USER_KEY).getUserName());
//Login to the API Publisher and Store as Tenant admin
apiStoreClientAdminOtherDomain = new APIStoreRestClient(storeURLHttp);
apiPublisherClientAdminOtherDomain = new APIPublisherRestClient(publisherURLHttp);
apiStoreClientAdminOtherDomain.login(
        storeContext.getContextTenant().getContextUser().getUserName(),
        storeContext.getContextTenant().getContextUser().getPassword());
apiPublisherClientAdminOtherDomain.login(
        publisherContext.getContextTenant().getContextUser().getUserName(),
        publisherContext.getContextTenant().getContextUser().getPassword());
// create new user in tenant with only subscriber role and login to the Store
userManagementClient2 = new UserManagementClient(
        keyManagerContext.getContextUrls().getBackEndUrl(), createSession(keyManagerContext));
if (userManagementClient2.roleNameExists(INTERNAL_ROLE_SUBSCRIBER)) {
    userManagementClient2.deleteRole(INTERNAL_ROLE_SUBSCRIBER);
}
// NOTE(review): the role is re-created as ROLE_SUBSCRIBER while the checks above/below use
// INTERNAL_ROLE_SUBSCRIBER -- presumably addInternalRole() prefixes "Internal/"; verify the
// two constants resolve to the same role name.
userManagementClient2.addInternalRole(ROLE_SUBSCRIBER, new String[]{},
        new String[]{"/permission/admin/login", "/permission/admin/manage/api/subscribe"});
if (userManagementClient2.userNameExists(INTERNAL_ROLE_SUBSCRIBER, TENANT_SUBSCRIBER_USERNAME)) {
    userManagementClient2.deleteUser(TENANT_SUBSCRIBER_USERNAME);
}
userManagementClient2.addUser(TENANT_SUBSCRIBER_USERNAME, String.valueOf(TENANT_SUBSCRIBER_PASSWORD),
        new String[]{INTERNAL_ROLE_SUBSCRIBER}, null);
apiStoreClientSubscriberUserOtherDomain = new APIStoreRestClient(storeURLHttp);
apiStoreClientSubscriberUserOtherDomain.login(TENANT_SUBSCRIBER_USERNAME,
        String.valueOf(TENANT_SUBSCRIBER_PASSWORD));
}

/**
 * Creates two restricted-visibility APIs (roles "admin" and "Internal/subscriber"),
 * publishes both, then verifies the creator sees both in the API Publisher listing.
 */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Publisher for API creator ")
public void testVisibilityForCreatorInPublisher() throws APIManagerIntegrationTestException, MalformedURLException,
        XPathExpressionException {
    apiIdentifierAdminVisibility = new APIIdentifier(providerName, API_NAME_ADMIN_VISIBILITY, API_VERSION_1_0_0);
    apiIdentifierSubscriberVisibility =
            new APIIdentifier(providerName, API_NAME_SUBSCRIBER_VISIBILITY, API_VERSION_1_0_0);
    //Create API with restricted visibility (role "admin") and publish.
    APICreationRequestBean apiCreationReqBeanVisibilityAdmin =
            new APICreationRequestBean(API_NAME_ADMIN_VISIBILITY, API_CONTEXT1, API_VERSION_1_0_0,
                    providerName, new URL(apiEndPointUrl));
    apiCreationReqBeanVisibilityAdmin.setTags(API_TAGS);
    apiCreationReqBeanVisibilityAdmin.setDescription(API_DESCRIPTION);
    apiCreationReqBeanVisibilityAdmin.setVisibility("restricted");
    apiCreationReqBeanVisibilityAdmin.setRoles("admin");
    apiPublisherClientCarbonSuperUser1.addAPI(apiCreationReqBeanVisibilityAdmin);
    publishAPI(apiIdentifierAdminVisibility, apiPublisherClientCarbonSuperUser1, false);
    // Block until the gateway/registry sync has made the API visible.
    waitForAPIDeploymentSync(apiIdentifierAdminVisibility.getProviderName(),
            apiIdentifierAdminVisibility.getApiName(), apiIdentifierAdminVisibility.getVersion(),
            APIMIntegrationConstants.IS_API_EXISTS);
    // Second API: restricted to the Internal/subscriber role.
    APICreationRequestBean apiCreationReqBeanVisibilityInternalSubscriber =
            new APICreationRequestBean(API_NAME_SUBSCRIBER_VISIBILITY, API_CONTEXT2, API_VERSION_1_0_0,
                    providerName, new URL(apiEndPointUrl));
    apiCreationReqBeanVisibilityInternalSubscriber.setTags(API_TAGS);
    apiCreationReqBeanVisibilityInternalSubscriber.setDescription(API_DESCRIPTION);
    apiCreationReqBeanVisibilityInternalSubscriber.setVisibility("restricted");
    apiCreationReqBeanVisibilityInternalSubscriber.setRoles("Internal/subscriber");
    apiPublisherClientCarbonSuperUser1.addAPI(apiCreationReqBeanVisibilityInternalSubscriber);
    publishAPI(apiIdentifierSubscriberVisibility, apiPublisherClientCarbonSuperUser1, false);
    waitForAPIDeploymentSync(apiIdentifierSubscriberVisibility.getProviderName(),
            apiIdentifierSubscriberVisibility.getApiName(), apiIdentifierSubscriberVisibility.getVersion(),
            APIMIntegrationConstants.IS_API_EXISTS);
    List<APIIdentifier> apiPublisherAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(
                    apiPublisherClientCarbonSuperUser1.getAllAPIs());
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiPublisherAPIIdentifierList),
            "API with Role admin visibility is not visible to creator in API Publisher."
                    + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiPublisherAPIIdentifierList),
            "API with Role Internal/subscriber visibility is not visible to creator in API Publisher."
                    + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** Verifies the creator also sees both restricted APIs in the API Store. */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Store for API creator",
        dependsOnMethods = "testVisibilityForCreatorInPublisher")
public void testVisibilityForCreatorInStore() throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiStoreAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(apiStoreClientCarbonSuperUser1.
                    getAllPublishedAPIs(apiCreatorStoreDomain));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiStoreAPIIdentifierList),
            "API with Role admin visibility is not visible to creator in API Store."
                    + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiStoreAPIIdentifierList),
            "API with Role Internal/subscriber is not visible to creator in API Store. "
                    + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** Admin (holding admin + subscriber roles, same domain) must see both APIs in the Publisher. */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Publisher for admin in same domain ",
        dependsOnMethods = "testVisibilityForCreatorInStore")
public void testVisibilityForAdminUserWithAdminAndSubscriberRoleInSameDomainInPublisher()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiPublisherAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(apiPublisherClientCarbonSuperAdmin.getAllAPIs());
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiPublisherAPIIdentifierList),
            "API with Role admin visibility is not visible to Admin user with Admin and subscriber role in same"
                    + " domain in API Publisher." + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiPublisherAPIIdentifierList),
            "API with Role Internal/subscriber visibility is not visible to Admin user with Admin and subscriber"
                    + " role in same domain in API Publisher."
                    + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** Admin (holding admin + subscriber roles, same domain) must see both APIs in the Store. */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Store for admin in same domain ",
        dependsOnMethods = "testVisibilityForAdminUserWithAdminAndSubscriberRoleInSameDomainInPublisher")
public void testVisibilityForAdminUserWithAdminAndSubscriberRoleInSameDomainInStore()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiStoreAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(apiStoreClientCarbonSuperAdmin.
                    getAllPublishedAPIs(apiCreatorStoreDomain));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiStoreAPIIdentifierList),
            "API with Role admin visibility is not visible to Admin user with Admin and subscriber role in same "
                    + "domain in API Store."
// (continuation of testVisibilityForAdminUserWithAdminAndSubscriberRoleInSameDomainInStore)
                    + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiStoreAPIIdentifierList),
            "API with Role Internal/subscriber is not visible to Admin user with Admin and subscriber role in same "
                    + "domain in API Store. " + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** A different same-domain user holding admin + subscriber roles must see both APIs in the Publisher. */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Publisher for another user in same domain",
        dependsOnMethods = "testVisibilityForAdminUserWithAdminAndSubscriberRoleInSameDomainInStore")
public void testVisibilityForAnotherUserWithAdminAndSubscriberRoleInSameDomainInPublisher()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiPublisherAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(
                    apiPublisherClientCarbonSuperUser2.getAllAPIs());
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiPublisherAPIIdentifierList),
            "API with Role admin visibility is not visible to another user with Admin and subscriber role in same "
                    + "domain in API Publisher." + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiPublisherAPIIdentifierList),
            "API with Role Internal/subscriber visibility is not visible to another user with Admin and subscriber "
                    + "role in same domain in API Publisher."
                    + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** A different same-domain user holding admin + subscriber roles must see both APIs in the Store. */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Store for another user in same domain",
        dependsOnMethods = "testVisibilityForAnotherUserWithAdminAndSubscriberRoleInSameDomainInPublisher")
public void testVisibilityForAnotherUserWithAdminAndSubscriberRoleInSameDomainInStore()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiStoreAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(apiStoreClientCarbonSuperUser2.
                    getAllPublishedAPIs(apiCreatorStoreDomain));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiStoreAPIIdentifierList),
            "API with Role admin visibility is not visible to another user with Admin and subscriber role in same"
                    + " domain in API Store." + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiStoreAPIIdentifierList),
            "API with Role Internal/subscriber is not visible to another user with Admin and subscriber role in"
                    + " same domain in API Store. "
                    + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** Role-restricted APIs must NOT be visible across tenant domains: other-domain user, Publisher. */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Publisher for another user in other domain",
        dependsOnMethods = "testVisibilityForAnotherUserWithAdminAndSubscriberRoleInSameDomainInStore")
public void testVisibilityForAnotherUserWithAdminAndSubscriberRoleInOtherDomainInPublisher()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiPublisherAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(
                    apiPublisherClientAnotherUserOtherDomain.getAllAPIs());
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiPublisherAPIIdentifierList),
            "API with Role admin visibility is visible to another user with Admin and subscriber role in other "
                    + "domain in API Publisher." + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiPublisherAPIIdentifierList),
            "API with Role Internal/subscriber visibility is visible to another user with Admin and subscriber"
                    + " role in other domain in API Publisher."
                    + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** Role-restricted APIs must NOT be visible across tenant domains: other-domain user, Store. */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Store for another user in other domain",
        dependsOnMethods = "testVisibilityForAnotherUserWithAdminAndSubscriberRoleInOtherDomainInPublisher")
public void testVisibilityForAnotherUserWithAdminAndSubscriberRoleInOtherDomainInStore()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiStoreAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(apiStoreClientAnotherUserOtherDomain.
                    getAllPublishedAPIs(apiCreatorStoreDomain));
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiStoreAPIIdentifierList),
            "API with Role admin visibility is visible to another user with Admin and subscriber role in other "
                    + "domain in API Store." + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiStoreAPIIdentifierList),
            "API with Role Internal/subscriber is visible to another user with Admin and subscriber role in other "
                    + "domain in API Store. " + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** Role-restricted APIs must NOT be visible across tenant domains: other-domain admin, Publisher. */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Publisher for admin in other domain",
        dependsOnMethods = "testVisibilityForAnotherUserWithAdminAndSubscriberRoleInOtherDomainInStore")
public void testVisibilityForAdminWithAdminAndSubscriberRoleInOtherDomainInPublisher()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiPublisherAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(
                    apiPublisherClientAdminOtherDomain.getAllAPIs());
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiPublisherAPIIdentifierList),
            "API with Role admin visibility is visible to Admin user with Admin and subscriber role in other "
                    + "domain in API Publisher."
// (continuation of testVisibilityForAdminWithAdminAndSubscriberRoleInOtherDomainInPublisher)
                    + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiPublisherAPIIdentifierList),
            "API with Role Internal/subscriber visibility is visible to Admin user with Admin and subscriber role"
                    + " in other domain in API Publisher."
                    + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** Role-restricted APIs must NOT be visible across tenant domains: other-domain admin, Store. */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Store for admin in other domain",
        dependsOnMethods = "testVisibilityForAdminWithAdminAndSubscriberRoleInOtherDomainInPublisher")
public void testVisibilityForAdminWithAdminAndSubscriberRoleInOtherDomainInStore()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiStoreAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(apiStoreClientAdminOtherDomain.
                    getAllPublishedAPIs(apiCreatorStoreDomain));
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiStoreAPIIdentifierList),
            "API with Role admin visibility is visible to Admin user with Admin and subscriber role in other "
                    + "domain in API Store." + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiStoreAPIIdentifierList),
            "API with Role Internal/subscriber is visible to Admin user with Admin and subscriber role in other "
                    + "domain in API Store. " + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/**
 * Same-domain user with ONLY the subscriber role: must not see the admin-restricted API,
 * but must see the Internal/subscriber-restricted one.
 */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Store for another user in same domain",
        dependsOnMethods = "testVisibilityForAdminWithAdminAndSubscriberRoleInOtherDomainInStore")
public void testVisibilityForAnotherUserWithSubscriberRoleInSameDomainInStore()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiStoreAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(apiStoreClientSubscriberUserSameDomain.
                    getAllPublishedAPIs(apiCreatorStoreDomain));
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiStoreAPIIdentifierList),
            "API with Role admin visibility is visible to another user with subscriber role in same domain "
                    + "in API Store." + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertTrue(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiStoreAPIIdentifierList),
            "API with Role Internal/subscriber is not visible to another user with subscriber role in same "
                    + "domain in API Store. " + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/**
 * Other-domain subscriber-only user: must see neither API.
 * NOTE(review): the @Test description and the failure messages below say "same domain" but
 * this test exercises the OTHER-domain subscriber client -- copy-paste wording, worth fixing
 * at source.
 */
@Test(groups = {"wso2.am"}, description = "Test the visibility of API in Store for another user in same domain",
        dependsOnMethods = "testVisibilityForAnotherUserWithSubscriberRoleInSameDomainInStore")
public void testVisibilityForAnotherUserWithSubscriberRoleInOtherDomainInStore()
        throws APIManagerIntegrationTestException {
    List<APIIdentifier> apiStoreAPIIdentifierList =
            APIMTestCaseUtils.getAPIIdentifierListFromHttpResponse(apiStoreClientSubscriberUserOtherDomain.
                    getAllPublishedAPIs(apiCreatorStoreDomain));
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierAdminVisibility, apiStoreAPIIdentifierList),
            "API with Role admin visibility is visible to another user with subscriber role in same domain "
                    + "in API Store." + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertFalse(APIMTestCaseUtils.isAPIAvailable(apiIdentifierSubscriberVisibility, apiStoreAPIIdentifierList),
            "API with Role Internal/subscriber is visible to another user with subscriber role in same domain "
                    + "in API Store. " + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** Anonymous (unauthenticated) browsing of the creator's Store from the other domain: neither API listed. */
@Test(groups = {"wso2.am"}, description = "Test the visibility for API in other domainStore for anonymous user",
        dependsOnMethods = "testVisibilityForAnotherUserWithSubscriberRoleInOtherDomainInStore")
public void testVisibilityForAnonymousUserInOtherDomainInStore() throws APIManagerIntegrationTestException {
    HttpResponse httpResponse = new APIStoreRestClient(storeURLHttp).getAPIListFromStoreAsAnonymousUser
            (apiCreatorStoreDomain);
    assertFalse(httpResponse.getData().contains(API_NAME_ADMIN_VISIBILITY), "API with Role admin visibility "
            + " is visible to anonymous user in other domain API Store."
            + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertFalse(httpResponse.getData().contains(API_NAME_SUBSCRIBER_VISIBILITY), "API with Role "
            + "Internal/subscriber is visible to anonymous user in other domain API Store."
            + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/**
 * Anonymous browsing of the same-domain Store: neither API listed.
 * NOTE(review): the failure messages below say "is not visible" although the assertions are
 * assertFalse (i.e. they fail when the API IS visible) -- inverted wording, worth fixing.
 */
@Test(groups = {"wso2.am"}, description = "Test the visibility for API in Same domainStore for anonymous user",
        dependsOnMethods = "testVisibilityForAnonymousUserInOtherDomainInStore")
public void testVisibilityForAnonymousUserInSameDomainInStore() throws APIManagerIntegrationTestException {
    HttpResponse httpResponse = new APIStoreRestClient(storeURLHttp).getAPIListFromStoreAsAnonymousUser(
            otherDomain);
    assertFalse(httpResponse.getData().contains(API_NAME_ADMIN_VISIBILITY), "API with Role admin "
            + "visibility is not visible to anonymous user in same domain API Store."
            + getAPIIdentifierString(apiIdentifierAdminVisibility));
    assertFalse(httpResponse.getData().contains(API_NAME_SUBSCRIBER_VISIBILITY), "API with Role "
            + "Internal/subscriber is not visible to anonymous user in same domain API Store. "
            + getAPIIdentifierString(apiIdentifierSubscriberVisibility));
}

/** Deletes the two test APIs and the two subscriber-only users created in the set-up. */
@AfterClass(alwaysRun = true)
public void cleanUpArtifacts() throws Exception {
    deleteAPI(apiIdentifierAdminVisibility, apiPublisherClientCarbonSuperAdmin);
    deleteAPI(apiIdentifierSubscriberVisibility, apiPublisherClientCarbonSuperAdmin);
    userManagementClient1.deleteUser(CARBON_SUPER_SUBSCRIBER_USERNAME);
    userManagementClient2.deleteUser(TENANT_SUBSCRIBER_USERNAME);
}
}
/* * Copyright 2016 Richard Cartwright * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * $Log: PackageImpl.java,v $ * Revision 1.6 2011/10/05 17:30:40 vizigoth * Changing class abstraction to metadata only to support application metadata plugin class definition extensions. * * Revision 1.5 2011/07/27 17:33:23 vizigoth * Fixed imports to clear warnings. * * Revision 1.4 2011/02/14 22:32:49 vizigoth * First commit after major sourceforge outage. * * Revision 1.3 2011/01/19 21:55:59 vizigoth * Added property initialization code. * * Revision 1.2 2011/01/05 13:09:06 vizigoth * Created new forge for making record and union type values. * * Revision 1.1 2011/01/04 10:39:03 vizigoth * Refactor all package names to simpler forms more consistent with typical Java usage. * * Revision 1.6 2010/05/20 18:52:14 vizigoth * Adding support for Avid extensions. * * Revision 1.5 2010/05/19 22:22:58 vizigoth * Adding Avid extensions. * * Revision 1.4 2010/04/16 15:25:52 vizigoth * Fix to ensure the auto-initialized package ID has a not specified instance number type. * * Revision 1.3 2010/02/10 23:56:13 vizigoth * Improvements to create and mod time method names in Package to match meta dictionary. Also, added static initialize_PropertyName_ methods for required properties. * * Revision 1.2 2009/12/18 17:55:57 vizigoth * Interim check in to help with some training activities. Early support for reading Preface objects from MXF files. 
* * Revision 1.1 2009/05/14 16:15:13 vizigoth * Major refactor to remove dependency on JPA and introduce better interface and implementation separation. Removed all setPropertiesFromInterface and castFromInterface methods. * * Revision 1.1 2009/03/30 09:04:51 vizigoth * Refactor to use SMPTE harmonized names and add early KLV file support. * * Revision 1.6 2008/10/16 16:51:53 vizigoth * First early release 0.1. * * Revision 1.5 2008/10/16 01:14:25 vizigoth * Documentation improved to an early release level. Still with Javadoc warnings. * * Revision 1.4 2008/01/27 11:14:42 vizigoth * Fixed to match interface improvements. * * Revision 1.3 2007/12/13 11:31:51 vizigoth * Removed MediaCriteria interface and replaced with CriteriaType enumeration. * * Revision 1.2 2007/12/04 13:04:49 vizigoth * Removed safe collections and proxy handling. The first is not required due to ? extends in interface definitions and the second is not a solution to the shared resources problem. * * Revision 1.1 2007/11/13 22:09:15 vizigoth * Public release of MAJ API. 
 */

package tv.amwa.maj.model.impl;

// TODO extra package methods from ImplAAFMob.cpp
// TODO what keeps track ids unique

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Vector;

import tv.amwa.maj.constant.DataDefinitionConstant;
import tv.amwa.maj.enumeration.AppendOption;
import tv.amwa.maj.enumeration.Depend;
import tv.amwa.maj.enumeration.IncludedMedia;
import tv.amwa.maj.exception.AdjacentTransitionException;
import tv.amwa.maj.exception.BadLengthException;
import tv.amwa.maj.exception.BadPropertyException;
import tv.amwa.maj.exception.EventSemanticsException;
import tv.amwa.maj.exception.InsufficientTransitionMaterialException;
import tv.amwa.maj.exception.InvalidDataDefinitionException;
import tv.amwa.maj.exception.LeadingTransitionException;
import tv.amwa.maj.exception.ObjectNotFoundException;
import tv.amwa.maj.exception.PropertyNotPresentException;
import tv.amwa.maj.exception.TimecodeNotFoundException;
import tv.amwa.maj.exception.TrackExistsException;
import tv.amwa.maj.exception.TrackNotFoundException;
import tv.amwa.maj.extensions.avid.AvidConstants;
import tv.amwa.maj.industry.MediaClass;
import tv.amwa.maj.industry.MediaListAppend;
import tv.amwa.maj.industry.MediaListGetAt;
import tv.amwa.maj.industry.MediaListInsertAt;
import tv.amwa.maj.industry.MediaListPrepend;
import tv.amwa.maj.industry.MediaListRemoveAt;
import tv.amwa.maj.industry.MediaProperty;
import tv.amwa.maj.industry.MediaPropertyClear;
import tv.amwa.maj.industry.MediaPropertyCount;
import tv.amwa.maj.industry.MediaPropertyRemove;
import tv.amwa.maj.industry.MediaPropertySetter;
import tv.amwa.maj.industry.StrongReferenceVector;
import tv.amwa.maj.industry.TypeDefinitions;
import tv.amwa.maj.integer.Int64;
import tv.amwa.maj.misctype.TrackID;
import tv.amwa.maj.model.KLVData;
import tv.amwa.maj.model.Package;
import tv.amwa.maj.model.Segment;
import tv.amwa.maj.model.Sequence;
import tv.amwa.maj.model.SourceClip;
import tv.amwa.maj.model.TaggedValue;
import tv.amwa.maj.model.TimelineTrack;
import tv.amwa.maj.model.Track;
import tv.amwa.maj.record.AUID;
import tv.amwa.maj.record.PackageID;
import tv.amwa.maj.record.TimeStamp;
import tv.amwa.maj.record.impl.AUIDImpl;
import tv.amwa.maj.record.impl.PackageIDImpl;
import tv.amwa.maj.record.impl.RationalImpl;
import tv.amwa.maj.record.impl.TimeStampImpl;
import tv.amwa.maj.record.impl.TimecodeValueImpl;

/**
 * <p>Implements a package, which can describe a composition, essence, or
 * physical media. A package has a unique identifier and consists of metadata.</p>
 *
 * <p>An abstract media class (isConcrete = false): concrete behaviour is provided
 * by subclasses; the AAF alias for this class is "Mob".</p>
 *
 * @see tv.amwa.maj.industry.TypeDefinitions#PackageWeakReference
 * @see tv.amwa.maj.industry.TypeDefinitions#PackageStrongReference
 * @see tv.amwa.maj.industry.TypeDefinitions#PackageStrongReferenceSet
 */
@MediaClass(uuid1 = 0x0d010101, uuid2 = 0x0101, uuid3 = 0x3400,
        uuid4 = {0x06, 0x0e, 0x2b, 0x34, 0x02, 0x06, 0x01, 0x01},
        definedName = "Package",
        aliases = { "Mob" },
        description = "Specifies a package, which can describe a composition, essence, or physical media.",
        symbol = "Package",
        isConcrete = false)
public class PackageImpl
        extends InterchangeObjectImpl
        implements Package,
                tv.amwa.maj.extensions.avid.Package,
                Serializable,
                Cloneable {

    /** Serialization version marker. */
    private static final long serialVersionUID = -1772171014938118040L;

    // Unique identifier of this package (the AAF MobID).
    private PackageID packageID;
    // Optional human-readable name; null when not present.
    private String packageName = null;
    // Required track list; synchronized because list mutators replace/append concurrently.
    private List<Track> packageTracks = Collections.synchronizedList(new Vector<Track>());
    // Last-modified and creation timestamps for this package.
    private TimeStamp packageLastModified;
    private TimeStamp creationTime;
    // Optional tagged-value collections; an empty list means "property not present".
    private List<TaggedValue> packageUserComments = Collections.synchronizedList(new Vector<TaggedValue>());
    private List<TaggedValue> packageAttributes = Collections.synchronizedList(new Vector<TaggedValue>());
    private List<KLVData> packageKLVData = Collections.synchronizedList(new Vector<KLVData>());
    // Optional usage identifier; null when not present.
    private AUID packageUsage = null;

    @MediaProperty(uuid1 = 0x03010210, uuid2 = (short) 0x0700, uuid3 = (short)
            0x0000,
            uuid4 = {0x06, 0x0e, 0x2b, 0x34, 0x01, 0x01, 0x01, 0x07},
            definedName = "PackageAttributes",
            aliases = { "Attributes", "MobAttributes" },
            typeName = "TaggedValueStrongReferenceVector",
            optional = true,
            uniqueIdentifier = false,
            pid = 0x4409,
            symbol = "PackageAttributes")
    public List<TaggedValue> getPackageAttributes()
            throws PropertyNotPresentException {

        // An empty backing list represents an absent optional property.
        if (packageAttributes.size() == 0)
            throw new PropertyNotPresentException("The optional attributes property is not present in this package.");

        return StrongReferenceVector.getOptionalList(packageAttributes);
    }

    /** Creates a UTF-16 string tagged value from the name/value pair and appends it. */
    public void appendPackageAttribute(
            String name,
            String value)
        throws NullPointerException {

        if (name == null)
            throw new NullPointerException("Cannot create and append a new attribute to the list of attributes for this package with a null name value.");
        if (value == null)
            throw new NullPointerException("Cannot create and append a new attribute to the list of attributes for this package with a null value.");

        TaggedValue taggedValue = new TaggedValueImpl(
                name,
                TypeDefinitions.UTF16String,
                value);
        // appendNoClone: the freshly created value is owned here, so no defensive copy.
        StrongReferenceVector.appendNoClone(packageAttributes, taggedValue);
    }

    @MediaListAppend("PackageAttributes")
    public void appendPackageAttribute(
            TaggedValue packageAttribute)
        throws NullPointerException {

        if (packageAttribute == null)
            throw new NullPointerException("Cannot append a null attribute to the attributes of this package.");

        StrongReferenceVector.append(packageAttributes, packageAttribute);
    }

    @MediaListPrepend("PackageAttributes")
    public void prependPackageAttribute(
            TaggedValue packageAttribute)
        throws NullPointerException {

        if (packageAttribute == null)
            throw new NullPointerException("Cannot prepend a null attribute to the attributes of this package.");

        StrongReferenceVector.prepend(packageAttributes, packageAttribute);
    }

    @MediaPropertyCount("PackageAttributes")
    public int countPackageAttributes() {

        return packageAttributes.size();
    }

    // Clearing replaces the backing list, returning the optional property to "not present".
    @MediaPropertyClear("PackageAttributes")
    public void clearPackageAttributes() {

        packageAttributes = Collections.synchronizedList(new Vector<TaggedValue>());
    }

    @MediaPropertyRemove("PackageAttributes")
    public void removePackageAttribute(
            TaggedValue packageAttribute)
        throws NullPointerException,
            PropertyNotPresentException,
            ObjectNotFoundException {

        if (packageAttribute == null)
            throw new NullPointerException("Cannot remove an attribute from the list of attributes of this package using a null value.");
        if (packageAttributes.size() == 0)
            throw new PropertyNotPresentException("The optional attributes property is not present in this package.");
        if (!(packageAttributes.contains(packageAttribute)))
            // NOTE(review): "Canoot" -> "Cannot" typo in this message; left unchanged here.
            throw new ObjectNotFoundException("Canoot remove the given attribute from the list of attributes of this package as it is not currently contained.");

        StrongReferenceVector.remove(packageAttributes, packageAttribute);
    }

    @MediaProperty(uuid1 = 0x03020102, uuid2 = (short) 0x0c00, uuid3 = (short) 0x0000,
            uuid4 = {0x06, 0x0e, 0x2b, 0x34, 0x01, 0x01, 0x01, 0x02},
            definedName = "PackageUserComments",
            aliases = { "UserComments", "MobUserComments" },
            typeName = "TaggedValueStrongReferenceVector",
            optional = true,
            uniqueIdentifier = false,
            pid = 0x4406,
            symbol = "PackageUserComments")
    public List<TaggedValue> getPackageUserComments()
            throws PropertyNotPresentException {

        // An empty backing list represents an absent optional property.
        if (packageUserComments.size() == 0)
            throw new PropertyNotPresentException("The optional user comments property is not present in this package.");

        return StrongReferenceVector.getOptionalList(packageUserComments);
    }

    /** Creates a UTF-16 string tagged value from the category/comment pair and appends it. */
    public void appendPackageUserComment(
            String category,
            String comment)
        throws NullPointerException {

        if (category == null)
            // NOTE(review): "will a" -> "with a" typo in this message; left unchanged here.
            throw new NullPointerException("Cannot create and append a new user comment for this package will a null category name value.");
        if (comment == null)
            throw new NullPointerException("Cannot create and append a new user comment for this package with a null comment value.");

        TaggedValue taggedValue = new TaggedValueImpl(
                category,
                TypeDefinitions.UTF16String,
                comment);
        // appendNoClone: the freshly created value is owned here, so no defensive copy.
        StrongReferenceVector.appendNoClone(packageUserComments, taggedValue);
    }

    @MediaListAppend("PackageUserComments")
    public void appendPackageUserComment(
            TaggedValue packageUserComment)
        throws NullPointerException {

        if (packageUserComment == null)
            throw new NullPointerException("Cannot append a new user comment with a null tagged value to this package.");

        StrongReferenceVector.append(packageUserComments, packageUserComment);
    }

    @MediaListPrepend("PackageUserComments")
    public void prependPackageUserComment(
            TaggedValue comment)
        throws NullPointerException {

        if (comment == null)
            throw new NullPointerException("Cannot prepend a new user comment with a null tagged value to this package.");

        StrongReferenceVector.prepend(packageUserComments, comment);
    }

    @MediaPropertyCount("PackageUserComments")
    public int countPackageUserComments() {

        return packageUserComments.size();
    }

    // Clearing replaces the backing list, returning the optional property to "not present".
    @MediaPropertyClear("PackageUserComments")
    public void clearPackageUserComments() {

        packageUserComments = Collections.synchronizedList(new Vector<TaggedValue>());
    }

    @MediaPropertyRemove("PackageUserComments")
    public void removePackageUserComment(
            TaggedValue packageUserComment)
        throws NullPointerException,
            PropertyNotPresentException,
            ObjectNotFoundException {

        if (packageUserComment == null)
            throw new NullPointerException("Cannot remove the given comment from the user comments of this package as it is null.");
        if (packageUserComments.size() == 0)
            throw new PropertyNotPresentException("The optional user comments property is not present in this package.");
        if (!(packageUserComments.contains(packageUserComment)))
            throw new ObjectNotFoundException("Cannot remove the given comment for the user comments of this package as it is not currently contained.");

        StrongReferenceVector.remove(packageUserComments, packageUserComment);
    }

    @MediaProperty(uuid1 = 0x03010210, uuid2 = (short) 0x0300, uuid3 = (short) 0x0000,
            uuid4 = {0x06, 0x0e, 0x2b, 0x34, 0x01, 0x01, 0x01, 0x02},
            definedName = "PackageKLVData",
            aliases = { "MobKLVData", "KLVData" },
            typeName = "KLVDataStrongReferenceVector",
            optional = true,
            uniqueIdentifier = false,
            pid = 0x4407,
            symbol = "PackageKLVData")
    public List<KLVData> getPackageKLVData()
            throws PropertyNotPresentException {

        // An empty backing list represents an absent optional property.
        if (packageKLVData.size() == 0)
            throw new PropertyNotPresentException("No KLV data values are present in this package.");

        return StrongReferenceVector.getOptionalList(packageKLVData);
    }

    @MediaListAppend("PackageKLVData")
    public void appendPackageKLVData(
            KLVData packageKLVdata)
        throws NullPointerException {

        if (packageKLVdata == null)
            throw new NullPointerException("Cannot append a null klv data item to the list of klv data items of this package.");

        StrongReferenceVector.append(this.packageKLVData, packageKLVdata);
    }

    @MediaListPrepend("PackageKLVData")
    public void prependPackageKLVData(
            KLVData packageKLVdata)
        throws NullPointerException {

        if (packageKLVdata == null)
            throw new NullPointerException("Cannot prepend a null klv data item to the list of klv data items of this package.");

        StrongReferenceVector.prepend(this.packageKLVData, packageKLVdata);
    }

    @MediaPropertyCount("PackageKLVData")
    public int countPackageKLVData() {

        return packageKLVData.size();
    }

    // Clearing replaces the backing list, returning the optional property to "not present".
    @MediaPropertyClear("PackageKLVData")
    public void clearPackageKLVData() {

        packageKLVData = Collections.synchronizedList(new Vector<KLVData>());
    }

    @MediaPropertyRemove("PackageKLVData")
    public void removePackageKLVData(
            KLVData packageKLVdata)
        throws NullPointerException,
            ObjectNotFoundException {

        if (packageKLVdata == null)
            throw new NullPointerException("Cannot remove a null value from the list of klv data items of this package.");
        if (!(packageKLVData.contains(packageKLVdata)))
            throw new ObjectNotFoundException("Cannot remove the given klv data item from the list of klv data items of this package as it is not currently contained.");

        StrongReferenceVector.remove(this.packageKLVData, packageKLVdata);
    }

    @MediaProperty(uuid1 = 0x06010104, uuid2 =
            (short) 0x0605, uuid3 = (short) 0x0000,
            uuid4 = {0x06, 0x0e, 0x2b, 0x34, 0x01, 0x01, 0x01, 0x02},
            definedName = "PackageTracks",
            aliases = { "Slots", "MobSlots" },
            typeName = "TrackStrongReferenceVector",
            optional = false,
            uniqueIdentifier = false,
            pid = 0x4403,
            symbol = "PackageTracks")
    public List<Track> getPackageTracks() {

        return StrongReferenceVector.getRequiredList(packageTracks);
    }

    /**
     * Supplies the default value for the required PackageTracks property: a single
     * placeholder timeline track (filler with "Unknown" data definition, 1/1 edit rate,
     * origin 0). The list mutators clear this placeholder when real tracks arrive.
     */
    public final static List<Track> initializePackageTracks() {

        List<Track> initialTracks = new ArrayList<Track>(1);
        Segment filler = new FillerImpl(DataDefinitionImpl.forName("Unknown"), 0l);
        TimelineTrack firstTrack = new TimelineTrackImpl(1, filler, new RationalImpl(1, 1), 0l);
        initialTracks.add(firstTrack);
        return initialTracks;
    }

    /** Creates an event track from the given parts, names it, and appends it to this package. */
    public EventTrackImpl appendNewEventTrack(
            tv.amwa.maj.record.Rational editRate,
            tv.amwa.maj.model.Segment segment,
            int trackID,
            String trackName) // Removed origin property as not required for event tracks.
        throws NullPointerException,
            IllegalArgumentException,
            TrackExistsException {

        EventTrackImpl createdTrack = new EventTrackImpl(trackID, segment, editRate);
        createdTrack.setTrackName(trackName);
        // Delegates duplicate-track-id checking to appendPackageTrack.
        appendPackageTrack(createdTrack);

        return createdTrack;
    }

    /** Creates a static track from the given parts, names it, and appends it to this package. */
    public StaticTrackImpl appendNewStaticTrack(
            tv.amwa.maj.model.Segment segment,
            int trackID,
            String trackName)
        throws NullPointerException,
            IllegalArgumentException,
            TrackExistsException {

        StaticTrackImpl createdTrack = new StaticTrackImpl(trackID, segment);
        createdTrack.setTrackName(trackName);
        // Delegates duplicate-track-id checking to appendPackageTrack.
        appendPackageTrack(createdTrack);

        return createdTrack;
    }

    /** Creates a timeline track from the given parts, names it, and appends it to this package. */
    public TimelineTrackImpl appendNewTimelineTrack(
            tv.amwa.maj.record.Rational editRate,
            tv.amwa.maj.model.Segment segment,
            @TrackID int trackID,
            String trackName,
            long origin)
        throws NullPointerException,
            IllegalArgumentException,
            TrackExistsException {

        TimelineTrackImpl createdTrack = new TimelineTrackImpl(trackID, segment, editRate, origin);
        createdTrack.setTrackName(trackName);
        // Delegates duplicate-track-id checking to appendPackageTrack.
        appendPackageTrack(createdTrack);

        return createdTrack;
    }
@MediaListAppend("PackageTracks") public void appendPackageTrack( Track track) throws NullPointerException, TrackExistsException { if (track == null) throw new NullPointerException("Cannot append a null track to the list of tracks for this package."); // Clear the default value out of the way if ((packageTracks.size() == 1) && (packageTracks.get(0) instanceof TimelineTrackImpl) && (((TimelineTrack) packageTracks.get(0)).getDataDefinition().equals(DataDefinitionImpl.forAUID(DataDefinitionConstant.Unknown))) && (((TimelineTrack) packageTracks.get(0)).getEditRate().equals(new RationalImpl(1, 1)))) clearPackageTracks(); int givenTrackID = track.getTrackID(); for ( Track trackItem : packageTracks ) if (trackItem.getTrackID() == givenTrackID) throw new TrackExistsException("A track with track id " + givenTrackID + " is already contained in this package."); StrongReferenceVector.append(packageTracks, track); } @MediaPropertyCount("PackageTracks") public int countPackageTracks() { return packageTracks.size(); } @MediaListGetAt("PackageTracks") public Track getPackageTrackAt( int index) throws IndexOutOfBoundsException { return StrongReferenceVector.getAt(packageTracks, index); } @MediaListInsertAt("PackageTracks") public void insertPackageTrackAt( int index, Track track) throws NullPointerException, IndexOutOfBoundsException, TrackExistsException { if (track == null) throw new NullPointerException("Cannot insert a null-valued track into the list of tracks of this package."); // Clear the default value out of the way if ((packageTracks.size() == 1) && (packageTracks.get(0) instanceof TimelineTrackImpl) && (((TimelineTrack) packageTracks.get(0)).getDataDefinition().equals(DataDefinitionImpl.forAUID(DataDefinitionConstant.Unknown))) && (((TimelineTrack) packageTracks.get(0)).getEditRate().equals(new RationalImpl(1, 1)))) clearPackageTracks(); int givenTrackId = track.getTrackID(); for ( Track trackItem : packageTracks ) if (trackItem.getTrackID() == givenTrackId) throw new 
TrackExistsException("The given track id of " + givenTrackId + " is already used to identify a track contained in this package."); StrongReferenceVector.insert(packageTracks, index, track); } public Track lookupPackageTrack( int trackID) throws TrackNotFoundException { if (trackID < 0) throw new TrackNotFoundException("Cannot lookup a track with a negative track number from the list of tracks of this package."); for ( Track track : packageTracks) { if (track.getTrackID() == trackID) return track; } throw new TrackNotFoundException("Could not find a track with the given track id in the list of tracks of this package."); } @MediaListPrepend("PackageTracks") public void prependPackageTrack( Track track) throws NullPointerException, TrackExistsException { if (track == null) throw new NullPointerException("Cannot prepend a null track to the list of tracks for this package."); // Clear the default value out of the way if ((packageTracks.size() == 1) && (packageTracks.get(0) instanceof TimelineTrackImpl) && (((TimelineTrack) packageTracks.get(0)).getDataDefinition().equals(DataDefinitionImpl.forAUID(DataDefinitionConstant.Unknown))) && (((TimelineTrack) packageTracks.get(0)).getEditRate().equals(new RationalImpl(1, 1)))) clearPackageTracks(); int givenTrackId = track.getTrackID(); for ( Track trackItem : packageTracks ) if (trackItem.getTrackID() == givenTrackId) throw new TrackExistsException("The given track id of " + givenTrackId + " is already used to identify a track contained in this package."); StrongReferenceVector.prepend(packageTracks, track); } @MediaListRemoveAt("PackageTracks") public void removePackageTrackAt( int index) throws IndexOutOfBoundsException { StrongReferenceVector.remove(packageTracks, index); } @MediaPropertyClear("PackageTracks") public void clearPackageTracks() { packageTracks = Collections.synchronizedList(new Vector<Track>()); } @MediaProperty(uuid1 = 0x07020110, uuid2 = (short) 0x0103, uuid3 = (short) 0x0000, uuid4 = {0x06, 0x0e, 0x2b, 
0x34, 0x01, 0x01, 0x01, 0x02}, definedName = "CreationTime", aliases = { "CreateTime", "CreationDate" }, typeName = "TimeStamp", optional = false, uniqueIdentifier = false, pid = 0x4405, symbol = "CreationTime") public TimeStamp getCreationTime() { return creationTime.clone(); } @MediaPropertySetter("CreationTime") public void setCreationTime( TimeStamp createTime) throws NullPointerException { if (createTime == null) throw new NullPointerException("Cannot set the required creation time property of this package with a null value."); this.creationTime = createTime.clone(); } public final static TimeStamp initializeCreationTime() { return new TimeStampImpl(); } @MediaProperty(uuid1 = 0x01011510, uuid2 = (short) 0x0000, uuid3 = (short) 0x0000, uuid4 = {0x06, 0x0e, 0x2b, 0x34, 0x01, 0x01, 0x01, 0x01}, definedName = "PackageID", aliases = { "MobID" }, typeName = "PackageIDType", optional = false, uniqueIdentifier = true, pid = 0x4401, symbol = "PackageID") public PackageID getPackageID() { return packageID.clone(); } @MediaPropertySetter("PackageID") public void setPackageID( PackageID packageID) throws NullPointerException { if (packageID == null) throw new NullPointerException("Canoot set the package id of this package using a null value."); this.packageID = packageID.clone(); // mobCache.put(this.mobId, this); } public final static PackageID initializePackageID() { return PackageIDImpl.umidFactory( tv.amwa.maj.enumeration.MaterialType.NotIdentified, tv.amwa.maj.record.MaterialNumberGeneration.UUID_UL, tv.amwa.maj.record.InstanceNumberGeneration.NotDefined, AUIDImpl.randomAUID().getAUIDValue()); } @MediaProperty(uuid1 = 0x07020110, uuid2 = (short) 0x0205, uuid3 = (short) 0x0000, uuid4 = {0x06, 0x0e, 0x2b, 0x34, 0x01, 0x01, 0x01, 0x02}, definedName = "PackageLastModified", aliases = { "LastModified", "MobLastModified", "ModificationDate" }, typeName = "TimeStamp", optional = false, uniqueIdentifier = false, pid = 0x4404, symbol = "PackageLastModified") public TimeStamp 
getPackageLastModified() { return packageLastModified.clone(); } @MediaPropertySetter("PackageLastModified") public void setPackageLastModified( tv.amwa.maj.record.TimeStamp modTime) throws NullPointerException { if (modTime == null) throw new NullPointerException("Cannot set the last modificaiton value of this package with a null value."); this.packageLastModified = modTime.clone(); } public final static TimeStamp initializePackageLastModified() { return new TimeStampImpl(); } @MediaProperty(uuid1 = 0x01030302, uuid2 = (short) 0x0100, uuid3 = (short) 0x0000, uuid4 = {0x06, 0x0e, 0x2b, 0x34, 0x01, 0x01, 0x01, 0x01}, definedName = "PackageName", aliases = { "Name", "MobName" }, typeName = "UTF16String", optional = true, uniqueIdentifier = false, pid = 0x4402, symbol = "PackageName") public String getPackageName() throws PropertyNotPresentException { if (packageName == null) throw new PropertyNotPresentException("The optional name property is not present in this package."); return packageName; } @MediaPropertySetter("PackageName") public void setPackageName( String packageName) { this.packageName = packageName; } @MediaProperty(uuid1 = 0x05010108, uuid2 = (short) 0x0000, uuid3 = (short) 0x0000, uuid4 = {0x06, 0x0e, 0x2b, 0x34, 0x01, 0x01, 0x01, 0x07}, definedName = "PackageUsage", aliases = { "UsageCode" }, typeName = "UsageType", optional = true, uniqueIdentifier = false, pid = 0x4408, symbol = "PackageUsage") public AUID getPackageUsage() throws PropertyNotPresentException { if (packageUsage == null) throw new PropertyNotPresentException("The optional package usage property is not present in this package."); return packageUsage; } @MediaPropertySetter("PackageUsage") public void setPackageUsage( tv.amwa.maj.record.AUID packageUsage) { if (packageUsage == null) this.packageUsage = null; else this.packageUsage = packageUsage.clone(); } public TimecodeValueImpl offsetToPackageTimecode( tv.amwa.maj.model.Segment tcSeg, long offset) throws NullPointerException, 
TimecodeNotFoundException { // TODO Auto-generated method stub return null; } public void changeReference( tv.amwa.maj.record.PackageID oldPackageID, tv.amwa.maj.record.PackageID newPackageID) throws NullPointerException { // TODO Auto-generated method stub } public tv.amwa.maj.model.Package cloneExternal( Depend resolveDependencies, IncludedMedia includedMedia, tv.amwa.maj.model.AAFFile file) { // TODO Auto-generated method stub return null; } public tv.amwa.maj.model.Package copy( String destMobName) throws NullPointerException { // TODO Auto-generated method stub return null; } void addPhysicalSourceReference( AppendOption addType, tv.amwa.maj.record.Rational editRate, int trackId, tv.amwa.maj.model.DataDefinition essenceKind, tv.amwa.maj.union.SourceReferenceValue reference, long sourceReferenceLength) throws NullPointerException, InvalidDataDefinitionException, BadLengthException, TrackExistsException { SourceClip sourceClip = new SourceClipImpl(essenceKind, sourceReferenceLength, reference); Track track = null; try { track = lookupPackageTrack(trackId); } catch (TrackNotFoundException snfe) { /* Use mobSlot == null to represent slot not found. */ } if (track != null) { switch (addType) { case ForceOverwrite: track.setTrackSegment(sourceClip); break; case Append: Segment trackSegment = track.getTrackSegment(); Sequence sequence = trackSegment.generateSequence(); try { sequence.appendComponentObject(sourceClip); } catch (LeadingTransitionException e) { // Appending a segment } catch (EventSemanticsException e) { // Physical source reference, so not an event } catch (BadPropertyException e) { // This is unlikely but could happen, so leave the stack trace in e.printStackTrace(); } catch (BadLengthException e) { // Very unlikely, but leave stack trace just in case. 
e.printStackTrace(); } catch (AdjacentTransitionException e) { // Appending a segment } catch (InsufficientTransitionMaterialException e) { // Previous element is not a transition } track.setTrackSegment(sequence); // TODO this line is missing in ImplAAFMob.cpp break; default: // No other options for enumeration. break; } } else { appendNewTimelineTrack(editRate, sourceClip, trackId, null, 0l); } } // // @SuppressWarnings("unused") // protected List<tv.amwa.maj.iface.FindSourceInformation> internalSearchSource( // int trackID, // long offset, // PackageKind mobKind, // CriteriaType mediaCriteria, // OperationChoice operationChoice) // throws NullPointerException, // InvalidPackageTypeException, // TraversalNotPossibleException { // // List<tv.amwa.maj.iface.FindSourceInformation> sourceList = // new Vector<tv.amwa.maj.iface.FindSourceInformation>(); // // Track track = null; // try { // track = lookupPackageTrack(trackID); // FoundSegment foundSegment = track.findSegment(offset); // long componentLength = foundSegment.rootObject.getComponentLength(); // FindSourceInformation sourceInfo = new FindSourceInformation( // null, 0, 0l, null, 0l, foundSegment.rootObject); // // MobFindLeafArguments arguments = new MobFindLeafArguments( // track, // mediaCriteria, // operationChoice, // foundSegment.rootObject, // offset, // componentLength, // null, // null, // null, // 0l); // // // TODO implementation halted until its relevance has been determined // } // catch (Exception e) { } // // // return null; // } // // /** TODO implementation, comments and test // * // * <p></p> // * // * // */ // static class ScopeStack { // // // TODO does this actually exist? Cannot find it in the C version. 
//	}
//
//	/** TODO implementation, comments and test
//	 *
//	 * <p></p>
//	 *
//	 *
//	 */
//	static class MobFindLeafArguments
//		implements Cloneable {
//
//		Track track;
//		CriteriaType mediaCriteria;
//		OperationChoice operationChoice;
//		ComponentImpl rootObject;
//		@PositionType long rootPosition;
//		@LengthType long rootLength;
//		ComponentImpl previousObject;
//		ComponentImpl nextObject;
//		ScopeStack scopeStack;
//		@PositionType long currentObjectPosition;
//		ComponentImpl foundObject = null;
//		@LengthType long minimumLength = 0l;
//		boolean foundTransition = false;
//		OperationGroup effectObject = null;
//		@Int32 int nestDepth = 0;
//		@PositionType long differencePosition = 0l;
//		PackageImpl mob = null;
//
//		MobFindLeafArguments(
//				Track track,
//				CriteriaType mediaCriteria,
//				OperationChoice operationChoice,
//				ComponentImpl rootObject,
//				@PositionType long rootPosition,
//				@LengthType long rootLength,
//				ComponentImpl previousObject,
//				ComponentImpl nextObject,
//				ScopeStack scopeStack,
//				@PositionType long currentObjectPosition) {
//
//			this.track = track;
//			this.mediaCriteria = mediaCriteria;
//			this.operationChoice = operationChoice;
//			this.rootObject = rootObject;
//			this.rootPosition = rootPosition;
//			this.rootLength = rootLength;
//			this.previousObject = previousObject;
//			this.nextObject = nextObject;
//			this.scopeStack = scopeStack;
//			this.currentObjectPosition = currentObjectPosition;
//		}
//
//		public MobFindLeafArguments clone() {
//
//			try {
//				return (MobFindLeafArguments) super.clone();
//			}
//			catch (CloneNotSupportedException cnse) {
//				// Implements cloneable so should not happen
//				cnse.printStackTrace();
//				return null;
//			}
//		}
//	}
//
//	void mobFindLeaf(
//			MobFindLeafArguments arguments)
//		throws TraversalNotPossibleException {
//
//		arguments.mob = this;
//
//		arguments.rootObject.getMinimumBounds(arguments);
//	}

// Covariant clone; the superclass clone is assumed to deep-copy as required
// by this class hierarchy — TODO confirm against the superclass contract.
public Package clone() {

	return (Package) super.clone();
}

/* public final void forget() {

	mobCache.remove(this.mobId);
} */

// AVID extension properties - start

// Optional Avid-specific flag; null means the property is not present.
private Boolean convertFrameRate = null;

@MediaProperty(uuid1 = 0xd4243bd4, uuid2 = (short) 0x0142, uuid3 = (short) 0x4595,
		uuid4 = { (byte) 0xa8, (byte) 0xf3, (byte) 0xf2, (byte) 0xeb, (byte) 0xa5, (byte) 0x42, (byte) 0x44, (byte) 0xde },
		definedName = "ConvertFrameRate", typeName = "Boolean", optional = true,
		uniqueIdentifier = false, pid = 0, symbol = "ConvertFrameRate",
		namespace = AvidConstants.AVID_NAMESPACE, prefix = AvidConstants.AVID_PREFIX)
// Returns the optional Avid convert-frame-rate flag.
public boolean getConvertFrameRate()
	throws PropertyNotPresentException {

	if (convertFrameRate == null)
		throw new PropertyNotPresentException("The optional convert frame rate property is not present for this package.");
	return convertFrameRate;
}

// Sets the optional flag; passing null omits the property.
@MediaPropertySetter("ConvertFrameRate")
public void setConvertFrameRate(
		Boolean convertFrameRate) {

	this.convertFrameRate = convertFrameRate;
}

// Optional Avid attribute list; an empty vector means "not present".
private List<TaggedValue> mobAttributeList =
		Collections.synchronizedList(new Vector<TaggedValue>());

@MediaProperty(uuid1 = 0x60958183, uuid2 = (short) 0x47b1, uuid3 = (short) 0x11d4,
		uuid4 = { (byte) 0xa0, 0x1c, 0x00, 0x04, (byte) 0xac, (byte) 0x96, (byte) 0x9f, 0x50 },
		definedName = "MobAttributeList", typeName = "TaggedValueStrongReferenceVector",
		optional = true, uniqueIdentifier = false, pid = 0, symbol = "MobAttributeList",
		namespace = AvidConstants.AVID_NAMESPACE, prefix = AvidConstants.AVID_PREFIX)
// Returns the optional Avid mob attribute list of this package.
public List<TaggedValue> getMobAttributeList()
	throws PropertyNotPresentException {

	if (mobAttributeList.size() == 0)
		throw new PropertyNotPresentException("The optional mob attribute list property is not present for this package.");
	return StrongReferenceVector.getOptionalList(mobAttributeList);
}

// Appends a tagged value to the end of the mob attribute list.
@MediaListAppend("MobAttributeList")
public void appendMobAttributeItem(
		TaggedValue mobAttributeItem)
	throws NullPointerException {

	if (mobAttributeItem == null)
		throw new NullPointerException("Cannot append to the mob attribute list of this package using a null value.");
	StrongReferenceVector.append(mobAttributeList, mobAttributeItem);
}

// Prepends a tagged value to the front of the mob attribute list.
@MediaListPrepend("MobAttributeList")
public void prependMobAttributeItem(
		TaggedValue mobAttributeItem)
	throws NullPointerException {

	if (mobAttributeItem == null)
		throw new NullPointerException("Cannot prepend to the mob attribute list of this package using a null value.");
	StrongReferenceVector.prepend(mobAttributeList, mobAttributeItem);
}

// Inserts a tagged value at the given index of the mob attribute list.
@MediaListInsertAt("MobAttributeList")
public void insertMobAttributeItem(
		int index,
		TaggedValue mobAttributeItem)
	throws NullPointerException,
		IndexOutOfBoundsException {

	if (mobAttributeItem == null)
		throw new NullPointerException("Cannot insert into the mob attribute list of this package using a null value.");
	StrongReferenceVector.insert(mobAttributeList, index, mobAttributeItem);
}

// Number of tagged values currently in the mob attribute list.
@MediaPropertyCount("MobAttributeList")
public int countMobAttributeList() {

	return mobAttributeList.size();
}

// Empties the mob attribute list in place.
// NOTE(review): other clear methods in this class replace the backing vector
// instead (see clearPackageKLVData) — confirm whether in-place clear is intended.
@MediaPropertyClear("MobAttributeList")
public void clearMobAttributeList() {

	mobAttributeList.clear();
}

// Returns the tagged value at the given index of the mob attribute list.
@MediaListGetAt("MobAttributeList")
public TaggedValue getMobAttributeItemAt(
		int index)
	throws IndexOutOfBoundsException {

	return StrongReferenceVector.getAt(mobAttributeList, index);
}

// Removes the tagged value at the given index of the mob attribute list.
@MediaListRemoveAt("MobAttributeList")
public void removeMobAttributeItemAt(
		int index)
	throws IndexOutOfBoundsException {

	StrongReferenceVector.remove(mobAttributeList, index);
}

// Optional Avid subclip full length; null means the property is not present.
private @Int64 Long subclipFullLength = null;

// NOTE(review): symbol is "SubclipFullLength1" with a trailing "1", unlike the
// defined name — confirm this is deliberate and not a generation artefact.
@MediaProperty(uuid1 = 0x1262bf7b, uuid2 = (short) 0xfce2, uuid3 = (short) 0x4dfe,
		uuid4 = { (byte) 0xa0, (byte) 0xf6, (byte) 0xce, (byte) 0xec, 0x04, 0x7c, (byte) 0x80, (byte) 0xaa },
		definedName = "SubclipFullLength", typeName = "Int64", optional = true,
		uniqueIdentifier = false, pid = 0, symbol = "SubclipFullLength1",
		namespace = AvidConstants.AVID_NAMESPACE, prefix = AvidConstants.AVID_PREFIX)
// Returns the optional Avid subclip full length.
public @Int64 long getSubclipFullLength()
	throws PropertyNotPresentException {

	if (subclipFullLength == null)
		throw new PropertyNotPresentException("The optional subclip full length property is not present for this package.");
	return subclipFullLength;
}

// Sets the optional subclip full length; passing null omits the property.
@MediaPropertySetter("SubclipFullLength")
public void setSubclipFullLength(
		@Int64 Long subclipFullLength) {

	this.subclipFullLength = subclipFullLength;
}

// Optional Avid subclip begin position; null means the property is not present.
private @Int64 Long subclipBegin = null;

@MediaProperty(uuid1 = 0xaa24b657, uuid2 = (short) 0xfcbb, uuid3 = (short) 0x4921,
		uuid4 = { (byte) 0x95, 0x1d, 0x3a, 0x20, 0x38, 0x39, 0x67, 0x22 },
		definedName = "SubclipBegin", typeName = "Int64", optional = true,
		uniqueIdentifier = false, pid = 0, symbol = "SubclipBegin",
		namespace = AvidConstants.AVID_NAMESPACE, prefix = AvidConstants.AVID_PREFIX)
// Returns the optional Avid subclip begin position.
public @Int64 long getSubclipBegin()
	throws PropertyNotPresentException {

	if (subclipBegin == null)
		throw new PropertyNotPresentException("The optional subclip begin property is not present for this package.");
	return subclipBegin;
}

// Sets the optional subclip begin position; passing null omits the property.
@MediaPropertySetter("SubclipBegin")
public void setSubclipBegin(
		@Int64 Long subclipBegin) {

	this.subclipBegin = subclipBegin;
}

// AVID extension properties - end

// String (persistent-form) accessors below are presumably used by the
// persistence layer — TODO confirm against the ORM/serialisation mappings.

public String getPackageUsageString() {

	return AUIDImpl.toPersistentForm(packageUsage);
}

public void setPackageUsageString(
		String packageUsage) {

	this.packageUsage = AUIDImpl.fromPersistentForm(packageUsage);
}

public String getPackageLastModifiedString() {

	return TimeStampImpl.toPersistentForm(packageLastModified);
}

public void setPackageLastModifiedString(
		String packageLastModified) {

	this.packageLastModified = TimeStampImpl.fromPersistentForm(packageLastModified);
}

public String getCreationTimeString() {

	return TimeStampImpl.toPersistentForm(creationTime);
}

public void setCreationTimeString(
		String creationTime) {

	this.creationTime = TimeStampImpl.fromPersistentForm(creationTime);
}

public String getPackageIDString() {

	return PackageIDImpl.toPersistentForm(packageID);
}

public void setPackageIDString(
		String packageID) {

	this.packageID = PackageIDImpl.fromPersistentForm(packageID);
}

// Constant tag used when this package is the target of a weak reference.
public String getWeakTargetReference(){

	return "PackageWeakReference";
}

/** TODO
 * <p>Is this the right AUID?</p>
 */
public AUID getAUID() {

	return this.packageID.getMaterial();
}

}
/* * Copyright (c) Lightstreamer Srl * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.lightstreamer.adapters.remote.metadata; import java.util.HashMap; import java.util.Map; import java.util.StringTokenizer; import com.lightstreamer.adapters.remote.AccessException; import com.lightstreamer.adapters.remote.CreditsException; import com.lightstreamer.adapters.remote.ItemsException; import com.lightstreamer.adapters.remote.MetadataProviderAdapter; import com.lightstreamer.adapters.remote.MetadataProviderException; import com.lightstreamer.adapters.remote.SchemaException; /** * Simple full implementation of a Metadata Adapter, made available * in Lightstreamer SDK. <BR> * * The class handles Item List specifications, a special case of Item Group name * formed by simply concatenating the names of the Items contained in a List * in a space separated way. Similarly, the class * handles Field List specifications, a special case of Field Schema name * formed by concatenating the names of the contained Fields. <BR> * * The resource levels are assigned the same for all Items and Users, * according with values that can be supplied together with adapter * configuration, inside the "metadata_provider" element that defines the * Adapter. 
<BR> * The return of the getAllowedMaxBandwidth method can be supplied in a * "max_bandwidth" parameter; the return of the getAllowedMaxItemFrequency * method can be supplied in a "max_frequency" parameter; the return of the * getAllowedBufferSize method can be supplied in a "buffer_size" parameter; * the return of the getDistinctSnapshotLength method can be supplied * in a "distinct_snapshot_length" parameter. All resource limits not supplied * are granted as unlimited, but for distinct_snapshot_length, which defaults as 10. <BR> * There are no access restrictions, but an optional User name check is * performed if a comma separated list of User names is supplied in an * "allowed_users" parameter. */ public class LiteralBasedProvider extends MetadataProviderAdapter { private String [] _allowedUsers; private double _maxBandwidth; private double _maxFrequency; private int _bufferSize; private int _distinctSnapshotLength; /** * Void constructor required by the Remote Server. */ public LiteralBasedProvider() { } /** * Reads configuration settings for user and resource constraints. * If some setting is missing, the corresponding constraint is not set. * * @param parameters Can contain the configuration settings. * @param configFile Not used. * @throws MetadataProviderException in case of configuration errors. 
*/ @Override public void init(Map<String,String> parameters, String configFile) throws MetadataProviderException { if (parameters == null) { parameters = new HashMap<String,String>(); } String users = parameters.get("allowed_users"); if (users != null) { _allowedUsers = mySplit(users, ","); } try { String mb = parameters.get("max_bandwidth"); if (mb != null) { _maxBandwidth = Double.parseDouble(mb); } String mf = parameters.get("max_frequency"); if (mf != null) { _maxFrequency = Double.parseDouble(mf); } String bs = parameters.get("buffer_size"); if (bs != null) { _bufferSize = Integer.parseInt(bs); } String dsl = parameters.get("distinct_snapshot_length"); if (dsl != null) { _distinctSnapshotLength = Integer.parseInt(dsl); } else { _distinctSnapshotLength = 10; } } catch(NumberFormatException nfe) { throw new MetadataProviderException("Configuration error: " + nfe.getMessage()); } } /** * Resolves an Item List specification supplied in a Request. The names of the Items * in the List are returned. * <BR>Item List Specifications are expected to be formed by simply concatenating the names * of the contained Items, in a space separated way. * * @param user A User name. * @param sessionID A Session ID. Not used. * @param itemList An Item List specification. * @return An array with the names of the Items in the List. */ @Override public String[] getItems(String user, String sessionID, String itemList) { return mySplit(itemList, " "); } /** * Resolves a Field List specification supplied in a Request. The names of the Fields * in the List are returned. * <BR>Field List specifications are expected to be formed by simply concatenating the names * of the contained Fields, in a space separated way. * * @param user A User name. Not used. * @param sessionID A Session ID. Not used. * @param itemList The specification of the Item List whose Items the Field List * is to be applied to. * @param fieldList A Field List specification. 
* @return An array with the names of the Fields in the List. */ @Override public String[] getSchema(String user, String sessionID, String itemList, String fieldList) { return mySplit(fieldList, " "); } /** * Checks if a user is enabled to make Requests to the related Data * Providers. * If a list of User names has been configured, this list is checked. * Otherwise, any User name is allowed. No password check is performed. * * @param user A User name. * @param password An optional password. Not used. * @param httpHeaders A Map that contains a name-value pair for each * header found in the HTTP request that originated the call. Not used. * @throws AccessException if a list of User names has been configured * and the supplied name does not belong to the list. */ @Override public void notifyUser(String user, String password, Map<String,String> httpHeaders) throws AccessException { if (!checkUser(user)) { throw new AccessException("Unauthorized user"); } } /** * Returns the bandwidth level to be allowed to a User for a push Session. * * @param user A User name. Not used. * @return The bandwidth, in Kbit/sec, as supplied in the Metadata * Adapter configuration. */ @Override public double getAllowedMaxBandwidth(String user) { return _maxBandwidth; } /** * Returns the ItemUpdate frequency to be allowed to a User for a specific * Item. * * @param user A User name. Not used. * @param item An Item Name. Not used. * @return The allowed Update frequency, in Updates/sec, as supplied * in the Metadata Adapter configuration. */ @Override public double getAllowedMaxItemFrequency(String user, String item) { return _maxFrequency; } /** * Returns the size of the buffer internally used to enqueue subsequent * ItemUpdates for the same Item. * * @param user A User name. Not used. * @param item An Item Name. Not used. * @return The allowed buffer size, as supplied in the Metadata Adapter * configuration. 
*/ @Override public int getAllowedBufferSize(String user, String item) { return _bufferSize; } /** * Returns the maximum allowed length for a Snapshot of any Item that * has been requested with publishing Mode DISTINCT. * * @param item An Item Name. * @return The maximum allowed length for the Snapshot, as supplied * in the Metadata Adapter configuration. In case no value has been * supplied, a default value of 10 events is returned, which is thought * to be enough to satisfy typical Client requests. */ @Override public int getDistinctSnapshotLength(String item) { return _distinctSnapshotLength; } // //////////////////////////////////////////////////////////////////////// // Internal methods private boolean checkUser(String user) { if ((_allowedUsers == null) || (_allowedUsers.length == 0)) { return true; } if (user == null) { return false; } for (int i = 0; i < _allowedUsers.length; i++) { if (_allowedUsers[i] == null) { continue; } if (_allowedUsers[i].equals(user)) { return true; } } return false; } private String[] mySplit(String arg, String separator) { if (arg.indexOf(separator) < 0) { String[] results = new String[1]; results[0] = arg; return results; } StringTokenizer tokenizer = new StringTokenizer(arg, separator); String[] results = new String[tokenizer.countTokens()]; for (int i = 0; tokenizer.hasMoreTokens(); i++) { results[i] = tokenizer.nextToken(); } return results; } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.servicediscovery.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/servicediscovery-2017-03-14/CreateHttpNamespace" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CreateHttpNamespaceRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The name that you want to assign to this namespace. * </p> */ private String name; /** * <p> * A unique string that identifies the request and that allows failed <code>CreateHttpNamespace</code> requests to * be retried without the risk of running the operation twice. <code>CreatorRequestId</code> can be any unique * string (for example, a date/time stamp). * </p> */ private String creatorRequestId; /** * <p> * A description for the namespace. * </p> */ private String description; /** * <p> * The tags to add to the namespace. Each tag consists of a key and an optional value that you define. Tags keys can * be up to 128 characters in length, and tag values can be up to 256 characters in length. * </p> */ private java.util.List<Tag> tags; /** * <p> * The name that you want to assign to this namespace. * </p> * * @param name * The name that you want to assign to this namespace. 
*/ public void setName(String name) { this.name = name; } /** * <p> * The name that you want to assign to this namespace. * </p> * * @return The name that you want to assign to this namespace. */ public String getName() { return this.name; } /** * <p> * The name that you want to assign to this namespace. * </p> * * @param name * The name that you want to assign to this namespace. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateHttpNamespaceRequest withName(String name) { setName(name); return this; } /** * <p> * A unique string that identifies the request and that allows failed <code>CreateHttpNamespace</code> requests to * be retried without the risk of running the operation twice. <code>CreatorRequestId</code> can be any unique * string (for example, a date/time stamp). * </p> * * @param creatorRequestId * A unique string that identifies the request and that allows failed <code>CreateHttpNamespace</code> * requests to be retried without the risk of running the operation twice. <code>CreatorRequestId</code> can * be any unique string (for example, a date/time stamp). */ public void setCreatorRequestId(String creatorRequestId) { this.creatorRequestId = creatorRequestId; } /** * <p> * A unique string that identifies the request and that allows failed <code>CreateHttpNamespace</code> requests to * be retried without the risk of running the operation twice. <code>CreatorRequestId</code> can be any unique * string (for example, a date/time stamp). * </p> * * @return A unique string that identifies the request and that allows failed <code>CreateHttpNamespace</code> * requests to be retried without the risk of running the operation twice. <code>CreatorRequestId</code> can * be any unique string (for example, a date/time stamp). 
*/
    public String getCreatorRequestId() {
        return this.creatorRequestId;
    }

    /**
     * Sets the idempotency token and returns this request for chaining. <code>CreatorRequestId</code> allows failed
     * <code>CreateHttpNamespace</code> requests to be retried without the risk of running the operation twice; it can
     * be any unique string (for example, a date/time stamp).
     *
     * @param creatorRequestId
     *        A unique string that identifies the request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateHttpNamespaceRequest withCreatorRequestId(String creatorRequestId) {
        setCreatorRequestId(creatorRequestId);
        return this;
    }

    /**
     * Sets a description for the namespace.
     *
     * @param description
     *        A description for the namespace.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Returns the description for the namespace.
     *
     * @return A description for the namespace.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Sets a description for the namespace and returns this request for chaining.
     *
     * @param description
     *        A description for the namespace.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateHttpNamespaceRequest withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * Returns the tags to add to the namespace. Each tag consists of a key and an optional value that you define.
     * Tag keys can be up to 128 characters in length, and tag values can be up to 256 characters in length.
     *
     * @return The tags to add to the namespace.
     */
    public java.util.List<Tag> getTags() {
        return tags;
    }

    /**
     * Sets the tags to add to the namespace, replacing any existing values. A defensive copy of the given collection
     * is stored; passing {@code null} clears the tags.
     *
     * @param tags
     *        The tags to add to the namespace.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }
        this.tags = new java.util.ArrayList<Tag>(tags);
    }

    /**
     * Appends the given tags to the existing list (if any). Use {@link #setTags(java.util.Collection)} or
     * {@link #withTags(java.util.Collection)} if you want to override the existing values.
     *
     * @param tags
     *        The tags to add to the namespace.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateHttpNamespaceRequest withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new java.util.ArrayList<Tag>(tags.length));
        }
        for (Tag ele : tags) {
            this.tags.add(ele);
        }
        return this;
    }

    /**
     * Sets the tags to add to the namespace (replacing existing values) and returns this request for chaining.
     *
     * @param tags
     *        The tags to add to the namespace.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateHttpNamespaceRequest withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getCreatorRequestId() != null)
            sb.append("CreatorRequestId: ").append(getCreatorRequestId()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getTags() != null)
            sb.append("Tags: ").append(getTags());
        sb.append("}");
        return sb.toString();
    }

    // Generated-style structural equality: two requests are equal when all four
    // fields (Name, CreatorRequestId, Description, Tags) are pairwise equal.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof CreateHttpNamespaceRequest == false)
            return false;
        CreateHttpNamespaceRequest other = (CreateHttpNamespaceRequest) obj;
        if (other.getName() == null ^ this.getName() == null)
            return false;
        if (other.getName() != null && other.getName().equals(this.getName()) == false)
            return false;
        if (other.getCreatorRequestId() == null ^ this.getCreatorRequestId() == null)
            return false;
        if (other.getCreatorRequestId() != null && other.getCreatorRequestId().equals(this.getCreatorRequestId()) == false)
            return false;
        if (other.getDescription() == null ^ this.getDescription() == null)
            return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
            return false;
        if (other.getTags() == null ^ this.getTags() == null)
            return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
            return false;
        return true;
    }

    // Hash code over the same four fields used by equals(), keeping the contract.
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
        hashCode = prime * hashCode + ((getCreatorRequestId() == null) ? 0 : getCreatorRequestId().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public CreateHttpNamespaceRequest clone() {
        return (CreateHttpNamespaceRequest) super.clone();
    }

}
package me.cococow123.vote;

import java.io.File;

import me.cococow123.vote.JsonMessage.HoverAction;

import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Player;

/**
 * Executor for the /mvote command and its sub-commands:
 * <ul>
 *   <li>/mvote help                          - show the command help</li>
 *   <li>/mvote end &lt;id&gt; &lt;dd/MM/yyyy&gt; &lt;hh:mm(AM|PM)&gt; - set a poll's end date</li>
 *   <li>/mvote tp &lt;id&gt;                  - teleport to a poll's location</li>
 *   <li>/mvote q &lt;id&gt; &lt;question...&gt; - set a poll's question</li>
 * </ul>
 */
public class Commands implements CommandExecutor {

    @SuppressWarnings("unused")
    private Main main;

    public Commands(Main main) {
        this.main = main;
    }

    //----------------------------- user-facing messages
    final String pre = ChatColor.DARK_GRAY + "[" + ChatColor.GOLD + "mVote" + ChatColor.DARK_GRAY + "] ";
    final String invalid = pre + ChatColor.RED + "Invalid Arguments!";
    final String noperms = pre + ChatColor.RED + "You don't have permission to do that!";
    final String console = pre + ChatColor.RED + "You must be a player to run this command.";
    final String help = pre + ChatColor.RED + "Type /mvote help for help!";
    final String files = pre + ChatColor.RED + "There is no vote poll with that ID.";
    final String noid = pre + ChatColor.RED + "There isn't a vote poll with that ID.";
    //------------------------------

    /** Returns the YAML file backing the poll with the given id. */
    private File pollFile(String id) {
        return new File("plugins" + File.separator + "mVote" + File.separator + "votes"
                + File.separator + id + File.separator + id + ".yml");
    }

    /** Returns the directory of the poll with the given id. */
    private File pollDir(String id) {
        return new File("plugins" + File.separator + "mVote" + File.separator + "votes" + File.separator + id);
    }

    /**
     * Persists the configuration to disk, reporting a failure to the player.
     * BUGFIX: the original called set(...) without save(...), so every change
     * (end-date, question) was silently discarded when the config was reloaded.
     */
    private void save(FileConfiguration cfg, File file, Player p) {
        try {
            cfg.save(file);
        } catch (java.io.IOException e) {
            p.sendMessage(pre + ChatColor.RED + "Could not save the vote poll file.");
        }
    }

    public boolean onCommand(CommandSender sender, Command command, String alias, String[] args) {
        if (!command.getName().equalsIgnoreCase("mvote")) {
            return true;
        }
        //-------------------------------
        if (!(sender instanceof Player)) { // CONSOLE
            sender.sendMessage(console);
            return true;
        }
        Player p = (Player) sender;

        // BUGFIX: the original read args[0] (and deeper indices) without any
        // length check, throwing ArrayIndexOutOfBoundsException on bare "/mvote".
        if (args.length == 0) {
            p.sendMessage(invalid);
            p.sendMessage(help);
            return true;
        }

        //----------------------------- HELP
        if (args[0].equalsIgnoreCase("help")) {
            if (!p.hasPermission("mvote.command.help")) {
                p.sendMessage(noperms);
                return true;
            }
            JsonMessage.sendJson(p, JsonMessage.newJson()
                    .add(ChatColor.GREEN + "Commands")
                    .hoverEvent(HoverAction.Show_Text, ChatColor.GOLD + "COMMANDS HOVER")
                    .build()
                    .build()
            );
            p.sendMessage(ChatColor.GRAY + ChatColor.ITALIC.toString() + "(Hover over for more info)");
            return true;
        }

        //----------------------------- END
        if (args[0].equalsIgnoreCase("end")) {
            if (!p.hasPermission("mvote.command.end")) {
                p.sendMessage(noperms);
                return true;
            }
            // BUGFIX: the original tested args.length == 3 but then read args[3],
            // guaranteeing an ArrayIndexOutOfBoundsException on a "valid" call.
            // /mvote end <id> <dd/MM/yyyy> <hh:mm(AM|PM)> has four arguments.
            if (args.length != 4 || !args[2].contains("/") || !args[3].contains(":")) {
                p.sendMessage(invalid);
                p.sendMessage(pre + ChatColor.DARK_RED + "Incorrect Date Format. Should be 'dd/MM/yyyy hh:mm(PM OR AM)'");
                p.sendMessage(help);
                return true;
            }
            String id = args[1];
            if (!pollDir(id).exists()) {
                p.sendMessage(noid);
                return true;
            }
            File file = pollFile(id);
            FileConfiguration check = YamlConfiguration.loadConfiguration(file);
            check.set("end-date", args[2] + " " + args[3]);
            save(check, file, p);
            return true;
        }

        //----------------------------- TELEPORT
        if (args[0].equalsIgnoreCase("tp")) {
            if (!p.hasPermission("mvote.command.teleport")) {
                p.sendMessage(noperms);
                return true;
            }
            if (args.length != 2) {
                p.sendMessage(invalid);
                p.sendMessage(pre + ChatColor.DARK_RED + "Missing ID");
                p.sendMessage(help);
                return true;
            }
            String id = args[1];
            if (!pollDir(id).exists()) {
                p.sendMessage(noid);
                return true;
            }
            File file = pollFile(id);
            FileConfiguration check2 = YamlConfiguration.loadConfiguration(file);
            double x = check2.getDouble("x");
            double y = check2.getDouble("y");
            double z = check2.getDouble("z");
            World w = Bukkit.getServer().getWorld(check2.getString("world"));
            p.teleport(new Location(w, x, y, z));
            return true;
        }

        //----------------------------- QUESTION
        if (args[0].equalsIgnoreCase("q")) {
            // BUGFIX: the original wrote "if (p.hasPermission(...));" and
            // "if (args.length >= 3);" — the trailing semicolons made both
            // checks empty statements, so neither actually guarded anything.
            if (!p.hasPermission("mvote.command.question")) {
                p.sendMessage(noperms);
                return true;
            }
            if (args.length < 3) {
                p.sendMessage(invalid);
                p.sendMessage(help);
                return true;
            }
            String id = args[1];
            if (!pollDir(id).exists()) {
                p.sendMessage(noid);
                return true;
            }
            StringBuilder sb = new StringBuilder();
            for (int i = 2; i < args.length; i++) {
                sb.append(args[i]).append(" ");
            }
            String question = sb.toString().trim();
            File file = pollFile(id);
            FileConfiguration q = YamlConfiguration.loadConfiguration(file);
            p.chat("/ss set 4 &2" + question);
            p.sendMessage(pre + ChatColor.GREEN + "Break the vote info sign that matches the ID:'" + ChatColor.GRAY + id + "'");
            q.set("question", question);
            save(q, file, p);
            return true;
        }

        //----------------------------- unknown sub-command
        p.sendMessage(invalid);
        p.sendMessage(help);
        return true;
    }
}
package net.lecousin.framework.core.test.collections;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;

import org.junit.Assert;
import org.junit.Test;

import net.lecousin.framework.core.test.LCCoreAbstractTest;

/**
 * Generic contract test for {@link Collection} implementations holding Long values.
 * Subclasses provide the collection under test via {@link #createLongCollection()}
 * and may opt out of retainAll support via {@link #supportRetainAll()}.
 */
public abstract class TestCollection extends LCCoreAbstractTest {

	/** Creates a fresh, empty instance of the collection implementation under test. */
	public abstract Collection<Long> createLongCollection();

	/** Override and return false when the implementation does not support retainAll. */
	public boolean supportRetainAll() {
		return true;
	}

	@Test
	public void testCollection() {
		Collection<Long> c = createLongCollection();
		testCollectionEmpty(c);

		// add 1 element
		c.add(Long.valueOf(1));
		checkCollection(c, 1);
		// remove non present element
		Assert.assertFalse(c.remove(Long.valueOf(0)));
		checkCollection(c, 1);
		// remove element
		Assert.assertTrue(c.remove(Long.valueOf(1)));
		testCollectionEmpty(c);

		// add 3 elements
		Assert.assertTrue(c.add(Long.valueOf(10)));
		checkCollection(c, 10);
		Assert.assertTrue(c.add(Long.valueOf(11)));
		checkCollection(c, 10, 11);
		Assert.assertTrue(c.add(Long.valueOf(12)));
		checkCollection(c, 10, 11, 12);
		// remove non present element
		Assert.assertFalse(c.remove(Long.valueOf(111)));
		checkCollection(c, 10, 11, 12);
		// remove 1 element
		Assert.assertTrue(c.remove(Long.valueOf(11)));
		checkCollection(c, 10, 12);

		ArrayList<Long> arr = new ArrayList<>(10);
		arr.add(Long.valueOf(13));
		arr.add(Long.valueOf(14));
		arr.add(Long.valueOf(15));
		// test addAll
		c.addAll(arr);
		checkCollection(c, 10, 12, 13, 14, 15);
		// addAll with an empty collection must be a no-op
		arr.clear();
		c.addAll(arr);
		checkCollection(c, 10, 12, 13, 14, 15);
		arr.add(Long.valueOf(21));
		arr.add(Long.valueOf(22));
		arr.add(Long.valueOf(23));
		arr.add(Long.valueOf(24));
		arr.add(Long.valueOf(25));
		arr.add(Long.valueOf(26));
		arr.add(Long.valueOf(27));
		arr.add(Long.valueOf(28));
		arr.add(Long.valueOf(29));
		c.addAll(arr);
		checkCollection(c, 10, 12, 13, 14, 15, 21, 22, 23, 24, 25, 26, 27, 28, 29);

		// test removeAll
		arr.clear();
		arr.add(Long.valueOf(12));
		arr.add(Long.valueOf(14));
		c.removeAll(arr);
		checkCollection(c, 10, 13, 15, 21, 22, 23, 24, 25, 26, 27, 28, 29);
		// removeAll where some values are already absent
		arr.add(Long.valueOf(12));
		arr.add(Long.valueOf(14));
		arr.add(Long.valueOf(22));
		arr.add(Long.valueOf(24));
		c.removeAll(arr);
		checkCollection(c, 10, 13, 15, 21, 23, 25, 26, 27, 28, 29);

		// remove
		Assert.assertTrue(c.remove(Long.valueOf(25)));
		Assert.assertFalse(c.remove(Long.valueOf(35)));
		Assert.assertTrue(c.remove(Long.valueOf(10)));
		Assert.assertTrue(c.remove(Long.valueOf(29)));
		Assert.assertFalse(c.remove(Long.valueOf(10)));
		checkCollection(c, 13, 15, 21, 23, 26, 27, 28);

		// test retainAll
		arr.clear();
		arr.add(Long.valueOf(10));
		arr.add(Long.valueOf(15));
		arr.add(Long.valueOf(16));
		arr.add(Long.valueOf(26));
		if (supportRetainAll()) {
			c.retainAll(arr);
			checkCollection(c, 15, 26);
		} else {
			assertException(() -> {
				c.retainAll(arr);
			}, UnsupportedOperationException.class);
			// reach the same state manually so the rest of the test is identical
			c.remove(Long.valueOf(13));
			c.remove(Long.valueOf(21));
			c.remove(Long.valueOf(23));
			c.remove(Long.valueOf(27));
			c.remove(Long.valueOf(28));
			checkCollection(c, 15, 26);
		}

		// test clear
		c.clear();
		testCollectionEmpty(c);
		// clear on an already-empty collection must be a no-op
		c.clear();
		testCollectionEmpty(c);

		// test to add many elements
		for (long i = 1000; i < 15000; ++i)
			Assert.assertTrue(c.add(Long.valueOf(i)));
		Assert.assertTrue(c.size() == 14000);
		for (long i = 1000; i < 15000; ++i)
			Assert.assertTrue(c.contains(Long.valueOf(i)));
		// test to remove half of elements (the even ones)
		for (long i = 1000; i < 15000; i += 2)
			Assert.assertTrue(c.remove(Long.valueOf(i)));
		Assert.assertTrue(c.size() == 7000);
		for (long i = 1000; i < 15000; ++i)
			if ((i % 2) == 0)
				Assert.assertFalse(c.contains(Long.valueOf(i)));
			else
				Assert.assertTrue(c.contains(Long.valueOf(i)));
		// test to remove elements from 10000 to 13000 (1500 elements)
		for (long i = 10000; i < 13000; ++i)
			if ((i % 2) == 0)
				Assert.assertFalse(c.remove(Long.valueOf(i)));
			else
				Assert.assertTrue(c.remove(Long.valueOf(i)));
		Assert.assertTrue(c.size() == 5500);
		// test to remove elements from 14000 to 15000 (500 elements)
		for (long i = 14000; i < 15000; ++i)
			if ((i % 2) == 0)
				Assert.assertFalse(c.remove(Long.valueOf(i)));
			else
				Assert.assertTrue(c.remove(Long.valueOf(i)));
		Assert.assertTrue(c.size() == 5000);

		// containsAll against a snapshot taken through the iterator
		arr.clear();
		for (Long e : c)
			arr.add(e);
		Assert.assertTrue(arr.size() == 5000);
		Assert.assertTrue(c.containsAll(arr));
		arr.add(Long.valueOf(0));
		Assert.assertFalse(c.containsAll(arr));
		arr.remove(Long.valueOf(0));
		for (long i = 1000; i < 2000; ++i)
			arr.remove(Long.valueOf(i));
		Assert.assertTrue(c.containsAll(arr));

		// retain only between 1000 and 2000, half of them are already removed
		if (supportRetainAll()) {
			arr.clear();
			for (long i = 1000; i < 2000; ++i)
				arr.add(Long.valueOf(i));
			Assert.assertTrue(c.retainAll(arr));
			Assert.assertTrue(c.size() == 500);
			for (long i = 1000; i < 2000; ++i)
				if ((i % 2) == 0)
					Assert.assertFalse(c.contains(Long.valueOf(i)));
				else
					Assert.assertTrue(c.contains(Long.valueOf(i)));
		}
	}

	/** Asserts every observable behavior of an empty collection. */
	protected static void testCollectionEmpty(Collection<Long> c) {
		Assert.assertTrue(c.isEmpty());
		// BUGFIX: JUnit's assertEquals takes (expected, actual); the original
		// passed them swapped, producing misleading failure messages.
		Assert.assertEquals(0, c.size());
		Assert.assertFalse(c.iterator().hasNext());
		assertException(() -> {
			c.iterator().next();
		}, NoSuchElementException.class);
		Assert.assertFalse(c.remove(Long.valueOf(1)));
		Assert.assertFalse(c.removeAll(Arrays.asList(Long.valueOf(1), Long.valueOf(2))));
		Assert.assertFalse(c.contains(Long.valueOf(1)));
		Assert.assertFalse(c.containsAll(Arrays.asList(Long.valueOf(1), Long.valueOf(2))));
		Assert.assertArrayEquals(c.toArray(), new Long[0]);
		Assert.assertArrayEquals(c.toArray(new Long[0]), new Long[0]);
		// toArray(T[]) on an empty collection must return the given array untouched
		Assert.assertArrayEquals(
			c.toArray(new Long[] { Long.valueOf(23), Long.valueOf(45) }),
			new Long[] { Long.valueOf(23), Long.valueOf(45) });
	}

	/** Asserts that c contains exactly the given values (as a multiset, order-independent). */
	protected static void checkCollection(Collection<Long> c, long... values) {
		// size
		Assert.assertFalse(c.isEmpty());
		Assert.assertEquals(values.length, c.size());
		// contains
		for (int i = 0; i < values.length; ++i)
			Assert.assertTrue(c.contains(Long.valueOf(values[i])));
		Assert.assertFalse(c.contains(Long.valueOf(123456789)));
		// iterator
		Iterator<Long> it = c.iterator();
		boolean[] found = new boolean[values.length];
		for (int i = 0; i < values.length; ++i) {
			Assert.assertTrue(it.hasNext());
			markFound(found, values, it.next().longValue(), "the iterator");
		}
		assertAllFound(found, values, "was never returned by the iterator");
		Assert.assertFalse(it.hasNext());
		assertException(() -> {
			it.next();
		}, NoSuchElementException.class);
		// to array
		checkToArray(c.toArray(), values, values.length);
		// toArray too small: the collection must allocate a bigger array
		checkToArray(c.toArray(new Long[values.length - 1]), values, values.length);
		// toArray bigger: the collection must reuse the given array
		checkToArray(c.toArray(new Long[values.length + 10]), values, values.length + 10);
	}

	/**
	 * Asserts the array has the expected length and that its first values.length
	 * entries are exactly the expected values (order-independent, multiset semantics).
	 * Extracted: the original duplicated this matching loop four times.
	 */
	private static void checkToArray(Object[] a, long[] values, int expectedLength) {
		Assert.assertEquals(expectedLength, a.length);
		boolean[] found = new boolean[values.length];
		for (int i = 0; i < values.length; ++i) {
			Assert.assertTrue(a[i] instanceof Long);
			markFound(found, values, ((Long) a[i]).longValue(), "toArray");
		}
		assertAllFound(found, values, "was not in the result of toArray");
	}

	/** Marks the first not-yet-found occurrence of val in values, or fails. */
	private static void markFound(boolean[] found, long[] values, long val, String source) {
		for (int j = 0; j < found.length; ++j) {
			if (found[j]) continue;
			if (values[j] == val) {
				found[j] = true;
				return;
			}
		}
		throw new AssertionError("Value " + val + " returned by " + source + " is not expected in the collection");
	}

	/** Fails if any expected value was never matched. */
	private static void assertAllFound(boolean[] found, long[] values, String failureSuffix) {
		for (int i = 0; i < found.length; ++i)
			if (!found[i])
				throw new AssertionError("Value " + values[i] + " " + failureSuffix);
	}
}
/*
 * This file is part of dependency-check-core.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2017 Steve Springett. All Rights Reserved.
 */
package org.owasp.dependencycheck.data.nsp;

import java.util.Arrays;

/**
 * The response from NSP check API will respond with 0 or more advisories. This
 * class defines the Advisory objects returned.
 *
 * @author Steve Springett
 */
public class Advisory {

    /** The unique ID of the advisory as issued by Node Security Platform. */
    private int id;
    /** The timestamp of the last update to the advisory. */
    private String updatedAt;
    /** The timestamp of which the advisory was created. */
    private String createdAt;
    /** The timestamp of when the advisory was published. */
    private String publishDate;
    /** A detailed description of the advisory. */
    private String overview;
    /** Recommendations for mitigation. Typically involves updating to a newer release. */
    private String recommendation;
    /** The CVSS vector used to calculate the score. */
    private String cvssVector;
    /** The CVSS score. */
    private float cvssScore;
    /** The name of the Node module the advisory is for. */
    private String module;
    /** The version of the Node module the advisory is for. */
    private String version;
    /** A string representation of the versions containing the vulnerability. */
    private String vulnerableVersions;
    /** A string representation of the versions that have been patched. */
    private String patchedVersions;
    /** The title/name of the advisory. */
    private String title;
    /**
     * The linear dependency path that lead to this module. [0] is the root with
     * each subsequent array member leading up to the final (this) module.
     */
    private String[] path;
    /** The URL to the advisory. */
    private String advisory;

    /**
     * Returns the unique ID of the advisory as issued by Node Security Platform.
     *
     * @return a unique ID
     */
    public int getId() {
        return id;
    }

    /**
     * Sets the unique ID of the advisory as issued by Node Security Platform.
     *
     * @param id a unique ID
     */
    public void setId(int id) {
        this.id = id;
    }

    /**
     * Returns the timestamp of the last update to the advisory.
     *
     * @return a timestamp
     */
    public String getUpdatedAt() {
        return updatedAt;
    }

    /**
     * Sets the timestamp of the last update to the advisory.
     *
     * @param updatedAt a timestamp
     */
    public void setUpdatedAt(String updatedAt) {
        this.updatedAt = updatedAt;
    }

    /**
     * Returns the timestamp of which the advisory was created.
     *
     * @return a timestamp
     */
    public String getCreatedAt() {
        return createdAt;
    }

    /**
     * Sets the timestamp of which the advisory was created.
     *
     * @param createdAt a timestamp
     */
    public void setCreatedAt(String createdAt) {
        this.createdAt = createdAt;
    }

    /**
     * Returns the timestamp of when the advisory was published.
     *
     * @return a timestamp
     */
    public String getPublishDate() {
        return publishDate;
    }

    /**
     * Sets the timestamp of when the advisory was published.
     *
     * @param publishDate a timestamp
     */
    public void setPublishDate(String publishDate) {
        this.publishDate = publishDate;
    }

    /**
     * Returns a detailed description of the advisory.
     *
     * @return the overview
     */
    public String getOverview() {
        return overview;
    }

    /**
     * Sets the detailed description of the advisory.
     *
     * @param overview the overview
     */
    public void setOverview(String overview) {
        this.overview = overview;
    }

    /**
     * Returns recommendations for mitigation. Typically involves updating to a
     * newer release.
     *
     * @return recommendations
     */
    public String getRecommendation() {
        return recommendation;
    }

    /**
     * Sets recommendations for mitigation. Typically involves updating to a
     * newer release.
     *
     * @param recommendation recommendations
     */
    public void setRecommendation(String recommendation) {
        this.recommendation = recommendation;
    }

    /**
     * Returns the CVSS vector used to calculate the score.
     *
     * @return the CVSS vector
     */
    public String getCvssVector() {
        return cvssVector;
    }

    /**
     * Sets the CVSS vector used to calculate the score.
     *
     * @param cvssVector the CVSS vector
     */
    public void setCvssVector(String cvssVector) {
        this.cvssVector = cvssVector;
    }

    /**
     * Returns the CVSS score.
     *
     * @return the CVSS score
     */
    public float getCvssScore() {
        return cvssScore;
    }

    /**
     * Sets the CVSS score.
     *
     * @param cvssScore the CVSS score
     */
    public void setCvssScore(float cvssScore) {
        this.cvssScore = cvssScore;
    }

    /**
     * Returns the name of the Node module the advisory is for.
     *
     * @return the name of the module
     */
    public String getModule() {
        return module;
    }

    /**
     * Sets the name of the Node module the advisory is for.
     *
     * @param module the name of the module
     */
    public void setModule(String module) {
        this.module = module;
    }

    /**
     * Returns the version of the Node module the advisory is for.
     *
     * @return the module version
     */
    public String getVersion() {
        return version;
    }

    /**
     * Sets the version of the Node module the advisory is for.
     *
     * @param version the module version
     */
    public void setVersion(String version) {
        this.version = version;
    }

    /**
     * Returns a string representation of the versions containing the
     * vulnerability.
     *
     * @return the affected versions
     */
    public String getVulnerableVersions() {
        return vulnerableVersions;
    }

    /**
     * Sets the string representation of the versions containing the
     * vulnerability.
     *
     * @param vulnerableVersions the affected versions
     */
    public void setVulnerableVersions(String vulnerableVersions) {
        this.vulnerableVersions = vulnerableVersions;
    }

    /**
     * Returns a string representation of the versions that have been patched.
     *
     * @return the patched versions
     */
    public String getPatchedVersions() {
        return patchedVersions;
    }

    /**
     * Sets the string representation of the versions that have been patched.
     *
     * @param patchedVersions the patched versions
     */
    public void setPatchedVersions(String patchedVersions) {
        this.patchedVersions = patchedVersions;
    }

    /**
     * Returns the title/name of the advisory.
     *
     * @return the title/name of the advisory
     */
    public String getTitle() {
        return title;
    }

    /**
     * Sets the title/name of the advisory.
     *
     * @param title the title/name of the advisory
     */
    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * Returns the linear dependency path that lead to this module. A defensive
     * copy is returned so callers cannot mutate internal state.
     *
     * @return the dependency path
     */
    public String[] getPath() {
        if (path == null) {
            return null;
        }
        return Arrays.copyOf(path, path.length);
    }

    /**
     * Sets the linear dependency path that lead to this module. A defensive
     * copy of the given array is stored.
     *
     * @param path the dependency path
     */
    public void setPath(String[] path) {
        if (path == null) {
            this.path = null;
        } else {
            this.path = Arrays.copyOf(path, path.length);
        }
    }

    /**
     * Returns the URL to the advisory.
     *
     * @return the advisory URL
     */
    public String getAdvisory() {
        return advisory;
    }

    /**
     * Sets the URL to the advisory.
     *
     * @param advisory the advisory URL
     */
    public void setAdvisory(String advisory) {
        this.advisory = advisory;
    }
}
/*
 * Copyright (c) 2015, GoMint, BlackyPaw and geNAZt
 *
 * This code is licensed under the BSD license found in the
 * LICENSE file in the root directory of this source tree.
 */

package io.gomint.server.plugin;

import com.google.common.base.Preconditions;
import io.gomint.command.Command;
import io.gomint.command.CommandSender;
import io.gomint.event.*;
import io.gomint.event.EventListener;
import io.gomint.plugin.*;
import io.gomint.server.GoMintServer;
import io.gomint.server.command.CommandManager;
import io.gomint.server.event.EventManager;
import io.gomint.server.scheduler.CoreScheduler;
import io.gomint.server.util.CallerDetectorUtil;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.EnumMemberValue;
import javassist.bytecode.annotation.IntegerMemberValue;
import javassist.bytecode.annotation.StringMemberValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.*;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

/**
 * An implementation of {@link PluginManager}. Plugin lifecycle is handled in
 * three phases: detection (scan the plugins folder), installation (load and
 * construct the plugin classes) and startup (invoke the startup hooks).
 *
 * @author Fabian
 * @author Digot
 * @version 1.0
 */
public class SimplePluginManager implements PluginManager {

    private final Logger logger = LoggerFactory.getLogger( SimplePluginManager.class );

    private final CoreScheduler scheduler;
    private final File pluginsFolder;
    private final EventManager eventManager;
    private final CommandManager commandManager;

    private final Set<Plugin> installedPlugins;
    private final Set<PluginMeta> detectedPlugins;

    public SimplePluginManager( GoMintServer server ) {
        this.scheduler = new CoreScheduler( server.getExecutorService(), server.getSyncTaskManager() );
        this.installedPlugins = new HashSet<>();
        this.detectedPlugins = new LinkedHashSet<>();
        this.pluginsFolder = new File( "plugins/" ); //Can be customizable in the future using command line args or config
        this.eventManager = new EventManager();
        this.commandManager = new CommandManager();
    }

    @Override
    public void disablePlugin( Plugin plugin ) {
        // Check for security: only the plugin itself may trigger its own disable
        if ( !CallerDetectorUtil.getCallerClassName( 2 ).equals( plugin.getClass().getName() ) ) {
            throw new SecurityException( "Plugins can only disable themselves" );
        }
    }

    @Override
    public String getBaseDirectory() {
        return this.pluginsFolder.getAbsolutePath();
    }

    @Override
    public boolean isPluginInstalled( String pluginName ) {
        return this.installedPlugins.stream().anyMatch( plugin -> plugin.getName().equalsIgnoreCase( pluginName ) );
    }

    /**
     * Runs the full plugin lifecycle: detection, installation and startup.
     * Only the GoMint server bootstrap is allowed to call this.
     */
    public void loadPlugins() {
        //Only GoMint Server should be allowed to call this
        if ( !CallerDetectorUtil.getCallerClassName( 1 ).equals( GoMintServer.class.getName() ) ) {
            throw new SecurityException( "The plugins can only be loaded by the GoMint Server class" );
        }

        // STEP 1: DETECTION - Scan the plugins directory
        if ( !this.scanForPlugins() ) return;

        // STEP 2: INSTALLATION - Load the classes of the plugins and construct them
        this.installPlugins();

        // STEP 3: STARTUP - Invoke the startup hook on all plugins
        this.enablePlugins();
    }

    /** Invokes the startup hook on every installed plugin, isolating per-plugin failures. */
    private void enablePlugins() {
        for ( Plugin installedPlugin : this.installedPlugins ) {
            try {
                installedPlugin.onStartup(); //Not sure if onInstall would be called here
                this.logger.info( "Enabled {}", installedPlugin );
            } catch ( Exception e ) {
                // BUGFIX: was string concatenation + e.printStackTrace(); the stack
                // trace bypassed the logging framework. Pass the exception to SLF4J.
                this.logger.error( "Failed to enable {}!", installedPlugin.getName(), e );
            }
        }
    }

    /** Loads, constructs and installs every detected plugin, ordered by startup priority. */
    private void installPlugins() {
        //Sort the plugins so plugins with a lower startup priority are loaded sooner.
        List<PluginMeta> pluginMetaList = new ArrayList<>( this.detectedPlugins );
        pluginMetaList.sort( Comparator.comparing( PluginMeta::getPriority ) );

        for ( PluginMeta detectedPlugin : pluginMetaList ) {
            try {
                //Load the main class
                ClassLoader classLoader = new PluginClassLoader( new URL[]{ detectedPlugin.getPluginFile().toURI().toURL() } );
                Class<?> pluginClass = classLoader.loadClass( detectedPlugin.getMainClass() );

                //Retrieve the constructor
                Constructor<?> constructor = pluginClass.getConstructor();
                constructor.setAccessible( true );

                //Initialize the instance
                Plugin plugin = (Plugin) constructor.newInstance();
                plugin.loadMetaData( detectedPlugin );
                plugin.setPluginManager( this );

                //Invoke the install hook
                plugin.onInstall(); //Not sure if onStartup would be called here

                //Add to installed plugins
                this.installedPlugins.add( plugin );
            } catch ( Exception e ) {
                // BUGFIX: the stack trace was discarded (only e.getMessage() logged);
                // pass the exception so the cause of the install failure is visible.
                this.logger.error( "Failed to install plugin {}", detectedPlugin.getName(), e );
            }
        }

        //We don't need the detected plugin collection anymore. Clear it.
        this.detectedPlugins.clear();
    }

    /**
     * Scans the plugin folder (and creates it if it doesn't exist) for plugins
     *
     * @return if any plugins were found
     */
    private boolean scanForPlugins() {
        //Check if the plugin directory exists
        if ( !this.pluginsFolder.exists() ) {
            //It doesn't exist - create it
            if ( !this.pluginsFolder.mkdir() ) {
                this.logger.error( "Couldn't create plugins folder. Check your file permissions!" );
                return false;
            }
        }

        //Retrieve a list of all files inside the plugins folder
        File[] files = this.pluginsFolder.listFiles();
        if ( files == null || files.length == 0 ) {
            //Do nothing
            return false;
        }

        //Start scanning
        for ( File file : files ) {
            if ( file.getName().toLowerCase().endsWith( ".jar" ) ) {
                try ( JarFile jarFile = new JarFile( file ) ) {
                    //Lets find the main class of the plugin
                    ClassFile mainClassFile = null;
                    File mainFile = null;

                    //Retrieve all elements in the plugin jar file
                    Enumeration<JarEntry> entryEnumeration = jarFile.entries();
                    while ( entryEnumeration.hasMoreElements() ) {
                        JarEntry jarEntry = entryEnumeration.nextElement();

                        //Check if its a class file
                        if ( jarEntry.getName().toLowerCase().endsWith( ".class" ) ) {
                            ClassFile classFile = new ClassFile( new DataInputStream( jarFile.getInputStream( jarEntry ) ) );
                            // NOTE(review): this only detects classes whose DIRECT
                            // superclass is Plugin; deeper hierarchies are missed.
                            if ( classFile.getSuperclass().equals( Plugin.class.getName() ) ) {
                                if ( mainClassFile == null ) {
                                    //Set it as the main class for now
                                    mainClassFile = classFile;
                                    mainFile = file;
                                } else {
                                    //There are more than one main class
                                    throw new IllegalStateException( "Multiple plugin main classes found in " + file.getName() );
                                }
                            }
                        }
                    }

                    //Check if the main class was found
                    if ( mainClassFile == null ) {
                        throw new IllegalStateException( "No class that extends Plugin found in " + file.getName() );
                    }

                    //We have the main class now - let's try to load metadata
                    AnnotationsAttribute annotations = (AnnotationsAttribute) mainClassFile.getAttribute( AnnotationsAttribute.visibleTag );

                    String pluginName = null;
                    PluginVersion pluginVersion = null;
                    StartupPriority pluginStartupPriority = StartupPriority.LOAD; //Default to load

                    // BUGFIX: getAttribute() returns null when the class carries no
                    // runtime-visible annotations; the original dereferenced it blindly
                    // and threw a NullPointerException instead of a useful error.
                    if ( annotations != null ) {
                        for ( Annotation annotation : annotations.getAnnotations() ) {
                            switch ( annotation.getTypeName() ) {
                                case "io.gomint.plugin.Name":
                                    pluginName = ( (StringMemberValue) annotation.getMemberValue( "value" ) ).getValue();
                                    break;
                                case "io.gomint.plugin.Version":
                                    int major = ( (IntegerMemberValue) annotation.getMemberValue( "major" ) ).getValue();
                                    int minor = ( (IntegerMemberValue) annotation.getMemberValue( "minor" ) ).getValue();
                                    pluginVersion = new PluginVersion( major, minor );
                                    break;
                                case "io.gomint.plugin.Startup":
                                    pluginStartupPriority = StartupPriority.valueOf( ( (EnumMemberValue) annotation.getMemberValue( "value" ) ).getValue() );
                                    break;
                            }
                        }
                    }

                    //Validate
                    if ( pluginName == null || pluginVersion == null ) {
                        this.logger.error( "Couldn't find the name or the version of " + file.getName() + ". Are the annotations missing?" );
                        return false;
                    }

                    //Fill plugin meta
                    PluginMeta pluginMeta = new PluginMeta( pluginName, pluginVersion, pluginStartupPriority, mainClassFile.getName(), mainFile );

                    //Done! Add to detected plugins
                    this.detectedPlugins.add( pluginMeta );
                } catch ( IOException e ) {
                    // BUGFIX: was e.printStackTrace(); route through the logger with context.
                    this.logger.error( "Failed to scan plugin file {}", file.getName(), e );
                }
            }
        }
        return true;
    }

    @Override
    public void registerListener( EventListener eventListener ) {
        Preconditions.checkArgument( eventListener != null, "Given EventListener is null" );
        this.eventManager.registerListener( eventListener );
    }

    @Override
    public void registerCommand( Command command ) {
        Preconditions.checkArgument( command != null, "Command is null" );
        this.commandManager.registerCommand( command );
    }

    @Override
    public void callEvent( Event event ) {
        Preconditions.checkArgument( event != null, "Given event is null" );
        this.eventManager.triggerEvent( event );
    }

    @Override
    public void executeCommand( CommandSender sender, String name, String... args ) {
        this.commandManager.executeCommand( sender, name, args );
    }

    @Override
    public void executeCommand( CommandSender sender, String name ) {
        this.commandManager.executeCommand( sender, name, null );
    }
}
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test performance improvement of joined scanners optimization:
 * https://issues.apache.org/jira/browse/HBASE-5416
 */
@Category({RegionServerTests.class, LargeTests.class})
public class TestJoinedScanners {
  static final Log LOG = LogFactory.getLog(TestJoinedScanners.class);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final String DIR = TEST_UTIL.getDataTestDir("TestJoinedScanners").toString();

  // Essential family carries the small flag column; joined family carries the wide value.
  private static final byte[] cf_essential = Bytes.toBytes("essential");
  private static final byte[] cf_joined = Bytes.toBytes("joined");
  private static final byte[] col_name = Bytes.toBytes("a");
  private static final byte[] flag_yes = Bytes.toBytes("Y");
  private static final byte[] flag_no = Bytes.toBytes("N");

  // Defaults; overridable from the command line in main().
  private static DataBlockEncoding blockEncoding = DataBlockEncoding.FAST_DIFF;
  private static int selectionRatio = 30;
  private static int valueWidth = 128 * 1024;

  /**
   * Loads a table with a small "essential" flag column and a wide "joined" value
   * column, then alternates between a plain scan and a lazy (on-demand) scan so
   * the timing difference of HBASE-5416 can be observed in the logs.
   *
   * @throws Exception if the mini cluster or table operations fail
   */
  @Test
  public void testJoinedScanners() throws Exception {
    String[] dataNodeHosts = new String[] { "host1", "host2", "host3" };
    int regionServersCount = 3;

    HBaseTestingUtility htu = new HBaseTestingUtility();

    final int DEFAULT_BLOCK_SIZE = 1024 * 1024;
    htu.getConfiguration().setLong("dfs.blocksize", DEFAULT_BLOCK_SIZE);
    htu.getConfiguration().setInt("dfs.replication", 1);
    htu.getConfiguration().setLong("hbase.hregion.max.filesize", 322122547200L);
    MiniHBaseCluster cluster = null;

    try {
      cluster = htu.startMiniCluster(1, regionServersCount, dataNodeHosts);
      byte[][] families = {cf_essential, cf_joined};

      TableName tableName = TableName.valueOf(this.getClass().getSimpleName());
      HTableDescriptor desc = new HTableDescriptor(tableName);
      for (byte[] family : families) {
        HColumnDescriptor hcd = new HColumnDescriptor(family);
        hcd.setDataBlockEncoding(blockEncoding);
        desc.addFamily(hcd);
      }
      htu.getHBaseAdmin().createTable(desc);
      Table ht = htu.getConnection().getTable(tableName);

      long rows_to_insert = 1000;
      int insert_batch = 20;
      long time = System.nanoTime();
      Random rand = new Random(time);

      LOG.info("Make " + Long.toString(rows_to_insert) + " rows, total size = "
        + Float.toString(rows_to_insert * valueWidth / 1024 / 1024) + " MB");

      byte[] val_large = new byte[valueWidth];

      List<Put> puts = new ArrayList<Put>();

      for (long i = 0; i < rows_to_insert; i++) {
        Put put = new Put(Bytes.toBytes(Long.toString(i)));
        // Roughly selectionRatio percent of rows get the "Y" flag and will be
        // selected by the scan filter below.
        if (rand.nextInt(100) <= selectionRatio) {
          put.add(cf_essential, col_name, flag_yes);
        } else {
          put.add(cf_essential, col_name, flag_no);
        }
        put.add(cf_joined, col_name, val_large);
        puts.add(put);
        if (puts.size() >= insert_batch) {
          ht.put(puts);
          puts.clear();
        }
      }
      // Flush the final partial batch. BUGFIX: the original condition was
      // `puts.size() >= 0`, which is always true and issued a put even for an
      // empty batch.
      if (puts.size() > 0) {
        ht.put(puts);
        puts.clear();
      }

      LOG.info("Data generated in "
        + Double.toString((System.nanoTime() - time) / 1000000000.0) + " seconds");

      boolean slow = true;
      for (int i = 0; i < 10; ++i) {
        runScanner(ht, slow);
        slow = !slow;
      }

      ht.close();
    } finally {
      if (cluster != null) {
        htu.shutdownMiniCluster();
      }
    }
  }

  /**
   * Runs a single filtered scan over the table and logs its duration.
   *
   * @param table the table to scan
   * @param slow  true for a plain scan, false to enable on-demand column family
   *              loading (the joined-scanner optimization)
   * @throws Exception if the scan fails
   */
  private void runScanner(Table table, boolean slow) throws Exception {
    long time = System.nanoTime();
    Scan scan = new Scan();
    scan.addColumn(cf_essential, col_name);
    scan.addColumn(cf_joined, col_name);

    // Only rows whose essential flag equals "Y" pass the filter; rows missing
    // the column are filtered out too.
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
      cf_essential, col_name, CompareFilter.CompareOp.EQUAL, flag_yes);
    filter.setFilterIfMissing(true);
    scan.setFilter(filter);
    scan.setLoadColumnFamiliesOnDemand(!slow);

    ResultScanner result_scanner = table.getScanner(scan);
    Result res;
    long rows_count = 0;
    while ((res = result_scanner.next()) != null) {
      rows_count++;
    }

    double timeSec = (System.nanoTime() - time) / 1000000000.0;
    result_scanner.close();
    // NOTE(review): the logged row count is halved (rows_count/2) — nothing in
    // this file explains the division; confirm it is intentional.
    LOG.info((slow ? "Slow" : "Joined") + " scanner finished in " + Double.toString(timeSec)
      + " seconds, got " + Long.toString(rows_count / 2) + " rows");
  }

  private static Options options = new Options();

  /**
   * Command line interface:
   * @param args command-line arguments (-e encoding, -r ratio, -w width)
   * @throws Exception if parsing or the test run fails
   */
  public static void main(final String[] args) throws Exception {
    Option encodingOption = new Option("e", "blockEncoding", true,
      "Data block encoding; Default: FAST_DIFF");
    encodingOption.setRequired(false);
    options.addOption(encodingOption);

    Option ratioOption = new Option("r", "selectionRatio", true,
      "Ratio of selected rows using essential column family");
    ratioOption.setRequired(false);
    options.addOption(ratioOption);

    Option widthOption = new Option("w", "valueWidth", true,
      "Width of value for non-essential column family");
    widthOption.setRequired(false);
    options.addOption(widthOption);

    CommandLineParser parser = new GnuParser();
    CommandLine cmd = parser.parse(options, args);
    if (args.length < 1) {
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("TestJoinedScanners", options, true);
    }

    if (cmd.hasOption("e")) {
      blockEncoding = DataBlockEncoding.valueOf(cmd.getOptionValue("e"));
    }
    if (cmd.hasOption("r")) {
      selectionRatio = Integer.parseInt(cmd.getOptionValue("r"));
    }
    if (cmd.hasOption("w")) {
      valueWidth = Integer.parseInt(cmd.getOptionValue("w"));
    }
    // run the test
    TestJoinedScanners test = new TestJoinedScanners();
    test.testJoinedScanners();
  }
}
/*
 * Copyright 2014 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.apiman.gateway.engine.beans;

import io.apiman.gateway.engine.beans.util.HeaderMap;
import io.apiman.gateway.engine.beans.util.QueryMap;

import java.io.Serializable;

/**
 * An inbound request for a managed API.
 *
 * @author eric.wittmann@redhat.com
 */
public class ApiRequest implements IApiObject, Serializable {

    private static final long serialVersionUID = 8024669261165845962L;

    private String apiKey;
    private transient ApiContract contract;
    private transient Api api;
    private String type;
    private String url;
    private String destination;
    private QueryMap queryParams = new QueryMap();
    private HeaderMap headers = new HeaderMap();
    private String remoteAddr;
    private transient Object rawRequest;
    private boolean transportSecurity = false;

    /*
     * Optional fields - set these if you want the apiman engine to
     * validate that the apikey is valid for the given API coords.
     */
    private String apiOrgId;
    private String apiId;
    private String apiVersion;

    /**
     * Constructor.
     */
    public ApiRequest() {
    }

    /** @return the apiKey */
    public String getApiKey() {
        return apiKey;
    }

    /** @param apiKey the apiKey to set */
    public void setApiKey(String apiKey) {
        this.apiKey = apiKey;
    }

    /** @return the rawRequest */
    public Object getRawRequest() {
        return rawRequest;
    }

    /** @param rawRequest the rawRequest to set */
    public void setRawRequest(Object rawRequest) {
        this.rawRequest = rawRequest;
    }

    /** @return the type */
    public String getType() {
        return type;
    }

    /** @param type the type to set */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * @see io.apiman.gateway.engine.beans.IApiObject#getHeaders()
     */
    @Override
    public HeaderMap getHeaders() {
        return headers;
    }

    /**
     * @see io.apiman.gateway.engine.beans.IApiObject#setHeaders(HeaderMap)
     */
    @Override
    public void setHeaders(HeaderMap headers) {
        this.headers = headers;
    }

    /** @return the destination */
    public String getDestination() {
        return destination;
    }

    /** @param destination the destination to set */
    public void setDestination(String destination) {
        this.destination = destination;
    }

    /** @return the remoteAddr */
    public String getRemoteAddr() {
        return remoteAddr;
    }

    /** @param remoteAddr the remoteAddr to set */
    public void setRemoteAddr(String remoteAddr) {
        this.remoteAddr = remoteAddr;
    }

    /** @return the contract */
    public ApiContract getContract() {
        return contract;
    }

    /** @param contract the contract to set */
    public void setContract(ApiContract contract) {
        this.contract = contract;
    }

    /** @return the apiOrgId */
    public String getApiOrgId() {
        return apiOrgId;
    }

    /** @param apiOrgId the apiOrgId to set */
    public void setApiOrgId(String apiOrgId) {
        this.apiOrgId = apiOrgId;
    }

    /** @return the apiId */
    public String getApiId() {
        return apiId;
    }

    /** @param apiId the apiId to set */
    public void setApiId(String apiId) {
        this.apiId = apiId;
    }

    /** @return the apiVersion */
    public String getApiVersion() {
        return apiVersion;
    }

    /** @param apiVersion the apiVersion to set */
    public void setApiVersion(String apiVersion) {
        this.apiVersion = apiVersion;
    }

    /** @return the queryParams */
    public QueryMap getQueryParams() {
        return queryParams;
    }

    /** @param queryParams the queryParams to set */
    public void setQueryParams(QueryMap queryParams) {
        this.queryParams = queryParams;
    }

    /**
     * Indicates whether api request or response was made with transport security.
     *
     * @return true if transport is secure; else false.
     */
    public boolean isTransportSecure() {
        return transportSecurity;
    }

    /**
     * Set whether api request/response was made with transport security.
     *
     * @param isSecure transport security status
     */
    public void setTransportSecure(boolean isSecure) {
        this.transportSecurity = isSecure;
    }

    /** @return the api */
    public Api getApi() {
        return api;
    }

    /** @param api the api to set */
    public void setApi(Api api) {
        this.api = api;
    }

    /** @return the url */
    public String getUrl() {
        return url;
    }

    /** @param url the url to set */
    public void setUrl(String url) {
        this.url = url;
    }
}
/*
 * Copyright 2006-2012 ICEsoft Technologies Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS
 * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */
package org.icepdf.core.pobjects.fonts;

import org.icepdf.core.pobjects.*;
import org.icepdf.core.util.Library;

import java.util.Hashtable;
import java.util.Vector;
import java.util.logging.Logger;
import java.util.logging.Level;

/**
 * This class represents a PDF <code>FontDescriptor</code>.  A FontDescriptor object
 * holds extra information about a particular parent Font object.  In particular
 * information on font widths, flags, to unicode and embedded font program streams.
 *
 * @see org.icepdf.core.pobjects.fonts.Font
 */
public class FontDescriptor extends Dictionary {

    private static final Logger logger =
            Logger.getLogger(FontDescriptor.class.toString());

    // Embedded font program, if one was successfully parsed in init().
    private FontFile font;

    public static final String FONT_NAME = "FontName";

    public static final String FONT_FAMILY = "FontFamily";

    public static final String MISSING_Stretch = "FontStretch";

    public static final String FONT_WEIGHT = "FontWeight";

    public static final String FLAGS = "Flags";

    public static final String FONT_BBOX = "FontBBox";

    public static final String ITALIC_ANGLE = "ItalicAngle";

    public static final String ASCENT = "Ascent";

    public static final String DESCENT = "Descent";

    public static final String LEADING = "Leading";

    public static final String CAP_HEIGHT = "CapHeight";

    public static final String X_HEIGHT = "XHeight";

    public static final String STEM_V = "StemV";

    public static final String STEM_H = "StemH";

    public static final String AVG_WIDTH = "AvgWidth";

    public static final String MAX_WIDTH = "MaxWidth";

    public static final String MISSING_WIDTH = "MissingWidth";

    private static final String FONT_FILE = "FontFile";

    private static final String FONT_FILE_2 = "FontFile2";

    private static final String FONT_FILE_3 = "FontFile3";

    private static final String FONT_FILE_3_TYPE_1C = "Type1C";

    private static final String FONT_FILE_3_CID_FONT_TYPE_0 = "CIDFontType0";

    private static final String FONT_FILE_3_CID_FONT_TYPE_2 = "CIDFontType2";

    private static final String FONT_FILE_3_CID_FONT_TYPE_0C = "CIDFontType0C";

    private static final String FONT_FILE_3_OPEN_TYPE = "OpenType";

    /**
     * Creates a new instance of a FontDescriptor.
     *
     * @param l Library of all objects in PDF
     * @param h hash of parsed FontDescriptor attributes
     */
    public FontDescriptor(Library l, Hashtable h) {
        super(l, h);
    }

    /**
     * Utility method for creating a FontDescriptor based on the font metrics
     * of the <code>AFM</code>
     *
     * @param library document library
     * @param afm     adobe font metrics data
     * @return new instance of a <code>FontDescriptor</code>
     */
    public static FontDescriptor createDescriptor(Library library, AFM afm) {
        Hashtable<String, Object> properties = new Hashtable<String, Object>(5);
        properties.put(FONT_NAME, afm.getFontName());
        properties.put(FONT_FAMILY, afm.getFamilyName());
        properties.put(FONT_BBOX, afm.getFontBBox());
        properties.put(ITALIC_ANGLE, afm.getItalicAngle());
        properties.put(MAX_WIDTH, afm.getMaxWidth());
        properties.put(AVG_WIDTH, afm.getAvgWidth());
        properties.put(FLAGS, afm.getFlags());
        return new FontDescriptor(library, properties);
    }

    /**
     * Returns the PostScript name of the font.
     *
     * @return PostScript name of font.
     */
    public String getFontName() {
        Object value = library.getObject(entries, FONT_NAME);
        if (value instanceof Name) {
            return ((Name) value).getName();
        } else if (value instanceof String) {
            return (String) value;
        }
        return null;
    }

    /**
     * Gets a string specifying the preferred font family name.  For example, the font
     * "Time Bold Italic" would have a font family of Times.
     *
     * @return preferred font family name.
     */
    public String getFontFamily() {
        Object value = library.getObject(entries, FONT_FAMILY);
        if (value instanceof StringObject) {
            StringObject familyName = (StringObject) value;
            return familyName.getDecryptedLiteralString(library.getSecurityManager());
        }
        // NOTE(review): this returns the literal key string "FontName" (the
        // FONT_NAME constant) as a fallback, not the descriptor's font name —
        // confirm callers rely on this before changing it.
        return FONT_NAME;
    }

    /**
     * Gets the weight (thickness) component of the fully-qualified font name or
     * font specifier.  The default value is zero.
     *
     * @return the weight of the font name.
     */
    public float getFontWeight() {
        Object value = library.getObject(entries, FONT_WEIGHT);
        if (value instanceof Number) {
            return ((Number) value).floatValue();
        }
        return 0.0f;
    }

    /**
     * Gets the width to use for character codes whose widths are not specifed in
     * the font's dictionary.  The default value is zero.
     *
     * @return width of non-specified characters.
     */
    public float getMissingWidth() {
        Object value = library.getObject(entries, MISSING_WIDTH);
        if (value instanceof Number) {
            return ((Number) value).floatValue();
        }
        return 0.0f;
    }

    /**
     * Gets the average width of glyphs in the font.  The default value is zero.
     *
     * @return average width of glyphs.
     */
    public float getAverageWidth() {
        Object value = library.getObject(entries, AVG_WIDTH);
        if (value instanceof Number) {
            return ((Number) value).floatValue();
        }
        return 0.0f;
    }

    /**
     * Gets the maximum width of glyphs in the font.  The default value is zero.
     *
     * @return maximum width of glyphs.
     */
    public float getMaxWidth() {
        Object value = library.getObject(entries, MAX_WIDTH);
        if (value instanceof Number) {
            return ((Number) value).floatValue();
        }
        return 0.0f;
    }

    /**
     * Gets the ascent of glyphs in the font.  The default value is zero.
     *
     * @return ascent of glyphs.
     */
    public float getAscent() {
        Object value = library.getObject(entries, ASCENT);
        if (value instanceof Number) {
            return ((Number) value).floatValue();
        }
        return 0.0f;
    }

    /**
     * Gets the descent of glyphs in the font.  The default value is zero.
     *
     * @return descent of glyphs.
     */
    public float getDescent() {
        Object value = library.getObject(entries, DESCENT);
        if (value instanceof Number) {
            return ((Number) value).floatValue();
        }
        return 0.0f;
    }

    /**
     * Gets the embeddedFont if any.
     *
     * @return embedded font; null, if there is no valid embedded font.
     */
    public FontFile getEmbeddedFont() {
        return font;
    }

    /**
     * Gets the fonts bounding box.
     *
     * @return bounding box in PDF coordinate space.
     */
    public PRectangle getFontBBox() {
        Object value = library.getObject(entries, FONT_BBOX);
        if (value instanceof Vector) {
            Vector rectangle = (Vector) value;
            return new PRectangle(rectangle);
        }
        return null;
    }

    /**
     * Gets the font flag value, which is a collection of various characteristics
     * that describe the font.
     *
     * @return int value representing the flags; bits must be looked at to get
     *         attribute values.
     */
    public int getFlags() {
        Object value = library.getObject(entries, FLAGS);
        if (value instanceof Number) {
            return ((Number) value).intValue();
        }
        return 0;
    }

    /**
     * Initiate the Font Descriptor object.  Reads embedded font programs
     * or CMap streams.
     */
    public void init() {

        /**
         * FontFile1 = A stream containing a Type 1 font program
         * FontFile2 = A stream containing a TrueType font program
         * FontFile3 = A stream containing a font program other than Type 1 or
         * TrueType. The format of the font program is specified by the Subtype entry
         * in the stream dictionary
         */
        try {

            // get an instance of our font factory
            FontFactory fontFactory = FontFactory.getInstance();

            if (entries.containsKey(FONT_FILE)) {
                Stream fontStream = (Stream) library.getObject(entries, FONT_FILE);
                if (fontStream != null) {
                    font = fontFactory.createFontFile(
                            fontStream, FontFactory.FONT_TYPE_1);
                }
            }

            if (entries.containsKey(FONT_FILE_2)) {
                Stream fontStream = (Stream) library.getObject(entries, FONT_FILE_2);
                if (fontStream != null) {
                    font = fontFactory.createFontFile(
                            fontStream, FontFactory.FONT_TRUE_TYPE);
                }
            }

            if (entries.containsKey(FONT_FILE_3)) {
                Stream fontStream = (Stream) library.getObject(entries, FONT_FILE_3);
                // BUGFIX: the FontFile and FontFile2 branches null-check the
                // stream but this branch previously dereferenced fontStream and
                // the "Subtype" entry unconditionally, producing an NPE that
                // was silently swallowed by the catch (Throwable) below.
                if (fontStream != null) {
                    Object subTypeValue = fontStream.getObject("Subtype");
                    String subType = subTypeValue != null ? subTypeValue.toString() : null;
                    if (subType != null &&
                            (subType.equals(FONT_FILE_3_TYPE_1C) ||
                                    subType.equals(FONT_FILE_3_CID_FONT_TYPE_0) ||
                                    subType.equals(FONT_FILE_3_CID_FONT_TYPE_0C))
                            ) {
                        font = fontFactory.createFontFile(
                                fontStream, FontFactory.FONT_TYPE_1);
                    }
                    if (subType != null && subType.equals(FONT_FILE_3_OPEN_TYPE)) {
                        // font = new NFontOpenType(fontStreamBytes);
                        font = fontFactory.createFontFile(
                                fontStream, FontFactory.FONT_OPEN_TYPE);
                    }
                }
            }
        }
        // catch everything, we can fall back to font substitution if a failure
        // occurs.
        catch (Throwable e) {
            logger.log(Level.FINE, "Error Reading Embedded Font ", e);
        }

    }

    /**
     * Return a string representation of the all the FontDescriptor object's
     * parsed attributes.
     *
     * @return all of FontDescriptors parsed attributes.
     */
    public String toString() {
        String name = null;
        if (font != null)
            name = font.getName();
        return super.getPObjectReference() + " FONTDESCRIPTOR= " + entries.toString() + " - " + name;
    }
}
/***************************************************************************
 * Copyright 2017 Kieker Project (http://kieker-monitoring.net)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ***************************************************************************/

package kieker.monitoring.writer.amqp;

import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.concurrent.TimeoutException;

import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

import kieker.common.configuration.Configuration;
import kieker.common.logging.Log;
import kieker.common.logging.LogFactory;
import kieker.common.record.IMonitoringRecord;
import kieker.common.record.io.DefaultValueSerializer;
import kieker.common.record.misc.RegistryRecord;
import kieker.common.util.thread.DaemonThreadFactory;
import kieker.monitoring.registry.GetIdAdapter;
import kieker.monitoring.registry.IRegistryListener;
import kieker.monitoring.registry.RegisterAdapter;
import kieker.monitoring.registry.WriterRegistry;
import kieker.monitoring.writer.AbstractMonitoringWriter;

/**
 * Monitoring record writer which sends records using the AMQP protocol to a message queue.
 *
 * @author Holger Knoche, Christian Wulf
 *
 * @since 1.12
 */
public class AmqpWriter extends AbstractMonitoringWriter implements IRegistryListener<String> {

	/** ID for registry records. */
	public static final byte REGISTRY_RECORD_ID = (byte) 0xFF;

	/** ID for regular records. */
	public static final byte REGULAR_RECORD_ID = (byte) 0x01;

	private static final Log LOG = LogFactory.getLog(AmqpWriter.class);

	/** The default size for the buffer used to serialize records */
	private static final int DEFAULT_BUFFER_SIZE = 16384;

	/** Size of the "envelope" data which is prepended before the actual record. */
	private static final int SIZE_OF_ENVELOPE = 1 + 8;

	private static final String PREFIX = AmqpWriter.class.getName() + ".";

	/** The name of the configuration property for the server URI. */
	public static final String CONFIG_URI = PREFIX + "uri"; // NOCS (afterPREFIX)
	/** The name of the configuration property for the AMQP exchange name. */
	public static final String CONFIG_EXCHANGENAME = PREFIX + "exchangename"; // NOCS (afterPREFIX)
	/** The name of the configuration property for the AMQP queue name. */
	public static final String CONFIG_QUEUENAME = PREFIX + "queuename"; // NOCS (afterPREFIX)
	/** The name of the configuration property for the heartbeat timeout. */
	public static final String CONFIG_HEARTBEAT = PREFIX + "heartbeat"; // NOCS (afterPREFIX)

	/**
	 * Default heartbeat timeout interval in seconds.
	 */
	private static final int DEFAULT_HEARTBEAT = 60;

	private final String uri;
	private final String exchangeName;
	private final String queueName;
	private final int heartbeat;

	// Serialization buffer, reused for every record and registry entry.
	private final ByteBuffer buffer;
	private final Connection connection;
	private final Channel channel;

	private final WriterRegistry writerRegistry;

	/**
	 * Adapter for the current, generated record structure.
	 * The record generator should generate records with the new interface.
	 */
	private final RegisterAdapter<String> registerStringsAdapter;
	/**
	 * Adapter for the current, generated record structure.
	 * The record generator should generate records with the new interface.
	 */
	private final GetIdAdapter<String> writeBytesAdapter;

	/**
	 * Creates a writer from the given configuration and eagerly opens the AMQP
	 * connection and channel.
	 *
	 * @param configuration the Kieker monitoring configuration
	 * @throws KeyManagementException if TLS setup of the connection URI fails
	 * @throws NoSuchAlgorithmException if TLS setup of the connection URI fails
	 * @throws URISyntaxException if the configured URI is malformed
	 * @throws IOException if the connection cannot be established
	 * @throws TimeoutException if the connection attempt times out
	 */
	public AmqpWriter(final Configuration configuration) throws KeyManagementException, NoSuchAlgorithmException, URISyntaxException, IOException,
			TimeoutException {
		super(configuration);

		// Read configuration parameters from configuration
		this.uri = configuration.getStringProperty(CONFIG_URI);
		this.exchangeName = configuration.getStringProperty(CONFIG_EXCHANGENAME);
		this.queueName = configuration.getStringProperty(CONFIG_QUEUENAME);

		// NOTE(review): a configured heartbeat of 0 is replaced by the default,
		// so explicitly disabling heartbeats (AMQP meaning of 0) is impossible.
		final int configuredHeartbeat = configuration.getIntProperty(CONFIG_HEARTBEAT);
		if (configuredHeartbeat == 0) {
			this.heartbeat = DEFAULT_HEARTBEAT;
		} else {
			this.heartbeat = configuredHeartbeat;
		}

		final int bufferSize = DEFAULT_BUFFER_SIZE;
		this.buffer = ByteBuffer.allocate(bufferSize);

		this.connection = this.createConnection();
		this.channel = this.connection.createChannel();

		this.writerRegistry = new WriterRegistry(this);
		this.registerStringsAdapter = new RegisterAdapter<String>(this.writerRegistry);
		this.writeBytesAdapter = new GetIdAdapter<String>(this.writerRegistry);
	}

	/**
	 * Opens a new AMQP connection for the configured URI and heartbeat.
	 *
	 * @return a freshly created connection backed by daemon threads
	 */
	private Connection createConnection() throws KeyManagementException, NoSuchAlgorithmException, URISyntaxException, IOException, TimeoutException {
		final ConnectionFactory connectionFactory = new ConnectionFactory();

		connectionFactory.setUri(this.uri);
		connectionFactory.setRequestedHeartbeat(this.heartbeat);

		// Use only daemon threads for connections. Otherwise, all connections would have to be explicitly
		// closed for the JVM to terminate.
		connectionFactory.setThreadFactory(new DaemonThreadFactory());

		return connectionFactory.newConnection();
	}

	@Override
	public void onStarting() {
		// do nothing
	}

	/**
	 * Serializes the given record (envelope + class-name id + timestamp + payload)
	 * into the shared buffer and publishes it.
	 *
	 * @param monitoringRecord the record to send
	 */
	@Override
	public void writeMonitoringRecord(final IMonitoringRecord monitoringRecord) {
		monitoringRecord.registerStrings(this.registerStringsAdapter);

		final ByteBuffer recordBuffer = this.buffer;
		final int requiredBufferSize = SIZE_OF_ENVELOPE + 4 + 8 + monitoringRecord.getSize();
		if (recordBuffer.capacity() < requiredBufferSize) {
			// BUGFIX: this message previously said "string registry data"
			// (copy-pasted from onNewRegistryEntry) although it guards the
			// monitoring-record buffer.
			throw new IllegalStateException("Insufficient buffer capacity for monitoring record data");
		}

		// register monitoringRecord class name
		final String recordClassName = monitoringRecord.getClass().getName();
		this.writerRegistry.register(recordClassName);

		// Prepend envelope data
		recordBuffer.put(REGULAR_RECORD_ID);
		recordBuffer.putLong(this.writerRegistry.getId());

		// serialized monitoringRecord
		recordBuffer.putInt(this.writerRegistry.getId(recordClassName));
		recordBuffer.putLong(monitoringRecord.getLoggingTimestamp());
		monitoringRecord.serialize(DefaultValueSerializer.create(recordBuffer, this.writeBytesAdapter));

		this.publishBuffer(recordBuffer);
	}

	/**
	 * Publishes a registry entry (envelope + id + UTF-8 string) so the receiver
	 * can resolve the integer ids used in subsequent records.
	 *
	 * @param value the registered string
	 * @param id the id assigned to it
	 */
	@Override
	public void onNewRegistryEntry(final String value, final int id) {
		final ByteBuffer registryBuffer = this.buffer;

		final byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
		final int requiredBufferSize = SIZE_OF_ENVELOPE + RegistryRecord.SIZE + bytes.length;
		if (registryBuffer.capacity() < requiredBufferSize) {
			throw new IllegalStateException("Insufficient buffer capacity for string registry data");
		}

		// Prepend envelope data.
		registryBuffer.put(REGISTRY_RECORD_ID);
		registryBuffer.putLong(this.writerRegistry.getId());

		// id-string pair
		registryBuffer.putInt(id);
		registryBuffer.putInt(bytes.length);
		registryBuffer.put(bytes);

		this.publishBuffer(registryBuffer);
	}

	/**
	 * Copies the buffer's contents up to its current position into a byte array,
	 * resets the buffer for reuse, and publishes the data on the channel.
	 * Publish failures are logged, not rethrown.
	 *
	 * @param localBuffer the buffer holding the serialized data
	 */
	private void publishBuffer(final ByteBuffer localBuffer) {
		final int dataSize = localBuffer.position();
		final byte[] data = new byte[dataSize];
		System.arraycopy(localBuffer.array(), localBuffer.arrayOffset(), data, 0, dataSize);

		// Reset the buffer position
		localBuffer.position(0);

		try {
			this.channel.basicPublish(this.exchangeName, this.queueName, null, data);
		} catch (final IOException e) {
			LOG.error("An exception occurred", e);
		}
	}

	@Override
	public void onTerminating() {
		try {
			this.connection.close();
		} catch (final IOException e) {
			LOG.error("Error closing connection", e);
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.graph.generator;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.functions.FunctionAnnotation;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.Vertex;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;
import org.apache.flink.util.Collector;
import org.apache.flink.util.LongValueSequenceIterator;
import org.apache.flink.util.Preconditions;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/**
 * Generator for circulant graphs: every vertex v<sub>i</sub> is linked to
 * v<sub>(i+j)%n</sub> for each configured offset {@code j}.
 *
 * @see <a href="http://mathworld.wolfram.com/CirculantGraph.html">Circulant Graph at Wolfram MathWorld</a>
 */
public class CirculantGraph extends GraphGeneratorBase<LongValue, NullValue, NullValue> {

	public static final int MINIMUM_VERTEX_COUNT = 2;

	public static final int MINIMUM_OFFSET = 1;

	// Required to create the DataSource
	private final ExecutionEnvironment env;

	// Required configuration
	private final long vertexCount;

	// Offset ranges registered via addRange(); sorted and checked for overlap in generate()
	private List<OffsetRange> offsetRanges = new ArrayList<>();

	/**
	 * An oriented {@link Graph} with {@code n} vertices where each vertex
	 * v<sub>i</sub> is connected to vertex v<sub>(i+j)%n</sub> for each
	 * configured offset {@code j}.
	 *
	 * @param env the Flink execution environment
	 * @param vertexCount number of vertices
	 */
	public CirculantGraph(ExecutionEnvironment env, long vertexCount) {
		Preconditions.checkArgument(vertexCount >= MINIMUM_VERTEX_COUNT,
			"Vertex count must be at least " + MINIMUM_VERTEX_COUNT);

		this.env = env;
		this.vertexCount = vertexCount;
	}

	/**
	 * Required configuration for each range of offsets in the graph.
	 *
	 * @param offset first offset appointing the vertices' position
	 * @param length number of contiguous offsets in range
	 * @return this
	 */
	public CirculantGraph addRange(long offset, long length) {
		Preconditions.checkArgument(offset >= MINIMUM_OFFSET,
			"Range offset must be at least " + MINIMUM_OFFSET);
		Preconditions.checkArgument(length <= vertexCount - offset,
			"Range length must not be greater than the vertex count minus the range offset.");

		offsetRanges.add(new OffsetRange(offset, length));

		return this;
	}

	/**
	 * Generates the configured circulant graph.
	 *
	 * @return graph with {@code vertexCount} vertices and one edge per vertex
	 *         and configured offset
	 * @throws IllegalArgumentException if two configured ranges overlap
	 */
	@Override
	public Graph<LongValue, NullValue, NullValue> generate() {
		// Vertices
		DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);

		// Edges
		LongValueSequenceIterator iterator = new LongValueSequenceIterator(0, this.vertexCount - 1);

		// Validate ranges: after sorting, neighboring ranges must not share offsets.
		// Guarded for the no-range case, which previously threw an obscure
		// NoSuchElementException from iter.next(); with no ranges configured the
		// result is simply a graph without edges.
		if (!offsetRanges.isEmpty()) {
			Collections.sort(offsetRanges);
			Iterator<OffsetRange> iter = offsetRanges.iterator();
			OffsetRange lastRange = iter.next();

			while (iter.hasNext()) {
				OffsetRange nextRange = iter.next();

				if (lastRange.overlaps(nextRange)) {
					throw new IllegalArgumentException("Overlapping ranges " + lastRange + " and " + nextRange);
				}

				lastRange = nextRange;
			}
		}

		DataSet<Edge<LongValue, NullValue>> edges = env
			.fromParallelCollection(iterator, LongValue.class)
				.setParallelism(parallelism)
				.name("Edge iterators")
			.flatMap(new LinkVertexToOffsets(vertexCount, offsetRanges))
				.setParallelism(parallelism)
				.name("Circulant graph edges");

		// Graph
		return Graph.fromDataSet(vertices, edges, env);
	}

	/**
	 * Emits, for every source vertex, one edge per configured offset,
	 * targeting vertex (source + offset) % vertexCount.
	 */
	@FunctionAnnotation.ForwardedFields("*->f0")
	private static class LinkVertexToOffsets
	implements FlatMapFunction<LongValue, Edge<LongValue, NullValue>> {
		private final long vertexCount;

		private final List<OffsetRange> offsetRanges;

		// Reused output objects; safe because the collector consumes them immediately
		private LongValue target = new LongValue();

		private Edge<LongValue, NullValue> edge = new Edge<>(null, target, NullValue.getInstance());

		public LinkVertexToOffsets(long vertexCount, List<OffsetRange> offsetRanges) {
			this.vertexCount = vertexCount;
			this.offsetRanges = offsetRanges;
		}

		@Override
		public void flatMap(LongValue source, Collector<Edge<LongValue, NullValue>> out)
				throws Exception {
			edge.f0 = source;

			long sourceID = source.getValue();

			for (OffsetRange offsetRange : offsetRanges) {
				long targetID = sourceID + offsetRange.getOffset();

				for (long i = offsetRange.getLength(); i > 0; i--) {
					// add positive offset
					target.setValue(targetID++ % vertexCount);
					out.collect(edge);
				}
			}
		}
	}

	/**
	 * Stores the start offset and length configuration for an offset range.
	 */
	public static class OffsetRange
	implements Serializable, Comparable<OffsetRange> {
		private long offset;

		private long length;

		/**
		 * Construct a range with the given offset and length.
		 *
		 * @param offset the range offset
		 * @param length the range length
		 */
		public OffsetRange(long offset, long length) {
			this.offset = offset;
			this.length = length;
		}

		/**
		 * Get the range offset.
		 *
		 * @return the offset
		 */
		public long getOffset() {
			return offset;
		}

		/**
		 * Get the range length.
		 *
		 * @return the length
		 */
		public long getLength() {
			return length;
		}

		/**
		 * Get the offset of the last index in the range.
		 *
		 * @return last offset
		 */
		public long getLastOffset() {
			return offset + length - 1;
		}

		/**
		 * Return true if and only if the other range and this range share a
		 * common offset ID.
		 *
		 * @param other other range
		 * @return whether ranges are overlapping
		 */
		public boolean overlaps(OffsetRange other) {
			boolean overlapping = false;

			long lastOffset = getLastOffset();
			long otherLastOffset = other.getLastOffset();

			// check whether this range contains other
			overlapping |= (offset <= other.offset && other.offset <= lastOffset);
			overlapping |= (offset <= otherLastOffset && otherLastOffset <= lastOffset);

			// check whether other contains this range
			overlapping |= (other.offset <= offset && offset <= otherLastOffset);
			overlapping |= (other.offset <= lastOffset && lastOffset <= otherLastOffset);

			return overlapping;
		}

		@Override
		public String toString() {
			return Long.toString(offset) + ":" + Long.toString(length);
		}

		@Override
		public int compareTo(OffsetRange o) {
			int cmp = Long.compare(offset, o.offset);
			if (cmp != 0) {
				return cmp;
			}
			return Long.compare(length, o.length);
		}
	}
}
package org.basex.query.func;

import static org.basex.query.QueryError.*;
import static org.basex.query.func.Function.*;

import org.basex.query.ast.*;
import org.basex.query.expr.*;
import org.basex.query.expr.constr.*;
import org.basex.query.value.item.*;
import org.junit.*;

/**
 * This class tests the functions of the Utility Module.
 *
 * @author BaseX Team 2005-20, BSD License
 * @author Christian Gruen
 */
public final class UtilModuleTest extends QueryPlanTest {
  /** Test method for util:chars: splits a string into single characters. */
  @Test public void chars() {
    final Function func = _UTIL_CHARS;
    // test pre-evaluation
    query(func.args(" ()"), "");
    check(func.args(" ()"), "", empty());
    query(func.args(""), "");
    query(func.args("abc"), "a\nb\nc");
    query("count(" + func.args(" string-join(util:replicate('A', 100000))") + ')', 100000);
    check("count(" + func.args(" string-join(util:replicate('A', 100000))") + ')', 100000,
      empty(func), empty(STRING_LENGTH));
    // test iterative evaluation
    query(func.args(" <_/>"), "");
    query(func.args(" <_>abc</_>"), "a\nb\nc");
    query(func.args(" <_>abc</_>") + "[2]", "b");
    query(func.args(" <_>abc</_>") + "[last()]", "c");
    check("count(" + func.args(" string-join(util:replicate(<_>A</_>, 100000))") + ')', 100000,
      exists(STRING_LENGTH));
  }

  /** Test method for util:ddo: returns nodes in distinct document order. */
  @Test public void ddo() {
    final Function func = _UTIL_DDO;
    query(func.args(" <a/>"), "<a/>");
    query(func.args(" (<a/>, <b/>)"), "<a/>\n<b/>");
    query(func.args(" reverse((<a/>, <b/>))"), "<a/>\n<b/>");
    error(func.args(1), INVTYPE_X_X_X);
  }

  /** Test method for util:deep-equal with comparison options. */
  @Test public void deepEquals() {
    final Function func = _UTIL_DEEP_EQUAL;
    query(func.args(1, 1), true);
    query(func.args(1, 1, "ALLNODES"), true);
    error(func.args("(1 to 2)", "(1 to 2)", "X"), INVALIDOPTION_X);
  }

  /** Test method for util:if: conditional on the effective boolean value of the first argument. */
  @Test public void iff() {
    final Function func = _UTIL_IF;
    query(func.args(" 1", 1), 1);
    query(func.args(" ()", 1), "");
    query(func.args(" 1", 1, 2), 1);
    query(func.args(" ()", 1, 2), 2);
    query(func.args(" (<a/>, <b/>)", 1, 2), 1);
    error(func.args(" (1, 2)", 1, 2), EBV_X);
  }

  /** Test method for util:init: returns all items of a sequence except the last one. */
  @Test public void init() {
    final Function func = _UTIL_INIT;
    // static rewrites
    query(func.args(" ()"), "");
    query(func.args("A"), "");
    query(func.args(" (1, 2)"), 1);
    query(func.args(" (1 to 3)"), "1\n2");
    // known result size
    query(func.args(" <_>1</_> + 1"), "");
    query(func.args(" (<_>1</_> + 1, 3)"), 2);
    query(func.args(" prof:void(())"), "");
    // unknown result size
    query(func.args(" 1[. = 0]"), "");
    query(func.args(" 1[. = 1]"), "");
    query(func.args(" (1 to 2)[. = 0]"), "");
    query(func.args(" (1 to 4)[. < 3]"), 1);
    // value-based iterator
    query(func.args(" tokenize(<_></_>)"), "");
    query(func.args(" tokenize(<_>X</_>)"), "");
    query(func.args(" tokenize(<_>X Y</_>)"), "X");
    query(func.args(" tokenize(<_>X Y Z</_>)"), "X\nY");
    // iterator with known result size
    check(func.args(" (<a/>, <b/>)"), "<a/>", exists(HEAD));
    check(func.args(" sort((1 to 3) ! <_>{ . }</_>)"), "<_>1</_>\n<_>2</_>", exists(func));
    check("reverse(" + func.args(" (<a/>, <b/>, <c/>))"), "<b/>\n<a/>", exists(func));
    // nested function calls
    check(func.args(func.args(" ()")), "", empty());
    check(func.args(func.args(" (<a/>)")), "", empty());
    check(func.args(func.args(" (<a/>, <b/>)")), "", empty());
    check(func.args(func.args(" (<a/>, <b/>, <c/>)")), "<a/>", exists(HEAD));
    check(func.args(func.args(" (<a/>, <b/>, <c/>, <d/>)")), "<a/>\n<b/>", exists(SUBSEQUENCE));
  }

  /** Test method for util:item: returns the item at the given position. */
  @Test public void item() {
    final Function func = _UTIL_ITEM;
    query(func.args(" ()", 1), "");
    query(func.args(1, 1), 1);
    query(func.args(1, 0), "");
    query(func.args(1, 2), "");
    query(func.args(" 1 to 2", 2), 2);
    query(func.args(" 1 to 2", 3), "");
    query(func.args(" 1 to 2", 0), "");
    query(func.args(" 1 to 2", -1), "");
    query(func.args(" 1 to 2", 1.5), "");
    query("for $i in 1 to 2 return " + func.args(" $i", 1), "1\n2");
    query(func.args(" (<a/>, <b/>)", 1), "<a/>");
    query(func.args(" (<a/>, <b/>)", 3), "");
    query(func.args(" (<a/>, <b/>)[name()]", 1), "<a/>");
    query(func.args(" (<a/>, <b/>)[name()]", 2), "<b/>");
    query(func.args(" (<a/>, <b/>)[name()]", 3), "");
    query(func.args(" (<a/>, <b/>)", 1.5), "");
    query(func.args(" <a/>", 2), "");
    query(func.args(" (<a/>, <b/>)", " <_>1</_>"), "<a/>");
    query(func.args(" tokenize(<_>1</_>)", 2), "");
    query(func.args(" 1", " <_>0</_>"), "");
    query(func.args(" 1[. = 1]", " <_>1</_>"), 1);
    query(func.args(" 1[. = 1]", " <_>2</_>"), "");
    check(func.args(" prof:void(())", 0), "", empty(func));
    check(func.args(" (7 to 9)[. = 8]", -1), "", empty());
    check(func.args(" (7 to 9)[. = 8]", 0), "", empty());
    check(func.args(" (7 to 9)[. = 8]", 1.5), "", empty());
    check(func.args(" 1[. = 1]", 1), 1, empty(func));
    check(func.args(" 1[. = 1]", 2), "", empty());
    check(func.args(" (1, 2, <_/>)", 3), "<_/>", exists(_UTIL_LAST));
    check(func.args(" reverse((1, 2, <_/>))", 2), 2, empty(REVERSE));
    check(func.args(" tail((1, 2, 3, <_/>))", 2), 3, empty(TAIL));
    check(func.args(" util:init((1, 2, 3, <_/>))", 2), 2, empty(_UTIL_INIT));
    check(func.args(" (7 to 9)[. = 8]", 1), 8, exists(HEAD), type(HEAD, "xs:integer?"));
  }

  /** Test method for util:last: returns the last item of a sequence. */
  @Test public void last() {
    final Function func = _UTIL_LAST;
    query(func.args(" ()"), "");
    query(func.args(1), 1);
    query(func.args(" 1 to 2"), 2);
    query("for $i in 1 to 2 return " + func.args(" $i"), "1\n2");
    query(func.args(" (<a/>, <b/>)"), "<b/>");
    query(func.args(" (<a/>, <b/>)[position() > 2]"), "");
    query("for $i in 1 to 2 return " + func.args(" $i"), "1\n2");
    query(func.args(" (<a/>, <b/>)"), "<b/>");
    check(func.args(" prof:void(())"), "", empty(func));
    check(func.args(" <a/>"), "<a/>", empty(func));
    check(func.args(" (<a/>, <b/>)[name()]"), "<b/>", type(func, "element()?"));
    check(func.args(" reverse((1, 2, 3)[. > 1])"), 2, exists(HEAD));
    check(func.args(" tokenize(<_/>)"), "", exists(_UTIL_LAST));
    check(func.args(" tokenize(<_>1</_>)"), 1, exists(_UTIL_LAST));
    check(func.args(" tokenize(<_>1 2</_>)"), 2, exists(_UTIL_LAST));
    check(func.args(" tail(tokenize(<a/>))"), "", exists(TAIL));
    check(func.args(" tail(1 ! <_>{.}</_>)"), "", empty());
    check(func.args(" tail((1 to 2) ! <_>{.}</_>)"), "<_>2</_>", empty(TAIL));
    check(func.args(" tail((1 to 3) ! <_>{.}</_>)"), "<_>3</_>", empty(TAIL));
    check(func.args(" util:init((1 to 3) ! <_>{.}</_>)"), "<_>2</_>", empty(_UTIL_INIT));
    check(func.args(" util:init(tokenize(<a/>))"), "", exists(_UTIL_INIT));
  }

  /** Test method for util:or: returns the second argument if the first one is empty. */
  @Test public void or() {
    final Function func = _UTIL_OR;
    query(func.args(1, 2), 1);
    query(func.args(" <x/>", 2), "<x/>");
    query(func.args(" (1 to 2)[. = 1]", 2), 1);
    // test if second branch will be evaluated
    query(func.args(" (1 to 2)[. != 0]", " (1 to 1000000000000)[. != 0]"), "1\n2");
    query(func.args(" ()", 2), 2);
    query(func.args(" ()", " <x/>"), "<x/>");
    query(func.args(" (1 to 2)[. = 0]", " <x/>"), "<x/>");
    query(func.args(" tokenize(<a/>)", 2), 2);
    query(func.args(" tokenize(<a>1</a>)", 2), 1);
    query("sort(" + func.args(" tokenize(<a>1</a>)", 2) + ")", 1);
    query("sort(" + func.args(" tokenize(<a/>)", 2) + ")", 2);
    query("count(" + func.args(" <_>1</_>[. = 1]", 2) + ')', 1);
    query("count(" + func.args(" (1, 2)[. = 1]", 3) + ')', 1);
    query("count(" + func.args(" (1, 2, 3)[. = 1]", 4) + ')', 1);
    query("count(" + func.args(" (1, 2, 3)[. = 4]", 4) + ')', 1);
    query("count(" + func.args(" (1, 2, 3)[. = 4]", " (4, 5)") + ')', 2);
    check(func.args(null, null), "", empty());
    check(func.args(null, 1), 1, root(Int.class));
    check(func.args(1, null), 1, root(Int.class));
    check(func.args(null, " <x/>"), "<x/>", root(CElem.class));
    check(func.args(" <x/>", null), "<x/>", root(CElem.class));
    check(func.args(" (1, <_>2</_>[. = 3])", null), 1, root(List.class));
    check(func.args(" (2, <_>3</_>[. = 4])", "<z/>"), 2, root(List.class));
    check(func.args(" (3, <_>4</_>)[. = 3]", null), 3, root(IterFilter.class));
    check(func.args(" (4, <_>5</_>)[. = 4]", "<z/>"), 4, root(_UTIL_OR));
    check(func.args(" prof:void(1)", 2), 2, root(_UTIL_OR));
    check(func.args(" prof:void(2)", " prof:void(3)"), "", root(_UTIL_OR));
    check(func.args(" <_>6</_>[. = 6]", 7), "<_>6</_>", root(_UTIL_OR));
  }

  /** Test method for util:range: returns items within the given position range. */
  @Test public void range() {
    final Function func = _UTIL_RANGE;
    query(func.args(" ()", 1, 2), "");
    query(func.args(1, 1, 2), 1);
    query(func.args(1, 0, 2), 1);
    query(func.args(1, 2, 2), "");
    query(func.args(" 1 to 2", 2, 2), 2);
    query(func.args(" 1 to 2", 3, 2), "");
    query(func.args(" 1 to 2", 0, 2), "1\n2");
    query(func.args(" 1 to 2", -1, 2), "1\n2");
    query(func.args(" 1 to 2", 1.5, 2), 2);
    query("for $i in 1 to 2 return " + func.args(" $i", 1, 2), "1\n2");
    query(func.args(" (<a/>, <b/>)", 0, 2), "<a/>\n<b/>");
    query(func.args(" (<a/>, <b/>)", 1, 2), "<a/>\n<b/>");
    query(func.args(" (<a/>, <b/>)", 2.5, 3.5), "");
    query(func.args(" (<a/>, <b/>)", 3, 4), "");
    query(func.args(" (<a/>, <b/>)[name()]", 1, 9223372036854775807L), "<a/>\n<b/>");
  }

  /** Test method for util:replicate: repeats a sequence the given number of times. */
  @Test public void replicate() {
    final Function func = _UTIL_REPLICATE;
    query(func.args(" ()", 0), "");
    query(func.args(" ()", 1), "");
    query(func.args(1, 0), "");
    query(func.args("A", 1), "A");
    query(func.args("A", 2), "A\nA");
    query(func.args(" (0, 'A')", 1), "0\nA");
    query(func.args(" (0, 'A')", 2), "0\nA\n0\nA");
    query(func.args(" 1 to 10000", 10000) + "[last()]", "10000");
    query(func.args(" 1 to 10000", 10000) + "[1]", "1");
    query(func.args(" 1 to 10000", 10000) + "[10000]", "10000");
    query(func.args(" 1 to 10000", 10000) + "[10001]", "1");
    query("count(" + func.args(" 1 to 1000000", 1000000) + ")", 1000000000000L);
    query("count(" + func.args(func.args(" 1 to 3", 3), 3) + ")", 27);
    query("for $i in 1 to 2 return " + func.args(1, " $i"), "1\n1\n1");
    query(func.args(" <a/>", 2), "<a/>\n<a/>");
    query(func.args(" <a/>", " <_>2</_>"), "<a/>\n<a/>");
    query(func.args(" 1[. = 1]", 2), "1\n1");
    check(func.args(" <a/>", 0), "", empty());
    check(func.args(" ()", " <_>2</_>"), "", empty());
    check(func.args(" <a/>", 1), "<a/>", empty(func));
    check(func.args(" <a/>", 2), "<a/>\n<a/>", type(func, "element()+"));
    check(func.args(" <a/>", " <_>2</_>"), "<a/>\n<a/>", type(func, "element()*"));
    error(func.args(1, -1), UTIL_NEGATIVE_X);
  }
}
package apple.carplay;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.protocol.NSSecureCoding;
import apple.mapkit.MKMapItem;
import apple.uikit.UIImage;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * Generated NatJ/MOE binding for the CarPlay {@code CPPointOfInterest}
 * Objective-C class. A point of interest pairs a map location
 * ({@link MKMapItem}) with title/subtitle/summary text, optional detail-card
 * text, an optional custom pin image, detail-card buttons and arbitrary
 * user info.
 * <p>
 * NOTE(review): this file is machine-generated ({@code @Generated}); change
 * the binding generator rather than editing this file by hand.
 */
@Generated
@Library("CarPlay")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class CPPointOfInterest extends NSObject implements NSSecureCoding {
    static {
        NatJ.register();
    }

    @Generated
    protected CPPointOfInterest(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native CPPointOfInterest alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native CPPointOfInterest allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    /**
     * Subtitle to be used when POI detail card is visible
     * <p>
     * [@note] If not provided, the point of interest will fall back to displaying @c subtitle
     */
    @Generated
    @Selector("detailSubtitle")
    public native String detailSubtitle();

    /**
     * Summary text to be used when POI detail card is visible.
     * <p>
     * [@note] If not provided, the point of interest will fall back to displaying @c summary
     */
    @Generated
    @Selector("detailSummary")
    public native String detailSummary();

    /**
     * Title to be used when POI detail card is visible
     * <p>
     * [@note] If not provided, the point of interest will fall back to displaying @c title
     */
    @Generated
    @Selector("detailTitle")
    public native String detailTitle();

    @Generated
    @Selector("encodeWithCoder:")
    public native void encodeWithCoder(NSCoder coder);

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native CPPointOfInterest init();

    @Generated
    @Selector("initWithCoder:")
    public native CPPointOfInterest initWithCoder(NSCoder coder);

    /**
     * Initializes a point of interest to be used with @c CPPointOfInterestTemplate
     * <p>
     * [@note] When providing an image, your app should provide a @c UIImage that is display-ready. If necessary for the image, provide
     * light and dark styles by using an asset from your asset catalog, prepared with light and dark styles
     * or by using @c UIImageAsset to combine two @c UIImage instances into a single image with
     * both styles.
     * <p>
     * CPPointOfInterest instances appear on the Point of Interest map view as both selectable items in the table view overlay and as map annotations.
     *
     * @param location Location indicator use by map annotations.
     * @param title Primary title for this point of interest
     * @param subtitle Optional: Secondary title for this point of interest
     * @param summary Optional: Summary text for this point of interest
     * @param detailTitle Optional: Title to be used when POI detail card is visible
     * @param detailSubtitle Optional: Subtitle to be used when POI detail card is visible
     * @param detailSummary Optional: Summary text to be used when POI detail card is visible
     * @param pinImage Optional: a custom map annotation image
     */
    @Generated
    @Selector("initWithLocation:title:subtitle:summary:detailTitle:detailSubtitle:detailSummary:pinImage:")
    public native CPPointOfInterest initWithLocationTitleSubtitleSummaryDetailTitleDetailSubtitleDetailSummaryPinImage(
            MKMapItem location, String title, String subtitle, String summary, String detailTitle,
            String detailSubtitle, String detailSummary, UIImage pinImage);

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    /**
     * Location associated with this point of interest
     */
    @Generated
    @Selector("location")
    public native MKMapItem location();

    @Generated
    @Owned
    @Selector("new")
    public static native CPPointOfInterest new_objc();

    /**
     * Image used for map view annotations
     */
    @Generated
    @Selector("pinImage")
    public native UIImage pinImage();

    /**
     * Point of Interest detail card buttons
     */
    @Generated
    @Selector("primaryButton")
    public native CPTextButton primaryButton();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("secondaryButton")
    public native CPTextButton secondaryButton();

    /**
     * Subtitle to be used when POI detail card is visible
     * <p>
     * [@note] If not provided, the point of interest will fall back to displaying @c subtitle
     */
    @Generated
    @Selector("setDetailSubtitle:")
    public native void setDetailSubtitle(String value);

    /**
     * Summary text to be used when POI detail card is visible.
     * <p>
     * [@note] If not provided, the point of interest will fall back to displaying @c summary
     */
    @Generated
    @Selector("setDetailSummary:")
    public native void setDetailSummary(String value);

    /**
     * Title to be used when POI detail card is visible
     * <p>
     * [@note] If not provided, the point of interest will fall back to displaying @c title
     */
    @Generated
    @Selector("setDetailTitle:")
    public native void setDetailTitle(String value);

    /**
     * Location associated with this point of interest
     */
    @Generated
    @Selector("setLocation:")
    public native void setLocation(MKMapItem value);

    /**
     * Image used for map view annotations
     */
    @Generated
    @Selector("setPinImage:")
    public native void setPinImage(UIImage value);

    /**
     * Point of Interest detail card buttons
     */
    @Generated
    @Selector("setPrimaryButton:")
    public native void setPrimaryButton(CPTextButton value);

    @Generated
    @Selector("setSecondaryButton:")
    public native void setSecondaryButton(CPTextButton value);

    /**
     * Subtitle for this point of interest
     */
    @Generated
    @Selector("setSubtitle:")
    public native void setSubtitle(String value);

    /**
     * Summary text for this point of interest
     */
    @Generated
    @Selector("setSummary:")
    public native void setSummary(String value);

    /**
     * Primary title for this point of interest
     */
    @Generated
    @Selector("setTitle:")
    public native void setTitle(String value);

    /**
     * Any custom data or an object associated with this Point of Interest.
     */
    @Generated
    @Selector("setUserInfo:")
    public native void setUserInfo(@Mapped(ObjCObjectMapper.class) Object value);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    /**
     * Subtitle for this point of interest
     */
    @Generated
    @Selector("subtitle")
    public native String subtitle();

    /**
     * Summary text for this point of interest
     */
    @Generated
    @Selector("summary")
    public native String summary();

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("supportsSecureCoding")
    public static native boolean supportsSecureCoding();

    @Generated
    @ProtocolClassMethod("supportsSecureCoding")
    public boolean _supportsSecureCoding() {
        return supportsSecureCoding();
    }

    /**
     * Primary title for this point of interest
     */
    @Generated
    @Selector("title")
    public native String title();

    /**
     * Any custom data or an object associated with this Point of Interest.
     */
    @Generated
    @Selector("userInfo")
    @MappedReturn(ObjCObjectMapper.class)
    public native Object userInfo();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}